Diffstat (limited to 'python')
-rw-r--r--  python/PyECC/MANIFEST.in | 1
-rw-r--r--  python/PyECC/README.md | 29
-rw-r--r--  python/PyECC/ecc/Key.py | 320
-rw-r--r--  python/PyECC/ecc/Rabbit.py | 270
-rw-r--r--  python/PyECC/ecc/SecurityViolationException.py | 2
-rw-r--r--  python/PyECC/ecc/__init__.py | 0
-rw-r--r--  python/PyECC/ecc/curves.py | 81
-rw-r--r--  python/PyECC/ecc/eccrypt.py | 65
-rw-r--r--  python/PyECC/ecc/ecdsa.py | 153
-rw-r--r--  python/PyECC/ecc/elliptic.py | 381
-rw-r--r--  python/PyECC/ecc/encoding.py | 178
-rw-r--r--  python/PyECC/ecc/performance.py | 50
-rw-r--r--  python/PyECC/ecc/primes.py | 82
-rw-r--r--  python/PyECC/ecc/shacrypt.py | 38
-rw-r--r--  python/PyECC/setup.py | 77
-rw-r--r--  python/README | 21
-rw-r--r--  python/altgraph/MANIFEST.in | 9
-rw-r--r--  python/altgraph/PKG-INFO | 216
-rw-r--r--  python/altgraph/README.txt | 6
-rw-r--r--  python/altgraph/altgraph/Dot.py | 299
-rw-r--r--  python/altgraph/altgraph/Graph.py | 677
-rw-r--r--  python/altgraph/altgraph/GraphAlgo.py | 147
-rw-r--r--  python/altgraph/altgraph/GraphStat.py | 73
-rw-r--r--  python/altgraph/altgraph/GraphUtil.py | 137
-rw-r--r--  python/altgraph/altgraph/ObjectGraph.py | 202
-rw-r--r--  python/altgraph/altgraph/__init__.py | 135
-rw-r--r--  python/altgraph/altgraph_tests/__init__.py | 1
-rw-r--r--  python/altgraph/altgraph_tests/test_altgraph.py | 45
-rw-r--r--  python/altgraph/altgraph_tests/test_dot.py | 370
-rw-r--r--  python/altgraph/altgraph_tests/test_graph.py | 644
-rw-r--r--  python/altgraph/altgraph_tests/test_graphstat.py | 70
-rw-r--r--  python/altgraph/altgraph_tests/test_graphutil.py | 140
-rw-r--r--  python/altgraph/altgraph_tests/test_object_graph.py | 349
-rw-r--r--  python/altgraph/doc/Makefile | 89
-rw-r--r--  python/altgraph/doc/_build/doctrees/changelog.doctree | bin 0 -> 31601 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/core.doctree | bin 0 -> 9753 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/dot.doctree | bin 0 -> 51820 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/environment.pickle | bin 0 -> 1265957 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/graph.doctree | bin 0 -> 88126 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/graphalgo.doctree | bin 0 -> 9685 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/graphstat.doctree | bin 0 -> 9392 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/graphutil.doctree | bin 0 -> 20473 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/index.doctree | bin 0 -> 9378 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/license.doctree | bin 0 -> 5203 bytes
-rw-r--r--  python/altgraph/doc/_build/doctrees/objectgraph.doctree | bin 0 -> 45814 bytes
-rw-r--r--  python/altgraph/doc/_build/html/.buildinfo | 4
-rw-r--r--  python/altgraph/doc/_build/html/_sources/changelog.txt | 176
-rw-r--r--  python/altgraph/doc/_build/html/_sources/core.txt | 26
-rw-r--r--  python/altgraph/doc/_build/html/_sources/dot.txt | 224
-rw-r--r--  python/altgraph/doc/_build/html/_sources/graph.txt | 299
-rw-r--r--  python/altgraph/doc/_build/html/_sources/graphalgo.txt | 26
-rw-r--r--  python/altgraph/doc/_build/html/_sources/graphstat.txt | 25
-rw-r--r--  python/altgraph/doc/_build/html/_sources/graphutil.txt | 55
-rw-r--r--  python/altgraph/doc/_build/html/_sources/index.txt | 41
-rw-r--r--  python/altgraph/doc/_build/html/_sources/license.txt | 25
-rw-r--r--  python/altgraph/doc/_build/html/_sources/objectgraph.txt | 134
-rw-r--r--  python/altgraph/doc/_build/html/_static/ajax-loader.gif | bin 0 -> 673 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/basic.css | 537
-rw-r--r--  python/altgraph/doc/_build/html/_static/comment-bright.png | bin 0 -> 3500 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/comment-close.png | bin 0 -> 3578 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/comment.png | bin 0 -> 3445 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/doctools.js | 238
-rw-r--r--  python/altgraph/doc/_build/html/_static/down-pressed.png | bin 0 -> 368 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/down.png | bin 0 -> 363 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/file.png | bin 0 -> 392 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/jquery.js | 2
-rw-r--r--  python/altgraph/doc/_build/html/_static/minus.png | bin 0 -> 199 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/nature.css | 245
-rw-r--r--  python/altgraph/doc/_build/html/_static/plus.png | bin 0 -> 199 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/pygments.css | 62
-rw-r--r--  python/altgraph/doc/_build/html/_static/searchtools.js | 622
-rw-r--r--  python/altgraph/doc/_build/html/_static/underscore.js | 31
-rw-r--r--  python/altgraph/doc/_build/html/_static/up-pressed.png | bin 0 -> 372 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/up.png | bin 0 -> 363 bytes
-rw-r--r--  python/altgraph/doc/_build/html/_static/websupport.js | 808
-rw-r--r--  python/altgraph/doc/_build/html/changelog.html | 271
-rw-r--r--  python/altgraph/doc/_build/html/core.html | 130
-rw-r--r--  python/altgraph/doc/_build/html/dot.html | 332
-rw-r--r--  python/altgraph/doc/_build/html/genindex.html | 604
-rw-r--r--  python/altgraph/doc/_build/html/graph.html | 491
-rw-r--r--  python/altgraph/doc/_build/html/graphalgo.html | 134
-rw-r--r--  python/altgraph/doc/_build/html/graphstat.html | 130
-rw-r--r--  python/altgraph/doc/_build/html/graphutil.html | 162
-rw-r--r--  python/altgraph/doc/_build/html/index.html | 142
-rw-r--r--  python/altgraph/doc/_build/html/license.html | 136
-rw-r--r--  python/altgraph/doc/_build/html/objectgraph.html | 283
-rw-r--r--  python/altgraph/doc/_build/html/objects.inv | bin 0 -> 954 bytes
-rw-r--r--  python/altgraph/doc/_build/html/py-modindex.html | 139
-rw-r--r--  python/altgraph/doc/_build/html/search.html | 105
-rw-r--r--  python/altgraph/doc/_build/html/searchindex.js | 1
-rw-r--r--  python/altgraph/doc/changelog.rst | 185
-rw-r--r--  python/altgraph/doc/conf.py | 209
-rw-r--r--  python/altgraph/doc/core.rst | 26
-rw-r--r--  python/altgraph/doc/dot.rst | 224
-rw-r--r--  python/altgraph/doc/graph.rst | 305
-rw-r--r--  python/altgraph/doc/graphalgo.rst | 26
-rw-r--r--  python/altgraph/doc/graphstat.rst | 25
-rw-r--r--  python/altgraph/doc/graphutil.rst | 55
-rw-r--r--  python/altgraph/doc/index.rst | 41
-rw-r--r--  python/altgraph/doc/license.rst | 25
-rw-r--r--  python/altgraph/doc/objectgraph.rst | 146
-rw-r--r--  python/altgraph/setup.cfg | 36
-rw-r--r--  python/altgraph/setup.py | 867
-rw-r--r--  python/bitstring/PKG-INFO | 122
-rw-r--r--  python/bitstring/README.txt | 99
-rw-r--r--  python/bitstring/bitstring.py | 4234
-rw-r--r--  python/bitstring/doc/bitstring_manual.pdf | bin 0 -> 439327 bytes
-rw-r--r--  python/bitstring/release_notes.txt | 1523
-rw-r--r--  python/bitstring/setup.py | 44
-rw-r--r--  python/bitstring/test/smalltestfile | 1
-rw-r--r--  python/bitstring/test/test.m1v | bin 0 -> 125300 bytes
-rw-r--r--  python/bitstring/test/test_bitarray.py | 310
-rw-r--r--  python/bitstring/test/test_bits.py | 378
-rw-r--r--  python/bitstring/test/test_bitstore.py | 37
-rw-r--r--  python/bitstring/test/test_bitstream.py | 3940
-rw-r--r--  python/bitstring/test/test_bitstring.py | 97
-rw-r--r--  python/bitstring/test/test_constbitstream.py | 121
-rw-r--r--  python/blessings/LICENSE | 19
-rw-r--r--  python/blessings/MANIFEST.in | 3
-rw-r--r--  python/blessings/PKG-INFO | 426
-rw-r--r--  python/blessings/README.rst | 399
-rw-r--r--  python/blessings/blessings/__init__.py | 450
-rw-r--r--  python/blessings/blessings/tests.py | 231
-rw-r--r--  python/blessings/setup.cfg | 5
-rw-r--r--  python/blessings/setup.py | 42
-rw-r--r--  python/blessings/tox.ini | 7
-rw-r--r--  python/compare-locales/compare_locales/__init__.py | 1
-rw-r--r--  python/compare-locales/compare_locales/checks.py | 438
-rw-r--r--  python/compare-locales/compare_locales/commands.py | 154
-rw-r--r--  python/compare-locales/compare_locales/compare.py | 638
-rw-r--r--  python/compare-locales/compare_locales/parser.py | 521
-rw-r--r--  python/compare-locales/compare_locales/paths.py | 398
-rw-r--r--  python/compare-locales/compare_locales/tests/__init__.py | 49
-rw-r--r--  python/compare-locales/compare_locales/tests/data/bug121341.properties | 68
-rw-r--r--  python/compare-locales/compare_locales/tests/data/test.properties | 14
-rw-r--r--  python/compare-locales/compare_locales/tests/data/triple-license.dtd | 38
-rw-r--r--  python/compare-locales/compare_locales/tests/test_checks.py | 403
-rw-r--r--  python/compare-locales/compare_locales/tests/test_compare.py | 90
-rw-r--r--  python/compare-locales/compare_locales/tests/test_dtd.py | 86
-rw-r--r--  python/compare-locales/compare_locales/tests/test_ini.py | 115
-rw-r--r--  python/compare-locales/compare_locales/tests/test_merge.py | 265
-rw-r--r--  python/compare-locales/compare_locales/tests/test_properties.py | 95
-rw-r--r--  python/compare-locales/compare_locales/tests/test_util.py | 29
-rw-r--r--  python/compare-locales/compare_locales/tests/test_webapps.py | 41
-rw-r--r--  python/compare-locales/compare_locales/util.py | 11
-rw-r--r--  python/compare-locales/compare_locales/webapps.py | 235
-rw-r--r--  python/compare-locales/docs/glossary.rst | 26
-rw-r--r--  python/compare-locales/docs/index.rst | 191
-rw-r--r--  python/compare-locales/mach_commands.py | 81
-rw-r--r--  python/compare-locales/moz.build | 16
-rw-r--r--  python/configobj/PKG-INFO | 47
-rw-r--r--  python/configobj/configobj.py | 2468
-rw-r--r--  python/configobj/setup.py | 83
-rw-r--r--  python/configobj/validate.py | 1450
-rw-r--r--  python/devtools/migrate-l10n/README.rst | 16
-rw-r--r--  python/devtools/migrate-l10n/migrate/__init__.py | 0
-rw-r--r--  python/devtools/migrate-l10n/migrate/conf/bug1294186 | 22
-rw-r--r--  python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191 | 97
-rw-r--r--  python/devtools/migrate-l10n/migrate/main.py | 261
-rw-r--r--  python/devtools/migrate-l10n/migrate/tests/__init__.py | 0
-rw-r--r--  python/eme/gen-eme-voucher.py | 633
-rw-r--r--  python/futures/CHANGES | 89
-rw-r--r--  python/futures/LICENSE | 21
-rw-r--r--  python/futures/MANIFEST.in | 5
-rw-r--r--  python/futures/PKG-INFO | 16
-rw-r--r--  python/futures/concurrent/__init__.py | 3
-rw-r--r--  python/futures/concurrent/futures/__init__.py | 23
-rw-r--r--  python/futures/concurrent/futures/_base.py | 605
-rw-r--r--  python/futures/concurrent/futures/process.py | 359
-rw-r--r--  python/futures/concurrent/futures/thread.py | 134
-rw-r--r--  python/futures/crawl.py | 74
-rw-r--r--  python/futures/docs/Makefile | 88
-rw-r--r--  python/futures/docs/conf.py | 194
-rw-r--r--  python/futures/docs/index.rst | 347
-rw-r--r--  python/futures/docs/make.bat | 112
-rw-r--r--  python/futures/primes.py | 50
-rw-r--r--  python/futures/setup.cfg | 12
-rwxr-xr-x  python/futures/setup.py | 27
-rw-r--r--  python/futures/test_futures.py | 724
-rw-r--r--  python/futures/tox.ini | 8
-rw-r--r--  python/gdbpp/gdbpp/__init__.py | 28
-rw-r--r--  python/gdbpp/gdbpp/linkedlist.py | 49
-rw-r--r--  python/gdbpp/gdbpp/owningthread.py | 24
-rw-r--r--  python/gdbpp/gdbpp/smartptr.py | 55
-rw-r--r--  python/gdbpp/gdbpp/string.py | 19
-rw-r--r--  python/gdbpp/gdbpp/tarray.py | 30
-rw-r--r--  python/gdbpp/gdbpp/thashtable.py | 143
-rw-r--r--  python/jsmin/jsmin/__init__.py | 238
-rw-r--r--  python/jsmin/jsmin/test.py | 394
-rw-r--r--  python/jsmin/setup.cfg | 5
-rw-r--r--  python/jsmin/setup.py | 42
-rw-r--r--  python/lldbutils/README.txt | 221
-rw-r--r--  python/lldbutils/lldbutils/__init__.py | 13
-rw-r--r--  python/lldbutils/lldbutils/content.py | 21
-rw-r--r--  python/lldbutils/lldbutils/general.py | 105
-rw-r--r--  python/lldbutils/lldbutils/gfx.py | 130
-rw-r--r--  python/lldbutils/lldbutils/layout.py | 20
-rw-r--r--  python/lldbutils/lldbutils/utils.py | 70
-rw-r--r--  python/mach/README.rst | 13
-rw-r--r--  python/mach/bash-completion.sh | 29
-rw-r--r--  python/mach/docs/commands.rst | 145
-rw-r--r--  python/mach/docs/driver.rst | 51
-rw-r--r--  python/mach/docs/index.rst | 75
-rw-r--r--  python/mach/docs/logging.rst | 100
-rw-r--r--  python/mach/docs/settings.rst | 140
-rw-r--r--  python/mach/mach/__init__.py | 0
-rw-r--r--  python/mach/mach/base.py | 46
-rw-r--r--  python/mach/mach/commands/__init__.py | 0
-rw-r--r--  python/mach/mach/commands/commandinfo.py | 53
-rw-r--r--  python/mach/mach/commands/settings.py | 132
-rw-r--r--  python/mach/mach/config.py | 461
-rw-r--r--  python/mach/mach/decorators.py | 353
-rw-r--r--  python/mach/mach/dispatcher.py | 453
-rw-r--r--  python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo | bin 0 -> 193 bytes
-rw-r--r--  python/mach/mach/locale/en_US/LC_MESSAGES/alias.po | 9
-rw-r--r--  python/mach/mach/logging.py | 256
-rw-r--r--  python/mach/mach/main.py | 594
-rw-r--r--  python/mach/mach/mixin/__init__.py | 0
-rw-r--r--  python/mach/mach/mixin/logging.py | 55
-rw-r--r--  python/mach/mach/mixin/process.py | 175
-rw-r--r--  python/mach/mach/registrar.py | 126
-rw-r--r--  python/mach/mach/terminal.py | 75
-rw-r--r--  python/mach/mach/test/__init__.py | 0
-rw-r--r--  python/mach/mach/test/common.py | 45
-rw-r--r--  python/mach/mach/test/providers/__init__.py | 0
-rw-r--r--  python/mach/mach/test/providers/basic.py | 23
-rw-r--r--  python/mach/mach/test/providers/conditions.py | 53
-rw-r--r--  python/mach/mach/test/providers/conditions_invalid.py | 16
-rw-r--r--  python/mach/mach/test/providers/throw.py | 29
-rw-r--r--  python/mach/mach/test/providers/throw2.py | 13
-rw-r--r--  python/mach/mach/test/test_conditions.py | 83
-rw-r--r--  python/mach/mach/test/test_config.py | 297
-rw-r--r--  python/mach/mach/test/test_dispatcher.py | 61
-rw-r--r--  python/mach/mach/test/test_entry_point.py | 61
-rw-r--r--  python/mach/mach/test/test_error_output.py | 39
-rw-r--r--  python/mach/mach/test/test_logger.py | 47
-rw-r--r--  python/mach/setup.py | 38
-rw-r--r--  python/mach_commands.py | 158
-rw-r--r--  python/macholib/MANIFEST.in | 8
-rw-r--r--  python/macholib/PKG-INFO | 275
-rw-r--r--  python/macholib/README.txt | 8
-rw-r--r--  python/macholib/doc/MachO.rst | 19
-rw-r--r--  python/macholib/doc/MachoOGraph.rst | 14
-rw-r--r--  python/macholib/doc/MachoOStandalone.rst | 13
-rw-r--r--  python/macholib/doc/Makefile | 130
-rw-r--r--  python/macholib/doc/SymbolTable.rst | 24
-rw-r--r--  python/macholib/doc/_build/doctrees/MachO.doctree | bin 0 -> 7229 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/MachoOGraph.doctree | bin 0 -> 6109 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/MachoOStandalone.doctree | bin 0 -> 7099 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/SymbolTable.doctree | bin 0 -> 8138 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/changelog.doctree | bin 0 -> 45133 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/dyld.doctree | bin 0 -> 42349 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/dylib.doctree | bin 0 -> 9129 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/environment.pickle | bin 0 -> 1281784 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/framework.doctree | bin 0 -> 9557 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/index.doctree | bin 0 -> 11612 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/license.doctree | bin 0 -> 4977 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/macho_o.doctree | bin 0 -> 5186 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/ptypes.doctree | bin 0 -> 44118 bytes
-rw-r--r--  python/macholib/doc/_build/doctrees/scripts.doctree | bin 0 -> 5399 bytes
-rw-r--r--  python/macholib/doc/_build/html/.buildinfo | 4
-rw-r--r--  python/macholib/doc/_build/html/MachO.html | 132
-rw-r--r--  python/macholib/doc/_build/html/MachoOGraph.html | 125
-rw-r--r--  python/macholib/doc/_build/html/MachoOStandalone.html | 125
-rw-r--r--  python/macholib/doc/_build/html/SymbolTable.html | 136
-rw-r--r--  python/macholib/doc/_build/html/_sources/MachO.txt | 19
-rw-r--r--  python/macholib/doc/_build/html/_sources/MachoOGraph.txt | 14
-rw-r--r--  python/macholib/doc/_build/html/_sources/MachoOStandalone.txt | 13
-rw-r--r--  python/macholib/doc/_build/html/_sources/SymbolTable.txt | 24
-rw-r--r--  python/macholib/doc/_build/html/_sources/changelog.txt | 242
-rw-r--r--  python/macholib/doc/_build/html/_sources/dyld.txt | 159
-rw-r--r--  python/macholib/doc/_build/html/_sources/dylib.txt | 33
-rw-r--r--  python/macholib/doc/_build/html/_sources/framework.txt | 34
-rw-r--r--  python/macholib/doc/_build/html/_sources/index.txt | 59
-rw-r--r--  python/macholib/doc/_build/html/_sources/license.txt | 23
-rw-r--r--  python/macholib/doc/_build/html/_sources/macho_o.txt | 13
-rw-r--r--  python/macholib/doc/_build/html/_sources/ptypes.txt | 157
-rw-r--r--  python/macholib/doc/_build/html/_sources/scripts.txt | 35
-rw-r--r--  python/macholib/doc/_build/html/_static/ajax-loader.gif | bin 0 -> 673 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/basic.css | 537
-rw-r--r--  python/macholib/doc/_build/html/_static/comment-bright.png | bin 0 -> 3500 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/comment-close.png | bin 0 -> 3578 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/comment.png | bin 0 -> 3445 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/doctools.js | 238
-rw-r--r--  python/macholib/doc/_build/html/_static/down-pressed.png | bin 0 -> 368 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/down.png | bin 0 -> 363 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/file.png | bin 0 -> 392 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/jquery.js | 2
-rw-r--r--  python/macholib/doc/_build/html/_static/minus.png | bin 0 -> 199 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/nature.css | 245
-rw-r--r--  python/macholib/doc/_build/html/_static/plus.png | bin 0 -> 199 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/pygments.css | 62
-rw-r--r--  python/macholib/doc/_build/html/_static/searchtools.js | 622
-rw-r--r--  python/macholib/doc/_build/html/_static/underscore.js | 31
-rw-r--r--  python/macholib/doc/_build/html/_static/up-pressed.png | bin 0 -> 372 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/up.png | bin 0 -> 363 bytes
-rw-r--r--  python/macholib/doc/_build/html/_static/websupport.js | 808
-rw-r--r--  python/macholib/doc/_build/html/changelog.html | 385
-rw-r--r--  python/macholib/doc/_build/html/dyld.html | 267
-rw-r--r--  python/macholib/doc/_build/html/dylib.html | 145
-rw-r--r--  python/macholib/doc/_build/html/framework.html | 147
-rw-r--r--  python/macholib/doc/_build/html/genindex.html | 365
-rw-r--r--  python/macholib/doc/_build/html/index.html | 170
-rw-r--r--  python/macholib/doc/_build/html/license.html | 140
-rw-r--r--  python/macholib/doc/_build/html/macho_o.html | 122
-rw-r--r--  python/macholib/doc/_build/html/objects.inv | bin 0 -> 692 bytes
-rw-r--r--  python/macholib/doc/_build/html/ptypes.html | 317
-rw-r--r--  python/macholib/doc/_build/html/py-modindex.html | 154
-rw-r--r--  python/macholib/doc/_build/html/scripts.html | 156
-rw-r--r--  python/macholib/doc/_build/html/search.html | 105
-rw-r--r--  python/macholib/doc/_build/html/searchindex.js | 1
-rw-r--r--  python/macholib/doc/changelog.rst | 242
-rw-r--r--  python/macholib/doc/conf.py | 275
-rw-r--r--  python/macholib/doc/dyld.rst | 159
-rw-r--r--  python/macholib/doc/dylib.rst | 33
-rw-r--r--  python/macholib/doc/framework.rst | 34
-rw-r--r--  python/macholib/doc/index.rst | 59
-rw-r--r--  python/macholib/doc/license.rst | 23
-rw-r--r--  python/macholib/doc/macho_o.rst | 13
-rw-r--r--  python/macholib/doc/ptypes.rst | 157
-rw-r--r--  python/macholib/doc/scripts.rst | 35
-rw-r--r--  python/macholib/macholib/MachO.py | 398
-rw-r--r--  python/macholib/macholib/MachOGraph.py | 131
-rw-r--r--  python/macholib/macholib/MachOStandalone.py | 147
-rw-r--r--  python/macholib/macholib/SymbolTable.py | 95
-rw-r--r--  python/macholib/macholib/__init__.py | 10
-rw-r--r--  python/macholib/macholib/__main__.py | 73
-rw-r--r--  python/macholib/macholib/_cmdline.py | 44
-rw-r--r--  python/macholib/macholib/dyld.py | 176
-rw-r--r--  python/macholib/macholib/dylib.py | 42
-rw-r--r--  python/macholib/macholib/framework.py | 42
-rw-r--r--  python/macholib/macholib/itergraphreport.py | 73
-rw-r--r--  python/macholib/macholib/mach_o.py | 1311
-rw-r--r--  python/macholib/macholib/macho_dump.py | 48
-rw-r--r--  python/macholib/macholib/macho_find.py | 17
-rw-r--r--  python/macholib/macholib/macho_standalone.py | 26
-rw-r--r--  python/macholib/macholib/ptypes.py | 290
-rw-r--r--  python/macholib/macholib/util.py | 245
-rw-r--r--  python/macholib/macholib_tests/__init__.py | 1
-rw-r--r--  python/macholib/macholib_tests/binaries/src/build.py | 22
-rw-r--r--  python/macholib/macholib_tests/test_MachO.py | 15
-rw-r--r--  python/macholib/macholib_tests/test_MachOGraph.py | 15
-rw-r--r--  python/macholib/macholib_tests/test_MachOStandalone.py | 15
-rw-r--r--  python/macholib/macholib_tests/test_SymbolTable.py | 15
-rw-r--r--  python/macholib/macholib_tests/test_command_line.py | 147
-rw-r--r--  python/macholib/macholib_tests/test_dyld.py | 450
-rw-r--r--  python/macholib/macholib_tests/test_dylib.py | 38
-rw-r--r--  python/macholib/macholib_tests/test_framework.py | 88
-rw-r--r--  python/macholib/macholib_tests/test_itergraphreport.py | 15
-rw-r--r--  python/macholib/macholib_tests/test_mach_o.py | 21
-rw-r--r--  python/macholib/macholib_tests/test_ptypes.py | 191
-rw-r--r--  python/macholib/setup.cfg | 42
-rw-r--r--  python/macholib/setup.py | 867
-rw-r--r--  python/mock-1.0.0/LICENSE.txt | 26
-rw-r--r--  python/mock-1.0.0/MANIFEST.in | 2
-rw-r--r--  python/mock-1.0.0/PKG-INFO | 208
-rw-r--r--  python/mock-1.0.0/README.txt | 177
-rw-r--r--  python/mock-1.0.0/docs/changelog.txt | 725
-rw-r--r--  python/mock-1.0.0/docs/compare.txt | 628
-rw-r--r--  python/mock-1.0.0/docs/conf.py | 209
-rw-r--r--  python/mock-1.0.0/docs/examples.txt | 1063
-rw-r--r--  python/mock-1.0.0/docs/getting-started.txt | 479
-rw-r--r--  python/mock-1.0.0/docs/helpers.txt | 583
-rw-r--r--  python/mock-1.0.0/docs/index.txt | 411
-rw-r--r--  python/mock-1.0.0/docs/magicmock.txt | 258
-rw-r--r--  python/mock-1.0.0/docs/mock.txt | 842
-rw-r--r--  python/mock-1.0.0/docs/patch.txt | 636
-rw-r--r--  python/mock-1.0.0/docs/sentinel.txt | 58
-rw-r--r--  python/mock-1.0.0/html/.doctrees/changelog.doctree | bin 0 -> 282659 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/compare.doctree | bin 0 -> 56915 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/examples.doctree | bin 0 -> 167478 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/getting-started.doctree | bin 0 -> 70942 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/index.doctree | bin 0 -> 98784 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/magicmock.doctree | bin 0 -> 75713 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/mock.doctree | bin 0 -> 152111 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/mocksignature.doctree | bin 0 -> 42324 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/patch.doctree | bin 0 -> 123511 bytes
-rw-r--r--  python/mock-1.0.0/html/.doctrees/sentinel.doctree | bin 0 -> 10632 bytes
-rw-r--r--  python/mock-1.0.0/html/_sources/changelog.txt | 725
-rw-r--r--  python/mock-1.0.0/html/_sources/compare.txt | 628
-rw-r--r--  python/mock-1.0.0/html/_sources/examples.txt | 1063
-rw-r--r--  python/mock-1.0.0/html/_sources/getting-started.txt | 479
-rw-r--r--  python/mock-1.0.0/html/_sources/index.txt | 411
-rw-r--r--  python/mock-1.0.0/html/_sources/magicmock.txt | 258
-rw-r--r--  python/mock-1.0.0/html/_sources/mock.txt | 842
-rw-r--r--  python/mock-1.0.0/html/_sources/mocksignature.txt | 262
-rw-r--r--  python/mock-1.0.0/html/_sources/patch.txt | 636
-rw-r--r--  python/mock-1.0.0/html/_sources/sentinel.txt | 58
-rw-r--r--  python/mock-1.0.0/html/_static/adctheme.css | 757
-rw-r--r--  python/mock-1.0.0/html/_static/basic.css | 540
-rw-r--r--  python/mock-1.0.0/html/_static/breadcrumb_background.png | bin 0 -> 136 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/default.css | 256
-rw-r--r--  python/mock-1.0.0/html/_static/doctools.js | 247
-rw-r--r--  python/mock-1.0.0/html/_static/documentation.png | bin 0 -> 412 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/file.png | bin 0 -> 392 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/header_sm_mid.png | bin 0 -> 159 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/jquery.js | 154
-rw-r--r--  python/mock-1.0.0/html/_static/minus.png | bin 0 -> 199 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/mobile.css | 17
-rw-r--r--  python/mock-1.0.0/html/_static/plus.png | bin 0 -> 199 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/pygments.css | 62
-rw-r--r--  python/mock-1.0.0/html/_static/scrn1.png | bin 0 -> 108046 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/scrn2.png | bin 0 -> 121395 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/searchfield_leftcap.png | bin 0 -> 855 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/searchfield_repeat.png | bin 0 -> 158 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/searchfield_rightcap.png | bin 0 -> 530 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/searchtools.js | 560
-rw-r--r--  python/mock-1.0.0/html/_static/sidebar.js | 148
-rw-r--r--  python/mock-1.0.0/html/_static/title_background.png | bin 0 -> 132 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/toc.js | 20
-rw-r--r--  python/mock-1.0.0/html/_static/triangle_closed.png | bin 0 -> 181 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/triangle_left.png | bin 0 -> 195 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/triangle_open.png | bin 0 -> 191 bytes
-rw-r--r--  python/mock-1.0.0/html/_static/underscore.js | 23
-rw-r--r--  python/mock-1.0.0/html/changelog.html | 839
-rw-r--r--  python/mock-1.0.0/html/compare.html | 672
-rw-r--r--  python/mock-1.0.0/html/examples.html | 1006
-rw-r--r--  python/mock-1.0.0/html/genindex.html | 479
-rw-r--r--  python/mock-1.0.0/html/getting-started.html | 510
-rw-r--r--  python/mock-1.0.0/html/index.html | 529
-rw-r--r--  python/mock-1.0.0/html/magicmock.html | 347
-rw-r--r--  python/mock-1.0.0/html/mock.html | 875
-rw-r--r--  python/mock-1.0.0/html/mocksignature.html | 352
-rw-r--r--  python/mock-1.0.0/html/objects.inv | bin 0 -> 711 bytes
-rw-r--r--  python/mock-1.0.0/html/output.txt | 126
-rw-r--r--  python/mock-1.0.0/html/patch.html | 648
-rw-r--r--  python/mock-1.0.0/html/search.html | 99
-rw-r--r--  python/mock-1.0.0/html/searchindex.js | 1
-rw-r--r--  python/mock-1.0.0/html/sentinel.html | 156
-rw-r--r--  python/mock-1.0.0/mock.py | 2356
-rw-r--r--  python/mock-1.0.0/setup.cfg | 12
-rwxr-xr-x  python/mock-1.0.0/setup.py | 72
-rw-r--r--  python/mock-1.0.0/tests/__init__.py | 3
-rw-r--r--  python/mock-1.0.0/tests/_testwith.py | 181
-rw-r--r--  python/mock-1.0.0/tests/support.py | 41
-rw-r--r--  python/mock-1.0.0/tests/support_with.py | 93
-rw-r--r--  python/mock-1.0.0/tests/testcallable.py | 158
-rw-r--r--  python/mock-1.0.0/tests/testhelpers.py | 940
-rw-r--r--  python/mock-1.0.0/tests/testmagicmethods.py | 486
-rw-r--r--  python/mock-1.0.0/tests/testmock.py | 1351
-rw-r--r--  python/mock-1.0.0/tests/testpatch.py | 1790
-rw-r--r--  python/mock-1.0.0/tests/testsentinel.py | 33
-rw-r--r--  python/mock-1.0.0/tests/testwith.py | 16
-rw-r--r--  python/mock-1.0.0/tox.ini | 40
-rw-r--r--  python/moz.build | 88
-rw-r--r--  python/mozboot/README.rst | 19
-rw-r--r--  python/mozboot/bin/bootstrap-msys2.vbs | 116
-rwxr-xr-x  python/mozboot/bin/bootstrap.py | 170
-rw-r--r--  python/mozboot/mozboot/__init__.py | 0
-rw-r--r--  python/mozboot/mozboot/android.py | 270
-rw-r--r--  python/mozboot/mozboot/archlinux.py | 223
-rw-r--r--  python/mozboot/mozboot/base.py | 452
-rw-r--r--  python/mozboot/mozboot/bootstrap.py | 437
-rw-r--r--  python/mozboot/mozboot/centosfedora.py | 153
-rw-r--r--  python/mozboot/mozboot/debian.py | 188
-rw-r--r--  python/mozboot/mozboot/freebsd.py | 63
-rw-r--r--  python/mozboot/mozboot/gentoo.py | 33
-rw-r--r--  python/mozboot/mozboot/mach_commands.py | 67
-rw-r--r--  python/mozboot/mozboot/mozillabuild.py | 77
-rw-r--r--  python/mozboot/mozboot/openbsd.py | 45
-rw-r--r--  python/mozboot/mozboot/osx.py | 577
-rw-r--r--  python/mozboot/mozboot/util.py | 20
-rw-r--r--  python/mozboot/mozboot/windows.py | 95
-rw-r--r--  python/mozboot/setup.py | 16
-rwxr-xr-x  python/mozboot/support/ConEmu.xml | 897
-rw-r--r--  python/mozbuild/TODO | 3
-rw-r--r--  python/mozbuild/dumbmake/__init__.py | 0
-rw-r--r--  python/mozbuild/dumbmake/dumbmake.py | 122
-rw-r--r--  python/mozbuild/dumbmake/test/__init__.py | 0
-rw-r--r--  python/mozbuild/dumbmake/test/test_dumbmake.py | 106
-rw-r--r--  python/mozbuild/mozbuild/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/action/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/action/buildlist.py | 52
-rw-r--r--  python/mozbuild/mozbuild/action/cl.py | 124
-rw-r--r--  python/mozbuild/mozbuild/action/dump_env.py | 10
-rw-r--r--  python/mozbuild/mozbuild/action/explode_aar.py | 72
-rw-r--r--  python/mozbuild/mozbuild/action/file_generate.py | 108
-rw-r--r--  python/mozbuild/mozbuild/action/generate_browsersearch.py | 131
-rw-r--r--  python/mozbuild/mozbuild/action/generate_searchjson.py | 23
-rw-r--r--  python/mozbuild/mozbuild/action/generate_suggestedsites.py | 147
-rw-r--r--  python/mozbuild/mozbuild/action/generate_symbols_file.py | 91
-rw-r--r--  python/mozbuild/mozbuild/action/jar_maker.py | 17
-rw-r--r--  python/mozbuild/mozbuild/action/make_dmg.py | 37
-rw-r--r--  python/mozbuild/mozbuild/action/output_searchplugins_list.py | 21
-rw-r--r--  python/mozbuild/mozbuild/action/package_fennec_apk.py | 150
-rw-r--r--  python/mozbuild/mozbuild/action/preprocessor.py | 18
-rw-r--r--  python/mozbuild/mozbuild/action/process_define_files.py | 94
-rw-r--r--  python/mozbuild/mozbuild/action/process_install_manifest.py | 120
-rw-r--r--  python/mozbuild/mozbuild/action/test_archive.py | 565
-rw-r--r--  python/mozbuild/mozbuild/action/webidl.py | 19
-rw-r--r--  python/mozbuild/mozbuild/action/xpccheck.py | 83
-rwxr-xr-x  python/mozbuild/mozbuild/action/xpidl-process.py | 94
-rw-r--r--  python/mozbuild/mozbuild/action/zip.py | 39
-rw-r--r--  python/mozbuild/mozbuild/android_version_code.py | 167
-rw-r--r--  python/mozbuild/mozbuild/artifacts.py | 1089
-rw-r--r--  python/mozbuild/mozbuild/backend/__init__.py | 26
-rw-r--r--  python/mozbuild/mozbuild/backend/android_eclipse.py | 267
-rw-r--r--  python/mozbuild/mozbuild/backend/base.py | 317
-rw-r--r--  python/mozbuild/mozbuild/backend/common.py | 567
-rw-r--r--  python/mozbuild/mozbuild/backend/configenvironment.py | 199
-rw-r--r--  python/mozbuild/mozbuild/backend/cpp_eclipse.py | 698
-rw-r--r--  python/mozbuild/mozbuild/backend/fastermake.py | 165
-rw-r--r--  python/mozbuild/mozbuild/backend/mach_commands.py | 132
-rw-r--r--  python/mozbuild/mozbuild/backend/recursivemake.py | 1513
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/.classpath | 10
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ApkBuilder.launch | 8
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.PreCompilerBuilder.launch | 8
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ResourceManagerBuilder.launch | 8
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/org.eclipse.jdt.core.javabuilder.launch | 8
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/AndroidManifest.xml | 11
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/gen/tmp | 1
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/lint.xml | 5
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse/project.properties | 14
-rw-r--r--  python/mozbuild/mozbuild/backend/templates/android_eclipse_empty_resource_directory/.not_an_android_resource | 5
-rw-r--r--  python/mozbuild/mozbuild/backend/tup.py | 344
-rw-r--r--  python/mozbuild/mozbuild/backend/visualstudio.py | 582
-rw-r--r--  python/mozbuild/mozbuild/base.py | 850
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/chrome_map.py | 105
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/packager.py | 43
-rw-r--r--  python/mozbuild/mozbuild/compilation/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/compilation/codecomplete.py | 63
-rw-r--r--  python/mozbuild/mozbuild/compilation/database.py | 252
-rw-r--r--  python/mozbuild/mozbuild/compilation/util.py | 54
-rw-r--r--  python/mozbuild/mozbuild/compilation/warnings.py | 376
-rw-r--r--  python/mozbuild/mozbuild/config_status.py | 182
-rw-r--r--  python/mozbuild/mozbuild/configure/__init__.py | 935
-rw-r--r--  python/mozbuild/mozbuild/configure/check_debug_ranges.py | 62
-rw-r--r--  python/mozbuild/mozbuild/configure/constants.py | 103
-rw-r--r--  python/mozbuild/mozbuild/configure/help.py | 45
-rw-r--r--  python/mozbuild/mozbuild/configure/libstdcxx.py | 81
-rw-r--r--  python/mozbuild/mozbuild/configure/lint.py | 78
-rw-r--r--  python/mozbuild/mozbuild/configure/lint_util.py | 52
-rw-r--r--  python/mozbuild/mozbuild/configure/options.py | 485
-rw-r--r--  python/mozbuild/mozbuild/configure/util.py | 226
-rw-r--r--  python/mozbuild/mozbuild/controller/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/controller/building.py | 680
-rw-r--r--  python/mozbuild/mozbuild/controller/clobber.py | 237
-rw-r--r--  python/mozbuild/mozbuild/doctor.py | 293
-rw-r--r--  python/mozbuild/mozbuild/dotproperties.py | 83
-rw-r--r--  python/mozbuild/mozbuild/frontend/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/frontend/context.py | 2292
-rw-r--r--  python/mozbuild/mozbuild/frontend/data.py | 1113
-rw-r--r--  python/mozbuild/mozbuild/frontend/emitter.py | 1416
-rw-r--r--  python/mozbuild/mozbuild/frontend/gyp_reader.py | 248
-rw-r--r--  python/mozbuild/mozbuild/frontend/mach_commands.py | 218
-rw-r--r--  python/mozbuild/mozbuild/frontend/reader.py | 1408
-rw-r--r--  python/mozbuild/mozbuild/frontend/sandbox.py | 308
-rw-r--r--  python/mozbuild/mozbuild/html_build_viewer.py | 120
-rw-r--r--  python/mozbuild/mozbuild/jar.py | 597
-rw-r--r--  python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo | bin 0 -> 301 bytes
-rw-r--r--  python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po | 8
-rw-r--r--  python/mozbuild/mozbuild/mach_commands.py | 1603
-rw-r--r--  python/mozbuild/mozbuild/makeutil.py | 186
-rw-r--r--  python/mozbuild/mozbuild/milestone.py | 75
-rw-r--r--  python/mozbuild/mozbuild/mozconfig.py | 485
-rwxr-xr-x  python/mozbuild/mozbuild/mozconfig_loader | 80
-rwxr-xr-x  python/mozbuild/mozbuild/mozinfo.py | 160
-rw-r--r--  python/mozbuild/mozbuild/preprocessor.py | 805
-rw-r--r--  python/mozbuild/mozbuild/pythonutil.py | 25
-rw-r--r--  python/mozbuild/mozbuild/resources/html-build-viewer/index.html | 475
-rw-r--r--  python/mozbuild/mozbuild/shellutil.py | 209
-rw-r--r--  python/mozbuild/mozbuild/sphinx.py | 200
-rw-r--r--  python/mozbuild/mozbuild/test/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/invalid/region.properties | 12
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/assets/asset.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/classes.dex | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1.ap_ | bin 0 -> 503 bytes
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/res/res.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/resources.arsc | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2.apk | bin 0 -> 1649 bytes
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/asset.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/omni.ja | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/classes.dex | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/lib/lib.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/res/res.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/resources.arsc | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/root_file.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/lib/lib.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/omni.ja | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/package_fennec_apk/root_file.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/action/data/valid-zh-CN/region.properties | 37
-rw-r--r--  python/mozbuild/mozbuild/test/action/test_buildlist.py | 89
-rw-r--r--  python/mozbuild/mozbuild/test/action/test_generate_browsersearch.py | 55
-rw-r--r--  python/mozbuild/mozbuild/test/action/test_package_fennec_apk.py | 70
-rw-r--r--  python/mozbuild/mozbuild/test/backend/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/common.py | 156
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/library1/resources/values/strings.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main1/AndroidManifest.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/AndroidManifest.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/assets/dummy.txt | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/extra.jar | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/res/values/strings.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/AndroidManifest.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/a/A.java | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/b/B.java | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/c/C.java | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/main4 | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/moz.build | 37
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/moz.build | 13
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/submain/AndroidManifest.xml | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/binary-components/bar/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/binary-components/foo/moz.build | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/binary-components/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/branding-files/bar.ico | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/branding-files/foo.ico | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/branding-files/moz.build | 12
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/branding-files/sub/quux.png | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/app/moz.build | 54
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/bar.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/bar.js | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/bar.jsm | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/baz.ini | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/baz.jsm | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/components.manifest | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/foo.css | 2
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/foo.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/foo.js | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/foo.jsm | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/jar.mn | 11
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/moz.build | 68
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/prefs.js | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/qux.ini | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/qux.jsm | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/resource | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/resource2 | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/defines/moz.build | 14
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/dist-files/main.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build | 12
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/dom1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/dom2.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/gfx.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/exports/pprio.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/final_target/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build | 12
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build | 14
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/foo.res | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/resources/test.manifest | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sdk-files/bar.ico | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sdk-files/foo.ico | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sdk-files/moz.build | 11
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sdk-files/sub/quux.png | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/bar.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/bar.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/bar.s | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/baz.S | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/foo.S | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/foo.asm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/foo.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/foo.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/sources/moz.build | 21
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/stub0/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini | 6
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini | 8
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini | 3
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini | 3
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini | 4
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test_config/file.in | 3
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/test_config/moz.build | 3
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build | 23
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in | 0
-rw-r--r--  python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_android_eclipse.py | 153
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_build.py | 233
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_configenvironment.py | 63
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_recursivemake.py | 942
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_visualstudio.py | 64
-rw-r--r--  python/mozbuild/mozbuild/test/common.py | 50
-rw-r--r--  python/mozbuild/mozbuild/test/compilation/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/compilation/test_warnings.py | 241
-rw-r--r--  python/mozbuild/mozbuild/test/configure/common.py | 279
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/decorators.configure | 44
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/empty_mozconfig | 0
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/extra.configure | 13
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure | 32
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure | 24
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure | 31
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure | 34
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure | 24
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure | 24
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/included.configure | 53
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/moz.configure | 174
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/set_config.configure | 43
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/set_define.configure | 43
-rw-r--r--  python/mozbuild/mozbuild/test/configure/data/subprocess.configure | 23
-rw-r--r--  python/mozbuild/mozbuild/test/configure/lint.py | 65
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_checks_configure.py | 940
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_compile_checks.py | 403
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_configure.py | 1273
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_lint.py | 132
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_moz_configure.py | 93
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_options.py | 852
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py | 1271
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py | 437
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py | 67
-rw-r--r--  python/mozbuild/mozbuild/test/configure/test_util.py | 558
-rw-r--r--  python/mozbuild/mozbuild/test/controller/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/controller/test_ccachestats.py | 208
-rw-r--r--  python/mozbuild/mozbuild/test/controller/test_clobber.py | 213
-rw-r--r--  python/mozbuild/mozbuild/test/data/Makefile | 0
-rw-r--r--  python/mozbuild/mozbuild/test/data/bad.properties | 12
-rw-r--r--  python/mozbuild/mozbuild/test/data/test-dir/Makefile | 0
-rw-r--r--  python/mozbuild/mozbuild/test/data/test-dir/with/Makefile | 0
-rw-r--r--  python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile | 0
-rw-r--r--  python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile | 0
-rw-r--r--  python/mozbuild/mozbuild/test/data/valid.properties | 11
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/dir1/foo | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/binary-components/bar/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/binary-components/foo/moz.build | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/binary-components/moz.build | 10
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build | 13
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml | 18
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build | 18
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml | 6
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml | 9
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/defines/moz.build | 14
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/bar.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/baz.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/foo.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/mem.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/moz.build | 13
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build | 3
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/module.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/moz.build | 6
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest-stylo.list | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest.list | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1-ref.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/test_default_mod.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/xpcshell.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/moz.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/base.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/browser.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/test_mod.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/moz.build | 22
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/module.jsm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/moz.build | 3
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/mochitest.ini | 2
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/moz.build | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_general.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_specific.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/moz.build | 15
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/bar.jsm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/submodule/foo.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/mochitest.ini | 3
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_bar.js | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_simple.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_specific.html | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/xpcshell.ini | 1
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build | 13
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build | 9
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build | 8
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build | 37
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-defines/moz.build | 14
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm | 0
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build | 25
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build | 7
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build | 4
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build | 5
-rw-r--r--  python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build | 4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build27
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/Cargo.toml12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/Cargo.toml14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sdk-files/bar.ico0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sdk-files/baz.png0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sdk-files/foo.xpm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sdk-files/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sdk-files/quux.icns0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build27
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/b.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/e.m0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/f.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/g.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/h.s0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/i.asm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/moz.build37
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild21
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest-stylo.list3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/subdir.ini5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/test_foo.html1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest-stylo.list2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-python-unit-test-missing/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build13
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build28
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build28
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/use-yasm/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build25
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_context.py721
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_emitter.py1172
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_namespaces.py207
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_reader.py485
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_sandbox.py534
-rw-r--r--python/mozbuild/mozbuild/test/test_android_version_code.py63
-rw-r--r--python/mozbuild/mozbuild/test/test_base.py410
-rw-r--r--python/mozbuild/mozbuild/test/test_containers.py224
-rw-r--r--python/mozbuild/mozbuild/test/test_dotproperties.py178
-rw-r--r--python/mozbuild/mozbuild/test/test_expression.py82
-rw-r--r--python/mozbuild/mozbuild/test/test_jarmaker.py367
-rw-r--r--python/mozbuild/mozbuild/test/test_line_endings.py46
-rw-r--r--python/mozbuild/mozbuild/test/test_makeutil.py165
-rw-r--r--python/mozbuild/mozbuild/test/test_mozconfig.py489
-rwxr-xr-xpython/mozbuild/mozbuild/test/test_mozinfo.py278
-rw-r--r--python/mozbuild/mozbuild/test/test_preprocessor.py646
-rw-r--r--python/mozbuild/mozbuild/test/test_pythonutil.py23
-rw-r--r--python/mozbuild/mozbuild/test/test_testing.py332
-rw-r--r--python/mozbuild/mozbuild/test/test_util.py924
-rw-r--r--python/mozbuild/mozbuild/testing.py535
-rw-r--r--python/mozbuild/mozbuild/util.py1264
-rw-r--r--python/mozbuild/mozbuild/vendor_rust.py86
-rw-r--r--python/mozbuild/mozbuild/virtualenv.py568
-rw-r--r--python/mozbuild/mozpack/__init__.py0
-rw-r--r--python/mozbuild/mozpack/archive.py107
-rw-r--r--python/mozbuild/mozpack/chrome/__init__.py0
-rw-r--r--python/mozbuild/mozpack/chrome/flags.py258
-rw-r--r--python/mozbuild/mozpack/chrome/manifest.py368
-rw-r--r--python/mozbuild/mozpack/copier.py568
-rw-r--r--python/mozbuild/mozpack/dmg.py121
-rw-r--r--python/mozbuild/mozpack/errors.py139
-rw-r--r--python/mozbuild/mozpack/executables.py124
-rw-r--r--python/mozbuild/mozpack/files.py1106
-rw-r--r--python/mozbuild/mozpack/hg.py95
-rw-r--r--python/mozbuild/mozpack/manifests.py419
-rw-r--r--python/mozbuild/mozpack/mozjar.py816
-rw-r--r--python/mozbuild/mozpack/packager/__init__.py408
-rw-r--r--python/mozbuild/mozpack/packager/formats.py324
-rw-r--r--python/mozbuild/mozpack/packager/l10n.py259
-rw-r--r--python/mozbuild/mozpack/packager/unpack.py202
-rw-r--r--python/mozbuild/mozpack/path.py136
-rw-r--r--python/mozbuild/mozpack/test/__init__.py0
-rw-r--r--python/mozbuild/mozpack/test/data/test_data1
-rw-r--r--python/mozbuild/mozpack/test/support/minify_js_verify.py17
-rw-r--r--python/mozbuild/mozpack/test/test_archive.py190
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_flags.py148
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_manifest.py149
-rw-r--r--python/mozbuild/mozpack/test/test_copier.py529
-rw-r--r--python/mozbuild/mozpack/test/test_errors.py93
-rw-r--r--python/mozbuild/mozpack/test/test_files.py1160
-rw-r--r--python/mozbuild/mozpack/test/test_manifests.py375
-rw-r--r--python/mozbuild/mozpack/test/test_mozjar.py342
-rw-r--r--python/mozbuild/mozpack/test/test_packager.py490
-rw-r--r--python/mozbuild/mozpack/test/test_packager_formats.py428
-rw-r--r--python/mozbuild/mozpack/test/test_packager_l10n.py126
-rw-r--r--python/mozbuild/mozpack/test/test_packager_unpack.py65
-rw-r--r--python/mozbuild/mozpack/test/test_path.py143
-rw-r--r--python/mozbuild/mozpack/test/test_unify.py199
-rw-r--r--python/mozbuild/mozpack/unify.py231
-rw-r--r--python/mozbuild/setup.py29
-rw-r--r--python/mozlint/mozlint/__init__.py7
-rw-r--r--python/mozlint/mozlint/cli.py115
-rw-r--r--python/mozlint/mozlint/errors.py25
-rw-r--r--python/mozlint/mozlint/formatters/__init__.py25
-rw-r--r--python/mozlint/mozlint/formatters/stylish.py122
-rw-r--r--python/mozlint/mozlint/formatters/treeherder.py31
-rw-r--r--python/mozlint/mozlint/parser.py85
-rw-r--r--python/mozlint/mozlint/pathutils.py156
-rw-r--r--python/mozlint/mozlint/result.py88
-rw-r--r--python/mozlint/mozlint/roller.py154
-rw-r--r--python/mozlint/mozlint/types.py142
-rw-r--r--python/mozlint/mozlint/vcs.py62
-rw-r--r--python/mozlint/setup.py26
-rw-r--r--python/mozlint/test/__init__.py0
-rw-r--r--python/mozlint/test/conftest.py42
-rw-r--r--python/mozlint/test/files/foobar.js2
-rw-r--r--python/mozlint/test/files/foobar.py2
-rw-r--r--python/mozlint/test/files/no_foobar.js2
-rw-r--r--python/mozlint/test/linters/badreturncode.lint21
-rw-r--r--python/mozlint/test/linters/explicit_path.lint13
-rw-r--r--python/mozlint/test/linters/external.lint30
-rw-r--r--python/mozlint/test/linters/invalid_exclude.lint10
-rw-r--r--python/mozlint/test/linters/invalid_extension.lnt9
-rw-r--r--python/mozlint/test/linters/invalid_include.lint10
-rw-r--r--python/mozlint/test/linters/invalid_type.lint9
-rw-r--r--python/mozlint/test/linters/missing_attrs.lint7
-rw-r--r--python/mozlint/test/linters/missing_definition.lint4
-rw-r--r--python/mozlint/test/linters/raises.lint19
-rw-r--r--python/mozlint/test/linters/regex.lint15
-rw-r--r--python/mozlint/test/linters/string.lint15
-rw-r--r--python/mozlint/test/linters/structured.lint28
-rw-r--r--python/mozlint/test/test_formatters.py90
-rw-r--r--python/mozlint/test/test_parser.py55
-rw-r--r--python/mozlint/test/test_roller.py82
-rw-r--r--python/mozlint/test/test_types.py50
-rw-r--r--python/mozversioncontrol/mozversioncontrol/__init__.py144
-rw-r--r--python/mozversioncontrol/mozversioncontrol/repoupdate.py40
-rw-r--r--python/psutil/CREDITS310
-rw-r--r--python/psutil/HISTORY.rst1018
-rw-r--r--python/psutil/INSTALL.rst116
-rw-r--r--python/psutil/LICENSE27
-rw-r--r--python/psutil/MANIFEST.in22
-rw-r--r--python/psutil/Makefile122
-rw-r--r--python/psutil/PKG-INFO434
-rw-r--r--python/psutil/README.rst386
-rw-r--r--python/psutil/TODO167
-rw-r--r--python/psutil/docs/Makefile177
-rw-r--r--python/psutil/docs/README15
-rw-r--r--python/psutil/docs/_static/copybutton.js57
-rw-r--r--python/psutil/docs/_static/favicon.icobin0 -> 15086 bytes
-rw-r--r--python/psutil/docs/_static/logo.pngbin0 -> 4922 bytes
-rw-r--r--python/psutil/docs/_static/sidebar.js161
-rw-r--r--python/psutil/docs/_template/globaltoc.html12
-rw-r--r--python/psutil/docs/_template/indexcontent.html4
-rw-r--r--python/psutil/docs/_template/indexsidebar.html8
-rw-r--r--python/psutil/docs/_template/page.html66
-rw-r--r--python/psutil/docs/_themes/pydoctheme/static/pydoctheme.css187
-rw-r--r--python/psutil/docs/_themes/pydoctheme/theme.conf23
-rw-r--r--python/psutil/docs/conf.py248
-rw-r--r--python/psutil/docs/index.rst1400
-rw-r--r--python/psutil/docs/make.bat242
-rw-r--r--python/psutil/docs/xxx11
-rwxr-xr-xpython/psutil/examples/disk_usage.py62
-rwxr-xr-xpython/psutil/examples/free.py41
-rw-r--r--python/psutil/examples/ifconfig.py78
-rwxr-xr-xpython/psutil/examples/iotop.py179
-rwxr-xr-xpython/psutil/examples/killall.py32
-rwxr-xr-xpython/psutil/examples/meminfo.py68
-rwxr-xr-xpython/psutil/examples/netstat.py64
-rwxr-xr-xpython/psutil/examples/nettop.py165
-rwxr-xr-xpython/psutil/examples/pidof.py53
-rwxr-xr-xpython/psutil/examples/pmap.py57
-rwxr-xr-xpython/psutil/examples/process_detail.py167
-rw-r--r--python/psutil/examples/ps.py81
-rw-r--r--python/psutil/examples/pstree.py71
-rwxr-xr-xpython/psutil/examples/top.py233
-rwxr-xr-xpython/psutil/examples/who.py33
-rw-r--r--python/psutil/make.bat201
-rw-r--r--python/psutil/psutil/__init__.py1887
-rw-r--r--python/psutil/psutil/_common.py246
-rw-r--r--python/psutil/psutil/_compat.py189
-rw-r--r--python/psutil/psutil/_psbsd.py455
-rw-r--r--python/psutil/psutil/_pslinux.py1206
-rw-r--r--python/psutil/psutil/_psosx.py363
-rw-r--r--python/psutil/psutil/_psposix.py156
-rw-r--r--python/psutil/psutil/_pssunos.py553
-rw-r--r--python/psutil/psutil/_psutil_bsd.c2296
-rw-r--r--python/psutil/psutil/_psutil_bsd.h53
-rw-r--r--python/psutil/psutil/_psutil_common.c37
-rw-r--r--python/psutil/psutil/_psutil_common.h10
-rw-r--r--python/psutil/psutil/_psutil_linux.c689
-rw-r--r--python/psutil/psutil/_psutil_linux.h21
-rw-r--r--python/psutil/psutil/_psutil_osx.c1808
-rw-r--r--python/psutil/psutil/_psutil_osx.h41
-rw-r--r--python/psutil/psutil/_psutil_posix.c531
-rw-r--r--python/psutil/psutil/_psutil_posix.h15
-rw-r--r--python/psutil/psutil/_psutil_sunos.c1389
-rw-r--r--python/psutil/psutil/_psutil_sunos.h28
-rw-r--r--python/psutil/psutil/_psutil_windows.c3405
-rw-r--r--python/psutil/psutil/_psutil_windows.h68
-rw-r--r--python/psutil/psutil/_pswindows.py548
-rw-r--r--python/psutil/psutil/arch/bsd/process_info.c265
-rw-r--r--python/psutil/psutil/arch/bsd/process_info.h15
-rw-r--r--python/psutil/psutil/arch/osx/process_info.c281
-rw-r--r--python/psutil/psutil/arch/osx/process_info.h16
-rw-r--r--python/psutil/psutil/arch/windows/glpi.h41
-rw-r--r--python/psutil/psutil/arch/windows/inet_ntop.c41
-rw-r--r--python/psutil/psutil/arch/windows/inet_ntop.h10
-rw-r--r--python/psutil/psutil/arch/windows/ntextapi.h228
-rw-r--r--python/psutil/psutil/arch/windows/process_handles.c533
-rw-r--r--python/psutil/psutil/arch/windows/process_handles.h113
-rw-r--r--python/psutil/psutil/arch/windows/process_info.c435
-rw-r--r--python/psutil/psutil/arch/windows/process_info.h26
-rw-r--r--python/psutil/psutil/arch/windows/security.c228
-rw-r--r--python/psutil/psutil/arch/windows/security.h17
-rw-r--r--python/psutil/setup.cfg5
-rw-r--r--python/psutil/setup.py206
-rw-r--r--python/psutil/test/README.rst21
-rw-r--r--python/psutil/test/_bsd.py252
-rw-r--r--python/psutil/test/_linux.py473
-rw-r--r--python/psutil/test/_osx.py160
-rw-r--r--python/psutil/test/_posix.py258
-rw-r--r--python/psutil/test/_sunos.py48
-rw-r--r--python/psutil/test/_windows.py464
-rw-r--r--python/psutil/test/test_memory_leaks.py445
-rw-r--r--python/psutil/test/test_psutil.py3013
-rw-r--r--python/psutil/tox.ini32
-rw-r--r--python/py/AUTHORS24
-rw-r--r--python/py/LICENSE19
-rw-r--r--python/py/MANIFEST.in9
-rw-r--r--python/py/PKG-INFO46
-rw-r--r--python/py/README.txt21
-rw-r--r--python/py/py/__init__.py150
-rw-r--r--python/py/py/__metainfo.py2
-rw-r--r--python/py/py/_apipkg.py181
-rw-r--r--python/py/py/_builtin.py248
-rw-r--r--python/py/py/_code/__init__.py1
-rw-r--r--python/py/py/_code/_assertionnew.py339
-rw-r--r--python/py/py/_code/_assertionold.py555
-rw-r--r--python/py/py/_code/_py2traceback.py79
-rw-r--r--python/py/py/_code/assertion.py94
-rw-r--r--python/py/py/_code/code.py787
-rw-r--r--python/py/py/_code/source.py419
-rw-r--r--python/py/py/_error.py88
-rw-r--r--python/py/py/_iniconfig.py162
-rw-r--r--python/py/py/_io/__init__.py1
-rw-r--r--python/py/py/_io/capture.py371
-rw-r--r--python/py/py/_io/saferepr.py71
-rw-r--r--python/py/py/_io/terminalwriter.py348
-rw-r--r--python/py/py/_log/__init__.py2
-rw-r--r--python/py/py/_log/log.py186
-rw-r--r--python/py/py/_log/warning.py76
-rw-r--r--python/py/py/_path/__init__.py1
-rw-r--r--python/py/py/_path/cacheutil.py114
-rw-r--r--python/py/py/_path/common.py403
-rw-r--r--python/py/py/_path/local.py911
-rw-r--r--python/py/py/_path/svnurl.py380
-rw-r--r--python/py/py/_path/svnwc.py1240
-rw-r--r--python/py/py/_process/__init__.py1
-rw-r--r--python/py/py/_process/cmdexec.py49
-rw-r--r--python/py/py/_process/forkedfunc.py120
-rw-r--r--python/py/py/_process/killproc.py23
-rw-r--r--python/py/py/_std.py18
-rw-r--r--python/py/py/_xmlgen.py253
-rw-r--r--python/py/py/test.py10
-rw-r--r--python/py/setup.cfg11
-rw-r--r--python/py/setup.py38
-rw-r--r--python/pyasn1-modules/CHANGES45
-rw-r--r--python/pyasn1-modules/LICENSE24
-rw-r--r--python/pyasn1-modules/MANIFEST.in3
-rw-r--r--python/pyasn1-modules/PKG-INFO26
-rw-r--r--python/pyasn1-modules/README17
-rw-r--r--python/pyasn1-modules/pyasn1_modules/__init__.py2
-rw-r--r--python/pyasn1-modules/pyasn1_modules/pem.py51
-rw-r--r--python/pyasn1-modules/pyasn1_modules/pkcs12.py34
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc1155.py73
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc1157.py90
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc1901.py15
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc1902.py105
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc1905.py100
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2251.py319
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2314.py33
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2315.py205
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2437.py53
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2459.py903
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2511.py176
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc2560.py171
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc3412.py38
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc3414.py17
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc3447.py35
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc4210.py695
-rw-r--r--python/pyasn1-modules/pyasn1_modules/rfc5208.py39
-rw-r--r--python/pyasn1-modules/setup.cfg5
-rw-r--r--python/pyasn1-modules/setup.py89
-rw-r--r--python/pyasn1-modules/test/cmp.sh93
-rw-r--r--python/pyasn1-modules/test/crl.sh14
-rw-r--r--python/pyasn1-modules/test/crmf.sh14
-rw-r--r--python/pyasn1-modules/test/ocspreq.sh7
-rw-r--r--python/pyasn1-modules/test/ocsprsp.sh26
-rw-r--r--python/pyasn1-modules/test/pkcs1.sh26
-rw-r--r--python/pyasn1-modules/test/pkcs10.sh24
-rw-r--r--python/pyasn1-modules/test/pkcs7.sh63
-rw-r--r--python/pyasn1-modules/test/pkcs8.sh26
-rw-r--r--python/pyasn1-modules/test/x509dump.sh23
-rwxr-xr-xpython/pyasn1-modules/tools/cmpdump.py28
-rwxr-xr-xpython/pyasn1-modules/tools/crldump.py38
-rwxr-xr-xpython/pyasn1-modules/tools/crmfdump.py25
-rwxr-xr-xpython/pyasn1-modules/tools/ocspclient.py145
-rwxr-xr-xpython/pyasn1-modules/tools/ocspreqdump.py27
-rwxr-xr-xpython/pyasn1-modules/tools/ocsprspdump.py27
-rwxr-xr-xpython/pyasn1-modules/tools/ocspserver.py143
-rwxr-xr-xpython/pyasn1-modules/tools/pkcs10dump.py39
-rwxr-xr-xpython/pyasn1-modules/tools/pkcs1dump.py42
-rwxr-xr-xpython/pyasn1-modules/tools/pkcs7dump.py47
-rwxr-xr-xpython/pyasn1-modules/tools/pkcs8dump.py41
-rwxr-xr-xpython/pyasn1-modules/tools/snmpget.py37
-rwxr-xr-xpython/pyasn1-modules/tools/x509dump.py40
-rw-r--r--python/pyasn1/CHANGES278
-rw-r--r--python/pyasn1/LICENSE24
-rw-r--r--python/pyasn1/MANIFEST.in3
-rw-r--r--python/pyasn1/PKG-INFO26
-rw-r--r--python/pyasn1/README68
-rw-r--r--python/pyasn1/THANKS4
-rw-r--r--python/pyasn1/TODO36
-rw-r--r--python/pyasn1/doc/codecs.html503
-rw-r--r--python/pyasn1/doc/constraints.html436
-rw-r--r--python/pyasn1/doc/constructed.html377
-rw-r--r--python/pyasn1/doc/intro.html156
-rw-r--r--python/pyasn1/doc/pyasn1-tutorial.html2405
-rw-r--r--python/pyasn1/doc/scalar.html794
-rw-r--r--python/pyasn1/doc/tagging.html233
-rw-r--r--python/pyasn1/pyasn1/__init__.py8
-rw-r--r--python/pyasn1/pyasn1/codec/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/codec/ber/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/codec/ber/decoder.py808
-rw-r--r--python/pyasn1/pyasn1/codec/ber/encoder.py353
-rw-r--r--python/pyasn1/pyasn1/codec/ber/eoo.py8
-rw-r--r--python/pyasn1/pyasn1/codec/cer/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/codec/cer/decoder.py35
-rw-r--r--python/pyasn1/pyasn1/codec/cer/encoder.py87
-rw-r--r--python/pyasn1/pyasn1/codec/der/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/codec/der/decoder.py9
-rw-r--r--python/pyasn1/pyasn1/codec/der/encoder.py28
-rw-r--r--python/pyasn1/pyasn1/compat/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/compat/octets.py20
-rw-r--r--python/pyasn1/pyasn1/debug.py65
-rw-r--r--python/pyasn1/pyasn1/error.py3
-rw-r--r--python/pyasn1/pyasn1/type/__init__.py1
-rw-r--r--python/pyasn1/pyasn1/type/base.py249
-rw-r--r--python/pyasn1/pyasn1/type/char.py61
-rw-r--r--python/pyasn1/pyasn1/type/constraint.py200
-rw-r--r--python/pyasn1/pyasn1/type/error.py3
-rw-r--r--python/pyasn1/pyasn1/type/namedtype.py132
-rw-r--r--python/pyasn1/pyasn1/type/namedval.py46
-rw-r--r--python/pyasn1/pyasn1/type/tag.py122
-rw-r--r--python/pyasn1/pyasn1/type/tagmap.py52
-rw-r--r--python/pyasn1/pyasn1/type/univ.py1042
-rw-r--r--python/pyasn1/pyasn1/type/useful.py12
-rw-r--r--python/pyasn1/setup.cfg5
-rw-r--r--python/pyasn1/setup.py115
-rw-r--r--python/pyasn1/test/__init__.py1
-rw-r--r--python/pyasn1/test/codec/__init__.py1
-rw-r--r--python/pyasn1/test/codec/ber/__init__.py1
-rw-r--r--python/pyasn1/test/codec/ber/suite.py22
-rw-r--r--python/pyasn1/test/codec/ber/test_decoder.py535
-rw-r--r--python/pyasn1/test/codec/ber/test_encoder.py338
-rw-r--r--python/pyasn1/test/codec/cer/__init__.py1
-rw-r--r--python/pyasn1/test/codec/cer/suite.py22
-rw-r--r--python/pyasn1/test/codec/cer/test_decoder.py31
-rw-r--r--python/pyasn1/test/codec/cer/test_encoder.py107
-rw-r--r--python/pyasn1/test/codec/der/__init__.py1
-rw-r--r--python/pyasn1/test/codec/der/suite.py22
-rw-r--r--python/pyasn1/test/codec/der/test_decoder.py20
-rw-r--r--python/pyasn1/test/codec/der/test_encoder.py44
-rw-r--r--python/pyasn1/test/codec/suite.py29
-rw-r--r--python/pyasn1/test/suite.py26
-rw-r--r--python/pyasn1/test/type/__init__.py1
-rw-r--r--python/pyasn1/test/type/suite.py20
-rw-r--r--python/pyasn1/test/type/test_constraint.py280
-rw-r--r--python/pyasn1/test/type/test_namedtype.py87
-rw-r--r--python/pyasn1/test/type/test_tag.py107
-rw-r--r--python/pyasn1/test/type/test_univ.py479
-rw-r--r--python/pylru/pylru.py556
-rw-r--r--python/pylru/test.py238
-rw-r--r--python/pystache/.gitignore17
-rw-r--r--python/pystache/.gitmodules3
-rw-r--r--python/pystache/.travis.yml14
-rw-r--r--python/pystache/HISTORY.md169
-rw-r--r--python/pystache/LICENSE22
-rw-r--r--python/pystache/MANIFEST.in13
-rw-r--r--python/pystache/README.md276
-rw-r--r--python/pystache/TODO.md16
-rw-r--r--python/pystache/gh/images/logo_phillips.pngbin0 -> 173595 bytes
-rw-r--r--python/pystache/pystache/__init__.py13
-rw-r--r--python/pystache/pystache/commands/__init__.py4
-rw-r--r--python/pystache/pystache/commands/render.py95
-rw-r--r--python/pystache/pystache/commands/test.py18
-rw-r--r--python/pystache/pystache/common.py71
-rw-r--r--python/pystache/pystache/context.py342
-rw-r--r--python/pystache/pystache/defaults.py65
-rw-r--r--python/pystache/pystache/init.py19
-rw-r--r--python/pystache/pystache/loader.py170
-rw-r--r--python/pystache/pystache/locator.py171
-rw-r--r--python/pystache/pystache/parsed.py50
-rw-r--r--python/pystache/pystache/parser.py378
-rw-r--r--python/pystache/pystache/renderengine.py181
-rw-r--r--python/pystache/pystache/renderer.py460
-rw-r--r--python/pystache/pystache/specloader.py90
-rw-r--r--python/pystache/pystache/template_spec.py53
-rw-r--r--python/pystache/setup.py413
-rw-r--r--python/pystache/setup_description.rst513
-rw-r--r--python/pystache/test_pystache.py30
-rw-r--r--python/pystache/tox.ini36
-rw-r--r--python/pytest/.coveragerc7
-rw-r--r--python/pytest/AUTHORS91
-rw-r--r--python/pytest/LICENSE21
-rw-r--r--python/pytest/MANIFEST.in34
-rw-r--r--python/pytest/PKG-INFO133
-rw-r--r--python/pytest/README.rst102
-rw-r--r--python/pytest/_pytest/__init__.py2
-rw-r--r--python/pytest/_pytest/_argcomplete.py101
-rw-r--r--python/pytest/_pytest/_code/__init__.py12
-rw-r--r--python/pytest/_pytest/_code/_py2traceback.py81
-rw-r--r--python/pytest/_pytest/_code/code.py805
-rw-r--r--python/pytest/_pytest/_code/source.py421
-rw-r--r--python/pytest/_pytest/_pluggy.py11
-rw-r--r--python/pytest/_pytest/assertion/__init__.py176
-rw-r--r--python/pytest/_pytest/assertion/reinterpret.py407
-rw-r--r--python/pytest/_pytest/assertion/rewrite.py885
-rw-r--r--python/pytest/_pytest/assertion/util.py332
-rwxr-xr-xpython/pytest/_pytest/cacheprovider.py245
-rw-r--r--python/pytest/_pytest/capture.py472
-rw-r--r--python/pytest/_pytest/config.py1192
-rw-r--r--python/pytest/_pytest/doctest.py290
-rwxr-xr-xpython/pytest/_pytest/genscript.py132
-rw-r--r--python/pytest/_pytest/helpconfig.py139
-rw-r--r--python/pytest/_pytest/hookspec.py295
-rw-r--r--python/pytest/_pytest/junitxml.py387
-rw-r--r--python/pytest/_pytest/main.py744
-rw-r--r--python/pytest/_pytest/mark.py311
-rw-r--r--python/pytest/_pytest/monkeypatch.py254
-rw-r--r--python/pytest/_pytest/nose.py71
-rw-r--r--python/pytest/_pytest/pastebin.py92
-rw-r--r--python/pytest/_pytest/pdb.py109
-rw-r--r--python/pytest/_pytest/pytester.py1110
-rw-r--r--python/pytest/_pytest/python.py2300
-rw-r--r--python/pytest/_pytest/recwarn.py221
-rw-r--r--python/pytest/_pytest/resultlog.py104
-rw-r--r--python/pytest/_pytest/runner.py515
-rw-r--r--python/pytest/_pytest/skipping.py361
-rwxr-xr-xpython/pytest/_pytest/standalonetemplate.py89
-rw-r--r--python/pytest/_pytest/terminal.py593
-rw-r--r--python/pytest/_pytest/tmpdir.py123
-rw-r--r--python/pytest/_pytest/unittest.py205
-rw-r--r--python/pytest/_pytest/vendored_packages/README.md13
-rw-r--r--python/pytest/_pytest/vendored_packages/__init__.py0
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/DESCRIPTION.rst10
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/METADATA39
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/RECORD8
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/WHEEL6
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/metadata.json1
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/pbr.json1
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/top_level.txt1
-rw-r--r--python/pytest/_pytest/vendored_packages/pluggy.py777
-rw-r--r--python/pytest/pytest.py28
-rw-r--r--python/pytest/setup.cfg19
-rw-r--r--python/pytest/setup.py122
-rw-r--r--python/pytoml/PKG-INFO10
-rw-r--r--python/pytoml/pytoml/__init__.py3
-rw-r--r--python/pytoml/pytoml/core.py13
-rw-r--r--python/pytoml/pytoml/parser.py366
-rw-r--r--python/pytoml/pytoml/writer.py120
-rw-r--r--python/pytoml/setup.cfg5
-rw-r--r--python/pytoml/setup.py17
-rw-r--r--python/pytoml/test/test.py100
-rw-r--r--python/pyyaml/CHANGES147
-rw-r--r--python/pyyaml/LICENSE19
-rw-r--r--python/pyyaml/PKG-INFO36
-rw-r--r--python/pyyaml/README35
-rw-r--r--python/pyyaml/examples/pygments-lexer/example.yaml302
-rw-r--r--python/pyyaml/examples/pygments-lexer/yaml.py431
-rw-r--r--python/pyyaml/examples/yaml-highlight/yaml_hl.cfg115
-rwxr-xr-xpython/pyyaml/examples/yaml-highlight/yaml_hl.py114
-rw-r--r--python/pyyaml/ext/_yaml.c22190
-rw-r--r--python/pyyaml/ext/_yaml.h23
-rw-r--r--python/pyyaml/ext/_yaml.pxd251
-rw-r--r--python/pyyaml/ext/_yaml.pyx1527
-rw-r--r--python/pyyaml/lib/yaml/__init__.py315
-rw-r--r--python/pyyaml/lib/yaml/composer.py139
-rw-r--r--python/pyyaml/lib/yaml/constructor.py675
-rw-r--r--python/pyyaml/lib/yaml/cyaml.py85
-rw-r--r--python/pyyaml/lib/yaml/dumper.py62
-rw-r--r--python/pyyaml/lib/yaml/emitter.py1140
-rw-r--r--python/pyyaml/lib/yaml/error.py75
-rw-r--r--python/pyyaml/lib/yaml/events.py86
-rw-r--r--python/pyyaml/lib/yaml/loader.py40
-rw-r--r--python/pyyaml/lib/yaml/nodes.py49
-rw-r--r--python/pyyaml/lib/yaml/parser.py589
-rw-r--r--python/pyyaml/lib/yaml/reader.py190
-rw-r--r--python/pyyaml/lib/yaml/representer.py484
-rw-r--r--python/pyyaml/lib/yaml/resolver.py224
-rw-r--r--python/pyyaml/lib/yaml/scanner.py1457
-rw-r--r--python/pyyaml/lib/yaml/serializer.py111
-rw-r--r--python/pyyaml/lib/yaml/tokens.py104
-rw-r--r--python/pyyaml/lib3/yaml/__init__.py312
-rw-r--r--python/pyyaml/lib3/yaml/composer.py139
-rw-r--r--python/pyyaml/lib3/yaml/constructor.py686
-rw-r--r--python/pyyaml/lib3/yaml/cyaml.py85
-rw-r--r--python/pyyaml/lib3/yaml/dumper.py62
-rw-r--r--python/pyyaml/lib3/yaml/emitter.py1137
-rw-r--r--python/pyyaml/lib3/yaml/error.py75
-rw-r--r--python/pyyaml/lib3/yaml/events.py86
-rw-r--r--python/pyyaml/lib3/yaml/loader.py40
-rw-r--r--python/pyyaml/lib3/yaml/nodes.py49
-rw-r--r--python/pyyaml/lib3/yaml/parser.py589
-rw-r--r--python/pyyaml/lib3/yaml/reader.py192
-rw-r--r--python/pyyaml/lib3/yaml/representer.py374
-rw-r--r--python/pyyaml/lib3/yaml/resolver.py224
-rw-r--r--python/pyyaml/lib3/yaml/scanner.py1448
-rw-r--r--python/pyyaml/lib3/yaml/serializer.py111
-rw-r--r--python/pyyaml/lib3/yaml/tokens.py104
-rw-r--r--python/pyyaml/setup.cfg29
-rw-r--r--python/pyyaml/setup.py345
-rw-r--r--python/redo/PKG-INFO10
-rw-r--r--python/redo/README4
-rw-r--r--python/redo/redo/__init__.py240
-rw-r--r--python/redo/redo/cmd.py53
-rw-r--r--python/redo/setup.cfg8
-rw-r--r--python/redo/setup.py18
-rw-r--r--python/requests/HISTORY.rst1130
-rw-r--r--python/requests/LICENSE13
-rw-r--r--python/requests/MANIFEST.in1
-rw-r--r--python/requests/NOTICE54
-rw-r--r--python/requests/PKG-INFO1238
-rw-r--r--python/requests/README.rst86
-rw-r--r--python/requests/requests/__init__.py83
-rw-r--r--python/requests/requests/adapters.py453
-rw-r--r--python/requests/requests/api.py145
-rw-r--r--python/requests/requests/auth.py223
-rw-r--r--python/requests/requests/cacert.pem5616
-rw-r--r--python/requests/requests/certs.py25
-rw-r--r--python/requests/requests/compat.py62
-rw-r--r--python/requests/requests/cookies.py487
-rw-r--r--python/requests/requests/exceptions.py114
-rw-r--r--python/requests/requests/hooks.py34
-rw-r--r--python/requests/requests/models.py851
-rw-r--r--python/requests/requests/packages/__init__.py36
-rw-r--r--python/requests/requests/packages/chardet/__init__.py32
-rw-r--r--python/requests/requests/packages/chardet/big5freq.py925
-rw-r--r--python/requests/requests/packages/chardet/big5prober.py42
-rwxr-xr-xpython/requests/requests/packages/chardet/chardetect.py80
-rw-r--r--python/requests/requests/packages/chardet/chardistribution.py231
-rw-r--r--python/requests/requests/packages/chardet/charsetgroupprober.py106
-rw-r--r--python/requests/requests/packages/chardet/charsetprober.py62
-rw-r--r--python/requests/requests/packages/chardet/codingstatemachine.py61
-rw-r--r--python/requests/requests/packages/chardet/compat.py34
-rw-r--r--python/requests/requests/packages/chardet/constants.py39
-rw-r--r--python/requests/requests/packages/chardet/cp949prober.py44
-rw-r--r--python/requests/requests/packages/chardet/escprober.py86
-rw-r--r--python/requests/requests/packages/chardet/escsm.py242
-rw-r--r--python/requests/requests/packages/chardet/eucjpprober.py90
-rw-r--r--python/requests/requests/packages/chardet/euckrfreq.py596
-rw-r--r--python/requests/requests/packages/chardet/euckrprober.py42
-rw-r--r--python/requests/requests/packages/chardet/euctwfreq.py428
-rw-r--r--python/requests/requests/packages/chardet/euctwprober.py41
-rw-r--r--python/requests/requests/packages/chardet/gb2312freq.py472
-rw-r--r--python/requests/requests/packages/chardet/gb2312prober.py41
-rw-r--r--python/requests/requests/packages/chardet/hebrewprober.py283
-rw-r--r--python/requests/requests/packages/chardet/jisfreq.py569
-rw-r--r--python/requests/requests/packages/chardet/jpcntx.py227
-rw-r--r--python/requests/requests/packages/chardet/langbulgarianmodel.py229
-rw-r--r--python/requests/requests/packages/chardet/langcyrillicmodel.py329
-rw-r--r--python/requests/requests/packages/chardet/langgreekmodel.py225
-rw-r--r--python/requests/requests/packages/chardet/langhebrewmodel.py201
-rw-r--r--python/requests/requests/packages/chardet/langhungarianmodel.py225
-rw-r--r--python/requests/requests/packages/chardet/langthaimodel.py200
-rw-r--r--python/requests/requests/packages/chardet/latin1prober.py139
-rw-r--r--python/requests/requests/packages/chardet/mbcharsetprober.py86
-rw-r--r--python/requests/requests/packages/chardet/mbcsgroupprober.py54
-rw-r--r--python/requests/requests/packages/chardet/mbcssm.py572
-rw-r--r--python/requests/requests/packages/chardet/sbcharsetprober.py120
-rw-r--r--python/requests/requests/packages/chardet/sbcsgroupprober.py69
-rw-r--r--python/requests/requests/packages/chardet/sjisprober.py91
-rw-r--r--python/requests/requests/packages/chardet/universaldetector.py170
-rw-r--r--python/requests/requests/packages/chardet/utf8prober.py76
-rw-r--r--python/requests/requests/packages/urllib3/__init__.py93
-rw-r--r--python/requests/requests/packages/urllib3/_collections.py324
-rw-r--r--python/requests/requests/packages/urllib3/connection.py288
-rw-r--r--python/requests/requests/packages/urllib3/connectionpool.py818
-rw-r--r--python/requests/requests/packages/urllib3/contrib/__init__.py0
-rw-r--r--python/requests/requests/packages/urllib3/contrib/appengine.py223
-rw-r--r--python/requests/requests/packages/urllib3/contrib/ntlmpool.py115
-rw-r--r--python/requests/requests/packages/urllib3/contrib/pyopenssl.py310
-rw-r--r--python/requests/requests/packages/urllib3/exceptions.py201
-rw-r--r--python/requests/requests/packages/urllib3/fields.py178
-rw-r--r--python/requests/requests/packages/urllib3/filepost.py94
-rw-r--r--python/requests/requests/packages/urllib3/packages/__init__.py5
-rw-r--r--python/requests/requests/packages/urllib3/packages/ordered_dict.py259
-rw-r--r--python/requests/requests/packages/urllib3/packages/six.py385
-rw-r--r--python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py13
-rw-r--r--python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py105
-rw-r--r--python/requests/requests/packages/urllib3/poolmanager.py281
-rw-r--r--python/requests/requests/packages/urllib3/request.py151
-rw-r--r--python/requests/requests/packages/urllib3/response.py514
-rw-r--r--python/requests/requests/packages/urllib3/util/__init__.py44
-rw-r--r--python/requests/requests/packages/urllib3/util/connection.py101
-rw-r--r--python/requests/requests/packages/urllib3/util/request.py72
-rw-r--r--python/requests/requests/packages/urllib3/util/response.py74
-rw-r--r--python/requests/requests/packages/urllib3/util/retry.py286
-rw-r--r--python/requests/requests/packages/urllib3/util/ssl_.py317
-rw-r--r--python/requests/requests/packages/urllib3/util/timeout.py242
-rw-r--r--python/requests/requests/packages/urllib3/util/url.py217
-rw-r--r--python/requests/requests/sessions.py680
-rw-r--r--python/requests/requests/status_codes.py90
-rw-r--r--python/requests/requests/structures.py104
-rw-r--r--python/requests/requests/utils.py721
-rw-r--r--python/requests/requirements.txt6
-rw-r--r--python/requests/setup.cfg8
-rwxr-xr-xpython/requests/setup.py74
-rwxr-xr-xpython/requests/test_requests.py1746
-rw-r--r--python/rsa/LICENSE13
-rw-r--r--python/rsa/MANIFEST.in5
-rw-r--r--python/rsa/PKG-INFO18
-rw-r--r--python/rsa/README.rst31
-rwxr-xr-xpython/rsa/create_timing_table.py29
-rwxr-xr-xpython/rsa/playstuff.py41
-rw-r--r--python/rsa/rsa/__init__.py45
-rw-r--r--python/rsa/rsa/_compat.py160
-rw-r--r--python/rsa/rsa/_version133.py442
-rw-r--r--python/rsa/rsa/_version200.py529
-rw-r--r--python/rsa/rsa/asn1.py35
-rw-r--r--python/rsa/rsa/bigfile.py87
-rw-r--r--python/rsa/rsa/cli.py379
-rw-r--r--python/rsa/rsa/common.py185
-rw-r--r--python/rsa/rsa/core.py58
-rw-r--r--python/rsa/rsa/key.py612
-rw-r--r--python/rsa/rsa/parallel.py94
-rw-r--r--python/rsa/rsa/pem.py120
-rw-r--r--python/rsa/rsa/pkcs1.py391
-rw-r--r--python/rsa/rsa/prime.py166
-rw-r--r--python/rsa/rsa/randnum.py85
-rw-r--r--python/rsa/rsa/transform.py220
-rw-r--r--python/rsa/rsa/util.py81
-rw-r--r--python/rsa/rsa/varblock.py155
-rw-r--r--python/rsa/run_tests.py43
-rw-r--r--python/rsa/setup.cfg8
-rwxr-xr-xpython/rsa/setup.py41
-rw-r--r--python/rsa/tests/__init__.py0
-rw-r--r--python/rsa/tests/constants.py9
-rw-r--r--python/rsa/tests/py2kconstants.py3
-rw-r--r--python/rsa/tests/py3kconstants.py3
-rw-r--r--python/rsa/tests/test_bigfile.py60
-rw-r--r--python/rsa/tests/test_common.py61
-rw-r--r--python/rsa/tests/test_compat.py17
-rw-r--r--python/rsa/tests/test_integers.py36
-rw-r--r--python/rsa/tests/test_load_save_keys.py127
-rw-r--r--python/rsa/tests/test_pem.py14
-rw-r--r--python/rsa/tests/test_pkcs1.py94
-rw-r--r--python/rsa/tests/test_strings.py28
-rw-r--r--python/rsa/tests/test_transform.py67
-rw-r--r--python/rsa/tests/test_varblock.py82
-rw-r--r--python/slugid/.gitignore57
-rw-r--r--python/slugid/.travis.yml27
-rw-r--r--python/slugid/LICENSE363
-rw-r--r--python/slugid/README.rst121
-rw-r--r--python/slugid/requirements.txt2
-rw-r--r--python/slugid/setup.py39
-rw-r--r--python/slugid/slugid/__init__.py43
-rw-r--r--python/slugid/slugid/slugid.py43
-rw-r--r--python/slugid/test.py167
-rw-r--r--python/slugid/tox.ini26
-rw-r--r--python/virtualenv/AUTHORS.txt91
-rw-r--r--python/virtualenv/LICENSE.txt22
-rw-r--r--python/virtualenv/MANIFEST.in12
-rw-r--r--python/virtualenv/PKG-INFO87
-rw-r--r--python/virtualenv/README.rst31
-rwxr-xr-xpython/virtualenv/bin/rebuild-script.py73
-rw-r--r--python/virtualenv/docs/Makefile130
-rw-r--r--python/virtualenv/docs/changes.rst985
-rw-r--r--python/virtualenv/docs/conf.py153
-rw-r--r--python/virtualenv/docs/development.rst61
-rw-r--r--python/virtualenv/docs/index.rst137
-rw-r--r--python/virtualenv/docs/installation.rst58
-rw-r--r--python/virtualenv/docs/make.bat170
-rw-r--r--python/virtualenv/docs/reference.rst261
-rw-r--r--python/virtualenv/docs/userguide.rst258
-rw-r--r--python/virtualenv/scripts/virtualenv3
-rw-r--r--python/virtualenv/setup.cfg8
-rw-r--r--python/virtualenv/setup.py123
-rw-r--r--python/virtualenv/site.py760
-rw-r--r--python/virtualenv/tests/__init__.py0
-rwxr-xr-xpython/virtualenv/tests/test_activate.sh96
-rw-r--r--python/virtualenv/tests/test_activate_output.expected2
-rw-r--r--python/virtualenv/tests/test_cmdline.py44
-rw-r--r--python/virtualenv/tests/test_virtualenv.py139
-rwxr-xr-xpython/virtualenv/virtualenv.py2329
-rw-r--r--python/virtualenv/virtualenv_embedded/activate.bat30
-rw-r--r--python/virtualenv/virtualenv_embedded/activate.csh36
-rw-r--r--python/virtualenv/virtualenv_embedded/activate.fish76
-rw-r--r--python/virtualenv/virtualenv_embedded/activate.ps1150
-rw-r--r--python/virtualenv/virtualenv_embedded/activate.sh78
-rw-r--r--python/virtualenv/virtualenv_embedded/activate_this.py34
-rw-r--r--python/virtualenv/virtualenv_embedded/deactivate.bat19
-rw-r--r--python/virtualenv/virtualenv_embedded/distutils-init.py101
-rw-r--r--python/virtualenv/virtualenv_embedded/distutils.cfg6
-rw-r--r--python/virtualenv/virtualenv_embedded/python-config78
-rw-r--r--python/virtualenv/virtualenv_embedded/site.py758
-rw-r--r--python/virtualenv/virtualenv_support/__init__.py0
-rw-r--r--python/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whlbin0 -> 23000 bytes
-rw-r--r--python/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whlbin0 -> 1198961 bytes
-rw-r--r--python/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whlbin0 -> 442860 bytes
-rw-r--r--python/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whlbin0 -> 66878 bytes
-rw-r--r--python/voluptuous/COPYING25
-rw-r--r--python/voluptuous/MANIFEST.in2
-rw-r--r--python/voluptuous/PKG-INFO611
-rw-r--r--python/voluptuous/README.md596
-rw-r--r--python/voluptuous/README.rst589
-rw-r--r--python/voluptuous/setup.cfg10
-rw-r--r--python/voluptuous/setup.py54
-rw-r--r--python/voluptuous/tests.md268
-rw-r--r--python/voluptuous/voluptuous.py1954
-rw-r--r--python/which/LICENSE.txt21
-rw-r--r--python/which/MANIFEST.in3
-rw-r--r--python/which/Makefile.win21
-rw-r--r--python/which/PKG-INFO21
-rw-r--r--python/which/README.txt229
-rw-r--r--python/which/TODO.txt113
-rw-r--r--python/which/build.py442
-rw-r--r--python/which/launcher.cpp404
-rw-r--r--python/which/logo.jpgbin0 -> 3635 bytes
-rw-r--r--python/which/setup.py70
-rw-r--r--python/which/test/test_which.py168
-rw-r--r--python/which/test/testsupport.py83
-rw-r--r--python/which/which.py335
1830 files changed, 306691 insertions, 0 deletions
diff --git a/python/PyECC/MANIFEST.in b/python/PyECC/MANIFEST.in
new file mode 100644
index 000000000..bb3ec5f0d
--- /dev/null
+++ b/python/PyECC/MANIFEST.in
@@ -0,0 +1 @@
+include README.md
diff --git a/python/PyECC/README.md b/python/PyECC/README.md
new file mode 100644
index 000000000..be67fff04
--- /dev/null
+++ b/python/PyECC/README.md
@@ -0,0 +1,29 @@
+ecc
+===
+
+Pure Python implementation of an elliptic curve cryptosystem based on FIPS 186-3
+
+License
+=======
+
+The MIT License (MIT)
+
+Copyright (c) 2010-2015 Toni Mattis
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/python/PyECC/ecc/Key.py b/python/PyECC/ecc/Key.py
new file mode 100644
index 000000000..8ba268576
--- /dev/null
+++ b/python/PyECC/ecc/Key.py
@@ -0,0 +1,320 @@
+# ====================================================================
+#
+# ELLIPTIC CURVE KEY ENCAPSULATION
+# Version 2011-01-26
+#
+# Copyright (c) 2010 - 2011 | Toni Mattis
+#
+# ====================================================================
+
+"""
+== Elliptic Curve Key Encapsulation ==
+
+Keypairs
+--------
+Keypairs are generated using: Key.generate(bits)
+
+The number of bits is tied to the NIST-proposed elliptic curves
+and has to be 192, 224, 256, 384 or 521 (not 512!).
+The result is a Key object containing public and private key.
+
+private() is a method for checking whether the Key object is a
+pure public key or also includes the private part.
+
+
+Exchange
+--------
+Public keys have to be exported using the encode() method without
+passing an argument. The result is a string which can be safely
+transmitted.
+
+Using Key.decode(<encoded key>) the receiver obtains a new
+public Key object of the sender.
+
+
+Storage
+-------
+For storing a key, encode(True) exports both private and public
+key as a string. Make sure this information is properly encrypted
+when stored.
+
+Key.decode(<encoded key>) obtains the full Key object from the
+encoded keypair.
+
+
+Public Keys
+-----------
+A public Key object can perform the following cryptographic
+operations:
+
+* validate()       Checks key integrity, e.g. after loading the
+ key from a file. Returns True if the key is
+ valid. Invalid keys should be discarded.
+
+* fingerprint() Returns the public key fingerprint used to
+ identify the key. Optional arguments:
+ 1. as_hex - True, if output should be formatted
+ as hexadecimal number (default: True).
+ 2. hashfunc - The official name of the hash
+ function being used (default: 'sha1')
+ For supported hash functions see below.
+
+* keyid() Returns a (mostly) unique Key ID, which is
+ shorter than the fingerprint. The result
+ is an integer of max. 64 bits.
+
+* verify() Verifies whether the given data (argument 1)
+ matches the signature (argument 2) issued
+ by the owner of this key. A falsification
+ can have multiple causes:
+
+ - Data, public key or signature were altered
+ during transmission/storage.
+                    - The signature was not issued by the owner
+ of this key but may be valid with another
+ key.
+ - The signature was issued for different data.
+ - The signature was issued using a different
+ hash function. Another hash function may work.
+
+ Optionally, the name of a hash algorithm
+ can be provided. For hash names see below.
+
+* encrypt() Encrypts a packet of data destined for the owner
+ of this key*. After encryption only the holder
+ of this Key's private part is able to decrypt
+ the message.
+
+Private Keys / Keypairs
+-----------------------
+
+If the key object is private, then it is a keypair consisting of
+a public and a private key. Therefore all Public key operations
+are supported.
+
+Additional functions:
+
+* sign() Signs given data using this private key. The
+ result is a signature which can be passed as
+ argument to the verify() function in addition
+ to the data being verified.
+
+ As additional argument the name of the hash
+ function can be provided (defaults to 'sha256').
+ For hash names see below.
+
+* auth_encrypt() Performs authenticated encryption of data
+ (argument 1) for the holder of the key provided
+ as second argument. Only the receiver whose
+                   public key is given is able to decrypt and verify
+ the message. The message will be implicitly
+ signed using the own private key. *
+
+* decrypt() Decrypts a message which has been encrypted
+ using the public key of this keypair*. If
+ decryption yields random data, this can have
+ multiple causes:
+ - You were not the intended receiver, a different
+ private key may be able to decrypt it.
+ - The message was altered.
+ - Your private key is damaged.
+
+* auth_decrypt() Decrypts a message while verifying whether
+ it has been authentically issued by the holder
+ of the given key (argument 2). When
+ authentication failed, a
+ SecurityViolationException is thrown. Reasons
+ for this to happen are those mentioned with
+ decrypt() and verify(). *
+
+*) The encryption used here depends on the "eccrypt" module imported
+by this module. The default implementation uses Rabbit as the cipher
+and does the asymmetric part using an optimized ElGamal scheme.
+
+
+
+Hash functions
+--------------
+The following hash functions can be passed at the moment:
+
+name | hash size | security level
+ | (bits, bytes, hex digits)
+---------+------------------------+----------------
+'sha1' 160 / 20 / 40 medium
+'sha224' 224 / 28 / 56 medium-strong
+'sha256' 256 / 32 / 64 strong
+'sha384' 384 / 48 / 96 very strong
+'sha512' 512 / 64 / 128 very strong
+
+'md5' 128 / 16 / 32 weak (not recommended!)
+
+
+Curves
+------
+According to FIPS 186-3, Appendix D.1.2, there are five recommended
+elliptic curves. All of them are strong, but those with a higher
+bit count are even stronger.
+
+192 and 224 bits are sufficient for most purposes.
+256 bits offer an additional magnitude of security.
+    (e.g. for classified / strongly confidential data)
+384 and 521 bits provide exceptionally strong security. According
+ to current research they most probably keep this level for
+ decades in the future.
+
+FIPS also recommends curves over polynomial fields but actually
+only prime fields are implemented here. (Because 2^521-1 is a Mersenne
+prime having great security characteristics, 521 bits are preferred
+over a constructed 512 bit field.)
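+
+
+Example
+-------
+A minimal round-trip sketch (key size and messages are arbitrary):
+
+    from ecc.Key import Key
+
+    alice = Key.generate(256)
+    bob = Key.generate(256)
+
+    # exchange: ship the public part, reconstruct it on the other side
+    blob = alice.encode()
+    assert Key.decode(blob).validate()
+
+    # signing and verification
+    sig = alice.sign("Hello Bob!")
+    assert alice.verify("Hello Bob!", sig)
+
+    # authenticated encryption from alice to bob
+    ctext = alice.auth_encrypt("Hello Bob!", bob)
+    assert bob.auth_decrypt(ctext, alice) == "Hello Bob!"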
+"""
+
+from encoding import *
+from eccrypt import *
+import ecdsa
+import hashlib
+from SecurityViolationException import *
+
+class Key:
+
+ # --- KEY SETUP ------------------------------------------------------------
+
+ def __init__(self, public_key, private_key = None):
+ '''Create a Key(pair) from numeric keys.'''
+ self._pub = public_key
+ self._priv = private_key
+ self._fingerprint = {}
+ self._id = None
+
+ @staticmethod
+ def generate(bits):
+ '''Generate a new ECDSA keypair'''
+ return Key(*ecdsa.keypair(bits))
+
+ # --- BINARY REPRESENTATION ------------------------------------------------
+
+ def encode(self, include_private = False):
+ '''Returns a strict binary representation of this Key'''
+ e = Encoder().int(self.keyid(), 8)
+ e.int(self._pub[0], 2).point(self._pub[1], 2)
+ if include_private and self._priv:
+ e.long(self._priv[1], 2)
+ else:
+ e.long(0, 2)
+ return e.out()
+
+    def compress(self):
+        '''Returns a compact public key representation (stub; a sketch
+        would pack the x-coordinate with the sign bit of y)'''
+
+
+ @staticmethod
+ def decode(s):
+ '''Constructs a new Key object from its binary representation'''
+ kid, ksize, pub, priv = Decoder(s).int(8).int(2).point(2).long(2).out()
+ k = Key((ksize, pub), (ksize, priv) if priv else None)
+ if kid == k.keyid():
+ return k
+ else:
+ raise ValueError, "Invalid Key ID"
+
+ # --- IDENTIFICATION AND VALIDATION ----------------------------------------
+
+ def private(self):
+ '''Checks whether Key object contains private key'''
+ return bool(self._priv)
+
+ def validate(self):
+ '''Checks key validity'''
+ if ecdsa.validate_public_key(self._pub):
+ if self._priv: # ? validate and match private key
+ return ecdsa.validate_private_key(self._priv) and \
+ ecdsa.match_keys(self._pub, self._priv)
+ else:
+ return True # : everything valid
+ else:
+ return False
+
+ def fingerprint(self, as_hex = True, hashfunc = 'sha1'):
+ '''Get the public key fingerprint'''
+ if hashfunc in self._fingerprint:
+ return self._fingerprint[hashfunc] if not as_hex else \
+ self._fingerprint[hashfunc].encode("hex")
+ else:
+ h = hashlib.new(hashfunc, enc_point(self._pub[1]))
+ d = h.digest()
+ self._fingerprint[hashfunc] = d
+ return d.encode("hex") if as_hex else d
+
+ def keyid(self):
+ '''Get a short, unique identifier'''
+ if not self._id:
+ self._id = dec_long(self.fingerprint(False, 'sha1')[:8])
+ return self._id
+
+ # --- DIGITAL SIGNATURES ---------------------------------------------------
+
+ def sign(self, data, hashfunc = 'sha256'):
+ '''Sign data using the specified hash function'''
+ if self._priv:
+ h = dec_long(hashlib.new(hashfunc, data).digest())
+ s = ecdsa.sign(h, self._priv)
+ return enc_point(s)
+ else:
+ raise AttributeError, "Private key needed for signing."
+
+ def verify(self, data, sig, hashfunc = 'sha256'):
+ '''Verify the signature of data using the specified hash function'''
+ h = dec_long(hashlib.new(hashfunc, data).digest())
+ s = dec_point(sig)
+ return ecdsa.verify(h, s, self._pub)
+
+ # --- HYBRID ENCRYPTION ----------------------------------------------------
+
+ def encrypt(self, data):
+ '''Encrypt a message using this public key'''
+ ctext, mkey = encrypt(data, self._pub)
+ return Encoder().point(mkey).str(ctext, 4).out()
+
+ def decrypt(self, data):
+ '''Decrypt an encrypted message using this private key'''
+ mkey, ctext = Decoder(data).point().str(4).out()
+ return decrypt(ctext, mkey, self._priv)
+
+ # --- AUTHENTICATED ENCRYPTION ---------------------------------------------
+
+ def auth_encrypt(self, data, receiver):
+ '''Sign and encrypt a message'''
+ sgn = self.sign(data)
+ ctext, mkey = encrypt(data, receiver._pub)
+ return Encoder().point(mkey).str(ctext, 4).str(sgn, 2).out()
+
+ def auth_decrypt(self, data, source):
+ '''Decrypt and verify a message'''
+ mkey, ctext, sgn = Decoder(data).point().str(4).str(2).out()
+ text = decrypt(ctext, mkey, self._priv)
+ if source.verify(text, sgn):
+ return text
+ else:
+ raise SecurityViolationException, "Invalid Signature"
+
+
+if __name__ == "__main__":
+
+ import time
+
+ def test_overhead():
+ print "sender", "receiver", "+bytes", "+enctime", "+dectime"
+ for s in [192, 224, 256, 384, 521]:
+ sender = Key.generate(s)
+ for r in [192, 224, 256, 384, 521]:
+ receiver = Key.generate(r)
+ t = time.time()
+ e = sender.auth_encrypt("", receiver)
+ t1 = time.time() - t
+ t = time.time()
+ receiver.auth_decrypt(e, sender)
+ t2 = time.time() - t
+ print s, r, len(e), t1, t2
+
+
+
+
diff --git a/python/PyECC/ecc/Rabbit.py b/python/PyECC/ecc/Rabbit.py
new file mode 100644
index 000000000..209f01e1e
--- /dev/null
+++ b/python/PyECC/ecc/Rabbit.py
@@ -0,0 +1,270 @@
+# ------------------------------------------------------------------------------
+#
+# R A B B I T Stream Cipher
+# by M. Boesgaard, M. Vesterager, E. Zenner (specified in RFC 4503)
+#
+#
+# Pure Python Implementation by Toni Mattis
+#
+# ------------------------------------------------------------------------------
+
+
+WORDSIZE = 0x100000000
+
+rot08 = lambda x: ((x << 8) & 0xFFFFFFFF) | (x >> 24)
+rot16 = lambda x: ((x << 16) & 0xFFFFFFFF) | (x >> 16)
+
+def _nsf(u, v):
+ '''Internal non-linear state transition'''
+ s = (u + v) % WORDSIZE
+ s = s * s
+ return (s ^ (s >> 32)) % WORDSIZE
+
+class Rabbit:
+
+ def __init__(self, key, iv = None):
+ '''Initialize Rabbit cipher using a 128 bit integer/string'''
+
+ if isinstance(key, str):
+ # interpret key string in big endian byte order
+ if len(key) < 16:
+ key = '\x00' * (16 - len(key)) + key
+ # if len(key) > 16 bytes only the first 16 will be considered
+ k = [ord(key[i + 1]) | (ord(key[i]) << 8)
+ for i in xrange(14, -1, -2)]
+ else:
+ # k[0] = least significant 16 bits
+ # k[7] = most significant 16 bits
+ k = [(key >> i) & 0xFFFF for i in xrange(0, 128, 16)]
+
+ # State and counter initialization
+ x = [(k[(j + 5) % 8] << 16) | k[(j + 4) % 8] if j & 1 else
+ (k[(j + 1) % 8] << 16) | k[j] for j in xrange(8)]
+ c = [(k[j] << 16) | k[(j + 1) % 8] if j & 1 else
+ (k[(j + 4) % 8] << 16) | k[(j + 5) % 8] for j in xrange(8)]
+
+ self.x = x
+ self.c = c
+ self.b = 0
+ self._buf = 0 # output buffer
+ self._buf_bytes = 0 # fill level of buffer
+
+ self.next()
+ self.next()
+ self.next()
+ self.next()
+
+ for j in xrange(8):
+ c[j] ^= x[(j + 4) % 8]
+
+ self.start_x = self.x[:] # backup initial key for IV/reset
+ self.start_c = self.c[:]
+ self.start_b = self.b
+
+ if iv != None:
+ self.set_iv(iv)
+
+ def reset(self, iv = None):
+ '''Reset the cipher and optionally set a new IV (int64 / string).'''
+
+ self.c = self.start_c[:]
+ self.x = self.start_x[:]
+ self.b = self.start_b
+ self._buf = 0
+ self._buf_bytes = 0
+ if iv != None:
+ self.set_iv(iv)
+
+ def set_iv(self, iv):
+ '''Set a new IV (64 bit integer / bytestring).'''
+
+ if isinstance(iv, str):
+ i = 0
+ for c in iv:
+ i = (i << 8) | ord(c)
+ iv = i
+
+ c = self.c
+ i0 = iv & 0xFFFFFFFF
+ i2 = iv >> 32
+ i1 = ((i0 >> 16) | (i2 & 0xFFFF0000)) % WORDSIZE
+ i3 = ((i2 << 16) | (i0 & 0x0000FFFF)) % WORDSIZE
+
+ c[0] ^= i0
+ c[1] ^= i1
+ c[2] ^= i2
+ c[3] ^= i3
+ c[4] ^= i0
+ c[5] ^= i1
+ c[6] ^= i2
+ c[7] ^= i3
+
+ self.next()
+ self.next()
+ self.next()
+ self.next()
+
+
+ def next(self):
+ '''Proceed to the next internal state'''
+
+ c = self.c
+ x = self.x
+ b = self.b
+
+ t = c[0] + 0x4D34D34D + b
+ c[0] = t % WORDSIZE
+ t = c[1] + 0xD34D34D3 + t // WORDSIZE
+ c[1] = t % WORDSIZE
+ t = c[2] + 0x34D34D34 + t // WORDSIZE
+ c[2] = t % WORDSIZE
+ t = c[3] + 0x4D34D34D + t // WORDSIZE
+ c[3] = t % WORDSIZE
+ t = c[4] + 0xD34D34D3 + t // WORDSIZE
+ c[4] = t % WORDSIZE
+ t = c[5] + 0x34D34D34 + t // WORDSIZE
+ c[5] = t % WORDSIZE
+ t = c[6] + 0x4D34D34D + t // WORDSIZE
+ c[6] = t % WORDSIZE
+ t = c[7] + 0xD34D34D3 + t // WORDSIZE
+ c[7] = t % WORDSIZE
+ b = t // WORDSIZE
+
+ g = [_nsf(x[j], c[j]) for j in xrange(8)]
+
+ x[0] = (g[0] + rot16(g[7]) + rot16(g[6])) % WORDSIZE
+ x[1] = (g[1] + rot08(g[0]) + g[7]) % WORDSIZE
+ x[2] = (g[2] + rot16(g[1]) + rot16(g[0])) % WORDSIZE
+ x[3] = (g[3] + rot08(g[2]) + g[1]) % WORDSIZE
+ x[4] = (g[4] + rot16(g[3]) + rot16(g[2])) % WORDSIZE
+ x[5] = (g[5] + rot08(g[4]) + g[3]) % WORDSIZE
+ x[6] = (g[6] + rot16(g[5]) + rot16(g[4])) % WORDSIZE
+ x[7] = (g[7] + rot08(g[6]) + g[5]) % WORDSIZE
+
+ self.b = b
+ return self
+
+
+ def derive(self):
+ '''Derive a 128 bit integer from the internal state'''
+
+ x = self.x
+ return ((x[0] & 0xFFFF) ^ (x[5] >> 16)) | \
+ (((x[0] >> 16) ^ (x[3] & 0xFFFF)) << 16)| \
+ (((x[2] & 0xFFFF) ^ (x[7] >> 16)) << 32)| \
+ (((x[2] >> 16) ^ (x[5] & 0xFFFF)) << 48)| \
+ (((x[4] & 0xFFFF) ^ (x[1] >> 16)) << 64)| \
+ (((x[4] >> 16) ^ (x[7] & 0xFFFF)) << 80)| \
+ (((x[6] & 0xFFFF) ^ (x[3] >> 16)) << 96)| \
+ (((x[6] >> 16) ^ (x[1] & 0xFFFF)) << 112)
+
+
+ def keystream(self, n):
+ '''Generate a keystream of n bytes'''
+
+ res = ""
+ b = self._buf
+ j = self._buf_bytes
+ next = self.next
+ derive = self.derive
+
+ for i in xrange(n):
+ if not j:
+ j = 16
+ next()
+ b = derive()
+ res += chr(b & 0xFF)
+ j -= 1
+        b >>= 8 # advance one byte within the 128-bit block
+
+ self._buf = b
+ self._buf_bytes = j
+ return res
+
+
+ def encrypt(self, data):
+ '''Encrypt/Decrypt data of arbitrary length.'''
+
+ res = ""
+ b = self._buf
+ j = self._buf_bytes
+ next = self.next
+ derive = self.derive
+
+ for c in data:
+ if not j: # empty buffer => fetch next 128 bits
+ j = 16
+ next()
+ b = derive()
+ res += chr(ord(c) ^ (b & 0xFF))
+ j -= 1
+            b >>= 8 # advance one byte within the 128-bit block
+ self._buf = b
+ self._buf_bytes = j
+ return res
+
+ decrypt = encrypt
+
+
+
+if __name__ == "__main__":
+
+ import time
+
+ # --- Official Test Vectors ---
+
+ # RFC 4503 Appendix A.1 - Testing without IV Setup
+
+ r = Rabbit(0)
+ assert r.next().derive() == 0xB15754F036A5D6ECF56B45261C4AF702
+ assert r.next().derive() == 0x88E8D815C59C0C397B696C4789C68AA7
+ assert r.next().derive() == 0xF416A1C3700CD451DA68D1881673D696
+
+ r = Rabbit(0x912813292E3D36FE3BFC62F1DC51C3AC)
+ assert r.next().derive() == 0x3D2DF3C83EF627A1E97FC38487E2519C
+ assert r.next().derive() == 0xF576CD61F4405B8896BF53AA8554FC19
+ assert r.next().derive() == 0xE5547473FBDB43508AE53B20204D4C5E
+
+ r = Rabbit(0x8395741587E0C733E9E9AB01C09B0043)
+ assert r.next().derive() == 0x0CB10DCDA041CDAC32EB5CFD02D0609B
+ assert r.next().derive() == 0x95FC9FCA0F17015A7B7092114CFF3EAD
+ assert r.next().derive() == 0x9649E5DE8BFC7F3F924147AD3A947428
+
+ # RFC 4503 Appendix A.2 - Testing with IV Setup
+
+ r = Rabbit(0, 0)
+ assert r.next().derive() == 0xC6A7275EF85495D87CCD5D376705B7ED
+ assert r.next().derive() == 0x5F29A6AC04F5EFD47B8F293270DC4A8D
+ assert r.next().derive() == 0x2ADE822B29DE6C1EE52BDB8A47BF8F66
+
+ r = Rabbit(0, 0xC373F575C1267E59)
+ assert r.next().derive() == 0x1FCD4EB9580012E2E0DCCC9222017D6D
+ assert r.next().derive() == 0xA75F4E10D12125017B2499FFED936F2E
+ assert r.next().derive() == 0xEBC112C393E738392356BDD012029BA7
+
+ r = Rabbit(0, 0xA6EB561AD2F41727)
+ assert r.next().derive() == 0x445AD8C805858DBF70B6AF23A151104D
+ assert r.next().derive() == 0x96C8F27947F42C5BAEAE67C6ACC35B03
+ assert r.next().derive() == 0x9FCBFC895FA71C17313DF034F01551CB
+
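+    # --- Round-Trip Sanity Check ---
+
+    # encrypt() and decrypt() are the same keystream XOR, so a second
+    # instance with identical key and IV restores the plaintext
+    # (key/IV values reused from the vectors above)
+    enc = Rabbit(0x912813292E3D36FE3BFC62F1DC51C3AC, 0xC373F575C1267E59)
+    dec = Rabbit(0x912813292E3D36FE3BFC62F1DC51C3AC, 0xC373F575C1267E59)
+    assert dec.decrypt(enc.encrypt("attack at dawn")) == "attack at dawn"
+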
+
+ # --- Performance Tests ---
+
+ def test_gen(n = 1048576):
+ '''Measure time for generating n bytes => (total, bytes per second)'''
+
+ r = Rabbit(0)
+ t = time.time()
+ r.keystream(n)
+ t = time.time() - t
+ return t, n / t
+
+ def test_enc(n = 1048576):
+ '''Measure time for encrypting n bytes => (total, bytes per second)'''
+
+ r = Rabbit(0)
+ x = 'x' * n
+ t = time.time()
+ r.encrypt(x)
+ t = time.time() - t
+ return t, n / t
diff --git a/python/PyECC/ecc/SecurityViolationException.py b/python/PyECC/ecc/SecurityViolationException.py
new file mode 100644
index 000000000..c4fc13687
--- /dev/null
+++ b/python/PyECC/ecc/SecurityViolationException.py
@@ -0,0 +1,2 @@
+class SecurityViolationException(Exception):
+ pass
diff --git a/python/PyECC/ecc/__init__.py b/python/PyECC/ecc/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/PyECC/ecc/__init__.py
diff --git a/python/PyECC/ecc/curves.py b/python/PyECC/ecc/curves.py
new file mode 100644
index 000000000..ee5847fc5
--- /dev/null
+++ b/python/PyECC/ecc/curves.py
@@ -0,0 +1,81 @@
+#
+# Predefined Elliptic Curves
+# for use in signing and key exchange
+#
+'''
+Predefined elliptic curves for use in signing and key exchange.
+This Module implements FIPS approved standard curves P-192, P-224, P-256,
+P-384 and P-521 along with two weak non-standard curves of field size 128
+and 160 bits.
+
+The weak curves cannot be used for signing but provide a faster way to
+obfuscate non-critical transmissions.
+'''
+
+# FIPS approved elliptic curves over prime fields
+# (see FIPS 186-3, Appendix D.1.2)
+DOMAINS = {
+ # Bits : (p, order of E(GF(P)), parameter b, base point x, base point y)
+ 192 : (0xfffffffffffffffffffffffffffffffeffffffffffffffffL,
+ 0xffffffffffffffffffffffff99def836146bc9b1b4d22831L,
+ 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1L,
+ 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012L,
+ 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811L),
+
+ 224 : (0xffffffffffffffffffffffffffffffff000000000000000000000001L,
+ 0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3dL,
+ 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4L,
+ 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21L,
+ 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34L),
+
+ 256 : (0xffffffff00000001000000000000000000000000ffffffffffffffffffffffffL,
+ 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551L,
+ 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604bL,
+ 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296L,
+ 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5L),
+
+ 384 : (0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffffL,
+ 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973L,
+ 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aefL,
+ 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7L,
+ 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5fL),
+
+ 521 : (0x1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffL,
+ 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409L,
+ 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00L,
+ 0x0c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66L,
+ 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650L)
+ }
+
+
+# Additional non-standard curves for low security but high performance
+# (not intended for use in signing, hence the missing group order)
+
+DOMAINS.update({
+ 128 : (0xffffffffffffffffffffffffffffff61L,
+ None,
+ 0xd83d3eb8266a89927d73d5fe263d5f23L,
+ 0xa94d2d8531f7af8bde367def12b98eadL,
+ 0x9f44e1d671beb68fd2df7f877ab13fa6L),
+
+ 160 : (0xffffffffffffffffffffffffffffffffffffffd1L,
+ None,
+ 0x94bfe70deef7b94742c089ca4db3ca27fbe1f754L,
+ 0xcc6562c2969ac57524b8d0f300d1f598c908c121L,
+ 0x952ddde80a252683dd7ba90fb5919899b5af69f5L)
+ })
+
+CURVE_P = 3 # global parameter of all curves (for efficiency reasons)
+
+
+def get_curve(bits):
+ '''Get a known curve of the given size => (bits, prime, order, p, q, point).
+ Order may be None if unknown.'''
+ if bits in DOMAINS:
+ p, n, b, x, y = DOMAINS[bits]
+ return bits, p, n, CURVE_P, p - b, (x, y)
+ else:
+ raise KeyError, "Key size not implemented: %s" % bits
+
+def implemented_keys(must_sign = False):
+    '''List implemented key sizes; only signing-capable ones if must_sign'''
+    return [k for k in DOMAINS if not must_sign or DOMAINS[k][1]]
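+
+
+if __name__ == "__main__":
+    # sanity sketch: unpack the P-256 domain and confirm that exactly
+    # the five FIPS curves are usable for signing
+    bits, cn, n, cp, cq, g = get_curve(256)
+    assert cn > n > 0 and 0 < g[0] < cn and 0 < g[1] < cn
+    assert sorted(implemented_keys(must_sign = True)) == [192, 224, 256, 384, 521]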
diff --git a/python/PyECC/ecc/eccrypt.py b/python/PyECC/ecc/eccrypt.py
new file mode 100644
index 000000000..c38876d07
--- /dev/null
+++ b/python/PyECC/ecc/eccrypt.py
@@ -0,0 +1,65 @@
+# Elliptic Curve Hybrid Encryption Scheme
+#
+# COPYRIGHT (c) 2010 by Toni Mattis <solaris@live.de>
+#
+
+from curves import get_curve
+from elliptic import mulp
+from encoding import enc_long
+from random import SystemRandom
+from Rabbit import Rabbit
+
+# important for cryptographically secure random numbers:
+random = SystemRandom()
+
+# Encryption Algorithm:
+# ---------------------
+# Input: Message M, public key Q
+#
+# 0. retrieve the group from which Q was generated.
+# 1. generate random number k between 1 and the group order.
+# 2. compute KG = k * G (where G is the base point of the group).
+# 3. compute SG = k * Q (where Q is the public key of the receiver).
+# 4. symmetrically encrypt M to M' using SG's x-coordinate as key.
+#
+# Return: Ciphertext M', temporary key KG
+
+
+def encrypt(message, qk, encrypter = Rabbit):
+ '''Encrypt a message using public key qk => (ciphertext, temp. pubkey)'''
+ bits, q = qk
+ try:
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ if not n:
+ raise ValueError, "Key size %s not suitable for encryption" % bits
+ except KeyError:
+ raise ValueError, "Key size %s not implemented" % bits
+
+ k = random.randint(1, n - 1) # temporary private key k
+ kg = mulp(cp, cq, cn, g, k) # temporary public key k*G
+ sg = mulp(cp, cq, cn, q, k) # shared secret k*Q = k*d*G
+
+ return encrypter(enc_long(sg[0])).encrypt(message), kg
+
+# Decryption Algorithm:
+# ---------------------
+# Input: Ciphertext M', temporary key KG, private key d
+#
+# 0. retrieve the group from which d and KG were generated.
+# 1. compute SG = d * KG.
+# 2. symmetrically decrypt M' to M using SG's x-coordinate as key.
+#
+# Return: M
+
+def decrypt(message, kg, dk, decrypter = Rabbit):
+ '''Decrypt a message using temp. public key kg and private key dk'''
+ bits, d = dk
+ try:
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ except KeyError:
+ raise ValueError, "Key size %s not implemented" % bits
+
+ sg = mulp(cp, cq, cn, kg, d) # shared secret d*(k*G) = k*d*G
+ return decrypter(enc_long(sg[0])).decrypt(message)
+
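+if __name__ == "__main__":
+    # round-trip sketch: make a keypair via the sibling ecdsa module,
+    # encrypt to the public key, decrypt with the private key
+    import ecdsa
+    qk, dk = ecdsa.keypair(256)
+    ctext, kg = encrypt("secret", qk)
+    assert decrypt(ctext, kg, dk) == "secret"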
+
diff --git a/python/PyECC/ecc/ecdsa.py b/python/PyECC/ecc/ecdsa.py
new file mode 100644
index 000000000..6b52aeaa5
--- /dev/null
+++ b/python/PyECC/ecc/ecdsa.py
@@ -0,0 +1,153 @@
+#
+# Elliptic Curve Digital Signature Algorithm (ECDSA)
+#
+# COPYRIGHT (c) 2010 by Toni Mattis <solaris@live.de>
+#
+
+from elliptic import inv, mulf, mulp, muladdp, element
+from curves import get_curve, implemented_keys
+from os import urandom
+
+import hashlib
+
+def randkey(bits, n):
+ '''Generate a random number (mod n) having the specified bit length'''
+ rb = urandom(bits / 8 + 8) # + 64 bits as recommended in FIPS 186-3
+ c = 0
+ for r in rb:
+ c = (c << 8) | ord(r)
+ return (c % (n - 1)) + 1
+
+def keypair(bits):
+ '''Generate a new keypair (qk, dk) with dk = private and qk = public key'''
+ try:
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ except KeyError:
+ raise ValueError, "Key size %s not implemented" % bits
+ if n > 0:
+ d = randkey(bits, n)
+ q = mulp(cp, cq, cn, g, d)
+ return (bits, q), (bits, d)
+ else:
+ raise ValueError, "Key size %s not suitable for signing" % bits
+
+def supported_keys():
+ '''Return a list of all key sizes implemented for signing'''
+ return implemented_keys(True)
+
+def validate_public_key(qk):
+ '''Check whether public key qk is valid'''
+ bits, q = qk
+ x, y = q
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ return q and 0 < x < cn and 0 < y < cn and \
+ element(q, cp, cq, cn) and (mulp(cp, cq, cn, q, n) == None)
+
+def validate_private_key(dk):
+ '''Check whether private key dk is valid'''
+ bits, d = dk
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ return 0 < d < cn
+
+def match_keys(qk, dk):
+ '''Check whether dk is the private key belonging to qk'''
+ bits, d = dk
+ bitz, q = qk
+ if bits == bitz:
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ return mulp(cp, cq, cn, g, d) == q
+ else:
+ return False
+
+def truncate(h, hmax):
+ '''Truncate a hash to the bit size of hmax'''
+ while h > hmax:
+ h >>= 1
+ return h
+
+def sign(h, dk):
+ '''Sign the numeric value h using private key dk'''
+ bits, d = dk
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ h = truncate(h, cn)
+ r = s = 0
+ while r == 0 or s == 0:
+        k = randkey(bits, n) # nonce k must lie in [1, n-1], n = group order
+ kinv = inv(k, n)
+ kg = mulp(cp, cq, cn, g, k)
+ r = kg[0] % n
+ if r == 0:
+ continue
+ s = (kinv * (h + r * d)) % n
+ return r, s
+
+def verify(h, sig, qk):
+ '''Verify that 'sig' is a valid signature of h using public key qk'''
+ bits, q = qk
+ try:
+ bits, cn, n, cp, cq, g = get_curve(bits)
+ except KeyError:
+ return False
+ h = truncate(h, cn)
+ r, s = sig
+ if 0 < r < n and 0 < s < n:
+ w = inv(s, n)
+ u1 = (h * w) % n
+ u2 = (r * w) % n
+ x, y = muladdp(cp, cq, cn, g, u1, q, u2)
+ return r % n == x % n
+ return False
+
+def hash_sign(s, dk, hashfunc = 'sha256'):
+ h = int(hashlib.new(hashfunc, s).hexdigest(), 16)
+ return (hashfunc,) + sign(h, dk)
+
+def hash_verify(s, sig, qk):
+ h = int(hashlib.new(sig[0], s).hexdigest(), 16)
+ return verify(h, sig[1:], qk)
+
+
+if __name__ == "__main__":
+
+ import time
+
+ testh1 = 0x0123456789ABCDEF
+ testh2 = 0x0123456789ABCDEE
+
+ for k in supported_keys():
+ qk, dk = keypair(k)
+ s1 = sign(testh1, dk)
+ s2 = sign(testh1, (dk[0], dk[1] ^ 1))
+ s3 = (s1[0], s1[1] ^ 1)
+ qk2 = (qk[0], (qk[1][0] ^ 1, qk[1][1]))
+
+ assert verify(testh1, s1, qk) # everything ok -> must succeed
+ assert not verify(testh2, s1, qk) # modified hash -> must fail
+ assert not verify(testh1, s2, qk) # different priv. key -> must fail
+ assert not verify(testh1, s3, qk) # modified signature -> must fail
+ assert not verify(testh1, s1, qk2) # different publ. key -> must fail
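+
+        # hash_sign/hash_verify bundle the hash name with the signature
+        hsig = hash_sign("some message", dk)
+        assert hash_verify("some message", hsig, qk)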
+
+
+ def test_perf(bits, rounds = 50):
+ '''-> (key generations, signatures, verifications) / second'''
+ h = 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF
+ d = get_curve(bits)
+
+ t = time.time()
+ for i in xrange(rounds):
+ qk, dk = keypair(bits)
+ tgen = time.time() - t
+
+ t = time.time()
+ for i in xrange(rounds):
+ s = sign(0, dk)
+ tsign = time.time() - t
+
+ t = time.time()
+ for i in xrange(rounds):
+ verify(0, s, qk)
+ tver = time.time() - t
+
+ return rounds / tgen, rounds / tsign, rounds / tver
+
+
diff --git a/python/PyECC/ecc/elliptic.py b/python/PyECC/ecc/elliptic.py
new file mode 100644
index 000000000..9191a8848
--- /dev/null
+++ b/python/PyECC/ecc/elliptic.py
@@ -0,0 +1,381 @@
+
+# --- ELLIPTIC CURVE MATH ------------------------------------------------------
+#
+# curve definition: y^2 = x^3 - p*x - q
+# over finite field: Z/nZ* (prime residue classes modulo a prime number n)
+#
+#
+# COPYRIGHT (c) 2010 by Toni Mattis <solaris@live.de>
+# ------------------------------------------------------------------------------
+
+'''
+Module for elliptic curve arithmetic over a prime field GF(n).
+E(GF(n)) takes the form y**2 == x**3 - p*x - q (mod n) for a prime n.
+
+0. Structures used by this module
+
+ PARAMETERS and SCALARS are non-negative (long) integers.
+
+ A POINT (x, y), usually denoted p1, p2, ...
+ is a pair of (long) integers where 0 <= x < n and 0 <= y < n
+
+ A POINT in PROJECTIVE COORDINATES, usually denoted jp1, jp2, ...
+ takes the form (X, Y, Z, Z**2, Z**3) where x = X / Z**2
+    and y = Y / Z**3. This form is called Jacobian coordinates.
+
+ The NEUTRAL element "0" or "O" is represented by None
+ in both coordinate systems.
+
+1. Basic Functions
+
+ euclid() Is the Extended Euclidean Algorithm.
+ inv() Computes the multiplicative inversion modulo n.
+ curve_q() Finds the curve parameter q (mod n)
+ when p and a point are given.
+ element() Tests whether a point (x, y) is on the curve.
+
+2. Point transformations
+
+ to_projective() Converts a point (x, y) to projective coordinates.
+ from_projective() Converts a point from projective coordinates
+ to (x, y) using the transformation described above.
+ neg() Computes the inverse point -P in both coordinate
+ systems.
+
+3. Slow point arithmetic
+
+ These algorithms make use of basic geometry and modular arithmetic
+ thus being suitable for small numbers and academic study.
+
+ add() Computes the sum of two (x, y)-points
+ mul() Perform scalar multiplication using "double & add"
+
+4. Fast point arithmetic
+
+ These algorithms make use of projective coordinates, signed binary
+    expansion and a JSF-like approach (joint sparse form).
+
+ The following functions consume and return projective coordinates:
+
+ addf() Optimized point addition.
+ doublef() Optimized point doubling.
+ mulf() Highly optimized scalar multiplication.
+ muladdf() Highly optimized addition of two products.
+
+ The following functions use the optimized ones above but consume
+ and output (x, y)-coordinates for a more convenient usage:
+
+ mulp() Encapsulates mulf()
+ muladdp() Encapsulates muladdf()
+
+ For single additions add() is generally faster than an encapsulation of
+ addf() which would involve expensive coordinate transformations.
+ Hence there is no addp() and doublep().
+'''
+
+# BASIC MATH -------------------------------------------------------------------
+
+def euclid(a, b):
+    '''Solve x*a + y*b = gcd(a, b) and return (x, y, gcd(a, b))'''
+ # Non-recursive approach hence suitable for large numbers
+ x = yy = 0
+ y = xx = 1
+ while b:
+ q = a // b
+ a, b = b, a % b
+ x, xx = xx - q * x, x
+ y, yy = yy - q * y, y
+ return xx, yy, a
+
+def inv(a, n):
+ '''Perform inversion 1/a modulo n. a and n should be COPRIME.'''
+ # coprimality is not checked here in favour of performance
+ i = euclid(a, n)[0]
+ while i < 0:
+ i += n
+ return i
+
+def curve_q(x, y, p, n):
+ '''Find curve parameter q mod n having point (x, y) and parameter p'''
+ return ((x * x - p) * x - y * y) % n
+
+def element(point, p, q, n):
+ '''Test, whether the given point is on the curve (p, q, n)'''
+ if point:
+ x, y = point
+ return (x * x * x - p * x - q) % n == (y * y) % n
+ else:
+ return True
+
+def to_projective(p):
+ '''Transform point p given as (x, y) to projective coordinates'''
+ if p:
+ return (p[0], p[1], 1, 1, 1)
+ else:
+ return None # Identity point (0)
+
+def from_projective(jp, n):
+ '''Transform a point from projective coordinates to (x, y) mod n'''
+ if jp:
+ return (jp[0] * inv(jp[3], n)) % n, (jp[1] * inv(jp[4], n)) % n
+ else:
+ return None # Identity point (0)
+
+def neg(p, n):
+ '''Compute the inverse point to p in any coordinate system'''
+ return (p[0], (n - p[1]) % n) + p[2:] if p else None
+
+
+# POINT ADDITION ---------------------------------------------------------------
+
+# addition of points in y**2 = x**3 - p*x - q over <Z/nZ*; +>
+def add(p, q, n, p1, p2):
+ '''Add points p1 and p2 over curve (p, q, n)'''
+ if p1 and p2:
+ x1, y1 = p1
+ x2, y2 = p2
+ if (x1 - x2) % n:
+ s = ((y1 - y2) * inv(x1 - x2, n)) % n # slope
+ x = (s * s - x1 - x2) % n # intersection with curve
+ return (x, n - (y1 + s * (x - x1)) % n)
+ else:
+ if (y1 + y2) % n: # slope s calculated by derivation
+ s = ((3 * x1 * x1 - p) * inv(2 * y1, n)) % n
+ x = (s * s - 2 * x1) % n # intersection with curve
+ return (x, n - (y1 + s * (x - x1)) % n)
+ else:
+ return None
+    else: # either p1 is not None -> return p1; otherwise p2, which may be
+        return p1 if p1 else p2 # None too.
+
+
+# faster addition: redundancy in projective coordinates eliminates
+# expensive inversions mod n.
+def addf(p, q, n, jp1, jp2):
+ '''Add jp1 and jp2 in projective (jacobian) coordinates.'''
+ if jp1 and jp2:
+
+ x1, y1, z1, z1s, z1c = jp1
+ x2, y2, z2, z2s, z2c = jp2
+
+ s1 = (y1 * z2c) % n
+ s2 = (y2 * z1c) % n
+
+ u1 = (x1 * z2s) % n
+ u2 = (x2 * z1s) % n
+
+ if (u1 - u2) % n:
+
+ h = (u2 - u1) % n
+ r = (s2 - s1) % n
+
+ hs = (h * h) % n
+ hc = (hs * h) % n
+
+ x3 = (-hc - 2 * u1 * hs + r * r) % n
+ y3 = (-s1 * hc + r * (u1 * hs - x3)) % n
+ z3 = (z1 * z2 * h) % n
+
+ z3s = (z3 * z3) % n
+ z3c = (z3s * z3) % n
+
+ return (x3, y3, z3, z3s, z3c)
+
+ else:
+ if (s1 + s2) % n:
+ return doublef(p, q, n, jp1)
+ else:
+ return None
+ else:
+ return jp1 if jp1 else jp2
+
+# explicit point doubling using redundant coordinates
+def doublef(p, q, n, jp):
+ '''Double jp in projective (jacobian) coordinates'''
+ if not jp:
+ return None
+ x1, y1, z1, z1p2, z1p3 = jp
+
+ y1p2 = (y1 * y1) % n
+ a = (4 * x1 * y1p2) % n
+ b = (3 * x1 * x1 - p * z1p3 * z1) % n
+ x3 = (b * b - 2 * a) % n
+ y3 = (b * (a - x3) - 8 * y1p2 * y1p2) % n
+ z3 = (2 * y1 * z1) % n
+ z3p2 = (z3 * z3) % n
+
+ return x3, y3, z3, z3p2, (z3p2 * z3) % n
+
+
+# SCALAR MULTIPLICATION --------------------------------------------------------
+
+# scalar multiplication p1 * c = p1 + p1 + ... + p1 (c times) in O(log(n))
+def mul(p, q, n, p1, c):
+ '''multiply point p1 by scalar c over curve (p, q, n)'''
+ res = None
+ while c > 0:
+ if c & 1:
+ res = add(p, q, n, res, p1)
+ c >>= 1 # c = c / 2
+ p1 = add(p, q, n, p1, p1) # p1 = p1 * 2
+ return res
+
+
+# this method allows _signed_bin() to choose between 1 and -1. It will select
+# the sign which leaves the higher number of zeroes in the binary
+# representation (the greater base-2 divisor, GBD).
+def _gbd(n):
+ '''Compute second greatest base-2 divisor'''
+ i = 1
+ if n <= 0: return 0
+ while not n % i:
+ i <<= 1
+ return i >> 2
+
+
+# This method transforms n into a binary representation having signed bits.
+# A signed binary expansion contains more zero-bits hence reducing the number
+# of additions required by a multiplication algorithm.
+#
+# Example: 15 ( 0b1111 ) can be written as 16 - 1, resulting in (1,0,0,0,-1)
+# and saving 2 additions. Subtraction can be performed as
+# efficiently as addition.
+def _signed_bin(n):
+ '''Transform n into an optimized signed binary representation'''
+ r = []
+ while n > 1:
+ if n & 1:
+ cp = _gbd(n + 1)
+ cn = _gbd(n - 1)
+ if cp > cn: # -1 leaves more zeroes -> subtract -1 (= +1)
+ r.append(-1)
+ n += 1
+ else: # +1 leaves more zeroes -> subtract +1 (= -1)
+ r.append(+1)
+ n -= 1
+        else:
+            r.append(0) # be glad about one more zero
+        n >>= 1 # shift to the next bit position in every iteration
+ r.append(n)
+ return r[::-1]
+
+
+# This multiplication algorithm combines signed binary expansion and
+# fast addition using projective coordinates resulting in 5 to 10 times
+# faster multiplication.
+def mulf(p, q, n, jp1, c):
+ '''Multiply point jp1 by c in projective coordinates'''
+ sb = _signed_bin(c)
+ res = None
+ jp0 = neg(jp1, n) # additive inverse of jp1 to be used fot bit -1
+ for s in sb:
+ res = doublef(p, q, n, res)
+ if s:
+ res = addf(p, q, n, res, jp1) if s > 0 else \
+ addf(p, q, n, res, jp0)
+ return res
+
+# Encapsulates mulf() in order to enable flat coordinates (x, y)
+def mulp(p, q, n, p1, c):
+ '''Multiply point p by c using fast multiplication'''
+ return from_projective(mulf(p, q, n, to_projective(p1), c), n)
+
+
+# Sum of two products using Shamir's trick and signed binary expansion
+def muladdf(p, q, n, jp1, c1, jp2, c2):
+ '''Efficiently compute c1 * jp1 + c2 * jp2 in projective coordinates'''
+ s1 = _signed_bin(c1)
+ s2 = _signed_bin(c2)
+ diff = len(s2) - len(s1)
+ if diff > 0:
+ s1 = [0] * diff + s1
+ elif diff < 0:
+ s2 = [0] * -diff + s2
+
+ jp1p2 = addf(p, q, n, jp1, jp2)
+ jp1n2 = addf(p, q, n, jp1, neg(jp2, n))
+
+ precomp = ((None, jp2, neg(jp2, n)),
+ (jp1, jp1p2, jp1n2),
+ (neg(jp1, n), neg(jp1n2, n), neg(jp1p2, n)))
+ res = None
+
+ for i, j in zip(s1, s2):
+ res = doublef(p, q, n, res)
+ if i or j:
+ res = addf(p, q, n, res, precomp[i][j])
+ return res
+
+# Encapsulate muladdf()
+def muladdp(p, q, n, p1, c1, p2, c2):
+ '''Efficiently compute c1 * p1 + c2 * p2 in (x, y)-coordinates'''
+ return from_projective(muladdf(p, q, n,
+ to_projective(p1), c1,
+ to_projective(p2), c2), n)
+
+# POINT COMPRESSION ------------------------------------------------------------
+
+# Compute the square root modulo n
+
+
+# Determine the sign-bit of a point allowing to reconstruct y-coordinates
+# when x and the sign-bit are given:
+def sign_bit(p1):
+ '''Return the signedness of a point p1'''
+ return p1[1] % 2 if p1 else 0
+
+# Reconstruct the y-coordinate when curve parameters, x and the sign-bit of
+# the y coordinate are given:
+def y_from_x(x, p, q, n, sign):
+ '''Return the y coordinate over curve (p, q, n) for given (x, sign)'''
+
+ # optimized form of (x**3 - p*x - q) % n
+ a = (((x * x) % n - p) * x - q) % n
+
+    # completion sketch: for primes with n % 4 == 3 (every curve in
+    # curves.py except the 224-bit one) a**((n + 1)/4) is a square root
+    # of a, if one exists; the general case needs Tonelli-Shanks
+    y = pow(a, (n + 1) // 4, n)
+    if (y * y) % n != a:
+        return None # x is not the abscissa of a curve point
+    return y if y % 2 == sign else n - y
+
+
+if __name__ == "__main__":
+    # the sibling primes module supplies the get_prime() and random
+    # helpers this demo expects from an "rsa" module (assumption)
+    import primes as rsa
+    import time
+
+ t = time.time()
+ n = rsa.get_prime(256/8, 20)
+ tp = time.time() - t
+ p = rsa.random.randint(1, n)
+ p1 = (rsa.random.randint(1, n), rsa.random.randint(1, n))
+ q = curve_q(p1[0], p1[1], p, n)
+ r1 = rsa.random.randint(1,n)
+ r2 = rsa.random.randint(1,n)
+ q1 = mulp(p, q, n, p1, r1)
+ q2 = mulp(p, q, n, p1, r2)
+ s1 = mulp(p, q, n, q1, r2)
+ s2 = mulp(p, q, n, q2, r1)
+    assert s1 == s2 # r2*(r1*P) must equal r1*(r2*P)
+ tt = time.time() - t
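+
+    # toy-curve sketch: the slow textbook path and the fast projective
+    # path must agree on scalar multiplication (parameters arbitrary)
+    pt0 = (9, 5)
+    q0 = curve_q(pt0[0], pt0[1], 3, 23) # curve through pt0 over GF(23)
+    assert element(pt0, 3, q0, 23)
+    assert mul(3, q0, 23, pt0, 7) == mulp(3, q0, 23, pt0, 7)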
+
+ def test(tcount, bits = 256):
+ n = rsa.get_prime(bits/8, 20)
+ p = rsa.random.randint(1, n)
+ p1 = (rsa.random.randint(1, n), rsa.random.randint(1, n))
+ q = curve_q(p1[0], p1[1], p, n)
+ p2 = mulp(p, q, n, p1, rsa.random.randint(1, n))
+
+ c1 = [rsa.random.randint(1, n) for i in xrange(tcount)]
+ c2 = [rsa.random.randint(1, n) for i in xrange(tcount)]
+ c = zip(c1, c2)
+
+ t = time.time()
+ for i, j in c:
+ from_projective(addf(p, q, n,
+ mulf(p, q, n, to_projective(p1), i),
+ mulf(p, q, n, to_projective(p2), j)), n)
+ t1 = time.time() - t
+ t = time.time()
+ for i, j in c:
+ muladdp(p, q, n, p1, i, p2, j)
+ t2 = time.time() - t
+
+ return tcount, t1, t2
+
+
+
diff --git a/python/PyECC/ecc/encoding.py b/python/PyECC/ecc/encoding.py
new file mode 100644
index 000000000..24d3eb5a8
--- /dev/null
+++ b/python/PyECC/ecc/encoding.py
@@ -0,0 +1,178 @@
+#
+# Encodings and Formats for Elliptic Curve Cryptography
+#
+
+import StringIO
+
+# Big-Endian Encoding
+
+def enc_long(n):
+ '''Encodes arbitrarily large number n to a sequence of bytes.
+ Big endian byte order is used.'''
+ s = ""
+ while n > 0:
+ s = chr(n & 0xFF) + s
+ n >>= 8
+ return s
+
+def enc_int(n):
+ '''Encodes an integer n to a 4-byte string.
+ Big endian byte order is used.'''
+ return chr((n >> 24) & 0xFF) + chr((n >> 16) & 0xFF) + \
+ chr((n >> 8) & 0xFF) + chr( n & 0xFF)
+
+def enc_fixed_long(n, length):
+ return enc_long(n)[:length].rjust(length, '\x00')
+
+def dec_long(s):
+ '''Decodes s to its numeric representation.
+ Big endian byte order is used.'''
+ n = 0
+ for c in s:
+ n = (n << 8) | ord(c)
+ return n
+
+# dec_int not necessary,
+# dec_long does the same when provided with 4 bytes input.
+
+# Chunks
+
+def enc_chunks(*args):
+ '''Chain given string args or sub-chunks to a single chunk'''
+ return ''.join([enc_int(len(a)) + a for a in args])
+
+def dec_chunks(s):
+ '''Split a chunk into strings or sub-chunks'''
+ i = 0
+ result = []
+ while i < len(s):
+ size = dec_long(s[i : i + 4])
+ i += 4
+ result.append(s[i : i + size])
+ i += size
+ return result
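+
+# round-trip example: dec_chunks(enc_chunks("ab", "c")) == ["ab", "c"]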
+
+# Point and signature data
+
+def enc_point(p):
+ '''Encode a point p = (x, y)'''
+ x, y = p
+ sx = enc_long(x)
+ sy = enc_long(y)
+ diff = len(sx) - len(sy)
+ if diff > 0:
+ sy = '\x00' * diff + sy
+ elif diff < 0:
+ sx = '\x00' * -diff + sx
+ return sx + sy
+
+def dec_point(s):
+    '''Decode an even-length string s to a point (x, y)'''
+ d = len(s) / 2
+ return (dec_long(s[:d]), dec_long(s[d:]))
+
+
+class Encoder:
+
+ def __init__(self):
+ self._io = StringIO.StringIO()
+
+ def int(self, n, size = 4):
+ self._io.write(enc_fixed_long(n, size))
+ return self
+
+ def long(self, n, pre = 2):
+ lstr = enc_long(n)
+ self._io.write(enc_fixed_long(len(lstr), pre) + lstr)
+ return self
+
+ def str(self, s, pre = 2):
+ self._io.write(enc_fixed_long(len(s), pre) + s)
+ return self
+
+ def point(self, p, pre = 2):
+ lstr = enc_point(p)
+ self._io.write(enc_fixed_long(len(lstr), pre) + lstr)
+ return self
+
+ def chunk(self, enc, pre = 2):
+ lstr = enc.out()
+ self._io.write(enc_fixed_long(len(lstr), pre) + lstr)
+ return self
+
+ def out(self):
+ return self._io.getvalue()
+
+class Decoder:
+
+ def __init__(self, data, offset = 0):
+ self._io = StringIO.StringIO(data)
+ self._io.seek(offset)
+ self._res = []
+ self._limit = None
+ self._parent = None
+
+ def _ret(self):
+## if self._parent and self._io.tell() >= self._limit:
+## return self.exit()
+## else:
+## return self
+ return self
+
+ def int(self, size = 4):
+ self._res.append(dec_long(self._io.read(size)))
+ return self._ret()
+
+
+ def long(self, pre = 2):
+ llen = dec_long(self._io.read(pre))
+ self._res.append(dec_long(self._io.read(llen)))
+ return self._ret()
+
+ def str(self, pre = 2):
+ llen = dec_long(self._io.read(pre))
+ self._res.append(self._io.read(llen))
+ return self._ret()
+
+ def point(self, pre = 2):
+ llen = dec_long(self._io.read(pre))
+ self._res.append(dec_point(self._io.read(llen)))
+ return self._ret()
+
+ def enter(self, pre = 2):
+ llen = dec_long(self._io.read(pre))
+ subcoder = Decoder("")
+ subcoder._io = self._io
+ subcoder._parent = self
+ subcoder._limit = self._io.tell() + llen
+ return subcoder
+
+ def chunk(self, pre = 2):
+ llen = dec_long(self._io.read(pre))
+ self._res.append(Decoder(self._io.read(llen)))
+ return self._ret()
+
+ def exit(self):
+ if self._parent:
+ self._parent._io.seek(self._limit)
+ self._parent._res.append(self._res)
+ return self._parent
+ else:
+            raise RuntimeError, "Cannot exit top-level Decoder"
+
+ def continues(self):
+ return (not self._limit) or (self._io.tell() < self._limit)
+
+ def out(self, exit_all = False):
+ if exit_all and self._parent:
+ return self.exit().out()
+ else:
+ r = self._res
+ self._res = []
+ return r
+
+ def only(self):
+ if self._res:
+ return self._res.pop(0)
+ else:
+            raise RuntimeError, "Only what? (Empty decoder stack)"
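+
+
+if __name__ == "__main__":
+    # round-trip sketch: chain encoder calls, then decode in the same order
+    blob = Encoder().int(42).str("payload").point((3, 4)).out()
+    n, s, p = Decoder(blob).int().str().point().out()
+    assert (n, s, p) == (42, "payload", (3, 4))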
diff --git a/python/PyECC/ecc/performance.py b/python/PyECC/ecc/performance.py
new file mode 100644
index 000000000..724176aef
--- /dev/null
+++ b/python/PyECC/ecc/performance.py
@@ -0,0 +1,50 @@
+from Key import Key
+import time
+from collections import OrderedDict
+
+def test_generation_perf(n = 100):
+ results = OrderedDict()
+ for bits in (192, 224, 256, 384, 521):
+ t = time.time()
+ for i in xrange(n):
+ k = Key.generate(bits)
+ t = time.time() - t
+ results[bits] = t
+ return results
+
+def test_signing_perf(n = 100):
+ results = OrderedDict()
+ for bits in (192, 224, 256, 384, 521):
+ k = Key.generate(bits)
+ t = time.time()
+ for i in xrange(n):
+ k.sign("random string")
+ t = time.time() - t
+ results[bits] = t
+ return results
+
+def test_verification_perf(n = 100):
+ results = OrderedDict()
+ for bits in (192, 224, 256, 384, 521):
+ k = Key.generate(bits)
+ s = k.sign("random string")
+ t = time.time()
+ for i in xrange(n):
+ k.verify("random string", s)
+ t = time.time() - t
+ results[bits] = t
+ return results
+
+def print_dict(title, d):
+ print title
+ print '-' * len(title)
+ for k, v in d.items():
+ print k, '\t', v
+ print
+
+n = 100
+print_dict("Key generation", test_generation_perf(n))
+print_dict("Signing", test_signing_perf(n))
+print_dict("Verifying", test_verification_perf(n))
+
+
diff --git a/python/PyECC/ecc/primes.py b/python/PyECC/ecc/primes.py
new file mode 100644
index 000000000..a8bc1424b
--- /dev/null
+++ b/python/PyECC/ecc/primes.py
@@ -0,0 +1,82 @@
+'''
+This module implements simple prime generation and primality testing.
+'''
+
+from random import SystemRandom
+random = SystemRandom()
+from os import urandom
+
+def exp(x, n, m):
+ '''Efficiently compute x ** n mod m'''
+ y = 1
+ z = x
+ while n > 0:
+ if n & 1:
+ y = (y * z) % m
+ z = (z * z) % m
+ n //= 2
+ return y
+
+
+# Miller-Rabin-Test
+
+def prime(n, k):
+    '''Checks whether n is probably prime (with probability 1 - 4**(-k))'''
+
+    if n % 2 == 0:
+        return n == 2 # 2 is the only even prime
+
+ d = n - 1
+ s = 0
+
+ while d % 2 == 0:
+ s += 1
+ d /= 2
+
+ for i in xrange(k):
+
+ a = long(2 + random.randint(0, n - 4))
+ x = exp(a, d, n)
+ if (x == 1) or (x == n - 1):
+ continue
+
+ for r in xrange(1, s):
+ x = (x * x) % n
+
+ if x == 1:
+ return False
+
+ if x == n - 1:
+ break
+
+ else:
+ return False
+ return True
+
+
+# Generate and Test Algorithms
+
+def get_prime(size, accuracy):
+ '''Generate a pseudorandom prime number with the specified size (bytes).'''
+
+ while 1:
+
+ # read some random data from the operating system
+ rstr = urandom(size - 1)
+ r = 128 | ord(urandom(1)) # MSB = 1 (not less than size)
+ for c in rstr:
+ r = (r << 8) | ord(c)
+ r |= 1 # LSB = 1 (odd)
+
+ # test whether this results in a prime number
+ if prime(r, accuracy):
+ return r
+
+
+def get_prime_upto(n, accuracy):
+ '''Find largest prime less than n'''
+ n |= 1
+ while n > 0:
+ n -= 2
+ if prime(n, accuracy):
+ return n
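+
+
+if __name__ == "__main__":
+    # smoke-test sketch: a 64-bit probable prime and the largest prime < 100
+    p = get_prime(8, 20)
+    assert prime(p, 20) and p % 2
+    assert get_prime_upto(100, 20) == 97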
diff --git a/python/PyECC/ecc/shacrypt.py b/python/PyECC/ecc/shacrypt.py
new file mode 100644
index 000000000..69ee7b943
--- /dev/null
+++ b/python/PyECC/ecc/shacrypt.py
@@ -0,0 +1,38 @@
+# ------------------------------------------------------------------------------
+#
+# SHA-512-BASED FEISTEL CIPHER
+# by Toni Mattis
+#
+# Feistel Function: SHA-512(Block || Key)
+# Key Size: Fully Dynamic
+# Block Size: 1024 Bits
+# Rounds: User-Specified
+#
+# ------------------------------------------------------------------------------
+
+from hashlib import sha512
+
+BPOS = tuple(range(64))
+
+def enc_block(block, key, rounds = 16):
+ x = block[:64]
+ y = block[64:]
+ for i in xrange(rounds):
+ h = sha512(x + key).digest()
+ y = ''.join([chr(ord(y[k]) ^ ord(h[k])) for k in BPOS])
+ h = sha512(y + key).digest()
+ x = ''.join([chr(ord(x[k]) ^ ord(h[k])) for k in BPOS])
+ return x + y
+
+def dec_block(block, key, rounds = 16):
+ x = block[:64]
+ y = block[64:]
+ for i in xrange(rounds):
+ h = sha512(y + key).digest()
+ x = ''.join([chr(ord(x[k]) ^ ord(h[k])) for k in BPOS])
+ h = sha512(x + key).digest()
+ y = ''.join([chr(ord(y[k]) ^ ord(h[k])) for k in BPOS])
+ return x + y
+
+
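+if __name__ == "__main__":
+    # round-trip sketch: a 128-byte block survives enc_block + dec_block
+    block = "0123456789abcdef" * 8
+    assert dec_block(enc_block(block, "key"), "key") == block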
+
diff --git a/python/PyECC/setup.py b/python/PyECC/setup.py
new file mode 100644
index 000000000..b9e507c18
--- /dev/null
+++ b/python/PyECC/setup.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2007 The Python-Twitter Developers
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# copied from https://github.com/bear/python-twitter/blob/master/setup.py
+#
+
+'''The setup and build script for the ecc library.'''
+
+__author__ = 'niccokunzmann@aol.com'
+__version__ = '0.0.1'
+
+
+# The base package metadata to be used by both distutils and setuptools
+METADATA = dict(
+ name = "ecc",
+ version = __version__,
+ packages = ['ecc'],
+ author='Toni Mattis',
+ author_email='solaris@live.de',
+ description='Pure Python implementation of an elliptic curve cryptosystem based on FIPS 186-3',
+ license='MIT',
+ url='https://github.com/niccokunzmann/ecc',
+ keywords='elliptic curve cryptosystem rabbit cipher',
+)
+
+# Extra package metadata to be used only if setuptools is installed
+SETUPTOOLS_METADATA = dict(
+ install_requires = [],
+ include_package_data = True,
+ classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Communications',
+ 'Topic :: Security :: Cryptography',
+ 'Topic :: Internet',
+ ],
+## test_suite = 'distacc_test',
+)
+
+
+def Read(file):
+ return open(file).read()
+
+def BuildLongDescription():
+ return '\n'.join([Read('README.md'), ])
+
+def Main():
+    # Build the long_description from the README
+ METADATA['long_description'] = BuildLongDescription()
+
+ # Use setuptools if available, otherwise fallback and use distutils
+ try:
+ import setuptools
+ METADATA.update(SETUPTOOLS_METADATA)
+ setuptools.setup(**METADATA)
+ except ImportError:
+ import distutils.core
+ distutils.core.setup(**METADATA)
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/python/README b/python/README
new file mode 100644
index 000000000..f9e677b5e
--- /dev/null
+++ b/python/README
@@ -0,0 +1,21 @@
+This directory contains common Python code.
+
+The basic rule is that if Python code is cross-module (that's "module" in the
+Mozilla meaning - as in "module ownership") and is MPL-compatible, it should
+go here.
+
+What should not go here:
+
+* Python that is not MPL-compatible (see other-licenses/)
+* Python that has good reason to remain close to its "owning" (Mozilla)
+ module (e.g. it is only being consumed from there).
+
+Historical information can be found at
+https://bugzilla.mozilla.org/show_bug.cgi?id=775243
+
+## pyyaml | pystache
+
+Used in taskcluster-related mach commands. To update, download the
+source from GitHub and remove the .git directory and the tests.
+
+Then run the tests in taskcluster/tests/
diff --git a/python/altgraph/MANIFEST.in b/python/altgraph/MANIFEST.in
new file mode 100644
index 000000000..9a9b96078
--- /dev/null
+++ b/python/altgraph/MANIFEST.in
@@ -0,0 +1,9 @@
+include ReadMe.txt
+include *.txt MANIFEST.in *.py
+graft doc
+graft doc/_static
+graft doc/_templates
+graft altgraph_tests
+global-exclude .DS_Store
+global-exclude *.pyc
+global-exclude *.so
diff --git a/python/altgraph/PKG-INFO b/python/altgraph/PKG-INFO
new file mode 100644
index 000000000..87b602f52
--- /dev/null
+++ b/python/altgraph/PKG-INFO
@@ -0,0 +1,216 @@
+Metadata-Version: 1.1
+Name: altgraph
+Version: 0.12
+Summary: Python graph (network) package
+Home-page: http://packages.python.org/altgraph
+Author: Ronald Oussoren
+Author-email: ronaldoussoren@mac.com
+License: MIT
+Download-URL: http://pypi.python.org/pypi/altgraph
+Description: altgraph is a fork of graphlib: a graph (network) package for constructing
+ graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with
+ graphviz output.
+
+ altgraph includes some additional usage of Python 2.6+ features and
+ enhancements related to modulegraph and macholib.
+
+
+ Release history
+ ===============
+
+ 0.12
+ ----
+
+ - Added ``ObjectGraph.edgeData`` to retrieve the edge data
+ from a specific edge.
+
+ - Added ``AltGraph.update_edge_data`` and ``ObjectGraph.updateEdgeData``
+ to update the data associated with a graph edge.
+
+ 0.11
+ ----
+
+ - Stabilize the order of elements in dot file exports,
+ patch from bitbucket user 'pombredanne'.
+
+ - Tweak setup.py file to remove dependency on distribute (but
+ keep the dependency on setuptools)
+
+
+ 0.10.2
+ ------
+
+        - There were no classifiers in the package metadata due to a bug
+ in setup.py
+
+ 0.10.1
+ ------
+
+ This is a bugfix release
+
+ Bug fixes:
+
+ - Issue #3: The source archive contains a README.txt
+ while the setup file refers to ReadMe.txt.
+
+ This is caused by a misfeature in distutils, as a
+ workaround I've renamed ReadMe.txt to README.txt
+ in the source tree and setup file.
+
+
+ 0.10
+ -----
+
+ This is a minor feature release
+
+ Features:
+
+ - Do not use "2to3" to support Python 3.
+
+ As a side effect of this altgraph now supports
+ Python 2.6 and later, and no longer supports
+ earlier releases of Python.
+
+ - The order of attributes in the Dot output
+ is now always alphabetical.
+
+ With this change the output will be consistent
+ between runs and Python versions.
+
+ 0.9
+ ---
+
+ This is a minor bugfix release
+
+ Features:
+
+ - Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method
+ yielding all nodes in an object graph.
+
+ Bugfixes:
+
+ - The 0.8 release didn't work with py2app when using
+ python 3.x.
+
+
+ 0.8
+ -----
+
+ This is a minor feature release. The major new feature
+        is an extensive set of unittests, which explains almost
+ all other changes in this release.
+
+ Bugfixes:
+
+ - Installing failed with Python 2.5 due to using a distutils
+ class that isn't available in that version of Python
+ (issue #1 on the issue tracker)
+
+ - ``altgraph.GraphStat.degree_dist`` now actually works
+
+ - ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will
+ no longer create the edge when one of the nodes doesn't
+ exist.
+
+ - ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs.
+
+ - ``altgraph.Graph.back_topo_sort`` was completely broken in
+ previous releases.
+
+ - ``altgraph.Graph.forw_bfs_subgraph`` now actually works.
+
+ - ``altgraph.Graph.back_bfs_subgraph`` now actually works.
+
+ - ``altgraph.Graph.iterdfs`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+ - ``altgraph.Graph.iterdata`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+
+ Features:
+
+ - The ``altgraph.Graph`` constructor now accepts an argument
+          that contains 2- and 3-tuples instead of requiring that
+ all items have the same size. The (optional) argument can now
+ also be any iterator.
+
+ - ``altgraph.Graph.Graph.add_node`` has no effect when you
+ add a hidden node.
+
+ - The private method ``altgraph.Graph._bfs`` is no longer
+ present.
+
+ - The private method ``altgraph.Graph._dfs`` is no longer
+ present.
+
+        - ``altgraph.ObjectGraph`` now has a ``__contains__`` method,
+ which means you can use the ``in`` operator to check if a
+ node is part of a graph.
+
+ - ``altgraph.GraphUtil.generate_random_graph`` will raise
+ ``GraphError`` instead of looping forever when it is
+ impossible to create the requested graph.
+
+ - ``altgraph.Dot.edge_style`` raises ``GraphError`` when
+ one of the nodes is not present in the graph. The method
+ silently added the tail in the past, but without ensuring
+ a consistent graph state.
+
+ - ``altgraph.Dot.save_img`` now works when the mode is
+ ``"neato"``.
+
+ 0.7.2
+ -----
+
+ This is a minor bugfix release
+
+ Bugfixes:
+
+ - distutils didn't include the documentation subtree
+
+ 0.7.1
+ -----
+
+ This is a minor feature release
+
+ Features:
+
+ - Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/altgraph>.
+
+ - The repository has moved to bitbucket
+
+        - ``altgraph.GraphStat.avg_hops`` is no longer present; the function had no
+ implementation and no specified behaviour.
+
+        - The module ``altgraph.compat`` is gone, which means altgraph will no
+ longer work with Python 2.3.
+
+
+ 0.7.0
+ -----
+
+ This is a minor feature release.
+
+ Features:
+
+ - Support for Python 3
+
+ - It is now possible to run tests using 'python setup.py test'
+
+ (The actual testsuite is still very minimal though)
+
+Keywords: graph
+Platform: any
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Scientific/Engineering :: Mathematics
+Classifier: Topic :: Scientific/Engineering :: Visualization
diff --git a/python/altgraph/README.txt b/python/altgraph/README.txt
new file mode 100644
index 000000000..904a14be5
--- /dev/null
+++ b/python/altgraph/README.txt
@@ -0,0 +1,6 @@
+altgraph is a fork of graphlib: a graph (network) package for constructing
+graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with
+graphviz output.
+
+altgraph includes some additional usage of Python 2.6+ features and
+enhancements related to modulegraph and macholib.
diff --git a/python/altgraph/altgraph/Dot.py b/python/altgraph/altgraph/Dot.py
new file mode 100644
index 000000000..49a471e4d
--- /dev/null
+++ b/python/altgraph/altgraph/Dot.py
@@ -0,0 +1,299 @@
+'''
+altgraph.Dot - Interface to the dot language
+============================================
+
+The :py:mod:`~altgraph.Dot` module provides a simple interface to the
+file format used in the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+program. The module is intended to offload the most tedious part of the process
+(the **dot** file generation) while transparently exposing most of its features.
+
+To display the graphs or to generate image files the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+package needs to be installed on the system; moreover, the :command:`dot` and :command:`dotty` programs must
+be accessible in the program path so that they can be run from processes spawned
+within the module.
+
+Example usage
+-------------
+
+Here is a typical usage::
+
+ from altgraph import Graph, Dot
+
+ # create a graph
+ edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
+ graph = Graph.Graph(edges)
+
+ # create a dot representation of the graph
+ dot = Dot.Dot(graph)
+
+ # display the graph
+ dot.display()
+
+ # save the dot representation into the mydot.dot file
+ dot.save_dot(file_name='mydot.dot')
+
+ # save dot file as gif image into the graph.gif file
+ dot.save_img(file_name='graph', file_type='gif')
+
+Directed graph and non-directed graph
+-------------------------------------
+
+The Dot class can be used for both directed and non-directed graphs
+by passing the ``graphtype`` parameter.
+
+Example::
+
+    # create a directed graph (default)
+ dot = Dot.Dot(graph, graphtype="digraph")
+
+ # create non-directed graph
+ dot = Dot.Dot(graph, graphtype="graph")
+
+Customizing the output
+----------------------
+
+The graph drawing process may be customized by passing
+valid :command:`dot` parameters for the nodes and edges. For a list of all
+parameters see the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+documentation.
+
+Example::
+
+ # customizing the way the overall graph is drawn
+ dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
+
+ # customizing node drawing
+ dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
+ dot.node_style(2, style='filled', fillcolor='red')
+
+ # customizing edge drawing
+ dot.edge_style(1, 2, style='dotted')
+ dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
+ dot.edge_style(4, 5, arrowsize=2, style='bold')
+
+
+.. note::
+
+ dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
+ display all graphics styles. To verify the output save it to an image file
+ and look at it that way.
+
+Valid attributes
+----------------
+
+ - dot styles, passed via the :py:meth:`Dot.style` method::
+
+ rankdir = 'LR' (draws the graph horizontally, left to right)
+ ranksep = number (rank separation in inches)
+
+ - node attributes, passed via the :py:meth:`Dot.node_style` method::
+
+ style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
+ shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
+
+ - edge attributes, passed via the :py:meth:`Dot.edge_style` method::
+
+ style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
+ arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee'
+ weight = number (the larger the number the closer the nodes will be)
+
+ - valid `graphviz colors <http://www.research.att.com/~erg/graphviz/info/colors.html>`_
+
+ - for more details on how to control the graph drawing process see the
+ `graphviz reference <http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
+'''
+import os
+import warnings
+
+from altgraph import GraphError
+
+
+class Dot(object):
+ '''
+ A class providing a **graphviz** (dot language) representation
+ allowing a fine grained control over how the graph is being
+ displayed.
+
+ If the :command:`dot` and :command:`dotty` programs are not in the current system path
+    their location needs to be specified in the constructor.
+ '''
+
+ def __init__(self, graph=None, nodes=None, edgefn=None, nodevisitor=None, edgevisitor=None, name="G", dot='dot', dotty='dotty', neato='neato', graphtype="digraph"):
+ '''
+ Initialization.
+ '''
+ self.name, self.attr = name, {}
+
+ assert graphtype in ['graph', 'digraph']
+ self.type = graphtype
+
+ self.temp_dot = "tmp_dot.dot"
+ self.temp_neo = "tmp_neo.dot"
+
+ self.dot, self.dotty, self.neato = dot, dotty, neato
+
+ # self.nodes: node styles
+ # self.edges: edge styles
+ self.nodes, self.edges = {}, {}
+
+ if graph is not None and nodes is None:
+ nodes = graph
+ if graph is not None and edgefn is None:
+ def edgefn(node, graph=graph):
+ return graph.out_nbrs(node)
+ if nodes is None:
+ nodes = ()
+
+ seen = set()
+ for node in nodes:
+ if nodevisitor is None:
+ style = {}
+ else:
+ style = nodevisitor(node)
+ if style is not None:
+ self.nodes[node] = {}
+ self.node_style(node, **style)
+ seen.add(node)
+ if edgefn is not None:
+ for head in seen:
+ for tail in (n for n in edgefn(head) if n in seen):
+ if edgevisitor is None:
+ edgestyle = {}
+ else:
+ edgestyle = edgevisitor(head, tail)
+ if edgestyle is not None:
+ if head not in self.edges:
+ self.edges[head] = {}
+ self.edges[head][tail] = {}
+ self.edge_style(head, tail, **edgestyle)
+
+ def style(self, **attr):
+ '''
+ Changes the overall style
+ '''
+ self.attr = attr
+
+ def display(self, mode='dot'):
+ '''
+ Displays the current graph via dotty
+ '''
+
+ if mode == 'neato':
+ self.save_dot(self.temp_neo)
+ neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
+ os.system(neato_cmd)
+ else:
+ self.save_dot(self.temp_dot)
+
+ plot_cmd = "%s %s" % (self.dotty, self.temp_dot)
+ os.system(plot_cmd)
+
+ def node_style(self, node, **kwargs):
+ '''
+        Sets the style for a node in the dot representation.
+ '''
+ if node not in self.edges:
+ self.edges[node] = {}
+ self.nodes[node] = kwargs
+
+ def all_node_style(self, **kwargs):
+ '''
+ Modifies all node styles
+ '''
+ for node in self.nodes:
+ self.node_style(node, **kwargs)
+
+ def edge_style(self, head, tail, **kwargs):
+ '''
+        Sets the style for an edge in the dot representation.
+ '''
+ if tail not in self.nodes:
+ raise GraphError("invalid node %s" % (tail,))
+
+ try:
+ if tail not in self.edges[head]:
+ self.edges[head][tail]= {}
+ self.edges[head][tail] = kwargs
+ except KeyError:
+ raise GraphError("invalid edge %s -> %s " % (head, tail) )
+
+ def iterdot(self):
+ # write graph title
+ if self.type == 'digraph':
+ yield 'digraph %s {\n' % (self.name,)
+ elif self.type == 'graph':
+ yield 'graph %s {\n' % (self.name,)
+
+ else:
+ raise GraphError("unsupported graphtype %s" % (self.type,))
+
+ # write overall graph attributes
+ for attr_name, attr_value in sorted(self.attr.items()):
+ yield '%s="%s";' % (attr_name, attr_value)
+ yield '\n'
+
+ # some reusable patterns
+ cpatt = '%s="%s",' # to separate attributes
+ epatt = '];\n' # to end attributes
+
+ # write node attributes
+ for node_name, node_attr in sorted(self.nodes.items()):
+ yield '\t"%s" [' % (node_name,)
+ for attr_name, attr_value in sorted(node_attr.items()):
+ yield cpatt % (attr_name, attr_value)
+ yield epatt
+
+ # write edge attributes
+ for head in sorted(self.edges):
+ for tail in sorted(self.edges[head]):
+ if self.type == 'digraph':
+ yield '\t"%s" -> "%s" [' % (head, tail)
+ else:
+ yield '\t"%s" -- "%s" [' % (head, tail)
+ for attr_name, attr_value in sorted(self.edges[head][tail].items()):
+ yield cpatt % (attr_name, attr_value)
+ yield epatt
+
+ # finish file
+ yield '}\n'
+
+ def __iter__(self):
+ return self.iterdot()
+
+ def save_dot(self, file_name=None):
+ '''
+ Saves the current graph representation into a file
+ '''
+
+ if not file_name:
+            warnings.warn("always pass a file_name", DeprecationWarning)
+ file_name = self.temp_dot
+
+ fp = open(file_name, "w")
+ try:
+ for chunk in self.iterdot():
+ fp.write(chunk)
+ finally:
+ fp.close()
+
+ def save_img(self, file_name=None, file_type="gif", mode='dot'):
+ '''
+ Saves the dot file as an image file
+ '''
+
+ if not file_name:
+            warnings.warn("always pass a file_name", DeprecationWarning)
+ file_name = "out"
+
+ if mode == 'neato':
+ self.save_dot(self.temp_neo)
+ neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
+ os.system(neato_cmd)
+ plot_cmd = self.dot
+ else:
+ self.save_dot(self.temp_dot)
+ plot_cmd = self.dot
+
+ file_name = "%s.%s" % (file_name, file_type)
+ create_cmd = "%s -T%s %s -o %s" % (plot_cmd, file_type, self.temp_dot, file_name)
+ os.system(create_cmd)
diff --git a/python/altgraph/altgraph/Graph.py b/python/altgraph/altgraph/Graph.py
new file mode 100644
index 000000000..491e5c228
--- /dev/null
+++ b/python/altgraph/altgraph/Graph.py
@@ -0,0 +1,677 @@
+"""
+altgraph.Graph - Base Graph class
+=================================
+
+..
+ #--Version 2.1
+ #--Bob Ippolito October, 2004
+
+ #--Version 2.0
+ #--Istvan Albert June, 2004
+
+ #--Version 1.0
+ #--Nathan Denny, May 27, 1999
+"""
+
+from altgraph import GraphError
+from collections import deque
+
+class Graph(object):
+ """
+ The Graph class represents a directed graph with *N* nodes and *E* edges.
+
+ Naming conventions:
+
+ - the prefixes such as *out*, *inc* and *all* will refer to methods
+ that operate on the outgoing, incoming or all edges of that node.
+
+ For example: :py:meth:`inc_degree` will refer to the degree of the node
+ computed over the incoming edges (the number of neighbours linking to
+ the node).
+
+ - the prefixes such as *forw* and *back* will refer to the
+ orientation of the edges used in the method with respect to the node.
+
+ For example: :py:meth:`forw_bfs` will start at the node then use the outgoing
+ edges to traverse the graph (goes forward).
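+
+    A short example of these conventions (a sketch)::
+
+        g = Graph([(1, 2), (2, 3)])
+        g.out_nbrs(2)   # -> [3]        (outgoing neighbours)
+        g.inc_nbrs(2)   # -> [1]        (incoming neighbours)
+        g.forw_bfs(1)   # -> [1, 2, 3]  (forward traversal)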
+ """
+
+ def __init__(self, edges=None):
+ """
+ Initialization
+ """
+
+ self.next_edge = 0
+ self.nodes, self.edges = {}, {}
+ self.hidden_edges, self.hidden_nodes = {}, {}
+
+ if edges is not None:
+ for item in edges:
+ if len(item) == 2:
+ head, tail = item
+ self.add_edge(head, tail)
+ elif len(item) == 3:
+ head, tail, data = item
+ self.add_edge(head, tail, data)
+ else:
+ raise GraphError("Cannot create edge from %s"%(item,))
+
+
+ def __repr__(self):
+ return '<Graph: %d nodes, %d edges>' % (
+ self.number_of_nodes(), self.number_of_edges())
+
+ def add_node(self, node, node_data=None):
+ """
+ Adds a new node to the graph. Arbitrary data can be attached to the
+ node via the node_data parameter. Adding the same node twice will be
+ silently ignored.
+
+ The node must be a hashable value.
+ """
+ #
+ # the nodes will contain tuples that will store incoming edges,
+ # outgoing edges and data
+ #
+ # index 0 -> incoming edges
+ # index 1 -> outgoing edges
+
+ if node in self.hidden_nodes:
+ # Node is present, but hidden
+ return
+
+ if node not in self.nodes:
+ self.nodes[node] = ([], [], node_data)
+
+ def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
+ """
+ Adds a directed edge going from head_id to tail_id.
+ Arbitrary data can be attached to the edge via edge_data.
+        Missing head or tail nodes are created automatically unless *create_nodes* is False.
+
+ :param head_id: head node
+ :param tail_id: tail node
+ :param edge_data: (optional) data attached to the edge
+ :param create_nodes: (optional) creates the head_id or tail_id node in case they did not exist
+ """
+        # shortcut
+ edge = self.next_edge
+
+ # add nodes if on automatic node creation
+ if create_nodes:
+ self.add_node(head_id)
+ self.add_node(tail_id)
+
+ # update the corresponding incoming and outgoing lists in the nodes
+ # index 0 -> incoming edges
+ # index 1 -> outgoing edges
+
+ try:
+ self.nodes[tail_id][0].append(edge)
+ self.nodes[head_id][1].append(edge)
+ except KeyError:
+ raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
+
+ # store edge information
+ self.edges[edge] = (head_id, tail_id, edge_data)
+
+
+ self.next_edge += 1
+
+ def hide_edge(self, edge):
+ """
+ Hides an edge from the graph. The edge may be unhidden at some later
+ time.
+ """
+ try:
+ head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
+ self.nodes[tail_id][0].remove(edge)
+ self.nodes[head_id][1].remove(edge)
+ del self.edges[edge]
+ except KeyError:
+ raise GraphError('Invalid edge %s' % edge)
+
+ def hide_node(self, node):
+ """
+ Hides a node from the graph. The incoming and outgoing edges of the
+ node will also be hidden. The node may be unhidden at some later time.
+ """
+ try:
+ all_edges = self.all_edges(node)
+ self.hidden_nodes[node] = (self.nodes[node], all_edges)
+ for edge in all_edges:
+ self.hide_edge(edge)
+ del self.nodes[node]
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def restore_node(self, node):
+ """
+ Restores a previously hidden node back into the graph and restores
+ all of its incoming and outgoing edges.
+ """
+ try:
+ self.nodes[node], all_edges = self.hidden_nodes[node]
+ for edge in all_edges:
+ self.restore_edge(edge)
+ del self.hidden_nodes[node]
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def restore_edge(self, edge):
+ """
+ Restores a previously hidden edge back into the graph.
+ """
+ try:
+ head_id, tail_id, data = self.hidden_edges[edge]
+ self.nodes[tail_id][0].append(edge)
+ self.nodes[head_id][1].append(edge)
+ self.edges[edge] = head_id, tail_id, data
+ del self.hidden_edges[edge]
+ except KeyError:
+ raise GraphError('Invalid edge %s' % edge)
+
+ def restore_all_edges(self):
+ """
+ Restores all hidden edges.
+ """
+ for edge in list(self.hidden_edges.keys()):
+ try:
+ self.restore_edge(edge)
+ except GraphError:
+ pass
+
+ def restore_all_nodes(self):
+ """
+ Restores all hidden nodes.
+ """
+ for node in list(self.hidden_nodes.keys()):
+ self.restore_node(node)
+
+ def __contains__(self, node):
+ """
+ Test whether a node is in the graph
+ """
+ return node in self.nodes
+
+ def edge_by_id(self, edge):
+ """
+        Returns the (head, tail) node pair for the given edge id
+ """
+ try:
+ head, tail, data = self.edges[edge]
+ except KeyError:
+ raise GraphError('Invalid edge %s' % edge)
+
+ return (head, tail)
+
+ def edge_by_node(self, head, tail):
+ """
+ Returns the edge that connects the head_id and tail_id nodes
+ """
+ for edge in self.out_edges(head):
+ if self.tail(edge) == tail:
+ return edge
+ return None
+
+ def number_of_nodes(self):
+ """
+ Returns the number of nodes
+ """
+ return len(self.nodes)
+
+ def number_of_edges(self):
+ """
+ Returns the number of edges
+ """
+ return len(self.edges)
+
+ def __iter__(self):
+ """
+ Iterates over all nodes in the graph
+ """
+ return iter(self.nodes)
+
+ def node_list(self):
+ """
+ Return a list of the node ids for all visible nodes in the graph.
+ """
+ return list(self.nodes.keys())
+
+ def edge_list(self):
+ """
+        Returns a list of the edge ids for all visible edges in the graph.
+ """
+ return list(self.edges.keys())
+
+ def number_of_hidden_edges(self):
+ """
+ Returns the number of hidden edges
+ """
+ return len(self.hidden_edges)
+
+ def number_of_hidden_nodes(self):
+ """
+ Returns the number of hidden nodes
+ """
+ return len(self.hidden_nodes)
+
+ def hidden_node_list(self):
+ """
+ Returns the list with the hidden nodes
+ """
+ return list(self.hidden_nodes.keys())
+
+ def hidden_edge_list(self):
+ """
+ Returns a list with the hidden edges
+ """
+ return list(self.hidden_edges.keys())
+
+ def describe_node(self, node):
+ """
+ return node, node data, outgoing edges, incoming edges for node
+ """
+ incoming, outgoing, data = self.nodes[node]
+ return node, data, outgoing, incoming
+
+ def describe_edge(self, edge):
+ """
+ return edge, edge data, head, tail for edge
+ """
+ head, tail, data = self.edges[edge]
+ return edge, data, head, tail
+
+ def node_data(self, node):
+ """
+ Returns the data associated with a node
+ """
+ return self.nodes[node][2]
+
+ def edge_data(self, edge):
+ """
+ Returns the data associated with an edge
+ """
+ return self.edges[edge][2]
+
+ def update_edge_data(self, edge, edge_data):
+ """
+ Replace the edge data for a specific edge
+ """
+ self.edges[edge] = self.edges[edge][0:2] + (edge_data,)
+
+ def head(self, edge):
+ """
+ Returns the node of the head of the edge.
+ """
+ return self.edges[edge][0]
+
+ def tail(self, edge):
+ """
+ Returns node of the tail of the edge.
+ """
+ return self.edges[edge][1]
+
+ def out_nbrs(self, node):
+ """
+ List of nodes connected by outgoing edges
+ """
+ l = [self.tail(n) for n in self.out_edges(node)]
+ return l
+
+ def inc_nbrs(self, node):
+ """
+ List of nodes connected by incoming edges
+ """
+ l = [self.head(n) for n in self.inc_edges(node)]
+ return l
+
+ def all_nbrs(self, node):
+ """
+ List of nodes connected by incoming and outgoing edges
+ """
+ l = dict.fromkeys( self.inc_nbrs(node) + self.out_nbrs(node) )
+ return list(l)
+
+ def out_edges(self, node):
+ """
+ Returns a list of the outgoing edges
+ """
+ try:
+ return list(self.nodes[node][1])
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def inc_edges(self, node):
+ """
+ Returns a list of the incoming edges
+ """
+ try:
+ return list(self.nodes[node][0])
+ except KeyError:
+ raise GraphError('Invalid node %s' % node)
+
+ def all_edges(self, node):
+ """
+        Returns a set of the incoming and outgoing edges.
+ """
+ return set(self.inc_edges(node) + self.out_edges(node))
+
+ def out_degree(self, node):
+ """
+ Returns the number of outgoing edges
+ """
+ return len(self.out_edges(node))
+
+ def inc_degree(self, node):
+ """
+ Returns the number of incoming edges
+ """
+ return len(self.inc_edges(node))
+
+ def all_degree(self, node):
+ """
+ The total degree of a node
+ """
+ return self.inc_degree(node) + self.out_degree(node)
+
+ def _topo_sort(self, forward=True):
+ """
+ Topological sort.
+
+        Returns a tuple ``(valid, topo_list)``: *valid* is True when the sort
+        succeeded (the graph is acyclic) and *topo_list* is a list of nodes
+        where the successors (based on outgoing or incoming edges, selected
+        by the forward parameter) of any given node appear in the sequence
+        after that node.
+ """
+ topo_list = []
+ queue = deque()
+ indeg = {}
+
+ # select the operation that will be performed
+ if forward:
+ get_edges = self.out_edges
+ get_degree = self.inc_degree
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_degree = self.out_degree
+ get_next = self.head
+
+ for node in self.node_list():
+ degree = get_degree(node)
+ if degree:
+ indeg[node] = degree
+ else:
+ queue.append(node)
+
+ while queue:
+ curr_node = queue.popleft()
+ topo_list.append(curr_node)
+ for edge in get_edges(curr_node):
+ tail_id = get_next(edge)
+ if tail_id in indeg:
+ indeg[tail_id] -= 1
+ if indeg[tail_id] == 0:
+ queue.append(tail_id)
+
+ if len(topo_list) == len(self.node_list()):
+ valid = True
+ else:
+ # the graph has cycles, invalid topological sort
+ valid = False
+
+ return (valid, topo_list)
+
+ def forw_topo_sort(self):
+ """
+ Topological sort.
+
+        Returns a tuple ``(valid, node_list)``. *valid* is True when the graph
+        contains no cycles; *node_list* holds the nodes such that the
+        successors (based on outgoing edges) of any given node appear in the
+        sequence after that node.
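+
+        Example (a sketch)::
+
+            valid, order = Graph([(1, 2), (2, 3)]).forw_topo_sort()
+            # valid is True and order is [1, 2, 3]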
+ """
+ return self._topo_sort(forward=True)
+
+ def back_topo_sort(self):
+ """
+ Reverse topological sort.
+
+        Returns a tuple ``(valid, node_list)``. *valid* is True when the graph
+        contains no cycles; *node_list* holds the nodes such that the
+        successors (based on incoming edges) of any given node appear in the
+        sequence after that node.
+ """
+ return self._topo_sort(forward=False)
+
+ def _bfs_subgraph(self, start_id, forward=True):
+ """
+        Private method that creates a subgraph in BFS order.
+
+ The forward parameter specifies whether it is a forward or backward
+ traversal.
+ """
+ if forward:
+ get_bfs = self.forw_bfs
+ get_nbrs = self.out_nbrs
+ else:
+ get_bfs = self.back_bfs
+ get_nbrs = self.inc_nbrs
+
+ g = Graph()
+ bfs_list = get_bfs(start_id)
+ for node in bfs_list:
+ g.add_node(node)
+
+ for node in bfs_list:
+ for nbr_id in get_nbrs(node):
+ g.add_edge(node, nbr_id)
+
+ return g
+
+ def forw_bfs_subgraph(self, start_id):
+ """
+ Creates and returns a subgraph consisting of the breadth first
+ reachable nodes based on their outgoing edges.
+ """
+ return self._bfs_subgraph(start_id, forward=True)
+
+ def back_bfs_subgraph(self, start_id):
+ """
+ Creates and returns a subgraph consisting of the breadth first
+ reachable nodes based on the incoming edges.
+ """
+ return self._bfs_subgraph(start_id, forward=False)
+
+ def iterdfs(self, start, end=None, forward=True):
+ """
+        Yields nodes in a depth-first traversal starting at *start*.
+
+ The forward parameter specifies whether it is a forward or backward
+ traversal.
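+
+        Example (a sketch; later edges are explored first)::
+
+            g = Graph([(1, 2), (1, 3), (3, 4)])
+            list(g.iterdfs(1))   # -> [1, 3, 4, 2]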
+ """
+ visited, stack = set([start]), deque([start])
+
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ while stack:
+ curr_node = stack.pop()
+ yield curr_node
+ if curr_node == end:
+ break
+ for edge in sorted(get_edges(curr_node)):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ stack.append(tail)
+
+ def iterdata(self, start, end=None, forward=True, condition=None):
+ """
+ Perform a depth-first walk of the graph (as ``iterdfs``)
+ and yield the item data of every node where condition matches. The
+ condition callback is only called when node_data is not None.
+ """
+
+ visited, stack = set([start]), deque([start])
+
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ get_data = self.node_data
+
+ while stack:
+ curr_node = stack.pop()
+ curr_data = get_data(curr_node)
+ if curr_data is not None:
+ if condition is not None and not condition(curr_data):
+ continue
+ yield curr_data
+ if curr_node == end:
+ break
+ for edge in get_edges(curr_node):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ stack.append(tail)
+
+ def _iterbfs(self, start, end=None, forward=True):
+ """
+ The forward parameter specifies whether it is a forward or backward
+        traversal. Yields tuples where the first value is the node id and the
+        second value is the hop count.
+ """
+ queue, visited = deque([(start, 0)]), set([start])
+
+ # the direction of the bfs depends on the edges that are sampled
+ if forward:
+ get_edges = self.out_edges
+ get_next = self.tail
+ else:
+ get_edges = self.inc_edges
+ get_next = self.head
+
+ while queue:
+ curr_node, curr_step = queue.popleft()
+ yield (curr_node, curr_step)
+ if curr_node == end:
+ break
+ for edge in get_edges(curr_node):
+ tail = get_next(edge)
+ if tail not in visited:
+ visited.add(tail)
+ queue.append((tail, curr_step + 1))
+
+
+ def forw_bfs(self, start, end=None):
+ """
+ Returns a list of nodes in some forward BFS order.
+
+ Starting from the start node the breadth first search proceeds along
+ outgoing edges.
+ """
+ return [node for node, step in self._iterbfs(start, end, forward=True)]
+
+ def back_bfs(self, start, end=None):
+ """
+ Returns a list of nodes in some backward BFS order.
+
+ Starting from the start node the breadth first search proceeds along
+ incoming edges.
+ """
+ return [node for node, step in self._iterbfs(start, end, forward=False)]
+
+ def forw_dfs(self, start, end=None):
+ """
+ Returns a list of nodes in some forward DFS order.
+
+ Starting with the start node the depth first search proceeds along
+ outgoing edges.
+ """
+ return list(self.iterdfs(start, end, forward=True))
+
+ def back_dfs(self, start, end=None):
+ """
+ Returns a list of nodes in some backward DFS order.
+
+ Starting from the start node the depth first search proceeds along
+ incoming edges.
+ """
+ return list(self.iterdfs(start, end, forward=False))
+
+ def connected(self):
+ """
+        Returns :py:data:`True` if every node in the graph can be reached from every
+ other node.
+ """
+ node_list = self.node_list()
+ for node in node_list:
+ bfs_list = self.forw_bfs(node)
+ if len(bfs_list) != len(node_list):
+ return False
+ return True
+
+ def clust_coef(self, node):
+ """
+        Computes and returns the local clustering coefficient of a node. The
+        local clustering coefficient is the ratio of the actual number of edges
+        between neighbours of the node to the maximum possible number of edges
+        between those neighbours.
+
+ See <http://en.wikipedia.org/wiki/Clustering_coefficient#Local_clustering_coefficient>
+ for a formal definition.
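+
+        For example, in the directed triangle ``(1, 2), (2, 3), (1, 3)`` node 1
+        has neighbours {2, 3} and one of the two possible directed edges between
+        them exists, giving a coefficient of 0.5.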
+ """
+ num = 0
+ nbr_set = set(self.out_nbrs(node))
+
+ if node in nbr_set:
+ nbr_set.remove(node) # loop defense
+
+ for nbr in nbr_set:
+ sec_set = set(self.out_nbrs(nbr))
+ if nbr in sec_set:
+ sec_set.remove(nbr) # loop defense
+ num += len(nbr_set & sec_set)
+
+ nbr_num = len(nbr_set)
+ if nbr_num:
+ clust_coef = float(num) / (nbr_num * (nbr_num - 1))
+ else:
+ clust_coef = 0.0
+ return clust_coef
+
+ def get_hops(self, start, end=None, forward=True):
+ """
+ Computes the hop distance to all nodes centered around a specified node.
+
+        First order neighbours are at hop 1, their neighbours are at hop 2 etc.
+ Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of the forward
+ parameter. If the distance between all neighbouring nodes is 1 the hop
+ number corresponds to the shortest distance between the nodes.
+
+ :param start: the starting node
+ :param end: ending node (optional). When not specified will search the whole graph.
+        :param forward: directionality parameter (optional). If ``True`` (default) it uses :py:meth:`forw_bfs` otherwise :py:meth:`back_bfs`.
+ :return: returns a list of tuples where each tuple contains the node and the hop.
+
+ Typical usage::
+
+            >>> print(graph.get_hops(1, 8))
+            [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+ # node 1 is at 0 hops
+ # node 2 is at 1 hop
+ # ...
+ # node 8 is at 5 hops
+ """
+ if forward:
+ return list(self._iterbfs(start=start, end=end, forward=True))
+ else:
+ return list(self._iterbfs(start=start, end=end, forward=False))
diff --git a/python/altgraph/altgraph/GraphAlgo.py b/python/altgraph/altgraph/GraphAlgo.py
new file mode 100644
index 000000000..9e6fff2b1
--- /dev/null
+++ b/python/altgraph/altgraph/GraphAlgo.py
@@ -0,0 +1,147 @@
+'''
+altgraph.GraphAlgo - Graph algorithms
+=====================================
+'''
+from altgraph import GraphError
+
+def dijkstra(graph, start, end=None):
+ """
+ Dijkstra's algorithm for shortest paths
+
+ `David Eppstein, UC Irvine, 4 April 2002 <http://www.ics.uci.edu/~eppstein/161/python/>`_
+
+ `Python Cookbook Recipe <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466>`_
+
+ Find shortest paths from the start node to all nodes nearer than or equal to the end node.
+
+ Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive.
+ This code does not verify this property for all edges (only the edges examined until the end
+ vertex is reached), but will correctly compute shortest paths even for some graphs with negative
+ edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake.
+
+ *Adapted to altgraph by Istvan Albert, Pennsylvania State University - June, 9 2004*
+
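+    A small usage sketch (the edge data carries the numeric edge length)::
+
+        g = Graph.Graph()
+        g.add_edge('a', 'b', 2)
+        g.add_edge('b', 'c', 3)
+        D, P = dijkstra(g, 'a')
+        # D == {'a': 0, 'b': 2, 'c': 5}; P maps each node to its predecessor
+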
+ """
+ D = {} # dictionary of final distances
+ P = {} # dictionary of predecessors
+ Q = _priorityDictionary() # estimated distances of non-final vertices
+ Q[start] = 0
+
+ for v in Q:
+ D[v] = Q[v]
+ if v == end: break
+
+ for w in graph.out_nbrs(v):
+ edge_id = graph.edge_by_node(v,w)
+ vwLength = D[v] + graph.edge_data(edge_id)
+ if w in D:
+ if vwLength < D[w]:
+ raise GraphError("Dijkstra: found better path to already-final vertex")
+ elif w not in Q or vwLength < Q[w]:
+ Q[w] = vwLength
+ P[w] = v
+
+ return (D,P)
+
+def shortest_path(graph, start, end):
+ """
+ Find a single shortest path from the given start node to the given end node.
+ The input has the same conventions as dijkstra(). The output is a list of the nodes
+ in order along the shortest path.
+
+ **Note that the distances must be stored in the edge data as numeric data**
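+
+    Continuing the small sketch from :py:func:`dijkstra`::
+
+        shortest_path(g, 'a', 'c')   # -> ['a', 'b', 'c']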
+ """
+
+ D,P = dijkstra(graph, start, end)
+ Path = []
+ while 1:
+ Path.append(end)
+ if end == start: break
+ end = P[end]
+ Path.reverse()
+ return Path
+
+#
+# Utility classes and functions
+#
+class _priorityDictionary(dict):
+ '''
+ Priority dictionary using binary heaps (internal use only)
+
+ David Eppstein, UC Irvine, 8 Mar 2002
+
+ Implements a data structure that acts almost like a dictionary, with two modifications:
+ 1. D.smallest() returns the value x minimizing D[x]. For this to work correctly,
+ all values D[x] stored in the dictionary must be comparable.
+ 2. iterating "for x in D" finds and removes the items from D in sorted order.
+ Each item is not removed until the next item is requested, so D[x] will still
+ return a useful value until the next iteration of the for-loop.
+ Each operation takes logarithmic amortized time.
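+
+    A brief sketch of the intended use::
+
+        D = _priorityDictionary()
+        D['a'] = 3
+        D['b'] = 1
+        D.smallest()   # -> 'b'
+        list(D)        # -> ['b', 'a'] (destructive: empties D)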
+ '''
+ def __init__(self):
+ '''
+ Initialize priorityDictionary by creating binary heap of pairs (value,key).
+ Note that changing or removing a dict entry will not remove the old pair from the heap
+ until it is found by smallest() or until the heap is rebuilt.
+ '''
+ self.__heap = []
+ dict.__init__(self)
+
+ def smallest(self):
+ '''
+ Find smallest item after removing deleted items from front of heap.
+ '''
+ if len(self) == 0:
+ raise IndexError("smallest of empty priorityDictionary")
+ heap = self.__heap
+ while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
+ lastItem = heap.pop()
+ insertionPoint = 0
+ while 1:
+ smallChild = 2*insertionPoint+1
+                if smallChild+1 < len(heap) and heap[smallChild] > heap[smallChild+1]:
+ smallChild += 1
+ if smallChild >= len(heap) or lastItem <= heap[smallChild]:
+ heap[insertionPoint] = lastItem
+ break
+ heap[insertionPoint] = heap[smallChild]
+ insertionPoint = smallChild
+ return heap[0][1]
+
+ def __iter__(self):
+ '''
+ Create destructive sorted iterator of priorityDictionary.
+ '''
+ def iterfn():
+ while len(self) > 0:
+ x = self.smallest()
+ yield x
+ del self[x]
+ return iterfn()
+
+ def __setitem__(self,key,val):
+ '''
+ Change value stored in dictionary and add corresponding pair to heap.
+ Rebuilds the heap if the number of deleted items gets large, to avoid memory leakage.
+ '''
+ dict.__setitem__(self,key,val)
+ heap = self.__heap
+ if len(heap) > 2 * len(self):
+            self.__heap = [(v, k) for k, v in self.items()]  # items() works on both Python 2 and 3
+ self.__heap.sort() # builtin sort probably faster than O(n)-time heapify
+ else:
+ newPair = (val,key)
+ insertionPoint = len(heap)
+ heap.append(None)
+ while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]:
+ heap[insertionPoint] = heap[(insertionPoint-1)//2]
+ insertionPoint = (insertionPoint-1)//2
+ heap[insertionPoint] = newPair
+
+ def setdefault(self,key,val):
+ '''
+ Reimplement setdefault to pass through our customized __setitem__.
+ '''
+ if key not in self:
+ self[key] = val
+ return self[key]
diff --git a/python/altgraph/altgraph/GraphStat.py b/python/altgraph/altgraph/GraphStat.py
new file mode 100644
index 000000000..25fc46c2d
--- /dev/null
+++ b/python/altgraph/altgraph/GraphStat.py
@@ -0,0 +1,73 @@
+'''
+altgraph.GraphStat - Functions providing various graph statistics
+=================================================================
+'''
+import sys
+
+def degree_dist(graph, limits=(0,0), bin_num=10, mode='out'):
+ '''
+ Computes the degree distribution for a graph.
+
+ Returns a list of tuples where the first element of the tuple is the center of the bin
+    representing a range of degrees and the second element is the number of nodes
+    with a degree falling in that range.
+
+    Example (a sketch; exact counts depend on the graph)::
+
+        g = GraphUtil.generate_random_graph(100, 400)
+        dist = degree_dist(g)   # list of (bin_center, node_count) tuples
+ '''
+
+ deg = []
+ if mode == 'inc':
+ get_deg = graph.inc_degree
+ else:
+ get_deg = graph.out_degree
+
+ for node in graph:
+ deg.append( get_deg(node) )
+
+ if not deg:
+ return []
+
+ results = _binning(values=deg, limits=limits, bin_num=bin_num)
+
+ return results
+
+_EPS = 1.0/(2.0**32)
+def _binning(values, limits=(0,0), bin_num=10):
+ '''
+ Bins data that falls between certain limits, if the limits are (0, 0) the
+ minimum and maximum values are used.
+
+ Returns a list of tuples where the first element of the tuple is the center of the bin
+    and the second element is the count.
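+
+    For example (worked by hand)::
+
+        _binning([1, 2, 2, 3], limits=(0, 4), bin_num=2)
+        # bin size is 2.0 -> returns [(1.0, 1), (3.0, 3)]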
+ '''
+ if limits == (0, 0):
+ min_val, max_val = min(values) - _EPS, max(values) + _EPS
+ else:
+ min_val, max_val = limits
+
+ # get bin size
+ bin_size = (max_val - min_val)/float(bin_num)
+ bins = [0] * (bin_num)
+
+ # will ignore these outliers for now
+ out_points = 0
+ for value in values:
+ try:
+ if (value - min_val) < 0:
+ out_points += 1
+ else:
+ index = int((value - min_val)/float(bin_size))
+ bins[index] += 1
+ except IndexError:
+ out_points += 1
+
+ # make it ready for an x,y plot
+ result = []
+ center = (bin_size/2) + min_val
+ for i, y in enumerate(bins):
+ x = center + bin_size * i
+ result.append( (x,y) )
+
+ return result
diff --git a/python/altgraph/altgraph/GraphUtil.py b/python/altgraph/altgraph/GraphUtil.py
new file mode 100644
index 000000000..d3b6acd74
--- /dev/null
+++ b/python/altgraph/altgraph/GraphUtil.py
@@ -0,0 +1,137 @@
+'''
+altgraph.GraphUtil - Utility classes and functions
+==================================================
+'''
+
+import random
+from collections import deque
+from altgraph import Graph
+from altgraph import GraphError
+
+def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
+ '''
+ Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with *node_num* nodes
+ randomly connected by *edge_num* edges.
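+
+    Example (a sketch; the resulting topology is random)::
+
+        g = generate_random_graph(10, 20)
+        # g.number_of_nodes() == 10 and g.number_of_edges() == 20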
+ '''
+ g = Graph.Graph()
+
+ if not multi_edges:
+ if self_loops:
+ max_edges = node_num * node_num
+ else:
+ max_edges = node_num * (node_num-1)
+
+ if edge_num > max_edges:
+ raise GraphError("inconsistent arguments to 'generate_random_graph'")
+
+ nodes = range(node_num)
+
+ for node in nodes:
+ g.add_node(node)
+
+ while 1:
+ head = random.choice(nodes)
+ tail = random.choice(nodes)
+
+ # loop defense
+ if head == tail and not self_loops:
+ continue
+
+ # multiple edge defense
+ if g.edge_by_node(head,tail) is not None and not multi_edges:
+ continue
+
+ # add the edge
+ g.add_edge(head, tail)
+ if g.number_of_edges() >= edge_num:
+ break
+
+ return g
+
+def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
+ '''
+ Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that will have *steps* \* *growth_num* nodes
+    and a scale free (power-law) connectivity. Starting with a fully connected graph of *growth_num* nodes,
+    at every step *growth_num* nodes are added to the graph and are connected to existing nodes with
+ a probability proportional to the degree of these existing nodes.
+ '''
+ # FIXME: The code doesn't seem to do what the documentation claims.
+ graph = Graph.Graph()
+
+ # initialize the graph
+ store = []
+ for i in range(growth_num):
+ #store += [ i ] * (growth_num - 1)
+ for j in range(i + 1, growth_num):
+ store.append(i)
+ store.append(j)
+ graph.add_edge(i,j)
+
+ # generate
+ for node in range(growth_num, steps * growth_num):
+ graph.add_node(node)
+ while ( graph.out_degree(node) < growth_num ):
+ nbr = random.choice(store)
+
+ # loop defense
+ if node == nbr and not self_loops:
+ continue
+
+ # multi edge defense
+ if graph.edge_by_node(node, nbr) and not multi_edges:
+ continue
+
+ graph.add_edge(node, nbr)
+
+
+ for nbr in graph.out_nbrs(node):
+ store.append(node)
+ store.append(nbr)
+
+ return graph
+
+def filter_stack(graph, head, filters):
+ """
+ Perform a walk in a depth-first order starting
+ at *head*.
+
+ Returns (visited, removes, orphans).
+
+ * visited: the set of visited nodes
+    * removes: the set of nodes whose node
+      data does not match all *filters*
+ * orphans: tuples of (last_good, node),
+ where node is not in removes, is directly
+ reachable from a node in *removes* and
+ *last_good* is the closest upstream node that is not
+ in *removes*.
+ """
+
+ visited, removes, orphans = set([head]), set(), set()
+ stack = deque([(head, head)])
+ get_data = graph.node_data
+ get_edges = graph.out_edges
+ get_tail = graph.tail
+
+ while stack:
+ last_good, node = stack.pop()
+ data = get_data(node)
+ if data is not None:
+ for filtfunc in filters:
+ if not filtfunc(data):
+ removes.add(node)
+ break
+ else:
+ last_good = node
+ for edge in get_edges(node):
+ tail = get_tail(edge)
+ if last_good is not node:
+ orphans.add((last_good, tail))
+ if tail not in visited:
+ visited.add(tail)
+ stack.append((last_good, tail))
+
+ orphans = [(last_good, tail) for (last_good, tail) in orphans if tail not in removes]
+ #orphans.sort()
+
+ return visited, removes, orphans
diff --git a/python/altgraph/altgraph/ObjectGraph.py b/python/altgraph/altgraph/ObjectGraph.py
new file mode 100644
index 000000000..d07f51b68
--- /dev/null
+++ b/python/altgraph/altgraph/ObjectGraph.py
@@ -0,0 +1,202 @@
+"""
+altgraph.ObjectGraph - Graph of objects with an identifier
+==========================================================
+
+A graph of objects that have a "graphident" attribute.
+graphident is the key for the object in the graph
+"""
+
+from altgraph import GraphError
+from altgraph.Graph import Graph
+from altgraph.GraphUtil import filter_stack
+
+class ObjectGraph(object):
+ """
+ A graph of objects that have a "graphident" attribute.
+ graphident is the key for the object in the graph
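+
+    A minimal usage sketch (``Node`` stands for any class exposing ``graphident``)::
+
+        class Node(object):
+            def __init__(self, name):
+                self.graphident = name
+
+        og = ObjectGraph()
+        node = og.createNode(Node, 'n1')
+        assert og.findNode('n1') is node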
+ """
+ def __init__(self, graph=None, debug=0):
+ if graph is None:
+ graph = Graph()
+ self.graphident = self
+ self.graph = graph
+ self.debug = debug
+ self.indent = 0
+ graph.add_node(self, None)
+
+ def __repr__(self):
+ return '<%s>' % (type(self).__name__,)
+
+ def flatten(self, condition=None, start=None):
+ """
+ Iterate over the subgraph that is entirely reachable by condition
+ starting from the given start node or the ObjectGraph root
+ """
+ if start is None:
+ start = self
+ start = self.getRawIdent(start)
+ return self.graph.iterdata(start=start, condition=condition)
+
+ def nodes(self):
+ for ident in self.graph:
+ node = self.graph.node_data(ident)
+ if node is not None:
+                yield node
+
+
+ def get_edges(self, node):
+ start = self.getRawIdent(node)
+ _, _, outraw, incraw = self.graph.describe_node(start)
+ def iter_edges(lst, n):
+ seen = set()
+ for tpl in (self.graph.describe_edge(e) for e in lst):
+ ident = tpl[n]
+ if ident not in seen:
+ yield self.findNode(ident)
+ seen.add(ident)
+ return iter_edges(outraw, 3), iter_edges(incraw, 2)
+
+ def edgeData(self, fromNode, toNode):
+ start = self.getRawIdent(fromNode)
+ stop = self.getRawIdent(toNode)
+ edge = self.graph.edge_by_node(start, stop)
+ return self.graph.edge_data(edge)
+
+ def updateEdgeData(self, fromNode, toNode, edgeData):
+ start = self.getRawIdent(fromNode)
+ stop = self.getRawIdent(toNode)
+ edge = self.graph.edge_by_node(start, stop)
+ self.graph.update_edge_data(edge, edgeData)
+
+ def filterStack(self, filters):
+ """
+ Filter the ObjectGraph in-place by removing all edges to nodes that
+ do not match every filter in the given filter list
+
+ Returns a tuple containing the number of: (nodes_visited, nodes_removed, nodes_orphaned)
+ """
+ visited, removes, orphans = filter_stack(self.graph, self, filters)
+
+ for last_good, tail in orphans:
+ self.graph.add_edge(last_good, tail, edge_data='orphan')
+
+ for node in removes:
+ self.graph.hide_node(node)
+
+ return len(visited)-1, len(removes), len(orphans)
+
+ def removeNode(self, node):
+ """
+ Remove the given node from the graph if it exists
+ """
+ ident = self.getIdent(node)
+ if ident is not None:
+ self.graph.hide_node(ident)
+
+ def removeReference(self, fromnode, tonode):
+ """
+ Remove all edges from fromnode to tonode
+ """
+ if fromnode is None:
+ fromnode = self
+ fromident = self.getIdent(fromnode)
+ toident = self.getIdent(tonode)
+ if fromident is not None and toident is not None:
+ while True:
+ edge = self.graph.edge_by_node(fromident, toident)
+ if edge is None:
+ break
+ self.graph.hide_edge(edge)
+
+ def getIdent(self, node):
+ """
+ Get the graph identifier for a node
+ """
+ ident = self.getRawIdent(node)
+ if ident is not None:
+ return ident
+ node = self.findNode(node)
+ if node is None:
+ return None
+ return node.graphident
+
+ def getRawIdent(self, node):
+ """
+ Get the identifier for a node object
+ """
+ if node is self:
+ return node
+ ident = getattr(node, 'graphident', None)
+ return ident
+
+ def __contains__(self, node):
+ return self.findNode(node) is not None
+
+ def findNode(self, node):
+ """
+ Find the node on the graph
+ """
+ ident = self.getRawIdent(node)
+ if ident is None:
+ ident = node
+ try:
+ return self.graph.node_data(ident)
+ except KeyError:
+ return None
+
+ def addNode(self, node):
+ """
+ Add a node to the graph referenced by the root
+ """
+ self.msg(4, "addNode", node)
+
+ try:
+ self.graph.restore_node(node.graphident)
+ except GraphError:
+ self.graph.add_node(node.graphident, node)
+
+ def createReference(self, fromnode, tonode, edge_data=None):
+ """
+ Create a reference from fromnode to tonode
+ """
+ if fromnode is None:
+ fromnode = self
+ fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
+ if fromident is None or toident is None:
+ return
+ self.msg(4, "createReference", fromnode, tonode, edge_data)
+ self.graph.add_edge(fromident, toident, edge_data=edge_data)
+
+ def createNode(self, cls, name, *args, **kw):
+ """
+ Add a node of type cls to the graph if it does not already exist
+ by the given name
+ """
+ m = self.findNode(name)
+ if m is None:
+ m = cls(name, *args, **kw)
+ self.addNode(m)
+ return m
+
+ def msg(self, level, s, *args):
+ """
+ Print a debug message with the given level
+ """
+ if s and level <= self.debug:
+ print ("%s%s %s" % (" " * self.indent, s, ' '.join(map(repr, args))))
+
+ def msgin(self, level, s, *args):
+ """
+ Print a debug message and indent
+ """
+ if level <= self.debug:
+ self.msg(level, s, *args)
+ self.indent = self.indent + 1
+
+ def msgout(self, level, s, *args):
+ """
+ Dedent and print a debug message
+ """
+ if level <= self.debug:
+ self.indent = self.indent - 1
+ self.msg(level, s, *args)
diff --git a/python/altgraph/altgraph/__init__.py b/python/altgraph/altgraph/__init__.py
new file mode 100644
index 000000000..9f72c18e5
--- /dev/null
+++ b/python/altgraph/altgraph/__init__.py
@@ -0,0 +1,135 @@
+'''
+altgraph - a python graph library
+=================================
+
+altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
+to use newer Python 2.3+ features, including additional support used by the
+py2app suite (modulegraph and macholib, specifically).
+
+altgraph is a Python based graph (network) representation and manipulation package.
+It started out as an extension to the `graph_lib module <http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
+written by Nathan Denny; it has since been significantly optimized and expanded.
+
+The :class:`altgraph.Graph.Graph` class is loosely modeled after the `LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
+(Library of Efficient Datatypes) representation. The library
+includes methods for constructing graphs, BFS and DFS traversals,
+topological sort, finding connected components, and shortest paths, as well as a
+number of graph statistics functions. The library can also visualize graphs
+via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
+
+The package contains the following modules:
+
+ - the :py:mod:`altgraph.Graph` module contains the :class:`~altgraph.Graph.Graph` class that stores the graph data
+
+  - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms operating on graphs (:py:class:`~altgraph.Graph.Graph` instances)
+
+ - the :py:mod:`altgraph.GraphStat` module contains functions for computing statistical measures on graphs
+
+ - the :py:mod:`altgraph.GraphUtil` module contains functions for generating, reading and saving graphs
+
+ - the :py:mod:`altgraph.Dot` module contains functions for displaying graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
+
+ - the :py:mod:`altgraph.ObjectGraph` module implements a graph of objects with a unique identifier
+
+Installation
+------------
+
+Download and unpack the archive then type::
+
+ python setup.py install
+
+This will install the library in the default location. For instructions on
+how to customize the install procedure read the output of::
+
+ python setup.py --help install
+
+To verify that the code works run the test suite::
+
+ python setup.py test
+
+Example usage
+-------------
+
+Let's assume that we want to analyze the graph built from the edge list below.
+Our script might then look like this::
+
+ from altgraph import Graph, GraphAlgo, Dot
+
+ # these are the edges
+ edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
+ (6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8),
+ (8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ]
+
+ # creates the graph
+ graph = Graph.Graph()
+ for head, tail in edges:
+ graph.add_edge(head, tail)
+
+    # do a forward bfs starting at node 1
+ print(graph.forw_bfs(1))
+
+This will print the nodes in some breadth first order::
+
+ [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9]
+
+If we wanted to get the hop-distance from node 1 to node 8
+we could write::
+
+ print(graph.get_hops(1, 8))
+
+This will print the following::
+
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+
+Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ...
+node 8 is 5 hops away. To find the shortest distance between two nodes you
+can use::
+
+ print(GraphAlgo.shortest_path(graph, 1, 12))
+
+It will print the nodes of one of the shortest paths (there may be several)::
+
+ [1, 2, 4, 5, 7, 13, 11, 12]
+
+To display the graph we can use the GraphViz backend::
+
+ dot = Dot.Dot(graph)
+
+ # display the graph on the monitor
+ dot.display()
+
+ # save it in an image file
+ dot.save_img(file_name='graph', file_type='gif')
+
+
+
+..
+ @author: U{Istvan Albert<http://www.personal.psu.edu/staff/i/u/iua1/>}
+
+ @license: MIT License
+
+ Copyright (c) 2004 Istvan Albert unless otherwise noted.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+ and associated documentation files (the "Software"), to deal in the Software without restriction,
+ including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+ so.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+ @requires: Python 2.3 or higher
+
+ @newfield contributor: Contributors:
+ @contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
+
+'''
+import pkg_resources
+__version__ = pkg_resources.require('altgraph')[0].version
+
+class GraphError(ValueError):
+ pass
diff --git a/python/altgraph/altgraph_tests/__init__.py b/python/altgraph/altgraph_tests/__init__.py
new file mode 100644
index 000000000..6890389df
--- /dev/null
+++ b/python/altgraph/altgraph_tests/__init__.py
@@ -0,0 +1 @@
+""" altgraph tests """
diff --git a/python/altgraph/altgraph_tests/test_altgraph.py b/python/altgraph/altgraph_tests/test_altgraph.py
new file mode 100644
index 000000000..2ca6b251e
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_altgraph.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env py.test
+import os
+import sys
+
+from altgraph import Graph, GraphAlgo
+import unittest
+
+class BasicTests (unittest.TestCase):
+ def setUp(self):
+ self.edges = [
+ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5), (6,14), (14,15),
+ (6, 15), (5,7), (7, 8), (7,13), (12,8), (8,13), (11,12), (11,9),
+ (13,11), (9,13), (13,10)
+ ]
+
+        # build the graph and remember every node in self.store
+ self.store = {}
+ self.g = Graph.Graph()
+ for head, tail in self.edges:
+ self.store[head] = self.store[tail] = None
+ self.g.add_edge(head, tail)
+
+ def test_num_edges(self):
+ # check the parameters
+ self.assertEqual(self.g.number_of_nodes(), len(self.store))
+ self.assertEqual(self.g.number_of_edges(), len(self.edges))
+
+ def test_forw_bfs(self):
+ # do a forward bfs
+ self.assertEqual( self.g.forw_bfs(1),
+ [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9])
+
+
+ def test_get_hops(self):
+        # display the hops and hop numbers between nodes
+ self.assertEqual(self.g.get_hops(1, 8),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])
+
+ def test_shortest_path(self):
+ self.assertEqual(GraphAlgo.shortest_path(self.g, 1, 12),
+ [1, 2, 4, 5, 7, 13, 11, 12])
+
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/altgraph_tests/test_dot.py b/python/altgraph/altgraph_tests/test_dot.py
new file mode 100644
index 000000000..83993dad5
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_dot.py
@@ -0,0 +1,370 @@
+import unittest
+import os
+
+from altgraph import Dot
+from altgraph import Graph
+from altgraph import GraphError
+
+
+class TestDot (unittest.TestCase):
+
+ def test_constructor(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g)
+
+ self.assertEqual(dot.name, 'G')
+ self.assertEqual(dot.attr, {})
+ self.assertEqual(dot.temp_dot, 'tmp_dot.dot')
+ self.assertEqual(dot.temp_neo, 'tmp_neo.dot')
+ self.assertEqual(dot.dot, 'dot')
+ self.assertEqual(dot.dotty, 'dotty')
+ self.assertEqual(dot.neato, 'neato')
+ self.assertEqual(dot.type, 'digraph')
+
+ self.assertEqual(dot.nodes, dict([(x, {}) for x in g]))
+
+ edges = {}
+ for head in g:
+ edges[head] = {}
+ for tail in g.out_nbrs(head):
+ edges[head][tail] = {}
+
+ self.assertEqual(dot.edges[1], edges[1])
+ self.assertEqual(dot.edges, edges)
+
+
+ dot = Dot.Dot(g, nodes=[1,2],
+ edgefn=lambda node: list(sorted(g.out_nbrs(node)))[:-1],
+ nodevisitor=lambda node: {'label': node},
+ edgevisitor=lambda head, tail: {'label': (head, tail) },
+ name="testgraph",
+ dot='/usr/local/bin/dot',
+ dotty='/usr/local/bin/dotty',
+ neato='/usr/local/bin/neato',
+ graphtype="graph")
+
+ self.assertEqual(dot.name, 'testgraph')
+ self.assertEqual(dot.attr, {})
+ self.assertEqual(dot.temp_dot, 'tmp_dot.dot')
+ self.assertEqual(dot.temp_neo, 'tmp_neo.dot')
+ self.assertEqual(dot.dot, '/usr/local/bin/dot')
+ self.assertEqual(dot.dotty, '/usr/local/bin/dotty')
+ self.assertEqual(dot.neato, '/usr/local/bin/neato')
+ self.assertEqual(dot.type, 'graph')
+
+ self.assertEqual(dot.nodes, dict([(x, {'label': x}) for x in [1,2]]))
+
+ edges = {}
+ for head in [1,2]:
+ edges[head] = {}
+ for tail in list(sorted(g.out_nbrs(head)))[:-1]:
+ if tail not in [1,2]: continue
+ edges[head][tail] = {'label': (head, tail) }
+
+ self.assertEqual(dot.edges[1], edges[1])
+ self.assertEqual(dot.edges, edges)
+
+ self.assertRaises(GraphError, Dot.Dot, g, nodes=[1,2, 9])
+
+ def test_style(self):
+ g = Graph.Graph([])
+
+ dot = Dot.Dot(g)
+
+ self.assertEqual(dot.attr, {})
+
+ dot.style(key='value')
+ self.assertEqual(dot.attr, {'key': 'value'})
+
+ dot.style(key2='value2')
+ self.assertEqual(dot.attr, {'key2': 'value2'})
+
+ def test_node_style(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g)
+
+ self.assertEqual(dot.nodes[1], {})
+
+ dot.node_style(1, key='value')
+ self.assertEqual(dot.nodes[1], {'key': 'value'})
+
+ dot.node_style(1, key2='value2')
+ self.assertEqual(dot.nodes[1], {'key2': 'value2'})
+ self.assertEqual(dot.nodes[2], {})
+
+ dot.all_node_style(key3='value3')
+ for n in g:
+ self.assertEqual(dot.nodes[n], {'key3': 'value3'})
+
+ self.assertTrue(9 not in dot.nodes)
+ dot.node_style(9, key='value')
+ self.assertEqual(dot.nodes[9], {'key': 'value'})
+
+ def test_edge_style(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g)
+
+ self.assertEqual(dot.edges[1][2], {})
+ dot.edge_style(1,2, foo='bar')
+ self.assertEqual(dot.edges[1][2], {'foo': 'bar'})
+
+ dot.edge_style(1,2, foo2='2bar')
+ self.assertEqual(dot.edges[1][2], {'foo2': '2bar'})
+
+ self.assertEqual(dot.edges[1][3], {})
+
+ self.assertFalse(6 in dot.edges[1])
+ dot.edge_style(1,6, foo2='2bar')
+ self.assertEqual(dot.edges[1][6], {'foo2': '2bar'})
+
+ self.assertRaises(GraphError, dot.edge_style, 1, 9, a=1)
+ self.assertRaises(GraphError, dot.edge_style, 9, 1, a=1)
+
+
+ def test_iter(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g)
+ dot.style(graph="foobar")
+ dot.node_style(1, key='value')
+ dot.node_style(2, key='another', key2='world')
+ dot.edge_style(1,4, key1='value1', key2='value2')
+ dot.edge_style(2,4, key1='valueA')
+
+ self.assertEqual(list(iter(dot)), list(dot.iterdot()))
+
+ for item in dot.iterdot():
+ self.assertTrue(isinstance(item, str))
+
+ first = list(dot.iterdot())[0]
+ self.assertEqual(first, "digraph %s {\n"%(dot.name,))
+
+ dot.type = 'graph'
+ first = list(dot.iterdot())[0]
+ self.assertEqual(first, "graph %s {\n"%(dot.name,))
+
+ dot.type = 'foo'
+ self.assertRaises(GraphError, list, dot.iterdot())
+ dot.type = 'digraph'
+
+ self.assertEqual(list(dot), [
+ 'digraph G {\n',
+ 'graph="foobar";',
+ '\n',
+
+ '\t"1" [',
+ 'key="value",',
+ '];\n',
+
+ '\t"2" [',
+ 'key="another",',
+ 'key2="world",',
+ '];\n',
+
+ '\t"3" [',
+ '];\n',
+
+ '\t"4" [',
+ '];\n',
+
+ '\t"6" [',
+ '];\n',
+
+ '\t"7" [',
+ '];\n',
+
+ '\t"1" -> "2" [',
+ '];\n',
+
+ '\t"1" -> "3" [',
+ '];\n',
+
+ '\t"1" -> "4" [',
+ 'key1="value1",',
+ 'key2="value2",',
+ '];\n',
+
+ '\t"2" -> "4" [',
+ 'key1="valueA",',
+ '];\n',
+
+ '\t"2" -> "6" [',
+ '];\n',
+
+ '\t"2" -> "7" [',
+ '];\n',
+
+ '\t"6" -> "1" [',
+ '];\n',
+
+ '\t"7" -> "4" [',
+ '];\n',
+ '}\n'])
+
+
+ def test_save(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g)
+ dot.style(graph="foobar")
+ dot.node_style(1, key='value')
+ dot.node_style(2, key='another', key2='world')
+ dot.edge_style(1,4, key1='value1', key2='value2')
+ dot.edge_style(2,4, key1='valueA')
+
+ fn = 'test_dot.dot'
+ self.assertTrue(not os.path.exists(fn))
+
+ try:
+ dot.save_dot(fn)
+
+ fp = open(fn, 'r')
+ data = fp.read()
+ fp.close()
+ self.assertEqual(data, ''.join(dot))
+
+ finally:
+ if os.path.exists(fn):
+ os.unlink(fn)
+
+
+ def test_img(self):
+ g = Graph.Graph([
+ (1,2),
+ (1,3),
+ (1,4),
+ (2,4),
+ (2,6),
+ (2,7),
+ (7,4),
+ (6,1),
+ ]
+ )
+
+ dot = Dot.Dot(g, dot='/usr/local/bin/!!dot', dotty='/usr/local/bin/!!dotty', neato='/usr/local/bin/!!neato')
+ dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
+ dot.node_style(1, label='BASE_NODE',shape='box', color='blue')
+ dot.node_style(2, style='filled', fillcolor='red')
+ dot.edge_style(1,4, style='dotted')
+ dot.edge_style(2,4, arrowhead='dot', label='binds', labelangle='90')
+
+ system_cmds = []
+ def fake_system(cmd):
+ system_cmds.append(cmd)
+ return None
+
+ try:
+ real_system = os.system
+ os.system = fake_system
+
+ system_cmds = []
+ dot.save_img('foo')
+ self.assertEqual(system_cmds, ['/usr/local/bin/!!dot -Tgif tmp_dot.dot -o foo.gif'])
+
+ system_cmds = []
+ dot.save_img('foo', file_type='jpg')
+ self.assertEqual(system_cmds, ['/usr/local/bin/!!dot -Tjpg tmp_dot.dot -o foo.jpg'])
+
+ system_cmds = []
+ dot.save_img('bar', file_type='jpg', mode='neato')
+ self.assertEqual(system_cmds, [
+ '/usr/local/bin/!!neato -o tmp_dot.dot tmp_neo.dot',
+ '/usr/local/bin/!!dot -Tjpg tmp_dot.dot -o bar.jpg',
+ ])
+
+ system_cmds = []
+ dot.display()
+ self.assertEqual(system_cmds, [
+ '/usr/local/bin/!!dotty tmp_dot.dot'
+ ])
+
+ system_cmds = []
+ dot.display(mode='neato')
+ self.assertEqual(system_cmds, [
+ '/usr/local/bin/!!neato -o tmp_dot.dot tmp_neo.dot',
+ '/usr/local/bin/!!dotty tmp_dot.dot'
+ ])
+
+ finally:
+ if os.path.exists(dot.temp_dot):
+ os.unlink(dot.temp_dot)
+ if os.path.exists(dot.temp_neo):
+ os.unlink(dot.temp_neo)
+ os.system = real_system
+
+ if os.path.exists('/usr/local/bin/dot') and os.path.exists('/usr/local/bin/neato'):
+ try:
+ dot.dot='/usr/local/bin/dot'
+ dot.neato='/usr/local/bin/neato'
+ self.assertFalse(os.path.exists('foo.gif'))
+ dot.save_img('foo')
+ self.assertTrue(os.path.exists('foo.gif'))
+ os.unlink('foo.gif')
+
+ self.assertFalse(os.path.exists('foo.gif'))
+ dot.save_img('foo', mode='neato')
+ self.assertTrue(os.path.exists('foo.gif'))
+ os.unlink('foo.gif')
+
+ finally:
+ if os.path.exists(dot.temp_dot):
+ os.unlink(dot.temp_dot)
+ if os.path.exists(dot.temp_neo):
+ os.unlink(dot.temp_neo)
+
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/altgraph_tests/test_graph.py b/python/altgraph/altgraph_tests/test_graph.py
new file mode 100644
index 000000000..553549f5a
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_graph.py
@@ -0,0 +1,644 @@
+import unittest
+
+from altgraph import GraphError
+from altgraph.Graph import Graph
+
+class TestGraph (unittest.TestCase):
+
+ def test_nodes(self):
+ graph = Graph()
+
+ self.assertEqual(graph.node_list(), [])
+
+ o1 = object()
+ o1b = object()
+ o2 = object()
+ graph.add_node(1, o1)
+ graph.add_node(1, o1b)
+ graph.add_node(2, o2)
+ graph.add_node(3)
+
+ self.assertRaises(TypeError, graph.add_node, [])
+
+ self.assertTrue(graph.node_data(1) is o1)
+ self.assertTrue(graph.node_data(2) is o2)
+ self.assertTrue(graph.node_data(3) is None)
+
+ self.assertTrue(1 in graph)
+ self.assertTrue(2 in graph)
+ self.assertTrue(3 in graph)
+
+ self.assertEqual(graph.number_of_nodes(), 3)
+ self.assertEqual(graph.number_of_hidden_nodes(), 0)
+ self.assertEqual(graph.hidden_node_list(), [])
+ self.assertEqual(list(sorted(graph)), [1, 2, 3])
+
+ graph.hide_node(1)
+ graph.hide_node(2)
+ graph.hide_node(3)
+
+
+ self.assertEqual(graph.number_of_nodes(), 0)
+ self.assertEqual(graph.number_of_hidden_nodes(), 3)
+ self.assertEqual(list(sorted(graph.hidden_node_list())), [1, 2, 3])
+
+ self.assertFalse(1 in graph)
+ self.assertFalse(2 in graph)
+ self.assertFalse(3 in graph)
+
+ graph.add_node(1)
+ self.assertFalse(1 in graph)
+
+ graph.restore_node(1)
+ self.assertTrue(1 in graph)
+ self.assertFalse(2 in graph)
+ self.assertFalse(3 in graph)
+
+ graph.restore_all_nodes()
+ self.assertTrue(1 in graph)
+ self.assertTrue(2 in graph)
+ self.assertTrue(3 in graph)
+
+ self.assertEqual(list(sorted(graph.node_list())), [1, 2, 3])
+
+ v = graph.describe_node(1)
+ self.assertEqual(v, (1, o1, [], []))
+
+ def test_edges(self):
+ graph = Graph()
+ graph.add_node(1)
+ graph.add_node(2)
+ graph.add_node(3)
+ graph.add_node(4)
+ graph.add_node(5)
+
+ self.assertTrue(isinstance(graph.edge_list(), list))
+
+ graph.add_edge(1, 2)
+ graph.add_edge(4, 5, 'a')
+
+ self.assertRaises(GraphError, graph.add_edge, 'a', 'b', create_nodes=False)
+
+ self.assertEqual(graph.number_of_hidden_edges(), 0)
+ self.assertEqual(graph.number_of_edges(), 2)
+ e = graph.edge_by_node(1, 2)
+ self.assertTrue(isinstance(e, int))
+ graph.hide_edge(e)
+ self.assertEqual(graph.number_of_hidden_edges(), 1)
+ self.assertEqual(graph.number_of_edges(), 1)
+ e2 = graph.edge_by_node(1, 2)
+ self.assertTrue(e2 is None)
+
+ graph.restore_edge(e)
+ e2 = graph.edge_by_node(1, 2)
+ self.assertEqual(e, e2)
+ self.assertEqual(graph.number_of_hidden_edges(), 0)
+
+ self.assertEqual(graph.number_of_edges(), 2)
+
+ e1 = graph.edge_by_node(1, 2)
+ e2 = graph.edge_by_node(4, 5)
+ graph.hide_edge(e1)
+ graph.hide_edge(e2)
+
+ self.assertEqual(graph.number_of_edges(), 0)
+ graph.restore_all_edges()
+ self.assertEqual(graph.number_of_edges(), 2)
+
+ self.assertEqual(graph.edge_by_id(e1), (1,2))
+ self.assertRaises(GraphError, graph.edge_by_id, (e1+1)*(e2+1)+1)
+
+ self.assertEqual(list(sorted(graph.edge_list())), [e1, e2])
+
+ self.assertEqual(graph.describe_edge(e1), (e1, 1, 1, 2))
+ self.assertEqual(graph.describe_edge(e2), (e2, 'a', 4, 5))
+
+ self.assertEqual(graph.edge_data(e1), 1)
+ self.assertEqual(graph.edge_data(e2), 'a')
+
+ self.assertEqual(graph.head(e2), 4)
+ self.assertEqual(graph.tail(e2), 5)
+
+ graph.add_edge(1, 3)
+ graph.add_edge(1, 5)
+ graph.add_edge(4, 1)
+
+ self.assertEqual(list(sorted(graph.out_nbrs(1))), [2, 3, 5])
+ self.assertEqual(list(sorted(graph.inc_nbrs(1))), [4])
+ self.assertEqual(list(sorted(graph.inc_nbrs(5))), [1, 4])
+ self.assertEqual(list(sorted(graph.all_nbrs(1))), [2, 3, 4, 5])
+
+ graph.add_edge(5, 1)
+ self.assertEqual(list(sorted(graph.all_nbrs(5))), [1, 4])
+
+ self.assertEqual(graph.out_degree(1), 3)
+ self.assertEqual(graph.inc_degree(2), 1)
+ self.assertEqual(graph.inc_degree(5), 2)
+ self.assertEqual(graph.all_degree(5), 3)
+
+ v = graph.out_edges(4)
+ self.assertTrue(isinstance(v, list))
+ self.assertEqual(graph.edge_by_id(v[0]), (4, 5))
+
+ v = graph.out_edges(1)
+ for e in v:
+ self.assertEqual(graph.edge_by_id(e)[0], 1)
+
+ v = graph.inc_edges(1)
+ self.assertTrue(isinstance(v, list))
+ self.assertEqual(graph.edge_by_id(v[0]), (4, 1))
+
+ v = graph.inc_edges(5)
+ for e in v:
+ self.assertEqual(graph.edge_by_id(e)[1], 5)
+
+ v = graph.all_edges(5)
+ for e in v:
+ self.assertTrue(graph.edge_by_id(e)[1] == 5 or graph.edge_by_id(e)[0] == 5)
+
+ e1 = graph.edge_by_node(1, 2)
+ self.assertTrue(isinstance(e1, int))
+ graph.hide_node(1)
+ self.assertRaises(GraphError, graph.edge_by_node, 1, 2)
+ graph.restore_node(1)
+ e2 = graph.edge_by_node(1, 2)
+ self.assertEqual(e1, e2)
+
+
+
+ def test_toposort(self):
+ graph = Graph()
+ graph.add_node(1)
+ graph.add_node(2)
+ graph.add_node(3)
+ graph.add_node(4)
+ graph.add_node(5)
+
+ graph.add_edge(1, 2)
+ graph.add_edge(1, 3)
+ graph.add_edge(2, 4)
+ graph.add_edge(3, 5)
+
+ ok, result = graph.forw_topo_sort()
+ self.assertTrue(ok)
+ for idx in range(1, 6):
+ self.assertTrue(idx in result)
+
+ self.assertTrue(result.index(1) < result.index(2))
+ self.assertTrue(result.index(1) < result.index(3))
+ self.assertTrue(result.index(2) < result.index(4))
+ self.assertTrue(result.index(3) < result.index(5))
+
+ ok, result = graph.back_topo_sort()
+ self.assertTrue(ok)
+ for idx in range(1, 6):
+ self.assertTrue(idx in result)
+ self.assertTrue(result.index(2) < result.index(1))
+ self.assertTrue(result.index(3) < result.index(1))
+ self.assertTrue(result.index(4) < result.index(2))
+ self.assertTrue(result.index(5) < result.index(3))
+
+
+        # Same graph as before, but with the edges
+        # reversed, which means we should get the
+        # same results as before when using
+        # back_topo_sort rather than forw_topo_sort
+        # (and vice versa)
+
+ graph = Graph()
+ graph.add_node(1)
+ graph.add_node(2)
+ graph.add_node(3)
+ graph.add_node(4)
+ graph.add_node(5)
+
+ graph.add_edge(2, 1)
+ graph.add_edge(3, 1)
+ graph.add_edge(4, 2)
+ graph.add_edge(5, 3)
+
+ ok, result = graph.back_topo_sort()
+ self.assertTrue(ok)
+ for idx in range(1, 6):
+ self.assertTrue(idx in result)
+
+ self.assertTrue(result.index(1) < result.index(2))
+ self.assertTrue(result.index(1) < result.index(3))
+ self.assertTrue(result.index(2) < result.index(4))
+ self.assertTrue(result.index(3) < result.index(5))
+
+ ok, result = graph.forw_topo_sort()
+ self.assertTrue(ok)
+ for idx in range(1, 6):
+ self.assertTrue(idx in result)
+ self.assertTrue(result.index(2) < result.index(1))
+ self.assertTrue(result.index(3) < result.index(1))
+ self.assertTrue(result.index(4) < result.index(2))
+ self.assertTrue(result.index(5) < result.index(3))
+
+
+ # Create a cycle
+ graph.add_edge(1, 5)
+ ok, result = graph.forw_topo_sort()
+ self.assertFalse(ok)
+ ok, result = graph.back_topo_sort()
+ self.assertFalse(ok)
+
+ def test_bfs_subgraph(self):
+ graph = Graph()
+ graph.add_edge(1, 2)
+ graph.add_edge(1, 4)
+ graph.add_edge(2, 4)
+ graph.add_edge(4, 8)
+ graph.add_edge(4, 9)
+ graph.add_edge(4, 10)
+ graph.add_edge(8, 10)
+
+ subgraph = graph.forw_bfs_subgraph(10)
+ self.assertTrue(isinstance(subgraph, Graph))
+ self.assertEqual(subgraph.number_of_nodes(), 1)
+ self.assertTrue(10 in subgraph)
+ self.assertEqual(subgraph.number_of_edges(), 0)
+
+ subgraph = graph.forw_bfs_subgraph(4)
+ self.assertTrue(isinstance(subgraph, Graph))
+ self.assertEqual(subgraph.number_of_nodes(), 4)
+ self.assertTrue(4 in subgraph)
+ self.assertTrue(8 in subgraph)
+ self.assertTrue(9 in subgraph)
+ self.assertTrue(10 in subgraph)
+ self.assertEqual(subgraph.number_of_edges(), 4)
+ e = subgraph.edge_by_node(4, 8)
+ e = subgraph.edge_by_node(4, 9)
+ e = subgraph.edge_by_node(4, 10)
+ e = subgraph.edge_by_node(8, 10)
+
+        # Same graph as before, but with the edges
+        # reversed. This yields the same results,
+        # but now for back_bfs_subgraph rather than
+        # forw_bfs_subgraph.
+
+ graph = Graph()
+ graph.add_edge(2, 1)
+ graph.add_edge(4, 1)
+ graph.add_edge(4, 2)
+ graph.add_edge(8, 4)
+ graph.add_edge(9, 4)
+ graph.add_edge(10, 4)
+ graph.add_edge(10, 8)
+
+ subgraph = graph.back_bfs_subgraph(10)
+ self.assertTrue(isinstance(subgraph, Graph))
+ self.assertEqual(subgraph.number_of_nodes(), 1)
+ self.assertTrue(10 in subgraph)
+ self.assertEqual(subgraph.number_of_edges(), 0)
+
+ subgraph = graph.back_bfs_subgraph(4)
+ self.assertTrue(isinstance(subgraph, Graph))
+ self.assertEqual(subgraph.number_of_nodes(), 4)
+ self.assertTrue(4 in subgraph)
+ self.assertTrue(8 in subgraph)
+ self.assertTrue(9 in subgraph)
+ self.assertTrue(10 in subgraph)
+ self.assertEqual(subgraph.number_of_edges(), 4)
+ e = subgraph.edge_by_node(4, 8)
+ e = subgraph.edge_by_node(4, 9)
+ e = subgraph.edge_by_node(4, 10)
+ e = subgraph.edge_by_node(8, 10)
+
+ def test_iterdfs(self):
+ graph = Graph()
+ graph.add_edge("1", "1.1")
+ graph.add_edge("1", "1.2")
+ graph.add_edge("1", "1.3")
+ graph.add_edge("1.1", "1.1.1")
+ graph.add_edge("1.1", "1.1.2")
+ graph.add_edge("1.2", "1.2.1")
+ graph.add_edge("1.2", "1.2.2")
+ graph.add_edge("1.2.2", "1.2.2.1")
+ graph.add_edge("1.2.2", "1.2.2.2")
+ graph.add_edge("1.2.2", "1.2.2.3")
+
+ result = list(graph.iterdfs("1"))
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+ result = list(graph.iterdfs("1", "1.2.1"))
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1'
+ ])
+
+ result = graph.forw_dfs("1")
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+ result = graph.forw_dfs("1", "1.2.1")
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1'
+ ])
+
+ graph = Graph()
+ graph.add_edge("1.1", "1")
+ graph.add_edge("1.2", "1")
+ graph.add_edge("1.3", "1")
+ graph.add_edge("1.1.1", "1.1")
+ graph.add_edge("1.1.2", "1.1")
+ graph.add_edge("1.2.1", "1.2")
+ graph.add_edge("1.2.2", "1.2")
+ graph.add_edge("1.2.2.1", "1.2.2")
+ graph.add_edge("1.2.2.2", "1.2.2")
+ graph.add_edge("1.2.2.3", "1.2.2")
+
+ result = list(graph.iterdfs("1", forward=False))
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+ result = list(graph.iterdfs("1", "1.2.1", forward=False))
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1'
+ ])
+ result = graph.back_dfs("1")
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+ result = graph.back_dfs("1", "1.2.1")
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1'
+ ])
+
+
+        # Introduce a cycle:
+ graph.add_edge("1", "1.2")
+ result = list(graph.iterdfs("1", forward=False))
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+
+ result = graph.back_dfs("1")
+ self.assertEqual(result, [
+ '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2',
+ '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1'
+ ])
+
+
+ def test_iterdata(self):
+ graph = Graph()
+ graph.add_node("1", "I")
+ graph.add_node("1.1", "I.I")
+ graph.add_node("1.2", "I.II")
+ graph.add_node("1.3", "I.III")
+ graph.add_node("1.1.1", "I.I.I")
+ graph.add_node("1.1.2", "I.I.II")
+ graph.add_node("1.2.1", "I.II.I")
+ graph.add_node("1.2.2", "I.II.II")
+ graph.add_node("1.2.2.1", "I.II.II.I")
+ graph.add_node("1.2.2.2", "I.II.II.II")
+ graph.add_node("1.2.2.3", "I.II.II.III")
+
+ graph.add_edge("1", "1.1")
+ graph.add_edge("1", "1.2")
+ graph.add_edge("1", "1.3")
+ graph.add_edge("1.1", "1.1.1")
+ graph.add_edge("1.1", "1.1.2")
+ graph.add_edge("1.2", "1.2.1")
+ graph.add_edge("1.2", "1.2.2")
+ graph.add_edge("1.2.2", "1.2.2.1")
+ graph.add_edge("1.2.2", "1.2.2.2")
+ graph.add_edge("1.2.2", "1.2.2.3")
+
+ result = list(graph.iterdata("1", forward=True))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II',
+ 'I.II.II.I', 'I.II.I', 'I.I', 'I.I.II', 'I.I.I'
+ ])
+
+ result = list(graph.iterdata("1", end="1.2.1", forward=True))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II',
+ 'I.II.II.I', 'I.II.I'
+ ])
+
+ result = list(graph.iterdata("1", condition=lambda n: len(n) < 6, forward=True))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II',
+ 'I.I', 'I.I.I'
+ ])
+
+
+        # And the reverse option:
+ graph = Graph()
+ graph.add_node("1", "I")
+ graph.add_node("1.1", "I.I")
+ graph.add_node("1.2", "I.II")
+ graph.add_node("1.3", "I.III")
+ graph.add_node("1.1.1", "I.I.I")
+ graph.add_node("1.1.2", "I.I.II")
+ graph.add_node("1.2.1", "I.II.I")
+ graph.add_node("1.2.2", "I.II.II")
+ graph.add_node("1.2.2.1", "I.II.II.I")
+ graph.add_node("1.2.2.2", "I.II.II.II")
+ graph.add_node("1.2.2.3", "I.II.II.III")
+
+ graph.add_edge("1.1", "1")
+ graph.add_edge("1.2", "1")
+ graph.add_edge("1.3", "1")
+ graph.add_edge("1.1.1", "1.1")
+ graph.add_edge("1.1.2", "1.1")
+ graph.add_edge("1.2.1", "1.2")
+ graph.add_edge("1.2.2", "1.2")
+ graph.add_edge("1.2.2.1", "1.2.2")
+ graph.add_edge("1.2.2.2", "1.2.2")
+ graph.add_edge("1.2.2.3", "1.2.2")
+
+ result = list(graph.iterdata("1", forward=False))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II',
+ 'I.II.II.I', 'I.II.I', 'I.I', 'I.I.II', 'I.I.I'
+ ])
+
+ result = list(graph.iterdata("1", end="1.2.1", forward=False))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II',
+ 'I.II.II.I', 'I.II.I'
+ ])
+
+ result = list(graph.iterdata("1", condition=lambda n: len(n) < 6, forward=False))
+ self.assertEqual(result, [
+ 'I', 'I.III', 'I.II',
+ 'I.I', 'I.I.I'
+ ])
+
+ def test_bfs(self):
+ graph = Graph()
+ graph.add_edge("1", "1.1")
+ graph.add_edge("1.1", "1.1.1")
+ graph.add_edge("1.1", "1.1.2")
+ graph.add_edge("1.1.2", "1.1.2.1")
+ graph.add_edge("1.1.2", "1.1.2.2")
+ graph.add_edge("1", "1.2")
+ graph.add_edge("1", "1.3")
+ graph.add_edge("1.2", "1.2.1")
+
+ self.assertEqual(graph.forw_bfs("1"),
+ ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2'])
+ self.assertEqual(graph.forw_bfs("1", "1.1.1"),
+ ['1', '1.1', '1.2', '1.3', '1.1.1'])
+
+
+ # And the "reverse" graph
+ graph = Graph()
+ graph.add_edge("1.1", "1")
+ graph.add_edge("1.1.1", "1.1")
+ graph.add_edge("1.1.2", "1.1")
+ graph.add_edge("1.1.2.1", "1.1.2")
+ graph.add_edge("1.1.2.2", "1.1.2")
+ graph.add_edge("1.2", "1")
+ graph.add_edge("1.3", "1")
+ graph.add_edge("1.2.1", "1.2")
+
+ self.assertEqual(graph.back_bfs("1"),
+ ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2'])
+ self.assertEqual(graph.back_bfs("1", "1.1.1"),
+ ['1', '1.1', '1.2', '1.3', '1.1.1'])
+
+
+
+ # check cycle handling
+ graph.add_edge("1", "1.2.1")
+ self.assertEqual(graph.back_bfs("1"),
+ ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2'])
+
+
+ def test_connected(self):
+ graph = Graph()
+ graph.add_node(1)
+ graph.add_node(2)
+ graph.add_node(3)
+ graph.add_node(4)
+
+ self.assertFalse(graph.connected())
+
+ graph.add_edge(1, 2)
+ graph.add_edge(3, 4)
+ self.assertFalse(graph.connected())
+
+ graph.add_edge(2, 3)
+ graph.add_edge(4, 1)
+ self.assertTrue(graph.connected())
+
+ def test_edges_complex(self):
+ g = Graph()
+ g.add_edge(1, 2)
+ e = g.edge_by_node(1,2)
+ g.hide_edge(e)
+ g.hide_node(2)
+ self.assertRaises(GraphError, g.restore_edge, e)
+
+ g.restore_all_edges()
+ self.assertRaises(GraphError, g.edge_by_id, e)
+
+ def test_clust_coef(self):
+ g = Graph()
+ g.add_edge(1, 2)
+ g.add_edge(1, 3)
+ g.add_edge(1, 4)
+ self.assertEqual(g.clust_coef(1), 0)
+
+ g.add_edge(2, 5)
+ g.add_edge(3, 5)
+ g.add_edge(4, 5)
+ self.assertEqual(g.clust_coef(1), 0)
+
+ g.add_edge(2, 3)
+ self.assertEqual(g.clust_coef(1), 1./6)
+ g.add_edge(2, 4)
+ self.assertEqual(g.clust_coef(1), 2./6)
+ g.add_edge(4, 2)
+ self.assertEqual(g.clust_coef(1), 3./6)
+
+ g.add_edge(2, 3)
+ g.add_edge(2, 4)
+ g.add_edge(3, 4)
+ g.add_edge(3, 2)
+ g.add_edge(4, 2)
+ g.add_edge(4, 3)
+ self.assertEqual(g.clust_coef(1), 1)
+
+
+ def test_get_hops(self):
+ graph = Graph()
+ graph.add_edge(1, 2)
+ graph.add_edge(1, 3)
+ graph.add_edge(2, 4)
+ graph.add_edge(4, 5)
+ graph.add_edge(5, 7)
+ graph.add_edge(7, 8)
+
+ self.assertEqual(graph.get_hops(1),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])
+
+ self.assertEqual(graph.get_hops(1, 5),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3)])
+
+ graph.add_edge(5, 1)
+ graph.add_edge(7, 1)
+ graph.add_edge(7, 4)
+
+ self.assertEqual(graph.get_hops(1),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])
+
+ # And the reverse graph
+ graph = Graph()
+ graph.add_edge(2, 1)
+ graph.add_edge(3, 1)
+ graph.add_edge(4, 2)
+ graph.add_edge(5, 4)
+ graph.add_edge(7, 5)
+ graph.add_edge(8, 7)
+
+ self.assertEqual(graph.get_hops(1, forward=False),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])
+
+ self.assertEqual(graph.get_hops(1, 5, forward=False),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3)])
+
+ graph.add_edge(1, 5)
+ graph.add_edge(1, 7)
+ graph.add_edge(4, 7)
+
+ self.assertEqual(graph.get_hops(1, forward=False),
+ [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])
+
+
+ def test_constructor(self):
+ graph = Graph(iter([
+ (1, 2),
+ (2, 3, 'a'),
+ (1, 3),
+ (3, 4),
+ ]))
+ self.assertEqual(graph.number_of_nodes(), 4)
+ self.assertEqual(graph.number_of_edges(), 4)
+ try:
+ graph.edge_by_node(1,2)
+ graph.edge_by_node(2,3)
+ graph.edge_by_node(1,3)
+ graph.edge_by_node(3,4)
+ except GraphError:
+ self.fail("Incorrect graph")
+
+ self.assertEqual(graph.edge_data(graph.edge_by_node(2, 3)), 'a')
+
+ self.assertRaises(GraphError, Graph, [(1,2,3,4)])
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/altgraph_tests/test_graphstat.py b/python/altgraph/altgraph_tests/test_graphstat.py
new file mode 100644
index 000000000..b628b6f24
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_graphstat.py
@@ -0,0 +1,70 @@
+import unittest
+
+from altgraph import GraphStat
+from altgraph import Graph
+import sys
+
+class TestDegreesDist (unittest.TestCase):
+
+ def test_simple(self):
+ a = Graph.Graph()
+ self.assertEqual(GraphStat.degree_dist(a), [])
+
+ a.add_node(1)
+ a.add_node(2)
+ a.add_node(3)
+
+ self.assertEqual(GraphStat.degree_dist(a), GraphStat._binning([0, 0, 0]))
+
+ for x in range(100):
+ a.add_node(x)
+
+ for x in range(1, 100):
+ for y in range(1, 50):
+ if x % y == 0:
+ a.add_edge(x, y)
+
+ counts_inc = []
+ counts_out = []
+ for n in a:
+ counts_inc.append(a.inc_degree(n))
+ counts_out.append(a.out_degree(n))
+
+ self.assertEqual(GraphStat.degree_dist(a), GraphStat._binning(counts_out))
+ self.assertEqual(GraphStat.degree_dist(a, mode='inc'), GraphStat._binning(counts_inc))
+
+class TestBinning (unittest.TestCase):
+ def test_simple(self):
+
+ # Binning [0, 100) into 10 bins
+ a = list(range(100))
+ out = GraphStat._binning(a, limits=(0, 100), bin_num=10)
+
+ self.assertEqual(out,
+ [ (x*1.0, 10) for x in range(5, 100, 10) ])
+
+
+ # Check that outliers are ignored.
+ a = list(range(100))
+ out = GraphStat._binning(a, limits=(0, 90), bin_num=9)
+
+ self.assertEqual(out,
+ [ (x*1.0, 10) for x in range(5, 90, 10) ])
+
+
+ out = GraphStat._binning(a, limits=(0, 100), bin_num=15)
+ binSize = 100 / 15.0
+ result = [0]*15
+ for i in range(100):
+ bin = int(i/binSize)
+ try:
+ result[bin] += 1
+ except IndexError:
+ pass
+
+ result = [ (i * binSize + binSize/2, result[i]) for i in range(len(result))]
+
+ self.assertEqual(result, out)
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/altgraph_tests/test_graphutil.py b/python/altgraph/altgraph_tests/test_graphutil.py
new file mode 100644
index 000000000..c1166237c
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_graphutil.py
@@ -0,0 +1,140 @@
+import unittest
+from altgraph import GraphUtil
+from altgraph import Graph, GraphError
+
+class TestGraphUtil (unittest.TestCase):
+
+ def test_generate_random(self):
+ g = GraphUtil.generate_random_graph(10, 50)
+ self.assertEqual(g.number_of_nodes(), 10)
+ self.assertEqual(g.number_of_edges(), 50)
+
+ seen = set()
+
+ for e in g.edge_list():
+ h, t = g.edge_by_id(e)
+ self.assertFalse(h == t)
+ self.assertTrue((h, t) not in seen)
+ seen.add((h, t))
+
+ g = GraphUtil.generate_random_graph(5, 30, multi_edges=True)
+ self.assertEqual(g.number_of_nodes(), 5)
+ self.assertEqual(g.number_of_edges(), 30)
+
+ seen = set()
+
+ for e in g.edge_list():
+ h, t = g.edge_by_id(e)
+ self.assertFalse(h == t)
+ if (h, t) in seen:
+ break
+ seen.add((h, t))
+
+ else:
+ self.fail("no duplicates?")
+
+ g = GraphUtil.generate_random_graph(5, 21, self_loops=True)
+ self.assertEqual(g.number_of_nodes(), 5)
+ self.assertEqual(g.number_of_edges(), 21)
+
+ seen = set()
+
+ for e in g.edge_list():
+ h, t = g.edge_by_id(e)
+ self.assertFalse((h, t) in seen)
+ if h == t:
+ break
+ seen.add((h, t))
+
+ else:
+ self.fail("no self loops?")
+
+ self.assertRaises(GraphError, GraphUtil.generate_random_graph, 5, 21)
+ g = GraphUtil.generate_random_graph(5, 21, True)
+ self.assertRaises(GraphError, GraphUtil.generate_random_graph, 5, 26, True)
+
+ def test_generate_scale_free(self):
+ graph = GraphUtil.generate_scale_free_graph(50, 10)
+ self.assertEqual(graph.number_of_nodes(), 500)
+
+ counts = {}
+ for node in graph:
+ degree = graph.inc_degree(node)
+ try:
+ counts[degree] += 1
+ except KeyError:
+ counts[degree] = 1
+
+ total_counts = sum(counts.values())
+ P = {}
+ for degree, count in counts.items():
+ P[degree] = count * 1.0 / total_counts
+
+        # XXX: use algorithm <http://stackoverflow.com/questions/3433486/how-to-do-exponential-and-logarithmic-curve-fitting-in-python-i-found-only-polyn>
+ # to check if P[degree] ~ degree ** G (for some G)
+
+ #print sorted(P.items())
+
+ #print sorted([(count, degree) for degree, count in counts.items()])
+
+ #self.fail("missing tests for GraphUtil.generate_scale_free_graph")
+
+ def test_filter_stack(self):
+ g = Graph.Graph()
+ g.add_node("1", "N.1")
+ g.add_node("1.1", "N.1.1")
+ g.add_node("1.1.1", "N.1.1.1")
+ g.add_node("1.1.2", "N.1.1.2")
+ g.add_node("1.1.3", "N.1.1.3")
+ g.add_node("1.1.1.1", "N.1.1.1.1")
+ g.add_node("1.1.1.2", "N.1.1.1.2")
+ g.add_node("1.1.2.1", "N.1.1.2.1")
+ g.add_node("1.1.2.2", "N.1.1.2.2")
+ g.add_node("1.1.2.3", "N.1.1.2.3")
+ g.add_node("2", "N.2")
+
+ g.add_edge("1", "1.1")
+ g.add_edge("1.1", "1.1.1")
+ g.add_edge("1.1", "1.1.2")
+ g.add_edge("1.1", "1.1.3")
+ g.add_edge("1.1.1", "1.1.1.1")
+ g.add_edge("1.1.1", "1.1.1.2")
+ g.add_edge("1.1.2", "1.1.2.1")
+ g.add_edge("1.1.2", "1.1.2.2")
+ g.add_edge("1.1.2", "1.1.2.3")
+
+ v, r, o = GraphUtil.filter_stack(g, "1", [
+ lambda n: n != "N.1.1.1", lambda n: n != "N.1.1.2.3" ])
+
+ self.assertEqual(v,
+ set(["1", "1.1", "1.1.1", "1.1.2", "1.1.3",
+ "1.1.1.1", "1.1.1.2", "1.1.2.1", "1.1.2.2",
+ "1.1.2.3"]))
+ self.assertEqual(r, set([
+ "1.1.1", "1.1.2.3"]))
+
+ o.sort()
+ self.assertEqual(o,
+ [
+ ("1.1", "1.1.1.1"),
+ ("1.1", "1.1.1.2")
+ ])
+
+ v, r, o = GraphUtil.filter_stack(g, "1", [
+ lambda n: n != "N.1.1.1", lambda n: n != "N.1.1.1.2" ])
+
+ self.assertEqual(v,
+ set(["1", "1.1", "1.1.1", "1.1.2", "1.1.3",
+ "1.1.1.1", "1.1.1.2", "1.1.2.1", "1.1.2.2",
+ "1.1.2.3"]))
+ self.assertEqual(r, set([
+ "1.1.1", "1.1.1.2"]))
+
+ self.assertEqual(o,
+ [
+ ("1.1", "1.1.1.1"),
+ ])
+
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/altgraph_tests/test_object_graph.py b/python/altgraph/altgraph_tests/test_object_graph.py
new file mode 100644
index 000000000..9035607e7
--- /dev/null
+++ b/python/altgraph/altgraph_tests/test_object_graph.py
@@ -0,0 +1,349 @@
+import unittest
+import sys
+from altgraph.ObjectGraph import ObjectGraph
+from altgraph.Graph import Graph
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+
+class Node (object):
+ def __init__(self, graphident):
+ self.graphident = graphident
+
+class SubNode (Node):
+ pass
+
+class ArgNode (object):
+ def __init__(self, graphident, *args, **kwds):
+ self.graphident = graphident
+ self.args = args
+ self.kwds = kwds
+
+ def __repr__(self):
+ return '<ArgNode %s>'%(self.graphident,)
+
+class TestObjectGraph (unittest.TestCase):
+
+ def test_constructor(self):
+ graph = ObjectGraph()
+ self.assertTrue(isinstance(graph, ObjectGraph))
+
+ g = Graph()
+ graph = ObjectGraph(g)
+ self.assertTrue(graph.graph is g)
+ self.assertEqual(graph.debug, 0)
+ self.assertEqual(graph.indent, 0)
+
+ graph = ObjectGraph(debug=5)
+ self.assertEqual(graph.debug, 5)
+
+ def test_repr(self):
+ graph = ObjectGraph()
+ self.assertEqual(repr(graph), '<ObjectGraph>')
+
+
+ def testNodes(self):
+ graph = ObjectGraph()
+ n1 = Node("n1")
+ n2 = Node("n2")
+ n3 = Node("n3")
+ n4 = Node("n4")
+
+ n1b = Node("n1")
+
+ self.assertTrue(graph.getIdent(graph) is graph)
+ self.assertTrue(graph.getRawIdent(graph) is graph)
+
+ graph.addNode(n1)
+ graph.addNode(n2)
+ graph.addNode(n3)
+
+ self.assertTrue(n1 in graph)
+ self.assertFalse(n4 in graph)
+ self.assertTrue("n1" in graph)
+ self.assertFalse("n4" in graph)
+
+ self.assertTrue(graph.findNode(n1) is n1)
+ self.assertTrue(graph.findNode(n1b) is n1)
+ self.assertTrue(graph.findNode(n2) is n2)
+ self.assertTrue(graph.findNode(n4) is None)
+ self.assertTrue(graph.findNode("n1") is n1)
+ self.assertTrue(graph.findNode("n2") is n2)
+ self.assertTrue(graph.findNode("n4") is None)
+
+ self.assertEqual(graph.getRawIdent(n1), "n1")
+ self.assertEqual(graph.getRawIdent(n1b), "n1")
+ self.assertEqual(graph.getRawIdent(n4), "n4")
+ self.assertEqual(graph.getRawIdent("n1"), None)
+
+ self.assertEqual(graph.getIdent(n1), "n1")
+ self.assertEqual(graph.getIdent(n1b), "n1")
+ self.assertEqual(graph.getIdent(n4), "n4")
+ self.assertEqual(graph.getIdent("n1"), "n1")
+
+ self.assertTrue(n3 in graph)
+ graph.removeNode(n3)
+ self.assertTrue(n3 not in graph)
+ graph.addNode(n3)
+ self.assertTrue(n3 in graph)
+
+ n = graph.createNode(SubNode, "n1")
+ self.assertTrue(n is n1)
+
+ n = graph.createNode(SubNode, "n8")
+ self.assertTrue(isinstance(n, SubNode))
+ self.assertTrue(n in graph)
+ self.assertTrue(graph.findNode("n8") is n)
+
+ n = graph.createNode(ArgNode, "args", 1, 2, 3, a='a', b='b')
+ self.assertTrue(isinstance(n, ArgNode))
+ self.assertTrue(n in graph)
+ self.assertTrue(graph.findNode("args") is n)
+ self.assertEqual(n.args, (1, 2, 3))
+ self.assertEqual(n.kwds, {'a':'a', 'b':'b'})
+
+ def testEdges(self):
+ graph = ObjectGraph()
+ n1 = graph.createNode(ArgNode, "n1", 1)
+ n2 = graph.createNode(ArgNode, "n2", 1)
+ n3 = graph.createNode(ArgNode, "n3", 1)
+ n4 = graph.createNode(ArgNode, "n4", 1)
+
+ graph.createReference(n1, n2, "n1-n2")
+ graph.createReference("n1", "n3", "n1-n3")
+ graph.createReference("n2", n3)
+
+ g = graph.graph
+ e = g.edge_by_node("n1", "n2")
+ self.assertTrue(e is not None)
+ self.assertEqual(g.edge_data(e), "n1-n2")
+
+ e = g.edge_by_node("n1", "n3")
+ self.assertTrue(e is not None)
+ self.assertEqual(g.edge_data(e), "n1-n3")
+
+ e = g.edge_by_node("n2", "n3")
+ self.assertTrue(e is not None)
+ self.assertEqual(g.edge_data(e), None)
+
+ e = g.edge_by_node("n1", "n4")
+ self.assertTrue(e is None)
+
+ graph.removeReference(n1, n2)
+ e = g.edge_by_node("n1", "n2")
+ self.assertTrue(e is None)
+
+ graph.removeReference("n1", "n3")
+ e = g.edge_by_node("n1", "n3")
+ self.assertTrue(e is None)
+
+ graph.createReference(n1, n2, "foo")
+ e = g.edge_by_node("n1", "n2")
+ self.assertTrue(e is not None)
+ self.assertEqual(g.edge_data(e), "foo")
+
+
+ def test_flatten(self):
+ graph = ObjectGraph()
+ n1 = graph.createNode(ArgNode, "n1", 1)
+ n2 = graph.createNode(ArgNode, "n2", 2)
+ n3 = graph.createNode(ArgNode, "n3", 3)
+ n4 = graph.createNode(ArgNode, "n4", 4)
+ n5 = graph.createNode(ArgNode, "n5", 5)
+ n6 = graph.createNode(ArgNode, "n6", 6)
+ n7 = graph.createNode(ArgNode, "n7", 7)
+ n8 = graph.createNode(ArgNode, "n8", 8)
+
+ graph.createReference(graph, n1)
+ graph.createReference(graph, n7)
+ graph.createReference(n1, n2)
+ graph.createReference(n1, n4)
+ graph.createReference(n2, n3)
+ graph.createReference(n2, n5)
+ graph.createReference(n5, n6)
+ graph.createReference(n4, n6)
+ graph.createReference(n4, n2)
+
+ self.assertFalse(isinstance(graph.flatten(), list))
+
+ fl = list(graph.flatten())
+ self.assertTrue(n1 in fl)
+ self.assertTrue(n2 in fl)
+ self.assertTrue(n3 in fl)
+ self.assertTrue(n4 in fl)
+ self.assertTrue(n5 in fl)
+ self.assertTrue(n6 in fl)
+ self.assertTrue(n7 in fl)
+ self.assertFalse(n8 in fl)
+
+ fl = list(graph.flatten(start=n2))
+ self.assertFalse(n1 in fl)
+ self.assertTrue(n2 in fl)
+ self.assertTrue(n3 in fl)
+ self.assertFalse(n4 in fl)
+ self.assertTrue(n5 in fl)
+ self.assertTrue(n6 in fl)
+ self.assertFalse(n7 in fl)
+ self.assertFalse(n8 in fl)
+
+ graph.createReference(n1, n5)
+ fl = list(graph.flatten(lambda n: n.args[0] % 2 != 0))
+ self.assertTrue(n1 in fl)
+ self.assertFalse(n2 in fl)
+ self.assertFalse(n3 in fl)
+ self.assertFalse(n4 in fl)
+ self.assertTrue(n5 in fl)
+ self.assertFalse(n6 in fl)
+ self.assertTrue(n7 in fl)
+ self.assertFalse(n8 in fl)
+
+ def test_iter_nodes(self):
+ graph = ObjectGraph()
+ n1 = graph.createNode(ArgNode, "n1", 1)
+ n2 = graph.createNode(ArgNode, "n2", 2)
+ n3 = graph.createNode(ArgNode, "n3", 3)
+ n4 = graph.createNode(ArgNode, "n4", 4)
+ n5 = graph.createNode(ArgNode, "n5", 5)
+ n6 = graph.createNode(ArgNode, "n6", 5)
+
+ nodes = graph.nodes()
+ if sys.version[0] == '2':
+ self.assertTrue(hasattr(nodes, 'next'))
+ else:
+ self.assertTrue(hasattr(nodes, '__next__'))
+ self.assertTrue(hasattr(nodes, '__iter__'))
+
+ nodes = list(nodes)
+ self.assertEqual(len(nodes), 6)
+ self.assertTrue(n1 in nodes)
+ self.assertTrue(n2 in nodes)
+ self.assertTrue(n3 in nodes)
+ self.assertTrue(n4 in nodes)
+ self.assertTrue(n5 in nodes)
+ self.assertTrue(n6 in nodes)
+
+ def test_get_edges(self):
+ graph = ObjectGraph()
+ n1 = graph.createNode(ArgNode, "n1", 1)
+ n2 = graph.createNode(ArgNode, "n2", 2)
+ n3 = graph.createNode(ArgNode, "n3", 3)
+ n4 = graph.createNode(ArgNode, "n4", 4)
+ n5 = graph.createNode(ArgNode, "n5", 5)
+ n6 = graph.createNode(ArgNode, "n6", 5)
+
+ graph.createReference(n1, n2)
+ graph.createReference(n1, n3)
+ graph.createReference(n3, n1)
+ graph.createReference(n5, n1)
+ graph.createReference(n2, n4)
+ graph.createReference(n2, n5)
+ graph.createReference(n6, n2)
+
+ outs, ins = graph.get_edges(n1)
+
+ self.assertFalse(isinstance(outs, list))
+ self.assertFalse(isinstance(ins, list))
+
+ ins = list(ins)
+ outs = list(outs)
+
+
+ self.assertTrue(n1 not in outs)
+ self.assertTrue(n2 in outs)
+ self.assertTrue(n3 in outs)
+ self.assertTrue(n4 not in outs)
+ self.assertTrue(n5 not in outs)
+ self.assertTrue(n6 not in outs)
+
+ self.assertTrue(n1 not in ins)
+ self.assertTrue(n2 not in ins)
+ self.assertTrue(n3 in ins)
+ self.assertTrue(n4 not in ins)
+ self.assertTrue(n5 in ins)
+ self.assertTrue(n6 not in ins)
+
+ def test_filterStack(self):
+ graph = ObjectGraph()
+ n1 = graph.createNode(ArgNode, "n1", 0)
+ n11 = graph.createNode(ArgNode, "n1.1", 1)
+ n12 = graph.createNode(ArgNode, "n1.2", 0)
+ n111 = graph.createNode(ArgNode, "n1.1.1", 0)
+ n112 = graph.createNode(ArgNode, "n1.1.2",2)
+ n2 = graph.createNode(ArgNode, "n2", 0)
+ n3 = graph.createNode(ArgNode, "n2", 0)
+
+ graph.createReference(None, n1)
+ graph.createReference(None, n2)
+ graph.createReference(n1, n11)
+ graph.createReference(n1, n12)
+ graph.createReference(n11, n111)
+ graph.createReference(n11, n112)
+
+ self.assertTrue(n1 in graph)
+ self.assertTrue(n2 in graph)
+ self.assertTrue(n11 in graph)
+ self.assertTrue(n12 in graph)
+ self.assertTrue(n111 in graph)
+ self.assertTrue(n112 in graph)
+ self.assertTrue(n2 in graph)
+ self.assertTrue(n3 in graph)
+
+ visited, removes, orphans = graph.filterStack(
+ [lambda n: n.args[0] != 1, lambda n: n.args[0] != 2])
+
+ self.assertEqual(visited, 6)
+ self.assertEqual(removes, 2)
+ self.assertEqual(orphans, 1)
+
+ e = graph.graph.edge_by_node(n1.graphident, n111.graphident)
+ self.assertEqual(graph.graph.edge_data(e), "orphan")
+
+ self.assertTrue(n1 in graph)
+ self.assertTrue(n2 in graph)
+ self.assertTrue(n11 not in graph)
+ self.assertTrue(n12 in graph)
+ self.assertTrue(n111 in graph)
+ self.assertTrue(n112 not in graph)
+ self.assertTrue(n2 in graph)
+ self.assertTrue(n3 in graph)
+
+
+class TestObjectGraphIO (unittest.TestCase):
+ def setUp(self):
+ self._stdout = sys.stdout
+
+ def tearDown(self):
+ sys.stdout = self._stdout
+
+ def test_msg(self):
+ graph = ObjectGraph()
+
+ sys.stdout = fp = StringIO()
+ graph.msg(0, "foo")
+ self.assertEqual(fp.getvalue(), "foo \n")
+
+ sys.stdout = fp = StringIO()
+ graph.msg(5, "foo")
+ self.assertEqual(fp.getvalue(), "")
+
+ sys.stdout = fp = StringIO()
+ graph.debug = 10
+ graph.msg(5, "foo")
+ self.assertEqual(fp.getvalue(), "foo \n")
+
+ sys.stdout = fp = StringIO()
+ graph.msg(0, "foo", 1, "a")
+ self.assertEqual(fp.getvalue(), "foo 1 'a'\n")
+
+ sys.stdout = fp = StringIO()
+ graph.msgin(0, "hello", "world")
+ graph.msg(0, "test me")
+ graph.msgout(0, "bye bye")
+ self.assertEqual(fp.getvalue(), "hello 'world'\n test me \nbye bye \n")
+
+
+if __name__ == "__main__": # pragma: no cover
+ unittest.main()
diff --git a/python/altgraph/doc/Makefile b/python/altgraph/doc/Makefile
new file mode 100644
index 000000000..b91ac8142
--- /dev/null
+++ b/python/altgraph/doc/Makefile
@@ -0,0 +1,89 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/altgraph.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/altgraph.qhc"
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+ "run these through (pdf)latex."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/python/altgraph/doc/_build/doctrees/changelog.doctree b/python/altgraph/doc/_build/doctrees/changelog.doctree
new file mode 100644
index 000000000..b78e2f235
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/changelog.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/core.doctree b/python/altgraph/doc/_build/doctrees/core.doctree
new file mode 100644
index 000000000..367409313
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/core.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/dot.doctree b/python/altgraph/doc/_build/doctrees/dot.doctree
new file mode 100644
index 000000000..f2bd9ea89
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/dot.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/environment.pickle b/python/altgraph/doc/_build/doctrees/environment.pickle
new file mode 100644
index 000000000..4e7b4ea03
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/environment.pickle
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/graph.doctree b/python/altgraph/doc/_build/doctrees/graph.doctree
new file mode 100644
index 000000000..5c9aef42c
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/graph.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/graphalgo.doctree b/python/altgraph/doc/_build/doctrees/graphalgo.doctree
new file mode 100644
index 000000000..e22ca572d
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/graphalgo.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/graphstat.doctree b/python/altgraph/doc/_build/doctrees/graphstat.doctree
new file mode 100644
index 000000000..2e0503615
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/graphstat.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/graphutil.doctree b/python/altgraph/doc/_build/doctrees/graphutil.doctree
new file mode 100644
index 000000000..eb9fa2c65
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/graphutil.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/index.doctree b/python/altgraph/doc/_build/doctrees/index.doctree
new file mode 100644
index 000000000..f3241e5e3
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/index.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/license.doctree b/python/altgraph/doc/_build/doctrees/license.doctree
new file mode 100644
index 000000000..ba3ce378a
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/license.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/doctrees/objectgraph.doctree b/python/altgraph/doc/_build/doctrees/objectgraph.doctree
new file mode 100644
index 000000000..802396b9c
--- /dev/null
+++ b/python/altgraph/doc/_build/doctrees/objectgraph.doctree
Binary files differ
diff --git a/python/altgraph/doc/_build/html/.buildinfo b/python/altgraph/doc/_build/html/.buildinfo
new file mode 100644
index 000000000..003a04976
--- /dev/null
+++ b/python/altgraph/doc/_build/html/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 84aa655833f036f5ba0f6f2dbd1945fa
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/python/altgraph/doc/_build/html/_sources/changelog.txt b/python/altgraph/doc/_build/html/_sources/changelog.txt
new file mode 100644
index 000000000..e491c9544
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/changelog.txt
@@ -0,0 +1,176 @@
+Release history
+===============
+
+0.11
+----
+
+- Stabilize the order of elements in dot file exports,
+ patch from bitbucket user 'pombredanne'.
+
+- Tweak setup.py file to remove dependency on distribute (but
+ keep the dependency on setuptools)
+
+
+0.10.2
+------
+
+- There were no classifiers in the package metadata due to a bug
+  in setup.py
+
+0.10.1
+------
+
+This is a bugfix release
+
+Bug fixes:
+
+- Issue #3: The source archive contains a README.txt
+ while the setup file refers to ReadMe.txt.
+
+  This is caused by a misfeature in distutils; as a
+  workaround I've renamed ReadMe.txt to README.txt
+  in the source tree and setup file.
+
+
+0.10
+-----
+
+This is a minor feature release
+
+Features:
+
+- Do not use "2to3" to support Python 3.
+
+  As a side effect of this, altgraph now supports
+ Python 2.6 and later, and no longer supports
+ earlier releases of Python.
+
+- The order of attributes in the Dot output
+ is now always alphabetical.
+
+ With this change the output will be consistent
+ between runs and Python versions.
+
+0.9
+---
+
+This is a minor bugfix release
+
+Features:
+
+- Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method
+ yielding all nodes in an object graph.
+
+Bugfixes:
+
+- The 0.8 release didn't work with py2app when using
+  Python 3.x.
+
+
+0.8
+-----
+
+This is a minor feature release. The major new feature
+is an extensive set of unittests, which explains almost
+all other changes in this release.
+
+Bugfixes:
+
+- Installing failed with Python 2.5 due to using a distutils
+ class that isn't available in that version of Python
+ (issue #1 on the issue tracker)
+
+- ``altgraph.GraphStat.degree_dist`` now actually works
+
+- ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will
+ no longer create the edge when one of the nodes doesn't
+ exist.
+
+- ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs.
+
+- ``altgraph.Graph.back_topo_sort`` was completely broken in
+ previous releases.
+
+- ``altgraph.Graph.forw_bfs_subgraph`` now actually works.
+
+- ``altgraph.Graph.back_bfs_subgraph`` now actually works.
+
+- ``altgraph.Graph.iterdfs`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+- ``altgraph.Graph.iterdata`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+
+Features:
+
+- The ``altgraph.Graph`` constructor now accepts an argument
+  that contains 2- and 3-tuples instead of requiring that
+ all items have the same size. The (optional) argument can now
+ also be any iterator.
+
+- ``altgraph.Graph.Graph.add_node`` has no effect when you
+ add a hidden node.
+
+- The private method ``altgraph.Graph._bfs`` is no longer
+ present.
+
+- The private method ``altgraph.Graph._dfs`` is no longer
+ present.
+
+- ``altgraph.ObjectGraph`` now has a ``__contains__`` method,
+ which means you can use the ``in`` operator to check if a
+ node is part of a graph.
+
+- ``altgraph.GraphUtil.generate_random_graph`` will raise
+ ``GraphError`` instead of looping forever when it is
+ impossible to create the requested graph.
+
+- ``altgraph.Dot.edge_style`` raises ``GraphError`` when
+ one of the nodes is not present in the graph. The method
+ silently added the tail in the past, but without ensuring
+ a consistent graph state.
+
+- ``altgraph.Dot.save_img`` now works when the mode is
+ ``"neato"``.
+
+0.7.2
+-----
+
+This is a minor bugfix release
+
+Bugfixes:
+
+- distutils didn't include the documentation subtree
+
+0.7.1
+-----
+
+This is a minor feature release
+
+Features:
+
+- Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/altgraph>.
+
+- The repository has moved to bitbucket
+
+- ``altgraph.GraphStat.avg_hops`` is no longer present; the function had no
+ implementation and no specified behaviour.
+
+- The module ``altgraph.compat`` is gone, which means altgraph will no
+ longer work with Python 2.3.
+
+
+0.7.0
+-----
+
+This is a minor feature release.
+
+Features:
+
+- Support for Python 3
+
+- It is now possible to run tests using 'python setup.py test'
+
+ (The actual testsuite is still very minimal though)
diff --git a/python/altgraph/doc/_build/html/_sources/core.txt b/python/altgraph/doc/_build/html/_sources/core.txt
new file mode 100644
index 000000000..8288f6a94
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/core.txt
@@ -0,0 +1,26 @@
+:mod:`altgraph` --- A Python Graph Library
+==================================================
+
+.. module:: altgraph
+ :synopsis: A directional graph for python
+
+altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
+to use newer Python 2.3+ features, including additional support used by the
+py2app suite (modulegraph and macholib, specifically).
+
+altgraph is a Python-based graph (network) representation and manipulation package.
+It started out as an extension to the `graph_lib module <http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
+written by Nathan Denny and has since been significantly optimized and expanded.
+
+The :class:`altgraph.Graph.Graph` class is loosely modeled after the `LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
+(Library of Efficient Datatypes) representation. The library
+includes methods for constructing graphs, BFS and DFS traversals,
+topological sort, finding connected components, and shortest paths, as well as
+a number of graph statistics functions. The library can also visualize graphs
+via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
+
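+A minimal sketch of typical use (method names as exercised by the test
+suite in this patch)::
+
+    from altgraph.Graph import Graph
+
+    graph = Graph([(1, 2), (2, 3), (1, 3)])
+    assert graph.number_of_nodes() == 3
+    print(graph.forw_bfs(1))   # breadth-first node order starting at node 1
+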
+
+.. exception:: GraphError
+
+   Exception raised when methods are called with bad values or
+   on a graph in an inconsistent state.
diff --git a/python/altgraph/doc/_build/html/_sources/dot.txt b/python/altgraph/doc/_build/html/_sources/dot.txt
new file mode 100644
index 000000000..3848c488a
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/dot.txt
@@ -0,0 +1,224 @@
+:mod:`altgraph.Dot` --- Interface to the dot language
+=====================================================
+
+.. module:: altgraph.Dot
+   :synopsis: Interface to the dot language as used by Graphviz.
+
+The :py:mod:`~altgraph.Dot` module provides a simple interface to the
+file format used in the `graphviz`_ program. The module is intended to
+offload the most tedious part of the process (the **dot** file generation)
+while transparently exposing most of its features.
+
+.. _graphviz: http://www.research.att.com/sw/tools/graphviz/
+
+To display the graphs or to generate image files the `graphviz`_
+package needs to be installed on the system; moreover, the :command:`dot` and :command:`dotty` programs must
+be accessible on the program path so that they can be run from processes spawned
+within the module.
+
+Example usage
+-------------
+
+Here is a typical usage::
+
+ from altgraph import Graph, Dot
+
+ # create a graph
+ edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
+ graph = Graph.Graph(edges)
+
+ # create a dot representation of the graph
+ dot = Dot.Dot(graph)
+
+ # display the graph
+ dot.display()
+
+ # save the dot representation into the mydot.dot file
+ dot.save_dot(file_name='mydot.dot')
+
+ # save dot file as gif image into the graph.gif file
+ dot.save_img(file_name='graph', file_type='gif')
+
+
+Directed graph and non-directed graph
+-------------------------------------
+
+The Dot class can be used for both directed and non-directed graphs
+by passing the *graphtype* parameter.
+
+Example::
+
+    # create a directed graph (default)
+    dot = Dot.Dot(graph, graphtype="digraph")
+
+    # create a non-directed graph
+ dot = Dot.Dot(graph, graphtype="graph")
+
+
+Customizing the output
+----------------------
+
+The graph drawing process may be customized by passing
+valid :command:`dot` parameters for the nodes and edges. For a list of all
+parameters see the `graphviz`_ documentation.
+
+Example::
+
+ # customizing the way the overall graph is drawn
+ dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
+
+ # customizing node drawing
+ dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
+ dot.node_style(2, style='filled', fillcolor='red')
+
+ # customizing edge drawing
+ dot.edge_style(1, 2, style='dotted')
+ dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
+ dot.edge_style(4, 5, arrowsize=2, style='bold')
+
+
+.. note::
+
+   dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
+   display all graphics styles. To verify the output save it to an image
+   file and look at it that way.
+
+Valid attributes
+----------------
+
+- dot styles, passed via the :py:meth:`Dot.style` method::
+
+ rankdir = 'LR' (draws the graph horizontally, left to right)
+ ranksep = number (rank separation in inches)
+
+- node attributes, passed via the :py:meth:`Dot.node_style` method::
+
+ style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
+ shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
+
+- edge attributes, passed via the :py:meth:`Dot.edge_style` method::
+
+ style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
+ arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee'
+ weight = number (the larger the number the closer the nodes will be)
+
+- valid `graphviz colors <http://www.research.att.com/~erg/graphviz/info/colors.html>`_
+
+- for more details on how to control the graph drawing process see the
+ `graphviz reference <http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
+
+
+Class interface
+---------------
+
+.. class:: Dot(graph[, nodes[, edgefn[, nodevisitor[, edgevisitor[, name[, dot[, dotty[, neato[, graphtype]]]]]]]]])
+
+ Creates a new Dot generator based on the specified
+ :class:`Graph <altgraph.Graph.Graph>`. The Dot generator won't reference
+ the *graph* once it is constructed.
+
+ If the *nodes* argument is present it is the list of nodes to include
+ in the graph, otherwise all nodes in *graph* are included.
+
+   If the *edgefn* argument is present it is a function that yields the
+   nodes connected to a given node; this defaults to
+   :meth:`graph.out_nbrs <altgraph.Graph.Graph.out_nbrs>`. The constructor won't
+ add edges to the dot file unless both the head and tail of the edge
+ are in *nodes*.
+
+ If the *name* is present it specifies the name of the graph in the resulting
+ dot file. The default is ``"G"``.
+
+ The functions *nodevisitor* and *edgevisitor* return the default style
+ for a given edge or node (both default to functions that return an empty
+ style).
+
+ The arguments *dot*, *dotty* and *neato* are used to pass the path to
+ the corresponding `graphviz`_ command.
+
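+   A sketch of a customized generator (the binary paths below are
+   illustrative, mirroring ``test_constructor`` in this patch)::
+
+      dot = Dot.Dot(graph, name="testgraph",
+                    dot='/usr/local/bin/dot',
+                    dotty='/usr/local/bin/dotty',
+                    neato='/usr/local/bin/neato',
+                    graphtype="graph")
+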
+
+Updating graph attributes
+.........................
+
+.. method:: Dot.style(\**attr)
+
+ Sets the overall style (graph attributes) to the given attributes.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
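+   For example, repeated calls replace the graph attributes rather than
+   merging them (the behaviour exercised by ``test_style`` in this patch)::
+
+      dot.style(key='value')     # dot.attr == {'key': 'value'}
+      dot.style(key2='value2')   # dot.attr == {'key2': 'value2'}; 'key' is gone
+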
+.. method:: Dot.node_style(node, \**attr)
+
+ Sets the style for *node* to the given attributes.
+
+ This method will add *node* to the graph when it isn't already
+ present.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
+.. method:: Dot.all_node_style(\**attr)
+
+   Replaces the current style for all nodes.
+
+
+.. method:: Dot.edge_style(head, tail, \**attr)
+
+ Sets the style of an edge to the given attributes. The edge will
+ be added to the graph when it isn't already present, but *head*
+ and *tail* must both be valid nodes.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
+
+
+Emitting output
+...............
+
+.. method:: Dot.display([mode])
+
+ Displays the current graph via dotty.
+
+ If the *mode* is ``"neato"`` the dot file is processed with
+ the neato command before displaying.
+
+ This method won't return until the dotty command exits.
+
+.. method:: Dot.save_dot(filename)
+
+ Saves the current graph representation into the given file.
+
+ .. note::
+
+      For backward compatibility reasons this method can also
+      be called without an argument; it will then write the graph
+      into a fixed filename (present in the attribute :data:`Graph.temp_dot`).
+
+ This feature is deprecated and should not be used.
+
+
+.. method:: Dot.save_image(file_name[, file_type[, mode]])
+
+ Saves the current graph representation as an image file. The output
+ is written into a file whose basename is *file_name* and whose suffix
+ is *file_type*.
+
+   The *file_type* specifies the type of file to write; the default
+   is ``"gif"``.
+
+   If the *mode* is ``"neato"`` the dot file is processed with
+   the neato command before the image is written.
+
+ .. note::
+
+      For backward compatibility reasons this method can also
+      be called without an argument; it will then write the graph
+      with a fixed basename (``"out"``).
+
+ This feature is deprecated and should not be used.
+
+.. method:: Dot.iterdot()
+
+ Yields all lines of a `graphviz`_ input file (including line endings).
+
+.. method:: Dot.__iter__()
+
+ Alias for the :meth:`iterdot` method.
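+
+Example (a minimal sketch, assuming *dot* is a :class:`Dot` instance
+constructed as above; the file name is illustrative)::
+
+    # write the generated graphviz input to a file, line by line
+    fp = open('graph.dot', 'w')
+    for line in dot.iterdot():
+        fp.write(line)
+    fp.close()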
diff --git a/python/altgraph/doc/_build/html/_sources/graph.txt b/python/altgraph/doc/_build/html/_sources/graph.txt
new file mode 100644
index 000000000..72e36bbc1
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/graph.txt
@@ -0,0 +1,299 @@
+:mod:`altgraph.Graph` --- Basic directional graphs
+==================================================
+
+.. module:: altgraph.Graph
+ :synopsis: Basic directional graphs.
+
+The module :mod:`altgraph.Graph` provides a class :class:`Graph` that
+represents a directed graph with *N* nodes and *E* edges.
+
+.. class:: Graph([edges])
+
+ Constructs a new empty :class:`Graph` object. If the optional
+ *edges* parameter is supplied, updates the graph by adding the
+ specified edges.
+
+   All of the elements in *edges* should be tuples with two or three
+   elements. The first two elements of the tuple are the source and
+   destination node of the edge; the optional third element is the
+   edge data. The source and destination nodes are added to the graph
+   when they aren't already present.
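+
+   Example (a minimal sketch)::
+
+       from altgraph import Graph
+
+       # two plain edges plus one edge carrying edge data
+       graph = Graph.Graph([(1, 2), (2, 3), (3, 1, 'back')])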
+
+
+Node related methods
+--------------------
+
+.. method:: Graph.add_node(node[, node_data])
+
+ Adds a new node to the graph if it is not already present. The new
+ node must be a hashable object.
+
+ Arbitrary data can be attached to the node via the optional *node_data*
+ argument.
+
+   .. note:: the node also won't be added to the graph when it is
+      already present but currently hidden.
+
+
+.. method:: Graph.hide_node(node)
+
+ Hides a *node* from the graph. The incoming and outgoing edges of
+ the node will also be hidden.
+
+   Raises :class:`altgraph.GraphError` when the node is not a (visible)
+   node of the graph.
+
+
+.. method:: Graph.restore_node(node)
+
+ Restores a previously hidden *node*. The incoming and outgoing
+ edges of the node are also restored.
+
+ Raises :class:`altgraph.GraphError` when the node is not a hidden
+ node of the graph.
+
+.. method:: Graph.restore_all_nodes()
+
+ Restores all hidden nodes.
+
+.. method:: Graph.number_of_nodes()
+
+ Return the number of visible nodes in the graph.
+
+.. method:: Graph.number_of_hidden_nodes()
+
+ Return the number of hidden nodes in the graph.
+
+.. method:: Graph.node_list()
+
+ Return a list with all visible nodes in the graph.
+
+.. method:: Graph.hidden_node_list()
+
+ Return a list with all hidden nodes in the graph.
+
+.. method:: Graph.node_data(node)
+
+ Return the data associated with the *node* when it was
+ added.
+
+.. method:: Graph.describe_node(node)
+
+ Returns *node*, the node's data and the lists of outgoing
+ and incoming edges for the node.
+
+ .. note::
+
+      the edge lists should not be modified; doing so
+      can result in unpredictable behavior.
+
+.. method:: Graph.__contains__(node)
+
+ Returns True iff *node* is a node in the graph. This
+ method is accessed through the *in* operator.
+
+.. method:: Graph.__iter__()
+
+ Yield all nodes in the graph.
+
+.. method:: Graph.out_edges(node)
+
+   Return the list of outgoing edges for *node*.
+
+.. method:: Graph.inc_edges(node)
+
+   Return the list of incoming edges for *node*.
+
+.. method:: Graph.all_edges(node)
+
+   Return the list of incoming and outgoing edges for *node*.
+
+.. method:: Graph.out_degree(node)
+
+ Return the number of outgoing edges for *node*.
+
+.. method:: Graph.inc_degree(node)
+
+ Return the number of incoming edges for *node*.
+
+.. method:: Graph.all_degree(node)
+
+ Return the number of edges (incoming or outgoing) for *node*.
+
+Edge related methods
+--------------------
+
+.. method:: Graph.add_edge(head_id, tail_id[, edge_data[, create_nodes]])
+
+   Adds a directed edge from *head_id* to *tail_id*. Arbitrary data can
+   be attached via *edge_data*. When *create_nodes* is *True* (the default),
+   *head_id* and *tail_id* will be added to the graph when they aren't
+   already present.
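+
+   Example (a minimal sketch, reusing the *graph* built earlier)::
+
+       graph.add_edge(1, 4, edge_data='spam')
+       eid = graph.edge_by_node(1, 4)       # look the new edge up again
+       assert graph.edge_data(eid) == 'spam'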
+
+.. method:: Graph.hide_edge(edge)
+
+ Hides an edge from the graph. The edge may be unhidden at some later
+ time.
+
+.. method:: Graph.restore_edge(edge)
+
+ Restores a previously hidden *edge*.
+
+.. method:: Graph.restore_all_edges()
+
+ Restore all edges that were hidden before, except for edges
+ referring to hidden nodes.
+
+.. method:: Graph.edge_by_node(head, tail)
+
+ Return the edge ID for an edge from *head* to *tail*,
+ or :data:`None` when no such edge exists.
+
+.. method:: Graph.edge_by_id(edge)
+
+   Return the head and tail of the *edge*.
+
+.. method:: Graph.edge_data(edge)
+
+ Return the data associated with the *edge*.
+
+
+.. method:: Graph.head(edge)
+
+   Return the head of an *edge*.
+
+.. method:: Graph.tail(edge)
+
+   Return the tail of an *edge*.
+
+.. method:: Graph.describe_edge(edge)
+
+ Return the *edge*, the associated data, its head and tail.
+
+.. method:: Graph.number_of_edges()
+
+ Return the number of visible edges.
+
+.. method:: Graph.number_of_hidden_edges()
+
+ Return the number of hidden edges.
+
+.. method:: Graph.edge_list()
+
+ Returns a list with all visible edges in the graph.
+
+.. method:: Graph.hidden_edge_list()
+
+ Returns a list with all hidden edges in the graph.
+
+Graph traversal
+---------------
+
+.. method:: Graph.out_nbrs(node)
+
+   Return a list of all nodes connected to *node* by outgoing edges.
+
+.. method:: Graph.inc_nbrs(node)
+
+   Return a list of all nodes connected to *node* by incoming edges.
+
+.. method:: Graph.all_nbrs(node)
+
+   Returns a list of nodes connected to *node* by an incoming or outgoing edge.
+
+.. method:: Graph.forw_topo_sort()
+
+   Return a list of nodes where the successors (based on outgoing
+   edges) of any given node appear in the sequence after that node.
+
+.. method:: Graph.back_topo_sort()
+
+   Return a list of nodes where the successors (based on incoming
+   edges) of any given node appear in the sequence after that node.
+
+.. method:: Graph.forw_bfs_subgraph(start_id)
+
+ Return a subgraph consisting of the breadth first
+ reachable nodes from *start_id* based on their outgoing edges.
+
+
+.. method:: Graph.back_bfs_subgraph(start_id)
+
+ Return a subgraph consisting of the breadth first
+ reachable nodes from *start_id* based on their incoming edges.
+
+.. method:: Graph.iterdfs(start[, end[, forward]])
+
+ Yield nodes in a depth first traversal starting at the *start*
+ node.
+
+ If *end* is specified traversal stops when reaching that node.
+
+   If *forward* is True (the default) edges are traversed in the forward
+   direction, otherwise they are traversed in the reverse direction.
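+
+   Example (a minimal sketch; the exact visiting order may vary)::
+
+       graph = Graph.Graph([(1, 2), (2, 3), (1, 4)])
+       nodes = list(graph.iterdfs(1))    # for instance [1, 4, 2, 3]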
+
+.. method:: Graph.iterdata(start[, end[, forward[, condition]]])
+
+ Yield the associated data for nodes in a depth first traversal
+ starting at the *start* node. This method will not yield values for nodes
+ without associated data.
+
+ If *end* is specified traversal stops when reaching that node.
+
+ If *condition* is specified and the condition callable returns
+ False for the associated data this method will not yield the
+ associated data and will not follow the edges for the node.
+
+   If *forward* is True (the default) edges are traversed in the forward
+   direction, otherwise they are traversed in the reverse direction.
+
+.. method:: Graph.forw_bfs(start[, end])
+
+   Returns a list of nodes starting at *start* in some breadth first
+   search order (following outgoing edges).
+
+ When *end* is specified iteration stops at that node.
+
+.. method:: Graph.back_bfs(start[, end])
+
+   Returns a list of nodes starting at *start* in some breadth first
+   search order (following incoming edges).
+
+ When *end* is specified iteration stops at that node.
+
+.. method:: Graph.get_hops(start[, end[, forward]])
+
+ Computes the hop distance to all nodes centered around a specified node.
+
+   First order neighbours are at hop 1, their neighbours are at hop 2, etc.
+   Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of
+   the *forward* parameter.
+
+ If the distance between all neighbouring nodes is 1 the hop number
+ corresponds to the shortest distance between the nodes.
+
+ Typical usage::
+
+      >>> print graph.get_hops(1, 8)
+      [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+ # node 1 is at 0 hops
+ # node 2 is at 1 hop
+ # ...
+ # node 8 is at 5 hops
+
+
+Graph statistics
+----------------
+
+.. method:: Graph.connected()
+
+ Returns True iff every node in the graph can be reached from
+ every other node.
+
+.. method:: Graph.clust_coef(node)
+
+ Returns the local clustering coefficient of node.
+
+   The local clustering coefficient is the ratio between the actual number
+   of edges between the neighbours of *node* and the maximum possible number
+   of edges between those neighbours.
diff --git a/python/altgraph/doc/_build/html/_sources/graphalgo.txt b/python/altgraph/doc/_build/html/_sources/graphalgo.txt
new file mode 100644
index 000000000..84d492f44
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/graphalgo.txt
@@ -0,0 +1,26 @@
+:mod:`altgraph.GraphAlgo` --- Graph algorithms
+==================================================
+
+.. module:: altgraph.GraphAlgo
+   :synopsis: Basic graph algorithms
+
+.. function:: dijkstra(graph, start[, end])
+
+ Dijkstra's algorithm for shortest paths.
+
+   Find shortest paths from the *start* node to all nodes nearer than or
+   equal to the *end* node. The edge data is assumed to be the edge length.
+
+ .. note::
+
+ Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive.
+ This code does not verify this property for all edges (only the edges examined until the end
+ vertex is reached), but will correctly compute shortest paths even for some graphs with negative
+ edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake.
+
+
+.. function:: shortest_path(graph, start, end)
+
+ Find a single shortest path from the given start node to the given end node.
+ The input has the same conventions as :func:`dijkstra`. The output is a list
+ of the nodes in order along the shortest path.
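+
+Example (a minimal sketch; the numeric edge data serves as the edge
+length)::
+
+    from altgraph import Graph, GraphAlgo
+
+    graph = Graph.Graph([(1, 2, 1.0), (2, 3, 1.0), (1, 3, 5.0)])
+    GraphAlgo.shortest_path(graph, 1, 3)    # -> [1, 2, 3]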
diff --git a/python/altgraph/doc/_build/html/_sources/graphstat.txt b/python/altgraph/doc/_build/html/_sources/graphstat.txt
new file mode 100644
index 000000000..0931a12dd
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/graphstat.txt
@@ -0,0 +1,25 @@
+:mod:`altgraph.GraphStat` --- Functions providing various graph statistics
+==========================================================================
+
+.. module:: altgraph.GraphStat
+ :synopsis: Functions providing various graph statistics
+
+The module :mod:`altgraph.GraphStat` provides functions that calculate
+graph statistics. Currently there is only one such function; more may
+be added later.
+
+.. function:: degree_dist(graph[, limits[, bin_num[, mode]]])
+
+ Groups the number of edges per node into *bin_num* bins
+ and returns the list of those bins. Every item in the result
+ is a tuple with the center of the bin and the number of items
+ in that bin.
+
+   When the *limits* argument is present it must be a tuple with
+   the minimum and maximum number of edges that get binned (that
+   is, when *limits* is ``(4, 10)`` only nodes with between 4
+   and 10 edges get counted).
+
+ The *mode* argument is used to count incoming (``'inc'``) or
+ outgoing (``'out'``) edges. The default is to count the outgoing
+ edges.
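+
+Example (a minimal sketch)::
+
+    from altgraph import Graph, GraphStat
+
+    graph = Graph.Graph([(1, 2), (1, 3), (1, 4), (2, 3)])
+    # bin the outgoing-edge counts into three bins
+    bins = GraphStat.degree_dist(graph, bin_num=3)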
diff --git a/python/altgraph/doc/_build/html/_sources/graphutil.txt b/python/altgraph/doc/_build/html/_sources/graphutil.txt
new file mode 100644
index 000000000..c07836df8
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/graphutil.txt
@@ -0,0 +1,55 @@
+:mod:`altgraph.GraphUtil` --- Utility functions
+================================================
+
+.. module:: altgraph.GraphUtil
+ :synopsis: Utility functions
+
+The module :mod:`altgraph.GraphUtil` provides a number of more
+or less useful utility functions.
+
+.. function:: generate_random_graph(node_num, edge_num[, self_loops[, multi_edges]])
+
+ Generates and returns a :class:`Graph <altgraph.Graph.Graph>` instance
+ with *node_num* nodes randomly connected by *edge_num* edges.
+
+ When *self_loops* is present and True there can be edges that point from
+ a node to itself.
+
+   When *multi_edges* is present and True there can be duplicate edges.
+
+   This function raises :class:`GraphError <altgraph.GraphError>` when
+   a graph with the requested configuration cannot be created.
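+
+   Example (a minimal sketch)::
+
+       from altgraph import GraphUtil
+
+       # 10 nodes, randomly connected by 20 edges
+       graph = GraphUtil.generate_random_graph(10, 20)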
+
+.. function:: generate_scale_free_graph(steps, growth_num[, self_loops[, multi_edges]])
+
+   Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
+   will have ``steps * growth_num`` nodes and a scale free (power law)
+   connectivity.
+
+   Starting with a fully connected graph of *growth_num* nodes, at every
+   step *growth_num* nodes are added to the graph and connected to existing
+   nodes with a probability proportional to the degree of those existing
+   nodes.
+
+   .. warning:: The current implementation is basically untested, although
+      code inspection seems to indicate an implementation that is consistent
+      with the description at
+      `Wolfram MathWorld <http://mathworld.wolfram.com/Scale-FreeNetwork.html>`_.
+
+.. function:: filter_stack(graph, head, filters)
+
+   Perform a depth-first order walk of the graph starting at *head* and
+   apply all filter functions in *filters* on the node data of the nodes
+   found.
+
+ Returns (*visited*, *removes*, *orphans*), where
+
+ * *visited*: the set of visited nodes
+
+ * *removes*: the list of nodes where the node data doesn't match
+ all *filters*.
+
+   * *orphans*: a list of tuples (*last_good*, *node*), where *node* is
+     not in *removes* but one of the nodes connected to it by an incoming
+     edge is in *removes*; *last_good* is the closest upstream node that
+     is not in *removes*.
diff --git a/python/altgraph/doc/_build/html/_sources/index.txt b/python/altgraph/doc/_build/html/_sources/index.txt
new file mode 100644
index 000000000..1e8d504ed
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/index.txt
@@ -0,0 +1,41 @@
+.. altgraph documentation master file, created by
+ sphinx-quickstart on Tue Aug 31 11:04:49 2010.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Altgraph - A basic graph library
+================================
+
+altgraph is a fork of graphlib: a graph (network) package for constructing
+graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with
+graphviz output.
+
+The primary users of this package are `macholib <http://pypi.python.org/pypi/macholib>`_ and `modulegraph <http://pypi.python.org/pypi/modulegraph>`_.
+
+.. toctree::
+ :maxdepth: 1
+
+ changelog
+ license
+ core
+ graph
+ objectgraph
+ graphalgo
+ graphstat
+ graphutil
+ dot
+
+Online Resources
+----------------
+
+* `Source code repository on bitbucket <http://bitbucket.org/ronaldoussoren/altgraph/>`_
+
+* `The issue tracker <http://bitbucket.org/ronaldoussoren/altgraph/issues>`_
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/python/altgraph/doc/_build/html/_sources/license.txt b/python/altgraph/doc/_build/html/_sources/license.txt
new file mode 100644
index 000000000..498e60be0
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/license.txt
@@ -0,0 +1,25 @@
+License
+=======
+
+Copyright (c) 2004 Istvan Albert unless otherwise noted.
+
+Parts are copyright (c) Bob Ippolito
+
+Parts are copyright (c) 2010-2014 Ronald Oussoren
+
+MIT License
+...........
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/python/altgraph/doc/_build/html/_sources/objectgraph.txt b/python/altgraph/doc/_build/html/_sources/objectgraph.txt
new file mode 100644
index 000000000..87485255f
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_sources/objectgraph.txt
@@ -0,0 +1,134 @@
+:mod:`altgraph.ObjectGraph` --- Graphs of objects with an identifier
+======================================================================
+
+.. module:: altgraph.ObjectGraph
+ :synopsis: A graph of objects that have a "graphident" attribute.
+
+.. class:: ObjectGraph([graph[, debug]])
+
+ A graph of objects that have a "graphident" attribute. The
+ value of this attribute is the key for the object in the
+ graph.
+
+ The optional *graph* is a previously constructed
+ :class:`Graph <altgraph.Graph.Graph>`.
+
+ The optional *debug* level controls the amount of debug output
+ (see :meth:`msg`, :meth:`msgin` and :meth:`msgout`).
+
+   .. note:: the altgraph library does not generate output; the
+      debug attribute and message methods are present for use
+      by subclasses.
+
+.. data:: ObjectGraph.graph
+
+   A :class:`Graph <altgraph.Graph.Graph>` object that contains
+   the graph data.
+
+
+.. method:: ObjectGraph.addNode(node)
+
+ Adds a *node* to the graph.
+
+   .. note:: re-adding a node that was previously removed
+      using :meth:`removeNode` will reinstate that node.
+
+.. method:: ObjectGraph.createNode(cls, name, \*args, \**kwds)
+
+ Creates a new node using ``cls(*args, **kwds)`` and adds that
+ node using :meth:`addNode`.
+
+ Returns the newly created node.
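+
+   Example (a minimal sketch; ``Node`` is a hypothetical class whose
+   instances carry the required *graphident* attribute)::
+
+       from altgraph.ObjectGraph import ObjectGraph
+
+       class Node(object):
+           def __init__(self, ident):
+               self.graphident = ident
+
+       og = ObjectGraph()
+       node = og.createNode(Node, 'spam')    # node.graphident == 'spam'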
+
+.. method:: ObjectGraph.removeNode(node)
+
+   Removes a *node* from the graph if it exists. The *node* argument
+   is either a node object or the graphident of a node.
+
+.. method:: ObjectGraph.createReference(fromnode, tonode[, edge_data])
+
+ Creates a reference from *fromnode* to *tonode*. The optional
+ *edge_data* is associated with the edge.
+
+ *Fromnode* and *tonode* can either be node objects or the graphident
+ values for nodes.
+
+.. method:: ObjectGraph.removeReference(fromnode, tonode)
+
+ Removes the reference from *fromnode* to *tonode* if it exists.
+
+.. method:: ObjectGraph.getRawIdent(node)
+
+ Returns the *graphident* attribute of *node*, or the graph itself
+ when *node* is :data:`None`.
+
+.. method:: ObjectGraph.getIdent(node)
+
+ Same as :meth:`getRawIdent`, but only if the node is part
+ of the graph.
+
+ *Node* can either be an actual node object or the graphident of
+ a node.
+
+.. method:: ObjectGraph.findNode(node)
+
+   Returns the given node in the graph, or :data:`None` when it cannot
+   be found.
+
+ *Node* is either an object with a *graphident* attribute or
+ the *graphident* attribute itself.
+
+.. method:: ObjectGraph.__contains__(node)
+
+ Returns True if *node* is a member of the graph. *Node* is either an
+ object with a *graphident* attribute or the *graphident* attribute itself.
+
+.. method:: ObjectGraph.flatten([condition[, start]])
+
+   Yield all nodes that are entirely reachable by *condition*,
+   starting from the given *start* node or the graph root.
+
+   .. note:: objects are only reachable from the graph root
+      when there is a reference from the root to the node
+      (either directly or through another node).
+
+.. method:: ObjectGraph.nodes()
+
+ Yield all nodes in the graph.
+
+.. method:: ObjectGraph.get_edges(node)
+
+   Returns two iterators that yield the nodes reached by
+   outgoing and incoming edges.
+
+.. method:: ObjectGraph.filterStack(filters)
+
+   Filter the ObjectGraph in-place by removing all edges to nodes that
+   do not match every filter in the given filter list.
+
+   Returns a tuple with the counts
+   (*nodes_visited*, *nodes_removed*, *nodes_orphaned*).
+
+
+Debug output
+------------
+
+.. data:: ObjectGraph.debug
+
+ The current debug level.
+
+.. method:: ObjectGraph.msg(level, text, \*args)
+
+   Print a debug message at the current indentation level when the current
+   debug level is *level* or higher.
+
+.. method:: ObjectGraph.msgin(level, text, \*args)
+
+   Print a debug message when the current debug level is *level* or higher,
+   and increase the indentation level.
+
+.. method:: ObjectGraph.msgout(level, text, \*args)
+
+   Decrease the indentation level and print a debug message when the
+   current debug level is *level* or higher.
diff --git a/python/altgraph/doc/_build/html/_static/ajax-loader.gif b/python/altgraph/doc/_build/html/_static/ajax-loader.gif
new file mode 100644
index 000000000..61faf8cab
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/ajax-loader.gif
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/basic.css b/python/altgraph/doc/_build/html/_static/basic.css
new file mode 100644
index 000000000..c959cf0db
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/basic.css
@@ -0,0 +1,537 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+ width: 100%;
+ font-size: 90%;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.sphinxsidebar ul {
+ list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+ width: 170px;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+ width: 30px;
+}
+
+img {
+ border: 0;
+ max-width: 100%;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+ width: 100%;
+}
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+div.modindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+ clear: left;
+ float: left;
+ margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+ clear: right;
+ float: right;
+ margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left;
+}
+
+.align-center {
+ text-align: center;
+}
+
+.align-right {
+ text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border: 0;
+ border-collapse: collapse;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 5px;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px;
+}
+
+table.citation td {
+ border-bottom: none;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+dt:target, .highlighted {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa;
+}
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+ font-family: sans-serif;
+}
+
+.accelerator {
+ text-decoration: underline;
+}
+
+.classifier {
+ font-style: oblique;
+}
+
+abbr, acronym {
+ border-bottom: dotted 1px;
+ cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ overflow-y: hidden; /* fixes display issues on Chrome browsers */
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0 !important;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+} \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/_static/comment-bright.png b/python/altgraph/doc/_build/html/_static/comment-bright.png
new file mode 100644
index 000000000..551517b8c
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/comment-bright.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/comment-close.png b/python/altgraph/doc/_build/html/_static/comment-close.png
new file mode 100644
index 000000000..09b54be46
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/comment-close.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/comment.png b/python/altgraph/doc/_build/html/_static/comment.png
new file mode 100644
index 000000000..92feb52b8
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/comment.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/doctools.js b/python/altgraph/doc/_build/html/_static/doctools.js
new file mode 100644
index 000000000..2036e5f5f
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/doctools.js
@@ -0,0 +1,238 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug like debugger
+if (!window.console || !console.firebug) {
+ var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+ "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+ "profile", "profileEnd"];
+ window.console = {};
+ for (var i = 0; i < names.length; ++i)
+ window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+ return decodeURIComponent(x).replace(/\+/g, ' ');
+};
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+ if (typeof s == 'undefined')
+ s = document.location.search;
+ var parts = s.substr(s.indexOf('?') + 1).split('&');
+ var result = {};
+ for (var i = 0; i < parts.length; i++) {
+ var tmp = parts[i].split('=', 2);
+ var key = jQuery.urldecode(tmp[0]);
+ var value = jQuery.urldecode(tmp[1]);
+ if (key in result)
+ result[key].push(value);
+ else
+ result[key] = [value];
+ }
+ return result;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+ function highlight(node) {
+ if (node.nodeType == 3) {
+ var val = node.nodeValue;
+ var pos = val.toLowerCase().indexOf(text);
+ if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
+ var span = document.createElement("span");
+ span.className = className;
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+ document.createTextNode(val.substr(pos + text.length)),
+ node.nextSibling));
+ node.nodeValue = val.substr(0, pos);
+ }
+ }
+ else if (!jQuery(node).is("button, select, textarea")) {
+ jQuery.each(node.childNodes, function() {
+ highlight(this);
+ });
+ }
+ }
+ return this.each(function() {
+ highlight(this);
+ });
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+ init : function() {
+ this.fixFirefoxAnchorBug();
+ this.highlightSearchWords();
+ this.initIndexTable();
+ },
+
+ /**
+ * i18n support
+ */
+ TRANSLATIONS : {},
+ PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+ LOCALE : 'unknown',
+
+ // gettext and ngettext don't access this so that the functions
+ // can safely bound to a different name (_ = Documentation.gettext)
+ gettext : function(string) {
+ var translated = Documentation.TRANSLATIONS[string];
+ if (typeof translated == 'undefined')
+ return string;
+ return (typeof translated == 'string') ? translated : translated[0];
+ },
+
+ ngettext : function(singular, plural, n) {
+ var translated = Documentation.TRANSLATIONS[singular];
+ if (typeof translated == 'undefined')
+ return (n == 1) ? singular : plural;
+    return translated[Documentation.PLURAL_EXPR(n)];
+ },
+
+ addTranslations : function(catalog) {
+ for (var key in catalog.messages)
+ this.TRANSLATIONS[key] = catalog.messages[key];
+ this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+ this.LOCALE = catalog.locale;
+ },
+
+ /**
+ * add context elements like header anchor links
+ */
+ addContextElements : function() {
+ $('div[id] > :header:first').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this headline')).
+ appendTo(this);
+ });
+ $('dt[id]').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this definition')).
+ appendTo(this);
+ });
+ },
+
+ /**
+ * workaround a firefox stupidity
+ */
+ fixFirefoxAnchorBug : function() {
+ if (document.location.hash && $.browser.mozilla)
+ window.setTimeout(function() {
+ document.location.href += '';
+ }, 10);
+ },
+
+ /**
+ * highlight the search words provided in the url in the text
+ */
+ highlightSearchWords : function() {
+ var params = $.getQueryParameters();
+ var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+ if (terms.length) {
+ var body = $('div.body');
+ if (!body.length) {
+ body = $('body');
+ }
+ window.setTimeout(function() {
+ $.each(terms, function() {
+ body.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ }, 10);
+ $('<p class="highlight-link"><a href="javascript:Documentation.' +
+ 'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+ .appendTo($('#searchbox'));
+ }
+ },
+
+ /**
+ * init the domain index toggle buttons
+ */
+ initIndexTable : function() {
+ var togglers = $('img.toggler').click(function() {
+ var src = $(this).attr('src');
+ var idnum = $(this).attr('id').substr(7);
+ $('tr.cg-' + idnum).toggle();
+ if (src.substr(-9) == 'minus.png')
+ $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+ else
+ $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+ }).css('display', '');
+ if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+ togglers.click();
+ }
+ },
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords : function() {
+ $('#searchbox .highlight-link').fadeOut(300);
+ $('span.highlighted').removeClass('highlighted');
+ },
+
+ /**
+ * make the url absolute
+ */
+ makeURL : function(relativeURL) {
+ return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+ },
+
+ /**
+ * get the current relative url
+ */
+ getCurrentURL : function() {
+ var path = document.location.pathname;
+ var parts = path.split(/\//);
+ $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+ if (this == '..')
+ parts.pop();
+ });
+ var url = parts.join('/');
+ return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+ }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+ Documentation.init();
+});
diff --git a/python/altgraph/doc/_build/html/_static/down-pressed.png b/python/altgraph/doc/_build/html/_static/down-pressed.png
new file mode 100644
index 000000000..6f7ad7827
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/down-pressed.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/down.png b/python/altgraph/doc/_build/html/_static/down.png
new file mode 100644
index 000000000..3003a8877
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/down.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/file.png b/python/altgraph/doc/_build/html/_static/file.png
new file mode 100644
index 000000000..d18082e39
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/file.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/jquery.js b/python/altgraph/doc/_build/html/_static/jquery.js
new file mode 100644
index 000000000..83589daa7
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/jquery.js
@@ -0,0 +1,2 @@
+/*! jQuery v1.8.3 jquery.com | jquery.org/license */
+(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r<i;r++)v.event.add(t,n,u[n][r])}o.data&&(o.data=v.extend({},o.data))}function Ot(e,t){var n;if(t.nodeType!==1)return;t.clearAttributes&&t.clearAttributes(),t.mergeAttributes&&t.mergeAttributes(e),n=t.nodeName.toLowerCase(),n==="object"?(t.parentNode&&(t.outerHTML=e.outerHTML),v.support.html5Clone&&e.innerHTML&&!v.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):n==="input"&&Et.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):n==="option"?t.selected=e.defaultSelected:n==="input"||n==="textarea"?t.defaultValue=e.defaultValue:n==="script"&&t.text!==e.text&&(t.text=e.text),t.removeAttribute(v.expando)}function Mt(e){return typeof e.getElementsByTagName!="undefined"?e.getElementsByTagName("*"):typeof e.querySelectorAll!="undefined"?e.querySelectorAll("*"):[]}function _t(e){Et.test(e.type)&&(e.defaultChecked=e.checked)}function Qt(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=Jt.length;while(i--){t=Jt[i]+n;if(t in e)return t}return r}function Gt(e,t){return e=t||e,v.css(e,"display")==="none"||!v.contains(e.ownerDocument,e)}function Yt(e,t){var n,r,i=[],s=0,o=e.length;for(;s<o;s++){n=e[s];if(!n.style)continue;i[s]=v._data(n,"olddisplay"),t?(!i[s]&&n.style.display==="none"&&(n.style.display=""),n.style.display===""&&Gt(n)&&(i[s]=v._data(n,"olddisplay",nn(n.nodeName)))):(r=Dt(n,"display"),!i[s]&&r!=="none"&&v._data(n,"olddisplay",r))}for(s=0;s<o;s++){n=e[s];if(!n.style)continue;if(!t||n.style.display==="none"||n.style.display==="")n.style.display=t?i[s]||"":"none"}return e}function Zt(e,t,n){var r=Rt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function en(e,t,n,r){var i=n===(r?"border":"content")?4:t==="width"?1:0,s=0;for(;i<4;i+=2)n==="margin"&&(s+=v.css(e,n+$t[i],!0)),r?(n==="content"&&(s-=parseFloat(Dt(e,"padding"+$t[i]))||0),n!=="margin"&&(s-=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0)):(s+=parseFloat(Dt(e,"padding"+$t[i]))||0,n!=="padding"&&(s+=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0));return s}function tn(e,t,n){var 
r=t==="width"?e.offsetWidth:e.offsetHeight,i=!0,s=v.support.boxSizing&&v.css(e,"boxSizing")==="border-box";if(r<=0||r==null){r=Dt(e,t);if(r<0||r==null)r=e.style[t];if(Ut.test(r))return r;i=s&&(v.support.boxSizingReliable||r===e.style[t]),r=parseFloat(r)||0}return r+en(e,t,n||(s?"border":"content"),i)+"px"}function nn(e){if(Wt[e])return Wt[e];var t=v("<"+e+">").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write("<!doctype html><html><body>"),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u<a;u++)r=o[u],s=/^\+/.test(r),s&&(r=r.substr(1)||"*"),i=e[r]=e[r]||[],i[s?"unshift":"push"](n)}}function kn(e,n,r,i,s,o){s=s||n.dataTypes[0],o=o||{},o[s]=!0;var u,a=e[s],f=0,l=a?a.length:0,c=e===Sn;for(;f<l&&(c||!u);f++)u=a[f](n,r,i),typeof u=="string"&&(!c||o[u]?u=t:(n.dataTypes.unshift(u),u=kn(e,n,r,i,u,o)));return(c||!u)&&!o["*"]&&(u=kn(e,n,r,i,"*",o)),u}function Ln(e,n){var r,i,s=v.ajaxSettings.flatOptions||{};for(r in n)n[r]!==t&&((s[r]?e:i||(i={}))[r]=n[r]);i&&v.extend(!0,e,i)}function An(e,n,r){var i,s,o,u,a=e.contents,f=e.dataTypes,l=e.responseFields;for(s in l)s in r&&(n[l[s]]=r[s]);while(f[0]==="*")f.shift(),i===t&&(i=e.mimeType||n.getResponseHeader("content-type"));if(i)for(s in a)if(a[s]&&a[s].test(i)){f.unshift(s);break}if(f[0]in r)o=f[0];else{for(s in r){if(!f[0]||e.converters[s+" "+f[0]]){o=s;break}u||(u=s)}o=o||u}if(o)return o!==f[0]&&f.unshift(o),r[o]}function On(e,t){var n,r,i,s,o=e.dataTypes.slice(),u=o[0],a={},f=0;e.dataFilter&&(t=e.dataFilter(t,e.dataType));if(o[1])for(n in e.converters)a[n.toLowerCase()]=e.converters[n];for(;i=o[++f];)if(i!=="*"){if(u!=="*"&&u!==i){n=a[u+" "+i]||a["* "+i];if(!n)for(r in a){s=r.split(" ");if(s[1]===i){n=a[u+" "+s[0]]||a["* "+s[0]];if(n){n===!0?n=a[r]:a[r]!==!0&&(i=s[0],o.splice(f--,0,i));break}}}if(n!==!0)if(n&&e["throws"])t=n(t);else try{t=n(t)}catch(l){return{state:"parsererror",error:n?l:"No conversion from "+u+" to "+i}}}u=i}return{state:"success",data:t}}function Fn(){try{return new e.XMLHttpRequest}catch(t){}}function In(){try{return new e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}function $n(){return setTimeout(function(){qn=t},0),qn=v.now()}function Jn(e,t){v.each(t,function(t,n){var r=(Vn[t]||[]).concat(Vn["*"]),i=0,s=r.length;for(;i<s;i++)if(r[i].call(e,t,n))return})}function Kn(e,t,n){var r,i=0,s=0,o=Xn.length,u=v.Deferred().always(function(){delete a.elem}),a=function(){var t=qn||$n(),n=Math.max(0,f.startTime+f.duration-t),r=n/f.duration||0,i=1-r,s=0,o=f.tweens.length;for(;s<o;s++)f.tweens[s].run(i);return u.notifyWith(e,[f,i,n]),i<1&&o?n:(u.resolveWith(e,[f]),!1)},f=u.promise({elem:e,props:v.extend({},t),opts:v.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:qn||$n(),duration:n.duration,tweens:[],createTween:function(t,n,r){var i=v.Tween(e,f.opts,t,n,f.opts.specialEasing[t]||f.opts.easing);return f.tweens.push(i),i},stop:function(t){var 
n=0,r=t?f.tweens.length:0;for(;n<r;n++)f.tweens[n].run(1);return t?u.resolveWith(e,[f,t]):u.rejectWith(e,[f,t]),this}}),l=f.props;Qn(l,f.opts.specialEasing);for(;i<o;i++){r=Xn[i].call(f,e,l,f.opts);if(r)return r}return Jn(f,l),v.isFunction(f.opts.start)&&f.opts.start.call(e,f),v.fx.timer(v.extend(a,{anim:f,queue:f.opts.queue,elem:e})),f.progress(f.opts.progress).done(f.opts.done,f.opts.complete).fail(f.opts.fail).always(f.opts.always)}function Qn(e,t){var n,r,i,s,o;for(n in e){r=v.camelCase(n),i=t[r],s=e[n],v.isArray(s)&&(i=s[1],s=e[n]=s[0]),n!==r&&(e[r]=s,delete e[n]),o=v.cssHooks[r];if(o&&"expand"in o){s=o.expand(s),delete e[r];for(n in s)n in e||(e[n]=s[n],t[n]=i)}else t[r]=i}}function Gn(e,t,n){var r,i,s,o,u,a,f,l,c,h=this,p=e.style,d={},m=[],g=e.nodeType&&Gt(e);n.queue||(l=v._queueHooks(e,"fx"),l.unqueued==null&&(l.unqueued=0,c=l.empty.fire,l.empty.fire=function(){l.unqueued||c()}),l.unqueued++,h.always(function(){h.always(function(){l.unqueued--,v.queue(e,"fx").length||l.empty.fire()})})),e.nodeType===1&&("height"in t||"width"in t)&&(n.overflow=[p.overflow,p.overflowX,p.overflowY],v.css(e,"display")==="inline"&&v.css(e,"float")==="none"&&(!v.support.inlineBlockNeedsLayout||nn(e.nodeName)==="inline"?p.display="inline-block":p.zoom=1)),n.overflow&&(p.overflow="hidden",v.support.shrinkWrapBlocks||h.done(function(){p.overflow=n.overflow[0],p.overflowX=n.overflow[1],p.overflowY=n.overflow[2]}));for(r in t){s=t[r];if(Un.exec(s)){delete t[r],a=a||s==="toggle";if(s===(g?"hide":"show"))continue;m.push(r)}}o=m.length;if(o){u=v._data(e,"fxshow")||v._data(e,"fxshow",{}),"hidden"in u&&(g=u.hidden),a&&(u.hidden=!g),g?v(e).show():h.done(function(){v(e).hide()}),h.done(function(){var t;v.removeData(e,"fxshow",!0);for(t in d)v.style(e,t,d[t])});for(r=0;r<o;r++)i=m[r],f=h.createTween(i,g?u[i]:0),d[i]=u[i]||v.style(e,i),i in u||(u[i]=f.start,g&&(f.end=f.start,f.start=i==="width"||i==="height"?1:0))}}function Yn(e,t,n,r,i){return new Yn.prototype.init(e,t,n,r,i)}function Zn(e,t){var n,r={height:e},i=0;t=t?1:0;for(;i<4;i+=2-t)n=$t[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}function tr(e){return v.isWindow(e)?e:e.nodeType===9?e.defaultView||e.parentWindow:!1}var n,r,i=e.document,s=e.location,o=e.navigator,u=e.jQuery,a=e.$,f=Array.prototype.push,l=Array.prototype.slice,c=Array.prototype.indexOf,h=Object.prototype.toString,p=Object.prototype.hasOwnProperty,d=String.prototype.trim,v=function(e,t){return new v.fn.init(e,t,n)},m=/[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source,g=/\S/,y=/\s+/,b=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,w=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof 
[Elided: jQuery v1.8.3, minified. This is vendored third-party JavaScript bundled as a static asset of the generated HTML documentation, not hand-written project code; the machine-generated minified source is omitted.]
this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in 
Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r<i;r++)n=e[r],Vn[n]=Vn[n]||[],Vn[n].unshift(t)},prefilter:function(e,t){t?Xn.unshift(e):Xn.push(e)}}),v.Tween=Yn,Yn.prototype={constructor:Yn,init:function(e,t,n,r,i,s){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=s||(v.cssNumber[n]?"":"px")},cur:function(){var e=Yn.propHooks[this.prop];return e&&e.get?e.get(this):Yn.propHooks._default.get(this)},run:function(e){var t,n=Yn.propHooks[this.prop];return this.options.duration?this.pos=t=v.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):Yn.propHooks._default.set(this),this}},Yn.prototype.init.prototype=Yn.prototype,Yn.propHooks={_default:{get:function(e){var t;return e.elem[e.prop]==null||!!e.elem.style&&e.elem.style[e.prop]!=null?(t=v.css(e.elem,e.prop,!1,""),!t||t==="auto"?0:t):e.elem[e.prop]},set:function(e){v.fx.step[e.prop]?v.fx.step[e.prop](e):e.elem.style&&(e.elem.style[v.cssProps[e.prop]]!=null||v.cssHooks[e.prop])?v.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},Yn.propHooks.scrollTop=Yn.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},v.each(["toggle","show","hide"],function(e,t){var n=v.fn[t];v.fn[t]=function(r,i,s){return r==null||typeof r=="boolean"||!e&&v.isFunction(r)&&v.isFunction(i)?n.apply(this,arguments):this.animate(Zn(t,!0),r,i,s)}}),v.fn.extend({fadeTo:function(e,t,n,r){return this.filter(Gt).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var 
i=v.isEmptyObject(e),s=v.speed(t,n,r),o=function(){var t=Kn(this,v.extend({},e),s);i&&t.stop(!0)};return i||s.queue===!1?this.each(o):this.queue(s.queue,o)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return typeof e!="string"&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=e!=null&&e+"queueHooks",s=v.timers,o=v._data(this);if(n)o[n]&&o[n].stop&&i(o[n]);else for(n in o)o[n]&&o[n].stop&&Wn.test(n)&&i(o[n]);for(n=s.length;n--;)s[n].elem===this&&(e==null||s[n].queue===e)&&(s[n].anim.stop(r),t=!1,s.splice(n,1));(t||!r)&&v.dequeue(this,e)})}}),v.each({slideDown:Zn("show"),slideUp:Zn("hide"),slideToggle:Zn("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){v.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),v.speed=function(e,t,n){var r=e&&typeof e=="object"?v.extend({},e):{complete:n||!n&&t||v.isFunction(e)&&e,duration:e,easing:n&&t||t&&!v.isFunction(t)&&t};r.duration=v.fx.off?0:typeof r.duration=="number"?r.duration:r.duration in v.fx.speeds?v.fx.speeds[r.duration]:v.fx.speeds._default;if(r.queue==null||r.queue===!0)r.queue="fx";return r.old=r.complete,r.complete=function(){v.isFunction(r.old)&&r.old.call(this),r.queue&&v.dequeue(this,r.queue)},r},v.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},v.timers=[],v.fx=Yn.prototype.init,v.fx.tick=function(){var e,n=v.timers,r=0;qn=v.now();for(;r<n.length;r++)e=n[r],!e()&&n[r]===e&&n.splice(r--,1);n.length||v.fx.stop(),qn=t},v.fx.timer=function(e){e()&&v.timers.push(e)&&!Rn&&(Rn=setInterval(v.fx.tick,v.fx.interval))},v.fx.interval=13,v.fx.stop=function(){clearInterval(Rn),Rn=null},v.fx.speeds={slow:600,fast:200,_default:400},v.fx.step={},v.expr&&v.expr.filters&&(v.expr.filters.animated=function(e){return v.grep(v.timers,function(t){return e===t.elem}).length});var er=/^(?:body|html)$/i;v.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){v.offset.setOffset(this,e,t)});var n,r,i,s,o,u,a,f={top:0,left:0},l=this[0],c=l&&l.ownerDocument;if(!c)return;return(r=c.body)===l?v.offset.bodyOffset(l):(n=c.documentElement,v.contains(n,l)?(typeof l.getBoundingClientRect!="undefined"&&(f=l.getBoundingClientRect()),i=tr(c),s=n.clientTop||r.clientTop||0,o=n.clientLeft||r.clientLeft||0,u=i.pageYOffset||n.scrollTop,a=i.pageXOffset||n.scrollLeft,{top:f.top+u-s,left:f.left+a-o}):f)},v.offset={bodyOffset:function(e){var t=e.offsetTop,n=e.offsetLeft;return v.support.doesNotIncludeMarginInBodyOffset&&(t+=parseFloat(v.css(e,"marginTop"))||0,n+=parseFloat(v.css(e,"marginLeft"))||0),{top:t,left:n}},setOffset:function(e,t,n){var r=v.css(e,"position");r==="static"&&(e.style.position="relative");var i=v(e),s=i.offset(),o=v.css(e,"top"),u=v.css(e,"left"),a=(r==="absolute"||r==="fixed")&&v.inArray("auto",[o,u])>-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var 
e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/_static/minus.png b/python/altgraph/doc/_build/html/_static/minus.png
new file mode 100644
index 000000000..da1c5620d
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/minus.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/nature.css b/python/altgraph/doc/_build/html/_static/nature.css
new file mode 100644
index 000000000..f46081870
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/nature.css
@@ -0,0 +1,245 @@
+/*
+ * nature.css_t
+ * ~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- nature theme.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: Arial, sans-serif;
+ font-size: 100%;
+ background-color: #111;
+ color: #555;
+ margin: 0;
+ padding: 0;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+hr {
+ border: 1px solid #B1B4B6;
+}
+
+div.document {
+ background-color: #eee;
+}
+
+div.body {
+ background-color: #ffffff;
+ color: #3E4349;
+ padding: 0 30px 30px 30px;
+ font-size: 0.9em;
+}
+
+div.footer {
+ color: #555;
+ width: 100%;
+ padding: 13px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #444;
+ text-decoration: underline;
+}
+
+div.related {
+ background-color: #6BA81E;
+ line-height: 32px;
+ color: #fff;
+ text-shadow: 0px 1px 0 #444;
+ font-size: 0.9em;
+}
+
+div.related a {
+ color: #E2F3CC;
+}
+
+div.sphinxsidebar {
+ font-size: 0.75em;
+ line-height: 1.5em;
+}
+
+div.sphinxsidebarwrapper{
+ padding: 20px 0;
+}
+
+div.sphinxsidebar h3,
+div.sphinxsidebar h4 {
+ font-family: Arial, sans-serif;
+ color: #222;
+ font-size: 1.2em;
+ font-weight: normal;
+ margin: 0;
+ padding: 5px 10px;
+ background-color: #ddd;
+ text-shadow: 1px 1px 0 white
+}
+
+div.sphinxsidebar h4{
+ font-size: 1.1em;
+}
+
+div.sphinxsidebar h3 a {
+ color: #444;
+}
+
+
+div.sphinxsidebar p {
+ color: #888;
+ padding: 5px 20px;
+}
+
+div.sphinxsidebar p.topless {
+}
+
+div.sphinxsidebar ul {
+ margin: 10px 20px;
+ padding: 0;
+ color: #000;
+}
+
+div.sphinxsidebar a {
+ color: #444;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #ccc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar input[type=text]{
+ margin-left: 20px;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+a {
+ color: #005B81;
+ text-decoration: none;
+}
+
+a:hover {
+ color: #E32E00;
+ text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: Arial, sans-serif;
+ background-color: #BED4EB;
+ font-weight: normal;
+ color: #212224;
+ margin: 30px 0px 10px 0px;
+ padding: 5px 0 5px 10px;
+ text-shadow: 0px 1px 0 white
+}
+
+div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 150%; background-color: #C8D5E3; }
+div.body h3 { font-size: 120%; background-color: #D8DEE3; }
+div.body h4 { font-size: 110%; background-color: #D8DEE3; }
+div.body h5 { font-size: 100%; background-color: #D8DEE3; }
+div.body h6 { font-size: 100%; background-color: #D8DEE3; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ line-height: 1.5em;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.highlight{
+ background-color: white;
+}
+
+div.note {
+ background-color: #eee;
+ border: 1px solid #ccc;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.topic {
+ background-color: #eee;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+p.admonition-title {
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+pre {
+ padding: 10px;
+ background-color: White;
+ color: #222;
+ line-height: 1.2em;
+ border: 1px solid #C6C9CB;
+ font-size: 1.1em;
+ margin: 1.5em 0 1.5em 0;
+ -webkit-box-shadow: 1px 1px 1px #d8d8d8;
+ -moz-box-shadow: 1px 1px 1px #d8d8d8;
+}
+
+tt {
+ background-color: #ecf0f3;
+ color: #222;
+ /* padding: 1px 2px; */
+ font-size: 1.1em;
+ font-family: monospace;
+}
+
+.viewcode-back {
+ font-family: Arial, sans-serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+}
\ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/_static/plus.png b/python/altgraph/doc/_build/html/_static/plus.png
new file mode 100644
index 000000000..b3cb37425
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/plus.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/pygments.css b/python/altgraph/doc/_build/html/_static/pygments.css
new file mode 100644
index 000000000..d79caa151
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/pygments.css
@@ -0,0 +1,62 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #eeffcc; }
+.highlight .c { color: #408090; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #333333 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0044DD } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #208050 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #208050 } /* Literal.Number.Float */
+.highlight .mh { color: #208050 } /* Literal.Number.Hex */
+.highlight .mi { color: #208050 } /* Literal.Number.Integer */
+.highlight .mo { color: #208050 } /* Literal.Number.Oct */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/_static/searchtools.js b/python/altgraph/doc/_build/html/_static/searchtools.js
new file mode 100644
index 000000000..f5c7e5fee
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/searchtools.js
@@ -0,0 +1,622 @@
+/*
+ * searchtools.js_t
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
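+
+// Editor's note (not part of the original file): a minimal sketch of how the
+// Porter stemmer above is used; the expected outputs follow Porter's
+// published rules:
+//
+//   var stemmer = new Stemmer();
+//   stemmer.stemWord("relational");  // -> "relat"
+//   stemmer.stemWord("searching");   // -> "search"
+//
+// Query terms and indexed terms are both reduced this way, so "searching"
+// and "searched" resolve to the same index entry.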
+
+
+
+/**
+ * Simple result scoring code.
+ */
+var Scorer = {
+ // Implement the following function to further tweak the score for each result
+ // The function takes a result array [filename, title, anchor, descr, score]
+ // and returns the new score.
+ /*
+ score: function(result) {
+ return result[4];
+ },
+ */
+
+ // query matches the full name of an object
+ objNameMatch: 11,
+ // or matches in the last dotted part of the object name
+ objPartialMatch: 6,
+ // Additive scores depending on the priority of the object
+ objPrio: {0: 15, // used to be importantResults
+ 1: 5, // used to be objectResults
+ 2: -5}, // used to be unimportantResults
+ // Used when the priority is not in the mapping.
+ objPrioDefault: 0,
+
+ // query found in title
+ title: 15,
+ // query found in terms
+ term: 5
+};
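+
+// Editor's note (not part of the original file): the commented-out `score`
+// hook above can be defined to re-weight results; a sketch, assuming the
+// documented result layout [filename, title, anchor, descr, score]:
+//
+//   Scorer.score = function(result) {
+//     // e.g. demote hits from changelog pages by 5 points (hypothetical rule)
+//     return /changelog/.test(result[0]) ? result[4] - 5 : result[4];
+//   };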
+
+
+/**
+ * Search Module
+ */
+var Search = {
+
+ _index : null,
+ _queued_query : null,
+ _pulse_status : -1,
+
+ init : function() {
+ var params = $.getQueryParameters();
+ if (params.q) {
+ var query = params.q[0];
+ $('input[name="q"]')[0].value = query;
+ this.performSearch(query);
+ }
+ },
+
+ loadIndex : function(url) {
+ $.ajax({type: "GET", url: url, data: null,
+ dataType: "script", cache: true,
+ complete: function(jqxhr, textstatus) {
+ if (textstatus != "success") {
+ document.getElementById("searchindexloader").src = url;
+ }
+ }});
+ },
+
+ setIndex : function(index) {
+ var q;
+ this._index = index;
+ if ((q = this._queued_query) !== null) {
+ this._queued_query = null;
+ Search.query(q);
+ }
+ },
+
+ hasIndex : function() {
+ return this._index !== null;
+ },
+
+ deferQuery : function(query) {
+ this._queued_query = query;
+ },
+
+ stopPulse : function() {
+ this._pulse_status = 0;
+ },
+
+ startPulse : function() {
+ if (this._pulse_status >= 0)
+ return;
+ function pulse() {
+ var i;
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ var dotString = '';
+ for (i = 0; i < Search._pulse_status; i++)
+ dotString += '.';
+ Search.dots.text(dotString);
+ if (Search._pulse_status > -1)
+ window.setTimeout(pulse, 500);
+ }
+ pulse();
+ },
+
+ /**
+ * perform a search for something (or wait until index is loaded)
+ */
+ performSearch : function(query) {
+ // create the required interface elements
+ this.out = $('#search-results');
+ this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+ this.dots = $('<span></span>').appendTo(this.title);
+ this.status = $('<p style="display: none"></p>').appendTo(this.out);
+ this.output = $('<ul class="search"/>').appendTo(this.out);
+
+ $('#search-progress').text(_('Preparing search...'));
+ this.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (this.hasIndex())
+ this.query(query);
+ else
+ this.deferQuery(query);
+ },
+
+ /**
+ * execute search (requires search index to be loaded)
+ */
+ query : function(query) {
+ var i;
+ var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];
+
+ // stem the searchterms and add them to the correct list
+ var stemmer = new Stemmer();
+ var searchterms = [];
+ var excluded = [];
+ var hlterms = [];
+ var tmp = query.split(/\s+/);
+ var objectterms = [];
+ for (i = 0; i < tmp.length; i++) {
+ if (tmp[i] !== "") {
+ objectterms.push(tmp[i].toLowerCase());
+ }
+
+ if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
+ tmp[i] === "") {
+ // skip this "word"
+ continue;
+ }
+ // stem the word
+ var word = stemmer.stemWord(tmp[i].toLowerCase());
+ var toAppend;
+ // select the correct list
+ if (word[0] == '-') {
+ toAppend = excluded;
+ word = word.substr(1);
+ }
+ else {
+ toAppend = searchterms;
+ hlterms.push(tmp[i].toLowerCase());
+ }
+ // only add if not already in the list
+ if (!$u.contains(toAppend, word))
+ toAppend.push(word);
+ }
+ var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+ // console.debug('SEARCH: searching for:');
+ // console.info('required: ', searchterms);
+ // console.info('excluded: ', excluded);
+
+ // prepare search
+ var terms = this._index.terms;
+ var titleterms = this._index.titleterms;
+
+ // array of [filename, title, anchor, descr, score]
+ var results = [];
+ $('#search-progress').empty();
+
+ // lookup as object
+ for (i = 0; i < objectterms.length; i++) {
+ var others = [].concat(objectterms.slice(0, i),
+ objectterms.slice(i+1, objectterms.length));
+ results = results.concat(this.performObjectSearch(objectterms[i], others));
+ }
+
+ // lookup as search terms in fulltext
+ results = results.concat(this.performTermsSearch(searchterms, excluded, terms, Scorer.term))
+ .concat(this.performTermsSearch(searchterms, excluded, titleterms, Scorer.title));
+
+ // let the scorer override scores with a custom scoring function
+ if (Scorer.score) {
+ for (i = 0; i < results.length; i++)
+ results[i][4] = Scorer.score(results[i]);
+ }
+
+ // now sort the results by score (in opposite order of appearance, since the
+ // display function below uses pop() to retrieve items) and then
+ // alphabetically
+ results.sort(function(a, b) {
+ var left = a[4];
+ var right = b[4];
+ if (left > right) {
+ return 1;
+ } else if (left < right) {
+ return -1;
+ } else {
+ // same score: sort alphabetically
+ left = a[1].toLowerCase();
+ right = b[1].toLowerCase();
+ return (left > right) ? -1 : ((left < right) ? 1 : 0);
+ }
+ });
+
+ // for debugging
+ //Search.lastresults = results.slice(); // a copy
+ //console.info('search results:', Search.lastresults);
+
+ // print the results
+ var resultCount = results.length;
+ function displayNextItem() {
+ // results left, load the summary and display it
+ if (results.length) {
+ var item = results.pop();
+ var listItem = $('<li style="display:none"></li>');
+ if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
+ // dirhtml builder
+ var dirname = item[0] + '/';
+ if (dirname.match(/\/index\/$/)) {
+ dirname = dirname.substring(0, dirname.length-6);
+ } else if (dirname == 'index/') {
+ dirname = '';
+ }
+ listItem.append($('<a/>').attr('href',
+ DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+ highlightstring + item[2]).html(item[1]));
+ } else {
+ // normal html builders
+ listItem.append($('<a/>').attr('href',
+ item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+ highlightstring + item[2]).html(item[1]));
+ }
+ if (item[3]) {
+ listItem.append($('<span> (' + item[3] + ')</span>'));
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+ $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[0] + '.txt',
+ dataType: "text",
+ complete: function(jqxhr, textstatus) {
+ var data = jqxhr.responseText;
+ if (data !== '') {
+ listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
+ }
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }});
+ } else {
+ // no source available, just display title
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }
+ }
+ // search finished, update title and status message
+ else {
+ Search.stopPulse();
+ Search.title.text(_('Search Results'));
+ if (!resultCount)
+ Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+ else
+ Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+ Search.status.fadeIn(500);
+ }
+ }
+ displayNextItem();
+ },
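+
+  // Editor's note (not part of the original file): a hypothetical driver for
+  // query() above -- setIndex() is normally invoked by the Sphinx-generated
+  // searchindex.js:
+  //
+  //   Search.performSearch('graph -dot');  // require "graph", exclude "dot"
+  //
+  // Both words are stemmed first; matches are then rendered one <li> at a
+  // time by displayNextItem().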
+
+ /**
+ * search for object names
+ */
+ performObjectSearch : function(object, otherterms) {
+ var filenames = this._index.filenames;
+ var objects = this._index.objects;
+ var objnames = this._index.objnames;
+ var titles = this._index.titles;
+
+ var i;
+ var results = [];
+
+ for (var prefix in objects) {
+ for (var name in objects[prefix]) {
+ var fullname = (prefix ? prefix + '.' : '') + name;
+ if (fullname.toLowerCase().indexOf(object) > -1) {
+ var score = 0;
+ var parts = fullname.split('.');
+ // check for different match types: exact matches of full name or
+ // "last name" (i.e. last dotted part)
+ if (fullname == object || parts[parts.length - 1] == object) {
+ score += Scorer.objNameMatch;
+ // matches in last name
+ } else if (parts[parts.length - 1].indexOf(object) > -1) {
+ score += Scorer.objPartialMatch;
+ }
+ var match = objects[prefix][name];
+ var objname = objnames[match[1]][2];
+ var title = titles[match[0]];
+          // If more than one term was searched for, we require the other words to be
+ // found in the name/title/description
+ if (otherterms.length > 0) {
+ var haystack = (prefix + ' ' + name + ' ' +
+ objname + ' ' + title).toLowerCase();
+ var allfound = true;
+ for (i = 0; i < otherterms.length; i++) {
+ if (haystack.indexOf(otherterms[i]) == -1) {
+ allfound = false;
+ break;
+ }
+ }
+ if (!allfound) {
+ continue;
+ }
+ }
+ var descr = objname + _(', in ') + title;
+
+ var anchor = match[3];
+ if (anchor === '')
+ anchor = fullname;
+ else if (anchor == '-')
+ anchor = objnames[match[1]][1] + '-' + fullname;
+ // add custom score for some objects according to scorer
+ if (Scorer.objPrio.hasOwnProperty(match[2])) {
+ score += Scorer.objPrio[match[2]];
+ } else {
+ score += Scorer.objPrioDefault;
+ }
+ results.push([filenames[match[0]], fullname, '#'+anchor, descr, score]);
+ }
+ }
+ }
+
+ return results;
+ },
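+
+  // Editor's note (not part of the original file): the index shape assumed by
+  // performObjectSearch, reconstructed from how `match` is consumed above --
+  // a hypothetical entry:
+  //
+  //   // objects[prefix][name] = [docIndex, objTypeIndex, priority, anchor]
+  //   var objects   = { altgraph: { Graph: [0, 1, 1, ''] } };
+  //   var objnames  = [null, ['py', 'class', 'Python class']];
+  //   var filenames = ['core'], titles = ['Graph --- basic graph type'];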
+
+ /**
+ * search for full-text terms in the index
+ */
+ performTermsSearch : function(searchterms, excluded, terms, score) {
+ var filenames = this._index.filenames;
+ var titles = this._index.titles;
+
+ var i, j, file, files;
+ var fileMap = {};
+ var results = [];
+
+ // perform the search on the required terms
+ for (i = 0; i < searchterms.length; i++) {
+ var word = searchterms[i];
+ // no match but word was a required one
+ if ((files = terms[word]) === undefined)
+ break;
+ if (files.length === undefined) {
+ files = [files];
+ }
+ // create the mapping
+ for (j = 0; j < files.length; j++) {
+ file = files[j];
+ if (file in fileMap)
+ fileMap[file].push(word);
+ else
+ fileMap[file] = [word];
+ }
+ }
+
+ // now check if the files don't contain excluded terms
+ for (file in fileMap) {
+ var valid = true;
+
+ // check if all requirements are matched
+ if (fileMap[file].length != searchterms.length)
+ continue;
+
+ // ensure that none of the excluded terms is in the search result
+ for (i = 0; i < excluded.length; i++) {
+ if (terms[excluded[i]] == file ||
+ $u.contains(terms[excluded[i]] || [], file)) {
+ valid = false;
+ break;
+ }
+ }
+
+      // if we still have a valid result we can add it to the result list
+ if (valid) {
+ results.push([filenames[file], titles[file], '', null, score]);
+ }
+ }
+ return results;
+ },
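+
+  // Editor's note (not part of the original file): the term index consumed by
+  // performTermsSearch maps a stemmed word to a single file index or an array
+  // of them; a hypothetical fragment:
+  //
+  //   var terms = { graph: [0, 2], node: 1 };
+  //   // a file is kept only if it matches every required term and contains
+  //   // none of the excluded terms.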
+
+ /**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words, hlwords is the list of normal, unstemmed
+   * words. the former is used to find the occurrence, the
+ * latter for highlighting it.
+ */
+ makeSearchSummary : function(text, keywords, hlwords) {
+ var textLower = text.toLowerCase();
+ var start = 0;
+ $.each(keywords, function() {
+ var i = textLower.indexOf(this.toLowerCase());
+ if (i > -1)
+ start = i;
+ });
+ start = Math.max(start - 120, 0);
+ var excerpt = ((start > 0) ? '...' : '') +
+ $.trim(text.substr(start, 240)) +
+      ((start + 240 < text.length) ? '...' : '');
+ var rv = $('<div class="context"></div>').text(excerpt);
+ $.each(hlwords, function() {
+ rv = rv.highlightText(this, 'highlighted');
+ });
+ return rv;
+ }
+};
+
+$(document).ready(function() {
+ Search.init();
+});
\ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/_static/underscore.js b/python/altgraph/doc/_build/html/_static/underscore.js
new file mode 100644
index 000000000..5b55f32be
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/underscore.js
@@ -0,0 +1,31 @@
+// Underscore.js 1.3.1
+// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the MIT license.
+// Portions of Underscore are inspired or borrowed from Prototype,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore
+(function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source==
+c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,
+h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each=
+b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e<f;e++){if(e in a&&c.call(d,a[e],e,a)===n)break}else for(e in a)if(b.has(a,e)&&c.call(d,a[e],e,a)===n)break};b.map=b.collect=function(a,c,b){var e=[];if(a==null)return e;if(x&&a.map===x)return a.map(c,b);j(a,function(a,g,h){e[e.length]=c.call(b,a,g,h)});if(a.length===+a.length)e.length=a.length;return e};b.reduce=b.foldl=b.inject=function(a,c,d,e){var f=arguments.length>2;a==
+null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=
+function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e=
+e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck=
+function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b<e.computed&&(e={value:a,computed:b})});
+return e.value};b.shuffle=function(a){var b=[],d;j(a,function(a,f){f==0?b[0]=a:(d=Math.floor(Math.random()*(f+1)),b[f]=b[d],b[d]=a)});return b};b.sortBy=function(a,c,d){return b.pluck(b.map(a,function(a,b,g){return{value:a,criteria:c.call(d,a,b,g)}}).sort(function(a,b){var c=a.criteria,d=b.criteria;return c<d?-1:c>d?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,
+c,d){d||(d=b.identity);for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?e=g+1:f=g}return e};b.toArray=function(a){return!a?[]:a.toArray?a.toArray():b.isArray(a)?i.call(a):b.isArguments(a)?i.call(a):b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=b.head=function(a,b,d){return b!=null&&!d?i.call(a,0,b):a[0]};b.initial=function(a,b,d){return i.call(a,0,a.length-(b==null||d?1:b))};b.last=function(a,b,d){return b!=null&&!d?i.call(a,Math.max(a.length-b,0)):a[a.length-1]};b.rest=
+b.tail=function(a,b,d){return i.call(a,b==null||d?1:b)};b.compact=function(a){return b.filter(a,function(a){return!!a})};b.flatten=function(a,c){return b.reduce(a,function(a,e){if(b.isArray(e))return a.concat(c?e:b.flatten(e));a[a.length]=e;return a},[])};b.without=function(a){return b.difference(a,i.call(arguments,1))};b.uniq=b.unique=function(a,c,d){var d=d?b.map(a,d):a,e=[];b.reduce(d,function(d,g,h){if(0==h||(c===true?b.last(d)!=g:!b.include(d,g)))d[d.length]=g,e[e.length]=a[h];return d},[]);
+return e};b.union=function(){return b.uniq(b.flatten(arguments,true))};b.intersection=b.intersect=function(a){var c=i.call(arguments,1);return b.filter(b.uniq(a),function(a){return b.every(c,function(c){return b.indexOf(c,a)>=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e<c;e++)d[e]=b.pluck(a,""+e);return d};b.indexOf=function(a,c,
+d){if(a==null)return-1;var e;if(d)return d=b.sortedIndex(a,c),a[d]===c?d:-1;if(p&&a.indexOf===p)return a.indexOf(c);for(d=0,e=a.length;d<e;d++)if(d in a&&a[d]===c)return d;return-1};b.lastIndexOf=function(a,b){if(a==null)return-1;if(D&&a.lastIndexOf===D)return a.lastIndexOf(b);for(var d=a.length;d--;)if(d in a&&a[d]===b)return d;return-1};b.range=function(a,b,d){arguments.length<=1&&(b=a||0,a=0);for(var d=arguments[2]||1,e=Math.max(Math.ceil((b-a)/d),0),f=0,g=Array(e);f<e;)g[f++]=a,a+=d;return g};
+var F=function(){};b.bind=function(a,c){var d,e;if(a.bind===s&&s)return s.apply(a,i.call(arguments,1));if(!b.isFunction(a))throw new TypeError;e=i.call(arguments,2);return d=function(){if(!(this instanceof d))return a.apply(c,e.concat(i.call(arguments)));F.prototype=a.prototype;var b=new F,g=a.apply(b,e.concat(i.call(arguments)));return Object(g)===g?g:b}};b.bindAll=function(a){var c=i.call(arguments,1);c.length==0&&(c=b.functions(a));j(c,function(c){a[c]=b.bind(a[c],a)});return a};b.memoize=function(a,
+c){var d={};c||(c=b.identity);return function(){var e=c.apply(this,arguments);return b.has(d,e)?d[e]:d[e]=a.apply(this,arguments)}};b.delay=function(a,b){var d=i.call(arguments,2);return setTimeout(function(){return a.apply(a,d)},b)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(i.call(arguments,1)))};b.throttle=function(a,c){var d,e,f,g,h,i=b.debounce(function(){h=g=false},c);return function(){d=this;e=arguments;var b;f||(f=setTimeout(function(){f=null;h&&a.apply(d,e);i()},c));g?h=true:
+a.apply(d,e);i();g=true}};b.debounce=function(a,b){var d;return function(){var e=this,f=arguments;clearTimeout(d);d=setTimeout(function(){d=null;a.apply(e,f)},b)}};b.once=function(a){var b=false,d;return function(){if(b)return d;b=true;return d=a.apply(this,arguments)}};b.wrap=function(a,b){return function(){var d=[a].concat(i.call(arguments,0));return b.apply(this,d)}};b.compose=function(){var a=arguments;return function(){for(var b=arguments,d=a.length-1;d>=0;d--)b=[a[d].apply(this,b)];return b[0]}};
+b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments,
+1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};
+b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};
+b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e<a;e++)b.call(d,e)};b.escape=function(a){return(""+a).replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/\//g,"&#x2F;")};b.mixin=function(a){j(b.functions(a),
+function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+
+u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]=
+function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=
+true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
diff --git a/python/altgraph/doc/_build/html/_static/up-pressed.png b/python/altgraph/doc/_build/html/_static/up-pressed.png
new file mode 100644
index 000000000..8bd587afe
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/up-pressed.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/up.png b/python/altgraph/doc/_build/html/_static/up.png
new file mode 100644
index 000000000..b94625680
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/up.png
Binary files differ
diff --git a/python/altgraph/doc/_build/html/_static/websupport.js b/python/altgraph/doc/_build/html/_static/websupport.js
new file mode 100644
index 000000000..19fcda564
--- /dev/null
+++ b/python/altgraph/doc/_build/html/_static/websupport.js
@@ -0,0 +1,808 @@
+/*
+ * websupport.js
+ * ~~~~~~~~~~~~~
+ *
+ * sphinx.websupport utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+(function($) {
+ $.fn.autogrow = function() {
+ return this.each(function() {
+ var textarea = this;
+
+ $.fn.autogrow.resize(textarea);
+
+ $(textarea)
+ .focus(function() {
+ textarea.interval = setInterval(function() {
+ $.fn.autogrow.resize(textarea);
+ }, 500);
+ })
+ .blur(function() {
+ clearInterval(textarea.interval);
+ });
+ });
+ };
+
+ $.fn.autogrow.resize = function(textarea) {
+ var lineHeight = parseInt($(textarea).css('line-height'), 10);
+ var lines = textarea.value.split('\n');
+ var columns = textarea.cols;
+ var lineCount = 0;
+ $.each(lines, function() {
+ lineCount += Math.ceil(this.length / columns) || 1;
+ });
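+    // e.g. with cols=80, a 120-character line counts as Math.ceil(120/80) = 2
+    // display lines, while an empty line still counts as 1 (the "|| 1" above).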
+ var height = lineHeight * (lineCount + 1);
+ $(textarea).css('height', height);
+ };
+})(jQuery);
+
+(function($) {
+ var comp, by;
+
+ function init() {
+ initEvents();
+ initComparator();
+ }
+
+ function initEvents() {
+ $('a.comment-close').live("click", function(event) {
+ event.preventDefault();
+ hide($(this).attr('id').substring(2));
+ });
+ $('a.vote').live("click", function(event) {
+ event.preventDefault();
+ handleVote($(this));
+ });
+ $('a.reply').live("click", function(event) {
+ event.preventDefault();
+ openReply($(this).attr('id').substring(2));
+ });
+ $('a.close-reply').live("click", function(event) {
+ event.preventDefault();
+ closeReply($(this).attr('id').substring(2));
+ });
+ $('a.sort-option').live("click", function(event) {
+ event.preventDefault();
+ handleReSort($(this));
+ });
+ $('a.show-proposal').live("click", function(event) {
+ event.preventDefault();
+ showProposal($(this).attr('id').substring(2));
+ });
+ $('a.hide-proposal').live("click", function(event) {
+ event.preventDefault();
+ hideProposal($(this).attr('id').substring(2));
+ });
+ $('a.show-propose-change').live("click", function(event) {
+ event.preventDefault();
+ showProposeChange($(this).attr('id').substring(2));
+ });
+ $('a.hide-propose-change').live("click", function(event) {
+ event.preventDefault();
+ hideProposeChange($(this).attr('id').substring(2));
+ });
+ $('a.accept-comment').live("click", function(event) {
+ event.preventDefault();
+ acceptComment($(this).attr('id').substring(2));
+ });
+ $('a.delete-comment').live("click", function(event) {
+ event.preventDefault();
+ deleteComment($(this).attr('id').substring(2));
+ });
+ $('a.comment-markup').live("click", function(event) {
+ event.preventDefault();
+ toggleCommentMarkupBox($(this).attr('id').substring(2));
+ });
+ }
+
+ /**
+ * Set comp, which is a comparator function used for sorting and
+ * inserting comments into the list.
+ */
+ function setComparator() {
+ // If the first three letters are "asc", sort in ascending order
+ // and remove the prefix.
+ if (by.substring(0,3) == 'asc') {
+ var i = by.substring(3);
+ comp = function(a, b) { return a[i] - b[i]; };
+ } else {
+ // Otherwise sort in descending order.
+ comp = function(a, b) { return b[by] - a[by]; };
+ }
+
+ // Reset link styles and format the selected sort option.
+ $('a.sel').attr('href', '#').removeClass('sel');
+ $('a.by' + by).removeAttr('href').addClass('sel');
+ }
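+
+  // Illustrative examples of the comparator chosen by setComparator above:
+  //   by = 'rating' -> comp sorts descending by the "rating" field;
+  //   by = 'ascage' -> comp sorts ascending by the "age" field.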
+
+ /**
+ * Create a comp function. If the user has preferences stored in
+   * the sortBy cookie, use those; otherwise use the default.
+ */
+ function initComparator() {
+ by = 'rating'; // Default to sort by rating.
+ // If the sortBy cookie is set, use that instead.
+ if (document.cookie.length > 0) {
+ var start = document.cookie.indexOf('sortBy=');
+ if (start != -1) {
+ start = start + 7;
+ var end = document.cookie.indexOf(";", start);
+        if (end == -1) {
+          end = document.cookie.length;
+        }
+        by = unescape(document.cookie.substring(start, end));
+ }
+ }
+ setComparator();
+ }
+
+ /**
+ * Show a comment div.
+ */
+ function show(id) {
+ $('#ao' + id).hide();
+ $('#ah' + id).show();
+ var context = $.extend({id: id}, opts);
+ var popup = $(renderTemplate(popupTemplate, context)).hide();
+ popup.find('textarea[name="proposal"]').hide();
+ popup.find('a.by' + by).addClass('sel');
+ var form = popup.find('#cf' + id);
+ form.submit(function(event) {
+ event.preventDefault();
+ addComment(form);
+ });
+ $('#s' + id).after(popup);
+ popup.slideDown('fast', function() {
+ getComments(id);
+ });
+ }
+
+ /**
+ * Hide a comment div.
+ */
+ function hide(id) {
+ $('#ah' + id).hide();
+ $('#ao' + id).show();
+ var div = $('#sc' + id);
+ div.slideUp('fast', function() {
+ div.remove();
+ });
+ }
+
+ /**
+ * Perform an ajax request to get comments for a node
+ * and insert the comments into the comments tree.
+ */
+ function getComments(id) {
+ $.ajax({
+ type: 'GET',
+ url: opts.getCommentsURL,
+ data: {node: id},
+ success: function(data, textStatus, request) {
+ var ul = $('#cl' + id);
+ var speed = 100;
+ $('#cf' + id)
+ .find('textarea[name="proposal"]')
+ .data('source', data.source);
+
+ if (data.comments.length === 0) {
+ ul.html('<li>No comments yet.</li>');
+ ul.data('empty', true);
+ } else {
+ // If there are comments, sort them and put them in the list.
+ var comments = sortComments(data.comments);
+ speed = data.comments.length * 100;
+ appendComments(comments, ul);
+ ul.data('empty', false);
+ }
+ $('#cn' + id).slideUp(speed + 200);
+ ul.slideDown(speed);
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem retrieving the comments.');
+ },
+ dataType: 'json'
+ });
+ }
+
+ /**
+ * Add a comment via ajax and insert the comment into the comment tree.
+ */
+ function addComment(form) {
+ var node_id = form.find('input[name="node"]').val();
+ var parent_id = form.find('input[name="parent"]').val();
+ var text = form.find('textarea[name="comment"]').val();
+ var proposal = form.find('textarea[name="proposal"]').val();
+
+ if (text == '') {
+ showError('Please enter a comment.');
+ return;
+ }
+
+ // Disable the form that is being submitted.
+ form.find('textarea,input').attr('disabled', 'disabled');
+
+ // Send the comment to the server.
+ $.ajax({
+ type: "POST",
+ url: opts.addCommentURL,
+ dataType: 'json',
+ data: {
+ node: node_id,
+ parent: parent_id,
+ text: text,
+ proposal: proposal
+ },
+ success: function(data, textStatus, error) {
+ // Reset the form.
+ if (node_id) {
+ hideProposeChange(node_id);
+ }
+ form.find('textarea')
+ .val('')
+ .add(form.find('input'))
+ .removeAttr('disabled');
+ var ul = $('#cl' + (node_id || parent_id));
+ if (ul.data('empty')) {
+ $(ul).empty();
+ ul.data('empty', false);
+ }
+ insertComment(data.comment);
+ var ao = $('#ao' + node_id);
+ ao.find('img').attr({'src': opts.commentBrightImage});
+ if (node_id) {
+ // if this was a "root" comment, remove the commenting box
+ // (the user can get it back by reopening the comment popup)
+ $('#ca' + node_id).slideUp();
+ }
+ },
+ error: function(request, textStatus, error) {
+ form.find('textarea,input').removeAttr('disabled');
+ showError('Oops, there was a problem adding the comment.');
+ }
+ });
+ }
+
+ /**
+ * Recursively append comments to the main comment list and children
+ * lists, creating the comment tree.
+ */
+ function appendComments(comments, ul) {
+ $.each(comments, function() {
+ var div = createCommentDiv(this);
+ ul.append($(document.createElement('li')).html(div));
+ appendComments(this.children, div.find('ul.comment-children'));
+      // To avoid stale data, don't store the comment's children in data.
+ this.children = null;
+ div.data('comment', this);
+ });
+ }
+
+ /**
+ * After adding a new comment, it must be inserted in the correct
+ * location in the comment tree.
+ */
+ function insertComment(comment) {
+ var div = createCommentDiv(comment);
+
+    // To avoid stale data, don't store the comment's children in data.
+ comment.children = null;
+ div.data('comment', comment);
+
+ var ul = $('#cl' + (comment.node || comment.parent));
+ var siblings = getChildren(ul);
+
+ var li = $(document.createElement('li'));
+ li.hide();
+
+    // Determine where in the parent's children list to insert this comment.
+    for (var i = 0; i < siblings.length; i++) {
+ if (comp(comment, siblings[i]) <= 0) {
+ $('#cd' + siblings[i].id)
+ .parent()
+ .before(li.html(div));
+ li.slideDown('fast');
+ return;
+ }
+ }
+
+ // If we get here, this comment rates lower than all the others,
+ // or it is the only comment in the list.
+ ul.append(li.html(div));
+ li.slideDown('fast');
+ }
+
+ function acceptComment(id) {
+ $.ajax({
+ type: 'POST',
+ url: opts.acceptCommentURL,
+ data: {id: id},
+ success: function(data, textStatus, request) {
+ $('#cm' + id).fadeOut('fast');
+ $('#cd' + id).removeClass('moderate');
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem accepting the comment.');
+ }
+ });
+ }
+
+ function deleteComment(id) {
+ $.ajax({
+ type: 'POST',
+ url: opts.deleteCommentURL,
+ data: {id: id},
+ success: function(data, textStatus, request) {
+ var div = $('#cd' + id);
+ if (data == 'delete') {
+ // Moderator mode: remove the comment and all children immediately
+ div.slideUp('fast', function() {
+ div.remove();
+ });
+ return;
+ }
+ // User mode: only mark the comment as deleted
+ div
+ .find('span.user-id:first')
+ .text('[deleted]').end()
+ .find('div.comment-text:first')
+ .text('[deleted]').end()
+ .find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
+ ', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
+ .remove();
+ var comment = div.data('comment');
+ comment.username = '[deleted]';
+ comment.text = '[deleted]';
+ div.data('comment', comment);
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem deleting the comment.');
+ }
+ });
+ }
+
+ function showProposal(id) {
+ $('#sp' + id).hide();
+ $('#hp' + id).show();
+ $('#pr' + id).slideDown('fast');
+ }
+
+ function hideProposal(id) {
+ $('#hp' + id).hide();
+ $('#sp' + id).show();
+ $('#pr' + id).slideUp('fast');
+ }
+
+ function showProposeChange(id) {
+ $('#pc' + id).hide();
+ $('#hc' + id).show();
+ var textarea = $('#pt' + id);
+ textarea.val(textarea.data('source'));
+ $.fn.autogrow.resize(textarea[0]);
+ textarea.slideDown('fast');
+ }
+
+ function hideProposeChange(id) {
+ $('#hc' + id).hide();
+ $('#pc' + id).show();
+ var textarea = $('#pt' + id);
+ textarea.val('').removeAttr('disabled');
+ textarea.slideUp('fast');
+ }
+
+ function toggleCommentMarkupBox(id) {
+ $('#mb' + id).toggle();
+ }
+
+ /** Handle when the user clicks on a sort by link. */
+ function handleReSort(link) {
+ var classes = link.attr('class').split(/\s+/);
+ for (var i=0; i<classes.length; i++) {
+ if (classes[i] != 'sort-option') {
+ by = classes[i].substring(2);
+ }
+ }
+ setComparator();
+ // Save/update the sortBy cookie.
+ var expiration = new Date();
+ expiration.setDate(expiration.getDate() + 365);
+ document.cookie= 'sortBy=' + escape(by) +
+ ';expires=' + expiration.toUTCString();
+ $('ul.comment-ul').each(function(index, ul) {
+ var comments = getChildren($(ul), true);
+ comments = sortComments(comments);
+ appendComments(comments, $(ul).empty());
+ });
+ }
+
+ /**
+ * Function to process a vote when a user clicks an arrow.
+ */
+ function handleVote(link) {
+ if (!opts.voting) {
+ showError("You'll need to login to vote.");
+ return;
+ }
+
+ var id = link.attr('id');
+ if (!id) {
+ // Didn't click on one of the voting arrows.
+ return;
+ }
+    // If it is an unvote the new vote value is 0;
+    // otherwise it's 1 for an upvote, or -1 for a downvote.
+ var value = 0;
+ if (id.charAt(1) != 'u') {
+ value = id.charAt(0) == 'u' ? 1 : -1;
+ }
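+    // For example (ids are illustrative): id 'uv42' (up arrow) gives value 1,
+    // 'dv42' (down arrow) gives -1, and 'uu42' / 'du42' (un-vote) leave it 0.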
+ // The data to be sent to the server.
+ var d = {
+ comment_id: id.substring(2),
+ value: value
+ };
+
+ // Swap the vote and unvote links.
+ link.hide();
+ $('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
+ .show();
+
+ // The div the comment is displayed in.
+ var div = $('div#cd' + d.comment_id);
+ var data = div.data('comment');
+
+ // If this is not an unvote, and the other vote arrow has
+ // already been pressed, unpress it.
+ if ((d.value !== 0) && (data.vote === d.value * -1)) {
+ $('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
+ $('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
+ }
+
+    // Update the comment's rating in the local data.
+ data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
+ data.vote = d.value;
+ div.data('comment', data);
+
+ // Change the rating text.
+ div.find('.rating:first')
+ .text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));
+
+ // Send the vote information to the server.
+ $.ajax({
+ type: "POST",
+ url: opts.processVoteURL,
+ data: d,
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem casting that vote.');
+ }
+ });
+ }
+
+ /**
+ * Open a reply form used to reply to an existing comment.
+ */
+ function openReply(id) {
+ // Swap out the reply link for the hide link
+ $('#rl' + id).hide();
+ $('#cr' + id).show();
+
+ // Add the reply li to the children ul.
+ var div = $(renderTemplate(replyTemplate, {id: id})).hide();
+ $('#cl' + id)
+ .prepend(div)
+ // Setup the submit handler for the reply form.
+ .find('#rf' + id)
+ .submit(function(event) {
+ event.preventDefault();
+ addComment($('#rf' + id));
+ closeReply(id);
+ })
+ .find('input[type=button]')
+ .click(function() {
+ closeReply(id);
+ });
+ div.slideDown('fast', function() {
+ $('#rf' + id).find('textarea').focus();
+ });
+ }
+
+ /**
+ * Close the reply form opened with openReply.
+ */
+ function closeReply(id) {
+ // Remove the reply div from the DOM.
+ $('#rd' + id).slideUp('fast', function() {
+ $(this).remove();
+ });
+
+ // Swap out the hide link for the reply link
+ $('#cr' + id).hide();
+ $('#rl' + id).show();
+ }
+
+ /**
+ * Recursively sort a tree of comments using the comp comparator.
+ */
+ function sortComments(comments) {
+ comments.sort(comp);
+ $.each(comments, function() {
+ this.children = sortComments(this.children);
+ });
+ return comments;
+ }
+
+ /**
+ * Get the children comments from a ul. If recursive is true,
+   * recursively include children's children.
+ */
+ function getChildren(ul, recursive) {
+ var children = [];
+ ul.children().children("[id^='cd']")
+ .each(function() {
+ var comment = $(this).data('comment');
+ if (recursive)
+ comment.children = getChildren($(this).find('#cl' + comment.id), true);
+ children.push(comment);
+ });
+ return children;
+ }
+
+ /** Create a div to display a comment in. */
+ function createCommentDiv(comment) {
+ if (!comment.displayed && !opts.moderator) {
+ return $('<div class="moderate">Thank you! Your comment will show up '
+        + 'once it has been approved by a moderator.</div>');
+ }
+ // Prettify the comment rating.
+ comment.pretty_rating = comment.rating + ' point' +
+ (comment.rating == 1 ? '' : 's');
+    // Set a CSS class (used to display not-yet-moderated comments differently).
+ comment.css_class = comment.displayed ? '' : ' moderate';
+ // Create a div for this comment.
+ var context = $.extend({}, opts, comment);
+ var div = $(renderTemplate(commentTemplate, context));
+
+ // If the user has voted on this comment, highlight the correct arrow.
+ if (comment.vote) {
+ var direction = (comment.vote == 1) ? 'u' : 'd';
+ div.find('#' + direction + 'v' + comment.id).hide();
+ div.find('#' + direction + 'u' + comment.id).show();
+ }
+
+ if (opts.moderator || comment.text != '[deleted]') {
+ div.find('a.reply').show();
+ if (comment.proposal_diff)
+ div.find('#sp' + comment.id).show();
+ if (opts.moderator && !comment.displayed)
+ div.find('#cm' + comment.id).show();
+ if (opts.moderator || (opts.username == comment.username))
+ div.find('#dc' + comment.id).show();
+ }
+ return div;
+ }
+
+ /**
+ * A simple template renderer. Placeholders such as <%id%> are replaced
+ * by context['id'] with items being escaped. Placeholders such as <#id#>
+ * are not escaped.
+ */
+ function renderTemplate(template, context) {
+ var esc = $(document.createElement('div'));
+
+ function handle(ph, escape) {
+ var cur = context;
+ $.each(ph.split('.'), function() {
+ cur = cur[this];
+ });
+ return escape ? esc.text(cur || "").html() : cur;
+ }
+
+ return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
+ return handle(arguments[2], arguments[1] == '%' ? true : false);
+ });
+ }
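+
+  // Illustrative example: with context = {id: 42, user: {name: 'jo'}},
+  //   renderTemplate('<li id="c<%id%>"><#user.name#></li>', context)
+  // returns '<li id="c42">jo</li>' -- <%...%> values are HTML-escaped,
+  // <#...#> values are inserted verbatim (dotted paths walk the context).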
+
+ /** Flash an error message briefly. */
+ function showError(message) {
+ $(document.createElement('div')).attr({'class': 'popup-error'})
+ .append($(document.createElement('div'))
+ .attr({'class': 'error-message'}).text(message))
+ .appendTo('body')
+ .fadeIn("slow")
+ .delay(2000)
+ .fadeOut("slow");
+ }
+
+ /** Add a link the user uses to open the comments popup. */
+ $.fn.comment = function() {
+ return this.each(function() {
+ var id = $(this).attr('id').substring(1);
+ var count = COMMENT_METADATA[id];
+ var title = count + ' comment' + (count == 1 ? '' : 's');
+ var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
+ var addcls = count == 0 ? ' nocomment' : '';
+ $(this)
+ .append(
+ $(document.createElement('a')).attr({
+ href: '#',
+ 'class': 'sphinx-comment-open' + addcls,
+ id: 'ao' + id
+ })
+ .append($(document.createElement('img')).attr({
+ src: image,
+ alt: 'comment',
+ title: title
+ }))
+ .click(function(event) {
+ event.preventDefault();
+ show($(this).attr('id').substring(2));
+ })
+ )
+ .append(
+ $(document.createElement('a')).attr({
+ href: '#',
+ 'class': 'sphinx-comment-close hidden',
+ id: 'ah' + id
+ })
+ .append($(document.createElement('img')).attr({
+ src: opts.closeCommentImage,
+ alt: 'close',
+ title: 'close'
+ }))
+ .click(function(event) {
+ event.preventDefault();
+ hide($(this).attr('id').substring(2));
+ })
+ );
+ });
+ };
+
+ var opts = {
+ processVoteURL: '/_process_vote',
+ addCommentURL: '/_add_comment',
+ getCommentsURL: '/_get_comments',
+ acceptCommentURL: '/_accept_comment',
+ deleteCommentURL: '/_delete_comment',
+ commentImage: '/static/_static/comment.png',
+ closeCommentImage: '/static/_static/comment-close.png',
+ loadingImage: '/static/_static/ajax-loader.gif',
+ commentBrightImage: '/static/_static/comment-bright.png',
+ upArrow: '/static/_static/up.png',
+ downArrow: '/static/_static/down.png',
+ upArrowPressed: '/static/_static/up-pressed.png',
+ downArrowPressed: '/static/_static/down-pressed.png',
+ voting: false,
+ moderator: false
+ };
+
+ if (typeof COMMENT_OPTIONS != "undefined") {
+ opts = jQuery.extend(opts, COMMENT_OPTIONS);
+ }
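+
+  // Illustrative example: a page can override any of the defaults above by
+  // defining COMMENT_OPTIONS before this script is loaded, e.g.
+  //   var COMMENT_OPTIONS = {voting: true, processVoteURL: '/vote'};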
+
+ var popupTemplate = '\
+ <div class="sphinx-comments" id="sc<%id%>">\
+ <p class="sort-options">\
+ Sort by:\
+ <a href="#" class="sort-option byrating">best rated</a>\
+ <a href="#" class="sort-option byascage">newest</a>\
+ <a href="#" class="sort-option byage">oldest</a>\
+ </p>\
+ <div class="comment-header">Comments</div>\
+ <div class="comment-loading" id="cn<%id%>">\
+ loading comments... <img src="<%loadingImage%>" alt="" /></div>\
+ <ul id="cl<%id%>" class="comment-ul"></ul>\
+ <div id="ca<%id%>">\
+ <p class="add-a-comment">Add a comment\
+ (<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
+ <div class="comment-markup-box" id="mb<%id%>">\
+ reStructuredText markup: <i>*emph*</i>, <b>**strong**</b>, \
+ <tt>``code``</tt>, \
+ code blocks: <tt>::</tt> and an indented block after blank line</div>\
+ <form method="post" id="cf<%id%>" class="comment-form" action="">\
+ <textarea name="comment" cols="80"></textarea>\
+ <p class="propose-button">\
+ <a href="#" id="pc<%id%>" class="show-propose-change">\
+ Propose a change &#9657;\
+ </a>\
+ <a href="#" id="hc<%id%>" class="hide-propose-change">\
+ Propose a change &#9663;\
+ </a>\
+ </p>\
+ <textarea name="proposal" id="pt<%id%>" cols="80"\
+ spellcheck="false"></textarea>\
+ <input type="submit" value="Add comment" />\
+ <input type="hidden" name="node" value="<%id%>" />\
+ <input type="hidden" name="parent" value="" />\
+ </form>\
+ </div>\
+ </div>';
+
+ var commentTemplate = '\
+ <div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
+ <div class="vote">\
+ <div class="arrow">\
+ <a href="#" id="uv<%id%>" class="vote" title="vote up">\
+ <img src="<%upArrow%>" />\
+ </a>\
+ <a href="#" id="uu<%id%>" class="un vote" title="vote up">\
+ <img src="<%upArrowPressed%>" />\
+ </a>\
+ </div>\
+ <div class="arrow">\
+ <a href="#" id="dv<%id%>" class="vote" title="vote down">\
+ <img src="<%downArrow%>" id="da<%id%>" />\
+ </a>\
+ <a href="#" id="du<%id%>" class="un vote" title="vote down">\
+ <img src="<%downArrowPressed%>" />\
+ </a>\
+ </div>\
+ </div>\
+ <div class="comment-content">\
+ <p class="tagline comment">\
+ <span class="user-id"><%username%></span>\
+ <span class="rating"><%pretty_rating%></span>\
+ <span class="delta"><%time.delta%></span>\
+ </p>\
+ <div class="comment-text comment"><#text#></div>\
+ <p class="comment-opts comment">\
+ <a href="#" class="reply hidden" id="rl<%id%>">reply &#9657;</a>\
+ <a href="#" class="close-reply" id="cr<%id%>">reply &#9663;</a>\
+ <a href="#" id="sp<%id%>" class="show-proposal">proposal &#9657;</a>\
+ <a href="#" id="hp<%id%>" class="hide-proposal">proposal &#9663;</a>\
+ <a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
+ <span id="cm<%id%>" class="moderation hidden">\
+ <a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
+ </span>\
+ </p>\
+ <pre class="proposal" id="pr<%id%>">\
+<#proposal_diff#>\
+ </pre>\
+ <ul class="comment-children" id="cl<%id%>"></ul>\
+ </div>\
+ <div class="clearleft"></div>\
+ </div>\
+ </div>';
+
+ var replyTemplate = '\
+ <li>\
+ <div class="reply-div" id="rd<%id%>">\
+ <form id="rf<%id%>">\
+ <textarea name="comment" cols="80"></textarea>\
+ <input type="submit" value="Add reply" />\
+ <input type="button" value="Cancel" />\
+ <input type="hidden" name="parent" value="<%id%>" />\
+ <input type="hidden" name="node" value="" />\
+ </form>\
+ </div>\
+ </li>';
+
+ $(document).ready(function() {
+ init();
+ });
+})(jQuery);
+
+$(document).ready(function() {
+ // add comment anchors for all paragraphs that are commentable
+ $('.sphinx-has-comment').comment();
+
+ // highlight search words in search results
+ $("div.context").each(function() {
+ var params = $.getQueryParameters();
+ var terms = (params.q) ? params.q[0].split(/\s+/) : [];
+ var result = $(this);
+ $.each(terms, function() {
+ result.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ });
+
+ // directly open comment window if requested
+ var anchor = document.location.hash;
+ if (anchor.substring(0, 9) == '#comment-') {
+ $('#ao' + anchor.substring(9)).click();
+ document.location.hash = '#s' + anchor.substring(9);
+ }
+});
diff --git a/python/altgraph/doc/_build/html/changelog.html b/python/altgraph/doc/_build/html/changelog.html
new file mode 100644
index 000000000..8f0f459ea
--- /dev/null
+++ b/python/altgraph/doc/_build/html/changelog.html
@@ -0,0 +1,271 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Release history &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="License" href="license.html" />
+ <link rel="prev" title="Altgraph - A basic graph library" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Altgraph - A basic graph library"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="release-history">
+<h1>Release history<a class="headerlink" href="#release-history" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="id1">
+<h2>0.11<a class="headerlink" href="#id1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Stabilize the order of elements in dot file exports,
+patch from bitbucket user &#8216;pombredanne&#8217;.</li>
+<li>Tweak setup.py file to remove dependency on distribute (but
+keep the dependency on setuptools)</li>
+</ul>
+</div>
+<div class="section" id="id2">
+<h2>0.10.2<a class="headerlink" href="#id2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>There were no classifiers in the package metadata due to a bug
+in setup.py</li>
+</ul>
+</div>
+<div class="section" id="id3">
+<h2>0.10.1<a class="headerlink" href="#id3" title="Permalink to this headline">¶</a></h2>
+<p>This is a bugfix release</p>
+<p>Bug fixes:</p>
+<ul>
+<li><p class="first">Issue #3: The source archive contains a README.txt
+while the setup file refers to ReadMe.txt.</p>
+<p>This is caused by a misfeature in distutils; as a
+workaround I&#8217;ve renamed ReadMe.txt to README.txt
+in the source tree and setup file.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="id4">
+<h2>0.10<a class="headerlink" href="#id4" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor feature release</p>
+<p>Features:</p>
+<ul>
+<li><p class="first">Do not use &#8220;2to3&#8221; to support Python 3.</p>
+<p>As a side effect of this altgraph now supports
+Python 2.6 and later, and no longer supports
+earlier releases of Python.</p>
+</li>
+<li><p class="first">The order of attributes in the Dot output
+is now always alphabetical.</p>
+<p>With this change the output will be consistent
+between runs and Python versions.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="id5">
+<h2>0.9<a class="headerlink" href="#id5" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor bugfix release</p>
+<p>Features:</p>
+<ul class="simple">
+<li>Added <tt class="docutils literal"><span class="pre">altgraph.ObjectGraph.ObjectGraph.nodes</span></tt>, a method
+yielding all nodes in an object graph.</li>
+</ul>
+<p>Bugfixes:</p>
+<ul class="simple">
+<li>The 0.8 release didn&#8217;t work with py2app when using
+python 3.x.</li>
+</ul>
+</div>
+<div class="section" id="id6">
+<h2>0.8<a class="headerlink" href="#id6" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor feature release. The major new feature
+is an extensive set of unittests, which explains almost
+all other changes in this release.</p>
+<p>Bugfixes:</p>
+<ul class="simple">
+<li>Installing failed with Python 2.5 due to using a distutils
+class that isn&#8217;t available in that version of Python
+(issue #1 on the issue tracker)</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.GraphStat.degree_dist</span></tt> now actually works</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.add_edge(a,</span> <span class="pre">b,</span> <span class="pre">create_nodes=False)</span></tt> will
+no longer create the edge when one of the nodes doesn&#8217;t
+exist.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.forw_topo_sort</span></tt> failed for some sparse graphs.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.back_topo_sort</span></tt> was completely broken in
+previous releases.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.forw_bfs_subgraph</span></tt> now actually works.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.back_bfs_subgraph</span></tt> now actually works.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.iterdfs</span></tt> now returns the correct result
+when the <tt class="docutils literal"><span class="pre">forward</span></tt> argument is <tt class="docutils literal"><span class="pre">False</span></tt>.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.iterdata</span></tt> now returns the correct result
+when the <tt class="docutils literal"><span class="pre">forward</span></tt> argument is <tt class="docutils literal"><span class="pre">False</span></tt>.</li>
+</ul>
+<p>Features:</p>
+<ul class="simple">
+<li>The <tt class="docutils literal"><span class="pre">altgraph.Graph</span></tt> constructor now accepts an argument
+that contains 2- and 3-tuples instead of requiring that
+all items have the same size. The (optional) argument can now
+also be any iterator.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Graph.Graph.add_node</span></tt> has no effect when you
+add a hidden node.</li>
+<li>The private method <tt class="docutils literal"><span class="pre">altgraph.Graph._bfs</span></tt> is no longer
+present.</li>
+<li>The private method <tt class="docutils literal"><span class="pre">altgraph.Graph._dfs</span></tt> is no longer
+present.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt> now has a <tt class="docutils literal"><span class="pre">__contains__</span></tt> methods,
+which means you can use the <tt class="docutils literal"><span class="pre">in</span></tt> operator to check if a
+node is part of a graph.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.GraphUtil.generate_random_graph</span></tt> will raise
+<tt class="docutils literal"><span class="pre">GraphError</span></tt> instead of looping forever when it is
+impossible to create the requested graph.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Dot.edge_style</span></tt> raises <tt class="docutils literal"><span class="pre">GraphError</span></tt> when
+one of the nodes is not present in the graph. The method
+silently added the tail in the past, but without ensuring
+a consistent graph state.</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.Dot.save_img</span></tt> now works when the mode is
+<tt class="docutils literal"><span class="pre">&quot;neato&quot;</span></tt>.</li>
+</ul>
+</div>
+<div class="section" id="id7">
+<h2>0.7.2<a class="headerlink" href="#id7" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor bugfix release</p>
+<p>Bugfixes:</p>
+<ul class="simple">
+<li>distutils didn&#8217;t include the documentation subtree</li>
+</ul>
+</div>
+<div class="section" id="id8">
+<h2>0.7.1<a class="headerlink" href="#id8" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor feature release</p>
+<p>Features:</p>
+<ul class="simple">
+<li>Documentation is now generated using <a class="reference external" href="http://pypi.python.org/pypi/sphinx">sphinx</a>
+and can be viewed at &lt;<a class="reference external" href="http://packages.python.org/altgraph">http://packages.python.org/altgraph</a>&gt;.</li>
+<li>The repository has moved to bitbucket</li>
+<li><tt class="docutils literal"><span class="pre">altgraph.GraphStat.avg_hops</span></tt> is no longer present, the function had no
+implementation and no specified behaviour.</li>
+<li>The module <tt class="docutils literal"><span class="pre">altgraph.compat</span></tt> is gone, which means altgraph will no
+longer work with Python 2.3.</li>
+</ul>
+</div>
+<div class="section" id="id9">
+<h2>0.7.0<a class="headerlink" href="#id9" title="Permalink to this headline">¶</a></h2>
+<p>This is a minor feature release.</p>
+<p>Features:</p>
+<ul>
+<li><p class="first">Support for Python 3</p>
+</li>
+<li><p class="first">It is now possible to run tests using &#8216;python setup.py test&#8217;</p>
+<p>(The actual testsuite is still very minimal though)</p>
+</li>
+</ul>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Release history</a><ul>
+<li><a class="reference internal" href="#id1">0.11</a></li>
+<li><a class="reference internal" href="#id2">0.10.2</a></li>
+<li><a class="reference internal" href="#id3">0.10.1</a></li>
+<li><a class="reference internal" href="#id4">0.10</a></li>
+<li><a class="reference internal" href="#id5">0.9</a></li>
+<li><a class="reference internal" href="#id6">0.8</a></li>
+<li><a class="reference internal" href="#id7">0.7.2</a></li>
+<li><a class="reference internal" href="#id8">0.7.1</a></li>
+<li><a class="reference internal" href="#id9">0.7.0</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="index.html"
+ title="previous chapter">Altgraph - A basic graph library</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="license.html"
+ title="next chapter">License</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Altgraph - A basic graph library"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html>
\ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/core.html b/python/altgraph/doc/_build/html/core.html
new file mode 100644
index 000000000..3d2ee1737
--- /dev/null
+++ b/python/altgraph/doc/_build/html/core.html
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph — A Python Graph Library &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph.Graph — Basic directional graphs" href="graph.html" />
+ <link rel="prev" title="License" href="license.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graph.html" title="altgraph.Graph — Basic directional graphs"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph">
+<span id="altgraph-a-python-graph-library"></span><h1><a class="reference internal" href="#module-altgraph" title="altgraph: A directional graph for python"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph</span></tt></a> &#8212; A Python Graph Library<a class="headerlink" href="#module-altgraph" title="Permalink to this headline">¶</a></h1>
+<p>altgraph is a fork of <a class="reference external" href="http://pygraphlib.sourceforge.net">graphlib</a> tailored
+to use newer Python 2.3+ features, including additional support used by the
+py2app suite (modulegraph and macholib, specifically).</p>
+<p>altgraph is a Python-based graph (network) representation and manipulation package.
+It started out as an extension to the <a class="reference external" href="http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html">graph_lib module</a>
+written by Nathan Denny; it has since been significantly optimized and expanded.</p>
+<p>The <a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">altgraph.Graph.Graph</span></tt></a> class is loosely modeled after the <a class="reference external" href="http://www.algorithmic-solutions.com/enleda.htm">LEDA</a>
+(Library of Efficient Datatypes) representation. The library
+includes methods for constructing graphs, BFS and DFS traversals,
+topological sort, finding connected components, and shortest paths, as well as a
+number of graph statistics functions. The library can also visualize graphs
+via <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a>.</p>
+<dl class="exception">
+<dt id="altgraph.GraphError">
+<em class="property">exception </em><tt class="descclassname">altgraph.</tt><tt class="descname">GraphError</tt><a class="headerlink" href="#altgraph.GraphError" title="Permalink to this definition">¶</a></dt>
+<dd><p>Exception raised when methods are called with bad values or
+an inconsistent state.</p>
+</dd></dl>
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="license.html"
+ title="previous chapter">License</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="graph.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.Graph</span></tt> &#8212; Basic directional graphs</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graph.html" title="altgraph.Graph — Basic directional graphs"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html>
\ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/dot.html b/python/altgraph/doc/_build/html/dot.html
new file mode 100644
index 000000000..87fabc077
--- /dev/null
+++ b/python/altgraph/doc/_build/html/dot.html
@@ -0,0 +1,332 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph.Dot — Interface to the dot language &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="prev" title="altgraph.GraphUtil — Utility functions" href="graphutil.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphutil.html" title="altgraph.GraphUtil — Utility functions"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.Dot">
+<span id="altgraph-dot-interface-to-the-dot-language"></span><h1><a class="reference internal" href="#module-altgraph.Dot" title="altgraph.Dot: Interface to the dot language as used by Graphviz.."><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.Dot</span></tt></a> &#8212; Interface to the dot language<a class="headerlink" href="#module-altgraph.Dot" title="Permalink to this headline">¶</a></h1>
+<p>The <a class="reference internal" href="#module-altgraph.Dot" title="altgraph.Dot: Interface to the dot language as used by Graphviz.."><tt class="xref py py-mod docutils literal"><span class="pre">Dot</span></tt></a> module provides a simple interface to the
+file format used in the <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a> program. The module is intended to
+offload the most tedious part of the process (the <strong>dot</strong> file generation)
+while transparently exposing most of its features.</p>
+<p>To display the graphs or to generate image files the <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a>
+package needs to be installed on the system; moreover, the <strong class="command">dot</strong> and <strong class="command">dotty</strong> programs must
+be accessible in the program path so that they can be run from processes spawned
+within the module.</p>
+<div class="section" id="example-usage">
+<h2>Example usage<a class="headerlink" href="#example-usage" title="Permalink to this headline">¶</a></h2>
+<p>Here is a typical usage:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="kn">from</span> <span class="nn">altgraph</span> <span class="kn">import</span> <span class="n">Graph</span><span class="p">,</span> <span class="n">Dot</span>
+
+<span class="c"># create a graph</span>
+<span class="n">edges</span> <span class="o">=</span> <span class="p">[</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span><span class="mi">2</span><span class="p">),</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span><span class="mi">3</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span><span class="mi">4</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span><span class="mi">5</span><span class="p">),</span> <span class="p">(</span><span class="mi">4</span><span class="p">,</span><span class="mi">5</span><span class="p">),</span> <span class="p">(</span><span class="mi">5</span><span class="p">,</span><span class="mi">4</span><span class="p">)</span> <span class="p">]</span>
+<span class="n">graph</span> <span class="o">=</span> <span class="n">Graph</span><span class="o">.</span><span class="n">Graph</span><span class="p">(</span><span class="n">edges</span><span class="p">)</span>
+
+<span class="c"># create a dot representation of the graph</span>
+<span class="n">dot</span> <span class="o">=</span> <span class="n">Dot</span><span class="o">.</span><span class="n">Dot</span><span class="p">(</span><span class="n">graph</span><span class="p">)</span>
+
+<span class="c"># display the graph</span>
+<span class="n">dot</span><span class="o">.</span><span class="n">display</span><span class="p">()</span>
+
+<span class="c"># save the dot representation into the mydot.dot file</span>
+<span class="n">dot</span><span class="o">.</span><span class="n">save_dot</span><span class="p">(</span><span class="n">file_name</span><span class="o">=</span><span class="s">&#39;mydot.dot&#39;</span><span class="p">)</span>
+
+<span class="c"># save dot file as gif image into the graph.gif file</span>
+<span class="n">dot</span><span class="o">.</span><span class="n">save_img</span><span class="p">(</span><span class="n">file_name</span><span class="o">=</span><span class="s">&#39;graph&#39;</span><span class="p">,</span> <span class="n">file_type</span><span class="o">=</span><span class="s">&#39;gif&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="directed-graph-and-non-directed-graph">
+<h2>Directed graph and non-directed graph<a class="headerlink" href="#directed-graph-and-non-directed-graph" title="Permalink to this headline">¶</a></h2>
+<p>The Dot class can be used for both directed and non-directed graphs
+by passing the <em>graphtype</em> parameter.</p>
+<p>Example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># create directed graph(default)</span>
+<span class="n">dot</span> <span class="o">=</span> <span class="n">Dot</span><span class="o">.</span><span class="n">Dot</span><span class="p">(</span><span class="n">graph</span><span class="p">,</span> <span class="n">graphtype</span><span class="o">=</span><span class="s">&quot;digraph&quot;</span><span class="p">)</span>
+
+<span class="c"># create non-directed graph</span>
+<span class="n">dot</span> <span class="o">=</span> <span class="n">Dot</span><span class="o">.</span><span class="n">Dot</span><span class="p">(</span><span class="n">graph</span><span class="p">,</span> <span class="n">graphtype</span><span class="o">=</span><span class="s">&quot;graph&quot;</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="customizing-the-output">
+<h2>Customizing the output<a class="headerlink" href="#customizing-the-output" title="Permalink to this headline">¶</a></h2>
+<p>The graph drawing process may be customized by passing
+valid <strong class="command">dot</strong> parameters for the nodes and edges. For a list of all
+parameters see the <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a> documentation.</p>
+<p>Example:</p>
+<div class="highlight-python"><div class="highlight"><pre># customizing the way the overall graph is drawn
+dot.style(size=&#39;10,10&#39;, rankdir=&#39;RL&#39;, page=&#39;5, 5&#39; , ranksep=0.75)
+
+# customizing node drawing
+dot.node_style(1, label=&#39;BASE_NODE&#39;,shape=&#39;box&#39;, color=&#39;blue&#39; )
+dot.node_style(2, style=&#39;filled&#39;, fillcolor=&#39;red&#39;)
+
+# customizing edge drawing
+dot.edge_style(1, 2, style=&#39;dotted&#39;)
+dot.edge_style(3, 5, arrowhead=&#39;dot&#39;, label=&#39;binds&#39;, labelangle=&#39;90&#39;)
+dot.edge_style(4, 5, arrowsize=2, style=&#39;bold&#39;)
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">dotty (invoked via <a class="reference internal" href="#altgraph.Dot.Dot.display" title="altgraph.Dot.Dot.display"><tt class="xref py py-meth docutils literal"><span class="pre">display()</span></tt></a>) may not be able to
+display all graphics styles. To verify the output save it to an image
+file and look at it that way.</p>
+</div>
+</div>
+<div class="section" id="valid-attributes">
+<h2>Valid attributes<a class="headerlink" href="#valid-attributes" title="Permalink to this headline">¶</a></h2>
+<ul>
+<li><p class="first">dot styles, passed via the <a class="reference internal" href="#altgraph.Dot.Dot.style" title="altgraph.Dot.Dot.style"><tt class="xref py py-meth docutils literal"><span class="pre">Dot.style()</span></tt></a> method:</p>
+<div class="highlight-python"><div class="highlight"><pre>rankdir = &#39;LR&#39; (draws the graph horizontally, left to right)
+ranksep = number (rank separation in inches)
+</pre></div>
+</div>
+</li>
+<li><p class="first">node attributes, passed via the <a class="reference internal" href="#altgraph.Dot.Dot.node_style" title="altgraph.Dot.Dot.node_style"><tt class="xref py py-meth docutils literal"><span class="pre">Dot.node_style()</span></tt></a> method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="n">style</span> <span class="o">=</span> <span class="s">&#39;filled&#39;</span> <span class="o">|</span> <span class="s">&#39;invisible&#39;</span> <span class="o">|</span> <span class="s">&#39;diagonals&#39;</span> <span class="o">|</span> <span class="s">&#39;rounded&#39;</span>
+<span class="n">shape</span> <span class="o">=</span> <span class="s">&#39;box&#39;</span> <span class="o">|</span> <span class="s">&#39;ellipse&#39;</span> <span class="o">|</span> <span class="s">&#39;circle&#39;</span> <span class="o">|</span> <span class="s">&#39;point&#39;</span> <span class="o">|</span> <span class="s">&#39;triangle&#39;</span>
+</pre></div>
+</div>
+</li>
+<li><p class="first">edge attributes, passed via the <tt class="xref py py-meth docutils literal"><span class="pre">Dot.edge_style()</span></tt> method:</p>
+<div class="highlight-python"><div class="highlight"><pre>style = &#39;dashed&#39; | &#39;dotted&#39; | &#39;solid&#39; | &#39;invis&#39; | &#39;bold&#39;
+arrowhead = &#39;box&#39; | &#39;crow&#39; | &#39;diamond&#39; | &#39;dot&#39; | &#39;inv&#39; | &#39;none&#39; | &#39;tee&#39; | &#39;vee&#39;
+weight = number (the larger the number the closer the nodes will be)
+</pre></div>
+</div>
+</li>
+<li><p class="first">valid <a class="reference external" href="http://www.research.att.com/~erg/graphviz/info/colors.html">graphviz colors</a></p>
+</li>
+<li><p class="first">for more details on how to control the graph drawing process see the
+<a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/refs.html">graphviz reference</a>.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="class-interface">
+<h2>Class interface<a class="headerlink" href="#class-interface" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="altgraph.Dot.Dot">
+<em class="property">class </em><tt class="descclassname">altgraph.Dot.</tt><tt class="descname">Dot</tt><big>(</big><em>graph</em><span class="optional">[</span>, <em>nodes</em><span class="optional">[</span>, <em>edgefn</em><span class="optional">[</span>, <em>nodevisitor</em><span class="optional">[</span>, <em>edgevisitor</em><span class="optional">[</span>, <em>name</em><span class="optional">[</span>, <em>dot</em><span class="optional">[</span>, <em>dotty</em><span class="optional">[</span>, <em>neato</em><span class="optional">[</span>, <em>graphtype</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Dot.Dot" title="Permalink to this definition">¶</a></dt>
+<dd><p>Creates a new Dot generator based on the specified
+<a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a>. The Dot generator won&#8217;t reference
+the <em>graph</em> once it is constructed.</p>
+<p>If the <em>nodes</em> argument is present it is the list of nodes to include
+in the graph, otherwise all nodes in <em>graph</em> are included.</p>
+<p>If the <em>edgefn</em> argument is present it is a function that yields the
+nodes connected to another node; this defaults to
+<tt class="xref py py-meth docutils literal"><span class="pre">graph.out_nbr</span></tt>. The constructor won&#8217;t
+add edges to the dot file unless both the head and tail of the edge
+are in <em>nodes</em>.</p>
+<p>If the <em>name</em> is present it specifies the name of the graph in the resulting
+dot file. The default is <tt class="docutils literal"><span class="pre">&quot;G&quot;</span></tt>.</p>
+<p>The functions <em>nodevisitor</em> and <em>edgevisitor</em> return the default style
+for a given edge or node (both default to functions that return an empty
+style).</p>
+<p>The arguments <em>dot</em>, <em>dotty</em> and <em>neato</em> are used to pass the path to
+the corresponding <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a> command.</p>
+</dd></dl>
+
+<div class="section" id="updating-graph-attributes">
+<h3>Updating graph attributes<a class="headerlink" href="#updating-graph-attributes" title="Permalink to this headline">¶</a></h3>
+<dl class="method">
+<dt id="altgraph.Dot.Dot.style">
+<tt class="descclassname">Dot.</tt><tt class="descname">style</tt><big>(</big><em>**attr</em><big>)</big><a class="headerlink" href="#altgraph.Dot.Dot.style" title="Permalink to this definition">¶</a></dt>
+<dd><p>Sets the overall style (graph attributes) to the given attributes.</p>
+<p>See <a class="reference internal" href="#valid-attributes">Valid Attributes</a> for more information about the attributes.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.Dot.node_style">
+<tt class="descclassname">Dot.</tt><tt class="descname">node_style</tt><big>(</big><em>node</em>, <em>**attr</em><big>)</big><a class="headerlink" href="#altgraph.Dot.Dot.node_style" title="Permalink to this definition">¶</a></dt>
+<dd><p>Sets the style for <em>node</em> to the given attributes.</p>
+<p>This method will add <em>node</em> to the graph when it isn&#8217;t already
+present.</p>
+<p>See <a class="reference internal" href="#valid-attributes">Valid Attributes</a> for more information about the attributes.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.Dot.all_node_style">
+<tt class="descclassname">Dot.</tt><tt class="descname">all_node_style</tt><big>(</big><em>**attr</em><big>)</big><a class="headerlink" href="#altgraph.Dot.Dot.all_node_style" title="Permalink to this definition">¶</a></dt>
+<dd><p>Replaces the current style for all nodes</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.edge_style">
+<tt class="descclassname">altgraph.Dot.</tt><tt class="descname">edge_style</tt><big>(</big><em>head</em>, <em>tail</em>, <em>**attr</em><big>)</big><a class="headerlink" href="#altgraph.Dot.edge_style" title="Permalink to this definition">¶</a></dt>
+<dd><p>Sets the style of an edge to the given attributes. The edge will
+be added to the graph when it isn&#8217;t already present, but <em>head</em>
+and <em>tail</em> must both be valid nodes.</p>
+<p>See <a class="reference internal" href="#valid-attributes">Valid Attributes</a> for more information about the attributes.</p>
+</dd></dl>
+
+</div>
+<div class="section" id="emitting-output">
+<h3>Emitting output<a class="headerlink" href="#emitting-output" title="Permalink to this headline">¶</a></h3>
+<dl class="method">
+<dt id="altgraph.Dot.Dot.display">
+<tt class="descclassname">Dot.</tt><tt class="descname">display</tt><big>(</big><span class="optional">[</span><em>mode</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Dot.Dot.display" title="Permalink to this definition">¶</a></dt>
+<dd><p>Displays the current graph via dotty.</p>
+<p>If the <em>mode</em> is <tt class="docutils literal"><span class="pre">&quot;neato&quot;</span></tt> the dot file is processed with
+the neato command before displaying.</p>
+<p>This method won&#8217;t return until the dotty command exits.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.save_dot">
+<tt class="descclassname">altgraph.Dot.</tt><tt class="descname">save_dot</tt><big>(</big><em>filename</em><big>)</big><a class="headerlink" href="#altgraph.Dot.save_dot" title="Permalink to this definition">¶</a></dt>
+<dd><p>Saves the current graph representation into the given file.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>For backward compatibility this method can also
+be called without an argument; it will then write the graph
+to a fixed filename (stored in the attribute <tt class="xref py py-data docutils literal"><span class="pre">Graph.temp_dot</span></tt>).</p>
+<p class="last">This feature is deprecated and should not be used.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.save_image">
+<tt class="descclassname">altgraph.Dot.</tt><tt class="descname">save_image</tt><big>(</big><em>file_name</em><span class="optional">[</span>, <em>file_type</em><span class="optional">[</span>, <em>mode</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Dot.save_image" title="Permalink to this definition">¶</a></dt>
+<dd><p>Saves the current graph representation as an image file. The output
+is written into a file whose basename is <em>file_name</em> and whose suffix
+is <em>file_type</em>.</p>
+<p>The <em>file_type</em> specifies the type of file to write; the default
+is <tt class="docutils literal"><span class="pre">&quot;gif&quot;</span></tt>.</p>
+<p>If the <em>mode</em> is <tt class="docutils literal"><span class="pre">&quot;neato&quot;</span></tt> the dot file is processed with
+the neato command before the image is written.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>For backward compatibility this method can also
+be called without an argument; it will then write the graph
+with a fixed basename (<tt class="docutils literal"><span class="pre">&quot;out&quot;</span></tt>).</p>
+<p class="last">This feature is deprecated and should not be used.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.iterdot">
+<tt class="descclassname">altgraph.Dot.</tt><tt class="descname">iterdot</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Dot.iterdot" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yields all lines of a <a class="reference external" href="http://www.research.att.com/sw/tools/graphviz/">graphviz</a> input file (including line endings).</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Dot.__iter__">
+<tt class="descclassname">altgraph.Dot.</tt><tt class="descname">__iter__</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Dot.__iter__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Alias for the <a class="reference internal" href="#altgraph.Dot.iterdot" title="altgraph.Dot.iterdot"><tt class="xref py py-meth docutils literal"><span class="pre">iterdot()</span></tt></a> method.</p>
+</dd></dl>
+
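+<p>Emitting output, as a minimal sketch using the method names documented above
+(file names are illustrative; the graphviz tools must be installed for
+<tt class="docutils literal"><span class="pre">save_image()</span></tt> and <tt class="docutils literal"><span class="pre">display()</span></tt>):</p>
+<div class="highlight-python"><pre>
+dot.save_dot("graph.dot")                  # write the dot source
+dot.save_image("graph", file_type="png")   # writes graph.png
+
+for line in dot:                           # __iter__() is an alias for iterdot()
+    print line,                            # lines keep their line endings
+</pre></div>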
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#"><tt class="docutils literal"><span class="pre">altgraph.Dot</span></tt> &#8212; Interface to the dot language</a><ul>
+<li><a class="reference internal" href="#example-usage">Example usage</a></li>
+<li><a class="reference internal" href="#directed-graph-and-non-directed-graph">Directed graph and non-directed graph</a></li>
+<li><a class="reference internal" href="#customizing-the-output">Customizing the output</a></li>
+<li><a class="reference internal" href="#valid-attributes">Valid attributes</a></li>
+<li><a class="reference internal" href="#class-interface">Class interface</a><ul>
+<li><a class="reference internal" href="#updating-graph-attributes">Updating graph attributes</a></li>
+<li><a class="reference internal" href="#emitting-output">Emitting output</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="graphutil.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphUtil</span></tt> &#8212; Utility functions</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphutil.html" title="altgraph.GraphUtil — Utility functions"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/genindex.html b/python/altgraph/doc/_build/html/genindex.html
new file mode 100644
index 000000000..16977c712
--- /dev/null
+++ b/python/altgraph/doc/_build/html/genindex.html
@@ -0,0 +1,604 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Index &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#_"><strong>_</strong></a>
+ | <a href="#A"><strong>A</strong></a>
+ | <a href="#B"><strong>B</strong></a>
+ | <a href="#C"><strong>C</strong></a>
+ | <a href="#D"><strong>D</strong></a>
+ | <a href="#E"><strong>E</strong></a>
+ | <a href="#F"><strong>F</strong></a>
+ | <a href="#G"><strong>G</strong></a>
+ | <a href="#H"><strong>H</strong></a>
+ | <a href="#I"><strong>I</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#N"><strong>N</strong></a>
+ | <a href="#O"><strong>O</strong></a>
+ | <a href="#R"><strong>R</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ | <a href="#T"><strong>T</strong></a>
+
+</div>
+<h2 id="_">_</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.__contains__">__contains__() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.__contains__">(altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></dd>
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.__iter__">__iter__() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="dot.html#altgraph.Dot.__iter__">(in module altgraph.Dot)</a>
+ </dt>
+
+ </dl></dd>
+ </dl></td>
+</tr></table>
+
+<h2 id="A">A</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.add_edge">add_edge() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.add_node">add_node() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.addNode">addNode() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.all_degree">all_degree() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.all_edges">all_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.all_nbrs">all_nbrs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.Dot.all_node_style">all_node_style() (altgraph.Dot.Dot method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="core.html#module-altgraph">altgraph (module)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#module-altgraph.Dot">altgraph.Dot (module)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#module-altgraph.Graph">altgraph.Graph (module)</a>
+ </dt>
+
+
+ <dt><a href="graphalgo.html#module-altgraph.GraphAlgo">altgraph.GraphAlgo (module)</a>
+ </dt>
+
+
+ <dt><a href="graphstat.html#module-altgraph.GraphStat">altgraph.GraphStat (module)</a>
+ </dt>
+
+
+ <dt><a href="graphutil.html#module-altgraph.GraphUtil">altgraph.GraphUtil (module)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#module-altgraph.ObjectGraph">altgraph.ObjectGraph (module)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="B">B</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.back_bfs">back_bfs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.back_bfs_subgraph">back_bfs_subgraph() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.back_topo_sort">back_topo_sort() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="C">C</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.clust_coef">clust_coef() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.connected">connected() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.createNode">createNode() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.createReferences">createReferences() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="D">D</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graphstat.html#altgraph.GraphStat.degree_dist">degree_dist() (in module altgraph.GraphStat)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.describe_edge">describe_edge() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.describe_node">describe_node() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graphalgo.html#altgraph.GraphAlgo.dijkstra">dijkstra() (in module altgraph.GraphAlgo)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.Dot.display">display() (altgraph.Dot.Dot method)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.Dot">Dot (class in altgraph.Dot)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="E">E</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.edge_by_id">edge_by_id() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.edge_by_node">edge_by_node() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.edge_data">edge_data() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.edge_list">edge_list() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.edge_style">edge_style() (in module altgraph.Dot)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="F">F</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graphutil.html#altgraph.GraphUtil.filter_stack">filter_stack() (in module altgraph.GraphUtil)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.filterStack">filterStack() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.findNode">findNode() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.flatten">flatten() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.forw_bfs">forw_bfs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.forw_bfs_subgraph">forw_bfs_subgraph() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.forw_topo_sort">forw_topo_sort() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="G">G</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graphutil.html#altgraph.GraphUtil.generate_random_graph">generate_random_graph() (in module altgraph.GraphUtil)</a>
+ </dt>
+
+
+ <dt><a href="graphutil.html#altgraph.GraphUtil.generate_scale_free_graph">generate_scale_free_graph() (in module altgraph.GraphUtil)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.get_edges">get_edges() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.get_hops">get_hops() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.getIdent">getIdent() (in module altgraph.ObjectGraph)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.getRawIdent">getRawIdent() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph">Graph (class in altgraph.Graph)</a>
+ </dt>
+
+
+ <dt><a href="core.html#altgraph.GraphError">GraphError</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="H">H</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.head">head() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.hidden_edge_list">hidden_edge_list() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.hidden_node_list">hidden_node_list() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.hide_edge">hide_edge() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.hide_node">hide_node() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="I">I</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.inc_degree">inc_degree() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.inc_edges">inc_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.inc_nbrs">inc_nbrs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.iterdata">iterdata() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.iterdfs">iterdfs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.iterdot">iterdot() (in module altgraph.Dot)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.msg">msg() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.msgin">msgin() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.msgout">msgout() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="N">N</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.node_data">node_data() (in module altgraph.Graph)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.node_list">node_list() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.Dot.node_style">node_style() (altgraph.Dot.Dot method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.nodes">nodes() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.number_of_edges">number_of_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.number_of_hidden_edges">number_of_hidden_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.number_of_hidden_nodes">number_of_hidden_nodes() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.number_of_nodes">number_of_nodes() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="O">O</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph">ObjectGraph (class in altgraph.ObjectGraph)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.debug">ObjectGraph.debug (in module altgraph.ObjectGraph)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.graph">ObjectGraph.graph (in module altgraph.ObjectGraph)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.out_degree">out_degree() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.out_edges">out_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.out_nbrs">out_nbrs() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="R">R</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.ObjectGraph.removeNode">removeNode() (altgraph.ObjectGraph.ObjectGraph method)</a>
+ </dt>
+
+
+ <dt><a href="objectgraph.html#altgraph.ObjectGraph.removeReference">removeReference() (in module altgraph.ObjectGraph)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.restore_all_edges">restore_all_edges() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.restore_all_nodes">restore_all_nodes() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.restore_edge">restore_edge() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.restore_node">restore_node() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="dot.html#altgraph.Dot.save_dot">save_dot() (in module altgraph.Dot)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.save_image">save_image() (in module altgraph.Dot)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graphalgo.html#altgraph.GraphAlgo.shortest_path">shortest_path() (in module altgraph.GraphAlgo)</a>
+ </dt>
+
+
+ <dt><a href="dot.html#altgraph.Dot.Dot.style">style() (altgraph.Dot.Dot method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="T">T</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="graph.html#altgraph.Graph.Graph.tail">tail() (altgraph.Graph.Graph method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+
+
+
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/graph.html b/python/altgraph/doc/_build/html/graph.html
new file mode 100644
index 000000000..72dd1cdef
--- /dev/null
+++ b/python/altgraph/doc/_build/html/graph.html
@@ -0,0 +1,491 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph.Graph — Basic directional graphs &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+    <link rel="next" title="altgraph.ObjectGraph — Graphs of objects with an identifier" href="objectgraph.html" />
+ <link rel="prev" title="altgraph — A Python Graph Library" href="core.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+          <a href="objectgraph.html" title="altgraph.ObjectGraph — Graphs of objects with an identifier"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="core.html" title="altgraph — A Python Graph Library"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.Graph">
+<span id="altgraph-graph-basic-directional-graphs"></span><h1><a class="reference internal" href="#module-altgraph.Graph" title="altgraph.Graph: Basic directional graphs."><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.Graph</span></tt></a> &#8212; Basic directional graphs<a class="headerlink" href="#module-altgraph.Graph" title="Permalink to this headline">¶</a></h1>
+<p>The module <a class="reference internal" href="#module-altgraph.Graph" title="altgraph.Graph: Basic directional graphs."><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.Graph</span></tt></a> provides a class <a class="reference internal" href="#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a> that
+represents a directed graph with <em>N</em> nodes and <em>E</em> edges.</p>
+<dl class="class">
+<dt id="altgraph.Graph.Graph">
+<em class="property">class </em><tt class="descclassname">altgraph.Graph.</tt><tt class="descname">Graph</tt><big>(</big><span class="optional">[</span><em>edges</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph" title="Permalink to this definition">¶</a></dt>
+<dd><p>Constructs a new empty <a class="reference internal" href="#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a> object. If the optional
+<em>edges</em> parameter is supplied, updates the graph by adding the
+specified edges.</p>
+<p>All of the elements in <em>edges</em> should be tuples with two or three
+elements. The first two elements of the tuple are the source and
+destination node of the edge, the optional third element is the
+edge data. The source and destination nodes are added to the graph
+when they aren&#8217;t already present.</p>
+</dd></dl>
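+<p>For example (a minimal sketch; the edge list is illustrative only):</p>
+<div class="highlight-python"><pre>
+from altgraph import Graph
+
+# Two-element tuples are plain edges, a third element is edge data
+graph = Graph.Graph([(1, 2), (2, 3), (3, 1, "data")])
+</pre></div>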
+
+<div class="section" id="node-related-methods">
+<h2>Node related methods<a class="headerlink" href="#node-related-methods" title="Permalink to this headline">¶</a></h2>
+<dl class="method">
+<dt id="altgraph.Graph.Graph.add_node">
+<tt class="descclassname">Graph.</tt><tt class="descname">add_node</tt><big>(</big><em>node</em><span class="optional">[</span>, <em>node_data</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.add_node" title="Permalink to this definition">¶</a></dt>
+<dd><p>Adds a new node to the graph if it is not already present. The new
+node must be a hashable object.</p>
+<p>Arbitrary data can be attached to the node via the optional <em>node_data</em>
+argument.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">the node also won&#8217;t be added to the graph when it is
+present but currently hidden.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.hide_node">
+<tt class="descclassname">Graph.</tt><tt class="descname">hide_node</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.hide_node" title="Permalink to this definition">¶</a></dt>
+<dd><p>Hides a <em>node</em> from the graph. The incoming and outgoing edges of
+the node will also be hidden.</p>
+<p>Raises <a class="reference internal" href="core.html#altgraph.GraphError" title="altgraph.GraphError"><tt class="xref py py-class docutils literal"><span class="pre">altgraph.GraphError</span></tt></a> when the node is not (visible)
+node of the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.restore_node">
+<tt class="descclassname">Graph.</tt><tt class="descname">restore_node</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.restore_node" title="Permalink to this definition">¶</a></dt>
+<dd><p>Restores a previously hidden <em>node</em>. The incoming and outgoing
+edges of the node are also restored.</p>
+<p>Raises <a class="reference internal" href="core.html#altgraph.GraphError" title="altgraph.GraphError"><tt class="xref py py-class docutils literal"><span class="pre">altgraph.GraphError</span></tt></a> when the node is not a hidden
+node of the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.restore_all_nodes">
+<tt class="descclassname">Graph.</tt><tt class="descname">restore_all_nodes</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.restore_all_nodes" title="Permalink to this definition">¶</a></dt>
+<dd><p>Restores all hidden nodes.</p>
+</dd></dl>
+
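+<p>Hiding and restoring, as a minimal sketch (continuing the example graph
+above; the node name and data are illustrative only):</p>
+<div class="highlight-python"><pre>
+graph.add_node("spam", node_data={"colour": "green"})
+graph.hide_node("spam")      # also hides its incoming and outgoing edges
+graph.restore_node("spam")   # brings the node and its edges back
+</pre></div>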
+<dl class="method">
+<dt id="altgraph.Graph.Graph.number_of_nodes">
+<tt class="descclassname">Graph.</tt><tt class="descname">number_of_nodes</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.number_of_nodes" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of visible nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.number_of_hidden_nodes">
+<tt class="descclassname">Graph.</tt><tt class="descname">number_of_hidden_nodes</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.number_of_hidden_nodes" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of hidden nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.node_list">
+<tt class="descclassname">Graph.</tt><tt class="descname">node_list</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.node_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list with all visible nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.hidden_node_list">
+<tt class="descclassname">Graph.</tt><tt class="descname">hidden_node_list</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.hidden_node_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list with all hidden nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.node_data">
+<tt class="descclassname">altgraph.Graph.</tt><tt class="descname">node_data</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.node_data" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the data associated with the <em>node</em> when it was
+added.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.describe_node">
+<tt class="descclassname">Graph.</tt><tt class="descname">describe_node</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.describe_node" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns <em>node</em>, the node&#8217;s data and the lists of outgoing
+and incoming edges for the node.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">the edge lists should not be modified, doing so
+can result in unpredicatable behavior.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.__contains__">
+<tt class="descclassname">Graph.</tt><tt class="descname">__contains__</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.__contains__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns True iff <em>node</em> is a node in the graph. This
+method is accessed through the <em>in</em> operator.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.__iter__">
+<tt class="descclassname">Graph.</tt><tt class="descname">__iter__</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.__iter__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield all nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.out_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">out_edges</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.out_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the list of outgoing edges for <em>node</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.inc_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">inc_edges</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.inc_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the list of incoming edges for <em>node</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.all_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">all_edges</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.all_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the list of incoming and outgoing edges for <em>node</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.out_degree">
+<tt class="descclassname">Graph.</tt><tt class="descname">out_degree</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.out_degree" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of outgoing edges for <em>node</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.inc_degree">
+<tt class="descclassname">Graph.</tt><tt class="descname">inc_degree</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.inc_degree" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of incoming edges for <em>node</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.all_degree">
+<tt class="descclassname">Graph.</tt><tt class="descname">all_degree</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.all_degree" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of edges (incoming or outgoing) for <em>node</em>.</p>
+</dd></dl>
+
+</div>
+<div class="section" id="edge-related-methods">
+<h2>Edge related methods<a class="headerlink" href="#edge-related-methods" title="Permalink to this headline">¶</a></h2>
+<dl class="method">
+<dt id="altgraph.Graph.Graph.add_edge">
+<tt class="descclassname">Graph.</tt><tt class="descname">add_edge</tt><big>(</big><em>head_id</em>, <em>tail_id</em><span class="optional">[</span>, <em>edge data</em><span class="optional">[</span>, <em>create_nodes</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.add_edge" title="Permalink to this definition">¶</a></dt>
+<dd><p>Adds a directed edge from <em>head_id</em> to <em>tail_id</em>. Arbitrary data can
+be added via <em>edge_data</em>. When <em>create_nodes</em> is <em>True</em> (the default),
+<em>head_id</em> and <em>tail_id</em> will be added to the graph when they aren&#8217;t
+already present.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.hide_edge">
+<tt class="descclassname">Graph.</tt><tt class="descname">hide_edge</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.hide_edge" title="Permalink to this definition">¶</a></dt>
+<dd><p>Hides an edge from the graph. The edge may be unhidden at some later
+time.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.restore_edge">
+<tt class="descclassname">Graph.</tt><tt class="descname">restore_edge</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.restore_edge" title="Permalink to this definition">¶</a></dt>
+<dd><p>Restores a previously hidden <em>edge</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.restore_all_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">restore_all_edges</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.restore_all_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Restore all edges that were hidden before, except for edges
+referring to hidden nodes.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.edge_by_node">
+<tt class="descclassname">Graph.</tt><tt class="descname">edge_by_node</tt><big>(</big><em>head</em>, <em>tail</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.edge_by_node" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the edge ID for an edge from <em>head</em> to <em>tail</em>,
+or <a class="reference external" href="http://docs.python.org/library/constants.html#None" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">None</span></tt></a> when no such edge exists.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.edge_by_id">
+<tt class="descclassname">Graph.</tt><tt class="descname">edge_by_id</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.edge_by_id" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the head and tail of the <em>edge</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.edge_data">
+<tt class="descclassname">Graph.</tt><tt class="descname">edge_data</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.edge_data" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the data associated with the <em>edge</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.head">
+<tt class="descclassname">Graph.</tt><tt class="descname">head</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.head" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the head of an <em>edge</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.tail">
+<tt class="descclassname">Graph.</tt><tt class="descname">tail</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.tail" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the tail of an <em>edge</em>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.describe_edge">
+<tt class="descclassname">Graph.</tt><tt class="descname">describe_edge</tt><big>(</big><em>edge</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.describe_edge" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the <em>edge</em>, the associated data, its head and tail.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.number_of_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">number_of_edges</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.number_of_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of visible edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.number_of_hidden_edges">
+<tt class="descclassname">Graph.</tt><tt class="descname">number_of_hidden_edges</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.number_of_hidden_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the number of hidden edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.edge_list">
+<tt class="descclassname">Graph.</tt><tt class="descname">edge_list</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.edge_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a list with all visible edges in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.hidden_edge_list">
+<tt class="descclassname">Graph.</tt><tt class="descname">hidden_edge_list</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.hidden_edge_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a list with all hidden edges in the graph.</p>
+</dd></dl>
+
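+<p>Working with edges, as a minimal sketch (continuing the example graph
+above; the node and data values are illustrative only):</p>
+<div class="highlight-python"><pre>
+graph.add_edge(1, 4, edge_data="one-to-four")
+edge = graph.edge_by_node(1, 4)    # edge ID, or None when no such edge exists
+print graph.edge_data(edge)        # "one-to-four"
+print graph.head(edge), graph.tail(edge)
+</pre></div>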
+</div>
+<div class="section" id="graph-traversal">
+<h2>Graph traversal<a class="headerlink" href="#graph-traversal" title="Permalink to this headline">¶</a></h2>
+<dl class="method">
+<dt id="altgraph.Graph.Graph.out_nbrs">
+<tt class="descclassname">Graph.</tt><tt class="descname">out_nbrs</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.out_nbrs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list of all nodes connected by outgoing edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.inc_nbrs">
+<tt class="descclassname">Graph.</tt><tt class="descname">inc_nbrs</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.inc_nbrs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list of all nodes connected by incoming edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.all_nbrs">
+<tt class="descclassname">Graph.</tt><tt class="descname">all_nbrs</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.all_nbrs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a list of nodes connected by an incoming or outgoing edge.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.forw_topo_sort">
+<tt class="descclassname">Graph.</tt><tt class="descname">forw_topo_sort</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.forw_topo_sort" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list of nodes where the successors (based on outgoing
+edges) of any given node appear in the sequence after that node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.back_topo_sort">
+<tt class="descclassname">Graph.</tt><tt class="descname">back_topo_sort</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.back_topo_sort" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a list of nodes where the successors (based on incoming
+edges) of any given node appear in the sequence after that node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.forw_bfs_subgraph">
+<tt class="descclassname">Graph.</tt><tt class="descname">forw_bfs_subgraph</tt><big>(</big><em>start_id</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.forw_bfs_subgraph" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a subgraph consisting of the breadth first
+reachable nodes from <em>start_id</em> based on their outgoing edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.back_bfs_subgraph">
+<tt class="descclassname">Graph.</tt><tt class="descname">back_bfs_subgraph</tt><big>(</big><em>start_id</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.back_bfs_subgraph" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a subgraph consisting of the breadth first
+reachable nodes from <em>start_id</em> based on their incoming edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.iterdfs">
+<tt class="descclassname">Graph.</tt><tt class="descname">iterdfs</tt><big>(</big><em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">[</span>, <em>forward</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.iterdfs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield nodes in a depth first traversal starting at the <em>start</em>
+node.</p>
+<p>If <em>end</em> is specified, traversal stops when reaching that node.</p>
+<p>If <em>forward</em> is True (the default), edges are traversed in the forward
+direction; otherwise they are traversed in the reverse direction.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.iterdata">
+<tt class="descclassname">Graph.</tt><tt class="descname">iterdata</tt><big>(</big><em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">[</span>, <em>forward</em><span class="optional">[</span>, <em>condition</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.iterdata" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield the associated data for nodes in a depth first traversal
+starting at the <em>start</em> node. This method will not yield values for nodes
+without associated data.</p>
+<p>If <em>end</em> is specified, traversal stops when reaching that node.</p>
+<p>If <em>condition</em> is specified and the condition callable returns
+False for the associated data, this method will not yield the
+associated data and will not follow the edges for the node.</p>
+<p>If <em>forward</em> is True (the default), edges are traversed in the forward
+direction; otherwise they are traversed in the reverse direction.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.forw_bfs">
+<tt class="descclassname">Graph.</tt><tt class="descname">forw_bfs</tt><big>(</big><em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.forw_bfs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a list of nodes starting at <em>start</em> in some breadth first
+search order (following outgoing edges).</p>
+<p>When <em>end</em> is specified iteration stops at that node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.back_bfs">
+<tt class="descclassname">Graph.</tt><tt class="descname">back_bfs</tt><big>(</big><em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.back_bfs" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a list of nodes starting at <em>start</em> in some breadth first
+search order (following incoming edges).</p>
+<p>When <em>end</em> is specified iteration stops at that node.</p>
+</dd></dl>
+
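+<p>Basic traversal, as a minimal sketch (continuing the example graph above):</p>
+<div class="highlight-python"><pre>
+print graph.forw_bfs(1)        # breadth first order following outgoing edges
+for node in graph.iterdfs(1):  # depth first, forward direction by default
+    print node
+</pre></div>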
+<dl class="method">
+<dt id="altgraph.Graph.Graph.get_hops">
+<tt class="descclassname">Graph.</tt><tt class="descname">get_hops</tt><big>(</big><em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">[</span>, <em>forward</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.get_hops" title="Permalink to this definition">¶</a></dt>
+<dd><p>Computes the hop distance to all nodes centered around a specified node.</p>
+<p>First order neighbours are at hop 1, their neighbours are at hop 2, etc.
+Uses <a class="reference internal" href="#altgraph.Graph.Graph.forw_bfs" title="altgraph.Graph.Graph.forw_bfs"><tt class="xref py py-meth docutils literal"><span class="pre">forw_bfs()</span></tt></a> or <a class="reference internal" href="#altgraph.Graph.Graph.back_bfs" title="altgraph.Graph.Graph.back_bfs"><tt class="xref py py-meth docutils literal"><span class="pre">back_bfs()</span></tt></a> depending on the value of
+the forward parameter.</p>
+<p>If the distance between all neighbouring nodes is 1 the hop number
+corresponds to the shortest distance between the nodes.</p>
+<p>Typical usage:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">print</span> <span class="n">graph</span><span class="o">.</span><span class="n">get_hops</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="p">[(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">0</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">2</span><span class="p">),</span> <span class="p">(</span><span class="mi">5</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="p">(</span><span class="mi">7</span><span class="p">,</span> <span class="mi">4</span><span class="p">),</span> <span class="p">(</span><span class="mi">8</span><span class="p">,</span> <span class="mi">5</span><span class="p">)]</span>
+<span class="go"># node 1 is at 0 hops</span>
+<span class="go"># node 2 is at 1 hop</span>
+<span class="go"># ...</span>
+<span class="go"># node 8 is at 5 hops</span>
+</pre></div>
+</div>
+</dd></dl>
+
+</div>
+<div class="section" id="graph-statistics">
+<h2>Graph statistics<a class="headerlink" href="#graph-statistics" title="Permalink to this headline">¶</a></h2>
+<dl class="method">
+<dt id="altgraph.Graph.Graph.connected">
+<tt class="descclassname">Graph.</tt><tt class="descname">connected</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.connected" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns True iff every node in the graph can be reached from
+every other node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.Graph.Graph.clust_coef">
+<tt class="descclassname">Graph.</tt><tt class="descname">clust_coef</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.Graph.Graph.clust_coef" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns the local clustering coefficient of <em>node</em>.</p>
+<p>The local clustering coefficient is the ratio of the actual number
+of edges between the neighbours of <em>node</em> to the maximum possible number of
+edges between those nodes.</p>
+</dd></dl>
+
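+<p>For example (a minimal sketch, continuing the example graph above):</p>
+<div class="highlight-python"><pre>
+if graph.connected():
+    print "every node can reach every other node"
+print graph.clust_coef(1)   # local clustering coefficient of node 1
+</pre></div>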
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#"><tt class="docutils literal"><span class="pre">altgraph.Graph</span></tt> &#8212; Basic directional graphs</a><ul>
+<li><a class="reference internal" href="#node-related-methods">Node related methods</a></li>
+<li><a class="reference internal" href="#edge-related-methods">Edge related methods</a></li>
+<li><a class="reference internal" href="#graph-traversal">Graph traversal</a></li>
+<li><a class="reference internal" href="#graph-statistics">Graph statistics</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="core.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph</span></tt> &#8212; A Python Graph Library</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="objectgraph.html"
+                              title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt> &#8212; Graphs of objects with an identifier</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+          <a href="objectgraph.html" title="altgraph.ObjectGraph — Graphs of objects with an identifier"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="core.html" title="altgraph — A Python Graph Library"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/graphalgo.html b/python/altgraph/doc/_build/html/graphalgo.html
new file mode 100644
index 000000000..07800d977
--- /dev/null
+++ b/python/altgraph/doc/_build/html/graphalgo.html
@@ -0,0 +1,134 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph.GraphAlgo — Graph algorithms &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph.GraphStat — Functions providing various graph statistics" href="graphstat.html" />
+    <link rel="prev" title="altgraph.ObjectGraph — Graphs of objects with an identifier" href="objectgraph.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphstat.html" title="altgraph.GraphStat — Functions providing various graph statistics"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+          <a href="objectgraph.html" title="altgraph.ObjectGraph — Graphs of objects with an identifier"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.GraphAlgo">
+<span id="altgraph-graphalgo-graph-algorithms"></span><h1><a class="reference internal" href="#module-altgraph.GraphAlgo" title="altgraph.GraphAlgo: Basic graphs algoritms"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.GraphAlgo</span></tt></a> &#8212; Graph algorithms<a class="headerlink" href="#module-altgraph.GraphAlgo" title="Permalink to this headline">¶</a></h1>
+<dl class="function">
+<dt id="altgraph.GraphAlgo.dijkstra">
+<tt class="descclassname">altgraph.GraphAlgo.</tt><tt class="descname">dijkstra</tt><big>(</big><em>graph</em>, <em>start</em><span class="optional">[</span>, <em>end</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.GraphAlgo.dijkstra" title="Permalink to this definition">¶</a></dt>
+<dd><p>Dijkstra&#8217;s algorithm for shortest paths.</p>
+<p>Find shortest paths from the start node to all nodes nearer
+than or equal to the <em>end</em> node. The edge data is assumed to be the edge length.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">Dijkstra&#8217;s algorithm is only guaranteed to work correctly when all edge lengths are positive.
+This code does not verify this property for all edges (only the edges examined until the end
+vertex is reached), but will correctly compute shortest paths even for some graphs with negative
+edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake.</p>
+</div>
+</dd></dl>
+
+<dl class="function">
+<dt id="altgraph.GraphAlgo.shortest_path">
+<tt class="descclassname">altgraph.GraphAlgo.</tt><tt class="descname">shortest_path</tt><big>(</big><em>graph</em>, <em>start</em>, <em>end</em><big>)</big><a class="headerlink" href="#altgraph.GraphAlgo.shortest_path" title="Permalink to this definition">¶</a></dt>
+<dd><p>Find a single shortest path from the given start node to the given end node.
+The input has the same conventions as <a class="reference internal" href="#altgraph.GraphAlgo.dijkstra" title="altgraph.GraphAlgo.dijkstra"><tt class="xref py py-func docutils literal"><span class="pre">dijkstra()</span></tt></a>. The output is a list
+of the nodes in order along the shortest path.</p>
+</dd></dl>
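A minimal usage sketch of the two functions above; the graph, node names, and edge lengths are invented for illustration, and the edge data is read as the edge length:

```python
from altgraph.Graph import Graph
from altgraph import GraphAlgo

# Build a tiny weighted graph; the edge data is the edge length.
graph = Graph()
for head, tail, length in [("a", "b", 1), ("b", "c", 2), ("a", "c", 7)]:
    graph.add_edge(head, tail, edge_data=length)

distances = GraphAlgo.dijkstra(graph, "a", "c")   # e.g. {'a': 0, 'b': 1, 'c': 3}
path = GraphAlgo.shortest_path(graph, "a", "c")   # ['a', 'b', 'c']
```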
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="objectgraph.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt> &#8212; Graphs of objecs with an identifier</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="graphstat.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphStat</span></tt> &#8212; Functions providing various graph statistics</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphstat.html" title="altgraph.GraphStat — Functions providing various graph statistics"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="objectgraph.html" title="altgraph.ObjectGraph — Graphs of objecs with an identifier"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/graphstat.html b/python/altgraph/doc/_build/html/graphstat.html
new file mode 100644
index 000000000..49a89bf51
--- /dev/null
+++ b/python/altgraph/doc/_build/html/graphstat.html
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph.GraphStat — Functions providing various graph statistics &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph.GraphUtil — Utility functions" href="graphutil.html" />
+ <link rel="prev" title="altgraph.GraphAlgo — Graph algorithms" href="graphalgo.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphutil.html" title="altgraph.GraphUtil — Utility functions"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="graphalgo.html" title="altgraph.GraphAlgo — Graph algorithms"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.GraphStat">
+<span id="altgraph-graphstat-functions-providing-various-graph-statistics"></span><h1><a class="reference internal" href="#module-altgraph.GraphStat" title="altgraph.GraphStat: Functions providing various graph statistics"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.GraphStat</span></tt></a> &#8212; Functions providing various graph statistics<a class="headerlink" href="#module-altgraph.GraphStat" title="Permalink to this headline">¶</a></h1>
+<p>The module <a class="reference internal" href="#module-altgraph.GraphStat" title="altgraph.GraphStat: Functions providing various graph statistics"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.GraphStat</span></tt></a> provides function that calculate
+graph statistics. Currently there is only one such function, more may
+be added later.</p>
+<dl class="function">
+<dt id="altgraph.GraphStat.degree_dist">
+<tt class="descclassname">altgraph.GraphStat.</tt><tt class="descname">degree_dist</tt><big>(</big><em>graph</em><span class="optional">[</span>, <em>limits</em><span class="optional">[</span>, <em>bin_num</em><span class="optional">[</span>, <em>mode</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.GraphStat.degree_dist" title="Permalink to this definition">¶</a></dt>
+<dd><p>Groups the number of edges per node into <em>bin_num</em> bins
+and returns the list of those bins. Every item in the result
+is a tuple with the center of the bin and the number of items
+in that bin.</p>
+<p>When the <em>limits</em> argument is present it must be a tuple with
+the minimum and maximum number of edges that get binned (that
+is, when <em>limits</em> is <tt class="docutils literal"><span class="pre">(4,</span> <span class="pre">10)</span></tt> only nodes with between 4
+and 10 edges get counted).</p>
+<p>The <em>mode</em> argument is used to count incoming (<tt class="docutils literal"><span class="pre">'inc'</span></tt>) or
+outgoing (<tt class="docutils literal"><span class="pre">'out'</span></tt>) edges. The default is to count the outgoing
+edges.</p>
+</dd></dl>
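A sketch of the calling convention; the random graph is only there to have something to measure, and the keyword names follow the signature above:

```python
from altgraph import GraphStat, GraphUtil

graph = GraphUtil.generate_random_graph(24, 120)

# Ten (bin_center, count) tuples for the outgoing-degree distribution.
out_bins = GraphStat.degree_dist(graph, bin_num=10)

# Count incoming edges instead, only for nodes with between 4 and 10 of them.
inc_bins = GraphStat.degree_dist(graph, limits=(4, 10), mode='inc')
```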
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="graphalgo.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphAlgo</span></tt> &#8212; Graph algorithms</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="graphutil.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphUtil</span></tt> &#8212; Utility functions</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphutil.html" title="altgraph.GraphUtil — Utility functions"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="graphalgo.html" title="altgraph.GraphAlgo — Graph algorithms"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/graphutil.html b/python/altgraph/doc/_build/html/graphutil.html
new file mode 100644
index 000000000..774efcfdf
--- /dev/null
+++ b/python/altgraph/doc/_build/html/graphutil.html
@@ -0,0 +1,162 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>altgraph.GraphUtil — Utility functions &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph.Dot — Interface to the dot language" href="dot.html" />
+ <link rel="prev" title="altgraph.GraphStat — Functions providing various graph statistics" href="graphstat.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dot.html" title="altgraph.Dot — Interface to the dot language"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="graphstat.html" title="altgraph.GraphStat — Functions providing various graph statistics"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.GraphUtil">
+<span id="altgraph-graphutil-utility-functions"></span><h1><a class="reference internal" href="#module-altgraph.GraphUtil" title="altgraph.GraphUtil: Utility functions"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.GraphUtil</span></tt></a> &#8212; Utility functions<a class="headerlink" href="#module-altgraph.GraphUtil" title="Permalink to this headline">¶</a></h1>
+<p>The module <a class="reference internal" href="#module-altgraph.GraphUtil" title="altgraph.GraphUtil: Utility functions"><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.GraphUtil</span></tt></a> performs a number of more
+or less useful utility functions.</p>
+<dl class="function">
+<dt id="altgraph.GraphUtil.generate_random_graph">
+<tt class="descclassname">altgraph.GraphUtil.</tt><tt class="descname">generate_random_graph</tt><big>(</big><em>node_num, edge_num[, self_loops[, multi_edges]</em><big>)</big><a class="headerlink" href="#altgraph.GraphUtil.generate_random_graph" title="Permalink to this definition">¶</a></dt>
+<dd><p>Generates and returns a <a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a> instance
+with <em>node_num</em> nodes randomly connected by <em>edge_num</em> edges.</p>
+<p>When <em>self_loops</em> is present and True there can be edges that point from
+a node to itself.</p>
+<p>When <em>multi_edges</em> is present and True there can be duplicate edges.</p>
+<p>This function raises <tt class="xref py py-class docutils literal"><span class="pre">GraphError</span></tt> when
+a graph with the requested configuration cannot be created.</p>
+</dd></dl>
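A quick sketch of the call; the node and edge counts are arbitrary:

```python
from altgraph import GraphUtil

# 10 nodes randomly wired with 30 directed edges; self loops and duplicate
# edges are only produced when the corresponding flag is passed as True.
graph = GraphUtil.generate_random_graph(10, 30)
loopy = GraphUtil.generate_random_graph(10, 30, self_loops=True)
```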
+
+<dl class="function">
+<dt id="altgraph.GraphUtil.generate_scale_free_graph">
+<tt class="descclassname">altgraph.GraphUtil.</tt><tt class="descname">generate_scale_free_graph</tt><big>(</big><em>steps</em>, <em>growth_num</em><span class="optional">[</span>, <em>self_loops</em><span class="optional">[</span>, <em>multi_edges</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.GraphUtil.generate_scale_free_graph" title="Permalink to this definition">¶</a></dt>
+<dd><p>Generates and returns a <a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a> instance that
+will have <em>steps * growth_num</em> nodes and a scale-free (power-law)
+connectivity.</p>
+<p>Starting with a fully connected graph of <em>growth_num</em> nodes,
+at every step <em>growth_num</em> nodes are added to the graph and connected
+to existing nodes with a probability proportional to the degree of those
+existing nodes.</p>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p class="last">The current implementation is basically untested, although
+code inspection seems to indicate an implementation that is consistent
+with the description at
+<a class="reference external" href="http://mathworld.wolfram.com/Scale-FreeNetwork.html">Wolfram MathWorld</a></p>
+</div>
+</dd></dl>
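Given the warning above, the following is mostly a sketch of the signature; the numbers are arbitrary:

```python
from altgraph import GraphUtil

# 5 steps adding 3 nodes each; per the description above the result
# has steps * growth_num == 15 nodes with power-law connectivity.
graph = GraphUtil.generate_scale_free_graph(5, 3)
node_count = graph.number_of_nodes()
```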
+
+<dl class="function">
+<dt id="altgraph.GraphUtil.filter_stack">
+<tt class="descclassname">altgraph.GraphUtil.</tt><tt class="descname">filter_stack</tt><big>(</big><em>graph</em>, <em>head</em>, <em>filters</em><big>)</big><a class="headerlink" href="#altgraph.GraphUtil.filter_stack" title="Permalink to this definition">¶</a></dt>
+<dd><p>Perform a depth-first order walk of the graph starting at <em>head</em> and
+apply all filter functions in <em>filters</em> on the node data of the nodes
+found.</p>
+<p>Returns (<em>visited</em>, <em>removes</em>, <em>orphans</em>), where</p>
+<ul class="simple">
+<li><em>visited</em>: the set of visited nodes</li>
+<li><em>removes</em>: the list of nodes whose node data doesn&#8217;t match
+all <em>filters</em>.</li>
+<li><em>orphans</em>: the list of tuples (<em>last_good</em>, <em>node</em>), where
+<em>node</em> is not in <em>removes</em> but at least one of the nodes connected
+to it by an incoming edge is. <em>last_good</em> is the
+closest upstream node that is not in <em>removes</em>.</li>
+</ul>
+</dd></dl>
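A sketch of how the filters apply to node data; the graph and the predicate are invented for illustration:

```python
from altgraph.Graph import Graph
from altgraph.GraphUtil import filter_stack

graph = Graph()
for name in ("root", "a", "b", "c"):
    graph.add_node(name, node_data=name)
for head, tail in [("root", "a"), ("a", "b"), ("b", "c")]:
    graph.add_edge(head, tail)

# Reject nodes whose data is "b"; "c" then only has a removed predecessor.
visited, removes, orphans = filter_stack(
    graph, "root", [lambda data: data != "b"])
# e.g. removes == ["b"] and orphans == [("a", "c")]
```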
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="graphstat.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphStat</span></tt> &#8212; Functions providing various graph statistics</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="dot.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.Dot</span></tt> &#8212; Interface to the dot language</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dot.html" title="altgraph.Dot — Interface to the dot language"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="graphstat.html" title="altgraph.GraphStat — Functions providing various graph statistics"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/index.html b/python/altgraph/doc/_build/html/index.html
new file mode 100644
index 000000000..83c8c9a25
--- /dev/null
+++ b/python/altgraph/doc/_build/html/index.html
@@ -0,0 +1,142 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Altgraph - A basic graph library &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="#" />
+ <link rel="next" title="Release history" href="changelog.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ accesskey="N">next</a> |</li>
+ <li><a href="#">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="altgraph-a-basic-graph-library">
+<h1>Altgraph - A basic graph library<a class="headerlink" href="#altgraph-a-basic-graph-library" title="Permalink to this headline">¶</a></h1>
+<p>altgraph is a fork of graphlib: a graph (network) package for constructing
+graphs, BFS and DFS traversals, topological sort, shortest paths, etc., with
+graphviz output.</p>
+<p>The primary users of this package are <a class="reference external" href="http://pypi.python.org/pypi/macholib">macholib</a> and <a class="reference external" href="http://pypi.python.org/pypi/modulegraph">modulegraph</a>.</p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html">Release history</a></li>
+<li class="toctree-l1"><a class="reference internal" href="license.html">License</a></li>
+<li class="toctree-l1"><a class="reference internal" href="core.html"><tt class="docutils literal"><span class="pre">altgraph</span></tt> &#8212; A Python Graph Library</a></li>
+<li class="toctree-l1"><a class="reference internal" href="graph.html"><tt class="docutils literal"><span class="pre">altgraph.Graph</span></tt> &#8212; Basic directional graphs</a></li>
+<li class="toctree-l1"><a class="reference internal" href="objectgraph.html"><tt class="docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt> &#8212; Graphs of objecs with an identifier</a></li>
+<li class="toctree-l1"><a class="reference internal" href="graphalgo.html"><tt class="docutils literal"><span class="pre">altgraph.GraphAlgo</span></tt> &#8212; Graph algorithms</a></li>
+<li class="toctree-l1"><a class="reference internal" href="graphstat.html"><tt class="docutils literal"><span class="pre">altgraph.GraphStat</span></tt> &#8212; Functions providing various graph statistics</a></li>
+<li class="toctree-l1"><a class="reference internal" href="graphutil.html"><tt class="docutils literal"><span class="pre">altgraph.GraphUtil</span></tt> &#8212; Utility functions</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dot.html"><tt class="docutils literal"><span class="pre">altgraph.Dot</span></tt> &#8212; Interface to the dot language</a></li>
+</ul>
+</div>
+<div class="section" id="online-resources">
+<h2>Online Resources<a class="headerlink" href="#online-resources" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference external" href="http://bitbucket.org/ronaldoussoren/altgraph/">Sourcecode repository on bitbucket</a></li>
+<li><a class="reference external" href="http://bitbucket.org/ronaldoussoren/altgraph/issues">The issue tracker</a></li>
+</ul>
+</div>
+<div class="section" id="indices-and-tables">
+<h2>Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference internal" href="genindex.html"><em>Index</em></a></li>
+<li><a class="reference internal" href="py-modindex.html"><em>Module Index</em></a></li>
+<li><a class="reference internal" href="search.html"><em>Search Page</em></a></li>
+</ul>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="#">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Altgraph - A basic graph library</a><ul>
+<li><a class="reference internal" href="#online-resources">Online Resources</a></li>
+<li><a class="reference internal" href="#indices-and-tables">Indices and tables</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Next topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="next chapter">Release history</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ >next</a> |</li>
+ <li><a href="#">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/license.html b/python/altgraph/doc/_build/html/license.html
new file mode 100644
index 000000000..546f6e66a
--- /dev/null
+++ b/python/altgraph/doc/_build/html/license.html
@@ -0,0 +1,136 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>License &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph — A Python Graph Library" href="core.html" />
+ <link rel="prev" title="Release history" href="changelog.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="core.html" title="altgraph — A Python Graph Library"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="license">
+<h1>License<a class="headerlink" href="#license" title="Permalink to this headline">¶</a></h1>
+<p>Copyright (c) 2004 Istvan Albert unless otherwise noted.</p>
+<p>Parts are copyright (c) Bob Ippolito</p>
+<p>Parts are copyright (c) 2010-2014 Ronald Oussoren</p>
+<div class="section" id="mit-license">
+<h2>MIT License<a class="headerlink" href="#mit-license" title="Permalink to this headline">¶</a></h2>
+<p>Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the &#8220;Software&#8221;), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.</p>
+<p>THE SOFTWARE IS PROVIDED &#8220;AS IS&#8221;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">License</a><ul>
+<li><a class="reference internal" href="#mit-license">MIT License</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="previous chapter">Release history</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="core.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph</span></tt> &#8212; A Python Graph Library</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="core.html" title="altgraph — A Python Graph Library"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/objectgraph.html b/python/altgraph/doc/_build/html/objectgraph.html
new file mode 100644
index 000000000..c9879f656
--- /dev/null
+++ b/python/altgraph/doc/_build/html/objectgraph.html
@@ -0,0 +1,283 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+    <title>altgraph.ObjectGraph — Graphs of objects with an identifier &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <link rel="next" title="altgraph.GraphAlgo — Graph algorithms" href="graphalgo.html" />
+ <link rel="prev" title="altgraph.Graph — Basic directional graphs" href="graph.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphalgo.html" title="altgraph.GraphAlgo — Graph algorithms"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="graph.html" title="altgraph.Graph — Basic directional graphs"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-altgraph.ObjectGraph">
+<span id="altgraph-objectgraph-graphs-of-objecs-with-an-identifier"></span><h1><a class="reference internal" href="#module-altgraph.ObjectGraph" title="altgraph.ObjectGraph: A graph of objects that have a &quot;graphident&quot; attribute."><tt class="xref py py-mod docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt></a> &#8212; Graphs of objecs with an identifier<a class="headerlink" href="#module-altgraph.ObjectGraph" title="Permalink to this headline">¶</a></h1>
+<dl class="class">
+<dt id="altgraph.ObjectGraph.ObjectGraph">
+<em class="property">class </em><tt class="descclassname">altgraph.ObjectGraph.</tt><tt class="descname">ObjectGraph</tt><big>(</big><span class="optional">[</span><em>graph</em><span class="optional">[</span>, <em>debug</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph" title="Permalink to this definition">¶</a></dt>
+<dd><p>A graph of objects that have a &#8220;graphident&#8221; attribute. The
+value of this attribute is the key for the object in the
+graph.</p>
+<p>The optional <em>graph</em> is a previously constructed
+<a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a>.</p>
+<p>The optional <em>debug</em> level controls the amount of debug output
+(see <a class="reference internal" href="#altgraph.ObjectGraph.ObjectGraph.msg" title="altgraph.ObjectGraph.ObjectGraph.msg"><tt class="xref py py-meth docutils literal"><span class="pre">msg()</span></tt></a>, <a class="reference internal" href="#altgraph.ObjectGraph.ObjectGraph.msgin" title="altgraph.ObjectGraph.ObjectGraph.msgin"><tt class="xref py py-meth docutils literal"><span class="pre">msgin()</span></tt></a> and <a class="reference internal" href="#altgraph.ObjectGraph.ObjectGraph.msgout" title="altgraph.ObjectGraph.ObjectGraph.msgout"><tt class="xref py py-meth docutils literal"><span class="pre">msgout()</span></tt></a>).</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">the altgraph library does not generate output, the
+debug attribute and message methods are present for use
+by subclasses.</p>
+</div>
+</dd></dl>
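A minimal sketch of the graphident convention; the Node class is invented here, and any object carrying a graphident attribute would do:

```python
from altgraph.ObjectGraph import ObjectGraph

# Any object with a "graphident" attribute can live in the graph;
# the attribute value is the node's key.
class Node:
    def __init__(self, graphident):
        self.graphident = graphident

og = ObjectGraph(debug=1)   # debug is optional and defaults to 0
og.addNode(Node("spam"))
```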
+
+<dl class="data">
+<dt id="altgraph.ObjectGraph.ObjectGraph.graph">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">graph</tt><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.graph" title="Permalink to this definition">¶</a></dt>
+<dd><p>An <a class="reference internal" href="graph.html#altgraph.Graph.Graph" title="altgraph.Graph.Graph"><tt class="xref py py-class docutils literal"><span class="pre">Graph</span></tt></a> object that contains
+the graph data.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.addNode">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">addNode</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.addNode" title="Permalink to this definition">¶</a></dt>
+<dd><p>Adds a <em>node</em> to the graph.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">re-adding a node that was previously removed
+using <a class="reference internal" href="#altgraph.ObjectGraph.ObjectGraph.removeNode" title="altgraph.ObjectGraph.ObjectGraph.removeNode"><tt class="xref py py-meth docutils literal"><span class="pre">removeNode()</span></tt></a> will reinstate the previously
+removed node.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.createNode">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">createNode</tt><big>(</big><em>self</em>, <em>cls</em>, <em>name</em>, <em>*args</em>, <em>**kwds</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.createNode" title="Permalink to this definition">¶</a></dt>
+<dd><p>Creates a new node using <tt class="docutils literal"><span class="pre">cls(*args,</span> <span class="pre">**kwds)</span></tt> and adds that
+node using <a class="reference internal" href="#altgraph.ObjectGraph.ObjectGraph.addNode" title="altgraph.ObjectGraph.ObjectGraph.addNode"><tt class="xref py py-meth docutils literal"><span class="pre">addNode()</span></tt></a>.</p>
+<p>Returns the newly created node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.removeNode">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">removeNode</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.removeNode" title="Permalink to this definition">¶</a></dt>
+<dd><p>Removes a <em>node</em> from the graph if it exists. The <em>node</em> argument
+is either a node object, or the graphident of a node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.createReferences">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">createReferences</tt><big>(</big><em>fromnode</em>, <em>tonode</em><span class="optional">[</span>, <em>edge_data</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.createReferences" title="Permalink to this definition">¶</a></dt>
+<dd><p>Creates a reference from <em>fromnode</em> to <em>tonode</em>. The optional
+<em>edge_data</em> is associated with the edge.</p>
+<p><em>Fromnode</em> and <em>tonode</em> can either be node objects or the graphident
+values for nodes.</p>
+</dd></dl>
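A sketch that creates nodes in place and wires them together; Node is the same invented class as in the sketch above:

```python
from altgraph.ObjectGraph import ObjectGraph

class Node:
    def __init__(self, graphident):
        self.graphident = graphident

og = ObjectGraph()
spam = og.createNode(Node, "spam")   # Node("spam") followed by addNode()
eggs = og.createNode(Node, "eggs")
og.createReference(spam, eggs)       # node objects or graphident values
og.removeReference(spam, eggs)
og.removeNode("eggs")                # removal by graphident also works
```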
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.removeReference">
+<tt class="descclassname">altgraph.ObjectGraph.</tt><tt class="descname">removeReference</tt><big>(</big><em>fromnode</em>, <em>tonode</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.removeReference" title="Permalink to this definition">¶</a></dt>
+<dd><p>Removes the reference from <em>fromnode</em> to <em>tonode</em> if it exists.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.getRawIdent">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">getRawIdent</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.getRawIdent" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns the <em>graphident</em> attribute of <em>node</em>, or the graph itself
+when <em>node</em> is <a class="reference external" href="http://docs.python.org/library/constants.html#None" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">None</span></tt></a>.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.getIdent">
+<tt class="descclassname">altgraph.ObjectGraph.</tt><tt class="descname">getIdent</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.getIdent" title="Permalink to this definition">¶</a></dt>
+<dd><p>Same as <tt class="xref py py-meth docutils literal"><span class="pre">getRawIdent()</span></tt>, but only if the node is part
+of the graph.</p>
+<p><em>Node</em> can either be an actual node object or the graphident of
+a node.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.findNode">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">findNode</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.findNode" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns the given node in the graph, or <tt class="xref py py-data docutils literal"><span class="pre">None</span></tt> when it cannot
+be found.</p>
+<p><em>Node</em> is either an object with a <em>graphident</em> attribute or
+the <em>graphident</em> attribute itself.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.__contains__">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">__contains__</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.__contains__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns True if <em>node</em> is a member of the graph. <em>Node</em> is either an
+object with a <em>graphident</em> attribute or the <em>graphident</em> attribute itself.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.flatten">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">flatten</tt><big>(</big><span class="optional">[</span><em>condition</em><span class="optional">[</span>, <em>start</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.flatten" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield all nodes that are entirely reachable by <em>condition</em>
+starting from the given <em>start</em> node or the graph root.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">objects are only reachable from the graph root
+when there is a reference from the root to the node
+(either directly or through another node)</p>
+</div>
+</dd></dl>
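A sketch of the reachability rule from the note above; Node is invented as before, and the None first argument stands for the graph root:

```python
from altgraph.ObjectGraph import ObjectGraph

class Node:
    def __init__(self, graphident):
        self.graphident = graphident

og = ObjectGraph()
spam = og.createNode(Node, "spam")
og.createReference(None, spam)       # reference from the graph root
lost = og.createNode(Node, "lost")   # never referenced from the root

reachable = [og.getIdent(n) for n in og.flatten()]   # e.g. ['spam']
```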
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.nodes">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">nodes</tt><big>(</big><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.nodes" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield all nodes in the graph.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.get_edges">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">get_edges</tt><big>(</big><em>node</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.get_edges" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns two iterators that yield the nodes reached by
+outgoing and incoming edges.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.filterStack">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">filterStack</tt><big>(</big><em>filters</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.filterStack" title="Permalink to this definition">¶</a></dt>
+<dd><p>Filter the ObjectGraph in-place by removing all edges to nodes that
+do not match every filter in the given filter list.</p>
+<p>Returns a tuple with the counts
+(<em>nodes_visited</em>, <em>nodes_removed</em>, <em>nodes_orphaned</em>).</p>
+</dd></dl>
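A sketch mirroring the filter_stack() example from altgraph.GraphUtil, but in-place and returning counts; the filter and Node class are invented:

```python
from altgraph.ObjectGraph import ObjectGraph

class Node:
    def __init__(self, graphident):
        self.graphident = graphident

og = ObjectGraph()
og.createReference(None, og.createNode(Node, "spam"))

# Edges to nodes rejected by any filter are removed in-place;
# the filters receive the node objects themselves.
visited, removed, orphaned = og.filterStack(
    [lambda node: node.graphident != "spam"])
```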
+
+<div class="section" id="debug-output">
+<h2>Debug output<a class="headerlink" href="#debug-output" title="Permalink to this headline">¶</a></h2>
+<dl class="data">
+<dt id="altgraph.ObjectGraph.ObjectGraph.debug">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">debug</tt><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.debug" title="Permalink to this definition">¶</a></dt>
+<dd><p>The current debug level.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.msg">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">msg</tt><big>(</big><em>level</em>, <em>text</em>, <em>*args</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.msg" title="Permalink to this definition">¶</a></dt>
+<dd><p>Print a debug message at the current indentation level when the current
+debug level is <em>level</em> or less.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.msgin">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">msgin</tt><big>(</big><em>level</em>, <em>text</em>, <em>*args</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.msgin" title="Permalink to this definition">¶</a></dt>
+<dd><p>Print a debug message when the current debug level is <em>level</em> or less,
+and increase the indentation level.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="altgraph.ObjectGraph.ObjectGraph.msgout">
+<tt class="descclassname">ObjectGraph.</tt><tt class="descname">msgout</tt><big>(</big><em>level</em>, <em>text</em>, <em>*args</em><big>)</big><a class="headerlink" href="#altgraph.ObjectGraph.ObjectGraph.msgout" title="Permalink to this definition">¶</a></dt>
+<dd><p>Decrease the indentation level and print a debug message when the
+current debug level is <em>level</em> or less.</p>
+</dd></dl>
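Since the library itself stays silent, these hooks are meant for subclasses; a sketch with an invented LoggingGraph subclass:

```python
from altgraph.ObjectGraph import ObjectGraph

class LoggingGraph(ObjectGraph):
    def addNode(self, node):
        self.msgin(2, "addNode", node.graphident)   # message, then deeper indent
        super(LoggingGraph, self).addNode(node)
        self.msgout(2, "added")                     # shallower indent, then message

g = LoggingGraph(debug=2)   # messages at level <= 2 are printed
```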
+
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#"><tt class="docutils literal"><span class="pre">altgraph.ObjectGraph</span></tt> &#8212; Graphs of objecs with an identifier</a><ul>
+<li><a class="reference internal" href="#debug-output">Debug output</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="graph.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">altgraph.Graph</span></tt> &#8212; Basic directional graphs</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="graphalgo.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">altgraph.GraphAlgo</span></tt> &#8212; Graph algorithms</a></p>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="graphalgo.html" title="altgraph.GraphAlgo — Graph algorithms"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="graph.html" title="altgraph.Graph — Basic directional graphs"
+ >previous</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/objects.inv b/python/altgraph/doc/_build/html/objects.inv
new file mode 100644
index 000000000..7c8a150fb
--- /dev/null
+++ b/python/altgraph/doc/_build/html/objects.inv
Binary files differ
diff --git a/python/altgraph/doc/_build/html/py-modindex.html b/python/altgraph/doc/_build/html/py-modindex.html
new file mode 100644
index 000000000..dd60a9b2c
--- /dev/null
+++ b/python/altgraph/doc/_build/html/py-modindex.html
@@ -0,0 +1,139 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Python Module Index &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="#" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+ <h1>Python Module Index</h1>
+
+ <div class="modindex-jumpbox">
+ <a href="#cap-a"><strong>a</strong></a>
+ </div>
+
+ <table class="indextable modindextable" cellspacing="0" cellpadding="2">
+ <tr class="pcap"><td></td><td>&nbsp;</td><td></td></tr>
+ <tr class="cap" id="cap-a"><td></td><td>
+ <strong>a</strong></td><td></td></tr>
+ <tr>
+ <td><img src="_static/minus.png" class="toggler"
+ id="toggle-1" style="display: none" alt="-" /></td>
+ <td>
+ <a href="core.html#module-altgraph"><tt class="xref">altgraph</tt></a></td><td>
+ <em>A directional graph for python</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="dot.html#module-altgraph.Dot"><tt class="xref">altgraph.Dot</tt></a></td><td>
+    <em>Interface to the dot language as used by Graphviz.</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="graph.html#module-altgraph.Graph"><tt class="xref">altgraph.Graph</tt></a></td><td>
+ <em>Basic directional graphs.</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="graphalgo.html#module-altgraph.GraphAlgo"><tt class="xref">altgraph.GraphAlgo</tt></a></td><td>
+    <em>Basic graph algorithms</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="graphstat.html#module-altgraph.GraphStat"><tt class="xref">altgraph.GraphStat</tt></a></td><td>
+ <em>Functions providing various graph statistics</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="graphutil.html#module-altgraph.GraphUtil"><tt class="xref">altgraph.GraphUtil</tt></a></td><td>
+ <em>Utility functions</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="objectgraph.html#module-altgraph.ObjectGraph"><tt class="xref">altgraph.ObjectGraph</tt></a></td><td>
+ <em>A graph of objects that have a &#34;graphident&#34; attribute.</em></td></tr>
+ </table>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="#" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/search.html b/python/altgraph/doc/_build/html/search.html
new file mode 100644
index 000000000..93acf8dc5
--- /dev/null
+++ b/python/altgraph/doc/_build/html/search.html
@@ -0,0 +1,105 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Search &mdash; altgraph 0.11 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '0.11',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <script type="text/javascript" src="_static/searchtools.js"></script>
+ <link rel="top" title="altgraph 0.11 documentation" href="index.html" />
+ <script type="text/javascript">
+ jQuery(function() { Search.loadIndex("searchindex.js"); });
+ </script>
+
+ <script type="text/javascript" id="searchindexloader"></script>
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <h1 id="search-documentation">Search</h1>
+ <div id="fallback" class="admonition warning">
+ <script type="text/javascript">$('#fallback').hide();</script>
+ <p>
+ Please activate JavaScript to enable the search
+ functionality.
+ </p>
+ </div>
+ <p>
+ From here you can search these documents. Enter your search
+ words into the box below and click "search". Note that the search
+ function will automatically search for all of the words. Pages
+ containing fewer words won't appear in the result list.
+ </p>
+ <form action="" method="get">
+ <input type="text" name="q" value="" />
+ <input type="submit" value="search" />
+ <span id="search-progress" style="padding-left: 10px"></span>
+ </form>
+
+ <div id="search-results">
+
+ </div>
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">altgraph 0.11 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/altgraph/doc/_build/html/searchindex.js b/python/altgraph/doc/_build/html/searchindex.js
new file mode 100644
index 000000000..972760abe
--- /dev/null
+++ b/python/altgraph/doc/_build/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({envversion:42,terms:{represent:[0,9],all:[2,3,5,6,8,9],code:[6,5],edg:[9,2],breadth:3,ellips:9,per:4,follow:3,degree_dist:[4,2],whose:9,privat:2,depend:[3,2],graph:2,graph_lib:0,tail:[3,9,2],program:9,present:[2,3,4,6,8,9],aris:1,hidden_node_list:3,merchant:1,sourc:[3,2],everi:[3,4,8,6],fals:[3,2],objec:7,upstream:6,veri:2,edgefn:9,level:8,py2app:[0,2],iter:[3,8,2],item:[4,2],describe_nod:3,round:9,self_loop:6,impli:1,crow:9,tee:9,past:2,pass:9,blue:9,index:7,hide:3,neg:5,abl:9,invok:9,current:[3,4,8,9,6],version:2,"new":[3,8,9,2],growth_num:6,oussoren:1,method:[0,9,2],restore_all_edg:3,metadata:2,subtre:2,gener:[6,8,9,2],even:5,coeffici:3,here:9,behaviour:2,same:[8,5,2],path:[0,5,9,7],along:5,modifi:[3,1],valu:[0,8,3],invis:9,search:[3,7],nodes_remov:8,larger:9,reason:9,amount:8,edge_data:[3,8],permit:1,action:1,implement:[6,2],getrawid:8,via:[0,9,3],repositori:[7,2],tweak:2,modul:[2,0,7,3,4,6,9],nodes_orphan:8,"while":[9,2],clust_coef:3,iterdot:9,filenam:9,visibl:3,instal:[9,2],txt:2,from:[1,2,3,5,6,8,9],etc:[3,7],visit:6,two:[3,8],suit:0,vee:9,call:[0,9],msg:8,type:9,until:[5,9],minor:2,more:[6,4,9],reachabl:[3,8],diamond:9,minim:2,altgraph:2,visual:0,indic:6,examin:5,particular:1,sourcecod:7,herebi:1,must:[3,4,9],none:[3,8,9],graphic:9,graphid:8,restor:3,alia:9,setup:2,work:[5,2],archiv:2,can:[0,2,3,6,8,9],purpos:1,root:8,control:[8,9],claim:1,process:9,graphstat:2,accept:2,all_nbr:3,alwai:2,end:[3,5,9],anoth:[8,9],"__iter__":[3,9],classifi:2,write:9,how:9,hop:3,instead:2,simpl:9,wolfram:6,head_id:3,after:[0,3],ronald:1,befor:[3,9],forw_bf:3,mai:[3,4,9],generate_scale_free_graph:6,associ:[3,8,1],stabil:2,third:3,bind:9,author:1,correspond:[3,9],shortest_path:5,caus:[5,2],inform:9,tailor:0,callabl:3,untest:6,order:[3,5,2],furnish:1,edge_list:3,move:2,save_imag:9,graphviz:[0,9,7],dijkstra:5,through:[3,8],add_edg:[3,2],misfeatur:2,out_nbr:[3,9],still:2,paramet:[3,9],style:9,group:4,fit:1,fix:[9,2],tort:1,restore_nod:3,requir:2,hidden:[3,2],unpredicat:3,"return":[2,3,4,6,8,9],thei:[3,9],python:2,overal:9,createrefer:8,avg_hop:2,generate_random_graph:[6,2],fillcolor:9,verifi:[5,9],all_node_styl:9,now:2,bread:3,diagon:9,document:[9,1,2],name:[8,9],didn:2,arrows:9,iterdf:[3,2],leda:0,iff:3,mode:[4,9,2],found:[6,8],nodes_visit:8,side:2,mean:2,weight:9,list:[3,4,5,6,8,9],ensur:2,connect:[0,6,9,1,3],event:1,out:[0,4,9,1],all_degre:3,network:[0,7],newli:8,publish:1,neigbour:3,graphutil:2,invi:9,print:[3,8],correct:2,red:9,file_nam:9,edgevisitor:9,earlier:2,out_degre:3,manipul:0,free:[6,1],number_of_hidden_nod:3,base:[0,9,3],org:2,shortest:[0,5,7,3],modulegraph:[0,7],indent:8,convent:5,where:[3,6,2],keep:2,filter:[6,8],length:5,place:8,isn:[9,2],imposs:2,first:[3,6],oper:[3,2],softwar:1,major:2,back_bf:3,suffix:9,directli:8,save_dot:9,forw_topo_sort:[3,2],onc:9,number:[0,3,4,6,8,9],rank:9,restrict:1,fromt:8,alreadi:[3,9],messag:8,primari:7,size:[9,2],given:[8,3,5,9],silent:2,workaround:2,gif:9,data:[8,3,5,6],licens:7,system:9,construct:[8,0,7,9,3],attach:3,circl:9,testsuit:2,graphtyp:9,option:[3,8,2],copi:1,albert:1,travers:0,specifi:[3,9,2],mathworld:6,part:[8,9,1,2],holder:1,than:5,kind:1,nathan:0,provid:[3,9],remov:[6,8,2],tree:2,horizont:9,were:3,posit:5,randomli:6,fork:[0,7],macholib:[0,7],comput:[3,5],ran:9,well:0,ani:[3,1,2],dash:9,packag:[0,7,9,2],have:[6,8,2],need:9,seem:6,element:[3,2],sell:1,issu:[7,2],moreov:9,note:[9,1],also:[0,3,9,2],which:2,tupl:[8,3,4,6,2],singl:5,compat:[9,2],offload:9,distribut:[1,2],though:2,setuptool:2,previou:2,reach:[8,3,5],discov:5,most:9,su
blicens:1,describe_edg:3,"class":[0,2],charg:1,renam:2,mininum:4,later:[3,4,2],request:[6,2],doe:[8,5],left:9,dot:2,base_nod:9,text:8,restore_edg:3,permiss:1,"_bf":2,find:[0,5],nearer:5,onli:[8,4,5],get_hop:3,copyright:1,explain:2,configur:6,forev:2,should:[3,9],add_nod:[3,2],neato:[9,2],local:3,gone:2,get:4,express:1,stop:3,getid:8,filter_stack:6,acces:9,cannot:[6,8],edge_styl:[9,2],drawn:9,increas:8,liabl:1,createnod:8,hide_edg:3,yield:[3,8,9,2],patch:2,"default":[3,4,9],bad:0,statist:0,contain:[8,2],attribut:2,grapherror:[0,6,3,2],view:2,set:[6,9,2],all_edg:3,orphan:6,iterdata:[3,2],displai:9,datatyp:0,see:[8,9],result:[3,4,9,2],arg:8,fail:2,closer:9,correctli:5,label:9,written:[0,9],won:[3,9],between:[3,4,2],"import":9,msgout:8,spars:2,kei:8,reinstat:8,entir:8,addit:0,both:9,findnod:8,extens:[0,2],hashabl:3,equal:5,contract:1,get_edg:8,multi_edg:6,instanc:6,edge_num:6,node_data:3,kwd:8,point:[6,9],color:9,unittest:2,walk:6,suppli:3,mistak:5,assum:5,duplic:6,liabil:1,creat:[6,8,9,2],oder:6,due:2,been:0,compon:0,edge_by_nod:3,box:9,imag:9,argument:[3,4,8,9,2],neighbour:3,func:9,ranksep:9,those:[3,4],number_of_nod:3,save:9,look:9,solid:9,removenod:8,properti:5,save_img:[9,2],node_styl:9,back_bfs_subgraph:[3,2],calcul:4,behavior:3,exist:[8,3,6,2],loos:0,loop:2,spawn:9,almost:2,readm:2,destin:3,cluster:3,itself:[6,8],incom:[3,4,8,6],tediou:9,rankdir:9,grant:1,perform:6,alphabet:2,make:5,format:9,back_topo_sort:[3,2],node_list:3,member:8,inconsist:0,inch:9,nodevisitor:9,temp_dot:9,inc:4,complet:2,http:2,closest:6,optim:0,effect:2,rais:[6,0,5,3,2],user:[7,2],distutil:2,typic:[3,9],expand:0,noninfring:1,center:[3,4],hidden_edge_list:3,fromnod:8,edge_by_id:3,person:1,without:[3,9,1,2],command:9,thi:[7,1,2,3,5,6,8,9],model:0,self:8,tonod:8,distanc:3,identifi:7,less:[6,8],when:[0,2,3,4,5,6,8,9],obtain:1,out_edg:3,shape:9,previous:[3,8],msgin:8,expos:9,tail_id:3,had:2,except:[0,5,3],add:[3,8,9,2],appli:6,input:[5,9],successor:3,match:[6,8],bin:4,around:3,transpar:9,start_id:3,removerefer:8,objectgraph:[7,2],specif:0,deprec:9,arbitrari:3,either:8,fill:9,output:2,page:[7,9],revers:3,deal:1,some:[3,5,2],"_df":2,"export":2,flatten:8,number_of_hidden_edg:3,file_typ:9,separ:9,scale:6,"__contains__":[3,8,2],pombredann:2,shall:1,subclass:8,tracker:[7,2],exit:9,condit:[3,8],damag:1,topolog:[0,7],refer:[3,8,9,2],ippolito:1,object:[3,8,2],run:2,bold:9,inspect:6,broken:2,step:6,powerlaw:6,although:6,fulli:6,about:9,actual:[3,8,2],dotti:9,restore_all_nod:3,degre:6,outgo:[3,4,8],constructor:[9,2],denni:0,effici:0,forw_bfs_subgraph:[3,2],digraph:9,basenam:9,within:9,three:3,warranti:1,right:[9,1],empti:[3,9],inc_nbr:3,chang:2,merg:1,triangl:9,wai:9,whom:1,aren:3,addnod:8,support:[0,2],avail:2,start:[8,0,5,6,3],includ:[0,9,1,2],replac:9,forward:[3,2],"function":[0,9,2],head:[3,9,6],subgraph:3,last_good:6,bin_num:4,state:[0,2],newer:0,mydot:9,inc_edg:3,line:9,"true":[3,8,6],bug:2,count:4,attr:9,consist:[3,6,2],possibl:2,whether:1,bugfix:2,access:3,maximum:[3,4],limit:[4,1],otherwis:[3,9,1],significantli:0,sort:[0,7],featur:[0,9,2],growth_n:6,inv:9,doesn:[6,2],repres:3,decreas:8,file:[9,1,2],guarante:5,bob:1,proport:[3,6],check:2,probabl:6,arrowhead:9,unhidden:3,"2to3":2,detail:9,hide_nod:3,other:[3,1,2],node_num:6,test:2,you:2,labelangl:9,node:[9,2],draw:9,intend:9,inc_degre:3,sequenc:3,vertex:5,istvan:1,create_nod:[3,2],graphlib:[0,7],unless:[9,1],bitbucket:[7,2],sphinx:2,longer:2,filterstack:8,number_of_edg:3,descript:6,apear:3,depth:[3,6],time:3,backward:9},objtypes:{"0":"py:module","1":"py:method","2":"py:class
","3":"py:function","4":"py:data","5":"py:exception"},objnames:{"0":["py","module","Python module"],"1":["py","method","Python method"],"2":["py","class","Python class"],"3":["py","function","Python function"],"4":["py","data","Python data"],"5":["py","exception","Python exception"]},filenames:["core","license","changelog","graph","graphstat","graphalgo","graphutil","index","objectgraph","dot"],titles:["<tt class=\"docutils literal\"><span class=\"pre\">altgraph</span></tt> &#8212; A Python Graph Library","License","Release history","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.Graph</span></tt> &#8212; Basic directional graphs","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.GraphStat</span></tt> &#8212; Functions providing various graph statistics","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.GraphAlgo</span></tt> &#8212; Graph algorithms","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.GraphUtil</span></tt> &#8212; Utility functions","Altgraph - A basic graph library","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.ObjectGraph</span></tt> &#8212; Graphs of objecs with an identifier","<tt class=\"docutils literal\"><span class=\"pre\">altgraph.Dot</span></tt> &#8212; Interface to the dot language"],objects:{"":{altgraph:[0,0,0,"-"]},"altgraph.Graph":{Graph:[3,2,1,""],node_data:[3,1,1,""]},"altgraph.Dot.Dot":{node_style:[9,1,1,""],style:[9,1,1,""],all_node_style:[9,1,1,""],display:[9,1,1,""]},"altgraph.Graph.Graph":{number_of_hidden_nodes:[3,1,1,""],out_edges:[3,1,1,""],inc_degree:[3,1,1,""],forw_bfs_subgraph:[3,1,1,""],edge_list:[3,1,1,""],add_node:[3,1,1,""],hidden_node_list:[3,1,1,""],iterdata:[3,1,1,""],clust_coef:[3,1,1,""],all_edges:[3,1,1,""],describe_edge:[3,1,1,""],all_degree:[3,1,1,""],get_hops:[3,1,1,""],hide_node:[3,1,1,""],out_degree:[3,1,1,""],edge_by_node:[3,1,1,""],"__contains__":[3,1,1,""],restore_edge:[3,1,1,""],describe_node:[3,1,1,""],back_topo_sort:[3,1,1,""],node_list:[3,1,1,""],forw_topo_sort:[3,1,1,""],tail:[3,1,1,""],restore_all_nodes:[3,1,1,""],all_nbrs:[3,1,1,""],restore_all_edges:[3,1,1,""],restore_node:[3,1,1,""],head:[3,1,1,""],number_of_nodes:[3,1,1,""],iterdfs:[3,1,1,""],"__iter__":[3,1,1,""],connected:[3,1,1,""],forw_bfs:[3,1,1,""],add_edge:[3,1,1,""],number_of_hidden_edges:[3,1,1,""],inc_nbrs:[3,1,1,""],out_nbrs:[3,1,1,""],hidden_edge_list:[3,1,1,""],inc_edges:[3,1,1,""],number_of_edges:[3,1,1,""],back_bfs_subgraph:[3,1,1,""],edge_data:[3,1,1,""],hide_edge:[3,1,1,""],edge_by_id:[3,1,1,""],back_bfs:[3,1,1,""]},"altgraph.GraphAlgo":{dijkstra:[5,3,1,""],shortest_path:[5,3,1,""]},altgraph:{Graph:[3,0,0,"-"],GraphStat:[4,0,0,"-"],GraphAlgo:[5,0,0,"-"],GraphUtil:[6,0,0,"-"],GraphError:[0,5,1,""],ObjectGraph:[8,0,0,"-"],Dot:[9,0,0,"-"]},"altgraph.Dot":{save_image:[9,1,1,""],save_dot:[9,1,1,""],iterdot:[9,1,1,""],"__iter__":[9,1,1,""],edge_style:[9,1,1,""],Dot:[9,2,1,""]},"altgraph.GraphStat":{degree_dist:[4,3,1,""]},"altgraph.ObjectGraph":{removeReference:[8,1,1,""],getIdent:[8,1,1,""],ObjectGraph:[8,2,1,""]},"altgraph.GraphUtil":{generate_random_graph:[6,3,1,""],generate_scale_free_graph:[6,3,1,""],filter_stack:[6,3,1,""]},"altgraph.ObjectGraph.ObjectGraph":{findNode:[8,1,1,""],createNode:[8,1,1,""],msgin:[8,1,1,""],removeNode:[8,1,1,""],addNode:[8,1,1,""],"__contains__":[8,1,1,""],graph:[8,4,1,""],createReferences:[8,1,1,""],filterStack:[8,1,1,""],msg:[8,1,1,""],flatten:[8,1,1,""],debug:[8,4,1,""],get_edges:[8,1,1,""],getRawIdent:[8,1,1,""],nodes:[8,1,1,""],msgout:[8,1,1,""]}},titleterms:{nod
e:3,identifi:8,edg:3,graphstat:4,direct:[3,9],indic:7,tabl:7,onlin:7,histori:2,librari:[0,7],graph:[0,7,3,4,5,8,9],licens:1,custom:9,graphutil:6,valid:9,interfac:9,languag:9,basic:[3,7],method:3,updat:9,"function":[6,4],non:9,altgraph:[0,7,3,4,5,6,8,9],resourc:7,variou:4,python:0,relat:3,usag:9,util:6,objec:8,releas:2,objectgraph:8,"class":9,travers:3,algorithm:5,provid:4,graphalgo:5,mit:1,exampl:9,statist:[3,4],debug:8,output:[8,9],attribut:9,emit:9,dot:9}}) \ No newline at end of file
diff --git a/python/altgraph/doc/changelog.rst b/python/altgraph/doc/changelog.rst
new file mode 100644
index 000000000..02fd412d6
--- /dev/null
+++ b/python/altgraph/doc/changelog.rst
@@ -0,0 +1,185 @@
+Release history
+===============
+
+0.12
+----
+
+- Added ``ObjectGraph.edgeData`` to retrieve the edge data
+ from a specific edge.
+
+- Added ``Graph.update_edge_data`` and ``ObjectGraph.updateEdgeData``
+  to update the data associated with a graph edge.
+
+0.11
+----
+
+- Stabilize the order of elements in dot file exports,
+ patch from bitbucket user 'pombredanne'.
+
+- Tweak setup.py file to remove dependency on distribute (but
+ keep the dependency on setuptools)
+
+
+0.10.2
+------
+
+- There were no classifiers in the package metadata due to a bug
+  in setup.py.
+
+0.10.1
+------
+
+This is a bugfix release
+
+Bugfixes:
+
+- Issue #3: The source archive contains a README.txt
+ while the setup file refers to ReadMe.txt.
+
+  This is caused by a misfeature in distutils; as a
+  workaround I've renamed ReadMe.txt to README.txt
+  in the source tree and setup file.
+
+
+0.10
+-----
+
+This is a minor feature release
+
+Features:
+
+- Do not use "2to3" to support Python 3.
+
+ As a side effect of this altgraph now supports
+ Python 2.6 and later, and no longer supports
+ earlier releases of Python.
+
+- The order of attributes in the Dot output
+ is now always alphabetical.
+
+ With this change the output will be consistent
+ between runs and Python versions.
+
+0.9
+---
+
+This is a minor bugfix release
+
+Features:
+
+- Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method
+ yielding all nodes in an object graph.
+
+Bugfixes:
+
+- The 0.8 release didn't work with py2app when using
+  Python 3.x.
+
+
+0.8
+-----
+
+This is a minor feature release. The major new feature
+is an extensive set of unittests, which explains almost
+all other changes in this release.
+
+Bugfixes:
+
+- Installing failed with Python 2.5 due to using a distutils
+ class that isn't available in that version of Python
+ (issue #1 on the issue tracker)
+
+- ``altgraph.GraphStat.degree_dist`` now actually works
+
+- ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will
+ no longer create the edge when one of the nodes doesn't
+ exist.
+
+- ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs.
+
+- ``altgraph.Graph.back_topo_sort`` was completely broken in
+ previous releases.
+
+- ``altgraph.Graph.forw_bfs_subgraph`` now actually works.
+
+- ``altgraph.Graph.back_bfs_subgraph`` now actually works.
+
+- ``altgraph.Graph.iterdfs`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+- ``altgraph.Graph.iterdata`` now returns the correct result
+ when the ``forward`` argument is ``False``.
+
+
+Features:
+
+- The ``altgraph.Graph`` constructor now accepts an argument
+  that contains 2- and 3-tuples instead of requiring that
+ all items have the same size. The (optional) argument can now
+ also be any iterator.
+
+- ``altgraph.Graph.Graph.add_node`` has no effect when you
+ add a hidden node.
+
+- The private method ``altgraph.Graph._bfs`` is no longer
+ present.
+
+- The private method ``altgraph.Graph._dfs`` is no longer
+ present.
+
+- ``altgraph.ObjectGraph`` now has a ``__contains__`` method,
+ which means you can use the ``in`` operator to check if a
+ node is part of a graph.
+
+- ``altgraph.GraphUtil.generate_random_graph`` will raise
+ ``GraphError`` instead of looping forever when it is
+ impossible to create the requested graph.
+
+- ``altgraph.Dot.edge_style`` raises ``GraphError`` when
+ one of the nodes is not present in the graph. The method
+ silently added the tail in the past, but without ensuring
+ a consistent graph state.
+
+- ``altgraph.Dot.save_img`` now works when the mode is
+ ``"neato"``.
+
+0.7.2
+-----
+
+This is a minor bugfix release
+
+Bugfixes:
+
+- distutils didn't include the documentation subtree
+
+0.7.1
+-----
+
+This is a minor feature release
+
+Features:
+
+- Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/altgraph>.
+
+- The repository has moved to bitbucket
+
+- ``altgraph.GraphStat.avg_hops`` is no longer present; the function had no
+  implementation and no specified behaviour.
+
+- The module ``altgraph.compat`` is gone, which means altgraph will no
+  longer work with Python 2.3.
+
+
+0.7.0
+-----
+
+This is a minor feature release.
+
+Features:
+
+- Support for Python 3
+
+- It is now possible to run tests using 'python setup.py test'
+
+ (The actual testsuite is still very minimal though)
diff --git a/python/altgraph/doc/conf.py b/python/altgraph/doc/conf.py
new file mode 100644
index 000000000..cd3fd9912
--- /dev/null
+++ b/python/altgraph/doc/conf.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+#
+# altgraph documentation build configuration file, created by
+# sphinx-quickstart on Tue Aug 31 11:04:49 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+def get_version():
+    fn = os.path.join(
+        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+        'setup.cfg')
+    # read the version from setup.cfg so the docs cannot get
+    # out of sync with the package metadata
+    with open(fn) as fp:
+        for ln in fp:
+            if ln.startswith('version'):
+                return ln.split('=')[-1].strip()
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+sys.path.insert(0,
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.autodoc' ]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'altgraph'
+copyright = u'2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = get_version()
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'altgraphdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'altgraph.tex', u'altgraph Documentation',
+ u'Ronald Oussoren', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'python': ('http://docs.python.org/', None) }
diff --git a/python/altgraph/doc/core.rst b/python/altgraph/doc/core.rst
new file mode 100644
index 000000000..8288f6a94
--- /dev/null
+++ b/python/altgraph/doc/core.rst
@@ -0,0 +1,26 @@
+:mod:`altgraph` --- A Python Graph Library
+==================================================
+
+.. module:: altgraph
+ :synopsis: A directional graph for python
+
+altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
+to use newer Python 2.3+ features, including additional support used by the
+py2app suite (modulegraph and macholib, specifically).
+
+altgraph is a Python based graph (network) representation and manipulation package.
+It started out as an extension to the `graph_lib module <http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
+written by Nathan Denny, and has since been significantly optimized and expanded.
+
+The :class:`altgraph.Graph.Graph` class is loosely modeled after the `LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
+(Library of Efficient Datatypes) representation. The library
+includes methods for constructing graphs, BFS and DFS traversals,
+topological sort, finding connected components, shortest paths, as well as a number
+of graph statistics functions. The library can also visualize graphs
+via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
+
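+A minimal usage sketch, using only methods documented in this manual::
+
+    from altgraph import Graph
+
+    # build a small directed graph
+    graph = Graph.Graph([(1, 2), (2, 3), (3, 4)])
+    graph.add_node(5)      # an isolated node
+    graph.add_edge(4, 5)   # connect it
+
+    print(graph.number_of_nodes())   # 5
+    print(graph.forw_bfs(1))         # nodes in breadth first order from node 1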
+
+.. exception:: GraphError
+
+   Exception raised when methods are called with bad values or
+   when the graph is in an inconsistent state.
diff --git a/python/altgraph/doc/dot.rst b/python/altgraph/doc/dot.rst
new file mode 100644
index 000000000..3848c488a
--- /dev/null
+++ b/python/altgraph/doc/dot.rst
@@ -0,0 +1,224 @@
+:mod:`altgraph.Dot` --- Interface to the dot language
+=====================================================
+
+.. module:: altgraph.Dot
+   :synopsis: Interface to the dot language as used by Graphviz.
+
+The :py:mod:`~altgraph.Dot` module provides a simple interface to the
+file format used in the `graphviz`_ program. The module is intended to
+offload the most tedious part of the process (the **dot** file generation)
+while transparently exposing most of its features.
+
+.. _graphviz: http://www.research.att.com/sw/tools/graphviz/
+
+To display the graphs or to generate image files the `graphviz`_
+package needs to be installed on the system; moreover, the :command:`dot`
+and :command:`dotty` programs must be accessible in the program path so
+that they can be run from processes spawned within the module.
+
+Example usage
+-------------
+
+Here is a typical usage::
+
+ from altgraph import Graph, Dot
+
+ # create a graph
+ edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
+ graph = Graph.Graph(edges)
+
+ # create a dot representation of the graph
+ dot = Dot.Dot(graph)
+
+ # display the graph
+ dot.display()
+
+ # save the dot representation into the mydot.dot file
+ dot.save_dot(file_name='mydot.dot')
+
+ # save dot file as gif image into the graph.gif file
+ dot.save_img(file_name='graph', file_type='gif')
+
+
+Directed graph and non-directed graph
+-------------------------------------
+
+The Dot class can be used for both directed and non-directed graphs
+by passing the *graphtype* parameter.
+
+Example::
+
+    # create a directed graph (the default)
+ dot = Dot.Dot(graph, graphtype="digraph")
+
+ # create non-directed graph
+ dot = Dot.Dot(graph, graphtype="graph")
+
+
+Customizing the output
+----------------------
+
+The graph drawing process may be customized by passing
+valid :command:`dot` parameters for the nodes and edges. For a list of all
+parameters see the `graphviz`_ documentation.
+
+Example::
+
+ # customizing the way the overall graph is drawn
+    dot.style(size='10,10', rankdir='RL', page='5, 5', ranksep=0.75)
+
+ # customizing node drawing
+    dot.node_style(1, label='BASE_NODE', shape='box', color='blue')
+ dot.node_style(2, style='filled', fillcolor='red')
+
+ # customizing edge drawing
+ dot.edge_style(1, 2, style='dotted')
+ dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
+ dot.edge_style(4, 5, arrowsize=2, style='bold')
+
+
+.. note::
+
+   dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
+   display all graphics styles. To verify the output save it to an image
+   file and look at it that way.
+
+Valid attributes
+----------------
+
+- dot styles, passed via the :py:meth:`Dot.style` method::
+
+ rankdir = 'LR' (draws the graph horizontally, left to right)
+ ranksep = number (rank separation in inches)
+
+- node attributes, passed via the :py:meth:`Dot.node_style` method::
+
+ style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
+ shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
+
+- edge attributes, passed via the :py:meth:`Dot.edge_style` method::
+
+ style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
+ arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee'
+ weight = number (the larger the number the closer the nodes will be)
+
+- valid `graphviz colors <http://www.research.att.com/~erg/graphviz/info/colors.html>`_
+
+- for more details on how to control the graph drawing process see the
+ `graphviz reference <http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
+
+
+Class interface
+---------------
+
+.. class:: Dot(graph[, nodes[, edgefn[, nodevisitor[, edgevisitor[, name[, dot[, dotty[, neato[, graphtype]]]]]]]]])
+
+ Creates a new Dot generator based on the specified
+ :class:`Graph <altgraph.Graph.Graph>`. The Dot generator won't reference
+ the *graph* once it is constructed.
+
+ If the *nodes* argument is present it is the list of nodes to include
+ in the graph, otherwise all nodes in *graph* are included.
+
+   If the *edgefn* argument is present it is a function that yields the
+   nodes connected to another node; this defaults to
+   :meth:`graph.out_nbrs <altgraph.Graph.Graph.out_nbrs>`. The constructor won't
+   add edges to the dot file unless both the head and tail of the edge
+   are in *nodes*.
+
+ If the *name* is present it specifies the name of the graph in the resulting
+ dot file. The default is ``"G"``.
+
+ The functions *nodevisitor* and *edgevisitor* return the default style
+ for a given edge or node (both default to functions that return an empty
+ style).
+
+ The arguments *dot*, *dotty* and *neato* are used to pass the path to
+ the corresponding `graphviz`_ command.
+
+
+Updating graph attributes
+.........................
+
+.. method:: Dot.style(\**attr)
+
+ Sets the overall style (graph attributes) to the given attributes.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
+.. method:: Dot.node_style(node, \**attr)
+
+ Sets the style for *node* to the given attributes.
+
+ This method will add *node* to the graph when it isn't already
+ present.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
+.. method:: Dot.all_node_style(\**attr)
+
+   Replaces the current style for all nodes.
+
+
+.. method:: Dot.edge_style(head, tail, \**attr)
+
+ Sets the style of an edge to the given attributes. The edge will
+ be added to the graph when it isn't already present, but *head*
+ and *tail* must both be valid nodes.
+
+ See `Valid Attributes`_ for more information about the attributes.
+
+
+
+Emitting output
+...............
+
+.. method:: Dot.display([mode])
+
+ Displays the current graph via dotty.
+
+ If the *mode* is ``"neato"`` the dot file is processed with
+ the neato command before displaying.
+
+ This method won't return until the dotty command exits.
+
+.. method:: Dot.save_dot(filename)
+
+ Saves the current graph representation into the given file.
+
+ .. note::
+
+      For backward compatibility reasons this method can also
+      be called without an argument; it will then write the graph
+      into a fixed filename (present in the attribute :data:`Graph.temp_dot`).
+
+ This feature is deprecated and should not be used.
+
+
+.. method:: Dot.save_image(file_name[, file_type[, mode]])
+
+ Saves the current graph representation as an image file. The output
+ is written into a file whose basename is *file_name* and whose suffix
+ is *file_type*.
+
+   The *file_type* specifies the type of file to write; the default
+   is ``"gif"``.
+
+   If the *mode* is ``"neato"`` the dot file is processed with
+   the neato command before the image is saved.
+
+ .. note::
+
+      For backward compatibility reasons this method can also
+      be called without an argument; it will then write the graph
+      with a fixed basename (``"out"``).
+
+ This feature is deprecated and should not be used.
+
+.. method:: Dot.iterdot()
+
+ Yields all lines of a `graphviz`_ input file (including line endings).
+
+.. method:: Dot.__iter__()
+
+ Alias for the :meth:`iterdot` method.
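+
+A short sketch that uses :meth:`iterdot` to write the dot source from the
+usage example above to an arbitrary stream (here ``sys.stdout``)::
+
+    import sys
+
+    for line in dot.iterdot():
+        sys.stdout.write(line)   # lines include their line endings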
diff --git a/python/altgraph/doc/graph.rst b/python/altgraph/doc/graph.rst
new file mode 100644
index 000000000..502a2186a
--- /dev/null
+++ b/python/altgraph/doc/graph.rst
@@ -0,0 +1,305 @@
+:mod:`altgraph.Graph` --- Basic directional graphs
+==================================================
+
+.. module:: altgraph.Graph
+ :synopsis: Basic directional graphs.
+
+The module :mod:`altgraph.Graph` provides a class :class:`Graph` that
+represents a directed graph with *N* nodes and *E* edges.
+
+.. class:: Graph([edges])
+
+ Constructs a new empty :class:`Graph` object. If the optional
+ *edges* parameter is supplied, updates the graph by adding the
+ specified edges.
+
+ All of the elements in *edges* should be tuples with two or three
+ elements. The first two elements of the tuple are the source and
+ destination node of the edge, the optional third element is the
+   edge data. The source and destination nodes are added to the graph
+   when they aren't already present.
+
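+   For example (a minimal sketch; the edge data here is arbitrary)::
+
+      from altgraph import Graph
+
+      # a mix of 2-tuples (no edge data) and 3-tuples (with edge data)
+      graph = Graph.Graph([
+          (1, 2),
+          (2, 3, 'two-to-three'),
+          (3, 1),
+      ])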
+
+Node related methods
+--------------------
+
+.. method:: Graph.add_node(node[, node_data])
+
+ Adds a new node to the graph if it is not already present. The new
+ node must be a hashable object.
+
+ Arbitrary data can be attached to the node via the optional *node_data*
+ argument.
+
+ .. note:: the node also won't be added to the graph when it is
+ present but currently hidden.
+
+
+.. method:: Graph.hide_node(node)
+
+ Hides a *node* from the graph. The incoming and outgoing edges of
+ the node will also be hidden.
+
+   Raises :class:`altgraph.GraphError` when the node is not a (visible)
+   node of the graph.
+
+
+.. method:: Graph.restore_node(node)
+
+ Restores a previously hidden *node*. The incoming and outgoing
+ edges of the node are also restored.
+
+ Raises :class:`altgraph.GraphError` when the node is not a hidden
+ node of the graph.
+
+.. method:: Graph.restore_all_nodes()
+
+ Restores all hidden nodes.
+
+.. method:: Graph.number_of_nodes()
+
+ Return the number of visible nodes in the graph.
+
+.. method:: Graph.number_of_hidden_nodes()
+
+ Return the number of hidden nodes in the graph.
+
+.. method:: Graph.node_list()
+
+ Return a list with all visible nodes in the graph.
+
+.. method:: Graph.hidden_node_list()
+
+ Return a list with all hidden nodes in the graph.
+
+.. method:: Graph.node_data(node)
+
+ Return the data associated with the *node* when it was
+ added.
+
+.. method:: Graph.describe_node(node)
+
+ Returns *node*, the node's data and the lists of outgoing
+ and incoming edges for the node.
+
+ .. note::
+
+      the edge lists should not be modified; doing so
+      can result in unpredictable behavior.
+
+.. method:: Graph.__contains__(node)
+
+ Returns True iff *node* is a node in the graph. This
+ method is accessed through the *in* operator.
+
+.. method:: Graph.__iter__()
+
+ Yield all nodes in the graph.
+
+.. method:: Graph.out_edges(node)
+
+   Return the list of outgoing edges for *node*.
+
+.. method:: Graph.inc_edges(node)
+
+   Return the list of incoming edges for *node*.
+
+.. method:: Graph.all_edges(node)
+
+   Return the list of incoming and outgoing edges for *node*.
+
+.. method:: Graph.out_degree(node)
+
+ Return the number of outgoing edges for *node*.
+
+.. method:: Graph.inc_degree(node)
+
+ Return the number of incoming edges for *node*.
+
+.. method:: Graph.all_degree(node)
+
+ Return the number of edges (incoming or outgoing) for *node*.
+
+Edge related methods
+--------------------
+
+.. method:: Graph.add_edge(head_id, tail_id[, edge_data[, create_nodes]])
+
+   Adds a directed edge from *head_id* to *tail_id*. Arbitrary data can
+   be added via *edge_data*. When *create_nodes* is *True* (the default),
+   *head_id* and *tail_id* will be added to the graph when they aren't
+   already present.
+
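+A short sketch of the *create_nodes* behaviour (assuming the keyword name
+matches the documented parameter)::
+
+   graph.add_edge('a', 'b')    # adds nodes 'a' and 'b' when needed
+
+   # with create_nodes=False the edge is only created when both
+   # nodes are already present in the graph
+   graph.add_edge('a', 'missing', create_nodes=False)
+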
+.. method:: Graph.hide_edge(edge)
+
+ Hides an edge from the graph. The edge may be unhidden at some later
+ time.
+
+.. method:: Graph.restore_edge(edge)
+
+ Restores a previously hidden *edge*.
+
+.. method:: Graph.restore_all_edges()
+
+ Restore all edges that were hidden before, except for edges
+ referring to hidden nodes.
+
+.. method:: Graph.edge_by_node(head, tail)
+
+ Return the edge ID for an edge from *head* to *tail*,
+ or :data:`None` when no such edge exists.
+
+.. method:: Graph.edge_by_id(edge)
+
+   Return the head and tail of the *edge*.
+
+.. method:: Graph.edge_data(edge)
+
+ Return the data associated with the *edge*.
+
+.. method:: Graph.update_edge_data(edge, data)
+
+ Replace the edge data for *edge* by *data*. Raises
+ :exc:`KeyError` when the edge does not exist.
+
+ .. versionadded:: 0.12
+
+.. method:: Graph.head(edge)
+
+   Return the head of an *edge*.
+
+.. method:: Graph.tail(edge)
+
+   Return the tail of an *edge*.
+
+.. method:: Graph.describe_edge(edge)
+
+ Return the *edge*, the associated data, its head and tail.
+
+.. method:: Graph.number_of_edges()
+
+ Return the number of visible edges.
+
+.. method:: Graph.number_of_hidden_edges()
+
+ Return the number of hidden edges.
+
+.. method:: Graph.edge_list()
+
+ Returns a list with all visible edges in the graph.
+
+.. method:: Graph.hidden_edge_list()
+
+ Returns a list with all hidden edges in the graph.
+
+Graph traversal
+---------------
+
+.. method:: Graph.out_nbrs(node)
+
+ Return a list of all nodes connected by outgoing edges.
+
+.. method:: Graph.inc_nbrs(node)
+
+ Return a list of all nodes connected by incoming edges.
+
+.. method:: Graph.all_nbrs(node)
+
+ Returns a list of nodes connected by an incoming or outgoing edge.
+
+.. method:: Graph.forw_topo_sort()
+
+   Return a list of nodes where the successors (based on outgoing
+   edges) of any given node appear in the sequence after that node.
+
+.. method:: Graph.back_topo_sort()
+
+   Return a list of nodes where the successors (based on incoming
+   edges) of any given node appear in the sequence after that node.
+
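+A sketch checking the documented ordering property (assuming ``graph``
+is acyclic)::
+
+   order = graph.forw_topo_sort()
+   position = dict((node, i) for i, node in enumerate(order))
+   for edge in graph.edge_list():
+       # the head of every edge sorts before its tail
+       assert position[graph.head(edge)] < position[graph.tail(edge)]
+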
+.. method:: Graph.forw_bfs_subgraph(start_id)
+
+ Return a subgraph consisting of the breadth first
+ reachable nodes from *start_id* based on their outgoing edges.
+
+
+.. method:: Graph.back_bfs_subgraph(start_id)
+
+ Return a subgraph consisting of the breadth first
+ reachable nodes from *start_id* based on their incoming edges.
+
+.. method:: Graph.iterdfs(start[, end[, forward]])
+
+ Yield nodes in a depth first traversal starting at the *start*
+ node.
+
+ If *end* is specified traversal stops when reaching that node.
+
+   If *forward* is True (the default) edges are traversed in the forward
+   direction, otherwise they are traversed in the reverse direction.
+
+.. method:: Graph.iterdata(start[, end[, forward[, condition]]])
+
+ Yield the associated data for nodes in a depth first traversal
+ starting at the *start* node. This method will not yield values for nodes
+ without associated data.
+
+ If *end* is specified traversal stops when reaching that node.
+
+ If *condition* is specified and the condition callable returns
+ False for the associated data this method will not yield the
+ associated data and will not follow the edges for the node.
+
+   If *forward* is True (the default) edges are traversed in the forward
+   direction, otherwise they are traversed in the reverse direction.
+
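+   A short sketch of a conditional traversal (assuming the keyword name
+   ``condition`` matches the documented parameter)::
+
+      graph = Graph.Graph()
+      graph.add_node(1, 10)
+      graph.add_node(2, 15)
+      graph.add_node(3, 20)
+      graph.add_edge(1, 2)
+      graph.add_edge(2, 3)
+
+      # prints 10; node 2's data fails the condition, so node 3 is never reached
+      for data in graph.iterdata(1, condition=lambda d: d % 2 == 0):
+          print(data)
+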
+.. method:: Graph.forw_bfs(start[, end])
+
+   Returns a list of nodes starting at *start* in some breadth first
+   search order (following outgoing edges).
+
+ When *end* is specified iteration stops at that node.
+
+.. method:: Graph.back_bfs(start[, end])
+
+   Returns a list of nodes starting at *start* in some breadth first
+   search order (following incoming edges).
+
+ When *end* is specified iteration stops at that node.
+
+.. method:: Graph.get_hops(start[, end[, forward]])
+
+ Computes the hop distance to all nodes centered around a specified node.
+
+   First order neighbours are at hop 1, their neighbours are at hop 2, etc.
+   Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of
+   the *forward* parameter.
+
+ If the distance between all neighbouring nodes is 1 the hop number
+ corresponds to the shortest distance between the nodes.
+
+ Typical usage::
+
+      >>> print graph.get_hops(1, 8)
+      [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
+ # node 1 is at 0 hops
+ # node 2 is at 1 hop
+ # ...
+ # node 8 is at 5 hops
+
+
+Graph statistics
+----------------
+
+.. method:: Graph.connected()
+
+ Returns True iff every node in the graph can be reached from
+ every other node.
+
+.. method:: Graph.clust_coef(node)
+
+   Returns the local clustering coefficient of *node*.
+
+   The local clustering coefficient is the ratio between the actual number
+   of edges between the neighbours of *node* and the maximum number of
+   edges between those nodes.
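+
+A short sketch combining the two statistics (``graph`` is any
+:class:`Graph` instance)::
+
+   if graph.connected():
+       print('every node can reach every other node')
+
+   for node in graph:
+       print(node, graph.clust_coef(node))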
diff --git a/python/altgraph/doc/graphalgo.rst b/python/altgraph/doc/graphalgo.rst
new file mode 100644
index 000000000..84d492f44
--- /dev/null
+++ b/python/altgraph/doc/graphalgo.rst
@@ -0,0 +1,26 @@
+:mod:`altgraph.GraphAlgo` --- Graph algorithms
+==================================================
+
+.. module:: altgraph.GraphAlgo
+   :synopsis: Basic graph algorithms
+
+.. function:: dijkstra(graph, start[, end])
+
+ Dijkstra's algorithm for shortest paths.
+
+ Find shortest paths from the start node to all nodes nearer
+ than or equal to the *end* node. The edge data is assumed to be the edge length.
+
+ .. note::
+
+ Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive.
+ This code does not verify this property for all edges (only the edges examined until the end
+ vertex is reached), but will correctly compute shortest paths even for some graphs with negative
+ edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake.
+
+
+.. function:: shortest_path(graph, start, end)
+
+ Find a single shortest path from the given start node to the given end node.
+ The input has the same conventions as :func:`dijkstra`. The output is a list
+ of the nodes in order along the shortest path.
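+
+A minimal sketch (edge data is used as the edge length, as described above)::
+
+   from altgraph import Graph, GraphAlgo
+
+   edges = [
+       (1, 2, 4), (1, 3, 1),
+       (3, 2, 1), (2, 4, 1),
+   ]
+   graph = Graph.Graph(edges)
+
+   # [1, 3, 2, 4]: total length 3, versus 5 for the direct route via node 2
+   print(GraphAlgo.shortest_path(graph, 1, 4))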
diff --git a/python/altgraph/doc/graphstat.rst b/python/altgraph/doc/graphstat.rst
new file mode 100644
index 000000000..0931a12dd
--- /dev/null
+++ b/python/altgraph/doc/graphstat.rst
@@ -0,0 +1,25 @@
+:mod:`altgraph.GraphStat` --- Functions providing various graph statistics
+==========================================================================
+
+.. module:: altgraph.GraphStat
+ :synopsis: Functions providing various graph statistics
+
+The module :mod:`altgraph.GraphStat` provides functions that calculate
+graph statistics. Currently there is only one such function; more may
+be added later.
+
+.. function:: degree_dist(graph[, limits[, bin_num[, mode]]])
+
+ Groups the number of edges per node into *bin_num* bins
+ and returns the list of those bins. Every item in the result
+ is a tuple with the center of the bin and the number of items
+ in that bin.
+
+   When the *limits* argument is present it must be a tuple with
+   the minimum and maximum number of edges that get binned (that
+   is, when *limits* is ``(4, 10)`` only nodes with between 4
+   and 10 edges get counted).
+
+ The *mode* argument is used to count incoming (``'inc'``) or
+ outgoing (``'out'``) edges. The default is to count the outgoing
+ edges.
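+
+   A minimal sketch (the exact bin boundaries depend on the graph)::
+
+      from altgraph import Graph, GraphStat
+
+      graph = Graph.Graph([(1, 2), (1, 3), (2, 3)])
+
+      # a list of (bin center, number of nodes in that bin) tuples
+      print(GraphStat.degree_dist(graph))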
diff --git a/python/altgraph/doc/graphutil.rst b/python/altgraph/doc/graphutil.rst
new file mode 100644
index 000000000..c07836df8
--- /dev/null
+++ b/python/altgraph/doc/graphutil.rst
@@ -0,0 +1,55 @@
+:mod:`altgraph.GraphUtil` --- Utility functions
+================================================
+
+.. module:: altgraph.GraphUtil
+ :synopsis: Utility functions
+
+The module :mod:`altgraph.GraphUtil` provides a number of more
+or less useful utility functions.
+
+.. function:: generate_random_graph(node_num, edge_num[, self_loops[, multi_edges]])
+
+ Generates and returns a :class:`Graph <altgraph.Graph.Graph>` instance
+ with *node_num* nodes randomly connected by *edge_num* edges.
+
+ When *self_loops* is present and True there can be edges that point from
+ a node to itself.
+
+   When *multi_edges* is present and True there can be duplicate edges.
+
+   This function raises :class:`GraphError <altgraph.GraphError>` when
+   a graph with the requested configuration cannot be created.
+
+.. function:: generate_scale_free_graph(steps, growth_num[, self_loops[, multi_edges]])
+
+   Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
+   will have ``steps * growth_num`` nodes and a scale free (powerlaw)
+   connectivity.
+
+   Starting with a fully connected graph with *growth_num* nodes,
+ at every step *growth_num* nodes are added to the graph and are connected
+ to existing nodes with a probability proportional to the degree of these
+ existing nodes.
+
+ .. warning:: The current implementation is basically untested, although
+ code inspection seems to indicate an implementation that is consistent
+ with the description at
+      `Wolfram MathWorld <http://mathworld.wolfram.com/Scale-FreeNetwork.html>`_.
+
+.. function:: filter_stack(graph, head, filters)
+
+   Perform a depth-first order walk of the graph starting at *head* and
+ apply all filter functions in *filters* on the node data of the nodes
+ found.
+
+ Returns (*visited*, *removes*, *orphans*), where
+
+ * *visited*: the set of visited nodes
+
+ * *removes*: the list of nodes where the node data doesn't match
+ all *filters*.
+
+   * *orphans*: a list of tuples (*last_good*, *node*), where
+     *node* is not in *removes* and one of the nodes that is connected
+     by an incoming edge is in *removes*. *Last_good* is the
+     closest upstream node that is not in *removes*.
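+
+A minimal sketch of the generator functions (node and edge counts are
+arbitrary)::
+
+   from altgraph import GraphUtil
+
+   # 10 nodes, randomly connected by 20 edges
+   graph = GraphUtil.generate_random_graph(10, 20)
+
+   # about 5 * 3 nodes with a scale free (powerlaw) connectivity
+   graph2 = GraphUtil.generate_scale_free_graph(5, 3)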
diff --git a/python/altgraph/doc/index.rst b/python/altgraph/doc/index.rst
new file mode 100644
index 000000000..1e8d504ed
--- /dev/null
+++ b/python/altgraph/doc/index.rst
@@ -0,0 +1,41 @@
+.. altgraph documentation master file, created by
+ sphinx-quickstart on Tue Aug 31 11:04:49 2010.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Altgraph - A basic graph library
+================================
+
+altgraph is a fork of graphlib: a graph (network) package for constructing
+graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with
+graphviz output.
+
+The primary users of this package are `macholib <http://pypi.python.org/pypi/macholib>`_ and `modulegraph <http://pypi.python.org/pypi/modulegraph>`_.
+
+.. toctree::
+ :maxdepth: 1
+
+ changelog
+ license
+ core
+ graph
+ objectgraph
+ graphalgo
+ graphstat
+ graphutil
+ dot
+
+Online Resources
+----------------
+
+* `Sourcecode repository on bitbucket <http://bitbucket.org/ronaldoussoren/altgraph/>`_
+
+* `The issue tracker <http://bitbucket.org/ronaldoussoren/altgraph/issues>`_
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/python/altgraph/doc/license.rst b/python/altgraph/doc/license.rst
new file mode 100644
index 000000000..498e60be0
--- /dev/null
+++ b/python/altgraph/doc/license.rst
@@ -0,0 +1,25 @@
+License
+=======
+
+Copyright (c) 2004 Istvan Albert unless otherwise noted.
+
+Parts are copyright (c) Bob Ippolito
+
+Parts are copyright (c) 2010-2014 Ronald Oussoren
+
+MIT License
+...........
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/python/altgraph/doc/objectgraph.rst b/python/altgraph/doc/objectgraph.rst
new file mode 100644
index 000000000..e3df396b1
--- /dev/null
+++ b/python/altgraph/doc/objectgraph.rst
@@ -0,0 +1,146 @@
+:mod:`altgraph.ObjectGraph` --- Graphs of objects with an identifier
+======================================================================
+
+.. module:: altgraph.ObjectGraph
+ :synopsis: A graph of objects that have a "graphident" attribute.
+
+.. class:: ObjectGraph([graph[, debug]])
+
+ A graph of objects that have a "graphident" attribute. The
+ value of this attribute is the key for the object in the
+ graph.
+
+ The optional *graph* is a previously constructed
+ :class:`Graph <altgraph.Graph.Graph>`.
+
+ The optional *debug* level controls the amount of debug output
+ (see :meth:`msg`, :meth:`msgin` and :meth:`msgout`).
+
+   .. note:: the altgraph library does not generate output; the
+      debug attribute and message methods are present for use
+      by subclasses.
+
+.. data:: ObjectGraph.graph
+
+   A :class:`Graph <altgraph.Graph.Graph>` object that contains
+ the graph data.
+
+
+.. method:: ObjectGraph.addNode(node)
+
+ Adds a *node* to the graph.
+
+ .. note:: re-adding a node that was previously removed
+ using :meth:`removeNode` will reinstate the previously
+ removed node.
+
+.. method:: ObjectGraph.createNode(cls, name, \*args, \**kwds)
+
+ Creates a new node using ``cls(*args, **kwds)`` and adds that
+ node using :meth:`addNode`.
+
+ Returns the newly created node.
+
+.. method:: ObjectGraph.removeNode(node)
+
+ Removes a *node* from the graph when it exists. The *node* argument
+ is either a node object, or the graphident of a node.
+
+.. method:: ObjectGraph.createReferences(fromnode, tonode[, edge_data])
+
+ Creates a reference from *fromnode* to *tonode*. The optional
+ *edge_data* is associated with the edge.
+
+ *Fromnode* and *tonode* can either be node objects or the graphident
+ values for nodes.
+
+.. method:: ObjectGraph.removeReference(fromnode, tonode)
+
+ Removes the reference from *fromnode* to *tonode* if it exists.
+
+.. method:: ObjectGraph.getRawIdent(node)
+
+ Returns the *graphident* attribute of *node*, or the graph itself
+ when *node* is :data:`None`.
+
+.. method:: ObjectGraph.getIdent(node)
+
+ Same as :meth:`getRawIdent`, but only if the node is part
+ of the graph.
+
+ *Node* can either be an actual node object or the graphident of
+ a node.
+
+.. method:: ObjectGraph.findNode(node)
+
+   Returns a given node in the graph, or :data:`None` when it cannot
+ be found.
+
+ *Node* is either an object with a *graphident* attribute or
+ the *graphident* attribute itself.
+
+.. method:: ObjectGraph.__contains__(node)
+
+ Returns True if *node* is a member of the graph. *Node* is either an
+ object with a *graphident* attribute or the *graphident* attribute itself.
+
+.. method:: ObjectGraph.flatten([condition[, start]])
+
+ Yield all nodes that are entirely reachable by *condition*
+   starting from the given *start* node or the graph root.
+
+ .. note:: objects are only reachable from the graph root
+ when there is a reference from the root to the node
+ (either directly or through another node)
+
+.. method:: ObjectGraph.nodes()
+
+ Yield all nodes in the graph.
+
+.. method:: ObjectGraph.get_edges(node)
+
+   Returns two iterators that yield the nodes reached by
+   outgoing and incoming edges.
+
+.. method:: ObjectGraph.filterStack(filters)
+
+ Filter the ObjectGraph in-place by removing all edges to nodes that
+   do not match every filter in the given filter list.
+
+   Returns a tuple with the counts
+   (*nodes_visited*, *nodes_removed*, *nodes_orphaned*).
+
+.. method:: ObjectGraph.edgeData(fromNode, toNode)
+
+   Return the edge data associated with the edge from *fromNode*
+   to *toNode*. Raises :exc:`KeyError` when no such edge exists.
+
+   .. versionadded:: 0.12
+
+.. method:: ObjectGraph.updateEdgeData(fromNode, toNode, edgeData)
+
+ Replace the data associated with the edge from *fromNode* to
+ *toNode* by *edgeData*.
+
+ Raises :exc:`KeyError` when the edge does not exist.
+
+Debug output
+------------
+
+.. data:: ObjectGraph.debug
+
+ The current debug level.
+
+.. method:: ObjectGraph.msg(level, text, \*args)
+
+ Print a debug message at the current indentation level when the current
+ debug level is *level* or less.
+
+.. method:: ObjectGraph.msgin(level, text, \*args)
+
+ Print a debug message when the current debug level is *level* or less,
+ and increase the indentation level.
+
+.. method:: ObjectGraph.msgout(level, text, \*args)
+
+ Decrease the indentation level and print a debug message when the
+ current debug level is *level* or less.
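+
+A minimal sketch of a subclass that uses the debug machinery (the ``Node``
+class and the ``debug`` keyword are assumptions for illustration)::
+
+   from altgraph.ObjectGraph import ObjectGraph
+
+   class Node(object):
+       def __init__(self, graphident):
+           self.graphident = graphident
+
+   class MyGraph(ObjectGraph):
+       def add_named(self, name):
+           self.msgin(2, 'add_named', name)
+           node = Node(name)
+           self.addNode(node)
+           self.msgout(2, 'add_named ->', node)
+           return node
+
+   graph = MyGraph(debug=2)
+   graph.add_named('example')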
diff --git a/python/altgraph/setup.cfg b/python/altgraph/setup.cfg
new file mode 100644
index 000000000..9c6880e3c
--- /dev/null
+++ b/python/altgraph/setup.cfg
@@ -0,0 +1,36 @@
+[metadata]
+name = altgraph
+version = 0.12
+description = Python graph (network) package
+long_description_file =
+ README.txt
+ doc/changelog.rst
+author = Ronald Oussoren
+author_email = ronaldoussoren@mac.com
+maintainer = Ronald Oussoren
+maintainer_email = ronaldoussoren@mac.com
+url = http://packages.python.org/altgraph
+download_url = http://pypi.python.org/pypi/altgraph
+license = MIT
+classifiers =
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.3
+ Programming Language :: Python :: 3.4
+ Topic :: Software Development :: Libraries :: Python Modules
+ Topic :: Scientific/Engineering :: Mathematics
+ Topic :: Scientific/Engineering :: Visualization
+keywords = graph
+platforms = any
+packages = altgraph
+zip-safe = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/altgraph/setup.py b/python/altgraph/setup.py
new file mode 100644
index 000000000..a1a4cb6eb
--- /dev/null
+++ b/python/altgraph/setup.py
@@ -0,0 +1,867 @@
+"""
+Shared setup file for simple python packages. Uses a setup.cfg in the
+same format as the distutils2 project, unless noted otherwise.
+
+It exists for two reasons:
+1) This makes it easier to reuse setup.py code between my own
+ projects
+
+2) Easier migration to distutils2 when that catches on.
+
+Additional functionality:
+
+* Section metadata:
+ requires-test: Same as 'tests_require' option for setuptools.
+
+"""
+
+import sys
+import os
+import re
+import platform
+from fnmatch import fnmatch
+import time
+import tempfile
+import tarfile
+try:
+ import urllib.request as urllib
+except ImportError:
+ import urllib
+from distutils import log
+try:
+ from hashlib import md5
+
+except ImportError:
+ from md5 import md5
+
+if sys.version_info[0] == 2:
+ from ConfigParser import RawConfigParser, NoOptionError, NoSectionError
+else:
+ from configparser import RawConfigParser, NoOptionError, NoSectionError
+
+ROOTDIR = os.path.dirname(os.path.abspath(__file__))
+
+
+#
+#
+#
+# Parsing the setup.cfg and converting it to something that can be
+# used by setuptools.setup()
+#
+#
+#
+
+def eval_marker(value):
+ """
+    Evaluate a distutils2 environment marker.
+
+ This code is unsafe when used with hostile setup.cfg files,
+ but that's not a problem for our own files.
+ """
+ value = value.strip()
+
+ class M:
+ def __init__(self, **kwds):
+ for k, v in kwds.items():
+ setattr(self, k, v)
+
+ variables = {
+ 'python_version': '%d.%d'%(sys.version_info[0], sys.version_info[1]),
+ 'python_full_version': sys.version.split()[0],
+ 'os': M(
+ name=os.name,
+ ),
+ 'sys': M(
+ platform=sys.platform,
+ ),
+ 'platform': M(
+ version=platform.version(),
+ machine=platform.machine(),
+ ),
+ }
+
+    return bool(eval(value, variables, variables))
+
+
+def _opt_value(cfg, into, section, key, transform = None):
+ try:
+ v = cfg.get(section, key)
+ if transform != _as_lines and ';' in v:
+ v, marker = v.rsplit(';', 1)
+ if not eval_marker(marker):
+ return
+
+ v = v.strip()
+
+ if v:
+ if transform:
+ into[key] = transform(v.strip())
+ else:
+ into[key] = v.strip()
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+def _as_bool(value):
+ if value.lower() in ('y', 'yes', 'on'):
+ return True
+ elif value.lower() in ('n', 'no', 'off'):
+ return False
+ elif value.isdigit():
+ return bool(int(value))
+ else:
+ raise ValueError(value)
+
+def _as_list(value):
+ return value.split()
+
+def _as_lines(value):
+ result = []
+ for v in value.splitlines():
+ if ';' in v:
+ v, marker = v.rsplit(';', 1)
+ if not eval_marker(marker):
+ continue
+
+ v = v.strip()
+ if v:
+ result.append(v)
+ else:
+ result.append(v)
+ return result
+
+def _map_requirement(value):
+ m = re.search(r'(\S+)\s*(?:\((.*)\))?', value)
+ name = m.group(1)
+ version = m.group(2)
+
+ if version is None:
+ return name
+
+ else:
+ mapped = []
+ for v in version.split(','):
+ v = v.strip()
+ if v[0].isdigit():
+ # Checks for a specific version prefix
+ m = v.rsplit('.', 1)
+ mapped.append('>=%s,<%s.%s'%(
+ v, m[0], int(m[1])+1))
+
+ else:
+ mapped.append(v)
+ return '%s %s'%(name, ','.join(mapped),)
+
+def _as_requires(value):
+ requires = []
+ for req in value.splitlines():
+        if ';' in req:
+            # split off an optional environment marker
+            req, marker = req.rsplit(';', 1)
+            if not eval_marker(marker):
+                continue
+            req = req.strip()
+
+ if not req:
+ continue
+ requires.append(_map_requirement(req))
+ return requires
+
+def parse_setup_cfg():
+ cfg = RawConfigParser()
+ r = cfg.read([os.path.join(ROOTDIR, 'setup.cfg')])
+ if len(r) != 1:
+ print("Cannot read 'setup.cfg'")
+ sys.exit(1)
+
+ metadata = dict(
+ name = cfg.get('metadata', 'name'),
+ version = cfg.get('metadata', 'version'),
+ description = cfg.get('metadata', 'description'),
+ )
+
+ _opt_value(cfg, metadata, 'metadata', 'license')
+ _opt_value(cfg, metadata, 'metadata', 'maintainer')
+ _opt_value(cfg, metadata, 'metadata', 'maintainer_email')
+ _opt_value(cfg, metadata, 'metadata', 'author')
+ _opt_value(cfg, metadata, 'metadata', 'author_email')
+ _opt_value(cfg, metadata, 'metadata', 'url')
+ _opt_value(cfg, metadata, 'metadata', 'download_url')
+ _opt_value(cfg, metadata, 'metadata', 'classifiers', _as_lines)
+ _opt_value(cfg, metadata, 'metadata', 'platforms', _as_list)
+ _opt_value(cfg, metadata, 'metadata', 'packages', _as_list)
+ _opt_value(cfg, metadata, 'metadata', 'keywords', _as_list)
+
+ try:
+ v = cfg.get('metadata', 'requires-dist')
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ requires = _as_requires(v)
+ if requires:
+ metadata['install_requires'] = requires
+
+ try:
+ v = cfg.get('metadata', 'requires-test')
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ requires = _as_requires(v)
+ if requires:
+ metadata['tests_require'] = requires
+
+
+ try:
+ v = cfg.get('metadata', 'long_description_file')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ parts = []
+ for nm in v.split():
+ fp = open(nm, 'rU')
+ parts.append(fp.read())
+ fp.close()
+
+ metadata['long_description'] = '\n\n'.join(parts)
+
+
+ try:
+ v = cfg.get('metadata', 'zip-safe')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ metadata['zip_safe'] = _as_bool(v)
+
+ try:
+ v = cfg.get('metadata', 'console_scripts')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ if 'entry_points' not in metadata:
+ metadata['entry_points'] = {}
+
+ metadata['entry_points']['console_scripts'] = v.splitlines()
+
+    if sys.version_info[:2] <= (2,6):
+        # tests_require is a list when it was parsed from setup.cfg
+        try:
+            metadata['tests_require'].append('unittest2')
+        except KeyError:
+            metadata['tests_require'] = ['unittest2']
+
+ return metadata
+
+
+#
+#
+#
+# Bootstrapping setuptools/distribute, based on
+# a heavily modified version of distribute_setup.py
+#
+#
+#
+
+
+SETUPTOOLS_PACKAGE='setuptools'
+
+
+try:
+ import subprocess
+
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+except ImportError:
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ new_args = []
+        for a in args:
+            new_args.append(a.replace("'", "'\"'\"'"))
+        return os.system(' '.join(new_args)) == 0
+
+
+try:
+ import json
+
+ def get_pypi_src_download(package):
+ url = 'https://pypi.python.org/pypi/%s/json'%(package,)
+ fp = urllib.urlopen(url)
+ try:
+ try:
+ data = fp.read()
+
+ finally:
+ fp.close()
+        except IOError:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ pkgdata = json.loads(data.decode('utf-8'))
+ if 'urls' not in pkgdata:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ for info in pkgdata['urls']:
+ if info['packagetype'] == 'sdist' and info['url'].endswith('tar.gz'):
+ return (info.get('md5_digest'), info['url'])
+
+        raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+except ImportError:
+    # Python 2.5 compatibility: no JSON in the stdlib, but luckily JSON syntax
+    # is similar enough to Python's own syntax that we can abuse the Python compiler
+
+ import _ast as ast
+
+ def get_pypi_src_download(package):
+ url = 'https://pypi.python.org/pypi/%s/json'%(package,)
+ fp = urllib.urlopen(url)
+ try:
+ try:
+ data = fp.read()
+
+ finally:
+ fp.close()
+        except IOError:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+
+ a = compile(data, '-', 'eval', ast.PyCF_ONLY_AST)
+ if not isinstance(a, ast.Expression):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ a = a.body
+ if not isinstance(a, ast.Dict):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ for k, v in zip(a.keys, a.values):
+ if not isinstance(k, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ k = k.s
+ if k == 'urls':
+ a = v
+ break
+ else:
+ raise RuntimeError("PyPI JSON for %s doesn't contain URLs section"%(package,))
+
+ if not isinstance(a, ast.List):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+    for info in a.elts:
+ if not isinstance(info, ast.Dict):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ url = None
+ packagetype = None
+ chksum = None
+
+ for k, v in zip(info.keys, info.values):
+ if not isinstance(k, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ if k.s == 'url':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ url = v.s
+
+ elif k.s == 'packagetype':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ packagetype = v.s
+
+ elif k.s == 'md5_digest':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ chksum = v.s
+
+ if url is not None and packagetype == 'sdist' and url.endswith('.tar.gz'):
+ return (chksum, url)
+
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
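+# Both implementations of get_pypi_src_download() parse the same PyPI
+# JSON document. The keys they rely on ('urls', 'url', 'packagetype',
+# 'md5_digest') sit in a structure roughly like this (abridged,
+# illustrative values):
+#
+#   {"urls": [{"url": ".../setuptools-1.0.tar.gz",
+#              "packagetype": "sdist",
+#              "md5_digest": "0123456789abcdef0123456789abcdef"}]}
+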
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a %s egg in %s', egg, to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(to_dir, packagename=SETUPTOOLS_PACKAGE):
+ tarball = download_setuptools(packagename, to_dir)
+    version = tarball.split('-')[-1][:-7]  # strip the '.tar.gz' suffix
+ egg = os.path.join(to_dir, '%s-%s-py%d.%d.egg'
+ % (packagename, version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools():
+ # making sure we use the absolute path
+ return _do_download(os.path.abspath(os.curdir))
+
+def download_setuptools(packagename, to_dir):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+
+ chksum, url = get_pypi_src_download(packagename)
+ tgz_name = os.path.basename(url)
+ saveto = os.path.join(to_dir, tgz_name)
+
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ log.warn("Downloading %s", url)
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+
+ if chksum is not None:
+ data_sum = md5(data).hexdigest()
+ if data_sum != chksum:
+ raise RuntimeError("Downloading %s failed: corrupt checksum"%(url,))
+
+
+ dst = open(saveto, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+ return os.path.realpath(saveto)
+
+
+
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
+ else:
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
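+# _extractall() is a backport of TarFile.extractall() for old Python
+# versions and is called with an open TarFile as its first argument,
+# as in _build_egg() above. For example (illustrative paths):
+#
+#   tar = tarfile.open('example-1.0.tar.gz')
+#   _extractall(tar, path='/tmp/build')
+#   tar.close()
+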
+#
+#
+#
+# Definitions of custom commands
+#
+#
+#
+
+try:
+ import setuptools
+
+except ImportError:
+ use_setuptools()
+
+from setuptools import setup
+
+try:
+ from distutils.core import PyPIRCCommand
+except ImportError:
+ PyPIRCCommand = None # Ancient python version
+
+from distutils.core import Command
+from distutils.errors import DistutilsError
+from distutils import log
+
+if PyPIRCCommand is None:
+ class upload_docs (Command):
+ description = "upload sphinx documentation"
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ raise DistutilsError("not supported on this version of python")
+
+else:
+ class upload_docs (PyPIRCCommand):
+ description = "upload sphinx documentation"
+ user_options = PyPIRCCommand.user_options
+
+ def initialize_options(self):
+ PyPIRCCommand.initialize_options(self)
+ self.username = ''
+ self.password = ''
+
+
+ def finalize_options(self):
+ PyPIRCCommand.finalize_options(self)
+ config = self._read_pypirc()
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+
+
+ def run(self):
+ import subprocess
+ import shutil
+ import zipfile
+ import os
+ import urllib
+ import StringIO
+ from base64 import standard_b64encode
+ import httplib
+            import urlparse
+            import socket
+
+ # Extract the package name from distutils metadata
+ meta = self.distribution.metadata
+ name = meta.get_name()
+
+ # Run sphinx
+ if os.path.exists('doc/_build'):
+ shutil.rmtree('doc/_build')
+ os.mkdir('doc/_build')
+
+ p = subprocess.Popen(['make', 'html'],
+ cwd='doc')
+ exit = p.wait()
+ if exit != 0:
+ raise DistutilsError("sphinx-build failed")
+
+ # Collect sphinx output
+ if not os.path.exists('dist'):
+ os.mkdir('dist')
+ zf = zipfile.ZipFile('dist/%s-docs.zip'%(name,), 'w',
+ compression=zipfile.ZIP_DEFLATED)
+
+ for toplevel, dirs, files in os.walk('doc/_build/html'):
+ for fn in files:
+ fullname = os.path.join(toplevel, fn)
+ relname = os.path.relpath(fullname, 'doc/_build/html')
+
+ print ("%s -> %s"%(fullname, relname))
+
+ zf.write(fullname, relname)
+
+ zf.close()
+
+ # Upload the results, this code is based on the distutils
+ # 'upload' command.
+ content = open('dist/%s-docs.zip'%(name,), 'rb').read()
+
+ data = {
+ ':action': 'doc_upload',
+ 'name': name,
+ 'content': ('%s-docs.zip'%(name,), content),
+ }
+ auth = "Basic " + standard_b64encode(self.username + ":" +
+ self.password)
+
+
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = '\n--' + boundary
+ end_boundary = sep_boundary + '--'
+ body = StringIO.StringIO()
+            for key, values in data.items():
+                if not isinstance(values, list):
+                    values = [values]
+
+                for value in values:
+                    if isinstance(value, tuple):
+                        fn = ';filename="%s"'%(value[0])
+                        value = value[1]
+                    else:
+                        fn = ''
+
+                    body.write(sep_boundary)
+                    body.write('\nContent-Disposition: form-data; name="%s"'%key)
+                    body.write(fn)
+                    body.write("\n\n")
+                    body.write(value)
+
+            body.write(end_boundary)
+            body.write('\n')
+            body = body.getvalue()
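+            # The assembled request body is ordinary multipart/form-data;
+            # abridged, it looks roughly like this (illustrative):
+            #
+            #   --<boundary>
+            #   Content-Disposition: form-data; name=":action"
+            #
+            #   doc_upload
+            #   --<boundary>
+            #   Content-Disposition: form-data; name="content";filename="<name>-docs.zip"
+            #
+            #   <zip data>
+            #   --<boundary>--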
+
+ self.announce("Uploading documentation to %s"%(self.repository,), log.INFO)
+
+ schema, netloc, url, params, query, fragments = \
+ urlparse.urlparse(self.repository)
+
+
+ if schema == 'http':
+ http = httplib.HTTPConnection(netloc)
+ elif schema == 'https':
+ http = httplib.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema "+schema)
+
+ data = ''
+ loglevel = log.INFO
+ try:
+ http.connect()
+ http.putrequest("POST", url)
+ http.putheader('Content-type',
+ 'multipart/form-data; boundary=%s'%boundary)
+ http.putheader('Content-length', str(len(body)))
+ http.putheader('Authorization', auth)
+ http.endheaders()
+ http.send(body)
+ except socket.error:
+                    e = sys.exc_info()[1]
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = http.getresponse()
+ if r.status in (200, 301):
+ self.announce('Upload succeeded (%s): %s' % (r.status, r.reason),
+ log.INFO)
+ else:
+ self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+ log.ERROR)
+
+ print ('-'*75)
+ print (r.read())
+ print ('-'*75)
+
+
+def recursiveGlob(root, pathPattern):
+ """
+ Recursively look for files matching 'pathPattern'. Return a list
+ of matching files/directories.
+ """
+ result = []
+
+ for rootpath, dirnames, filenames in os.walk(root):
+ for fn in filenames:
+ if fnmatch(fn, pathPattern):
+ result.append(os.path.join(rootpath, fn))
+ return result
+
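+# For example, recursiveGlob('.', 'test_*.py') walks the current tree
+# and returns paths such as ['./example_tests/test_basic.py', ...]
+# (hypothetical output).
+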
+
+def importExternalTestCases(unittest,
+ pathPattern="test_*.py", root=".", package=None):
+ """
+ Import all unittests in the PyObjC tree starting at 'root'
+ """
+
+ testFiles = recursiveGlob(root, pathPattern)
+ testModules = map(lambda x:x[len(root)+1:-3].replace('/', '.'), testFiles)
+ if package is not None:
+ testModules = [(package + '.' + m) for m in testModules]
+
+ suites = []
+
+ for modName in testModules:
+ try:
+ module = __import__(modName)
+ except ImportError:
+ print("SKIP %s: %s"%(modName, sys.exc_info()[1]))
+ continue
+
+ if '.' in modName:
+ for elem in modName.split('.')[1:]:
+ module = getattr(module, elem)
+
+ s = unittest.defaultTestLoader.loadTestsFromModule(module)
+ suites.append(s)
+
+ return unittest.TestSuite(suites)
+
+
+
+class test (Command):
+ description = "run test suite"
+ user_options = [
+ ('verbosity=', None, "print what tests are run"),
+ ]
+
+ def initialize_options(self):
+ self.verbosity='1'
+
+ def finalize_options(self):
+ if isinstance(self.verbosity, str):
+ self.verbosity = int(self.verbosity)
+
+
+ def cleanup_environment(self):
+ ei_cmd = self.get_finalized_command('egg_info')
+ egg_name = ei_cmd.egg_name.replace('-', '_')
+
+ to_remove = []
+ for dirname in sys.path:
+ bn = os.path.basename(dirname)
+ if bn.startswith(egg_name + "-"):
+ to_remove.append(dirname)
+
+ for dirname in to_remove:
+ log.info("removing installed %r from sys.path before testing"%(
+ dirname,))
+ sys.path.remove(dirname)
+
+ def add_project_to_sys_path(self):
+ from pkg_resources import normalize_path, add_activation_listener
+ from pkg_resources import working_set, require
+
+ self.reinitialize_command('egg_info')
+ self.run_command('egg_info')
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+
+        # Check if this distribution is already on sys.path
+        # and remove that version; this ensures that the right
+        # copy of the package gets tested.
+
+ self.__old_path = sys.path[:]
+ self.__old_modules = sys.modules.copy()
+
+
+ ei_cmd = self.get_finalized_command('egg_info')
+ sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+ sys.path.insert(1, os.path.dirname(__file__))
+
+ # Strip the namespace packages defined in this distribution
+ # from sys.modules, needed to reset the search path for
+ # those modules.
+
+        nspkgs = getattr(self.distribution, 'namespace_packages', None)
+ if nspkgs is not None:
+ for nm in nspkgs:
+ del sys.modules[nm]
+
+ # Reset pkg_resources state:
+ add_activation_listener(lambda dist: dist.activate())
+ working_set.__init__()
+ require('%s==%s'%(ei_cmd.egg_name, ei_cmd.egg_version))
+
+ def remove_from_sys_path(self):
+ from pkg_resources import working_set
+ sys.path[:] = self.__old_path
+ sys.modules.clear()
+ sys.modules.update(self.__old_modules)
+ working_set.__init__()
+
+
+ def run(self):
+ import unittest
+
+ # Ensure that build directory is on sys.path (py3k)
+
+ self.cleanup_environment()
+ self.add_project_to_sys_path()
+
+ try:
+ meta = self.distribution.metadata
+ name = meta.get_name()
+ test_pkg = name + "_tests"
+ suite = importExternalTestCases(unittest,
+ "test_*.py", test_pkg, test_pkg)
+
+ runner = unittest.TextTestRunner(verbosity=self.verbosity)
+ result = runner.run(suite)
+
+ # Print out summary. This is a structured format that
+ # should make it easy to use this information in scripts.
+ summary = dict(
+ count=result.testsRun,
+ fails=len(result.failures),
+ errors=len(result.errors),
+ xfails=len(getattr(result, 'expectedFailures', [])),
+ xpass=len(getattr(result, 'expectedSuccesses', [])),
+ skip=len(getattr(result, 'skipped', [])),
+ )
+ print("SUMMARY: %s"%(summary,))
+
+ finally:
+ self.remove_from_sys_path()
+
+#
+#
+#
+# And finally run the setuptools main entry point.
+#
+#
+#
+
+metadata = parse_setup_cfg()
+
+setup(
+ cmdclass=dict(
+ upload_docs=upload_docs,
+ test=test,
+ ),
+ **metadata
+)
diff --git a/python/bitstring/PKG-INFO b/python/bitstring/PKG-INFO
new file mode 100644
index 000000000..1036c45d7
--- /dev/null
+++ b/python/bitstring/PKG-INFO
@@ -0,0 +1,122 @@
+Metadata-Version: 1.1
+Name: bitstring
+Version: 3.1.3
+Summary: Simple construction, analysis and modification of binary data.
+Home-page: http://python-bitstring.googlecode.com
+Author: Scott Griffiths
+Author-email: scott@griffiths.name
+License: The MIT License: http://www.opensource.org/licenses/mit-license.php
+Download-URL: http://python-bitstring.googlecode.com
+Description: ================
+ bitstring module
+ ================
+
+ **bitstring** is a pure Python module designed to help make
+ the creation and analysis of binary data as simple and natural as possible.
+
+ Bitstrings can be constructed from integers (big and little endian), hex,
+ octal, binary, strings or files. They can be sliced, joined, reversed,
+ inserted into, overwritten, etc. with simple functions or slice notation.
+ They can also be read from, searched and replaced, and navigated in,
+ similar to a file or stream.
+
+ bitstring is open source software, and has been released under the MIT
+ licence.
+
+ This version supports Python 2.6 and later (including Python 3).
+ For Python 2.4 and 2.5 you should instead download version 1.0.
+
+ Documentation
+ -------------
+        The manual for the bitstring module is available at
+ <http://packages.python.org/bitstring>. It contains a walk-through of all
+ the features and a complete reference section.
+
+ It is also available as a PDF as part of the source download.
+
+ Installation
+ ------------
+ If you have downloaded and unzipped the package then you need to run the
+ ``setup.py`` script with the 'install' argument::
+
+ python setup.py install
+
+ You may need to run this with root privileges on Unix-like systems.
+
+
+ If you haven't yet downloaded the package then you can just try::
+
+ easy_install bitstring
+
+ or ::
+
+ pip install bitstring
+
+
+ Simple Examples
+ ---------------
+ Creation::
+
+ >>> a = BitArray(bin='00101')
+ >>> b = Bits(a_file_object)
+ >>> c = BitArray('0xff, 0b101, 0o65, uint:6=22')
+ >>> d = pack('intle:16, hex=a, 0b1', 100, a='0x34f')
+ >>> e = pack('<16h', *range(16))
+
+ Different interpretations, slicing and concatenation::
+
+ >>> a = BitArray('0x1af')
+ >>> a.hex, a.bin, a.uint
+ ('1af', '000110101111', 431)
+ >>> a[10:3:-1].bin
+ '1110101'
+ >>> 3*a + '0b100'
+ BitArray('0o0657056705674')
+
+ Reading data sequentially::
+
+ >>> b = BitStream('0x160120f')
+ >>> b.read(12).hex
+ '160'
+ >>> b.pos = 0
+ >>> b.read('uint:12')
+ 352
+ >>> b.readlist('uint:12, bin:3')
+ [288, '111']
+
+ Searching, inserting and deleting::
+
+ >>> c = BitArray('0b00010010010010001111') # c.hex == '0x1248f'
+ >>> c.find('0x48')
+ (8,)
+ >>> c.replace('0b001', '0xabc')
+ >>> c.insert('0b0000')
+ >>> del c[12:16]
+
+ Unit Tests
+ ----------
+
+ The 400+ unit tests should all pass for Python 2.6 and later.
+
+ ----
+
+ The bitstring module has been released as open source under the MIT License.
+ Copyright (c) 2014 Scott Griffiths
+
+ For more information see the project's homepage on Google Code:
+ <http://python-bitstring.googlecode.com>
+
+
+Platform: all
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/python/bitstring/README.txt b/python/bitstring/README.txt
new file mode 100644
index 000000000..491c2f8cf
--- /dev/null
+++ b/python/bitstring/README.txt
@@ -0,0 +1,99 @@
+================
+bitstring module
+================
+
+**bitstring** is a pure Python module designed to help make
+the creation and analysis of binary data as simple and natural as possible.
+
+Bitstrings can be constructed from integers (big and little endian), hex,
+octal, binary, strings or files. They can be sliced, joined, reversed,
+inserted into, overwritten, etc. with simple functions or slice notation.
+They can also be read from, searched and replaced, and navigated in,
+similar to a file or stream.
+
+bitstring is open source software, and has been released under the MIT
+licence.
+
+This version supports Python 2.6 and later (including Python 3).
+For Python 2.4 and 2.5 you should instead download version 1.0.
+
+Documentation
+-------------
+The manual for the bitstring module is available at
+<http://packages.python.org/bitstring>. It contains a walk-through of all
+the features and a complete reference section.
+
+It is also available as a PDF as part of the source download.
+
+Installation
+------------
+If you have downloaded and unzipped the package then you need to run the
+``setup.py`` script with the 'install' argument::
+
+ python setup.py install
+
+You may need to run this with root privileges on Unix-like systems.
+
+
+If you haven't yet downloaded the package then you can just try::
+
+ easy_install bitstring
+
+or ::
+
+ pip install bitstring
+
+
+Simple Examples
+---------------
+Creation::
+
+ >>> a = BitArray(bin='00101')
+ >>> b = Bits(a_file_object)
+ >>> c = BitArray('0xff, 0b101, 0o65, uint:6=22')
+ >>> d = pack('intle:16, hex=a, 0b1', 100, a='0x34f')
+ >>> e = pack('<16h', *range(16))
+
+Different interpretations, slicing and concatenation::
+
+ >>> a = BitArray('0x1af')
+ >>> a.hex, a.bin, a.uint
+ ('1af', '000110101111', 431)
+ >>> a[10:3:-1].bin
+ '1110101'
+ >>> 3*a + '0b100'
+ BitArray('0o0657056705674')
+
+Reading data sequentially::
+
+ >>> b = BitStream('0x160120f')
+ >>> b.read(12).hex
+ '160'
+ >>> b.pos = 0
+ >>> b.read('uint:12')
+ 352
+ >>> b.readlist('uint:12, bin:3')
+ [288, '111']
+
+Searching, inserting and deleting::
+
+ >>> c = BitArray('0b00010010010010001111') # c.hex == '0x1248f'
+ >>> c.find('0x48')
+ (8,)
+ >>> c.replace('0b001', '0xabc')
+ >>> c.insert('0b0000')
+ >>> del c[12:16]
+
+Unit Tests
+----------
+
+The 400+ unit tests should all pass for Python 2.6 and later.
+
+----
+
+The bitstring module has been released as open source under the MIT License.
+Copyright (c) 2014 Scott Griffiths
+
+For more information see the project's homepage on Google Code:
+<http://python-bitstring.googlecode.com>
+
diff --git a/python/bitstring/bitstring.py b/python/bitstring/bitstring.py
new file mode 100644
index 000000000..86f969c7f
--- /dev/null
+++ b/python/bitstring/bitstring.py
@@ -0,0 +1,4234 @@
+#!/usr/bin/env python
+# cython: profile=True
+"""
+This package defines classes that simplify bit-wise creation, manipulation and
+interpretation of data.
+
+Classes:
+
+Bits -- An immutable container for binary data.
+BitArray -- A mutable container for binary data.
+ConstBitStream -- An immutable container with streaming methods.
+BitStream -- A mutable container with streaming methods.
+
+ Bits (base class)
+ / \
+ + mutating methods / \ + streaming methods
+ / \
+ BitArray ConstBitStream
+ \ /
+ \ /
+ \ /
+ BitStream
+
+Functions:
+
+pack -- Create a BitStream from a format string.
+
+Exceptions:
+
+Error -- Module exception base class.
+CreationError -- Error during creation.
+InterpretError -- Inappropriate interpretation of binary data.
+ByteAlignError -- Whole byte position or length needed.
+ReadError -- Reading or peeking past the end of a bitstring.
+
+http://python-bitstring.googlecode.com
+"""
+
+__licence__ = """
+The MIT License
+
+Copyright (c) 2006-2014 Scott Griffiths (scott@griffiths.name)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+__version__ = "3.1.3"
+
+__author__ = "Scott Griffiths"
+
+import numbers
+import copy
+import sys
+import re
+import binascii
+import mmap
+import os
+import struct
+import operator
+import collections
+
+byteorder = sys.byteorder
+
+bytealigned = False
+"""Determines whether a number of methods default to working only on byte boundaries."""
+
+# Maximum number of digits to use in __str__ and __repr__.
+MAX_CHARS = 250
+
+# Maximum size of caches used for speed optimisations.
+CACHE_SIZE = 1000
+
+class Error(Exception):
+ """Base class for errors in the bitstring module."""
+
+ def __init__(self, *params):
+ self.msg = params[0] if params else ''
+ self.params = params[1:]
+
+ def __str__(self):
+ if self.params:
+ return self.msg.format(*self.params)
+ return self.msg
+
+
+class ReadError(Error, IndexError):
+ """Reading or peeking past the end of a bitstring."""
+
+ def __init__(self, *params):
+ Error.__init__(self, *params)
+
+
+class InterpretError(Error, ValueError):
+ """Inappropriate interpretation of binary data."""
+
+ def __init__(self, *params):
+ Error.__init__(self, *params)
+
+
+class ByteAlignError(Error):
+ """Whole-byte position or length needed."""
+
+ def __init__(self, *params):
+ Error.__init__(self, *params)
+
+
+class CreationError(Error, ValueError):
+ """Inappropriate argument during bitstring creation."""
+
+ def __init__(self, *params):
+ Error.__init__(self, *params)
+
+
+class ConstByteStore(object):
+ """Stores raw bytes together with a bit offset and length.
+
+ Used internally - not part of public interface.
+ """
+
+ __slots__ = ('offset', '_rawarray', 'bitlength')
+
+ def __init__(self, data, bitlength=None, offset=None):
+ """data is either a bytearray or a MmapByteArray"""
+ self._rawarray = data
+ if offset is None:
+ offset = 0
+ if bitlength is None:
+ bitlength = 8 * len(data) - offset
+ self.offset = offset
+ self.bitlength = bitlength
+
+ def getbit(self, pos):
+ assert 0 <= pos < self.bitlength
+ byte, bit = divmod(self.offset + pos, 8)
+ return bool(self._rawarray[byte] & (128 >> bit))
+
+ def getbyte(self, pos):
+ """Direct access to byte data."""
+ return self._rawarray[pos]
+
+ def getbyteslice(self, start, end):
+ """Direct access to byte data."""
+ c = self._rawarray[start:end]
+ return c
+
+ @property
+ def bytelength(self):
+ if not self.bitlength:
+ return 0
+ sb = self.offset // 8
+ eb = (self.offset + self.bitlength - 1) // 8
+ return eb - sb + 1
+
+ def __copy__(self):
+ return ByteStore(self._rawarray[:], self.bitlength, self.offset)
+
+ def _appendstore(self, store):
+ """Join another store on to the end of this one."""
+ if not store.bitlength:
+ return
+ # Set new array offset to the number of bits in the final byte of current array.
+ store = offsetcopy(store, (self.offset + self.bitlength) % 8)
+ if store.offset:
+ # first do the byte with the join.
+ joinval = (self._rawarray.pop() & (255 ^ (255 >> store.offset)) |
+ (store.getbyte(0) & (255 >> store.offset)))
+ self._rawarray.append(joinval)
+ self._rawarray.extend(store._rawarray[1:])
+ else:
+ self._rawarray.extend(store._rawarray)
+ self.bitlength += store.bitlength
+
+ def _prependstore(self, store):
+ """Join another store on to the start of this one."""
+ if not store.bitlength:
+ return
+        # Set the offset of the copy of store so that its final byte
+        # ends in a position that matches the offset of self,
+        # then join self on to the end of it.
+ store = offsetcopy(store, (self.offset - store.bitlength) % 8)
+ assert (store.offset + store.bitlength) % 8 == self.offset % 8
+ bit_offset = self.offset % 8
+ if bit_offset:
+ # first do the byte with the join.
+ store.setbyte(-1, (store.getbyte(-1) & (255 ^ (255 >> bit_offset)) | \
+ (self._rawarray[self.byteoffset] & (255 >> bit_offset))))
+ store._rawarray.extend(self._rawarray[self.byteoffset + 1: self.byteoffset + self.bytelength])
+ else:
+ store._rawarray.extend(self._rawarray[self.byteoffset: self.byteoffset + self.bytelength])
+ self._rawarray = store._rawarray
+ self.offset = store.offset
+ self.bitlength += store.bitlength
+
+ @property
+ def byteoffset(self):
+ return self.offset // 8
+
+ @property
+ def rawbytes(self):
+ return self._rawarray
+
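+# A worked example of the bit addressing used above (illustrative):
+#
+#   s = ConstByteStore(bytearray([0b10100000]), bitlength=3, offset=0)
+#   s.getbit(0), s.getbit(1), s.getbit(2)   # -> True, False, True
+#
+# getbit() locates the byte and bit with divmod(offset + pos, 8) and
+# masks the stored byte with (128 >> bit).
+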
+
+class ByteStore(ConstByteStore):
+ """Adding mutating methods to ConstByteStore
+
+ Used internally - not part of public interface.
+ """
+ __slots__ = ()
+
+ def setbit(self, pos):
+ assert 0 <= pos < self.bitlength
+ byte, bit = divmod(self.offset + pos, 8)
+ self._rawarray[byte] |= (128 >> bit)
+
+ def unsetbit(self, pos):
+ assert 0 <= pos < self.bitlength
+ byte, bit = divmod(self.offset + pos, 8)
+ self._rawarray[byte] &= ~(128 >> bit)
+
+ def invertbit(self, pos):
+ assert 0 <= pos < self.bitlength
+ byte, bit = divmod(self.offset + pos, 8)
+ self._rawarray[byte] ^= (128 >> bit)
+
+ def setbyte(self, pos, value):
+ self._rawarray[pos] = value
+
+ def setbyteslice(self, start, end, value):
+ self._rawarray[start:end] = value
+
+
+def offsetcopy(s, newoffset):
+ """Return a copy of a ByteStore with the newoffset.
+
+ Not part of public interface.
+ """
+ assert 0 <= newoffset < 8
+ if not s.bitlength:
+ return copy.copy(s)
+ else:
+ if newoffset == s.offset % 8:
+ return ByteStore(s.getbyteslice(s.byteoffset, s.byteoffset + s.bytelength), s.bitlength, newoffset)
+ newdata = []
+ d = s._rawarray
+ assert newoffset != s.offset % 8
+ if newoffset < s.offset % 8:
+ # We need to shift everything left
+ shiftleft = s.offset % 8 - newoffset
+ # First deal with everything except for the final byte
+ for x in range(s.byteoffset, s.byteoffset + s.bytelength - 1):
+ newdata.append(((d[x] << shiftleft) & 0xff) +\
+ (d[x + 1] >> (8 - shiftleft)))
+ bits_in_last_byte = (s.offset + s.bitlength) % 8
+ if not bits_in_last_byte:
+ bits_in_last_byte = 8
+ if bits_in_last_byte > shiftleft:
+ newdata.append((d[s.byteoffset + s.bytelength - 1] << shiftleft) & 0xff)
+ else: # newoffset > s._offset % 8
+ shiftright = newoffset - s.offset % 8
+ newdata.append(s.getbyte(0) >> shiftright)
+ for x in range(s.byteoffset + 1, s.byteoffset + s.bytelength):
+ newdata.append(((d[x - 1] << (8 - shiftright)) & 0xff) +\
+ (d[x] >> shiftright))
+ bits_in_last_byte = (s.offset + s.bitlength) % 8
+ if not bits_in_last_byte:
+ bits_in_last_byte = 8
+ if bits_in_last_byte + shiftright > 8:
+ newdata.append((d[s.byteoffset + s.bytelength - 1] << (8 - shiftright)) & 0xff)
+ new_s = ByteStore(bytearray(newdata), s.bitlength, newoffset)
+ assert new_s.offset == newoffset
+ return new_s
+
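+# For example (illustrative): a three-bit store holding 0b101 at
+# offset 0 is backed by the byte 0b10100000; offsetcopy(s, 3) returns
+# a store backed by 0b00010100 with offset 3, i.e. the same three bits
+# shifted right within the byte.
+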
+
+def equal(a, b):
+ """Return True if ByteStores a == b.
+
+ Not part of public interface.
+ """
+ # We want to return False for inequality as soon as possible, which
+ # means we get lots of special cases.
+ # First the easy one - compare lengths:
+ a_bitlength = a.bitlength
+ b_bitlength = b.bitlength
+ if a_bitlength != b_bitlength:
+ return False
+ if not a_bitlength:
+ assert b_bitlength == 0
+ return True
+ # Make 'a' the one with the smaller offset
+ if (a.offset % 8) > (b.offset % 8):
+ a, b = b, a
+ # and create some aliases
+ a_bitoff = a.offset % 8
+ b_bitoff = b.offset % 8
+ a_byteoffset = a.byteoffset
+ b_byteoffset = b.byteoffset
+ a_bytelength = a.bytelength
+ b_bytelength = b.bytelength
+ da = a._rawarray
+ db = b._rawarray
+
+ # If they are pointing to the same data, they must be equal
+ if da is db and a.offset == b.offset:
+ return True
+
+ if a_bitoff == b_bitoff:
+ bits_spare_in_last_byte = 8 - (a_bitoff + a_bitlength) % 8
+ if bits_spare_in_last_byte == 8:
+ bits_spare_in_last_byte = 0
+ # Special case for a, b contained in a single byte
+ if a_bytelength == 1:
+ a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
+ b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
+ return a_val == b_val
+ # Otherwise check first byte
+ if da[a_byteoffset] & (0xff >> a_bitoff) != db[b_byteoffset] & (0xff >> b_bitoff):
+ return False
+ # then everything up to the last
+ b_a_offset = b_byteoffset - a_byteoffset
+ for x in range(1 + a_byteoffset, a_byteoffset + a_bytelength - 1):
+ if da[x] != db[b_a_offset + x]:
+ return False
+ # and finally the last byte
+ return (da[a_byteoffset + a_bytelength - 1] >> bits_spare_in_last_byte ==
+ db[b_byteoffset + b_bytelength - 1] >> bits_spare_in_last_byte)
+
+ assert a_bitoff != b_bitoff
+ # This is how much we need to shift a to the right to compare with b:
+ shift = b_bitoff - a_bitoff
+ # Special case for b only one byte long
+ if b_bytelength == 1:
+ assert a_bytelength == 1
+ a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
+ b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
+ return a_val == b_val
+ # Special case for a only one byte long
+ if a_bytelength == 1:
+ assert b_bytelength == 2
+ a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
+ b_val = ((db[b_byteoffset] << 8) + db[b_byteoffset + 1]) << b_bitoff
+ b_val &= 0xffff
+ b_val >>= 16 - b_bitlength
+ return a_val == b_val
+
+ # Compare first byte of b with bits from first byte of a
+ if (da[a_byteoffset] & (0xff >> a_bitoff)) >> shift != db[b_byteoffset] & (0xff >> b_bitoff):
+ return False
+ # Now compare every full byte of b with bits from 2 bytes of a
+ for x in range(1, b_bytelength - 1):
+ # Construct byte from 2 bytes in a to compare to byte in b
+ b_val = db[b_byteoffset + x]
+ a_val = ((da[a_byteoffset + x - 1] << 8) + da[a_byteoffset + x]) >> shift
+ a_val &= 0xff
+ if a_val != b_val:
+ return False
+
+ # Now check bits in final byte of b
+ final_b_bits = (b.offset + b_bitlength) % 8
+ if not final_b_bits:
+ final_b_bits = 8
+ b_val = db[b_byteoffset + b_bytelength - 1] >> (8 - final_b_bits)
+ final_a_bits = (a.offset + a_bitlength) % 8
+ if not final_a_bits:
+ final_a_bits = 8
+ if b.bytelength > a_bytelength:
+ assert b_bytelength == a_bytelength + 1
+ a_val = da[a_byteoffset + a_bytelength - 1] >> (8 - final_a_bits)
+ a_val &= 0xff >> (8 - final_b_bits)
+ return a_val == b_val
+ assert a_bytelength == b_bytelength
+ a_val = da[a_byteoffset + a_bytelength - 2] << 8
+ a_val += da[a_byteoffset + a_bytelength - 1]
+ a_val >>= (8 - final_a_bits)
+ a_val &= 0xff >> (8 - final_b_bits)
+ return a_val == b_val
+
+
+class MmapByteArray(object):
+ """Looks like a bytearray, but from an mmap.
+
+ Not part of public interface.
+ """
+
+ __slots__ = ('filemap', 'filelength', 'source', 'byteoffset', 'bytelength')
+
+ def __init__(self, source, bytelength=None, byteoffset=None):
+ self.source = source
+ source.seek(0, os.SEEK_END)
+ self.filelength = source.tell()
+ if byteoffset is None:
+ byteoffset = 0
+ if bytelength is None:
+ bytelength = self.filelength - byteoffset
+ self.byteoffset = byteoffset
+ self.bytelength = bytelength
+ self.filemap = mmap.mmap(source.fileno(), 0, access=mmap.ACCESS_READ)
+
+ def __getitem__(self, key):
+ try:
+ start = key.start
+ stop = key.stop
+ except AttributeError:
+ try:
+ assert 0 <= key < self.bytelength
+ return ord(self.filemap[key + self.byteoffset])
+ except TypeError:
+ # for Python 3
+ return self.filemap[key + self.byteoffset]
+ else:
+ if start is None:
+ start = 0
+ if stop is None:
+ stop = self.bytelength
+ assert key.step is None
+ assert 0 <= start < self.bytelength
+ assert 0 <= stop <= self.bytelength
+ s = slice(start + self.byteoffset, stop + self.byteoffset)
+ return bytearray(self.filemap.__getitem__(s))
+
+ def __len__(self):
+ return self.bytelength
+
+
+# This creates a dictionary for every possible byte with the value being
+# the key with its bits reversed.
+BYTE_REVERSAL_DICT = dict()
+
+# For Python 2.x / 3.x coexistence
+# Yes this is very very hacky.
+try:
+ xrange
+ for i in range(256):
+ BYTE_REVERSAL_DICT[i] = chr(int("{0:08b}".format(i)[::-1], 2))
+except NameError:
+ for i in range(256):
+ BYTE_REVERSAL_DICT[i] = bytes([int("{0:08b}".format(i)[::-1], 2)])
+ from io import IOBase as file
+ xrange = range
+ basestring = str
+
+# Python 2.x octals start with '0'; in Python 3 they start with '0o'.
+LEADING_OCT_CHARS = len(oct(1)) - 1
+
+def tidy_input_string(s):
+ """Return string made lowercase and with all whitespace removed."""
+ s = ''.join(s.split()).lower()
+ return s
+
+INIT_NAMES = ('uint', 'int', 'ue', 'se', 'sie', 'uie', 'hex', 'oct', 'bin', 'bits',
+ 'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne',
+ 'float', 'floatbe', 'floatle', 'floatne', 'bytes', 'bool', 'pad')
+
+TOKEN_RE = re.compile(r'(?P<name>' + '|'.join(INIT_NAMES) +
+ r')((:(?P<len>[^=]+)))?(=(?P<value>.*))?$', re.IGNORECASE)
+DEFAULT_UINT = re.compile(r'(?P<len>[^=]+)?(=(?P<value>.*))?$', re.IGNORECASE)
+
+MULTIPLICATIVE_RE = re.compile(r'(?P<factor>.*)\*(?P<token>.+)')
+
+# Hex, oct or binary literals
+LITERAL_RE = re.compile(r'(?P<name>0(x|o|b))(?P<value>.+)', re.IGNORECASE)
+
+# An endianness indicator followed by one or more struct.pack codes
+STRUCT_PACK_RE = re.compile(r'(?P<endian><|>|@)?(?P<fmt>(?:\d*[bBhHlLqQfd])+)$')
+
+# A number followed by a single character struct.pack code
+STRUCT_SPLIT_RE = re.compile(r'\d*[bBhHlLqQfd]')
+
+# These replicate the struct.pack codes
+# Big-endian
+REPLACEMENTS_BE = {'b': 'intbe:8', 'B': 'uintbe:8',
+ 'h': 'intbe:16', 'H': 'uintbe:16',
+ 'l': 'intbe:32', 'L': 'uintbe:32',
+ 'q': 'intbe:64', 'Q': 'uintbe:64',
+ 'f': 'floatbe:32', 'd': 'floatbe:64'}
+# Little-endian
+REPLACEMENTS_LE = {'b': 'intle:8', 'B': 'uintle:8',
+ 'h': 'intle:16', 'H': 'uintle:16',
+ 'l': 'intle:32', 'L': 'uintle:32',
+ 'q': 'intle:64', 'Q': 'uintle:64',
+ 'f': 'floatle:32', 'd': 'floatle:64'}
+
+# Size in bytes of all the pack codes.
+PACK_CODE_SIZE = {'b': 1, 'B': 1, 'h': 2, 'H': 2, 'l': 4, 'L': 4,
+ 'q': 8, 'Q': 8, 'f': 4, 'd': 8}
+
+_tokenname_to_initialiser = {'hex': 'hex', '0x': 'hex', '0X': 'hex', 'oct': 'oct',
+ '0o': 'oct', '0O': 'oct', 'bin': 'bin', '0b': 'bin',
+ '0B': 'bin', 'bits': 'auto', 'bytes': 'bytes', 'pad': 'pad'}
+
+def structparser(token):
+ """Parse struct-like format string token into sub-token list."""
+ m = STRUCT_PACK_RE.match(token)
+ if not m:
+ return [token]
+ else:
+ endian = m.group('endian')
+ if endian is None:
+ return [token]
+ # Split the format string into a list of 'q', '4h' etc.
+ formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
+        # Now deal with multiplicative factors, 4h -> hhhh etc.
+ fmt = ''.join([f[-1] * int(f[:-1]) if len(f) != 1 else
+ f for f in formatlist])
+ if endian == '@':
+ # Native endianness
+ if byteorder == 'little':
+ endian = '<'
+ else:
+ assert byteorder == 'big'
+ endian = '>'
+ if endian == '<':
+ tokens = [REPLACEMENTS_LE[c] for c in fmt]
+ else:
+ assert endian == '>'
+ tokens = [REPLACEMENTS_BE[c] for c in fmt]
+ return tokens
+
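+# For example, structparser('>2h') expands to ['intbe:16', 'intbe:16'],
+# and structparser('<3B') expands to ['uintle:8', 'uintle:8', 'uintle:8'].
+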
+def tokenparser(fmt, keys=None, token_cache={}):
+ """Divide the format string into tokens and parse them.
+
+    Return stretchy token and list of [initialiser, length, value].
+ initialiser is one of: hex, oct, bin, uint, int, se, ue, 0x, 0o, 0b etc.
+ length is None if not known, as is value.
+
+ If the token is in the keyword dictionary (keys) then it counts as a
+ special case and isn't messed with.
+
+ tokens must be of the form: [factor*][initialiser][:][length][=value]
+
+ """
+ try:
+ return token_cache[(fmt, keys)]
+ except KeyError:
+ token_key = (fmt, keys)
+ # Very inefficient expanding of brackets.
+ fmt = expand_brackets(fmt)
+ # Split tokens by ',' and remove whitespace
+ # The meta_tokens can either be ordinary single tokens or multiple
+ # struct-format token strings.
+ meta_tokens = (''.join(f.split()) for f in fmt.split(','))
+ return_values = []
+ stretchy_token = False
+ for meta_token in meta_tokens:
+ # See if it has a multiplicative factor
+ m = MULTIPLICATIVE_RE.match(meta_token)
+ if not m:
+ factor = 1
+ else:
+ factor = int(m.group('factor'))
+ meta_token = m.group('token')
+ # See if it's a struct-like format
+ tokens = structparser(meta_token)
+ ret_vals = []
+ for token in tokens:
+ if keys and token in keys:
+ # Don't bother parsing it, it's a keyword argument
+ ret_vals.append([token, None, None])
+ continue
+ value = length = None
+ if token == '':
+ continue
+ # Match literal tokens of the form 0x... 0o... and 0b...
+ m = LITERAL_RE.match(token)
+ if m:
+ name = m.group('name')
+ value = m.group('value')
+ ret_vals.append([name, length, value])
+ continue
+ # Match everything else:
+ m1 = TOKEN_RE.match(token)
+ if not m1:
+ # and if you don't specify a 'name' then the default is 'uint':
+ m2 = DEFAULT_UINT.match(token)
+ if not m2:
+ raise ValueError("Don't understand token '{0}'.".format(token))
+ if m1:
+ name = m1.group('name')
+ length = m1.group('len')
+ if m1.group('value'):
+ value = m1.group('value')
+ else:
+ assert m2
+ name = 'uint'
+ length = m2.group('len')
+ if m2.group('value'):
+ value = m2.group('value')
+ if name == 'bool':
+ if length is not None:
+ raise ValueError("You can't specify a length with bool tokens - they are always one bit.")
+ length = 1
+ if length is None and name not in ('se', 'ue', 'sie', 'uie'):
+ stretchy_token = True
+ if length is not None:
+ # Try converting length to int, otherwise check it's a key.
+ try:
+ length = int(length)
+ if length < 0:
+ raise Error
+ # For the 'bytes' token convert length to bits.
+ if name == 'bytes':
+ length *= 8
+ except Error:
+ raise ValueError("Can't read a token with a negative length.")
+ except ValueError:
+ if not keys or length not in keys:
+ raise ValueError("Don't understand length '{0}' of token.".format(length))
+ ret_vals.append([name, length, value])
+            # This multiplies by the multiplicative factor, but it means that
+            # we can't allow keyword values as multipliers (e.g. n*uint:8).
+            # The only way to do this would be to return the factor in some fashion
+            # (we can't use the key's value here as it would mean that we couldn't
+            # sensibly continue to cache the function's results). (TODO)
+ return_values.extend(ret_vals * factor)
+ return_values = [tuple(x) for x in return_values]
+ if len(token_cache) < CACHE_SIZE:
+ token_cache[token_key] = stretchy_token, return_values
+ return stretchy_token, return_values
+
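+# For example, tokenparser('uint:8=5, 2*bool') returns
+# (False, [('uint', 8, '5'), ('bool', 1, None), ('bool', 1, None)]):
+# no stretchy token, and each 'bool' is given its implicit one-bit length.
+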
+# Looks for first number*(
+BRACKET_RE = re.compile(r'(?P<factor>\d+)\*\(')
+
+def expand_brackets(s):
+ """Remove whitespace and expand all brackets."""
+ s = ''.join(s.split())
+ while True:
+ start = s.find('(')
+ if start == -1:
+ break
+ count = 1 # Number of hanging open brackets
+ p = start + 1
+ while p < len(s):
+ if s[p] == '(':
+ count += 1
+ if s[p] == ')':
+ count -= 1
+ if not count:
+ break
+ p += 1
+ if count:
+ raise ValueError("Unbalanced parenthesis in '{0}'.".format(s))
+ if start == 0 or s[start - 1] != '*':
+ s = s[0:start] + s[start + 1:p] + s[p + 1:]
+ else:
+ m = BRACKET_RE.search(s)
+ if m:
+ factor = int(m.group('factor'))
+ matchstart = m.start('factor')
+ s = s[0:matchstart] + (factor - 1) * (s[start + 1:p] + ',') + s[start + 1:p] + s[p + 1:]
+ else:
+ raise ValueError("Failed to parse '{0}'.".format(s))
+ return s
+
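+# For example, expand_brackets('2*(0b1, 0x3)') returns '0b1,0x3,0b1,0x3'.
+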
+
+# This converts a single octal digit to 3 bits.
+OCT_TO_BITS = ['{0:03b}'.format(i) for i in xrange(8)]
+
+# A dictionary of number of 1 bits contained in binary representation of any byte
+BIT_COUNT = dict(zip(xrange(256), [bin(i).count('1') for i in xrange(256)]))
+
+
+class Bits(object):
+ """A container holding an immutable sequence of bits.
+
+ For a mutable container use the BitArray class instead.
+
+ Methods:
+
+ all() -- Check if all specified bits are set to 1 or 0.
+ any() -- Check if any of specified bits are set to 1 or 0.
+ count() -- Count the number of bits set to 1 or 0.
+ cut() -- Create generator of constant sized chunks.
+ endswith() -- Return whether the bitstring ends with a sub-string.
+ find() -- Find a sub-bitstring in the current bitstring.
+ findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
+ join() -- Join bitstrings together using current bitstring.
+ rfind() -- Seek backwards to find a sub-bitstring.
+ split() -- Create generator of chunks split by a delimiter.
+ startswith() -- Return whether the bitstring starts with a sub-bitstring.
+ tobytes() -- Return bitstring as bytes, padding if needed.
+ tofile() -- Write bitstring to file, padding if needed.
+ unpack() -- Interpret bits using format string.
+
+ Special methods:
+
+ Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^.
+
+ Properties:
+
+ bin -- The bitstring as a binary string.
+ bool -- For single bit bitstrings, interpret as True or False.
+ bytes -- The bitstring as a bytes object.
+ float -- Interpret as a floating point number.
+ floatbe -- Interpret as a big-endian floating point number.
+ floatle -- Interpret as a little-endian floating point number.
+ floatne -- Interpret as a native-endian floating point number.
+ hex -- The bitstring as a hexadecimal string.
+ int -- Interpret as a two's complement signed integer.
+ intbe -- Interpret as a big-endian signed integer.
+ intle -- Interpret as a little-endian signed integer.
+ intne -- Interpret as a native-endian signed integer.
+ len -- Length of the bitstring in bits.
+ oct -- The bitstring as an octal string.
+ se -- Interpret as a signed exponential-Golomb code.
+ ue -- Interpret as an unsigned exponential-Golomb code.
+ sie -- Interpret as a signed interleaved exponential-Golomb code.
+ uie -- Interpret as an unsigned interleaved exponential-Golomb code.
+ uint -- Interpret as a two's complement unsigned integer.
+ uintbe -- Interpret as a big-endian unsigned integer.
+ uintle -- Interpret as a little-endian unsigned integer.
+ uintne -- Interpret as a native-endian unsigned integer.
+
+ """
+
+ __slots__ = ('_datastore')
+
+ def __init__(self, auto=None, length=None, offset=None, **kwargs):
+ """Either specify an 'auto' initialiser:
+ auto -- a string of comma separated tokens, an integer, a file object,
+ a bytearray, a boolean iterable or another bitstring.
+
+ Or initialise via **kwargs with one (and only one) of:
+ bytes -- raw data as a string, for example read from a binary file.
+ bin -- binary string representation, e.g. '0b001010'.
+ hex -- hexadecimal string representation, e.g. '0x2ef'
+ oct -- octal string representation, e.g. '0o777'.
+ uint -- an unsigned integer.
+ int -- a signed integer.
+ float -- a floating point number.
+ uintbe -- an unsigned big-endian whole byte integer.
+ intbe -- a signed big-endian whole byte integer.
+ floatbe - a big-endian floating point number.
+ uintle -- an unsigned little-endian whole byte integer.
+ intle -- a signed little-endian whole byte integer.
+ floatle -- a little-endian floating point number.
+ uintne -- an unsigned native-endian whole byte integer.
+ intne -- a signed native-endian whole byte integer.
+ floatne -- a native-endian floating point number.
+ se -- a signed exponential-Golomb code.
+ ue -- an unsigned exponential-Golomb code.
+ sie -- a signed interleaved exponential-Golomb code.
+ uie -- an unsigned interleaved exponential-Golomb code.
+ bool -- a boolean (True or False).
+ filename -- a file which will be opened in binary read-only mode.
+
+ Other keyword arguments:
+ length -- length of the bitstring in bits, if needed and appropriate.
+ It must be supplied for all integer and float initialisers.
+ offset -- bit offset to the data. These offset bits are
+ ignored and this is mainly intended for use when
+ initialising using 'bytes' or 'filename'.
+
+ """
+ pass
+
+ def __new__(cls, auto=None, length=None, offset=None, _cache={}, **kwargs):
+ # For instances auto-initialised with a string we intern the
+ # instance for re-use.
+ try:
+ if isinstance(auto, basestring):
+ try:
+ return _cache[auto]
+ except KeyError:
+ x = object.__new__(Bits)
+ try:
+ _, tokens = tokenparser(auto)
+ except ValueError as e:
+ raise CreationError(*e.args)
+ x._datastore = ConstByteStore(bytearray(0), 0, 0)
+ for token in tokens:
+ x._datastore._appendstore(Bits._init_with_token(*token)._datastore)
+ assert x._assertsanity()
+ if len(_cache) < CACHE_SIZE:
+ _cache[auto] = x
+ return x
+ if isinstance(auto, Bits):
+ return auto
+ except TypeError:
+ pass
+ x = super(Bits, cls).__new__(cls)
+ x._initialise(auto, length, offset, **kwargs)
+ return x
+
+ def _initialise(self, auto, length, offset, **kwargs):
+ if length is not None and length < 0:
+ raise CreationError("bitstring length cannot be negative.")
+ if offset is not None and offset < 0:
+ raise CreationError("offset must be >= 0.")
+ if auto is not None:
+ self._initialise_from_auto(auto, length, offset)
+ return
+ if not kwargs:
+ # No initialisers, so initialise with nothing or zero bits
+ if length is not None and length != 0:
+ data = bytearray((length + 7) // 8)
+ self._setbytes_unsafe(data, length, 0)
+ return
+ self._setbytes_unsafe(bytearray(0), 0, 0)
+ return
+ k, v = kwargs.popitem()
+ try:
+ init_without_length_or_offset[k](self, v)
+ if length is not None or offset is not None:
+ raise CreationError("Cannot use length or offset with this initialiser.")
+ except KeyError:
+ try:
+ init_with_length_only[k](self, v, length)
+ if offset is not None:
+ raise CreationError("Cannot use offset with this initialiser.")
+ except KeyError:
+ if offset is None:
+ offset = 0
+ try:
+ init_with_length_and_offset[k](self, v, length, offset)
+ except KeyError:
+ raise CreationError("Unrecognised keyword '{0}' used to initialise.", k)
+
+ def _initialise_from_auto(self, auto, length, offset):
+ if offset is None:
+ offset = 0
+ self._setauto(auto, length, offset)
+ return
+
+ def __copy__(self):
+ """Return a new copy of the Bits for the copy module."""
+ # Note that if you want a new copy (different ID), use _copy instead.
+ # The copy can return self as it's immutable.
+ return self
+
+ def __lt__(self, other):
+ raise TypeError("unorderable type: {0}".format(type(self).__name__))
+
+ def __gt__(self, other):
+ raise TypeError("unorderable type: {0}".format(type(self).__name__))
+
+ def __le__(self, other):
+ raise TypeError("unorderable type: {0}".format(type(self).__name__))
+
+ def __ge__(self, other):
+ raise TypeError("unorderable type: {0}".format(type(self).__name__))
+
+ def __add__(self, bs):
+ """Concatenate bitstrings and return new bitstring.
+
+ bs -- the bitstring to append.
+
+ """
+ bs = Bits(bs)
+ if bs.len <= self.len:
+ s = self._copy()
+ s._append(bs)
+ else:
+ s = bs._copy()
+ s = self.__class__(s)
+ s._prepend(self)
+ return s
+
+ def __radd__(self, bs):
+ """Append current bitstring to bs and return new bitstring.
+
+ bs -- the string for the 'auto' initialiser that will be appended to.
+
+ """
+ bs = self._converttobitstring(bs)
+ return bs.__add__(self)
+
+ def __getitem__(self, key):
+ """Return a new bitstring representing a slice of the current bitstring.
+
+ Indices are in units of the step parameter (default 1 bit).
+ Stepping is used to specify the number of bits in each item.
+
+ >>> print BitArray('0b00110')[1:4]
+ '0b011'
+ >>> print BitArray('0x00112233')[1:3:8]
+ '0x1122'
+
+ """
+ length = self.len
+ try:
+ step = key.step if key.step is not None else 1
+ except AttributeError:
+ # single element
+ if key < 0:
+ key += length
+ if not 0 <= key < length:
+ raise IndexError("Slice index out of range.")
+ # Single bit, return True or False
+ return self._datastore.getbit(key)
+ else:
+ if step != 1:
+ # convert to binary string and use string slicing
+ bs = self.__class__()
+ bs._setbin_unsafe(self._getbin().__getitem__(key))
+ return bs
+ start, stop = 0, length
+ if key.start is not None:
+ start = key.start
+ if key.start < 0:
+ start += stop
+ if key.stop is not None:
+ stop = key.stop
+ if key.stop < 0:
+ stop += length
+ start = max(start, 0)
+ stop = min(stop, length)
+ if start < stop:
+ return self._slice(start, stop)
+ else:
+ return self.__class__()
+
+ def __len__(self):
+ """Return the length of the bitstring in bits."""
+ return self._getlength()
+
+ def __str__(self):
+ """Return approximate string representation of bitstring for printing.
+
+ Short strings will be given wholly in hexadecimal or binary. Longer
+ strings may be part hexadecimal and part binary. Very long strings will
+ be truncated with '...'.
+
+ """
+ length = self.len
+ if not length:
+ return ''
+ if length > MAX_CHARS * 4:
+ # Too long for hex. Truncate...
+ return ''.join(('0x', self._readhex(MAX_CHARS * 4, 0), '...'))
+ # If it's quite short and we can't do hex then use bin
+ if length < 32 and length % 4 != 0:
+ return '0b' + self.bin
+ # If we can use hex then do so
+ if not length % 4:
+ return '0x' + self.hex
+ # Otherwise first we do as much as we can in hex
+ # then add on 1, 2 or 3 bits on at the end
+ bits_at_end = length % 4
+ return ''.join(('0x', self._readhex(length - bits_at_end, 0),
+ ', ', '0b',
+ self._readbin(bits_at_end, length - bits_at_end)))
+
+ def __repr__(self):
+ """Return representation that could be used to recreate the bitstring.
+
+ If the returned string is too long it will be truncated. See __str__().
+
+ """
+ length = self.len
+ if isinstance(self._datastore._rawarray, MmapByteArray):
+ offsetstring = ''
+ if self._datastore.byteoffset or self._offset:
+ offsetstring = ", offset=%d" % (self._datastore._rawarray.byteoffset * 8 + self._offset)
+ lengthstring = ", length=%d" % length
+ return "{0}(filename='{1}'{2}{3})".format(self.__class__.__name__,
+ self._datastore._rawarray.source.name, lengthstring, offsetstring)
+ else:
+ s = self.__str__()
+ lengthstring = ''
+ if s.endswith('...'):
+ lengthstring = " # length={0}".format(length)
+ return "{0}('{1}'){2}".format(self.__class__.__name__, s, lengthstring)
+
+ def __eq__(self, bs):
+ """Return True if two bitstrings have the same binary representation.
+
+ >>> BitArray('0b1110') == '0xe'
+ True
+
+ """
+ try:
+ bs = Bits(bs)
+ except TypeError:
+ return False
+ return equal(self._datastore, bs._datastore)
+
+ def __ne__(self, bs):
+ """Return False if two bitstrings have the same binary representation.
+
+ >>> BitArray('0b111') == '0x7'
+ False
+
+ """
+ return not self.__eq__(bs)
+
+ def __invert__(self):
+ """Return bitstring with every bit inverted.
+
+ Raises Error if the bitstring is empty.
+
+ """
+ if not self.len:
+ raise Error("Cannot invert empty bitstring.")
+ s = self._copy()
+ s._invert_all()
+ return s
+
+ def __lshift__(self, n):
+ """Return bitstring with bits shifted by n to the left.
+
+ n -- the number of bits to shift. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot shift by a negative amount.")
+ if not self.len:
+ raise ValueError("Cannot shift an empty bitstring.")
+ n = min(n, self.len)
+ s = self._slice(n, self.len)
+ s._append(Bits(n))
+ return s
+
+ def __rshift__(self, n):
+ """Return bitstring with bits shifted by n to the right.
+
+ n -- the number of bits to shift. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot shift by a negative amount.")
+ if not self.len:
+ raise ValueError("Cannot shift an empty bitstring.")
+ if not n:
+ return self._copy()
+ s = self.__class__(length=min(n, self.len))
+ s._append(self[:-n])
+ return s
+
+ def __mul__(self, n):
+ """Return bitstring consisting of n concatenations of self.
+
+ Called for expression of the form 'a = b*3'.
+ n -- The number of concatenations. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot multiply by a negative integer.")
+ if not n:
+ return self.__class__()
+ s = self._copy()
+ s._imul(n)
+ return s
+
+ def __rmul__(self, n):
+ """Return bitstring consisting of n concatenations of self.
+
+ Called for expressions of the form 'a = 3*b'.
+ n -- The number of concatenations. Must be >= 0.
+
+ """
+ return self.__mul__(n)
+
+ def __and__(self, bs):
+ """Bit-wise 'and' between two bitstrings. Returns new bitstring.
+
+ bs -- The bitstring to '&' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for & operator.")
+ s = self._copy()
+ s._iand(bs)
+ return s
+
+ def __rand__(self, bs):
+ """Bit-wise 'and' between two bitstrings. Returns new bitstring.
+
+ bs -- the bitstring to '&' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ return self.__and__(bs)
+
+ def __or__(self, bs):
+ """Bit-wise 'or' between two bitstrings. Returns new bitstring.
+
+ bs -- The bitstring to '|' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for | operator.")
+ s = self._copy()
+ s._ior(bs)
+ return s
+
+ def __ror__(self, bs):
+ """Bit-wise 'or' between two bitstrings. Returns new bitstring.
+
+ bs -- The bitstring to '|' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ return self.__or__(bs)
+
+ def __xor__(self, bs):
+ """Bit-wise 'xor' between two bitstrings. Returns new bitstring.
+
+ bs -- The bitstring to '^' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for ^ operator.")
+ s = self._copy()
+ s._ixor(bs)
+ return s
+
+ def __rxor__(self, bs):
+ """Bit-wise 'xor' between two bitstrings. Returns new bitstring.
+
+ bs -- The bitstring to '^' with.
+
+ Raises ValueError if the two bitstrings have differing lengths.
+
+ """
+ return self.__xor__(bs)
+
+ def __contains__(self, bs):
+ """Return whether bs is contained in the current bitstring.
+
+ bs -- The bitstring to search for.
+
+ """
+ # Don't want to change pos
+ try:
+ pos = self._pos
+ except AttributeError:
+ pass
+ found = Bits.find(self, bs, bytealigned=False)
+ try:
+ self._pos = pos
+ except AttributeError:
+ pass
+ return bool(found)
+
+ def __hash__(self):
+ """Return an integer hash of the object."""
+ # We can't in general hash the whole bitstring (it could take hours!)
+ # So instead take some bits from the start and end.
+ if self.len <= 160:
+ # Use the whole bitstring.
+ shorter = self
+ else:
+ # Take 10 bytes from start and end
+ shorter = self[:80] + self[-80:]
+ h = 0
+ for byte in shorter.tobytes():
+ try:
+ h = (h << 4) + ord(byte)
+ except TypeError:
+ # Python 3
+ h = (h << 4) + byte
+ g = h & 0xf0000000
+ if g & (1 << 31):
+ h ^= (g >> 24)
+ h ^= g
+ return h % 1442968193
+
+ # This is only used in Python 2.x...
+ def __nonzero__(self):
+ """Return True if any bits are set to 1, otherwise return False."""
+ return self.any(True)
+
+ # ...whereas this is used in Python 3.x
+ __bool__ = __nonzero__
+
+ def _assertsanity(self):
+ """Check internal self consistency as a debugging aid."""
+ assert self.len >= 0
+ assert 0 <= self._offset, "offset={0}".format(self._offset)
+ assert (self.len + self._offset + 7) // 8 == self._datastore.bytelength + self._datastore.byteoffset
+ return True
+
+ @classmethod
+ def _init_with_token(cls, name, token_length, value):
+ if token_length is not None:
+ token_length = int(token_length)
+ if token_length == 0:
+ return cls()
+ # For a pad token just return that many zero bits.
+ if name == 'pad':
+ return cls(token_length)
+
+ if value is None:
+ if token_length is None:
+ error = "Token has no value ({0}=???).".format(name)
+ else:
+ error = "Token has no value ({0}:{1}=???).".format(name, token_length)
+ raise ValueError(error)
+ try:
+ b = cls(**{_tokenname_to_initialiser[name]: value})
+ except KeyError:
+ if name in ('se', 'ue', 'sie', 'uie'):
+ b = cls(**{name: int(value)})
+ elif name in ('uint', 'int', 'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne'):
+ b = cls(**{name: int(value), 'length': token_length})
+ elif name in ('float', 'floatbe', 'floatle', 'floatne'):
+ b = cls(**{name: float(value), 'length': token_length})
+ elif name == 'bool':
+ if value in (1, 'True', '1'):
+ b = cls(bool=True)
+ elif value in (0, 'False', '0'):
+ b = cls(bool=False)
+ else:
+ raise CreationError("bool token can only be 'True' or 'False'.")
+ else:
+ raise CreationError("Can't parse token name {0}.", name)
+ if token_length is not None and b.len != token_length:
+ msg = "Token with length {0} packed with value of length {1} ({2}:{3}={4})."
+ raise CreationError(msg, token_length, b.len, name, token_length, value)
+ return b
+
+ def _clear(self):
+ """Reset the bitstring to an empty state."""
+ self._datastore = ByteStore(bytearray(0))
+
+ def _setauto(self, s, length, offset):
+ """Set bitstring from a bitstring, file, bool, integer, iterable or string."""
+ # As s can be so many different things it's important to do the checks
+ # in the correct order, as some types are also other allowed types.
+ # So basestring must be checked before Iterable
+ # and bytes/bytearray before Iterable but after basestring!
+ if isinstance(s, Bits):
+ if length is None:
+ length = s.len - offset
+ self._setbytes_unsafe(s._datastore.rawbytes, length, s._offset + offset)
+ return
+ if isinstance(s, file):
+ if offset is None:
+ offset = 0
+ if length is None:
+ length = os.path.getsize(s.name) * 8 - offset
+ byteoffset, offset = divmod(offset, 8)
+ bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset
+ m = MmapByteArray(s, bytelength, byteoffset)
+ if length + byteoffset * 8 + offset > m.filelength * 8:
+ raise CreationError("File is not long enough for specified "
+ "length and offset.")
+ self._datastore = ConstByteStore(m, length, offset)
+ return
+ if length is not None:
+ raise CreationError("The length keyword isn't applicable to this initialiser.")
+ if offset:
+ raise CreationError("The offset keyword isn't applicable to this initialiser.")
+ if isinstance(s, basestring):
+ bs = self._converttobitstring(s)
+ assert bs._offset == 0
+ self._setbytes_unsafe(bs._datastore.rawbytes, bs.length, 0)
+ return
+ if isinstance(s, (bytes, bytearray)):
+ self._setbytes_unsafe(bytearray(s), len(s) * 8, 0)
+ return
+ if isinstance(s, numbers.Integral):
+ # Initialise with s zero bits.
+ if s < 0:
+ msg = "Can't create bitstring of negative length {0}."
+ raise CreationError(msg, s)
+ data = bytearray((s + 7) // 8)
+ self._datastore = ByteStore(data, s, 0)
+ return
+ if isinstance(s, collections.Iterable):
+ # Evaluate each item as True or False and set bits to 1 or 0.
+ self._setbin_unsafe(''.join(str(int(bool(x))) for x in s))
+ return
+ raise TypeError("Cannot initialise bitstring from {0}.".format(type(s)))
+
+ def _setfile(self, filename, length, offset):
+ """Use file as source of bits."""
+ source = open(filename, 'rb')
+ if offset is None:
+ offset = 0
+ if length is None:
+ length = os.path.getsize(source.name) * 8 - offset
+ byteoffset, offset = divmod(offset, 8)
+ bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset
+ m = MmapByteArray(source, bytelength, byteoffset)
+ if length + byteoffset * 8 + offset > m.filelength * 8:
+ raise CreationError("File is not long enough for specified "
+ "length and offset.")
+ self._datastore = ConstByteStore(m, length, offset)
+
+ def _setbytes_safe(self, data, length=None, offset=0):
+ """Set the data from a string."""
+ data = bytearray(data)
+ if length is None:
+ # Use up to the end of the data.
+ length = len(data)*8 - offset
+ self._datastore = ByteStore(data, length, offset)
+ else:
+ if length + offset > len(data) * 8:
+ msg = "Not enough data present. Need {0} bits, have {1}."
+ raise CreationError(msg, length + offset, len(data) * 8)
+ if length == 0:
+ self._datastore = ByteStore(bytearray(0))
+ else:
+ self._datastore = ByteStore(data, length, offset)
+
+ def _setbytes_unsafe(self, data, length, offset):
+ """Unchecked version of _setbytes_safe."""
+ self._datastore = ByteStore(data[:], length, offset)
+ assert self._assertsanity()
+
+ def _readbytes(self, length, start):
+ """Read bytes and return them. Note that length is in bits."""
+ assert length % 8 == 0
+ assert start + length <= self.len
+ if not (start + self._offset) % 8:
+ return bytes(self._datastore.getbyteslice((start + self._offset) // 8,
+ (start + self._offset + length) // 8))
+ return self._slice(start, start + length).tobytes()
+
+ def _getbytes(self):
+ """Return the data as an ordinary string."""
+ if self.len % 8:
+ raise InterpretError("Cannot interpret as bytes unambiguously - "
+ "not multiple of 8 bits.")
+ return self._readbytes(self.len, 0)
+
+ def _setuint(self, uint, length=None):
+ """Reset the bitstring to have given unsigned int interpretation."""
+ try:
+ if length is None:
+ # Use the whole length. Deliberately not using .len here.
+ length = self._datastore.bitlength
+ except AttributeError:
+ # bitstring doesn't have a _datastore as it hasn't been created!
+ pass
+ # TODO: All this checking code should be hoisted out of here!
+ if length is None or length == 0:
+ raise CreationError("A non-zero length must be specified with a "
+ "uint initialiser.")
+ if uint >= (1 << length):
+ msg = "{0} is too large an unsigned integer for a bitstring of length {1}. "\
+ "The allowed range is [0, {2}]."
+ raise CreationError(msg, uint, length, (1 << length) - 1)
+ if uint < 0:
+ raise CreationError("uint cannot be initialsed by a negative number.")
+ s = hex(uint)[2:]
+ s = s.rstrip('L')
+ if len(s) & 1:
+ s = '0' + s
+ try:
+ data = bytes.fromhex(s)
+ except AttributeError:
+ # the Python 2.x way
+ data = binascii.unhexlify(s)
+ # Now add bytes as needed to get the right length.
+ extrabytes = ((length + 7) // 8) - len(data)
+ if extrabytes > 0:
+ data = b'\x00' * extrabytes + data
+ offset = 8 - (length % 8)
+ if offset == 8:
+ offset = 0
+ self._setbytes_unsafe(bytearray(data), length, offset)
+
+ def _readuint(self, length, start):
+ """Read bits and interpret as an unsigned int."""
+ if not length:
+ raise InterpretError("Cannot interpret a zero length bitstring "
+ "as an integer.")
+ offset = self._offset
+ startbyte = (start + offset) // 8
+ endbyte = (start + offset + length - 1) // 8
+
+ b = binascii.hexlify(bytes(self._datastore.getbyteslice(startbyte, endbyte + 1)))
+ assert b
+ i = int(b, 16)
+ final_bits = 8 - ((start + offset + length) % 8)
+ if final_bits != 8:
+ i >>= final_bits
+ i &= (1 << length) - 1
+ return i
+
+ def _getuint(self):
+ """Return data as an unsigned int."""
+ return self._readuint(self.len, 0)
+
+ def _setint(self, int_, length=None):
+ """Reset the bitstring to have given signed int interpretation."""
+ # If no length given, and we've previously been given a length, use it.
+ if length is None and hasattr(self, 'len') and self.len != 0:
+ length = self.len
+ if length is None or length == 0:
+ raise CreationError("A non-zero length must be specified with an int initialiser.")
+ if int_ >= (1 << (length - 1)) or int_ < -(1 << (length - 1)):
+ raise CreationError("{0} is too large a signed integer for a bitstring of length {1}. "
+ "The allowed range is [{2}, {3}].", int_, length, -(1 << (length - 1)),
+ (1 << (length - 1)) - 1)
+ if int_ >= 0:
+ self._setuint(int_, length)
+ return
+ # TODO: We should decide whether to just use _setuint, or to do the bit flipping,
+ # based upon which will be quicker. If the negative number is less than half the
+ # maximum possible then it's probably quicker to do the bit flipping...
+
+ # Do the 2's complement thing. Add one, set to minus number, then flip bits.
+ int_ += 1
+ self._setuint(-int_, length)
+ self._invert_all()
+
+ def _readint(self, length, start):
+ """Read bits and interpret as a signed int"""
+ ui = self._readuint(length, start)
+ if not ui >> (length - 1):
+ # Top bit not set, number is positive
+ return ui
+ # Top bit is set, so number is negative
+ tmp = (~(ui - 1)) & ((1 << length) - 1)
+ return -tmp
+
+ def _getint(self):
+ """Return data as a two's complement signed int."""
+ return self._readint(self.len, 0)
+
+ def _setuintbe(self, uintbe, length=None):
+ """Set the bitstring to a big-endian unsigned int interpretation."""
+ if length is not None and length % 8 != 0:
+ raise CreationError("Big-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ self._setuint(uintbe, length)
+
+ def _readuintbe(self, length, start):
+ """Read bits and interpret as a big-endian unsigned int."""
+ if length % 8:
+ raise InterpretError("Big-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ return self._readuint(length, start)
+
+ def _getuintbe(self):
+ """Return data as a big-endian two's complement unsigned int."""
+ return self._readuintbe(self.len, 0)
+
+ def _setintbe(self, intbe, length=None):
+ """Set bitstring to a big-endian signed int interpretation."""
+ if length is not None and length % 8 != 0:
+ raise CreationError("Big-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ self._setint(intbe, length)
+
+ def _readintbe(self, length, start):
+ """Read bits and interpret as a big-endian signed int."""
+ if length % 8:
+ raise InterpretError("Big-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ return self._readint(length, start)
+
+ def _getintbe(self):
+ """Return data as a big-endian two's complement signed int."""
+ return self._readintbe(self.len, 0)
+
+ def _setuintle(self, uintle, length=None):
+ if length is not None and length % 8 != 0:
+ raise CreationError("Little-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ self._setuint(uintle, length)
+ self._reversebytes(0, self.len)
+
+ def _readuintle(self, length, start):
+ """Read bits and interpret as a little-endian unsigned int."""
+ if length % 8:
+ raise InterpretError("Little-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ assert start + length <= self.len
+ absolute_pos = start + self._offset
+ startbyte, offset = divmod(absolute_pos, 8)
+ val = 0
+ if not offset:
+ endbyte = (absolute_pos + length - 1) // 8
+ chunksize = 4 # for 'L' format
+ while endbyte - chunksize + 1 >= startbyte:
+ val <<= 8 * chunksize
+ val += struct.unpack('<L', bytes(self._datastore.getbyteslice(endbyte + 1 - chunksize, endbyte + 1)))[0]
+ endbyte -= chunksize
+ for b in xrange(endbyte, startbyte - 1, -1):
+ val <<= 8
+ val += self._datastore.getbyte(b)
+ else:
+ data = self._slice(start, start + length)
+ assert data.len % 8 == 0
+ # Reverse over the slice's own length, not self.len.
+ data._reversebytes(0, data.len)
+ for b in bytearray(data.bytes):
+ val <<= 8
+ val += b
+ return val
+
+ def _getuintle(self):
+ return self._readuintle(self.len, 0)
+
+ def _setintle(self, intle, length=None):
+ if length is not None and length % 8 != 0:
+ raise CreationError("Little-endian integers must be whole-byte. "
+ "Length = {0} bits.", length)
+ self._setint(intle, length)
+ self._reversebytes(0, self.len)
+
+ def _readintle(self, length, start):
+ """Read bits and interpret as a little-endian signed int."""
+ ui = self._readuintle(length, start)
+ if not ui >> (length - 1):
+ # Top bit not set, number is positive
+ return ui
+ # Top bit is set, so number is negative
+ tmp = (~(ui - 1)) & ((1 << length) - 1)
+ return -tmp
+
+ def _getintle(self):
+ return self._readintle(self.len, 0)
+
+ def _setfloat(self, f, length=None):
+ # If no length given, and we've previously been given a length, use it.
+ if length is None and hasattr(self, 'len') and self.len != 0:
+ length = self.len
+ if length is None or length == 0:
+ raise CreationError("A non-zero length must be specified with a "
+ "float initialiser.")
+ if length == 32:
+ b = struct.pack('>f', f)
+ elif length == 64:
+ b = struct.pack('>d', f)
+ else:
+ raise CreationError("floats can only be 32 or 64 bits long, "
+ "not {0} bits", length)
+ self._setbytes_unsafe(bytearray(b), length, 0)
+
+ def _readfloat(self, length, start):
+ """Read bits and interpret as a float."""
+ if not (start + self._offset) % 8:
+ startbyte = (start + self._offset) // 8
+ if length == 32:
+ f, = struct.unpack('>f', bytes(self._datastore.getbyteslice(startbyte, startbyte + 4)))
+ elif length == 64:
+ f, = struct.unpack('>d', bytes(self._datastore.getbyteslice(startbyte, startbyte + 8)))
+ else:
+ if length == 32:
+ f, = struct.unpack('>f', self._readbytes(32, start))
+ elif length == 64:
+ f, = struct.unpack('>d', self._readbytes(64, start))
+ try:
+ return f
+ except NameError:
+ raise InterpretError("floats can only be 32 or 64 bits long, not {0} bits", length)
+
+ def _getfloat(self):
+ """Interpret the whole bitstring as a float."""
+ return self._readfloat(self.len, 0)
+
+ def _setfloatle(self, f, length=None):
+ # If no length given, and we've previously been given a length, use it.
+ if length is None and hasattr(self, 'len') and self.len != 0:
+ length = self.len
+ if length is None or length == 0:
+ raise CreationError("A non-zero length must be specified with a "
+ "float initialiser.")
+ if length == 32:
+ b = struct.pack('<f', f)
+ elif length == 64:
+ b = struct.pack('<d', f)
+ else:
+ raise CreationError("floats can only be 32 or 64 bits long, "
+ "not {0} bits", length)
+ self._setbytes_unsafe(bytearray(b), length, 0)
+
+ def _readfloatle(self, length, start):
+ """Read bits and interpret as a little-endian float."""
+ startbyte, offset = divmod(start + self._offset, 8)
+ if not offset:
+ if length == 32:
+ f, = struct.unpack('<f', bytes(self._datastore.getbyteslice(startbyte, startbyte + 4)))
+ elif length == 64:
+ f, = struct.unpack('<d', bytes(self._datastore.getbyteslice(startbyte, startbyte + 8)))
+ else:
+ if length == 32:
+ f, = struct.unpack('<f', self._readbytes(32, start))
+ elif length == 64:
+ f, = struct.unpack('<d', self._readbytes(64, start))
+ try:
+ return f
+ except NameError:
+ raise InterpretError("floats can only be 32 or 64 bits long, "
+ "not {0} bits", length)
+
+ def _getfloatle(self):
+ """Interpret the whole bitstring as a little-endian float."""
+ return self._readfloatle(self.len, 0)
+
+ def _setue(self, i):
+ """Initialise bitstring with unsigned exponential-Golomb code for integer i.
+
+ Raises CreationError if i < 0.
+
+ """
+ if i < 0:
+ raise CreationError("Cannot use negative initialiser for unsigned "
+ "exponential-Golomb.")
+ if not i:
+ self._setbin_unsafe('1')
+ return
+ tmp = i + 1
+ leadingzeros = -1
+ while tmp > 0:
+ tmp >>= 1
+ leadingzeros += 1
+ remainingpart = i + 1 - (1 << leadingzeros)
+ binstring = '0' * leadingzeros + '1' + Bits(uint=remainingpart,
+ length=leadingzeros).bin
+ self._setbin_unsafe(binstring)
+
+ def _readue(self, pos):
+ """Return interpretation of next bits as unsigned exponential-Golomb code.
+
+ Raises ReadError if the end of the bitstring is encountered while
+ reading the code.
+
+ """
+ oldpos = pos
+ try:
+ while not self[pos]:
+ pos += 1
+ except IndexError:
+ raise ReadError("Read off end of bitstring trying to read code.")
+ leadingzeros = pos - oldpos
+ codenum = (1 << leadingzeros) - 1
+ if leadingzeros > 0:
+ if pos + leadingzeros + 1 > self.len:
+ raise ReadError("Read off end of bitstring trying to read code.")
+ codenum += self._readuint(leadingzeros, pos + 1)
+ pos += leadingzeros + 1
+ else:
+ assert codenum == 0
+ pos += 1
+ return codenum, pos
+
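+ # Worked example of the unsigned exponential-Golomb code used above: the
+ # codeword for i is leadingzeros '0's, a '1', then (i + 1 - 2**leadingzeros)
+ # written in leadingzeros bits, so:
+ #     i=0 -> '1', i=1 -> '010', i=2 -> '011', i=3 -> '00100'
+ #     >>> Bits(ue=3).bin
+ #     '00100'
+ #     >>> Bits('0b00100').ue
+ #     3
+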
+ def _getue(self):
+ """Return data as unsigned exponential-Golomb code.
+
+ Raises InterpretError if bitstring is not a single exponential-Golomb code.
+
+ """
+ try:
+ value, newpos = self._readue(0)
+ if value is None or newpos != self.len:
+ raise ReadError
+ except ReadError:
+ raise InterpretError("Bitstring is not a single exponential-Golomb code.")
+ return value
+
+ def _setse(self, i):
+ """Initialise bitstring with signed exponential-Golomb code for integer i."""
+ if i > 0:
+ u = (i * 2) - 1
+ else:
+ u = -2 * i
+ self._setue(u)
+
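+ # The signed code reuses the unsigned one via the mapping u = 2*i - 1 for
+ # i > 0 and u = -2*i otherwise, so: 0 -> '1', 1 -> '010', -1 -> '011'.
+ #     >>> Bits(se=-1).bin
+ #     '011'
+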
+ def _getse(self):
+ """Return data as signed exponential-Golomb code.
+
+ Raises InterpretError if bitstring is not a single exponential-Golomb code.
+
+ """
+ try:
+ value, newpos = self._readse(0)
+ if value is None or newpos != self.len:
+ raise ReadError
+ except ReadError:
+ raise InterpretError("Bitstring is not a single exponential-Golomb code.")
+ return value
+
+ def _readse(self, pos):
+ """Return interpretation of next bits as a signed exponential-Golomb code.
+
+ Advances position to after the read code.
+
+ Raises ReadError if the end of the bitstring is encountered while
+ reading the code.
+
+ """
+ codenum, pos = self._readue(pos)
+ m = (codenum + 1) // 2
+ if not codenum % 2:
+ return -m, pos
+ else:
+ return m, pos
+
+ def _setuie(self, i):
+ """Initialise bitstring with unsigned interleaved exponential-Golomb code for integer i.
+
+ Raises CreationError if i < 0.
+
+ """
+ if i < 0:
+ raise CreationError("Cannot use negative initialiser for unsigned "
+ "interleaved exponential-Golomb.")
+ self._setbin_unsafe('1' if i == 0 else '0' + '0'.join(bin(i + 1)[3:]) + '1')
+
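+ # Illustrative sketch: the interleaved code writes a '0' before each data
+ # bit of i + 1 (its leading '1' dropped) and terminates with a '1':
+ #     i=0 -> '1', i=1 -> '001', i=2 -> '011'
+ #     >>> Bits(uie=2).bin
+ #     '011'
+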
+ def _readuie(self, pos):
+ """Return interpretation of next bits as unsigned interleaved exponential-Golomb code.
+
+ Raises ReadError if the end of the bitstring is encountered while
+ reading the code.
+
+ """
+ try:
+ codenum = 1
+ while not self[pos]:
+ pos += 1
+ codenum <<= 1
+ codenum += self[pos]
+ pos += 1
+ pos += 1
+ except IndexError:
+ raise ReadError("Read off end of bitstring trying to read code.")
+ codenum -= 1
+ return codenum, pos
+
+ def _getuie(self):
+ """Return data as unsigned interleaved exponential-Golomb code.
+
+ Raises InterpretError if bitstring is not a single exponential-Golomb code.
+
+ """
+ try:
+ value, newpos = self._readuie(0)
+ if value is None or newpos != self.len:
+ raise ReadError
+ except ReadError:
+ raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.")
+ return value
+
+ def _setsie(self, i):
+ """Initialise bitstring with signed interleaved exponential-Golomb code for integer i."""
+ if not i:
+ self._setbin_unsafe('1')
+ else:
+ self._setuie(abs(i))
+ self._append(Bits([i < 0]))
+
+ def _getsie(self):
+ """Return data as signed interleaved exponential-Golomb code.
+
+ Raises InterpretError if bitstring is not a single exponential-Golomb code.
+
+ """
+ try:
+ value, newpos = self._readsie(0)
+ if value is None or newpos != self.len:
+ raise ReadError
+ except ReadError:
+ raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.")
+ return value
+
+ def _readsie(self, pos):
+ """Return interpretation of next bits as a signed interleaved exponential-Golomb code.
+
+ Advances position to after the read code.
+
+ Raises ReadError if the end of the bitstring is encountered while
+ reading the code.
+
+ """
+ codenum, pos = self._readuie(pos)
+ if not codenum:
+ return 0, pos
+ try:
+ if self[pos]:
+ return -codenum, pos + 1
+ else:
+ return codenum, pos + 1
+ except IndexError:
+ raise ReadError("Read off end of bitstring trying to read code.")
+
+ def _setbool(self, value):
+ # We deliberately don't want to have implicit conversions to bool here.
+ # If we did then it would be difficult to deal with the 'False' string.
+ if value in (1, 'True'):
+ self._setbytes_unsafe(bytearray(b'\x80'), 1, 0)
+ elif value in (0, 'False'):
+ self._setbytes_unsafe(bytearray(b'\x00'), 1, 0)
+ else:
+ raise CreationError('Cannot initialise boolean with {0}.', value)
+
+ def _getbool(self):
+ if self.length != 1:
+ msg = "For a bool interpretation a bitstring must be 1 bit long, not {0} bits."
+ raise InterpretError(msg, self.length)
+ return self[0]
+
+ def _readbool(self, pos):
+ return self[pos], pos + 1
+
+ def _setbin_safe(self, binstring):
+ """Reset the bitstring to the value given in binstring."""
+ binstring = tidy_input_string(binstring)
+ # remove any 0b if present
+ binstring = binstring.replace('0b', '')
+ self._setbin_unsafe(binstring)
+
+ def _setbin_unsafe(self, binstring):
+ """Same as _setbin_safe, but input isn't sanity checked. binstring mustn't start with '0b'."""
+ length = len(binstring)
+ # pad with zeros up to byte boundary if needed
+ boundary = ((length + 7) // 8) * 8
+ padded_binstring = binstring + '0' * (boundary - length)\
+ if len(binstring) < boundary else binstring
+ try:
+ bytelist = [int(padded_binstring[x:x + 8], 2)
+ for x in xrange(0, len(padded_binstring), 8)]
+ except ValueError:
+ raise CreationError("Invalid character in bin initialiser {0}.", binstring)
+ self._setbytes_unsafe(bytearray(bytelist), length, 0)
+
+ def _readbin(self, length, start):
+ """Read bits and interpret as a binary string."""
+ if not length:
+ return ''
+ # Get the byte slice containing our bit slice
+ startbyte, startoffset = divmod(start + self._offset, 8)
+ endbyte = (start + self._offset + length - 1) // 8
+ b = self._datastore.getbyteslice(startbyte, endbyte + 1)
+ # Convert to a string of '0's and '1's (via a hex string and an int!)
+ try:
+ c = "{:0{}b}".format(int(binascii.hexlify(b), 16), 8*len(b))
+ except TypeError:
+ # Hack to get Python 2.6 working
+ c = "{0:0{1}b}".format(int(binascii.hexlify(str(b)), 16), 8*len(b))
+ # Finally chop off any extra bits.
+ return c[startoffset:startoffset + length]
+
+ def _getbin(self):
+ """Return interpretation as a binary string."""
+ return self._readbin(self.len, 0)
+
+ def _setoct(self, octstring):
+ """Reset the bitstring to have the value given in octstring."""
+ octstring = tidy_input_string(octstring)
+ # remove any 0o if present
+ octstring = octstring.replace('0o', '')
+ binlist = []
+ for i in octstring:
+ try:
+ if not 0 <= int(i) < 8:
+ raise ValueError
+ binlist.append(OCT_TO_BITS[int(i)])
+ except ValueError:
+ raise CreationError("Invalid symbol '{0}' in oct initialiser.", i)
+ self._setbin_unsafe(''.join(binlist))
+
+ def _readoct(self, length, start):
+ """Read bits and interpret as an octal string."""
+ if length % 3:
+ raise InterpretError("Cannot convert to octal unambiguously - "
+ "not multiple of 3 bits.")
+ if not length:
+ return ''
+ # Get the main octal digits by converting from int.
+ # Strip the leading 0 or 0o, depending on Python version.
+ end = oct(self._readuint(length, start))[LEADING_OCT_CHARS:]
+ if end.endswith('L'):
+ end = end[:-1]
+ middle = '0' * (length // 3 - len(end))
+ return middle + end
+
+ def _getoct(self):
+ """Return interpretation as an octal string."""
+ return self._readoct(self.len, 0)
+
+ def _sethex(self, hexstring):
+ """Reset the bitstring to have the value given in hexstring."""
+ hexstring = tidy_input_string(hexstring)
+ # remove any 0x if present
+ hexstring = hexstring.replace('0x', '')
+ length = len(hexstring)
+ if length % 2:
+ hexstring += '0'
+ try:
+ try:
+ data = bytearray.fromhex(hexstring)
+ except TypeError:
+ # Python 2.6 needs a unicode string (a bug). 2.7 and 3.x work fine.
+ data = bytearray.fromhex(unicode(hexstring))
+ except ValueError:
+ raise CreationError("Invalid symbol in hex initialiser.")
+ self._setbytes_unsafe(data, length * 4, 0)
+
+ def _readhex(self, length, start):
+ """Read bits and interpret as a hex string."""
+ if length % 4:
+ raise InterpretError("Cannot convert to hex unambiguously - "
+ "not multiple of 4 bits.")
+ if not length:
+ return ''
+ # This monstrosity is the only thing I could get to work for both 2.6 and 3.1.
+ # TODO: Is utf-8 really what we mean here?
+ s = str(binascii.hexlify(self._slice(start, start + length).tobytes()).decode('utf-8'))
+ # If there's one nibble too many then cut it off
+ return s[:-1] if (length // 4) % 2 else s
+
+ def _gethex(self):
+ """Return the hexadecimal representation as a string prefixed with '0x'.
+
+ Raises an InterpretError if the bitstring's length is not a multiple of 4.
+
+ """
+ return self._readhex(self.len, 0)
+
+ def _getoffset(self):
+ return self._datastore.offset
+
+ def _getlength(self):
+ """Return the length of the bitstring in bits."""
+ return self._datastore.bitlength
+
+ def _ensureinmemory(self):
+ """Ensure the data is held in memory, not in a file."""
+ self._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength),
+ self.len, self._offset)
+
+ @classmethod
+ def _converttobitstring(cls, bs, offset=0, cache={}):
+ """Convert bs to a bitstring and return it.
+
+ offset gives the suggested bit offset of first significant
+ bit, to optimise append etc.
+
+ """
+ if isinstance(bs, Bits):
+ return bs
+ try:
+ return cache[(bs, offset)]
+ except KeyError:
+ if isinstance(bs, basestring):
+ b = cls()
+ try:
+ _, tokens = tokenparser(bs)
+ except ValueError as e:
+ raise CreationError(*e.args)
+ if tokens:
+ b._append(Bits._init_with_token(*tokens[0]))
+ b._datastore = offsetcopy(b._datastore, offset)
+ for token in tokens[1:]:
+ b._append(Bits._init_with_token(*token))
+ assert b._assertsanity()
+ assert b.len == 0 or b._offset == offset
+ if len(cache) < CACHE_SIZE:
+ cache[(bs, offset)] = b
+ return b
+ except TypeError:
+ # Unhashable type
+ pass
+ return cls(bs)
+
+ def _copy(self):
+ """Create and return a new copy of the Bits (always in memory)."""
+ s_copy = self.__class__()
+ s_copy._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength),
+ self.len, self._offset)
+ return s_copy
+
+ def _slice(self, start, end):
+ """Used internally to get a slice, without error checking."""
+ if end == start:
+ return self.__class__()
+ offset = self._offset
+ startbyte, newoffset = divmod(start + offset, 8)
+ endbyte = (end + offset - 1) // 8
+ bs = self.__class__()
+ bs._setbytes_unsafe(self._datastore.getbyteslice(startbyte, endbyte + 1), end - start, newoffset)
+ return bs
+
+ def _readtoken(self, name, pos, length):
+ """Reads a token from the bitstring and returns the result."""
+ if length is not None and int(length) > self.length - pos:
+ raise ReadError("Reading off the end of the data. "
+ "Tried to read {0} bits when only {1} available.".format(int(length), self.length - pos))
+ try:
+ val = name_to_read[name](self, length, pos)
+ return val, pos + length
+ except KeyError:
+ if name == 'pad':
+ return None, pos + length
+ raise ValueError("Can't parse token {0}:{1}".format(name, length))
+ except TypeError:
+ # This is for the 'ue', 'se' and 'bool' tokens. They will also return the new pos.
+ return name_to_read[name](self, pos)
+
+ def _append(self, bs):
+ """Append a bitstring to the current bitstring."""
+ self._datastore._appendstore(bs._datastore)
+
+ def _prepend(self, bs):
+ """Prepend a bitstring to the current bitstring."""
+ self._datastore._prependstore(bs._datastore)
+
+ def _reverse(self):
+ """Reverse all bits in-place."""
+ # Reverse the contents of each byte
+ n = [BYTE_REVERSAL_DICT[b] for b in self._datastore.rawbytes]
+ # Then reverse the order of the bytes
+ n.reverse()
+ # The new offset is the number of bits that were unused at the end.
+ newoffset = 8 - (self._offset + self.len) % 8
+ if newoffset == 8:
+ newoffset = 0
+ self._setbytes_unsafe(bytearray().join(n), self.length, newoffset)
+
+ def _truncatestart(self, bits):
+ """Truncate bits from the start of the bitstring."""
+ assert 0 <= bits <= self.len
+ if not bits:
+ return
+ if bits == self.len:
+ self._clear()
+ return
+ bytepos, offset = divmod(self._offset + bits, 8)
+ self._setbytes_unsafe(self._datastore.getbyteslice(bytepos, self._datastore.bytelength), self.len - bits,
+ offset)
+ assert self._assertsanity()
+
+ def _truncateend(self, bits):
+ """Truncate bits from the end of the bitstring."""
+ assert 0 <= bits <= self.len
+ if not bits:
+ return
+ if bits == self.len:
+ self._clear()
+ return
+ newlength_in_bytes = (self._offset + self.len - bits + 7) // 8
+ self._setbytes_unsafe(self._datastore.getbyteslice(0, newlength_in_bytes), self.len - bits,
+ self._offset)
+ assert self._assertsanity()
+
+ def _insert(self, bs, pos):
+ """Insert bs at pos."""
+ assert 0 <= pos <= self.len
+ if pos > self.len // 2:
+ # Inserting nearer end, so cut off end.
+ end = self._slice(pos, self.len)
+ self._truncateend(self.len - pos)
+ self._append(bs)
+ self._append(end)
+ else:
+ # Inserting nearer start, so cut off start.
+ start = self._slice(0, pos)
+ self._truncatestart(pos)
+ self._prepend(bs)
+ self._prepend(start)
+ try:
+ self._pos = pos + bs.len
+ except AttributeError:
+ pass
+ assert self._assertsanity()
+
+ def _overwrite(self, bs, pos):
+ """Overwrite with bs at pos."""
+ assert 0 <= pos < self.len
+ if bs is self:
+ # Just overwriting with self, so do nothing.
+ assert pos == 0
+ return
+ firstbytepos = (self._offset + pos) // 8
+ lastbytepos = (self._offset + pos + bs.len - 1) // 8
+ bytepos, bitoffset = divmod(self._offset + pos, 8)
+ if firstbytepos == lastbytepos:
+ mask = ((1 << bs.len) - 1) << (8 - bs.len - bitoffset)
+ self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask))
+ d = offsetcopy(bs._datastore, bitoffset)
+ self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask))
+ else:
+ # Do first byte
+ mask = (1 << (8 - bitoffset)) - 1
+ self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask))
+ d = offsetcopy(bs._datastore, bitoffset)
+ self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask))
+ # Now do all the full bytes
+ self._datastore.setbyteslice(firstbytepos + 1, lastbytepos, d.getbyteslice(1, lastbytepos - firstbytepos))
+ # and finally the last byte
+ bitsleft = (self._offset + pos + bs.len) % 8
+ if not bitsleft:
+ bitsleft = 8
+ mask = (1 << (8 - bitsleft)) - 1
+ self._datastore.setbyte(lastbytepos, self._datastore.getbyte(lastbytepos) & mask)
+ self._datastore.setbyte(lastbytepos,
+ self._datastore.getbyte(lastbytepos) | (d.getbyte(d.bytelength - 1) & ~mask))
+ assert self._assertsanity()
+
+ def _delete(self, bits, pos):
+ """Delete bits at pos."""
+ assert 0 <= pos <= self.len
+ assert pos + bits <= self.len
+ if not pos:
+ # Cutting bits off at the start.
+ self._truncatestart(bits)
+ return
+ if pos + bits == self.len:
+ # Cutting bits off at the end.
+ self._truncateend(bits)
+ return
+ if pos > self.len - pos - bits:
+ # More bits before cut point than after it, so do bit shifting
+ # on the final bits.
+ end = self._slice(pos + bits, self.len)
+ assert self.len - pos > 0
+ self._truncateend(self.len - pos)
+ self._append(end)
+ return
+ # More bits after the cut point than before it.
+ start = self._slice(0, pos)
+ self._truncatestart(pos + bits)
+ self._prepend(start)
+ return
+
+ def _reversebytes(self, start, end):
+ """Reverse bytes in-place."""
+ # Make the start occur on a byte boundary
+ # TODO: We could be cleverer here to avoid changing the offset.
+ newoffset = 8 - (start % 8)
+ if newoffset == 8:
+ newoffset = 0
+ self._datastore = offsetcopy(self._datastore, newoffset)
+ # Now just reverse the byte data
+ toreverse = bytearray(self._datastore.getbyteslice((newoffset + start) // 8, (newoffset + end) // 8))
+ toreverse.reverse()
+ self._datastore.setbyteslice((newoffset + start) // 8, (newoffset + end) // 8, toreverse)
+
+ def _set(self, pos):
+ """Set bit at pos to 1."""
+ assert 0 <= pos < self.len
+ self._datastore.setbit(pos)
+
+ def _unset(self, pos):
+ """Set bit at pos to 0."""
+ assert 0 <= pos < self.len
+ self._datastore.unsetbit(pos)
+
+ def _invert(self, pos):
+ """Flip bit at pos 1<->0."""
+ assert 0 <= pos < self.len
+ self._datastore.invertbit(pos)
+
+ def _invert_all(self):
+ """Invert every bit."""
+ set = self._datastore.setbyte
+ get = self._datastore.getbyte
+ for p in xrange(self._datastore.byteoffset, self._datastore.byteoffset + self._datastore.bytelength):
+ set(p, 256 + ~get(p))
+
+ def _ilshift(self, n):
+ """Shift bits by n to the left in place. Return self."""
+ assert 0 < n <= self.len
+ self._append(Bits(n))
+ self._truncatestart(n)
+ return self
+
+ def _irshift(self, n):
+ """Shift bits by n to the right in place. Return self."""
+ assert 0 < n <= self.len
+ self._prepend(Bits(n))
+ self._truncateend(n)
+ return self
+
+ def _imul(self, n):
+ """Concatenate n copies of self in place. Return self."""
+ assert n >= 0
+ if not n:
+ self._clear()
+ return self
+ m = 1
+ old_len = self.len
+ while m * 2 < n:
+ self._append(self)
+ m *= 2
+ self._append(self[0:(n - m) * old_len])
+ return self
+
+ def _inplace_logical_helper(self, bs, f):
+ """Helper function containing most of the __ior__, __iand__, __ixor__ code."""
+ # Give the two bitstrings the same offset (modulo 8)
+ self_byteoffset, self_bitoffset = divmod(self._offset, 8)
+ bs_byteoffset, bs_bitoffset = divmod(bs._offset, 8)
+ if bs_bitoffset != self_bitoffset:
+ if not self_bitoffset:
+ bs._datastore = offsetcopy(bs._datastore, 0)
+ else:
+ self._datastore = offsetcopy(self._datastore, bs_bitoffset)
+ a = self._datastore.rawbytes
+ b = bs._datastore.rawbytes
+ for i in xrange(len(a)):
+ a[i] = f(a[i + self_byteoffset], b[i + bs_byteoffset])
+ return self
+
+ def _ior(self, bs):
+ return self._inplace_logical_helper(bs, operator.ior)
+
+ def _iand(self, bs):
+ return self._inplace_logical_helper(bs, operator.iand)
+
+ def _ixor(self, bs):
+ return self._inplace_logical_helper(bs, operator.xor)
+
+ def _readbits(self, length, start):
+ """Read some bits from the bitstring and return newly constructed bitstring."""
+ return self._slice(start, start + length)
+
+ def _validate_slice(self, start, end):
+ """Validate start and end and return them as positive bit positions."""
+ if start is None:
+ start = 0
+ elif start < 0:
+ start += self.len
+ if end is None:
+ end = self.len
+ elif end < 0:
+ end += self.len
+ if not 0 <= end <= self.len:
+ raise ValueError("end is not a valid position in the bitstring.")
+ if not 0 <= start <= self.len:
+ raise ValueError("start is not a valid position in the bitstring.")
+ if end < start:
+ raise ValueError("end must not be less than start.")
+ return start, end
+
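+ # For example, on a 16 bit bitstring _validate_slice(-8, None) normalises to
+ # (8, 16), matching Python slice semantics for negative indices.
+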
+ def unpack(self, fmt, **kwargs):
+ """Interpret the whole bitstring using fmt and return list.
+
+ fmt -- A single string or a list of strings with comma separated tokens
+ describing how to interpret the bits in the bitstring. Items
+ can also be integers, for reading a new bitstring of the given length.
+ kwargs -- A dictionary or keyword-value pairs - the keywords used in the
+ format string will be replaced with their given value.
+
+ Raises ValueError if the format is not understood. If not enough bits
+ are available then all bits to the end of the bitstring will be used.
+
+ See the docstring for 'read' for token examples.
+
+ """
+ return self._readlist(fmt, 0, **kwargs)[0]
+
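+ # Illustrative sketch of unpack (token syntax as parsed by tokenparser; the
+ # values assume the uint read logic defined above):
+ #     >>> Bits('0x1f02').unpack('uint:8, uint:8')
+ #     [31, 2]
+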
+ def _readlist(self, fmt, pos, **kwargs):
+ tokens = []
+ stretchy_token = None
+ if isinstance(fmt, basestring):
+ fmt = [fmt]
+ # Not very optimal, but replace integers with 'bits' tokens.
+ # TODO: optimise
+ for i, f in enumerate(fmt):
+ if isinstance(f, numbers.Integral):
+ fmt[i] = "bits:{0}".format(f)
+ for f_item in fmt:
+ stretchy, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys())))
+ if stretchy:
+ if stretchy_token:
+ raise Error("It's not possible to have more than one 'filler' token.")
+ stretchy_token = stretchy
+ tokens.extend(tkns)
+ if not stretchy_token:
+ lst = []
+ for name, length, _ in tokens:
+ if length in kwargs:
+ length = kwargs[length]
+ if name == 'bytes':
+ length *= 8
+ if name in kwargs and length is None:
+ # Using default 'uint' - the name is really the length.
+ value, pos = self._readtoken('uint', pos, kwargs[name])
+ lst.append(value)
+ continue
+ value, pos = self._readtoken(name, pos, length)
+ if value is not None: # Don't append pad tokens
+ lst.append(value)
+ return lst, pos
+ stretchy_token = False
+ bits_after_stretchy_token = 0
+ for token in tokens:
+ name, length, _ = token
+ if length in kwargs:
+ length = kwargs[length]
+ if name == 'bytes':
+ length *= 8
+ if name in kwargs and length is None:
+ # Default 'uint'.
+ length = kwargs[name]
+ if stretchy_token:
+ if name in ('se', 'ue', 'sie', 'uie'):
+ raise Error("It's not possible to parse a variable"
+ "length token after a 'filler' token.")
+ else:
+ if length is None:
+ raise Error("It's not possible to have more than "
+ "one 'filler' token.")
+ bits_after_stretchy_token += length
+ if length is None and name not in ('se', 'ue', 'sie', 'uie'):
+ assert not stretchy_token
+ stretchy_token = token
+ bits_left = self.len - pos
+ return_values = []
+ for token in tokens:
+ name, length, _ = token
+ if token is stretchy_token:
+ # Set length to the remaining bits
+ length = max(bits_left - bits_after_stretchy_token, 0)
+ if length in kwargs:
+ length = kwargs[length]
+ if name == 'bytes':
+ length *= 8
+ if name in kwargs and length is None:
+ # Default 'uint'
+ length = kwargs[name]
+ if length is not None:
+ bits_left -= length
+ value, pos = self._readtoken(name, pos, length)
+ if value is not None:
+ return_values.append(value)
+ return return_values, pos
+
+ def _findbytes(self, bytes_, start, end, bytealigned):
+ """Quicker version of find when everything's whole byte
+ and byte aligned.
+
+ """
+ assert self._datastore.offset == 0
+ assert bytealigned is True
+ # Round the start position up to the next byte boundary.
+ bytepos = (start + 7) // 8
+ found = False
+ p = bytepos
+ finalpos = end // 8
+ increment = max(1024, len(bytes_) * 10)
+ buffersize = increment + len(bytes_)
+ while p < finalpos:
+ # Read in file or from memory in overlapping chunks and search the chunks.
+ buf = bytearray(self._datastore.getbyteslice(p, min(p + buffersize, finalpos)))
+ pos = buf.find(bytes_)
+ if pos != -1:
+ found = True
+ p += pos
+ break
+ p += increment
+ if not found:
+ return ()
+ return (p * 8,)
+
+ def _findregex(self, reg_ex, start, end, bytealigned):
+ """Find first occurrence of a compiled regular expression.
+
+ Note that this doesn't support arbitrary regexes, in particular they
+ must match a known length.
+
+ """
+ p = start
+ length = len(reg_ex.pattern)
+ # We grab overlapping chunks of the binary representation and
+ # do an ordinary string search within that.
+ increment = max(4096, length * 10)
+ buffersize = increment + length
+ while p < end:
+ buf = self._readbin(min(buffersize, end - p), p)
+ # Test using regular expressions...
+ m = reg_ex.search(buf)
+ if m:
+ pos = m.start()
+ # If bytealigned we only accept byte aligned positions.
+ if not bytealigned or (p + pos) % 8 == 0:
+ return (p + pos,)
+ if bytealigned:
+ # Advance to just beyond the non-byte-aligned match and try again...
+ p += pos + 1
+ continue
+ p += increment
+ # Not found, return empty tuple
+ return ()
+
+ def find(self, bs, start=None, end=None, bytealigned=None):
+ """Find first occurrence of substring bs.
+
+ Returns a single item tuple with the bit position if found, or an
+ empty tuple if not found. The bit position (pos property) will
+ also be set to the start of the substring if it is found.
+
+ bs -- The bitstring to find.
+ start -- The bit position to start the search. Defaults to 0.
+ end -- The bit position one past the last bit to search.
+ Defaults to self.len.
+ bytealigned -- If True the bitstring will only be
+ found on byte boundaries.
+
+ Raises ValueError if bs is empty, if start < 0, if end > self.len or
+ if end < start.
+
+ >>> BitArray('0xc3e').find('0b1111')
+ (6,)
+
+ """
+ bs = Bits(bs)
+ if not bs.len:
+ raise ValueError("Cannot find an empty bitstring.")
+ start, end = self._validate_slice(start, end)
+ if bytealigned is None:
+ bytealigned = globals()['bytealigned']
+ if bytealigned and not bs.len % 8 and not self._datastore.offset:
+ p = self._findbytes(bs.bytes, start, end, bytealigned)
+ else:
+ p = self._findregex(re.compile(bs._getbin()), start, end, bytealigned)
+ # If called from a class that has a pos, set it
+ try:
+ self._pos = p[0]
+ except (AttributeError, IndexError):
+ pass
+ return p
+
+ def findall(self, bs, start=None, end=None, count=None, bytealigned=None):
+ """Find all occurrences of bs. Return generator of bit positions.
+
+ bs -- The bitstring to find.
+ start -- The bit position to start the search. Defaults to 0.
+ end -- The bit position one past the last bit to search.
+ Defaults to self.len.
+ count -- The maximum number of occurrences to find.
+ bytealigned -- If True the bitstring will only be found on
+ byte boundaries.
+
+ Raises ValueError if bs is empty, if start < 0, if end > self.len or
+ if end < start.
+
+ Note that all occurrences of bs are found, even if they overlap.
+
+ """
+ if count is not None and count < 0:
+ raise ValueError("In findall, count must be >= 0.")
+ bs = Bits(bs)
+ start, end = self._validate_slice(start, end)
+ if bytealigned is None:
+ bytealigned = globals()['bytealigned']
+ c = 0
+ if bytealigned and not bs.len % 8 and not self._datastore.offset:
+ # Use the quick find method
+ f = self._findbytes
+ x = bs._getbytes()
+ else:
+ f = self._findregex
+ x = re.compile(bs._getbin())
+ while True:
+ p = f(x, start, end, bytealigned)
+ if not p:
+ break
+ if count is not None and c >= count:
+ return
+ c += 1
+ try:
+ self._pos = p[0]
+ except AttributeError:
+ pass
+ yield p[0]
+ if bytealigned:
+ start = p[0] + 8
+ else:
+ start = p[0] + 1
+ if start >= end:
+ break
+ return
+
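+ # Illustrative sketch: overlapping matches are all reported, as noted above.
+ #     >>> list(Bits('0b1111').findall('0b11'))
+ #     [0, 1, 2]
+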
+ def rfind(self, bs, start=None, end=None, bytealigned=None):
+ """Find final occurrence of substring bs.
+
+ Returns a single item tuple with the bit position if found, or an
+ empty tuple if not found. The bit position (pos property) will
+ also be set to the start of the substring if it is found.
+
+ bs -- The bitstring to find.
+ start -- The bit position to end the reverse search. Defaults to 0.
+ end -- The bit position one past the first bit to reverse search.
+ Defaults to self.len.
+ bytealigned -- If True the bitstring will only be found on byte
+ boundaries.
+
+ Raises ValueError if bs is empty, if start < 0, if end > self.len or
+ if end < start.
+
+ """
+ bs = Bits(bs)
+ start, end = self._validate_slice(start, end)
+ if bytealigned is None:
+ bytealigned = globals()['bytealigned']
+ if not bs.len:
+ raise ValueError("Cannot find an empty bitstring.")
+ # Search chunks starting near the end and then moving back
+ # until we find bs.
+ increment = max(8192, bs.len * 80)
+ buffersize = min(increment + bs.len, end - start)
+ pos = max(start, end - buffersize)
+ while True:
+ found = list(self.findall(bs, start=pos, end=pos + buffersize,
+ bytealigned=bytealigned))
+ if not found:
+ if pos == start:
+ return ()
+ pos = max(start, pos - increment)
+ continue
+ return (found[-1],)
+
+ def cut(self, bits, start=None, end=None, count=None):
+ """Return bitstring generator by cutting into bits sized chunks.
+
+ bits -- The size in bits of the bitstring chunks to generate.
+ start -- The bit position to start the first cut. Defaults to 0.
+ end -- The bit position one past the last bit to use in the cut.
+ Defaults to self.len.
+ count -- If specified then at most count items are generated.
+ Default is to cut as many times as possible.
+
+ """
+ start, end = self._validate_slice(start, end)
+ if count is not None and count < 0:
+ raise ValueError("Cannot cut - count must be >= 0.")
+ if bits <= 0:
+ raise ValueError("Cannot cut - bits must be >= 0.")
+ c = 0
+ while count is None or c < count:
+ c += 1
+ nextchunk = self._slice(start, min(start + bits, end))
+ if nextchunk.len != bits:
+ return
+ assert nextchunk._assertsanity()
+ yield nextchunk
+ start += bits
+ return
+
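+ # Illustrative sketch of cut: any final chunk shorter than 'bits' is dropped.
+ #     >>> [b.bin for b in Bits('0b110100').cut(2)]
+ #     ['11', '01', '00']
+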
+ def split(self, delimiter, start=None, end=None, count=None,
+ bytealigned=None):
+ """Return bitstring generator by splittling using a delimiter.
+
+ The first item returned is the initial bitstring before the delimiter,
+ which may be an empty bitstring.
+
+ delimiter -- The bitstring used as the divider.
+ start -- The bit position to start the split. Defaults to 0.
+ end -- The bit position one past the last bit to use in the split.
+ Defaults to self.len.
+ count -- If specified then at most count items are generated.
+ Default is to split as many times as possible.
+ bytealigned -- If True splits will only occur on byte boundaries.
+
+ Raises ValueError if the delimiter is empty.
+
+ """
+ delimiter = Bits(delimiter)
+ if not delimiter.len:
+ raise ValueError("split delimiter cannot be empty.")
+ start, end = self._validate_slice(start, end)
+ if bytealigned is None:
+ bytealigned = globals()['bytealigned']
+ if count is not None and count < 0:
+ raise ValueError("Cannot split - count must be >= 0.")
+ if count == 0:
+ return
+ if bytealigned and not delimiter.len % 8 and not self._datastore.offset:
+ # Use the quick find method
+ f = self._findbytes
+ x = delimiter._getbytes()
+ else:
+ f = self._findregex
+ x = re.compile(delimiter._getbin())
+ found = f(x, start, end, bytealigned)
+ if not found:
+ # Initial bits are the whole bitstring being searched
+ yield self._slice(start, end)
+ return
+ # Yield the bits before the first occurrence of the delimiter, even if empty.
+ yield self._slice(start, found[0])
+ startpos = pos = found[0]
+ c = 1
+ while count is None or c < count:
+ pos += delimiter.len
+ found = f(x, pos, end, bytealigned)
+ if not found:
+ # No more occurrences, so return the rest of the bitstring
+ yield self._slice(startpos, end)
+ return
+ c += 1
+ yield self._slice(startpos, found[0])
+ startpos = pos = found[0]
+ # Have generated count bitstrings, so time to quit.
+ return
+
+ def join(self, sequence):
+ """Return concatenation of bitstrings joined by self.
+
+ sequence -- A sequence of bitstrings.
+
+ """
+ s = self.__class__()
+ i = iter(sequence)
+ try:
+ s._append(Bits(next(i)))
+ while True:
+ n = next(i)
+ s._append(self)
+ s._append(Bits(n))
+ except StopIteration:
+ pass
+ return s
+
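+ # Illustrative sketch of join (mirrors str.join):
+ #     >>> Bits('0b0').join(['0b1', '0b1', '0b1']).bin
+ #     '10101'
+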
+ def tobytes(self):
+ """Return the bitstring as bytes, padding with zero bits if needed.
+
+ Up to seven zero bits will be added at the end to byte align.
+
+ """
+ d = offsetcopy(self._datastore, 0).rawbytes
+ # Need to ensure that unused bits at end are set to zero
+ unusedbits = 8 - self.len % 8
+ if unusedbits != 8:
+ d[-1] &= (0xff << unusedbits)
+ return bytes(d)
+
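+ # Illustrative sketch: five bits pad out to one byte, '11111' -> '11111000'.
+ #     >>> Bits('0b11111').tobytes()
+ #     b'\xf8'
+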
+ def tofile(self, f):
+ """Write the bitstring to a file object, padding with zero bits if needed.
+
+ Up to seven zero bits will be added at the end to byte align.
+
+ """
+ # If the bitstring is file based then we don't want to read it all
+ # into memory.
+ chunksize = 1024 * 1024 # 1 MB chunks
+ if not self._offset:
+ a = 0
+ bytelen = self._datastore.bytelength
+ p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1))
+ while len(p) == chunksize:
+ f.write(p)
+ a += chunksize
+ p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1))
+ f.write(p)
+ # Now the final byte, ensuring that unused bits at end are set to 0.
+ bits_in_final_byte = self.len % 8
+ if not bits_in_final_byte:
+ bits_in_final_byte = 8
+ f.write(self[-bits_in_final_byte:].tobytes())
+ else:
+ # Really quite inefficient...
+ a = 0
+ b = a + chunksize * 8
+ while b <= self.len:
+ f.write(self._slice(a, b)._getbytes())
+ a += chunksize * 8
+ b += chunksize * 8
+ if a != self.len:
+ f.write(self._slice(a, self.len).tobytes())
+
+ def startswith(self, prefix, start=None, end=None):
+ """Return whether the current bitstring starts with prefix.
+
+ prefix -- The bitstring to search for.
+ start -- The bit position to start from. Defaults to 0.
+ end -- The bit position to end at. Defaults to self.len.
+
+ """
+ prefix = Bits(prefix)
+ start, end = self._validate_slice(start, end)
+ if end < start + prefix.len:
+ return False
+ end = start + prefix.len
+ return self._slice(start, end) == prefix
+
+ def endswith(self, suffix, start=None, end=None):
+ """Return whether the current bitstring ends with suffix.
+
+ suffix -- The bitstring to search for.
+ start -- The bit position to start from. Defaults to 0.
+ end -- The bit position to end at. Defaults to self.len.
+
+ """
+ suffix = Bits(suffix)
+ start, end = self._validate_slice(start, end)
+ if start + suffix.len > end:
+ return False
+ start = end - suffix.len
+ return self._slice(start, end) == suffix
+
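+ # Illustrative sketch of startswith/endswith:
+ #     >>> Bits('0b110011').startswith('0b110')
+ #     True
+ #     >>> Bits('0b110011').endswith('0b011')
+ #     True
+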
+ def all(self, value, pos=None):
+ """Return True if one or many bits are all set to value.
+
+ value -- If value is True then checks for bits set to 1, otherwise
+ checks for bits set to 0.
+ pos -- An iterable of bit positions. Negative numbers are treated in
+ the same way as slice indices. Defaults to the whole bitstring.
+
+ """
+ value = bool(value)
+ length = self.len
+ if pos is None:
+ pos = xrange(self.len)
+ for p in pos:
+ if p < 0:
+ p += length
+ if not 0 <= p < length:
+ raise IndexError("Bit position {0} out of range.".format(p))
+ if self._datastore.getbit(p) is not value:
+ return False
+ return True
+
+ def any(self, value, pos=None):
+ """Return True if any of one or many bits are set to value.
+
+ value -- If value is True then checks for bits set to 1, otherwise
+ checks for bits set to 0.
+ pos -- An iterable of bit positions. Negative numbers are treated in
+ the same way as slice indices. Defaults to the whole bitstring.
+
+ """
+ value = bool(value)
+ length = self.len
+ if pos is None:
+ pos = xrange(self.len)
+ for p in pos:
+ if p < 0:
+ p += length
+ if not 0 <= p < length:
+ raise IndexError("Bit position {0} out of range.".format(p))
+ if self._datastore.getbit(p) is value:
+ return True
+ return False
+
+ def count(self, value):
+ """Return count of total number of either zero or one bits.
+
+ value -- If True then bits set to 1 are counted, otherwise bits set
+ to 0 are counted.
+
+ >>> Bits('0xef').count(1)
+ 7
+
+ """
+ if not self.len:
+ return 0
+ # count the number of 1s (from which it's easy to work out the 0s).
+ # Don't count the final byte yet.
+ count = sum(BIT_COUNT[self._datastore.getbyte(i)] for i in xrange(self._datastore.bytelength - 1))
+ # adjust for bits at start that aren't part of the bitstring
+ if self._offset:
+ count -= BIT_COUNT[self._datastore.getbyte(0) >> (8 - self._offset)]
+ # and count the final 1 to 8 bits at the end.
+ endbits = self._datastore.bytelength * 8 - (self._offset + self.len)
+ count += BIT_COUNT[self._datastore.getbyte(self._datastore.bytelength - 1) >> endbits]
+ return count if value else self.len - count
+
+ # Create native-endian functions as aliases depending on the byteorder
+ if byteorder == 'little':
+ _setfloatne = _setfloatle
+ _readfloatne = _readfloatle
+ _getfloatne = _getfloatle
+ _setuintne = _setuintle
+ _readuintne = _readuintle
+ _getuintne = _getuintle
+ _setintne = _setintle
+ _readintne = _readintle
+ _getintne = _getintle
+ else:
+ _setfloatne = _setfloat
+ _readfloatne = _readfloat
+ _getfloatne = _getfloat
+ _setuintne = _setuintbe
+ _readuintne = _readuintbe
+ _getuintne = _getuintbe
+ _setintne = _setintbe
+ _readintne = _readintbe
+ _getintne = _getintbe
+
+ _offset = property(_getoffset)
+
+ len = property(_getlength,
+ doc="""The length of the bitstring in bits. Read only.
+ """)
+ length = property(_getlength,
+ doc="""The length of the bitstring in bits. Read only.
+ """)
+ bool = property(_getbool,
+ doc="""The bitstring as a bool (True or False). Read only.
+ """)
+ hex = property(_gethex,
+ doc="""The bitstring as a hexadecimal string. Read only.
+ """)
+ bin = property(_getbin,
+ doc="""The bitstring as a binary string. Read only.
+ """)
+ oct = property(_getoct,
+ doc="""The bitstring as an octal string. Read only.
+ """)
+ bytes = property(_getbytes,
+ doc="""The bitstring as a bytes object. Read only.
+ """)
+ int = property(_getint,
+ doc="""The bitstring as a two's complement signed int. Read only.
+ """)
+ uint = property(_getuint,
+ doc="""The bitstring as a two's complement unsigned int. Read only.
+ """)
+ float = property(_getfloat,
+ doc="""The bitstring as a floating point number. Read only.
+ """)
+ intbe = property(_getintbe,
+ doc="""The bitstring as a two's complement big-endian signed int. Read only.
+ """)
+ uintbe = property(_getuintbe,
+ doc="""The bitstring as a two's complement big-endian unsigned int. Read only.
+ """)
+ floatbe = property(_getfloat,
+ doc="""The bitstring as a big-endian floating point number. Read only.
+ """)
+ intle = property(_getintle,
+ doc="""The bitstring as a two's complement little-endian signed int. Read only.
+ """)
+ uintle = property(_getuintle,
+ doc="""The bitstring as a two's complement little-endian unsigned int. Read only.
+ """)
+ floatle = property(_getfloatle,
+ doc="""The bitstring as a little-endian floating point number. Read only.
+ """)
+ intne = property(_getintne,
+ doc="""The bitstring as a two's complement native-endian signed int. Read only.
+ """)
+ uintne = property(_getuintne,
+ doc="""The bitstring as a two's complement native-endian unsigned int. Read only.
+ """)
+ floatne = property(_getfloatne,
+ doc="""The bitstring as a native-endian floating point number. Read only.
+ """)
+ ue = property(_getue,
+ doc="""The bitstring as an unsigned exponential-Golomb code. Read only.
+ """)
+ se = property(_getse,
+ doc="""The bitstring as a signed exponential-Golomb code. Read only.
+ """)
+ uie = property(_getuie,
+ doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read only.
+ """)
+ sie = property(_getsie,
+ doc="""The bitstring as a signed interleaved exponential-Golomb code. Read only.
+ """)
+
+
+# Dictionary that maps token names to the function that reads them.
+name_to_read = {'uint': Bits._readuint,
+ 'uintle': Bits._readuintle,
+ 'uintbe': Bits._readuintbe,
+ 'uintne': Bits._readuintne,
+ 'int': Bits._readint,
+ 'intle': Bits._readintle,
+ 'intbe': Bits._readintbe,
+ 'intne': Bits._readintne,
+ 'float': Bits._readfloat,
+ 'floatbe': Bits._readfloat, # floatbe is a synonym for float
+ 'floatle': Bits._readfloatle,
+ 'floatne': Bits._readfloatne,
+ 'hex': Bits._readhex,
+ 'oct': Bits._readoct,
+ 'bin': Bits._readbin,
+ 'bits': Bits._readbits,
+ 'bytes': Bits._readbytes,
+ 'ue': Bits._readue,
+ 'se': Bits._readse,
+ 'uie': Bits._readuie,
+ 'sie': Bits._readsie,
+ 'bool': Bits._readbool,
+ }
+
+# Dictionaries for mapping init keywords with init functions.
+init_with_length_and_offset = {'bytes': Bits._setbytes_safe,
+ 'filename': Bits._setfile,
+ }
+
+init_with_length_only = {'uint': Bits._setuint,
+ 'int': Bits._setint,
+ 'float': Bits._setfloat,
+ 'uintbe': Bits._setuintbe,
+ 'intbe': Bits._setintbe,
+ 'floatbe': Bits._setfloat,
+ 'uintle': Bits._setuintle,
+ 'intle': Bits._setintle,
+ 'floatle': Bits._setfloatle,
+ 'uintne': Bits._setuintne,
+ 'intne': Bits._setintne,
+ 'floatne': Bits._setfloatne,
+ }
+
+init_without_length_or_offset = {'bin': Bits._setbin_safe,
+ 'hex': Bits._sethex,
+ 'oct': Bits._setoct,
+ 'ue': Bits._setue,
+ 'se': Bits._setse,
+ 'uie': Bits._setuie,
+ 'sie': Bits._setsie,
+ 'bool': Bits._setbool,
+ }
+
+
+class BitArray(Bits):
+ """A container holding a mutable sequence of bits.
+
+ Subclass of the immutable Bits class. Inherits all of its
+ methods (except __hash__) and adds mutating methods.
+
+ Mutating methods:
+
+ append() -- Append a bitstring.
+ byteswap() -- Change byte endianness in-place.
+ insert() -- Insert a bitstring.
+ invert() -- Flip bit(s) between one and zero.
+ overwrite() -- Overwrite a section with a new bitstring.
+ prepend() -- Prepend a bitstring.
+ replace() -- Replace occurrences of one bitstring with another.
+ reverse() -- Reverse bits in-place.
+ rol() -- Rotate bits to the left.
+ ror() -- Rotate bits to the right.
+ set() -- Set bit(s) to 1 or 0.
+
+ Methods inherited from Bits:
+
+ all() -- Check if all specified bits are set to 1 or 0.
+ any() -- Check if any of specified bits are set to 1 or 0.
+ count() -- Count the number of bits set to 1 or 0.
+ cut() -- Create generator of constant sized chunks.
+ endswith() -- Return whether the bitstring ends with a sub-bitstring.
+ find() -- Find a sub-bitstring in the current bitstring.
+ findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
+ join() -- Join bitstrings together using current bitstring.
+ rfind() -- Seek backwards to find a sub-bitstring.
+ split() -- Create generator of chunks split by a delimiter.
+ startswith() -- Return whether the bitstring starts with a sub-bitstring.
+ tobytes() -- Return bitstring as bytes, padding if needed.
+ tofile() -- Write bitstring to file, padding if needed.
+ unpack() -- Interpret bits using format string.
+
+ Special methods:
+
+ Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^=
+ in addition to the inherited [], ==, !=, +, *, ~, <<, >>, &, | and ^.
+
+ Properties:
+
+ bin -- The bitstring as a binary string.
+ bool -- For single bit bitstrings, interpret as True or False.
+ bytepos -- The current byte position in the bitstring.
+ bytes -- The bitstring as a bytes object.
+ float -- Interpret as a floating point number.
+ floatbe -- Interpret as a big-endian floating point number.
+ floatle -- Interpret as a little-endian floating point number.
+ floatne -- Interpret as a native-endian floating point number.
+ hex -- The bitstring as a hexadecimal string.
+ int -- Interpret as a two's complement signed integer.
+ intbe -- Interpret as a big-endian signed integer.
+ intle -- Interpret as a little-endian signed integer.
+ intne -- Interpret as a native-endian signed integer.
+ len -- Length of the bitstring in bits.
+ oct -- The bitstring as an octal string.
+ pos -- The current bit position in the bitstring.
+ se -- Interpret as a signed exponential-Golomb code.
+ ue -- Interpret as an unsigned exponential-Golomb code.
+ sie -- Interpret as a signed interleaved exponential-Golomb code.
+ uie -- Interpret as an unsigned interleaved exponential-Golomb code.
+ uint -- Interpret as a two's complement unsigned integer.
+ uintbe -- Interpret as a big-endian unsigned integer.
+ uintle -- Interpret as a little-endian unsigned integer.
+ uintne -- Interpret as a native-endian unsigned integer.
+
+ """
+
+ __slots__ = ()
+
+ # As BitArray objects are mutable, we shouldn't allow them to be hashed.
+ __hash__ = None
+
+ def __init__(self, auto=None, length=None, offset=None, **kwargs):
+ """Either specify an 'auto' initialiser:
+ auto -- a string of comma separated tokens, an integer, a file object,
+ a bytearray, a boolean iterable or another bitstring.
+
+ Or initialise via **kwargs with one (and only one) of:
+ bytes -- raw data as a string, for example read from a binary file.
+ bin -- binary string representation, e.g. '0b001010'.
+ hex -- hexadecimal string representation, e.g. '0x2ef'.
+ oct -- octal string representation, e.g. '0o777'.
+ uint -- an unsigned integer.
+ int -- a signed integer.
+ float -- a floating point number.
+ uintbe -- an unsigned big-endian whole byte integer.
+ intbe -- a signed big-endian whole byte integer.
+ floatbe -- a big-endian floating point number.
+ uintle -- an unsigned little-endian whole byte integer.
+ intle -- a signed little-endian whole byte integer.
+ floatle -- a little-endian floating point number.
+ uintne -- an unsigned native-endian whole byte integer.
+ intne -- a signed native-endian whole byte integer.
+ floatne -- a native-endian floating point number.
+ se -- a signed exponential-Golomb code.
+ ue -- an unsigned exponential-Golomb code.
+ sie -- a signed interleaved exponential-Golomb code.
+ uie -- an unsigned interleaved exponential-Golomb code.
+ bool -- a boolean (True or False).
+ filename -- a file which will be opened in binary read-only mode.
+
+ Other keyword arguments:
+ length -- length of the bitstring in bits, if needed and appropriate.
+ It must be supplied for all integer and float initialisers.
+ offset -- bit offset to the data. These offset bits are
+ ignored and this is intended for use when
+ initialising using 'bytes' or 'filename'.
+
+ """
+ # For mutable BitArrays we always read in files to memory:
+ if not isinstance(self._datastore, ByteStore):
+ self._ensureinmemory()
+
+ def __new__(cls, auto=None, length=None, offset=None, **kwargs):
+ x = super(BitArray, cls).__new__(cls)
+ y = Bits.__new__(BitArray, auto, length, offset, **kwargs)
+ x._datastore = y._datastore
+ return x
+
+ def __iadd__(self, bs):
+ """Append bs to current bitstring. Return self.
+
+ bs -- the bitstring to append.
+
+ """
+ self.append(bs)
+ return self
+
+ def __copy__(self):
+ """Return a new copy of the BitArray."""
+ s_copy = BitArray()
+ if not isinstance(self._datastore, ByteStore):
+ # Let them both point to the same (invariant) array.
+ # If either gets modified then at that point they'll be read into memory.
+ s_copy._datastore = self._datastore
+ else:
+ s_copy._datastore = copy.copy(self._datastore)
+ return s_copy
+
+ def __setitem__(self, key, value):
+ """Set item or range to new value.
+
+ Indices are in bits and the slice parameters have their conventional
+ meanings (the step parameter is no longer a multiplier).
+
+ If the length of the bitstring is changed then pos will be moved
+ to after the inserted section, otherwise it will remain unchanged.
+
+ >>> s = BitArray('0xff')
+ >>> s[0:4] = '0xe'
+ >>> print s
+ 0xef
+ >>> s[4:4] = '0x00'
+ >>> print s
+ 0xe00f
+
+ """
+ try:
+ # A slice
+ start, step = 0, 1
+ if key.step is not None:
+ step = key.step
+ except AttributeError:
+ # single element
+ if key < 0:
+ key += self.len
+ if not 0 <= key < self.len:
+ raise IndexError("Slice index out of range.")
+ if isinstance(value, numbers.Integral):
+ if not value:
+ self._unset(key)
+ return
+ if value in (1, -1):
+ self._set(key)
+ return
+ raise ValueError("Cannot set a single bit with integer {0}.".format(value))
+ value = Bits(value)
+ if value.len == 1:
+ # TODO: this can't be optimal
+ if value[0]:
+ self._set(key)
+ else:
+ self._unset(key)
+ else:
+ self._delete(1, key)
+ self._insert(value, key)
+ return
+ else:
+ if step != 1:
+ # convert to binary string and use string slicing
+ # TODO: Horribly inefficient
+ temp = list(self._getbin())
+ v = list(Bits(value)._getbin())
+ temp.__setitem__(key, v)
+ self._setbin_unsafe(''.join(temp))
+ return
+
+ # If value is an integer then we want to set the slice to that
+ # value rather than initialise a new bitstring of that length.
+ if not isinstance(value, numbers.Integral):
+ try:
+ # TODO: Better way than calling constructor here?
+ value = Bits(value)
+ except TypeError:
+ raise TypeError("Bitstring, integer or string expected. "
+ "Got {0}.".format(type(value)))
+ if key.start is not None:
+ start = key.start
+ if key.start < 0:
+ start += self.len
+ if start < 0:
+ start = 0
+ stop = self.len
+ if key.stop is not None:
+ stop = key.stop
+ if key.stop < 0:
+ stop += self.len
+ if start > stop:
+ # The standard behaviour for lists is to just insert at the
+ # start position if stop < start and step == 1.
+ stop = start
+ if isinstance(value, numbers.Integral):
+ if value >= 0:
+ value = self.__class__(uint=value, length=stop - start)
+ else:
+ value = self.__class__(int=value, length=stop - start)
+ stop = min(stop, self.len)
+ start = max(start, 0)
+ start = min(start, stop)
+ if (stop - start) == value.len:
+ if not value.len:
+ return
+ if step >= 0:
+ self._overwrite(value, start)
+ else:
+ self._overwrite(value.__getitem__(slice(None, None, 1)), start)
+ else:
+ # TODO: A delete then insert is wasteful - it could do unneeded shifts.
+ # Could be either overwrite + insert or overwrite + delete.
+ self._delete(stop - start, start)
+ if step >= 0:
+ self._insert(value, start)
+ else:
+ self._insert(value.__getitem__(slice(None, None, 1)), start)
+ # pos is now after the inserted piece.
+ return
+
+ def __delitem__(self, key):
+ """Delete item or range.
+
+ Indices are in bits and the slice parameters have their conventional
+ meanings (the step parameter is no longer a multiplier).
+
+ >>> a = BitArray('0x001122')
+ >>> del a[8:16]
+ >>> print a
+ 0x0022
+
+ """
+ try:
+ # A slice
+ start = 0
+ step = key.step if key.step is not None else 1
+ except AttributeError:
+ # single element
+ if key < 0:
+ key += self.len
+ if not 0 <= key < self.len:
+ raise IndexError("Slice index out of range.")
+ self._delete(1, key)
+ return
+ else:
+ if step != 1:
+ # convert to binary string and use string slicing
+ # TODO: Horribly inefficient
+ temp = list(self._getbin())
+ temp.__delitem__(key)
+ self._setbin_unsafe(''.join(temp))
+ return
+ stop = key.stop
+ if key.start is not None:
+ start = key.start
+ if key.start < 0 and stop is None:
+ start += self.len
+ if start < 0:
+ start = 0
+ if stop is None:
+ stop = self.len
+ if start > stop:
+ return
+ stop = min(stop, self.len)
+ start = max(start, 0)
+ start = min(start, stop)
+ self._delete(stop - start, start)
+ return
+
+ def __ilshift__(self, n):
+ """Shift bits by n to the left in place. Return self.
+
+ n -- the number of bits to shift. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot shift by a negative amount.")
+ if not self.len:
+ raise ValueError("Cannot shift an empty bitstring.")
+ if not n:
+ return self
+ n = min(n, self.len)
+ return self._ilshift(n)
+
+ def __irshift__(self, n):
+ """Shift bits by n to the right in place. Return self.
+
+ n -- the number of bits to shift. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot shift by a negative amount.")
+ if not self.len:
+ raise ValueError("Cannot shift an empty bitstring.")
+ if not n:
+ return self
+ n = min(n, self.len)
+ return self._irshift(n)
+
+ def __imul__(self, n):
+ """Concatenate n copies of self in place. Return self.
+
+ Called for expressions of the form 'a *= 3'.
+ n -- The number of concatenations. Must be >= 0.
+
+ """
+ if n < 0:
+ raise ValueError("Cannot multiply by a negative integer.")
+ return self._imul(n)
+
+ def __ior__(self, bs):
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for |= operator.")
+ return self._ior(bs)
+
+ def __iand__(self, bs):
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for &= operator.")
+ return self._iand(bs)
+
+ def __ixor__(self, bs):
+ bs = Bits(bs)
+ if self.len != bs.len:
+ raise ValueError("Bitstrings must have the same length "
+ "for ^= operator.")
+ return self._ixor(bs)
+
+ def replace(self, old, new, start=None, end=None, count=None,
+ bytealigned=None):
+ """Replace all occurrences of old with new in place.
+
+ Returns number of replacements made.
+
+ old -- The bitstring to replace.
+ new -- The replacement bitstring.
+ start -- Any occurrences that start before this will not be replaced.
+ Defaults to 0.
+ end -- Any occurrences that finish after this will not be replaced.
+ Defaults to self.len.
+ count -- The maximum number of replacements to make. Defaults to
+ replace all occurrences.
+ bytealigned -- If True replacements will only be made on byte
+ boundaries.
+
+ Raises ValueError if old is empty or if start or end are
+ out of range.
+
+ """
+ old = Bits(old)
+ new = Bits(new)
+ if not old.len:
+ raise ValueError("Empty bitstring cannot be replaced.")
+ start, end = self._validate_slice(start, end)
+ if bytealigned is None:
+ bytealigned = globals()['bytealigned']
+ # Adjust count for use in split()
+ if count is not None:
+ count += 1
+ sections = self.split(old, start, end, count, bytealigned)
+ lengths = [s.len for s in sections]
+ if len(lengths) == 1:
+ # Didn't find anything to replace.
+ return 0 # no replacements done
+ if new is self:
+ # Prevent self assignment woes
+ new = copy.copy(self)
+ positions = [lengths[0] + start]
+ for l in lengths[1:-1]:
+ # Next position is the previous one plus the length of the next section.
+ positions.append(positions[-1] + l)
+ # We have all the positions that need replacements. We do them
+ # in reverse order so that they won't move around as we replace.
+ positions.reverse()
+ try:
+ # Need to calculate new pos, if this is a bitstream
+ newpos = self._pos
+ for p in positions:
+ self[p:p + old.len] = new
+ if old.len != new.len:
+ diff = new.len - old.len
+ for p in positions:
+ if p >= newpos:
+ continue
+ if p + old.len <= newpos:
+ newpos += diff
+ else:
+ newpos = p
+ self._pos = newpos
+ except AttributeError:
+ for p in positions:
+ self[p:p + old.len] = new
+ assert self._assertsanity()
+ return len(lengths) - 1
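+ # An illustrative sketch (not part of the library source) showing the
+ # return value as the number of replacements made:
+ #
+ # >>> s = BitArray('0x00112200')
+ # >>> s.replace('0x11', '0xff')
+ # 1
+ # >>> s.hex
+ # '00ff2200'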
+
+ def insert(self, bs, pos=None):
+ """Insert bs at bit position pos.
+
+ bs -- The bitstring to insert.
+ pos -- The bit position to insert at.
+
+ Raises ValueError if pos < 0 or pos > self.len.
+
+ """
+ bs = Bits(bs)
+ if not bs.len:
+ return self
+ if bs is self:
+ bs = self.__copy__()
+ if pos is None:
+ try:
+ pos = self._pos
+ except AttributeError:
+ raise TypeError("insert require a bit position for this type.")
+ if pos < 0:
+ pos += self.len
+ if not 0 <= pos <= self.len:
+ raise ValueError("Invalid insert position.")
+ self._insert(bs, pos)
+
+ def overwrite(self, bs, pos=None):
+ """Overwrite with bs at bit position pos.
+
+ bs -- The bitstring to overwrite with.
+ pos -- The bit position to begin overwriting from.
+
+ Raises ValueError if pos < 0 or pos + bs.len > self.len
+
+ """
+ bs = Bits(bs)
+ if not bs.len:
+ return
+ if pos is None:
+ try:
+ pos = self._pos
+ except AttributeError:
+ raise TypeError("overwrite require a bit position for this type.")
+ if pos < 0:
+ pos += self.len
+ if pos < 0 or pos + bs.len > self.len:
+ raise ValueError("Overwrite exceeds boundary of bitstring.")
+ self._overwrite(bs, pos)
+ try:
+ self._pos = pos + bs.len
+ except AttributeError:
+ pass
+
+ def append(self, bs):
+ """Append a bitstring to the current bitstring.
+
+ bs -- The bitstring to append.
+
+ """
+ # The offset is a hint to make bs easily appendable.
+ bs = self._converttobitstring(bs, offset=(self.len + self._offset) % 8)
+ self._append(bs)
+
+ def prepend(self, bs):
+ """Prepend a bitstring to the current bitstring.
+
+ bs -- The bitstring to prepend.
+
+ """
+ bs = Bits(bs)
+ self._prepend(bs)
+
+ def reverse(self, start=None, end=None):
+ """Reverse bits in-place.
+
+ start -- Position of first bit to reverse. Defaults to 0.
+ end -- One past the position of the last bit to reverse.
+ Defaults to self.len.
+
+ Using on an empty bitstring will have no effect.
+
+ Raises ValueError if start < 0, end > self.len or end < start.
+
+ """
+ start, end = self._validate_slice(start, end)
+ if start == 0 and end == self.len:
+ self._reverse()
+ return
+ s = self._slice(start, end)
+ s._reverse()
+ self[start:end] = s
+
+ def set(self, value, pos=None):
+ """Set one or many bits to 1 or 0.
+
+ value -- If True bits are set to 1, otherwise they are set to 0.
+ pos -- Either a single bit position or an iterable of bit positions.
+ Negative numbers are treated in the same way as slice indices.
+ Defaults to the entire bitstring.
+
+ Raises IndexError if pos < -self.len or pos >= self.len.
+
+ """
+ f = self._set if value else self._unset
+ if pos is None:
+ pos = xrange(self.len)
+ try:
+ length = self.len
+ for p in pos:
+ if p < 0:
+ p += length
+ if not 0 <= p < length:
+ raise IndexError("Bit position {0} out of range.".format(p))
+ f(p)
+ except TypeError:
+ # Single pos
+ if pos < 0:
+ pos += self.len
+ if not 0 <= pos < length:
+ raise IndexError("Bit position {0} out of range.".format(pos))
+ f(pos)
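+ # An illustrative sketch (not part of the library source):
+ #
+ # >>> s = BitArray('0x0000')
+ # >>> s.set(1, [0, 15])
+ # >>> s.hex
+ # '8001'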
+
+ def invert(self, pos=None):
+ """Invert one or many bits from 0 to 1 or vice versa.
+
+ pos -- Either a single bit position or an iterable of bit positions.
+ Negative numbers are treated in the same way as slice indices.
+
+ Raises IndexError if pos < -self.len or pos >= self.len.
+
+ """
+ if pos is None:
+ self._invert_all()
+ return
+ if not isinstance(pos, collections.Iterable):
+ pos = (pos,)
+ length = self.len
+
+ for p in pos:
+ if p < 0:
+ p += length
+ if not 0 <= p < length:
+ raise IndexError("Bit position {0} out of range.".format(p))
+ self._invert(p)
+
+ def ror(self, bits, start=None, end=None):
+ """Rotate bits to the right in-place.
+
+ bits -- The number of bits to rotate by.
+ start -- Start of slice to rotate. Defaults to 0.
+ end -- End of slice to rotate. Defaults to self.len.
+
+ Raises ValueError if bits < 0.
+
+ """
+ if not self.len:
+ raise Error("Cannot rotate an empty bitstring.")
+ if bits < 0:
+ raise ValueError("Cannot rotate right by negative amount.")
+ start, end = self._validate_slice(start, end)
+ bits %= (end - start)
+ if not bits:
+ return
+ rhs = self._slice(end - bits, end)
+ self._delete(bits, end - bits)
+ self._insert(rhs, start)
+
+ def rol(self, bits, start=None, end=None):
+ """Rotate bits to the left in-place.
+
+ bits -- The number of bits to rotate by.
+ start -- Start of slice to rotate. Defaults to 0.
+ end -- End of slice to rotate. Defaults to self.len.
+
+ Raises ValueError if bits < 0.
+
+ """
+ if not self.len:
+ raise Error("Cannot rotate an empty bitstring.")
+ if bits < 0:
+ raise ValueError("Cannot rotate left by negative amount.")
+ start, end = self._validate_slice(start, end)
+ bits %= (end - start)
+ if not bits:
+ return
+ lhs = self._slice(start, start + bits)
+ self._delete(bits, start)
+ self._insert(lhs, end - bits)
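+ # An illustrative sketch of ror()/rol() (not part of the library source):
+ #
+ # >>> s = BitArray('0b0001')
+ # >>> s.ror(1)
+ # >>> s.bin
+ # '1000'
+ # >>> s.rol(2)
+ # >>> s.bin
+ # '0010'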
+
+ def byteswap(self, fmt=None, start=None, end=None, repeat=True):
+ """Change the endianness in-place. Return number of repeats of fmt done.
+
+ fmt -- A compact structure string, an integer number of bytes or
+ an iterable of integers. Defaults to 0, which byte reverses the
+ whole bitstring.
+ start -- Start bit position, defaults to 0.
+ end -- End bit position, defaults to self.len.
+ repeat -- If True (the default) the byte swapping pattern is repeated
+ as much as possible.
+
+ """
+ start, end = self._validate_slice(start, end)
+ if fmt is None or fmt == 0:
+ # reverse all of the whole bytes.
+ bytesizes = [(end - start) // 8]
+ elif isinstance(fmt, numbers.Integral):
+ if fmt < 0:
+ raise ValueError("Improper byte length {0}.".format(fmt))
+ bytesizes = [fmt]
+ elif isinstance(fmt, basestring):
+ m = STRUCT_PACK_RE.match(fmt)
+ if not m:
+ raise ValueError("Cannot parse format string {0}.".format(fmt))
+ # Split the format string into a list of 'q', '4h' etc.
+ formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
+ # Now deal with multiplicative factors, 4h -> hhhh etc.
+ bytesizes = []
+ for f in formatlist:
+ if len(f) == 1:
+ bytesizes.append(PACK_CODE_SIZE[f])
+ else:
+ bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1]))
+ elif isinstance(fmt, collections.Iterable):
+ bytesizes = fmt
+ for bytesize in bytesizes:
+ if not isinstance(bytesize, numbers.Integral) or bytesize < 0:
+ raise ValueError("Improper byte length {0}.".format(bytesize))
+ else:
+ raise TypeError("Format must be an integer, string or iterable.")
+
+ repeats = 0
+ totalbitsize = 8 * sum(bytesizes)
+ if not totalbitsize:
+ return 0
+ if repeat:
+ # Try to repeat up to the end of the bitstring.
+ finalbit = end
+ else:
+ # Just try one (set of) byteswap(s).
+ finalbit = start + totalbitsize
+ for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize):
+ bytestart = patternend - totalbitsize
+ for bytesize in bytesizes:
+ byteend = bytestart + bytesize * 8
+ self._reversebytes(bytestart, byteend)
+ bytestart += bytesize * 8
+ repeats += 1
+ return repeats
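+ # An illustrative sketch (not part of the library source): byteswap(2)
+ # reverses each two-byte group and returns the number of groups swapped.
+ #
+ # >>> s = BitArray('0x00112233')
+ # >>> s.byteswap(2)
+ # 2
+ # >>> s.hex
+ # '11003322'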
+
+ def clear(self):
+ """Remove all bits, reset to zero length."""
+ self._clear()
+
+ def copy(self):
+ """Return a copy of the bitstring."""
+ return self._copy()
+
+ int = property(Bits._getint, Bits._setint,
+ doc="""The bitstring as a two's complement signed int. Read and write.
+ """)
+ uint = property(Bits._getuint, Bits._setuint,
+ doc="""The bitstring as a two's complement unsigned int. Read and write.
+ """)
+ float = property(Bits._getfloat, Bits._setfloat,
+ doc="""The bitstring as a floating point number. Read and write.
+ """)
+ intbe = property(Bits._getintbe, Bits._setintbe,
+ doc="""The bitstring as a two's complement big-endian signed int. Read and write.
+ """)
+ uintbe = property(Bits._getuintbe, Bits._setuintbe,
+ doc="""The bitstring as a two's complement big-endian unsigned int. Read and write.
+ """)
+ floatbe = property(Bits._getfloat, Bits._setfloat,
+ doc="""The bitstring as a big-endian floating point number. Read and write.
+ """)
+ intle = property(Bits._getintle, Bits._setintle,
+ doc="""The bitstring as a two's complement little-endian signed int. Read and write.
+ """)
+ uintle = property(Bits._getuintle, Bits._setuintle,
+ doc="""The bitstring as a two's complement little-endian unsigned int. Read and write.
+ """)
+ floatle = property(Bits._getfloatle, Bits._setfloatle,
+ doc="""The bitstring as a little-endian floating point number. Read and write.
+ """)
+ intne = property(Bits._getintne, Bits._setintne,
+ doc="""The bitstring as a two's complement native-endian signed int. Read and write.
+ """)
+ uintne = property(Bits._getuintne, Bits._setuintne,
+ doc="""The bitstring as a two's complement native-endian unsigned int. Read and write.
+ """)
+ floatne = property(Bits._getfloatne, Bits._setfloatne,
+ doc="""The bitstring as a native-endian floating point number. Read and write.
+ """)
+ ue = property(Bits._getue, Bits._setue,
+ doc="""The bitstring as an unsigned exponential-Golomb code. Read and write.
+ """)
+ se = property(Bits._getse, Bits._setse,
+ doc="""The bitstring as a signed exponential-Golomb code. Read and write.
+ """)
+ uie = property(Bits._getuie, Bits._setuie,
+ doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read and write.
+ """)
+ sie = property(Bits._getsie, Bits._setsie,
+ doc="""The bitstring as a signed interleaved exponential-Golomb code. Read and write.
+ """)
+ hex = property(Bits._gethex, Bits._sethex,
+ doc="""The bitstring as a hexadecimal string. Read and write.
+ """)
+ bin = property(Bits._getbin, Bits._setbin_safe,
+ doc="""The bitstring as a binary string. Read and write.
+ """)
+ oct = property(Bits._getoct, Bits._setoct,
+ doc="""The bitstring as an octal string. Read and write.
+ """)
+ bool = property(Bits._getbool, Bits._setbool,
+ doc="""The bitstring as a bool (True or False). Read and write.
+ """)
+ bytes = property(Bits._getbytes, Bits._setbytes_safe,
+ doc="""The bitstring as a ordinary string. Read and write.
+ """)
+
+
+
+class ConstBitStream(Bits):
+ """A container or stream holding an immutable sequence of bits.
+
+ For a mutable container use the BitStream class instead.
+
+ Methods inherited from Bits:
+
+ all() -- Check if all specified bits are set to 1 or 0.
+ any() -- Check if any of specified bits are set to 1 or 0.
+ count() -- Count the number of bits set to 1 or 0.
+ cut() -- Create generator of constant sized chunks.
+ endswith() -- Return whether the bitstring ends with a sub-bitstring.
+ find() -- Find a sub-bitstring in the current bitstring.
+ findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
+ join() -- Join bitstrings together using current bitstring.
+ rfind() -- Seek backwards to find a sub-bitstring.
+ split() -- Create generator of chunks split by a delimiter.
+ startswith() -- Return whether the bitstring starts with a sub-bitstring.
+ tobytes() -- Return bitstring as bytes, padding if needed.
+ tofile() -- Write bitstring to file, padding if needed.
+ unpack() -- Interpret bits using format string.
+
+ Other methods:
+
+ bytealign() -- Align to next byte boundary.
+ peek() -- Peek at and interpret next bits as a single item.
+ peeklist() -- Peek at and interpret next bits as a list of items.
+ read() -- Read and interpret next bits as a single item.
+ readlist() -- Read and interpret next bits as a list of items.
+
+ Special methods:
+
+ Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^.
+
+ Properties:
+
+ bin -- The bitstring as a binary string.
+ bool -- For single bit bitstrings, interpret as True or False.
+ bytepos -- The current byte position in the bitstring.
+ bytes -- The bitstring as a bytes object.
+ float -- Interpret as a floating point number.
+ floatbe -- Interpret as a big-endian floating point number.
+ floatle -- Interpret as a little-endian floating point number.
+ floatne -- Interpret as a native-endian floating point number.
+ hex -- The bitstring as a hexadecimal string.
+ int -- Interpret as a two's complement signed integer.
+ intbe -- Interpret as a big-endian signed integer.
+ intle -- Interpret as a little-endian signed integer.
+ intne -- Interpret as a native-endian signed integer.
+ len -- Length of the bitstring in bits.
+ oct -- The bitstring as an octal string.
+ pos -- The current bit position in the bitstring.
+ se -- Interpret as a signed exponential-Golomb code.
+ ue -- Interpret as an unsigned exponential-Golomb code.
+ sie -- Interpret as a signed interleaved exponential-Golomb code.
+ uie -- Interpret as an unsigned interleaved exponential-Golomb code.
+ uint -- Interpret as a two's complement unsigned integer.
+ uintbe -- Interpret as a big-endian unsigned integer.
+ uintle -- Interpret as a little-endian unsigned integer.
+ uintne -- Interpret as a native-endian unsigned integer.
+
+ """
+
+ __slots__ = ('_pos',)
+
+ def __init__(self, auto=None, length=None, offset=None, **kwargs):
+ """Either specify an 'auto' initialiser:
+ auto -- a string of comma separated tokens, an integer, a file object,
+ a bytearray, a boolean iterable or another bitstring.
+
+ Or initialise via **kwargs with one (and only one) of:
+ bytes -- raw data as a string, for example read from a binary file.
+ bin -- binary string representation, e.g. '0b001010'.
+ hex -- hexadecimal string representation, e.g. '0x2ef'.
+ oct -- octal string representation, e.g. '0o777'.
+ uint -- an unsigned integer.
+ int -- a signed integer.
+ float -- a floating point number.
+ uintbe -- an unsigned big-endian whole byte integer.
+ intbe -- a signed big-endian whole byte integer.
+ floatbe -- a big-endian floating point number.
+ uintle -- an unsigned little-endian whole byte integer.
+ intle -- a signed little-endian whole byte integer.
+ floatle -- a little-endian floating point number.
+ uintne -- an unsigned native-endian whole byte integer.
+ intne -- a signed native-endian whole byte integer.
+ floatne -- a native-endian floating point number.
+ se -- a signed exponential-Golomb code.
+ ue -- an unsigned exponential-Golomb code.
+ sie -- a signed interleaved exponential-Golomb code.
+ uie -- an unsigned interleaved exponential-Golomb code.
+ bool -- a boolean (True or False).
+ filename -- a file which will be opened in binary read-only mode.
+
+ Other keyword arguments:
+ length -- length of the bitstring in bits, if needed and appropriate.
+ It must be supplied for all integer and float initialisers.
+ offset -- bit offset to the data. These offset bits are
+ ignored and this is intended for use when
+ initialising using 'bytes' or 'filename'.
+
+ """
+ self._pos = 0
+
+ def __new__(cls, auto=None, length=None, offset=None, **kwargs):
+ x = super(ConstBitStream, cls).__new__(cls)
+ x._initialise(auto, length, offset, **kwargs)
+ return x
+
+ def _setbytepos(self, bytepos):
+ """Move to absolute byte-aligned position in stream."""
+ self._setbitpos(bytepos * 8)
+
+ def _getbytepos(self):
+ """Return the current position in the stream in bytes. Must be byte aligned."""
+ if self._pos % 8:
+ raise ByteAlignError("Not byte aligned in _getbytepos().")
+ return self._pos // 8
+
+ def _setbitpos(self, pos):
+ """Move to absolute postion bit in bitstream."""
+ if pos < 0:
+ raise ValueError("Bit position cannot be negative.")
+ if pos > self.len:
+ raise ValueError("Cannot seek past the end of the data.")
+ self._pos = pos
+
+ def _getbitpos(self):
+ """Return the current position in the stream in bits."""
+ return self._pos
+
+ def _clear(self):
+ Bits._clear(self)
+ self._pos = 0
+
+ def __copy__(self):
+ """Return a new copy of the ConstBitStream for the copy module."""
+ # Note that if you want a new copy (different ID), use _copy instead.
+ # The copy can use the same datastore as it's immutable.
+ s = ConstBitStream()
+ s._datastore = self._datastore
+ # Reset the bit position, don't copy it.
+ s._pos = 0
+ return s
+
+ def __add__(self, bs):
+ """Concatenate bitstrings and return new bitstring.
+
+ bs -- the bitstring to append.
+
+ """
+ s = Bits.__add__(self, bs)
+ s._pos = 0
+ return s
+
+ def read(self, fmt):
+ """Interpret next bits according to the format string and return result.
+
+ fmt -- Token string describing how to interpret the next bits.
+
+ Token examples: 'int:12' : 12 bits as a signed integer
+ 'uint:8' : 8 bits as an unsigned integer
+ 'float:64' : 8 bytes as a big-endian float
+ 'intbe:16' : 2 bytes as a big-endian signed integer
+ 'uintbe:16' : 2 bytes as a big-endian unsigned integer
+ 'intle:32' : 4 bytes as a little-endian signed integer
+ 'uintle:32' : 4 bytes as a little-endian unsigned integer
+ 'floatle:64': 8 bytes as a little-endian float
+ 'intne:24' : 3 bytes as a native-endian signed integer
+ 'uintne:24' : 3 bytes as a native-endian unsigned integer
+ 'floatne:32': 4 bytes as a native-endian float
+ 'hex:80' : 80 bits as a hex string
+ 'oct:9' : 9 bits as an octal string
+ 'bin:1' : single bit binary string
+ 'ue' : next bits as unsigned exp-Golomb code
+ 'se' : next bits as signed exp-Golomb code
+ 'uie' : next bits as unsigned interleaved exp-Golomb code
+ 'sie' : next bits as signed interleaved exp-Golomb code
+ 'bits:5' : 5 bits as a bitstring
+ 'bytes:10' : 10 bytes as a bytes object
+ 'bool' : 1 bit as a bool
+ 'pad:3' : 3 bits of padding to ignore - returns None
+
+ fmt may also be an integer, which will be treated like the 'bits' token.
+
+ The position in the bitstring is advanced to after the read items.
+
+ Raises ReadError if not enough bits are available.
+ Raises ValueError if the format is not understood.
+
+ """
+ if isinstance(fmt, numbers.Integral):
+ if fmt < 0:
+ raise ValueError("Cannot read negative amount.")
+ if fmt > self.len - self._pos:
+ raise ReadError("Cannot read {0} bits, only {1} available.",
+ fmt, self.len - self._pos)
+ bs = self._slice(self._pos, self._pos + fmt)
+ self._pos += fmt
+ return bs
+ p = self._pos
+ _, token = tokenparser(fmt)
+ if len(token) != 1:
+ self._pos = p
+ raise ValueError("Format string should be a single token, not {0} "
+ "tokens - use readlist() instead.".format(len(token)))
+ name, length, _ = token[0]
+ if length is None:
+ length = self.len - self._pos
+ value, self._pos = self._readtoken(name, self._pos, length)
+ return value
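+ # An illustrative sketch (not part of the library source):
+ #
+ # >>> s = ConstBitStream('0x4732')
+ # >>> s.read('uint:8')
+ # 71
+ # >>> s.read('hex:8')
+ # '32'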
+
+ def readlist(self, fmt, **kwargs):
+ """Interpret next bits according to format string(s) and return list.
+
+ fmt -- A single string or list of strings with comma separated tokens
+ describing how to interpret the next bits in the bitstring. Items
+ can also be integers, for reading a new bitstring of the given length.
+ kwargs -- A dictionary or keyword-value pairs - the keywords used in the
+ format string will be replaced with their given value.
+
+ The position in the bitstring is advanced to after the read items.
+
+ Raises ReadError if not enough bits are available.
+ Raises ValueError if the format is not understood.
+
+ See the docstring for 'read' for token examples. 'pad' tokens are skipped
+ and not added to the returned list.
+
+ >>> h, b1, b2 = s.readlist('hex:20, bin:5, bin:3')
+ >>> i, bs1, bs2 = s.readlist(['uint:12', 10, 10])
+
+ """
+ value, self._pos = self._readlist(fmt, self._pos, **kwargs)
+ return value
+
+ def readto(self, bs, bytealigned=None):
+ """Read up to and including next occurrence of bs and return result.
+
+ bs -- The bitstring to find. An integer is not permitted.
+ bytealigned -- If True the bitstring will only be
+ found on byte boundaries.
+
+ Raises ValueError if bs is empty.
+ Raises ReadError if bs is not found.
+
+ """
+ if isinstance(bs, numbers.Integral):
+ raise ValueError("Integers cannot be searched for")
+ bs = Bits(bs)
+ oldpos = self._pos
+ p = self.find(bs, self._pos, bytealigned=bytealigned)
+ if not p:
+ raise ReadError("Substring not found")
+ self._pos += bs.len
+ return self._slice(oldpos, self._pos)
+
+ def peek(self, fmt):
+ """Interpret next bits according to format string and return result.
+
+ fmt -- Token string describing how to interpret the next bits.
+
+ The position in the bitstring is not changed. If a token has no
+ explicit length then all bits to the end of the bitstring will be used.
+
+ Raises ReadError if not enough bits are available.
+ Raises ValueError if the format is not understood.
+
+ See the docstring for 'read' for token examples.
+
+ """
+ pos_before = self._pos
+ value = self.read(fmt)
+ self._pos = pos_before
+ return value
+
+ def peeklist(self, fmt, **kwargs):
+ """Interpret next bits according to format string(s) and return list.
+
+ fmt -- One or more strings with comma separated tokens describing
+ how to interpret the next bits in the bitstring.
+ kwargs -- A dictionary or keyword-value pairs - the keywords used in the
+ format string will be replaced with their given value.
+
+ The position in the bitstring is not changed. If a token has no
+ explicit length then all bits to the end of the bitstring will be used.
+
+ Raises ReadError if not enough bits are available.
+ Raises ValueError if the format is not understood.
+
+ See the docstring for 'read' for token examples.
+
+ """
+ pos = self._pos
+ return_values = self.readlist(fmt, **kwargs)
+ self._pos = pos
+ return return_values
+
+ def bytealign(self):
+ """Align to next byte and return number of skipped bits.
+
+ Raises ValueError if the end of the bitstring is reached before
+ aligning to the next byte.
+
+ """
+ skipped = (8 - (self._pos % 8)) % 8
+ self.pos += self._offset + skipped
+ assert self._assertsanity()
+ return skipped
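+ # An illustrative sketch (not part of the library source), assuming an
+ # in-memory bitstring with a zero byte offset:
+ #
+ # >>> s = ConstBitStream('0x00ff')
+ # >>> s.pos = 3
+ # >>> s.bytealign()
+ # 5
+ # >>> s.pos
+ # 8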
+
+ pos = property(_getbitpos, _setbitpos,
+ doc="""The position in the bitstring in bits. Read and write.
+ """)
+ bitpos = property(_getbitpos, _setbitpos,
+ doc="""The position in the bitstring in bits. Read and write.
+ """)
+ bytepos = property(_getbytepos, _setbytepos,
+ doc="""The position in the bitstring in bytes. Read and write.
+ """)
+
+
+
+
+
+class BitStream(ConstBitStream, BitArray):
+ """A container or stream holding a mutable sequence of bits
+
+ Subclass of the ConstBitStream and BitArray classes. Inherits all of
+ their methods.
+
+ Methods:
+
+ all() -- Check if all specified bits are set to 1 or 0.
+ any() -- Check if any of specified bits are set to 1 or 0.
+ append() -- Append a bitstring.
+ bytealign() -- Align to next byte boundary.
+ byteswap() -- Change byte endianness in-place.
+ count() -- Count the number of bits set to 1 or 0.
+ cut() -- Create generator of constant sized chunks.
+ endswith() -- Return whether the bitstring ends with a sub-bitstring.
+ find() -- Find a sub-bitstring in the current bitstring.
+ findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
+ insert() -- Insert a bitstring.
+ invert() -- Flip bit(s) between one and zero.
+ join() -- Join bitstrings together using current bitstring.
+ overwrite() -- Overwrite a section with a new bitstring.
+ peek() -- Peek at and interpret next bits as a single item.
+ peeklist() -- Peek at and interpret next bits as a list of items.
+ prepend() -- Prepend a bitstring.
+ read() -- Read and interpret next bits as a single item.
+ readlist() -- Read and interpret next bits as a list of items.
+ replace() -- Replace occurrences of one bitstring with another.
+ reverse() -- Reverse bits in-place.
+ rfind() -- Seek backwards to find a sub-bitstring.
+ rol() -- Rotate bits to the left.
+ ror() -- Rotate bits to the right.
+ set() -- Set bit(s) to 1 or 0.
+ split() -- Create generator of chunks split by a delimiter.
+ startswith() -- Return whether the bitstring starts with a sub-bitstring.
+ tobytes() -- Return bitstring as bytes, padding if needed.
+ tofile() -- Write bitstring to file, padding if needed.
+ unpack() -- Interpret bits using format string.
+
+ Special methods:
+
+ Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^=
+ in addition to [], ==, !=, +, *, ~, <<, >>, &, | and ^.
+
+ Properties:
+
+ bin -- The bitstring as a binary string.
+ bool -- For single bit bitstrings, interpret as True or False.
+ bytepos -- The current byte position in the bitstring.
+ bytes -- The bitstring as a bytes object.
+ float -- Interpret as a floating point number.
+ floatbe -- Interpret as a big-endian floating point number.
+ floatle -- Interpret as a little-endian floating point number.
+ floatne -- Interpret as a native-endian floating point number.
+ hex -- The bitstring as a hexadecimal string.
+ int -- Interpret as a two's complement signed integer.
+ intbe -- Interpret as a big-endian signed integer.
+ intle -- Interpret as a little-endian signed integer.
+ intne -- Interpret as a native-endian signed integer.
+ len -- Length of the bitstring in bits.
+ oct -- The bitstring as an octal string.
+ pos -- The current bit position in the bitstring.
+ se -- Interpret as a signed exponential-Golomb code.
+ ue -- Interpret as an unsigned exponential-Golomb code.
+ sie -- Interpret as a signed interleaved exponential-Golomb code.
+ uie -- Interpret as an unsigned interleaved exponential-Golomb code.
+ uint -- Interpret as a two's complement unsigned integer.
+ uintbe -- Interpret as a big-endian unsigned integer.
+ uintle -- Interpret as a little-endian unsigned integer.
+ uintne -- Interpret as a native-endian unsigned integer.
+
+ """
+
+ __slots__ = ()
+
+ # As BitStream objects are mutable, we shouldn't allow them to be hashed.
+ __hash__ = None
+
+ def __init__(self, auto=None, length=None, offset=None, **kwargs):
+ """Either specify an 'auto' initialiser:
+ auto -- a string of comma separated tokens, an integer, a file object,
+ a bytearray, a boolean iterable or another bitstring.
+
+ Or initialise via **kwargs with one (and only one) of:
+ bytes -- raw data as a string, for example read from a binary file.
+ bin -- binary string representation, e.g. '0b001010'.
+ hex -- hexadecimal string representation, e.g. '0x2ef'.
+ oct -- octal string representation, e.g. '0o777'.
+ uint -- an unsigned integer.
+ int -- a signed integer.
+ float -- a floating point number.
+ uintbe -- an unsigned big-endian whole byte integer.
+ intbe -- a signed big-endian whole byte integer.
+ floatbe -- a big-endian floating point number.
+ uintle -- an unsigned little-endian whole byte integer.
+ intle -- a signed little-endian whole byte integer.
+ floatle -- a little-endian floating point number.
+ uintne -- an unsigned native-endian whole byte integer.
+ intne -- a signed native-endian whole byte integer.
+ floatne -- a native-endian floating point number.
+ se -- a signed exponential-Golomb code.
+ ue -- an unsigned exponential-Golomb code.
+ sie -- a signed interleaved exponential-Golomb code.
+ uie -- an unsigned interleaved exponential-Golomb code.
+ bool -- a boolean (True or False).
+ filename -- a file which will be opened in binary read-only mode.
+
+ Other keyword arguments:
+ length -- length of the bitstring in bits, if needed and appropriate.
+ It must be supplied for all integer and float initialisers.
+ offset -- bit offset to the data. These offset bits are
+ ignored and this is intended for use when
+ initialising using 'bytes' or 'filename'.
+
+ """
+ self._pos = 0
+ # For mutable BitStreams we always read in files to memory:
+ if not isinstance(self._datastore, ByteStore):
+ self._ensureinmemory()
+
+ def __new__(cls, auto=None, length=None, offset=None, **kwargs):
+ x = super(BitStream, cls).__new__(cls)
+ x._initialise(auto, length, offset, **kwargs)
+ return x
+
+ def __copy__(self):
+ """Return a new copy of the BitStream."""
+ s_copy = BitStream()
+ s_copy._pos = 0
+ if not isinstance(self._datastore, ByteStore):
+ # Let them both point to the same (invariant) array.
+ # If either gets modified then at that point they'll be read into memory.
+ s_copy._datastore = self._datastore
+ else:
+ s_copy._datastore = ByteStore(self._datastore._rawarray[:],
+ self._datastore.bitlength,
+ self._datastore.offset)
+ return s_copy
+
+ def prepend(self, bs):
+ """Prepend a bitstring to the current bitstring.
+
+ bs -- The bitstring to prepend.
+
+ """
+ bs = self._converttobitstring(bs)
+ self._prepend(bs)
+ self._pos += bs.len
+
+
+def pack(fmt, *values, **kwargs):
+ """Pack the values according to the format string and return a new BitStream.
+
+ fmt -- A single string or a list of strings with comma separated tokens
+ describing how to create the BitStream.
+ values -- Zero or more values to pack according to the format.
+ kwargs -- A dictionary or keyword-value pairs - the keywords used in the
+ format string will be replaced with their given value.
+
+ Token examples: 'int:12' : 12 bits as a signed integer
+ 'uint:8' : 8 bits as an unsigned integer
+ 'float:64' : 8 bytes as a big-endian float
+ 'intbe:16' : 2 bytes as a big-endian signed integer
+ 'uintbe:16' : 2 bytes as a big-endian unsigned integer
+ 'intle:32' : 4 bytes as a little-endian signed integer
+ 'uintle:32' : 4 bytes as a little-endian unsigned integer
+ 'floatle:64': 8 bytes as a little-endian float
+ 'intne:24' : 3 bytes as a native-endian signed integer
+ 'uintne:24' : 3 bytes as a native-endian unsigned integer
+ 'floatne:32': 4 bytes as a native-endian float
+ 'hex:80' : 80 bits as a hex string
+ 'oct:9' : 9 bits as an octal string
+ 'bin:1' : single bit binary string
+ 'ue' / 'uie': next bits as unsigned exp-Golomb code
+ 'se' / 'sie': next bits as signed exp-Golomb code
+ 'bits:5' : 5 bits as a bitstring object
+ 'bytes:10' : 10 bytes as a bytes object
+ 'bool' : 1 bit as a bool
+ 'pad:3' : 3 zero bits as padding
+
+ >>> s = pack('uint:12, bits', 100, '0xffe')
+ >>> t = pack(['bits', 'bin:3'], s, '111')
+ >>> u = pack('uint:8=a, uint:8=b, uint:55=a', a=6, b=44)
+
+ """
+ tokens = []
+ if isinstance(fmt, basestring):
+ fmt = [fmt]
+ try:
+ for f_item in fmt:
+ _, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys())))
+ tokens.extend(tkns)
+ except ValueError as e:
+ raise CreationError(*e.args)
+ value_iter = iter(values)
+ s = BitStream()
+ try:
+ for name, length, value in tokens:
+ # If the value is in the kwd dictionary then it takes precedence.
+ if value in kwargs:
+ value = kwargs[value]
+ # If the length is in the kwd dictionary then use that too.
+ if length in kwargs:
+ length = kwargs[length]
+ # Also if we just have a dictionary name then we want to use it
+ if name in kwargs and length is None and value is None:
+ s.append(kwargs[name])
+ continue
+ if length is not None:
+ length = int(length)
+ if value is None and name != 'pad':
+ # Take the next value from the ones provided
+ value = next(value_iter)
+ s._append(BitStream._init_with_token(name, length, value))
+ except StopIteration:
+ raise CreationError("Not enough parameters present to pack according to the "
+ "format. {0} values are needed.", len(tokens))
+ try:
+ next(value_iter)
+ except StopIteration:
+ # Good, we've used up all the *values.
+ return s
+ raise CreationError("Too many parameters present to pack according to the format.")
+
+
+# Aliases for backward compatibility
+ConstBitArray = Bits
+BitString = BitStream
+
+__all__ = ['ConstBitArray', 'ConstBitStream', 'BitStream', 'BitArray',
+ 'Bits', 'BitString', 'pack', 'Error', 'ReadError',
+ 'InterpretError', 'ByteAlignError', 'CreationError', 'bytealigned']
diff --git a/python/bitstring/doc/bitstring_manual.pdf b/python/bitstring/doc/bitstring_manual.pdf
new file mode 100644
index 000000000..dc17385b7
--- /dev/null
+++ b/python/bitstring/doc/bitstring_manual.pdf
Binary files differ
diff --git a/python/bitstring/release_notes.txt b/python/bitstring/release_notes.txt
new file mode 100644
index 000000000..8cad4ca9d
--- /dev/null
+++ b/python/bitstring/release_notes.txt
@@ -0,0 +1,1523 @@
+--------------------------------
+bitstring module version history
+--------------------------------
+
+---------------------------------------
+March 4th 2014: version 3.1.3 released
+---------------------------------------
+This is another bug fix release.
+
+* Fix for problem with prepend for bitstrings with byte offsets in their data store.
+
+---------------------------------------
+April 18th 2013: version 3.1.2 released
+---------------------------------------
+This is another bug fix release.
+
+* Fix for problem where unpacking bytes would be eight times too long.
+
+---------------------------------------
+March 21st 2013: version 3.1.1 released
+---------------------------------------
+This is a bug fix release.
+
+* Fix for problem where concatenating bitstrings sometimes modified the method's arguments.
+
+------------------------------------------
+February 26th 2013: version 3.1.0 released
+------------------------------------------
+This is a minor release with a couple of new features and some bug fixes.
+
+New 'pad' token
+---------------
+
+This token can be used in reads and when packing/unpacking to indicate that
+you don't care about the contents of these bits. Any padding bits will just
+be skipped over when reading/unpacking or zero-filled when packing.
+
+ >>> a, b = s.readlist('pad:5, uint:3, pad:1, uint:3')
+
+Here only two items are returned in the list - the padding bits are ignored.
+
+New clear and copy convenience methods
+--------------------------------------
+
+These methods have been introduced in Python 3.3 for lists and bytearrays,
+as more obvious ways of clearing and copying, and we mirror that change here.
+
+t = s.copy() is equivalent to t = s[:], and s.clear() is equivalent to del s[:].
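+
+For example (an illustrative sketch):
+
+ >>> t = s.copy() # the same as t = s[:]
+ >>> s.clear() # the same as del s[:]
+ >>> s.len
+ 0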
+
+Other changes
+-------------
+
+* Some bug fixes.
+
+-----------------------------------------
+February 7th 2012: version 3.0.2 released
+-----------------------------------------
+This is a minor update that fixes a few bugs.
+
+* Fix for subclasses of bitstring classes behaving strangely (Issue 121).
+* Fix for excessive memory usage in rare cases (Issue 120).
+* Fixes for slicing edge cases.
+
+There has also been a reorganisation of the code to return it to a single
+'bitstring.py' file rather than the package that has been used for the past
+several releases. This change shouldn't affect users directly.
+
+------------------------------------------
+November 21st 2011: version 3.0.1 released
+------------------------------------------
+This release fixed a small but very visible bug in bitstring printing.
+
+------------------------------------------
+November 21st 2011: version 3.0.0 released
+------------------------------------------
+This is a major release which breaks backward compatibility in a few places.
+
+Backwardly incompatible changes
+===============================
+
+Hex, oct and bin properties don't have leading 0x, 0o and 0b
+------------------------------------------------------------
+
+If you ask for the hex, octal or binary representations of a bitstring then
+they will no longer be prefixed with '0x', '0o' or '0b'. This was done as it
+was noticed that the first thing a lot of user code does after getting these
+representations was to cut off the first two characters before further
+processing.
+
+ >>> a = BitArray('0x123')
+ >>> a.hex, a.oct, a.bin
+ ('123', '0443', '000100100011')
+
+Previously this would have returned ('0x123', '0o0443', '0b000100100011').
+
+This change might require some recoding, but it should all be simplifications.
+
+ConstBitArray renamed to Bits
+-----------------------------
+
+Previously Bits was an alias for ConstBitStream (for backward compatibility).
+This has now changed so that Bits and BitArray loosely correspond to the
+built-in types bytes and bytearray.
+
+If you were using streaming/reading methods on a Bits object then you will
+have to change it to a ConstBitStream.
+
+The ConstBitArray name is kept as an alias for Bits.
+
+Stepping in slices has conventional meaning
+-------------------------------------------
+
+The step parameter in __getitem__, __setitem__ and __delitem__ used to act
+as a multiplier for the start and stop parameters. No one seemed to use it
+though and so it has now reverted to the conventional meaning for containers.
+
+If you are using step then recoding is simple: s[a:b:c] becomes s[a*c:b*c].
+
+Some examples of the new usage:
+
+ >>> s = BitArray('0x0000')
+ >>> s[::4] = [1, 1, 1, 1]
+ >>> s.hex
+ '8888'
+ >>> del s[8::2]
+ >>> s.hex
+ '880'
+
+
+New features
+============
+
+New readto method
+-----------------
+
+This method is a mix between a find and a read - it searches for a bitstring
+and then reads up to and including it. For example:
+
+ >>> s = ConstBitStream('0x47000102034704050647')
+ >>> s.readto('0x47', bytealigned=True)
+ BitStream('0x47')
+ >>> s.readto('0x47', bytealigned=True)
+ BitStream('0x0001020347')
+ >>> s.readto('0x47', bytealigned=True)
+ BitStream('0x04050647')
+
+pack function accepts an iterable as its format
+-----------------------------------------------
+
+Previously only a string was accepted as the format in the pack function.
+This was an oversight as it broke the symmetry between pack and unpack.
+Now you can use formats like this:
+
+ fmt = ['hex:8', 'bin:3']
+ a = pack(fmt, '47', '001')
+ a.unpack(fmt)
+
+
+--------------------------------------
+June 18th 2011: version 2.2.0 released
+--------------------------------------
+This is a minor upgrade with a couple of new features.
+
+New interleaved exponential-Golomb interpretations
+--------------------------------------------------
+
+New bit interpretations for interleaved exponential-Golomb (as used in the
+Dirac video codec) are supplied via 'uie' and 'sie':
+
+ >>> s = BitArray(uie=41)
+ >>> s.uie
+ 41
+ >>> s.bin
+ '0b00010001001'
+
+These are pretty similar to the non-interleaved versions - see the manual
+for more details. Credit goes to Paul Sargent for the patch.
+
+New package-level bytealigned variable
+--------------------------------------
+
+A number of methods take a 'bytealigned' parameter to indicate that they
+should only work on byte boundaries (e.g. find, replace, split). Previously
+this parameter defaulted to 'False'. Instead it now defaults to
+'bitstring.bytealigned', which itself defaults to 'False', but can be changed
+to modify the default behaviour of the methods. For example:
+
+ >>> a = BitArray('0x00 ff 0f ff')
+ >>> a.find('0x0f')
+ (4,) # found first not on a byte boundary
+ >>> a.find('0x0f', bytealigned=True)
+ (16,) # forced looking only on byte boundaries
+ >>> bitstring.bytealigned = True # Change default behaviour
+ >>> a.find('0x0f')
+ (16,)
+ >>> a.find('0x0f', bytealigned=False)
+ (4,)
+
+If you're only working with bytes then this can help avoid some errors and
+save some typing!
+
+Other changes
+-------------
+
+* Fix for Python 3.2, correcting for a change to the binascii module.
+* Fix for bool initialisation from 0 or 1.
+* Efficiency improvements, including interning strategy.
+
+------------------------------------------
+February 23rd 2011: version 2.1.1 released
+------------------------------------------
+This is a release to fix a couple of bugs that were introduced in 2.1.0.
+
+* Bug fix: Reading using the 'bytes' token had been broken (Issue 102).
+* Fixed problem using some methods on ConstBitArrays.
+* Better exception handling for tokens missing values.
+* Some performance improvements.
+
+-----------------------------------------
+January 23rd 2011: version 2.1.0 released
+-----------------------------------------
+
+New class hierarchy introduced with simpler classes
+---------------------------------------------------
+Previously there were just two classes, the immutable Bits which was the base
+class for the mutable BitString class. Both of these classes have the concept
+of a bit position, from which reads etc. take place so that the bitstring could
+be treated as if it were a file or stream.
+
+Two simpler classes have now been added which are purely bit containers and
+don't have a bit position. These are called ConstBitArray and BitArray. As you
+can guess the former is an immutable version of the latter.
+
+The other classes have also been renamed to better reflect their capabilities.
+Instead of BitString you can use BitStream, and instead of Bits you can use
+ConstBitStream. The old names are kept as aliases for backward compatibility.
+
+The class hierarchy is:
+
+ ConstBitArray
+ / \
+ / \
+ BitArray ConstBitStream (formerly Bits)
+ \ /
+ \ /
+ BitStream (formerly BitString)
+
+
+Other changes
+-------------
+A lot of internal reorganisation has taken place since the previous version,
+most of which won't be noticed by the end user. Some things you might see are:
+
+* New package structure. Previous versions have been a single file for the
+ module and another for the unit tests. The module is now split into many
+ more files so it can't be used just by copying bitstring.py any more.
+* To run the unit tests there is now a script called runtests.py in the test
+ directory.
+* File-based bitstrings are now implemented in terms of an mmap. This should
+ be just an implementation detail, but unfortunately for 32-bit versions of
+ Python this creates a limit of 4GB on the files that can be used. The
+ workaround is either to get a 64-bit Python, or just stick with version 2.0.
+* The ConstBitArray and ConstBitStream classes no longer copy byte data when
+ a slice or a read takes place, they just take a reference. This is mostly
+ a very nice optimisation, but there are occasions where it could have an
+ adverse effect. For example if a very large bitstring is created, a small
+ slice taken and the original deleted. The byte data from the large
+ bitstring would still be retained in memory.
+* Optimisations. Once again this version should be faster than the last.
+ The module is still pure Python but some of the reorganisation was to make
+ it more feasible to put some of the code into Cython or similar, so
+ hopefully more speed will be on the way.
+
+--------------------------------------
+July 26th 2010: version 2.0.3 released
+--------------------------------------
+* Bug fix: Using peek and read for a single bit now returns a new bitstring
+ as was intended, rather than the old behaviour of returning a bool.
+* Removed HTML docs from source archive - better to use the online version.
+
+--------------------------------------
+July 25th 2010: version 2.0.2 released
+--------------------------------------
+This is a major release, with a number of backwardly incompatible changes.
+The main change is the removal of many methods, all of which have simple
+alternatives. Other changes are quite minor but may need some recoding.
+
+There are a few new features, most of which have been made to help the
+streamlining of the API. As always there are performance improvements and
+some API changes were made purely with future performance in mind.
+
+The backwardly incompatible changes are:
+-----------------------------------------
+* Methods removed.
+
+About half of the class methods have been removed from the API. They all have
+simple alternatives, so what remains is more powerful and easier to remember.
+The removed methods are listed here on the left, with their equivalent
+replacements on the right:
+
+s.advancebit() -> s.pos += 1
+s.advancebits(bits) -> s.pos += bits
+s.advancebyte() -> s.pos += 8
+s.advancebytes(bytes) -> s.pos += 8*bytes
+s.allunset([a, b]) -> s.all(False, [a, b])
+s.anyunset([a, b]) -> s.any(False, [a, b])
+s.delete(bits, pos) -> del s[pos:pos+bits]
+s.peekbit() -> s.peek(1)
+s.peekbitlist(a, b) -> s.peeklist([a, b])
+s.peekbits(bits) -> s.peek(bits)
+s.peekbyte() -> s.peek(8)
+s.peekbytelist(a, b) -> s.peeklist([8*a, 8*b])
+s.peekbytes(bytes) -> s.peek(8*bytes)
+s.readbit() -> s.read(1)
+s.readbitlist(a, b) -> s.readlist([a, b])
+s.readbits(bits) -> s.read(bits)
+s.readbyte() -> s.read(8)
+s.readbytelist(a, b) -> s.readlist([8*a, 8*b])
+s.readbytes(bytes) -> s.read(8*bytes)
+s.retreatbit() -> s.pos -= 1
+s.retreatbits(bits) -> s.pos -= bits
+s.retreatbyte() -> s.pos -= 8
+s.retreatbytes(bytes) -> s.pos -= 8*bytes
+s.reversebytes(start, end) -> s.byteswap(0, start, end)
+s.seek(pos) -> s.pos = pos
+s.seekbyte(bytepos) -> s.bytepos = bytepos
+s.slice(start, end, step) -> s[start:end:step]
+s.tell() -> s.pos
+s.tellbyte() -> s.bytepos
+s.truncateend(bits) -> del s[-bits:]
+s.truncatestart(bits) -> del s[:bits]
+s.unset([a, b]) -> s.set(False, [a, b])
+
+Many of these methods have been deprecated for the last few releases, but
+there are some new removals too. Any recoding needed should be quite
+straightforward, so while I apologise for the hassle, I had to take the
+opportunity to streamline and rationalise what was becoming a bit of an
+overblown API.
+
+* set / unset methods combined.
+
+The set/unset methods have been combined in a single method, which now
+takes a boolean as its first argument:
+
+s.set([a, b]) -> s.set(1, [a, b])
+s.unset([a, b]) -> s.set(0, [a, b])
+s.allset([a, b]) -> s.all(1, [a, b])
+s.allunset([a, b]) -> s.all(0, [a, b])
+s.anyset([a, b]) -> s.any(1, [a, b])
+s.anyunset([a, b]) -> s.any(0, [a, b])
+
+* all / any only accept iterables.
+
+The all and any methods (previously called allset, allunset, anyset and
+anyunset) no longer accept a single bit position. The recommended way of
+testing a single bit is just to index it, for example instead of:
+
+>>> if s.all(True, i):
+
+just use
+
+>>> if s[i]:
+
+If you really want to you can of course use an iterable with a single
+element, such as 's.any(False, [i])', but it's clearer just to write
+'not s[i]'.
+
+* Exception raised on reading off end of bitstring.
+
+If a read or peek goes beyond the end of the bitstring then a ReadError
+will be raised. The previous behaviour was that the rest of the bitstring
+would be returned and no exception raised.
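+
+As a short sketch of the new behaviour (the error message itself is omitted
+here):
+
+>>> s = BitString('0xab')    # 8 bits long
+>>> s.read(16)               # raises a ReadError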
+
+* BitStringError renamed to Error.
+
+The base class for errors in the bitstring module is now just Error, so
+it will likely appear in your code as bitstring.Error instead of
+the rather repetitive bitstring.BitStringError.
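+
+Code that caught the old name can catch the new base class instead; a
+minimal sketch:
+
+>>> try:
+...     s = Bits('uint:12=5000')    # too big for 12 bits
+... except bitstring.Error:
+...     print('could not create bitstring')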
+
+* Single bit slices and reads return a bool.
+
+A single index slice (such as s[5]) will now return a bool (i.e. True or
+False) rather than a single bit bitstring. This is partly to reflect the
+style of the bytearray type, which returns an integer for single items, but
+mostly to avoid common errors like:
+
+>>> if s[0]:
+... do_something()
+
+While the intent of this code snippet is quite clear (i.e. do_something if
+the first bit of s is set) under the old rules s[0] would be true as long
+as s wasn't empty. That's because any one-bit bitstring was true as it was a
+non-empty container. Under the new rule s[0] is True if s starts with a '1'
+bit and False if s starts with a '0' bit.
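+
+For example, under the new rule:
+
+>>> s = Bits('0b10')
+>>> s[0]
+True
+>>> s[1]
+False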
+
+The change does not affect reads and peeks, so s.peek(1) will still return
+a single bit bitstring, which leads on to the next item...
+
+* Empty bitstrings or bitstrings with only zero bits are considered False.
+
+Previously a bitstring was False if it had no elements, otherwise it was True.
+This is standard behaviour for containers, but wasn't very useful for a container
+of just 0s and 1s. The new behaviour means that the bitstring is False if it
+has no 1 bits. This means that code like this:
+
+>>> if s.peek(1):
+... do_something()
+
+should work as you'd expect. It also means that Bits(1000), Bits(0x00) and
+Bits('uint:12=0') are all False too. If you need to check for the emptiness of
+a bitstring then instead check the len property:
+
+if s -> if s.len
+if not s -> if not s.len
+
+* Length and offset disallowed for some initialisers.
+
+Previously you could create a bitstring using expressions like:
+
+>>> s = Bits(hex='0xabcde', offset=4, length=13)
+
+This has now been disallowed, and the offset and length parameters may only
+be used when initialising with bytes or a file. To replace the old behaviour
+you could instead use
+
+>>> s = Bits(hex='0xabcde')[4:17]
+
+* Renamed 'format' parameter to 'fmt'.
+
+Methods with a 'format' parameter have had it renamed to 'fmt', to prevent
+hiding the built-in 'format'. Affects methods unpack, read, peek, readlist,
+peeklist and byteswap and the pack function.
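+
+If you passed the parameter by keyword you will need to rename it; a
+one-line sketch:
+
+>>> v = s.read(fmt='uint:8')    # previously s.read(format='uint:8')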
+
+* Iterables instead of *format accepted for some methods.
+
+This means that for the affected methods (unpack, readlist and peeklist) you
+will need to use an iterable to specify multiple items. This is easier to
+show than to describe, so instead of
+
+>>> a, b, c, d = s.readlist('uint:12', 'hex:4', 'bin:7')
+
+you would instead write
+
+>>> a, b, c, d = s.readlist(['uint:12', 'hex:4', 'bin:7'])
+
+Note that you could still use the single string 'uint:12, hex:4, bin:7' if
+you preferred.
+
+* Bool auto-initialisation removed.
+
+You can no longer use True and False to initialise single bit bitstrings.
+The reasoning behind this is that as bool is a subclass of int, it really is
+bad practice to have Bits(False) be different to Bits(0) and to have Bits(True)
+different to Bits(1).
+
+If you have used bool auto-initialisation then you will have to be careful to
+replace it as the bools will now be interpreted as ints, so Bits(False) will
+be empty (a bitstring of length 0), and Bits(True) will be a single zero bit
+(a bitstring of length 1). Sorry for the confusion, but I think this will
+prevent bigger problems in the future.
+
+There are a few alternatives for creating a single bit bitstring. My favourite
+is to use a list with a single item:
+
+Bits(False) -> Bits([0])
+Bits(True) -> Bits([1])
+
+* New creation from file strategy
+
+Previously if you created a bitstring from a file, either by auto-initialising
+with a file object or using the filename parameter, the file would not be read
+into memory unless you tried to modify it, at which point the whole file would
+be read.
+
+The new behaviour depends on whether you create a Bits or a BitString from the
+file. If you create a Bits (which is immutable) then the file will never be
+read into memory. This allows very large files to be opened for examination
+even if they could never fit in memory.
+
+If however you create a BitString, the whole of the referenced file will be read
+to store in memory. If the file is very big this could take a long time, or fail,
+but the idea is that in saying you want the mutable BitString you are implicitly
+saying that you want to make changes and so (for now) we need to load it into
+memory.
+
+The new strategy is a bit more predictable in terms of performance than the old.
+The main point to remember is that if you want to open a file and don't plan to
+alter the bitstring then use the Bits class rather than BitString.
+
+Just to be clear, in neither case will the contents of the file ever be changed -
+if you want to output the modified BitString then use the tofile method, for
+example.
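+
+A sketch of the two strategies (the filename is hypothetical):
+
+>>> s = Bits(filename='big.bin')       # examined lazily, never fully loaded
+>>> t = BitString(filename='big.bin')  # whole file read into memory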
+
+* find and rfind return a tuple instead of a bool.
+
+If a find is unsuccessful then an empty tuple is returned (which is False in a
+boolean sense) otherwise a single item tuple with the bit position is returned
+(which is True in a boolean sense). You shouldn't need to recode unless you
+explicitly compared the result of a find to True or False, for example this
+snippet doesn't need to be altered:
+
+>>> if s.find('0x23'):
+... print(s.bitpos)
+
+but you could now instead use
+
+>>> found = s.find('0x23')
+>>> if found:
+... print(found[0])
+
+The reason for returning the bit position in a tuple is so that finding at
+position zero can still be True - it's the tuple (0,) - whereas not found can
+be False - the empty tuple ().
+
+The new features in this release are:
+-------------------------------------
+* New count method.
+
+This method just counts the number of 1 or 0 bits in the bitstring.
+
+>>> s = Bits('0x31fff4')
+>>> s.count(1)
+16
+
+* read and peek methods accept integers.
+
+The read, readlist, peek and peeklist methods now accept integers as parameters
+to mean "read this many bits and return a bitstring". This has allowed a number
+of methods to be removed from this release, so for example instead of:
+
+>>> a, b, c = s.readbits(5, 6, 7)
+>>> if s.peekbit():
+... do_something()
+
+you should write:
+
+>>> a, b, c = s.readlist([5, 6, 7])
+>>> if s.peek(1):
+... do_something()
+
+* byteswap used to reverse all bytes.
+
+The byteswap method now allows a format specifier of 0 (the default) to signify
+that all of the whole bytes should be reversed. This means that calling just
+byteswap() is almost equivalent to the now removed reversebytes() method (a small
+difference is that byteswap won't raise an exception if the bitstring isn't a
+whole number of bytes long).
+
+* Auto initialise with bytearray or (for Python 3 only) bytes.
+
+So rather than writing:
+
+>>> a = Bits(bytes=some_bytearray)
+
+you can just write
+
+>>> a = Bits(some_bytearray)
+
+This also works for the bytes type, but only if you're using Python 3.
+For Python 2 it's not possible to distinguish between a bytes object and a
+str. For this reason this method should be used with some caution as it will
+make your code behave differently with the different major Python versions.
+
+>>> b = Bits(b'abcd\x23\x00') # Only Python 3!
+
+* set, invert, all and any default to whole bitstring.
+
+This means that you can for example write:
+
+>>> a = BitString(100) # 100 zero bits
+>>> a.set(1) # set all bits to 1
+>>> a.all(1) # are all bits set to 1?
+True
+>>> a.any(0) # are any set to 0?
+False
+>>> a.invert() # invert every bit
+
+* New exception types.
+
+As well as renaming BitStringError to just Error
+there are also new exceptions which use Error as a base class.
+
+These can be caught in preference to Error if you need finer control.
+The new exceptions sometimes also derive from built-in exceptions:
+
+ByteAlignError(Error) - whole byte position or length needed.
+
+ReadError(Error, IndexError) - reading or peeking off the end of
+the bitstring.
+
+CreationError(Error, ValueError) - inappropriate argument during
+bitstring creation.
+
+InterpretError(Error, ValueError) - inappropriate interpretation of
+binary data.
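+
+For instance, to handle only read overruns while letting other errors
+propagate (a minimal sketch; handle_truncated() is a hypothetical handler):
+
+>>> try:
+...     v = s.read('uint:32')
+... except bitstring.ReadError:
+...     handle_truncated()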
+
+
+--------------------------------------------------------------
+March 18th 2010: version 1.3.0 for Python 2.6 and 3.x released
+--------------------------------------------------------------
+New features:
+
+* byteswap method for changing endianness.
+
+Changes the endianness in-place according to a format string or
+integer(s) giving the byte pattern. See the manual for details.
+
+>>> s = BitString('0x00112233445566')
+>>> s.byteswap(2)
+3
+>>> s
+BitString('0x11003322554466')
+>>> s.byteswap('h')
+3
+>>> s
+BitString('0x00112233445566')
+>>> s.byteswap([2, 5])
+1
+>>> s
+BitString('0x11006655443322')
+
+* Multiplicative factors in bitstring creation and reading.
+
+For example:
+
+>>> s = Bits('100*0x123')
+
+* Token grouping using parentheses.
+
+For example:
+
+>>> s = Bits('3*(uint:6=3, 0b1)')
+
+* Negative slice indices allowed.
+
+The start and end parameters of many methods may now be negative, with the
+same meaning as for negative slice indices. Affects all methods with these
+parameters.
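+
+A one-line sketch:
+
+>>> s.find('0x12', start=-64)    # search only within the final 64 bits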
+
+* Sequence ABCs used.
+
+The Bits class now derives from collections.Sequence, while the BitString
+class derives from collections.MutableSequence.
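+
+A quick check using the standard abstract base classes:
+
+>>> import collections
+>>> isinstance(Bits('0b1'), collections.Sequence)
+True
+>>> isinstance(BitString('0b1'), collections.MutableSequence)
+True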
+
+* Keywords allowed in readlist, peeklist and unpack.
+
+Keywords for token lengths are now permitted when reading. So for example,
+you can write
+
+>>> s = bitstring.pack('4*(uint:n)', 2, 3, 4, 5, n=7)
+>>> s.unpack('4*(uint:n)', n=7)
+[2, 3, 4, 5]
+
+* start and end parameters added to rol and ror.
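+
+For example (a sketch):
+
+>>> s.rol(1, start=0, end=8)    # rotate only the first byte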
+
+* join function accepts other iterables.
+
+Also its parameter has changed from 'bitstringlist' to 'sequence'. This is
+technically a backward incompatibility in the unlikely event that you are
+referring to the parameter by name.
+
+* __init__ method accepts keywords.
+
+Rather than a long list of initialisers the __init__ methods now use a
+**kwargs dictionary for all initialisers except 'auto'. This should have no
+effect, except that this is a small backward incompatibility if you use
+positional arguments when initialising with anything other than auto
+(which would be rather unusual).
+
+* More optimisations.
+
+* Bug fixed in replace method (it could fail if start != 0).
+
+----------------------------------------------------------------
+January 19th 2010: version 1.2.0 for Python 2.6 and 3.x released
+----------------------------------------------------------------
+
+* New 'Bits' class.
+
+Introducing a brand new class, Bits, representing an immutable sequence of
+bits.
+
+The Bits class is the base class for the mutable BitString. The differences
+between Bits and BitStrings are:
+
+1) Bits are immutable, so once they have been created their value cannot change.
+This of course means that mutating methods (append, replace, del etc.) are not
+available for Bits.
+
+2) Bits are hashable, so they can be used in sets and as keys in dictionaries.
+
+3) Bits are potentially more efficient than BitStrings, both in terms of
+computation and memory. The current implementation is only marginally
+more efficient though - this should improve in future versions.
+
+You can switch from Bits to a BitString or vice versa by constructing a new
+object from the old.
+
+>>> s = Bits('0xabcd')
+>>> t = BitString(s)
+>>> t.append('0xe')
+>>> u = Bits(t)
+
+The relationship between Bits and BitString is supposed to loosely mirror that
+between bytes and bytearray in Python 3.
+
+* Deprecation messages turned on.
+
+A number of methods have been flagged for removal in version 2. Deprecation
+warnings will now be given, which include an alternative way to do the same
+thing. All of the deprecated methods have simpler equivalent alternatives.
+
+>>> t = s.slice(0, 2)
+__main__:1: DeprecationWarning: Call to deprecated function slice.
+Instead of 's.slice(a, b, c)' use 's[a:b:c]'.
+
+The deprecated methods are: advancebit, advancebits, advancebyte, advancebytes,
+retreatbit, retreatbits, retreatbyte, retreatbytes, tell, seek, slice, delete,
+tellbyte, seekbyte, truncatestart and truncateend.
+
+* Initialise from bool.
+
+Booleans have been added to the list of types that can 'auto'
+initialise a bitstring.
+
+>>> zerobit = BitString(False)
+>>> onebit = BitString(True)
+
+* Improved efficiency.
+
+More methods have been speeded up, in particular some deletions and insertions.
+
+* Bug fixes.
+
+A rare problem with truncating the start of bitstrings was fixed.
+
+A possible problem outputting the final byte in tofile() was fixed.
+
+-----------------------------------------------------------------
+December 22nd 2009: version 1.1.3 for Python 2.6 and 3.x released
+-----------------------------------------------------------------
+
+This version hopefully fixes an installation problem for platforms with
+case-sensitive file systems. There are no new features or other bug fixes.
+
+-----------------------------------------------------------------
+December 18th 2009: version 1.1.2 for Python 2.6 and 3.x released
+-----------------------------------------------------------------
+
+This is a minor update with (almost) no new features.
+
+* Improved efficiency.
+
+The speed of many typical operations has been increased, some substantially.
+
+* Initialise from integer.
+
+A BitString of '0' bits can be created using just an integer to give the length
+in bits. So instead of
+
+>>> s = BitString(length=100)
+
+you can write just
+
+>>> s = BitString(100)
+
+This matches the behaviour of bytearrays and (in Python 3) bytes.
+
+* A defect related to using the set / unset functions on BitStrings initialised
+from a file has been fixed.
+
+-----------------------------------------------------------------
+November 24th 2009: version 1.1.0 for Python 2.6 and 3.x released
+-----------------------------------------------------------------
+Note that this version will not work for Python 2.4 or 2.5. There may be an
+update for these Python versions some time next year, but it's not a priority
+quite yet. Also note that only one version is now provided, which works for
+Python 2.6 and 3.x (done with the minimum of hackery!)
+
+* Improved efficiency.
+
+A fair number of functions have improved efficiency, some quite dramatically.
+
+* New bit setting and checking functions.
+
+Although these functions don't do anything that couldn't be done before, they
+do make some common use cases much more efficient. If you need to set or check
+single bits then these are the functions you need.
+
+set / unset : Set bit(s) to 1 or 0 respectively.
+allset / allunset : Check if all bits are 1 or all 0.
+anyset / anyunset : Check if any bits are 1 or any 0.
+
+>>> s = BitString(length=1000)
+>>> s.set((10, 100, 44, 12, 1))
+>>> s.allunset((2, 22, 222))
+True
+>>> s.anyset(range(7, 77))
+True
+
+* New rotate functions.
+
+ror / rol : Rotate bits to the right or left respectively.
+
+>>> s = BitString('0b100000000')
+>>> s.ror(2)
+>>> s.bin
+'0b001000000'
+>>> s.rol(5)
+>>> s.bin
+'0b000000100'
+
+* Floating point interpretations.
+
+New float initialisations and interpretations are available. These only work
+for BitStrings of length 32 or 64 bits.
+
+>>> s = BitString(float=0.2, length=64)
+>>> s.float
+0.200000000000000001
+>>> t = bitstring.pack('<3f', -0.4, 1e34, 17.0)
+>>> t.hex
+'0xcdccccbedf84f67700008841'
+
+* 'bytes' token reintroduced.
+
+This token returns a bytes object (equivalent to a str in Python 2.6).
+
+>>> s = BitString('0x010203')
+>>> s.unpack('bytes:2, bytes:1')
+['\x01\x02', '\x03']
+
+* 'uint' is now the default token type.
+
+So for example these are equivalent:
+
+a, b = s.readlist('uint:12, uint:12')
+a, b = s.readlist('12, 12')
+
+--------------------------------------------------------
+October 10th 2009: version 1.0.1 for Python 3.x released
+--------------------------------------------------------
+This is a straight port of version 1.0.0 to Python 3.
+
+For changes since the last Python 3 release read all the way down in this
+document to version 0.4.3.
+
+This version will also work for Python 2.6, but there's no advantage to using
+it over the 1.0.0 release. It won't work for anything before 2.6.
+
+-------------------------------------------------------
+October 9th 2009: version 1.0.0 for Python 2.x released
+-------------------------------------------------------
+Version 1 is here!
+
+This is the first release not to carry the 'beta' tag. It contains a couple of
+minor new features but is principally a release to fix the API. If you've been
+using an older version then you almost certainly will have to recode a bit. If
+you're not ready to do that then you may wish to delay updating.
+
+So the bad news is that there are lots of small changes to the API. The good
+news is that all the changes are pretty trivial, the new API is cleaner and
+more 'Pythonic', and that by making it version 1.0 I'm promising not to
+tweak it again for some time.
+
+** API Changes **
+
+* New read / peek functions for returning multiple items.
+
+The functions read, readbits, readbytes, peek, peekbits and peekbytes now only
+ever return a single item, never a list.
+
+The new functions readlist, readbitlist, readbytelist, peeklist, peekbitlist
+and peekbytelist can be used to read multiple items and will always return a
+list.
+
+So a line like:
+
+>>> a, b = s.read('uint:12, hex:32')
+
+becomes
+
+>>> a, b = s.readlist('uint:12, hex:32')
+
+* Renaming / removing functions.
+
+Functions have been renamed as follows:
+
+seekbit -> seek
+tellbit -> tell
+reversebits -> reverse
+deletebits -> delete
+tostring -> tobytes
+
+and a couple have been removed altogether:
+
+deletebytes - use delete instead.
+empty - use 'not s' rather than 's.empty()'.
+
+* Renaming parameters.
+
+The parameters 'startbit' and 'endbit' have been renamed 'start' and 'end'.
+This affects the functions slice, find, findall, rfind, reverse, cut and split.
+
+The parameter 'bitpos' has been renamed to 'pos'. This affects the functions
+seek, tell, insert, overwrite and delete.
+
+* Mutating methods return None rather than self.
+
+This means that you can't chain functions together, so
+
+>>> s.append('0x00').prepend('0xff')
+>>> t = s.reverse()
+
+needs to be rewritten as
+
+>>> s.append('0x00')
+>>> s.prepend('0xff')
+>>> s.reverse()
+>>> t = s
+
+Affects truncatestart, truncateend, insert, overwrite, delete, append,
+prepend, reverse and reversebytes.
+
+* Properties renamed.
+
+The 'data' property has been renamed to 'bytes'. Also if the BitString is not a
+whole number of bytes then a ValueError exception will be raised when using
+'bytes' as a 'getter'.
+
+Properties 'len' and 'pos' have been added to replace 'length' and 'bitpos',
+although the longer names have not been removed so you can continue to use them
+if you prefer.
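+
+A short sketch of the renamed properties:
+
+>>> s = BitString('0xff')
+>>> s.len
+8
+>>> s.bytes    # raises ValueError if s isn't a whole number of bytes
+'\xff'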
+
+* Other changes.
+
+The unpack function now always returns a list, never a single item.
+
+BitStrings are now 'unhashable', so calling hash on one or making a set will
+fail.
+
+The colon separating the token name from its length is now mandatory. So for
+example BitString('uint12=100') becomes BitString('uint:12=100').
+
+Removed support for the 'bytes' token in format strings. Instead of
+s.read('bytes:4') use s.read('bits:32').
+
+** New features **
+
+* Added endswith and startswith functions.
+
+These do much as you'd expect; they return True or False depending on whether
+the BitString starts or ends with the parameter.
+
+>>> BitString('0xef342').startswith('0b11101')
+True
+
+----------------------------------------------------------
+September 11th 2009: version 0.5.2 for Python 2.x released
+----------------------------------------------------------
+Finally some tools for dealing with endianness!
+
+* New interpretations are now available for whole-byte BitStrings that treat
+them as big, little, or native-endian.
+
+>>> big = BitString(intbe=1, length=16) # or BitString('intbe:16=1') if you prefer.
+>>> little = BitString(intle=1, length=16)
+>>> print big.hex, little.hex
+0x0001 0x0100
+>>> print big.intbe, little.intle
+1 1
+
+* 'Struct'-like compact format codes
+
+To save some typing when using pack, unpack, read and peek, compact format
+codes based on those used in the struct and array modules have been added.
+These must start with a character indicating the endianness (>, < or @ for
+big, little and native-endian), followed by characters giving the format:
+
+b 1-byte signed int
+B 1-byte unsigned int
+h 2-byte signed int
+H 2-byte unsigned int
+l 4-byte signed int
+L 4-byte unsigned int
+q 8-byte signed int
+Q 8-byte unsigned int
+
+For example:
+
+>>> s = bitstring.pack('<4h', 0, 1, 2, 3)
+
+creates a BitString with four little-endian 2-byte integers. While
+
+>>> x, y, z = s.read('>hhl')
+
+reads them back as two big-endian two-byte integers and one four-byte
+big-endian integer.
+
+Of course you can combine this new format with the old ones however you like:
+
+>>> s.unpack('<h, intle:24, uint:5, bin')
+[0, 131073, 0, '0b0000000001100000000']
+
+-------------------------------------------------------
+August 26th 2009: version 0.5.1 for Python 2.x released
+-------------------------------------------------------
+
+This update introduces pack and unpack functions for creating and disassembling
+BitStrings.
+
+* New pack() and unpack() functions.
+
+The module level pack function provides a flexible new method for creating
+BitStrings. Tokens for BitString 'literals' can be used in the same way as in
+the constructor.
+
+>>> from bitstring import BitString, pack
+>>> a = pack('0b11, 0xff, 0o77, int:5=-1, se=33')
+
+You can also leave placeholders in the format, which will be filled in by
+the values provided.
+
+>>> b = pack('uint:10, hex:4', 33, 'f')
+
+Finally you can use a dictionary or keywords.
+
+>>> c = pack('bin=a, hex=b, bin=a', a='010', b='ef')
+
+The unpack function is similar to the read function except that it always
+unpacks from the start of the BitString.
+
+>>> x, y = b.unpack('uint:10, hex')
+
+If a token is given without a length (as above) then it will expand to fill the
+remaining bits in the BitString. This also now works with read() and peek().
+
+* New tostring() and tofile() functions.
+
+The tostring() function just returns the data as a string, with up to seven
+zero bits appended to byte align. The tofile() function does the same except
+writes to a file object.
+
+>>> f = open('myfile', 'wb')
+>>> BitString('0x1234ff').tofile(f)
+
+* Other changes.
+
+The use of '=' is now mandatory in 'auto' initialisers. Tokens like 'uint12 100' will
+no longer work. Also the use of a ':' before the length is encouraged, but not yet
+mandated. So the previous example should be written as 'uint:12=100'.
+
+The 'auto' initialiser will now take a file object.
+
+>>> f = open('myfile', 'rb')
+>>> s = BitString(f)
+
+-----------------------------------------------------
+July 19th 2009: version 0.5.0 for Python 2.x released
+-----------------------------------------------------
+
+This update breaks backward compatibility in a couple of areas. The only one
+you probably need to be concerned about is the change to the default for
+bytealigned in find, replace, split, etc.
+
+See the user manual for more details on each of these items.
+
+* Expanded abilities of 'auto' initialiser.
+
+More types can be initialised through the 'auto' initialiser. For example
+instead of
+
+>>> a = BitString(uint=44, length=16)
+
+you can write
+
+>>> a = BitString('uint16=44')
+
+Also, different comma-separated tokens will be joined together, e.g.
+
+>>> b = BitString('0xff') + 'int8=-5'
+
+can be written
+
+>>> b = BitString('0xff, int8=-5')
+
+* New formatted read() and peek() functions.
+
+These take a format string similar to that used in the auto initialiser.
+If only one token is provided then a single value is returned, otherwise a
+list of values is returned.
+
+>>> start_code, width, height = s.read('hex32, uint12, uint12')
+
+is equivalent to
+
+>>> start_code = s.readbits(32).hex
+>>> width = s.readbits(12).uint
+>>> height = s.readbits(12).uint
+
+The tokens are:
+
+ int n : n bits as a signed integer.
+ uint n : n bits as an unsigned integer.
+ hex n : n bits as a hexadecimal string.
+ oct n : n bits as an octal string.
+ bin n : n bits as a binary string.
+ ue : next bits as an unsigned exp-Golomb.
+ se : next bits as a signed exp-Golomb.
+ bits n : n bits as a new BitString.
+ bytes n : n bytes as a new BitString.
+
+See the user manual for more details.
+
+* hex() and oct() functions removed.
+
+The special functions for hex() and oct() have been removed. Please use the
+hex and oct properties instead.
+
+>>> hex(s)
+
+becomes
+
+>>> s.hex
+
+* join made a member function.
+
+The join function must now be called on a BitString object, which will be
+used to join the list together. You may need to recode slightly:
+
+>>> s = bitstring.join('0x34', '0b1001', '0b1')
+
+becomes
+
+>>> s = BitString().join('0x34', '0b1001', '0b1')
+
+* More than one value allowed in readbits, readbytes, peekbits and peekbytes
+
+If you specify more than one bit or byte length then a list of BitStrings will
+be returned.
+
+>>> a, b, c = s.readbits(10, 5, 5)
+
+is equivalent to
+
+>>> a = s.readbits(10)
+>>> b = s.readbits(5)
+>>> c = s.readbits(5)
+
+* bytealigned defaults to False, and is at the end of the parameter list
+
+Functions that have a bytealigned parameter have changed so that it now
+defaults to False rather than True. Also its position in the parameter list
+has changed to be at the end. You may need to recode slightly (sorry!)
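+
+A sketch of the recoding:
+
+>>> s.find('0x47')                    # now unaligned by default
+>>> s.find('0x47', bytealigned=True)  # the old default behaviour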
+
+* readue and readse functions have been removed
+
+Instead you should use the new read function with a 'ue' or 'se' token:
+
+>>> i = s.readue()
+
+becomes
+
+>>> i = s.read('ue')
+
+This is more flexible as you can read multiple items in one go, plus you can
+now also use the peek function with ue and se.
+
+* Minor bugs fixed.
+
+See the issue tracker for more details.
+
+-----------------------------------------------------
+June 15th 2009: version 0.4.3 for Python 2.x released
+-----------------------------------------------------
+
+This is a minor update. This release is the first to bundle the bitstring
+manual. This is a PDF and you can find it in the docs directory.
+
+Changes in version 0.4.3
+
+* New 'cut' function
+
+This function returns a generator for constant sized chunks of a BitString.
+
+>>> for byte in s.cut(8):
+... do_something_with(byte)
+
+You can also specify a startbit and endbit, as well as a count, which limits
+the number of items generated:
+
+>>> first100TSPackets = list(s.cut(188*8, count=100))
+
+* 'slice' function now equivalent to __getitem__.
+
+This means that a step can also be given to the slice function so that the
+following are now the same thing, and it's just a personal preference which
+to use:
+
+>>> s1 = s[a:b:c]
+>>> s2 = s.slice(a, b, c)
+
+* findall gets a 'count' parameter.
+
+So now
+
+>>> list(a.findall(s, count=n))
+
+is equivalent to
+
+>>> list(a.findall(s))[:n]
+
+except that it won't need to generate the whole list and so is much more
+efficient.
+
+* Changes to 'split'.
+
+The split function now has a 'count' parameter rather than 'maxsplit'. This
+makes the interface closer to that for cut, replace and findall. The final item
+generated is now no longer the whole of the rest of the BitString.
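+
+For example (a sketch):
+
+>>> chunks = list(s.split('0x00', count=4))    # generates at most 4 items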
+
+* A couple of minor bugs were fixed. See the issue tracker for details.
+
+----------------------------------------------------
+May 25th 2009: version 0.4.2 for Python 2.x released
+----------------------------------------------------
+
+This is a minor update that almost doesn't break compatibility with version
+0.4.0; the one slight exception is findall() returning a generator, as
+detailed below.
+
+Changes in version 0.4.2
+
+* Stepping in slices
+
+The use of the step parameter (also known as the stride) in slices has been
+added. Its use is a little non-standard as it effectively gives a multiplicative
+factor to apply to the start and stop parameters, rather than skipping over
+bits.
+
+For example this makes it much more convenient if you want to give slices in
+terms of bytes instead of bits. Instead of writing s[a*8:b*8] you can use
+s[a:b:8].
+
+When using a step the BitString is effectively truncated to a multiple of the
+step, so s[::8] is equal to s if s is an integer number of bytes, otherwise it
+is truncated by up to 7 bits. So the final seven complete 16-bit words could be
+written as s[-7::16].
+
+Negative slices are also allowed, and should do what you'd expect. So for
+example s[::-1] returns a bit-reversed copy of s (which is similar to
+s.reversebits(), which does the same operation on s in-place). As another
+example, to get the first 10 bytes in reverse byte order you could use
+s_bytereversed = s[0:10:-8].
+
+* Removed restrictions on offset
+
+You can now specify an offset of greater than 7 bits when creating a BitString,
+and the use of offset is also now permitted when using the filename initialiser.
+This is useful when you want to create a BitString from the middle of a file
+without having to read the file into memory.
+
+>>> f = BitString(filename='reallybigfile', offset=8000000, length=32)
+
+* Integers can be assigned to slices
+
+You can now assign an integer to a slice of a BitString. If the integer doesn't
+fit in the size of the slice given then a ValueError exception is raised. So this
+is now allowed and works as expected:
+
+>>> s[8:16] = 106
+
+and is equivalent to
+
+>>> s[8:16] = BitString(uint=106, length=8)
+
+* Less exceptions raised
+
+Some changes have been made to slicing so that fewer exceptions are raised,
+bringing the interface closer to that for lists. So for example trying to delete
+past the end of the BitString will now just delete to the end, rather than
+raising a ValueError.
+
+* Initialisation from lists and tuples
+
+A new option for the auto initialiser is to pass it a list or tuple. The items
+in the list or tuple are evaluated as booleans and the bits in the BitString are
+set to 1 for True items and 0 for False items. This can be used anywhere the
+auto initialiser can currently be used. For example:
+
+>>> a = BitString([True, 7, False, 0, ()]) # 0b11000
+>>> b = a + ['Yes', ''] # Adds '0b10'
+>>> (True, True, False) in a
+True
+
+* Miscellany
+
+reversebits() now has optional startbit and endbit parameters.
+
+As an optimisation findall() will return a generator, rather than a list. If you
+still want the whole list then of course you can just call list() on the
+generator.
+
+Improved efficiency of rfind().
+
+A couple of minor bugs were fixed. See the issue tracker for details.
+
+-----------------------------------------------------
+April 23rd 2009: Python 3 only version 0.4.1 released
+-----------------------------------------------------
+
+This version is just a port of version 0.4.0 to Python 3. All the unit tests
+pass, but beyond that only limited ad hoc testing has been done and so it
+should be considered an experimental release. That said, the unit test
+coverage is very good - I'm just not sure if anyone even wants a Python 3
+version!
+
+---------------------------------------
+April 11th 2009: version 0.4.0 released
+---------------------------------------
+Changes in version 0.4.0
+
+* New functions
+
+Added rfind(), findall(), replace(). These do pretty much what you'd expect -
+see the docstrings or the wiki for more information.
+
+* More special functions
+
+Some missing functions were added: __repr__, __contains__, __rand__,
+__ror__, __rxor__ and __delitem__.
+
+* Miscellany
+
+A couple of small bugs were fixed (see the issue tracker).
+
+----
+
+There are some small backward incompatibilities relative to version 0.3.2:
+
+* Combined find() and findbytealigned()
+
+findbytealigned() has been removed, and becomes part of find(). The default
+start position has changed on both find() and split() to be the start of the
+BitString. You may need to recode:
+
+>>> s1.find(bs)
+>>> s2.findbytealigned(bs)
+>>> s2.split(bs)
+
+becomes
+
+>>> s1.find(bs, bytealigned=False, startbit=s1.bitpos)
+>>> s2.find(bs, startbit=s1.bitpos) # bytealigned defaults to True
+>>> s2.split(bs, startbit=s2.bitpos)
+
+* Reading off end of BitString no longer raises exception.
+
+Previously a read or peek function that encountered the end of the BitString
+would raise a ValueError. It will now instead return the remainder of the
+BitString, which could be an empty BitString. This is closer to the file
+object interface.
+
+* Removed visibility of offset.
+
+The offset property was previously read-only, and has now been removed from
+public view altogether. As it is used internally for efficiency reasons you
+shouldn't really have needed to use it. If you do then use the _offset parameter
+instead (with caution).
+
+---------------------------------------
+March 11th 2009: version 0.3.2 released
+---------------------------------------
+Changes in version 0.3.2
+
+* Better performance
+
+A number of functions (especially find() and findbytealigned()) have been sped
+up considerably.
+
+* Bit-wise operations
+
+Added support for bit-wise AND (&), OR (|) and XOR (^). For example:
+
+>>> a = BitString('0b00111')
+>>> print a & '0b10101'
+0b00101
+
+* Miscellany
+
+Added seekbit() and seekbyte() functions. These complement the 'advance' and
+'retreat' functions, although you can still just use bitpos and bytepos
+properties directly.
+
+>>> a.seekbit(100) # Equivalent to a.bitpos = 100
+
+Allowed comparisons between BitString objects and strings. For example this
+will now work:
+
+>>> a = BitString('0b00001111')
+>>> a == '0x0f'
+True
+
+------------------------------------------
+February 26th 2009: version 0.3.1 released
+------------------------------------------
+Changes in version 0.3.1
+
+This version only adds features and fixes bugs relative to 0.3.0, and doesn't
+break backwards compatibility.
+
+* Octal interpretation and initialisation
+
+The oct property now joins bin and hex. Just prefix octal numbers with '0o'.
+
+>>> a = BitString('0o755')
+>>> print a.bin
+0b111101101
+
+* Simpler copying
+
+Rather than using b = copy.copy(a) to create a copy of a BitString, now you
+can just use b = BitString(a).
+
+* More special methods
+
+Lots of new special methods added, for example bit-shifting via << and >>,
+equality testing via == and !=, bit inversion (~) and concatenation using *.
+
+Also __setitem__ is now supported so BitString objects can be modified using
+standard index notation.
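+
+A few illustrative sketches:
+
+>>> (BitString('0b1') * 3).bin
+'0b111'
+>>> (~BitString('0b10')).bin
+'0b01'
+>>> s = BitString('0b0000')
+>>> s[1:3] = '0b11'
+>>> s.bin
+'0b0110'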
+
+* Proper installer
+
+Finally got round to writing the distutils script. To install just
+python setup.py install.
+
+------------------------------------------
+February 15th 2009: version 0.3.0 released
+------------------------------------------
+Changes in version 0.3.0
+
+* Simpler initialisation from binary and hexadecimal
+
+The first argument in the BitString constructor is now called auto and will
+attempt to interpret the type of a string. Prefix binary numbers with '0b'
+and hexadecimals with '0x'.
+
+>>> a = BitString('0b0') # single zero bit
+>>> b = BitString('0xffff') # two bytes
+
+Previously the first argument was data, so if you relied on this then you
+will need to recode:
+
+>>> a = BitString('\x00\x00\x01\xb3') # Don't do this any more!
+
+becomes
+
+>>> a = BitString(data='\x00\x00\x01\xb3')
+
+or just
+
+>>> a = BitString('0x000001b3')
+
+This new notation can also be used in functions that take a BitString as an
+argument. For example:
+
+>>> a = BitString('0x0011') + '0xff'
+>>> a.insert('0b001', 6)
+>>> a.find('0b1111')
+
+* BitString made more mutable
+
+The functions append, deletebits, insert, overwrite, truncatestart and
+truncateend now modify the BitString that they act upon. This allows for
+cleaner and more efficient code, but you may need to rewrite slightly if you
+depended upon the old behaviour:
+
+>>> a = BitString(hex='0xffff')
+>>> a = a.append(BitString(hex='0x00'))
+>>> b = a.deletebits(10, 10)
+
+becomes:
+
+>>> a = BitString('0xffff')
+>>> a.append('0x00')
+>>> b = copy.copy(a)
+>>> b.deletebits(10, 10)
+
+Thanks to Frank Aune for suggestions in this and other areas.
+
+* Changes to printing
+
+The binary interpretation of a BitString is now prepended with '0b'. This is
+in keeping with the Python 2.6 (and 3.0) bin function. The prefix is optional
+when initialising using 'bin='.
+
+Also, if you just print a BitString with no interpretation it will pick
+something appropriate - hex if it is an integer number of bytes, otherwise
+binary. If the BitString representation is very long it will be truncated
+by '...' so it is only an approximate interpretation.
+
+>>> a = BitString('0b0011111')
+>>> print a
+0b0011111
+>>> a += '0b0'
+>>> print a
+0x3e
+
+* More convenience functions
+
+Some missing functions such as advancebit and deletebytes have been added. Also
+a number of peek functions make an appearance as have prepend and reversebits.
+See the Tutorial for more details.
+
+-----------------------------------------
+January 13th 2009: version 0.2.0 released
+-----------------------------------------
+Some fairly minor updates, not really deserving of a whole version point update.
+------------------------------------------
+December 29th 2008: version 0.1.0 released
+------------------------------------------
+First release!
diff --git a/python/bitstring/setup.py b/python/bitstring/setup.py
new file mode 100644
index 000000000..9f088dda9
--- /dev/null
+++ b/python/bitstring/setup.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+from distutils.core import setup
+# from distutils.extension import Extension
+# from Cython.Distutils import build_ext
+import sys
+
+kwds = {'long_description': open('README.txt').read()}
+
+if sys.version_info[:2] < (2, 6):
+ raise Exception('This version of bitstring needs Python 2.6 or later. '
+ 'For Python 2.4 / 2.5 please use bitstring version 1.0 instead.')
+
+# macros = [('PYREX_WITHOUT_ASSERTIONS', None)]
+# ext_modules = [Extension('bitstring', ["bitstring.pyx"], define_macros=macros)]
+
+setup(name='bitstring',
+ version='3.1.3',
+ description='Simple construction, analysis and modification of binary data.',
+ author='Scott Griffiths',
+ author_email='scott@griffiths.name',
+ url='http://python-bitstring.googlecode.com',
+ download_url='http://python-bitstring.googlecode.com',
+ license='The MIT License: http://www.opensource.org/licenses/mit-license.php',
+ # cmdclass = {'build_ext': build_ext},
+ # ext_modules = ext_modules,
+ py_modules=['bitstring'],
+ platforms='all',
+ classifiers = [
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Operating System :: OS Independent',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.0',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ **kwds
+ )
+
diff --git a/python/bitstring/test/smalltestfile b/python/bitstring/test/smalltestfile
new file mode 100644
index 000000000..be687ec35
--- /dev/null
+++ b/python/bitstring/test/smalltestfile
@@ -0,0 +1 @@
+#Eg‰«Íï \ No newline at end of file
diff --git a/python/bitstring/test/test.m1v b/python/bitstring/test/test.m1v
new file mode 100644
index 000000000..2da3ece11
--- /dev/null
+++ b/python/bitstring/test/test.m1v
Binary files differ
diff --git a/python/bitstring/test/test_bitarray.py b/python/bitstring/test/test_bitarray.py
new file mode 100644
index 000000000..b80f90617
--- /dev/null
+++ b/python/bitstring/test/test_bitarray.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+"""
+Unit tests for the bitarray module.
+"""
+
+import unittest
+import sys
+
+sys.path.insert(0, '..')
+import bitstring
+from bitstring import BitArray
+
+class All(unittest.TestCase):
+ def testCreationFromUint(self):
+ s = BitArray(uint=15, length=6)
+ self.assertEqual(s.bin, '001111')
+ s = BitArray(uint=0, length=1)
+ self.assertEqual(s.bin, '0')
+ s.uint = 1
+ self.assertEqual(s.uint, 1)
+ s = BitArray(length=8)
+ s.uint = 0
+ self.assertEqual(s.uint, 0)
+ s.uint = 255
+ self.assertEqual(s.uint, 255)
+ self.assertEqual(s.len, 8)
+ self.assertRaises(bitstring.CreationError, s._setuint, 256)
+
+ def testCreationFromOct(self):
+ s = BitArray(oct='7')
+ self.assertEqual(s.oct, '7')
+ self.assertEqual(s.bin, '111')
+ s.append('0o1')
+ self.assertEqual(s.bin, '111001')
+ s.oct = '12345670'
+ self.assertEqual(s.length, 24)
+ self.assertEqual(s.bin, '001010011100101110111000')
+ s = BitArray('0o123')
+ self.assertEqual(s.oct, '123')
+
+
+class NoPosAttribute(unittest.TestCase):
+ def testReplace(self):
+ s = BitArray('0b01')
+ s.replace('0b1', '0b11')
+ self.assertEqual(s, '0b011')
+
+ def testDelete(self):
+ s = BitArray('0b000000001')
+ del s[-1:]
+ self.assertEqual(s, '0b00000000')
+
+ def testInsert(self):
+ s = BitArray('0b00')
+ s.insert('0xf', 1)
+ self.assertEqual(s, '0b011110')
+
+ def testInsertParameters(self):
+ s = BitArray('0b111')
+ self.assertRaises(TypeError, s.insert, '0x4')
+
+ def testOverwrite(self):
+ s = BitArray('0b01110')
+ s.overwrite('0b000', 1)
+ self.assertEqual(s, '0b00000')
+
+ def testOverwriteParameters(self):
+ s = BitArray('0b0000')
+ self.assertRaises(TypeError, s.overwrite, '0b111')
+
+ def testPrepend(self):
+ s = BitArray('0b0')
+ s.prepend([1])
+ self.assertEqual(s, [1, 0])
+
+ def testRol(self):
+ s = BitArray('0b0001')
+ s.rol(1)
+ self.assertEqual(s, '0b0010')
+
+ def testRor(self):
+ s = BitArray('0b1000')
+ s.ror(1)
+ self.assertEqual(s, '0b0100')
+
+ def testSetItem(self):
+ s = BitArray('0b000100')
+ s[4:5] = '0xf'
+ self.assertEqual(s, '0b000111110')
+ s[0:1] = [1]
+ self.assertEqual(s, '0b100111110')
+
+
+class Bugs(unittest.TestCase):
+ def testAddingNonsense(self):
+ a = BitArray([0])
+ a += '0' # a uint of length 0 - so nothing gets added.
+ self.assertEqual(a, [0])
+ self.assertRaises(ValueError, a.__iadd__, '3')
+ self.assertRaises(ValueError, a.__iadd__, 'se')
+ self.assertRaises(ValueError, a.__iadd__, 'float:32')
+
+ def testPrependAfterCreationFromDataWithOffset(self):
+ s1 = BitArray(bytes=b'\x00\x00\x07\xff\xf0\x00', offset=21, length=15)
+ self.assertFalse(s1.any(0))
+ s1.prepend('0b0')
+ self.assertEqual(s1.bin, '0111111111111111')
+ s1.prepend('0b0')
+ self.assertEqual(s1.bin, '00111111111111111')
+
+
+class ByteAligned(unittest.TestCase):
+ def testDefault(self, defaultbytealigned=bitstring.bytealigned):
+ self.assertFalse(defaultbytealigned)
+
+ def testChangingIt(self):
+ bitstring.bytealigned = True
+ self.assertTrue(bitstring.bytealigned)
+ bitstring.bytealigned = False
+
+ def testNotByteAligned(self):
+ bitstring.bytealigned = False
+ a = BitArray('0x00 ff 0f f')
+ l = list(a.findall('0xff'))
+ self.assertEqual(l, [8, 20])
+ p = a.find('0x0f')[0]
+ self.assertEqual(p, 4)
+ p = a.rfind('0xff')[0]
+ self.assertEqual(p, 20)
+ s = list(a.split('0xff'))
+ self.assertEqual(s, ['0x00', '0xff0', '0xff'])
+ a.replace('0xff', '')
+ self.assertEqual(a, '0x000')
+
+ def testByteAligned(self):
+ bitstring.bytealigned = True
+ a = BitArray('0x00 ff 0f f')
+ l = list(a.findall('0xff'))
+ self.assertEqual(l, [8])
+ p = a.find('0x0f')[0]
+ self.assertEqual(p, 16)
+ p = a.rfind('0xff')[0]
+ self.assertEqual(p, 8)
+ s = list(a.split('0xff'))
+ self.assertEqual(s, ['0x00', '0xff0ff'])
+ a.replace('0xff', '')
+ self.assertEqual(a, '0x000ff')
+
+
+class SliceAssignment(unittest.TestCase):
+
+ def testSliceAssignmentSingleBit(self):
+ a = BitArray('0b000')
+ a[2] = '0b1'
+ self.assertEqual(a.bin, '001')
+ a[0] = BitArray(bin='1')
+ self.assertEqual(a.bin, '101')
+ a[-1] = '0b0'
+ self.assertEqual(a.bin, '100')
+ a[-3] = '0b0'
+ self.assertEqual(a.bin, '000')
+
+ def testSliceAssignmentSingleBitErrors(self):
+ a = BitArray('0b000')
+ self.assertRaises(IndexError, a.__setitem__, -4, '0b1')
+ self.assertRaises(IndexError, a.__setitem__, 3, '0b1')
+ self.assertRaises(TypeError, a.__setitem__, 1, 1.3)
+
+ def testSliceAssignmentMulipleBits(self):
+ a = BitArray('0b0')
+ a[0] = '0b110'
+ self.assertEqual(a.bin, '110')
+ a[0] = '0b000'
+ self.assertEqual(a.bin, '00010')
+ a[0:3] = '0b111'
+ self.assertEqual(a.bin, '11110')
+ a[-2:] = '0b011'
+ self.assertEqual(a.bin, '111011')
+ a[:] = '0x12345'
+ self.assertEqual(a.hex, '12345')
+ a[:] = ''
+ self.assertFalse(a)
+
+ def testSliceAssignmentMultipleBitsErrors(self):
+ a = BitArray()
+ self.assertRaises(IndexError, a.__setitem__, 0, '0b00')
+ a += '0b1'
+ a[0:2] = '0b11'
+ self.assertEqual(a, '0b11')
+
+ def testDelSliceStep(self):
+ a = BitArray(bin='100111101001001110110100101')
+ del a[::2]
+ self.assertEqual(a.bin, '0110010101100')
+ del a[3:9:3]
+ self.assertEqual(a.bin, '01101101100')
+ del a[2:7:1]
+ self.assertEqual(a.bin, '011100')
+ del a[::99]
+ self.assertEqual(a.bin, '11100')
+ del a[::1]
+ self.assertEqual(a.bin, '')
+
+ def testDelSliceNegativeStep(self):
+ a = BitArray('0b0001011101101100100110000001')
+ del a[5:23:-3]
+ self.assertEqual(a.bin, '0001011101101100100110000001')
+ del a[25:3:-3]
+ self.assertEqual(a.bin, '00011101010000100001')
+ del a[:6:-7]
+ self.assertEqual(a.bin, '000111010100010000')
+ del a[15::-2]
+ self.assertEqual(a.bin, '0010000000')
+ del a[::-1]
+ self.assertEqual(a.bin, '')
+
+ def testDelSliceErrors(self):
+ a = BitArray(10)
+ del a[5:3]
+ self.assertEqual(a, 10)
+ del a[3:5:-1]
+ self.assertEqual(a, 10)
+
+ def testDelSingleElement(self):
+ a = BitArray('0b0010011')
+ del a[-1]
+ self.assertEqual(a.bin, '001001')
+ del a[2]
+ self.assertEqual(a.bin, '00001')
+ try:
+ del a[5]
+ self.assertTrue(False)
+ except IndexError:
+ pass
+
+ def testSetSliceStep(self):
+ a = BitArray(bin='0000000000')
+ a[::2] = '0b11111'
+ self.assertEqual(a.bin, '1010101010')
+ a[4:9:3] = [0, 0]
+ self.assertEqual(a.bin, '1010001010')
+ a[7:3:-1] = [1, 1, 1, 0]
+ self.assertEqual(a.bin, '1010011110')
+ a[7:1:-2] = [0, 0, 1]
+ self.assertEqual(a.bin, '1011001010')
+ a[::-5] = [1, 1]
+ self.assertEqual(a.bin, '1011101011')
+ a[::-1] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
+ self.assertEqual(a.bin, '1000000000')
+
+ def testSetSliceErrors(self):
+ a = BitArray(8)
+ try:
+ a[::3] = [1]
+ self.assertTrue(False)
+ except ValueError:
+ pass
+ class A(object): pass
+ try:
+ a[1:2] = A()
+ self.assertTrue(False)
+ except TypeError:
+ pass
+ try:
+ a[1:4:-1] = [1, 2]
+ self.assertTrue(False)
+ except ValueError:
+ pass
+
+
+class Subclassing(unittest.TestCase):
+
+ def testIsInstance(self):
+ class SubBits(BitArray): pass
+ a = SubBits()
+ self.assertTrue(isinstance(a, SubBits))
+
+ def testClassType(self):
+ class SubBits(BitArray): pass
+ self.assertEqual(SubBits().__class__, SubBits)
+
+
+class Clear(unittest.TestCase):
+
+ def testClear(self):
+ s = BitArray('0xfff')
+ s.clear()
+ self.assertEqual(s.len, 0)
+
+
+class Copy(unittest.TestCase):
+
+ def testCopyMethod(self):
+ s = BitArray(9)
+ t = s.copy()
+ self.assertEqual(s, t)
+ t[0] = True
+ self.assertEqual(t.bin, '100000000')
+ self.assertEqual(s.bin, '000000000')
+
+
+class ModifiedByAddingBug(unittest.TestCase):
+
+ def testAdding(self):
+ a = BitArray('0b0')
+ b = BitArray('0b11')
+ c = a + b
+ self.assertEqual(c, '0b011')
+ self.assertEqual(a, '0b0')
+ self.assertEqual(b, '0b11') \ No newline at end of file
diff --git a/python/bitstring/test/test_bits.py b/python/bitstring/test/test_bits.py
new file mode 100644
index 000000000..402c03899
--- /dev/null
+++ b/python/bitstring/test/test_bits.py
@@ -0,0 +1,378 @@
+#!/usr/bin/env python
+
+import unittest
+import sys
+
+sys.path.insert(0, '..')
+import bitstring
+from bitstring import MmapByteArray
+from bitstring import Bits, BitArray, ConstByteStore, ByteStore
+
+class Creation(unittest.TestCase):
+ def testCreationFromBytes(self):
+ s = Bits(bytes=b'\xa0\xff')
+ self.assertEqual((s.len, s.hex), (16, 'a0ff'))
+ s = Bits(bytes=b'abc', length=0)
+ self.assertEqual(s, '')
+
+ def testCreationFromBytesErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, bytes=b'abc', length=25)
+
+ def testCreationFromDataWithOffset(self):
+ s1 = Bits(bytes=b'\x0b\x1c\x2f', offset=0, length=20)
+ s2 = Bits(bytes=b'\xa0\xb1\xC2', offset=4)
+ self.assertEqual((s2.len, s2.hex), (20, '0b1c2'))
+ self.assertEqual((s1.len, s1.hex), (20, '0b1c2'))
+ self.assertTrue(s1 == s2)
+
+ def testCreationFromHex(self):
+ s = Bits(hex='0xA0ff')
+ self.assertEqual((s.len, s.hex), (16, 'a0ff'))
+ s = Bits(hex='0x0x0X')
+ self.assertEqual((s.length, s.hex), (0, ''))
+
+ def testCreationFromHexWithWhitespace(self):
+ s = Bits(hex=' \n0 X a 4e \r3 \n')
+ self.assertEqual(s.hex, 'a4e3')
+
+ def testCreationFromHexErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, hex='0xx0')
+ self.assertRaises(bitstring.CreationError, Bits, hex='0xX0')
+ self.assertRaises(bitstring.CreationError, Bits, hex='0Xx0')
+ self.assertRaises(bitstring.CreationError, Bits, hex='-2e')
+ # These really should fail, but it's awkward and not a big deal...
+# self.assertRaises(bitstring.CreationError, Bits, '0x2', length=2)
+# self.assertRaises(bitstring.CreationError, Bits, '0x3', offset=1)
+
+ def testCreationFromBin(self):
+ s = Bits(bin='1010000011111111')
+ self.assertEqual((s.length, s.hex), (16, 'a0ff'))
+ s = Bits(bin='00')[:1]
+ self.assertEqual(s.bin, '0')
+ s = Bits(bin=' 0000 \n 0001\r ')
+ self.assertEqual(s.bin, '00000001')
+
+ def testCreationFromBinWithWhitespace(self):
+ s = Bits(bin=' \r\r\n0 B 00 1 1 \t0 ')
+ self.assertEqual(s.bin, '00110')
+
+ def testCreationFromOctErrors(self):
+ s = Bits('0b00011')
+ self.assertRaises(bitstring.InterpretError, s._getoct)
+ self.assertRaises(bitstring.CreationError, s._setoct, '8')
+
+ def testCreationFromUintWithOffset(self):
+ self.assertRaises(bitstring.Error, Bits, uint=12, length=8, offset=1)
+
+ def testCreationFromUintErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, uint=-1, length=10)
+ self.assertRaises(bitstring.CreationError, Bits, uint=12)
+ self.assertRaises(bitstring.CreationError, Bits, uint=4, length=2)
+ self.assertRaises(bitstring.CreationError, Bits, uint=0, length=0)
+ self.assertRaises(bitstring.CreationError, Bits, uint=12, length=-12)
+
+ def testCreationFromInt(self):
+ s = Bits(int=0, length=4)
+ self.assertEqual(s.bin, '0000')
+ s = Bits(int=1, length=2)
+ self.assertEqual(s.bin, '01')
+ s = Bits(int=-1, length=11)
+ self.assertEqual(s.bin, '11111111111')
+ s = Bits(int=12, length=7)
+ self.assertEqual(s.int, 12)
+ s = Bits(int=-243, length=108)
+ self.assertEqual((s.int, s.length), (-243, 108))
+ for length in range(6, 10):
+ for value in range(-17, 17):
+ s = Bits(int=value, length=length)
+ self.assertEqual((s.int, s.length), (value, length))
+ s = Bits(int=10, length=8)
+
+ def testCreationFromIntErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, int=-1, length=0)
+ self.assertRaises(bitstring.CreationError, Bits, int=12)
+ self.assertRaises(bitstring.CreationError, Bits, int=4, length=3)
+ self.assertRaises(bitstring.CreationError, Bits, int=-5, length=3)
+
+ def testCreationFromSe(self):
+ for i in range(-100, 10):
+ s = Bits(se=i)
+ self.assertEqual(s.se, i)
+
+ def testCreationFromSeWithOffset(self):
+ self.assertRaises(bitstring.CreationError, Bits, se=-13, offset=1)
+
+ def testCreationFromSeErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, se=-5, length=33)
+ s = Bits(bin='001000')
+ self.assertRaises(bitstring.InterpretError, s._getse)
+
+ def testCreationFromUe(self):
+ [self.assertEqual(Bits(ue=i).ue, i) for i in range(0, 20)]
+
+ def testCreationFromUeWithOffset(self):
+ self.assertRaises(bitstring.CreationError, Bits, ue=104, offset=2)
+
+ def testCreationFromUeErrors(self):
+ self.assertRaises(bitstring.CreationError, Bits, ue=-1)
+ self.assertRaises(bitstring.CreationError, Bits, ue=1, length=12)
+ s = Bits(bin='10')
+ self.assertRaises(bitstring.InterpretError, s._getue)
+
+ def testCreationFromBool(self):
+ a = Bits('bool=1')
+ self.assertEqual(a, 'bool=1')
+ b = Bits('bool=0')
+ self.assertEqual(b, [0])
+ c = bitstring.pack('2*bool', 0, 1)
+ self.assertEqual(c, '0b01')
+
+ def testCreationKeywordError(self):
+ self.assertRaises(bitstring.CreationError, Bits, squirrel=5)
+
+ def testDataStoreType(self):
+ a = Bits('0xf')
+ self.assertEqual(type(a._datastore), bitstring.ConstByteStore)
+
+
+class Initialisation(unittest.TestCase):
+ def testEmptyInit(self):
+ a = Bits()
+ self.assertEqual(a, '')
+
+ def testNoPos(self):
+ a = Bits('0xabcdef')
+ try:
+ a.pos
+ except AttributeError:
+ pass
+ else:
+ assert False
+
+ def testFind(self):
+ a = Bits('0xabcd')
+ r = a.find('0xbc')
+ self.assertEqual(r[0], 4)
+ r = a.find('0x23462346246', bytealigned=True)
+ self.assertFalse(r)
+
+ def testRfind(self):
+ a = Bits('0b11101010010010')
+ b = a.rfind('0b010')
+ self.assertEqual(b[0], 11)
+
+ def testFindAll(self):
+ a = Bits('0b0010011')
+ b = list(a.findall([1]))
+ self.assertEqual(b, [2, 5, 6])
+
+
+class Cut(unittest.TestCase):
+ def testCut(self):
+ s = Bits(30)
+ for t in s.cut(3):
+ self.assertEqual(t, [0] * 3)
+
+
+class InterleavedExpGolomb(unittest.TestCase):
+ def testCreation(self):
+ s1 = Bits(uie=0)
+ s2 = Bits(uie=1)
+ self.assertEqual(s1, [1])
+ self.assertEqual(s2, [0, 0, 1])
+ s1 = Bits(sie=0)
+ s2 = Bits(sie=-1)
+ s3 = Bits(sie=1)
+ self.assertEqual(s1, [1])
+ self.assertEqual(s2, [0, 0, 1, 1])
+ self.assertEqual(s3, [0, 0, 1, 0])
+
+ def testCreationFromProperty(self):
+ s = BitArray()
+ s.uie = 45
+ self.assertEqual(s.uie, 45)
+ s.sie = -45
+ self.assertEqual(s.sie, -45)
+
+ def testInterpretation(self):
+ for x in range(101):
+ self.assertEqual(Bits(uie=x).uie, x)
+ for x in range(-100, 100):
+ self.assertEqual(Bits(sie=x).sie, x)
+
+ def testErrors(self):
+ for f in ['sie=100, 0b1001', '0b00', 'uie=100, 0b1001']:
+ s = Bits(f)
+ self.assertRaises(bitstring.InterpretError, s._getsie)
+ self.assertRaises(bitstring.InterpretError, s._getuie)
+ self.assertRaises(ValueError, Bits, 'uie=-10')
+
+
+class FileBased(unittest.TestCase):
+ def setUp(self):
+ self.a = Bits(filename='smalltestfile')
+ self.b = Bits(filename='smalltestfile', offset=16)
+ self.c = Bits(filename='smalltestfile', offset=20, length=16)
+ self.d = Bits(filename='smalltestfile', offset=20, length=4)
+
+ def testCreationWithOffset(self):
+ self.assertEqual(self.a, '0x0123456789abcdef')
+ self.assertEqual(self.b, '0x456789abcdef')
+ self.assertEqual(self.c, '0x5678')
+
+ def testBitOperators(self):
+ x = self.b[4:20]
+ self.assertEqual(x, '0x5678')
+ self.assertEqual((x & self.c).hex, self.c.hex)
+ self.assertEqual(self.c ^ self.b[4:20], 16)
+ self.assertEqual(self.a[23:36] | self.c[3:], self.c[3:])
+
+ def testAddition(self):
+ h = self.d + '0x1'
+ x = self.a[20:24] + self.c[-4:] + self.c[8:12]
+ self.assertEqual(x, '0x587')
+ x = self.b + x
+ self.assertEqual(x.hex, '456789abcdef587')
+ x = BitArray(x)
+ del x[12:24]
+ self.assertEqual(x, '0x456abcdef587')
+
+class Mmap(unittest.TestCase):
+ def setUp(self):
+ self.f = open('smalltestfile', 'rb')
+
+ def tearDown(self):
+ self.f.close()
+
+ def testByteArrayEquivalence(self):
+ a = MmapByteArray(self.f)
+ self.assertEqual(a.bytelength, 8)
+ self.assertEqual(len(a), 8)
+ self.assertEqual(a[0], 0x01)
+ self.assertEqual(a[1], 0x23)
+ self.assertEqual(a[7], 0xef)
+ self.assertEqual(a[0:1], bytearray([1]))
+ self.assertEqual(a[:], bytearray([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]))
+ self.assertEqual(a[2:4], bytearray([0x45, 0x67]))
+
+ def testWithLength(self):
+ a = MmapByteArray(self.f, 3)
+ self.assertEqual(a[0], 0x01)
+ self.assertEqual(len(a), 3)
+
+ def testWithOffset(self):
+ a = MmapByteArray(self.f, None, 5)
+ self.assertEqual(len(a), 3)
+ self.assertEqual(a[0], 0xab)
+
+ def testWithLengthAndOffset(self):
+ a = MmapByteArray(self.f, 3, 3)
+ self.assertEqual(len(a), 3)
+ self.assertEqual(a[0], 0x67)
+ self.assertEqual(a[:], bytearray([0x67, 0x89, 0xab]))
+
+
+class Comparisons(unittest.TestCase):
+ def testUnorderable(self):
+ a = Bits(5)
+ b = Bits(5)
+ self.assertRaises(TypeError, a.__lt__, b)
+ self.assertRaises(TypeError, a.__gt__, b)
+ self.assertRaises(TypeError, a.__le__, b)
+ self.assertRaises(TypeError, a.__ge__, b)
+
+
+class Subclassing(unittest.TestCase):
+
+ def testIsInstance(self):
+ class SubBits(bitstring.Bits): pass
+ a = SubBits()
+ self.assertTrue(isinstance(a, SubBits))
+
+ def testClassType(self):
+ class SubBits(bitstring.Bits): pass
+ self.assertEqual(SubBits().__class__, SubBits)
+
+
+class LongBoolConversion(unittest.TestCase):
+
+ def testLongBool(self):
+ a = Bits(1000)
+ b = bool(a)
+ self.assertTrue(b is False)
+
+
+# Some basic tests for the private ByteStore classes
+
+class ConstByteStoreCreation(unittest.TestCase):
+
+ def testProperties(self):
+ a = ConstByteStore(bytearray(b'abc'))
+ self.assertEqual(a.bytelength, 3)
+ self.assertEqual(a.offset, 0)
+ self.assertEqual(a.bitlength, 24)
+ self.assertEqual(a._rawarray, b'abc')
+
+ def testGetBit(self):
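+ # Bits are numbered MSB-first: 0x0f is 0b00001111, so bit 0 is False and bit 4 is True.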
+ a = ConstByteStore(bytearray([0x0f]))
+ self.assertEqual(a.getbit(0), False)
+ self.assertEqual(a.getbit(3), False)
+ self.assertEqual(a.getbit(4), True)
+ self.assertEqual(a.getbit(7), True)
+
+ b = ConstByteStore(bytearray([0x0f]), 7, 1)
+ self.assertEqual(b.getbit(2), False)
+ self.assertEqual(b.getbit(3), True)
+
+ def testGetByte(self):
+ a = ConstByteStore(bytearray(b'abcde'), 1, 13)
+ self.assertEqual(a.getbyte(0), 97)
+ self.assertEqual(a.getbyte(1), 98)
+ self.assertEqual(a.getbyte(4), 101)
+
+
+class PadToken(unittest.TestCase):
+
+ def testCreation(self):
+ a = Bits('pad:10')
+ self.assertEqual(a, Bits(10))
+ b = Bits('pad:0')
+ self.assertEqual(b, Bits())
+ c = Bits('0b11, pad:1, 0b111')
+ self.assertEqual(c, Bits('0b110111'))
+
+ def testPack(self):
+ s = bitstring.pack('0b11, pad:3=5, 0b1')
+ self.assertEqual(s.bin, '110001')
+ d = bitstring.pack('pad:c', c=12)
+ self.assertEqual(d, Bits(12))
+ e = bitstring.pack('0xf, uint:12, pad:1, bin, pad:4, 0b10', 0, '111')
+ self.assertEqual(e.bin, '11110000000000000111000010')
+
+ def testUnpack(self):
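+ # pad tokens consume bits but add nothing to the values returned.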
+ s = Bits('0b111000111')
+ x, y = s.unpack('3, pad:3, 3')
+ self.assertEqual((x, y), (7, 7))
+ x, y = s.unpack('2, pad:2, bin')
+ self.assertEqual((x, y), (3, '00111'))
+ x = s.unpack('pad:1, pad:2, pad:3')
+ self.assertEqual(x, [])
+
+
+class ModifiedByAddingBug(unittest.TestCase):
+
+ def testAdding(self):
+ a = Bits('0b0')
+ b = Bits('0b11')
+ c = a + b
+ self.assertEqual(c, '0b011')
+ self.assertEqual(a, '0b0')
+ self.assertEqual(b, '0b11')
+
+ def testAdding2(self):
+ a = Bits(100)
+ b = Bits(101)
+ c = a + b
+ self.assertEqual(a, 100)
+ self.assertEqual(b, 101)
+ self.assertEqual(c, 201)
diff --git a/python/bitstring/test/test_bitstore.py b/python/bitstring/test/test_bitstore.py
new file mode 100644
index 000000000..9f5c9036e
--- /dev/null
+++ b/python/bitstring/test/test_bitstore.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+import unittest
+import sys
+sys.path.insert(0, '..')
+from bitstring import ByteStore, ConstByteStore, equal, offsetcopy
+
+
+class OffsetCopy(unittest.TestCase):
+ def testStraightCopy(self):
+ s = ByteStore(bytearray([10, 5, 1]), 24, 0)
+ t = offsetcopy(s, 0)
+ self.assertEqual(t._rawarray, bytearray([10, 5, 1]))
+
+ def testOffsetIncrease(self):
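+ # Re-storing the 24 bits 0b00000001 * 3 at bit offset 4 spills into a fourth byte:
+ # 00000000 00010000 00010000 00010000, i.e. bytearray([0, 16, 16, 16]).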
+ s = ByteStore(bytearray([1, 1, 1]), 24, 0)
+ t = offsetcopy(s, 4)
+ self.assertEqual(t.bitlength, 24)
+ self.assertEqual(t.offset, 4)
+ self.assertEqual(t._rawarray, bytearray([0, 16, 16, 16]))
+
+
+class Equals(unittest.TestCase):
+
+ def testBothSingleByte(self):
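+ # 0b10000000, 0b01000000 and 0b00100000 all store the bits '100' when read
+ # from offsets 0, 1 and 2 respectively.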
+ s = ByteStore(bytearray([128]), 3, 0)
+ t = ByteStore(bytearray([64]), 3, 1)
+ u = ByteStore(bytearray([32]), 3, 2)
+ self.assertTrue(equal(s, t))
+ self.assertTrue(equal(s, u))
+ self.assertTrue(equal(u, t))
+
+ def testOneSingleByte(self):
+ s = ByteStore(bytearray([1, 0]), 2, 7)
+ t = ByteStore(bytearray([64]), 2, 1)
+ self.assertTrue(equal(s, t))
+ self.assertTrue(equal(t, s))
\ No newline at end of file
diff --git a/python/bitstring/test/test_bitstream.py b/python/bitstring/test/test_bitstream.py
new file mode 100644
index 000000000..f94193d32
--- /dev/null
+++ b/python/bitstring/test/test_bitstream.py
@@ -0,0 +1,3940 @@
+#!/usr/bin/env python
+
+import unittest
+import sys
+sys.path.insert(0, '..')
+import bitstring
+import copy
+import os
+import collections
+from bitstring import BitStream, ConstBitStream, pack
+from bitstring import ByteStore, offsetcopy
+
+
+class FlexibleInitialisation(unittest.TestCase):
+ def testFlexibleInitialisation(self):
+ a = BitStream('uint:8=12')
+ c = BitStream(' uint : 8 = 12')
+ self.assertTrue(a == c == BitStream(uint=12, length=8))
+ self.assertEqual(a.uint, 12)
+ a = BitStream(' int:2= -1')
+ b = BitStream('int :2 = -1')
+ c = BitStream(' int: 2 =-1 ')
+ self.assertTrue(a == b == c == BitStream(int=-1, length=2))
+
+ def testFlexibleInitialisation2(self):
+ h = BitStream('hex=12')
+ o = BitStream('oct=33')
+ b = BitStream('bin=10')
+ self.assertEqual(h, '0x12')
+ self.assertEqual(o, '0o33')
+ self.assertEqual(b, '0b10')
+
+ def testFlexibleInitialisation3(self):
+ for s in ['se=-1', ' se = -1 ', 'se = -1']:
+ a = BitStream(s)
+ self.assertEqual(a.se, -1)
+ for s in ['ue=23', 'ue =23', 'ue = 23']:
+ a = BitStream(s)
+ self.assertEqual(a.ue, 23)
+
+ def testMultipleStringInitialisation(self):
+ a = BitStream('0b1 , 0x1')
+ self.assertEqual(a, '0b10001')
+ a = BitStream('ue=5, ue=1, se=-2')
+ self.assertEqual(a.read('ue'), 5)
+ self.assertEqual(a.read('ue'), 1)
+ self.assertEqual(a.read('se'), -2)
+ b = BitStream('uint:32 = 12, 0b11') + 'int:100=-100, 0o44'
+ self.assertEqual(b.read(32).uint, 12)
+ self.assertEqual(b.read(2).bin, '11')
+ self.assertEqual(b.read(100).int, -100)
+
+
+class Reading(unittest.TestCase):
+ def testReadBits(self):
+ s = BitStream(bytes=b'\x4d\x55')
+ self.assertEqual(s.read(4).hex, '4')
+ self.assertEqual(s.read(8).hex, 'd5')
+ self.assertEqual(s.read(1), [0])
+ self.assertEqual(s.read(3).bin, '101')
+ self.assertFalse(s.read(0))
+
+ def testReadByte(self):
+ s = BitStream(hex='4d55')
+ self.assertEqual(s.read(8).hex, '4d')
+ self.assertEqual(s.read(8).hex, '55')
+
+ def testReadBytes(self):
+ s = BitStream(hex='0x112233448811')
+ self.assertEqual(s.read(3 * 8).hex, '112233')
+ self.assertRaises(ValueError, s.read, -2 * 8)
+ s.bitpos += 1
+ self.assertEqual(s.read(2 * 8).bin, '1000100100010000')
+
+ def testReadUE(self):
+ self.assertRaises(bitstring.InterpretError, BitStream('')._getue)
+ # The numbers 0 to 8 as unsigned Exponential-Golomb codes
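+ # (each code is k zero bits followed by the (k + 1)-bit binary form of n + 1)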
+ s = BitStream(bin='1 010 011 00100 00101 00110 00111 0001000 0001001')
+ self.assertEqual(s.pos, 0)
+ for i in range(9):
+ self.assertEqual(s.read('ue'), i)
+ self.assertRaises(bitstring.ReadError, s.read, 'ue')
+
+ def testReadSE(self):
+ s = BitStream(bin='010 00110 0001010 0001000 00111')
+ self.assertEqual(s.read('se'), 1)
+ self.assertEqual(s.read('se'), 3)
+ self.assertEqual(s.readlist(3 * ['se']), [5, 4, -3])
+
+
+class Find(unittest.TestCase):
+ def testFind1(self):
+ s = ConstBitStream(bin='0b0000110110000')
+ self.assertTrue(s.find(BitStream(bin='11011'), False))
+ self.assertEqual(s.bitpos, 4)
+ self.assertEqual(s.read(5).bin, '11011')
+ s.bitpos = 0
+ self.assertFalse(s.find('0b11001', False))
+
+ def testFind2(self):
+ s = BitStream(bin='0')
+ self.assertTrue(s.find(s, False))
+ self.assertEqual(s.pos, 0)
+ self.assertFalse(s.find('0b00', False))
+ self.assertRaises(ValueError, s.find, BitStream(), False)
+
+ def testFindWithOffset(self):
+ s = BitStream(hex='0x112233')[4:]
+ self.assertTrue(s.find('0x23', False))
+ self.assertEqual(s.pos, 8)
+
+ def testFindCornerCases(self):
+ s = BitStream(bin='000111000111')
+ self.assertTrue(s.find('0b000'))
+ self.assertEqual(s.pos, 0)
+ self.assertTrue(s.find('0b000'))
+ self.assertEqual(s.pos, 0)
+ self.assertTrue(s.find('0b0111000111'))
+ self.assertEqual(s.pos, 2)
+ self.assertTrue(s.find('0b000', start=2))
+ self.assertEqual(s.pos, 6)
+ self.assertTrue(s.find('0b111', start=6))
+ self.assertEqual(s.pos, 9)
+ s.pos += 2
+ self.assertTrue(s.find('0b1', start=s.pos))
+
+ def testFindBytes(self):
+ s = BitStream('0x010203040102ff')
+ self.assertFalse(s.find('0x05', bytealigned=True))
+ self.assertTrue(s.find('0x02', bytealigned=True))
+ self.assertEqual(s.read(16).hex, '0203')
+ self.assertTrue(s.find('0x02', start=s.bitpos, bytealigned=True))
+ s.read(1)
+ self.assertFalse(s.find('0x02', start=s.bitpos, bytealigned=True))
+
+ def testFindBytesAlignedCornerCases(self):
+ s = BitStream('0xff')
+ self.assertTrue(s.find(s))
+ self.assertFalse(s.find(BitStream(hex='0x12')))
+ self.assertFalse(s.find(BitStream(hex='0xffff')))
+
+ def testFindBytesBitpos(self):
+ s = BitStream(hex='0x1122334455')
+ s.pos = 2
+ s.find('0x66', bytealigned=True)
+ self.assertEqual(s.pos, 2)
+ s.pos = 38
+ s.find('0x66', bytealigned=True)
+ self.assertEqual(s.pos, 38)
+
+ def testFindByteAligned(self):
+ s = BitStream(hex='0x12345678')
+ self.assertTrue(s.find(BitStream(hex='0x56'), bytealigned=True))
+ self.assertEqual(s.bytepos, 2)
+ s.pos = 0
+ self.assertFalse(s.find(BitStream(hex='0x45'), bytealigned=True))
+ s = BitStream('0x1234')
+ s.find('0x1234')
+ self.assertTrue(s.find('0x1234'))
+ s += '0b111'
+ s.pos = 3
+ s.find('0b1', start=17, bytealigned=True)
+ self.assertFalse(s.find('0b1', start=17, bytealigned=True))
+ self.assertEqual(s.pos, 3)
+
+ def testFindByteAlignedWithOffset(self):
+ s = BitStream(hex='0x112233')[4:]
+ self.assertTrue(s.find(BitStream(hex='0x23')))
+
+ def testFindByteAlignedErrors(self):
+ s = BitStream(hex='0xffff')
+ self.assertRaises(ValueError, s.find, '')
+ self.assertRaises(ValueError, s.find, BitStream())
+
+
+class Rfind(unittest.TestCase):
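+ # rfind searches backwards and returns a tuple holding the bit position of
+ # the rightmost match, or an empty tuple if there is no match.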
+ def testRfind(self):
+ a = BitStream('0b001001001')
+ b = a.rfind('0b001')
+ self.assertEqual(b, (6,))
+ self.assertEqual(a.pos, 6)
+ big = BitStream(length=100000) + '0x12' + BitStream(length=10000)
+ found = big.rfind('0x12', bytealigned=True)
+ self.assertEqual(found, (100000,))
+ self.assertEqual(big.pos, 100000)
+
+ def testRfindByteAligned(self):
+ a = BitStream('0x8888')
+ b = a.rfind('0b1', bytealigned=True)
+ self.assertEqual(b, (8,))
+ self.assertEqual(a.pos, 8)
+
+ def testRfindStartbit(self):
+ a = BitStream('0x0000ffffff')
+ b = a.rfind('0x0000', start=1, bytealigned=True)
+ self.assertEqual(b, ())
+ self.assertEqual(a.pos, 0)
+ b = a.rfind('0x00', start=1, bytealigned=True)
+ self.assertEqual(b, (8,))
+ self.assertEqual(a.pos, 8)
+
+ def testRfindEndbit(self):
+ a = BitStream('0x000fff')
+ b = a.rfind('0b011', bytealigned=False, start=0, end=14)
+ self.assertEqual(bool(b), True)
+ b = a.rfind('0b011', False, 0, 13)
+ self.assertEqual(b, ())
+
+ def testRfindErrors(self):
+ a = BitStream('0x43234234')
+ self.assertRaises(ValueError, a.rfind, '', bytealigned=True)
+ self.assertRaises(ValueError, a.rfind, '0b1', start=-99, bytealigned=True)
+ self.assertRaises(ValueError, a.rfind, '0b1', end=33, bytealigned=True)
+ self.assertRaises(ValueError, a.rfind, '0b1', start=10, end=9, bytealigned=True)
+
+
+class Shift(unittest.TestCase):
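+ # Shifts preserve the length: vacated bits are filled with zeros and bits
+ # shifted off the end are discarded.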
+ def testShiftLeft(self):
+ s = BitStream('0b1010')
+ t = s << 1
+ self.assertEqual(s.bin, '1010')
+ self.assertEqual(t.bin, '0100')
+ t = t << 0
+ self.assertEqual(t, '0b0100')
+ t = t << 100
+ self.assertEqual(t.bin, '0000')
+
+ def testShiftLeftErrors(self):
+ s = BitStream()
+ self.assertRaises(ValueError, s.__lshift__, 1)
+ s = BitStream('0xf')
+ self.assertRaises(ValueError, s.__lshift__, -1)
+
+ def testShiftRight(self):
+ s = BitStream('0b1010')
+ t = s >> 1
+ self.assertEqual(s.bin, '1010')
+ self.assertEqual(t.bin, '0101')
+ q = s >> 0
+ self.assertEqual(q, '0b1010')
+ q.replace('0b1010', '')
+ s = s >> 100
+ self.assertEqual(s.bin, '0000')
+
+ def testShiftRightErrors(self):
+ s = BitStream()
+ self.assertRaises(ValueError, s.__rshift__, 1)
+ s = BitStream('0xf')
+ self.assertRaises(ValueError, s.__rshift__, -1)
+
+ def testShiftRightInPlace(self):
+ s = BitStream('0xffff')[4:12]
+ s >>= 1
+ self.assertEqual(s, '0b01111111')
+ s = BitStream('0b11011')
+ s >>= 2
+ self.assertEqual(s.bin, '00110')
+ s >>= 100000000000000
+ self.assertEqual(s.bin, '00000')
+ s = BitStream('0xff')
+ s >>= 1
+ self.assertEqual(s, '0x7f')
+ s >>= 0
+ self.assertEqual(s, '0x7f')
+
+ def testShiftRightInPlaceErrors(self):
+ s = BitStream()
+ self.assertRaises(ValueError, s.__irshift__, 1)
+ s += '0b11'
+ self.assertRaises(ValueError, s.__irshift__, -1)
+
+ def testShiftLeftInPlace(self):
+ s = BitStream('0xffff')
+ t = s[4:12]
+ t <<= 2
+ self.assertEqual(t, '0b11111100')
+ s = BitStream('0b11011')
+ s <<= 2
+ self.assertEqual(s.bin, '01100')
+ s <<= 100000000000000000000
+ self.assertEqual(s.bin, '00000')
+ s = BitStream('0xff')
+ s <<= 1
+ self.assertEqual(s, '0xfe')
+ s <<= 0
+ self.assertEqual(s, '0xfe')
+
+ def testShiftLeftInPlaceErrors(self):
+ s = BitStream()
+ self.assertRaises(ValueError, s.__ilshift__, 1)
+ s += '0b11'
+ self.assertRaises(ValueError, s.__ilshift__, -1)
+
+
+class Replace(unittest.TestCase):
+ def testReplace1(self):
+ a = BitStream('0b1')
+ n = a.replace('0b1', '0b0', bytealigned=True)
+ self.assertEqual(a.bin, '0')
+ self.assertEqual(n, 1)
+ n = a.replace('0b1', '0b0', bytealigned=True)
+ self.assertEqual(n, 0)
+
+ def testReplace2(self):
+ a = BitStream('0b00001111111')
+ n = a.replace('0b1', '0b0', bytealigned=True)
+ self.assertEqual(a.bin, '00001111011')
+ self.assertEqual(n, 1)
+ n = a.replace('0b1', '0b0', bytealigned=False)
+ self.assertEqual(a.bin, '00000000000')
+ self.assertEqual(n, 6)
+
+ def testReplace3(self):
+ a = BitStream('0b0')
+ n = a.replace('0b0', '0b110011111', bytealigned=True)
+ self.assertEqual(n, 1)
+ self.assertEqual(a.bin, '110011111')
+ n = a.replace('0b11', '', bytealigned=False)
+ self.assertEqual(n, 3)
+ self.assertEqual(a.bin, '001')
+
+ def testReplace4(self):
+ a = BitStream('0x00114723ef4732344700')
+ n = a.replace('0x47', '0x00', bytealigned=True)
+ self.assertEqual(n, 3)
+ self.assertEqual(a.hex, '00110023ef0032340000')
+ a.replace('0x00', '', bytealigned=True)
+ self.assertEqual(a.hex, '1123ef3234')
+ a.replace('0x11', '', start=1, bytealigned=True)
+ self.assertEqual(a.hex, '1123ef3234')
+ a.replace('0x11', '0xfff', end=7, bytealigned=True)
+ self.assertEqual(a.hex, '1123ef3234')
+ a.replace('0x11', '0xfff', end=8, bytealigned=True)
+ self.assertEqual(a.hex, 'fff23ef3234')
+
+ def testReplace5(self):
+ a = BitStream('0xab')
+ b = BitStream('0xcd')
+ c = BitStream('0xabef')
+ c.replace(a, b)
+ self.assertEqual(c, '0xcdef')
+ self.assertEqual(a, '0xab')
+ self.assertEqual(b, '0xcd')
+ a = BitStream('0x0011223344')
+ a.pos = 12
+ a.replace('0x11', '0xfff', bytealigned=True)
+ self.assertEqual(a.pos, 8)
+ self.assertEqual(a, '0x00fff223344')
+
+ def testReplaceWithSelf(self):
+ a = BitStream('0b11')
+ a.replace('0b1', a)
+ self.assertEqual(a, '0xf')
+ a.replace(a, a)
+ self.assertEqual(a, '0xf')
+
+ def testReplaceCount(self):
+ a = BitStream('0x223344223344223344')
+ n = a.replace('0x2', '0x0', count=0, bytealigned=True)
+ self.assertEqual(n, 0)
+ self.assertEqual(a.hex, '223344223344223344')
+ n = a.replace('0x2', '0x0', count=1, bytealigned=True)
+ self.assertEqual(n, 1)
+ self.assertEqual(a.hex, '023344223344223344')
+ n = a.replace('0x33', '', count=2, bytealigned=True)
+ self.assertEqual(n, 2)
+ self.assertEqual(a.hex, '02442244223344')
+ n = a.replace('0x44', '0x4444', count=1435, bytealigned=True)
+ self.assertEqual(n, 3)
+ self.assertEqual(a.hex, '02444422444422334444')
+
+ def testReplaceBitpos(self):
+ a = BitStream('0xff')
+ a.bitpos = 8
+ a.replace('0xff', '', bytealigned=True)
+ self.assertEqual(a.bitpos, 0)
+ a = BitStream('0b0011110001')
+ a.bitpos = 4
+ a.replace('0b1', '0b000')
+ self.assertEqual(a.bitpos, 8)
+ a = BitStream('0b1')
+ a.bitpos = 1
+ a.replace('0b1', '0b11111', bytealigned=True)
+ self.assertEqual(a.bitpos, 5)
+ a.replace('0b11', '0b0', False)
+ self.assertEqual(a.bitpos, 3)
+ a.append('0b00')
+ a.replace('0b00', '0xffff')
+ self.assertEqual(a.bitpos, 17)
+
+ def testReplaceErrors(self):
+ a = BitStream('0o123415')
+ self.assertRaises(ValueError, a.replace, '', '0o7', bytealigned=True)
+ self.assertRaises(ValueError, a.replace, '0b1', '0b1', start=-100, bytealigned=True)
+ self.assertRaises(ValueError, a.replace, '0b1', '0b1', end=19, bytealigned=True)
+
+
+class SliceAssignment(unittest.TestCase):
+
+ # TODO: Move this to another class
+ def testSetSlice(self):
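+ # Slice assignment needn't preserve length: the slice is deleted and the
+ # new value is inserted in its place.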
+ a = BitStream()
+ a[0:0] = '0xabcdef'
+ self.assertEqual(a.bytepos, 3)
+ a[4:16] = ''
+ self.assertEqual(a, '0xaef')
+ self.assertEqual(a.bitpos, 4)
+ a[8:] = '0x00'
+ self.assertEqual(a, '0xae00')
+ self.assertEqual(a.bytepos, 2)
+ a += '0xf'
+ a[8:] = '0xe'
+ self.assertEqual(a, '0xaee')
+ self.assertEqual(a.bitpos, 12)
+ b = BitStream()
+ b[0:800] = '0xffee'
+ self.assertEqual(b, '0xffee')
+ b[4:48] = '0xeed123'
+ self.assertEqual(b, '0xfeed123')
+ b[-800:8] = '0x0000'
+ self.assertEqual(b, '0x0000ed123')
+ a = BitStream('0xabcde')
+ self.assertEqual(a[-100:-90], '')
+ self.assertEqual(a[-100:-16], '0xa')
+ a[-100:-16] = '0x0'
+ self.assertEqual(a, '0x0bcde')
+
+ def testInsertingUsingSetItem(self):
+ a = BitStream()
+ a[0:0] = '0xdeadbeef'
+ self.assertEqual(a, '0xdeadbeef')
+ self.assertEqual(a.bytepos, 4)
+ a[16:16] = '0xfeed'
+ self.assertEqual(a, '0xdeadfeedbeef')
+ self.assertEqual(a.bytepos, 4)
+ a[0:0] = '0xa'
+ self.assertEqual(a, '0xadeadfeedbeef')
+ self.assertEqual(a.bitpos, 4)
+ a.bytepos = 6
+ a[0:0] = '0xff'
+ self.assertEqual(a.bytepos, 1)
+ a[8:0] = '0x00000'
+ self.assertTrue(a.startswith('0xff00000adead'))
+
+ def testSliceAssignmentBitPos(self):
+ a = BitStream('int:64=-1')
+ a.pos = 64
+ a[0:8] = ''
+ self.assertEqual(a.pos, 0)
+ a.pos = 52
+ a[48:56] = '0x0000'
+ self.assertEqual(a.pos, 64)
+ a[10:10] = '0x0'
+ self.assertEqual(a.pos, 14)
+ a[56:68] = '0x000'
+ self.assertEqual(a.pos, 14)
+
+
+class Pack(unittest.TestCase):
+ def testPack1(self):
+ s = bitstring.pack('uint:6, bin, hex, int:6, se, ue, oct', 10, '0b110', 'ff', -1, -6, 6, '54')
+ t = BitStream('uint:6=10, 0b110, 0xff, int:6=-1, se=-6, ue=6, oct=54')
+ self.assertEqual(s, t)
+ self.assertRaises(bitstring.CreationError, pack, 'tomato', '0')
+ self.assertRaises(bitstring.CreationError, pack, 'uint', 12)
+ self.assertRaises(bitstring.CreationError, pack, 'hex', 'penguin')
+ self.assertRaises(bitstring.CreationError, pack, 'hex12', '0x12')
+
+ def testPackWithLiterals(self):
+ s = bitstring.pack('0xf')
+ self.assertEqual(s, '0xf')
+ self.assertEqual(type(s), BitStream)
+ s = pack('0b1')
+ self.assertEqual(s, '0b1')
+ s = pack('0o7')
+ self.assertEqual(s, '0o7')
+ s = pack('int:10=-1')
+ self.assertEqual(s, '0b1111111111')
+ s = pack('uint:10=1')
+ self.assertEqual(s, '0b0000000001')
+ s = pack('ue=12')
+ self.assertEqual(s.ue, 12)
+ s = pack('se=-12')
+ self.assertEqual(s.se, -12)
+ s = pack('bin=01')
+ self.assertEqual(s.bin, '01')
+ s = pack('hex=01')
+ self.assertEqual(s.hex, '01')
+ s = pack('oct=01')
+ self.assertEqual(s.oct, '01')
+
+ def testPackWithDict(self):
+ a = pack('uint:6=width, se=height', height=100, width=12)
+ w, h = a.unpack('uint:6, se')
+ self.assertEqual(w, 12)
+ self.assertEqual(h, 100)
+ d = {}
+ d['w'] = '0xf'
+ d['300'] = 423
+ d['e'] = '0b1101'
+ a = pack('int:100=300, bin=e, uint:12=300', **d)
+ x, y, z = a.unpack('int:100, bin, uint:12')
+ self.assertEqual(x, 423)
+ self.assertEqual(y, '1101')
+ self.assertEqual(z, 423)
+
+ def testPackWithDict2(self):
+ a = pack('int:5, bin:3=b, 0x3, bin=c, se=12', 10, b='0b111', c='0b1')
+ b = BitStream('int:5=10, 0b111, 0x3, 0b1, se=12')
+ self.assertEqual(a, b)
+ a = pack('bits:3=b', b=BitStream('0b101'))
+ self.assertEqual(a, '0b101')
+ a = pack('bits:24=b', b=BitStream('0x001122'))
+ self.assertEqual(a, '0x001122')
+
+ def testPackWithDict3(self):
+ s = pack('hex:4=e, hex:4=0xe, hex:4=e', e='f')
+ self.assertEqual(s, '0xfef')
+ s = pack('sep', sep='0b00')
+ self.assertEqual(s, '0b00')
+
+ def testPackWithDict4(self):
+ s = pack('hello', hello='0xf')
+ self.assertEqual(s, '0xf')
+ s = pack('x, y, x, y, x', x='0b10', y='uint:12=100')
+ t = BitStream('0b10, uint:12=100, 0b10, uint:12=100, 0b10')
+ self.assertEqual(s, t)
+ a = [1, 2, 3, 4, 5]
+ s = pack('int:8, div,' * 5, *a, **{'div': '0b1'})
+ t = BitStream('int:8=1, 0b1, int:8=2, 0b1, int:8=3, 0b1, int:8=4, 0b1, int:8=5, 0b1')
+ self.assertEqual(s, t)
+
+ def testPackWithLocals(self):
+ width = 352
+ height = 288
+ s = pack('uint:12=width, uint:12=height', **locals())
+ self.assertEqual(s, '0x160120')
+
+ def testPackWithLengthRestriction(self):
+ s = pack('bin:3', '0b000')
+ self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b0011')
+ self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b11')
+ self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b0011')
+ self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b11')
+
+ s = pack('hex:4', '0xf')
+ self.assertRaises(bitstring.CreationError, pack, 'hex:4', '0b111')
+ self.assertRaises(bitstring.CreationError, pack, 'hex:4', '0b11111')
+ self.assertRaises(bitstring.CreationError, pack, 'hex:8=0xf')
+
+ s = pack('oct:6', '0o77')
+ self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o1')
+ self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o111')
+ self.assertRaises(bitstring.CreationError, pack, 'oct:3', '0b1')
+ self.assertRaises(bitstring.CreationError, pack, 'oct:3=hello', hello='0o12')
+
+ s = pack('bits:3', BitStream('0b111'))
+ self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b11'))
+ self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b1111'))
+ self.assertRaises(bitstring.CreationError, pack, 'bits:12=b', b=BitStream('0b11'))
+
+ def testPackNull(self):
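+ # Empty tokens between commas are ignored by both pack and unpack.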
+ s = pack('')
+ self.assertFalse(s)
+ s = pack(',')
+ self.assertFalse(s)
+ s = pack(',,,,,0b1,,,,,,,,,,,,,0b1,,,,,,,,,,')
+ self.assertEqual(s, '0b11')
+ s = pack(',,uint:12,,bin:3,', 100, '100')
+ a, b = s.unpack(',,,uint:12,,,,bin:3,,,')
+ self.assertEqual(a, 100)
+ self.assertEqual(b, '100')
+
+ def testPackDefaultUint(self):
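+ # A token with no type name defaults to uint: '10, 5' means 'uint:10, uint:5'.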
+ s = pack('10, 5', 1, 2)
+ a, b = s.unpack('10, 5')
+ self.assertEqual((a, b), (1, 2))
+ s = pack('10=150, 12=qee', qee=3)
+ self.assertEqual(s, 'uint:10=150, uint:12=3')
+ t = BitStream('100=5')
+ self.assertEqual(t, 'uint:100=5')
+
+ def testPackDefaultUintErrors(self):
+ self.assertRaises(bitstring.CreationError, BitStream, '5=-1')
+
+ def testPackingLongKeywordBitstring(self):
+ s = pack('bits=b', b=BitStream(128000))
+ self.assertEqual(s, BitStream(128000))
+
+ def testPackingWithListFormat(self):
+ f = ['bin', 'hex', 'uint:10']
+ a = pack(','.join(f), '00', '234', 100)
+ b = pack(f, '00', '234', 100)
+ self.assertEqual(a, b)
+
+
+class Unpack(unittest.TestCase):
+ def testUnpack1(self):
+ s = BitStream('uint:13=23, hex=e, bin=010, int:41=-554, 0o44332, se=-12, ue=4')
+ s.pos = 11
+ a, b, c, d, e, f, g = s.unpack('uint:13, hex:4, bin:3, int:41, oct:15, se, ue')
+ self.assertEqual(a, 23)
+ self.assertEqual(b, 'e')
+ self.assertEqual(c, '010')
+ self.assertEqual(d, -554)
+ self.assertEqual(e, '44332')
+ self.assertEqual(f, -12)
+ self.assertEqual(g, 4)
+ self.assertEqual(s.pos, 11)
+
+ def testUnpack2(self):
+ s = BitStream('0xff, 0b000, uint:12=100')
+ a, b, c = s.unpack('bits:8, bits, uint:12')
+ self.assertEqual(type(s), BitStream)
+ self.assertEqual(a, '0xff')
+ self.assertEqual(type(s), BitStream)
+ self.assertEqual(b, '0b000')
+ self.assertEqual(c, 100)
+ a, b = s.unpack(['bits:11', 'uint'])
+ self.assertEqual(a, '0xff, 0b000')
+ self.assertEqual(b, 100)
+
+ def testUnpackNull(self):
+ s = pack('0b1, , , 0xf,')
+ a, b = s.unpack('bin:1,,,hex:4,')
+ self.assertEqual(a, '1')
+ self.assertEqual(b, 'f')
+
+
+class FromFile(unittest.TestCase):
+ def testCreationFromFileOperations(self):
+ s = BitStream(filename='smalltestfile')
+ s.append('0xff')
+ self.assertEqual(s.hex, '0123456789abcdefff')
+
+ s = ConstBitStream(filename='smalltestfile')
+ t = BitStream('0xff') + s
+ self.assertEqual(t.hex, 'ff0123456789abcdef')
+
+ s = BitStream(filename='smalltestfile')
+ del s[:1]
+ self.assertEqual((BitStream('0b0') + s).hex, '0123456789abcdef')
+
+ s = BitStream(filename='smalltestfile')
+ del s[:7 * 8]
+ self.assertEqual(s.hex, 'ef')
+
+ s = BitStream(filename='smalltestfile')
+ s.insert('0xc', 4)
+ self.assertEqual(s.hex, '0c123456789abcdef')
+
+ s = BitStream(filename='smalltestfile')
+ s.prepend('0xf')
+ self.assertEqual(s.hex, 'f0123456789abcdef')
+
+ s = BitStream(filename='smalltestfile')
+ s.overwrite('0xaaa', 12)
+ self.assertEqual(s.hex, '012aaa6789abcdef')
+
+ s = BitStream(filename='smalltestfile')
+ s.reverse()
+ self.assertEqual(s.hex, 'f7b3d591e6a2c480')
+
+ s = BitStream(filename='smalltestfile')
+ del s[-60:]
+ self.assertEqual(s.hex, '0')
+
+ s = BitStream(filename='smalltestfile')
+ del s[:60]
+ self.assertEqual(s.hex, 'f')
+
+ def testFileProperties(self):
+ s = ConstBitStream(filename='smalltestfile')
+ self.assertEqual(s.hex, '0123456789abcdef')
+ self.assertEqual(s.uint, 81985529216486895)
+ self.assertEqual(s.int, 81985529216486895)
+ self.assertEqual(s.bin, '0000000100100011010001010110011110001001101010111100110111101111')
+ self.assertEqual(s[:-1].oct, '002215053170465363367')
+ s.bitpos = 0
+ self.assertEqual(s.read('se'), -72)
+ s.bitpos = 0
+ self.assertEqual(s.read('ue'), 144)
+ self.assertEqual(s.bytes, b'\x01\x23\x45\x67\x89\xab\xcd\xef')
+ self.assertEqual(s.tobytes(), b'\x01\x23\x45\x67\x89\xab\xcd\xef')
+
+ def testCreationFromFileWithLength(self):
+ s = ConstBitStream(filename='test.m1v', length=32)
+ self.assertEqual(s.length, 32)
+ self.assertEqual(s.hex, '000001b3')
+ s = ConstBitStream(filename='test.m1v', length=0)
+ self.assertFalse(s)
+ self.assertRaises(bitstring.CreationError, BitStream, filename='smalltestfile', length=65)
+ self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', length=64, offset=1)
+ # self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', offset=65)
+ f = open('smalltestfile', 'rb')
+ # self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=65)
+ self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, length=65)
+ self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=60, length=5)
+
+ def testCreationFromFileWithOffset(self):
+ a = BitStream(filename='test.m1v', offset=4)
+ self.assertEqual(a.peek(4 * 8).hex, '00001b31')
+ b = BitStream(filename='test.m1v', offset=28)
+ self.assertEqual(b.peek(8).hex, '31')
+
+ def testFileSlices(self):
+ s = BitStream(filename='smalltestfile')
+ self.assertEqual(s[-16:].hex, 'cdef')
+
+ def testCreationFromFileErrors(self):
+ self.assertRaises(IOError, BitStream, filename='Idonotexist')
+
+ def testFindInFile(self):
+ s = BitStream(filename='test.m1v')
+ self.assertTrue(s.find('0x160120'))
+ self.assertEqual(s.bytepos, 4)
+ s3 = s.read(3 * 8)
+ self.assertEqual(s3.hex, '160120')
+ s.bytepos = 0
+ self.assertTrue(s._pos == 0)
+ self.assertTrue(s.find('0x0001b2'))
+ self.assertEqual(s.bytepos, 13)
+
+ def testHexFromFile(self):
+ s = BitStream(filename='test.m1v')
+ self.assertEqual(s[0:32].hex, '000001b3')
+ self.assertEqual(s[-32:].hex, '000001b7')
+ s.hex = '0x11'
+ self.assertEqual(s.hex, '11')
+
+ def testFileOperations(self):
+ s1 = BitStream(filename='test.m1v')
+ s2 = BitStream(filename='test.m1v')
+ self.assertEqual(s1.read(32).hex, '000001b3')
+ self.assertEqual(s2.read(32).hex, '000001b3')
+ s1.bytepos += 4
+ self.assertEqual(s1.read(8).hex, '02')
+ self.assertEqual(s2.read(5 * 8).hex, '1601208302')
+ s1.pos = s1.len
+ try:
+ s1.pos += 1
+ self.assertTrue(False)
+ except ValueError:
+ pass
+
+ def testFileBitGetting(self):
+ s = ConstBitStream(filename='smalltestfile', offset=16, length=8) # 0x45
+ b = s[1]
+ self.assertTrue(b)
+ b = s.any(0, [-1, -2, -3])
+ self.assertTrue(b)
+ b = s.all(0, [0, 1, 2])
+ self.assertFalse(b)
+
+ def testVeryLargeFiles(self):
+ # This uses an 11GB file which isn't distributed for obvious reasons
+ # and so this test won't work for anyone except me!
+ try:
+ s = ConstBitStream(filename='11GB.mkv')
+ except IOError:
+ return
+ self.assertEqual(s.len, 11743020505 * 8)
+ self.assertEqual(s[1000000000:1000000100].hex, 'bdef7335d4545f680d669ce24')
+ self.assertEqual(s[-4::8].hex, 'bbebf7a1')
+
+
+class CreationErrors(unittest.TestCase):
+ def testIncorrectBinAssignment(self):
+ s = BitStream()
+ self.assertRaises(bitstring.CreationError, s._setbin_safe, '0010020')
+
+ def testIncorrectHexAssignment(self):
+ s = BitStream()
+ self.assertRaises(bitstring.CreationError, s._sethex, '0xabcdefg')
+
+
+class Length(unittest.TestCase):
+ def testLengthZero(self):
+ self.assertEqual(BitStream('').len, 0)
+
+ def testLength(self):
+ self.assertEqual(BitStream('0x80').len, 8)
+
+ def testLengthErrors(self):
+ # TODO: Lots of new checks, for various inits which now disallow length and offset
+ pass
+ #self.assertRaises(ValueError, BitStream, bin='111', length=-1)
+ #self.assertRaises(ValueError, BitStream, bin='111', length=4)
+
+ def testOffsetLengthError(self):
+ self.assertRaises(bitstring.CreationError, BitStream, hex='0xffff', offset=-1)
+
+
+class SimpleConversions(unittest.TestCase):
+ def testConvertToUint(self):
+ self.assertEqual(BitStream('0x10').uint, 16)
+ self.assertEqual(BitStream('0b000111').uint, 7)
+
+ def testConvertToInt(self):
+ self.assertEqual(BitStream('0x10').int, 16)
+ self.assertEqual(BitStream('0b11110').int, -2)
+
+ def testConvertToHex(self):
+ self.assertEqual(BitStream(bytes=b'\x00\x12\x23\xff').hex, '001223ff')
+ s = BitStream('0b11111')
+ self.assertRaises(bitstring.InterpretError, s._gethex)
+
+
+class Empty(unittest.TestCase):
+ def testEmptyBitstring(self):
+ s = BitStream()
+ self.assertRaises(bitstring.ReadError, s.read, 1)
+ self.assertEqual(s.bin, '')
+ self.assertEqual(s.hex, '')
+ self.assertRaises(bitstring.InterpretError, s._getint)
+ self.assertRaises(bitstring.InterpretError, s._getuint)
+ self.assertFalse(s)
+
+ def testNonEmptyBitStream(self):
+ s = BitStream(bin='0')
+ self.assertFalse(not s.len)
+
+
+class Position(unittest.TestCase):
+ def testBitPosition(self):
+ s = BitStream(bytes=b'\x00\x00\x00')
+ self.assertEqual(s.bitpos, 0)
+ s.read(5)
+ self.assertEqual(s.pos, 5)
+ s.pos = s.len
+ self.assertRaises(bitstring.ReadError, s.read, 1)
+
+ def testBytePosition(self):
+ s = BitStream(bytes=b'\x00\x00\x00')
+ self.assertEqual(s.bytepos, 0)
+ s.read(10)
+ self.assertRaises(bitstring.ByteAlignError, s._getbytepos)
+ s.read(6)
+ self.assertEqual(s.bytepos, 2)
+
+ def testSeekToBit(self):
+ s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00')
+ s.bitpos = 0
+ self.assertEqual(s.bitpos, 0)
+ self.assertRaises(ValueError, s._setbitpos, -1)
+ self.assertRaises(ValueError, s._setbitpos, 6 * 8 + 1)
+ s.bitpos = 6 * 8
+ self.assertEqual(s.bitpos, 6 * 8)
+
+ def testSeekToByte(self):
+ s = BitStream(bytes=b'\x00\x00\x00\x00\x00\xab')
+ s.bytepos = 5
+ self.assertEqual(s.read(8).hex, 'ab')
+
+ def testAdvanceBitsAndBytes(self):
+ s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00\x00\x00')
+ s.pos += 5
+ self.assertEqual(s.pos, 5)
+ s.bitpos += 16
+ self.assertEqual(s.pos, 2 * 8 + 5)
+ s.pos -= 8
+ self.assertEqual(s.pos, 8 + 5)
+
+ def testRetreatBitsAndBytes(self):
+ a = BitStream(length=100)
+ a.pos = 80
+ a.bytepos -= 5
+ self.assertEqual(a.bytepos, 5)
+ a.pos -= 5
+ self.assertEqual(a.pos, 35)
+
+
+class Offset(unittest.TestCase):
+ def testOffset1(self):
+ s = BitStream(bytes=b'\x00\x1b\x3f', offset=4)
+ self.assertEqual(s.read(8).bin, '00000001')
+ self.assertEqual(s.length, 20)
+
+ def testOffset2(self):
+ s1 = BitStream(bytes=b'\xf1\x02\x04')
+ s2 = BitStream(bytes=b'\xf1\x02\x04', length=23)
+ for i in [1, 2, 3, 4, 5, 6, 7, 6, 5, 4, 3, 2, 1, 0, 7, 3, 5, 1, 4]:
+ s1._datastore = offsetcopy(s1._datastore, i)
+ self.assertEqual(s1.hex, 'f10204')
+ s2._datastore = offsetcopy(s2._datastore, i)
+ self.assertEqual(s2.bin, '11110001000000100000010')
+
+
+class Append(unittest.TestCase):
+ def testAppend(self):
+ s1 = BitStream('0b00000')
+ s1.append(BitStream(bool=True))
+ self.assertEqual(s1.bin, '000001')
+ self.assertEqual((BitStream('0x0102') + BitStream('0x0304')).hex, '01020304')
+
+ def testAppendSameBitstring(self):
+ s1 = BitStream('0xf0')[:6]
+ s1.append(s1)
+ self.assertEqual(s1.bin, '111100111100')
+
+ def testAppendWithOffset(self):
+ s = BitStream(bytes=b'\x28\x28', offset=1)
+ s.append('0b0')
+ self.assertEqual(s.hex, '5050')
+
+
+class ByteAlign(unittest.TestCase):
+ def testByteAlign(self):
+ s = BitStream(hex='0001ff23')
+ s.bytealign()
+ self.assertEqual(s.bytepos, 0)
+ s.pos += 11
+ s.bytealign()
+ self.assertEqual(s.bytepos, 2)
+ s.pos -= 10
+ s.bytealign()
+ self.assertEqual(s.bytepos, 1)
+
+ def testByteAlignWithOffset(self):
+ s = BitStream(hex='0112233')
+ s._datastore = offsetcopy(s._datastore, 3)
+ bitstoalign = s.bytealign()
+ self.assertEqual(bitstoalign, 0)
+ self.assertEqual(s.read(5).bin, '00001')
+
+ def testInsertByteAligned(self):
+ s = BitStream('0x0011')
+ s.insert(BitStream('0x22'), 8)
+ self.assertEqual(s.hex, '002211')
+ s = BitStream(0)
+ s.insert(BitStream(bin='101'), 0)
+ self.assertEqual(s.bin, '101')
+
+
+class Truncate(unittest.TestCase):
+ def testTruncateStart(self):
+ s = BitStream('0b1')
+ del s[:1]
+ self.assertFalse(s)
+ s = BitStream(hex='1234')
+ self.assertEqual(s.hex, '1234')
+ del s[:4]
+ self.assertEqual(s.hex, '234')
+ del s[:9]
+ self.assertEqual(s.bin, '100')
+ del s[:2]
+ self.assertEqual(s.bin, '0')
+ self.assertEqual(s.len, 1)
+ del s[:1]
+ self.assertFalse(s)
+
+ def testTruncateEnd(self):
+ s = BitStream('0b1')
+ del s[-1:]
+ self.assertFalse(s)
+ s = BitStream(bytes=b'\x12\x34')
+ self.assertEqual(s.hex, '1234')
+ del s[-4:]
+ self.assertEqual(s.hex, '123')
+ del s[-9:]
+ self.assertEqual(s.bin, '000')
+ del s[-3:]
+ self.assertFalse(s)
+ s = BitStream('0b001')
+ del s[:2]
+ del s[-1:]
+ self.assertFalse(s)
+
+
+class Slice(unittest.TestCase):
+ def testByteAlignedSlice(self):
+ s = BitStream(hex='0x123456')
+ self.assertEqual(s[8:16].hex, '34')
+ s = s[8:24]
+ self.assertEqual(s.len, 16)
+ self.assertEqual(s.hex, '3456')
+ s = s[0:8]
+ self.assertEqual(s.hex, '34')
+ s.hex = '0x123456'
+ self.assertEqual(s[8:24][0:8].hex, '34')
+
+ def testSlice(self):
+ s = BitStream(bin='000001111100000')
+ s1 = s[0:5]
+ s2 = s[5:10]
+ s3 = s[10:15]
+ self.assertEqual(s1.bin, '00000')
+ self.assertEqual(s2.bin, '11111')
+ self.assertEqual(s3.bin, '00000')
+
+
+class Insert(unittest.TestCase):
+ def testInsert(self):
+ s1 = BitStream(hex='0x123456')
+ s2 = BitStream(hex='0xff')
+ s1.bytepos = 1
+ s1.insert(s2)
+ self.assertEqual(s1.bytepos, 2)
+ self.assertEqual(s1.hex, '12ff3456')
+ s1.insert('0xee', 24)
+ self.assertEqual(s1.hex, '12ff34ee56')
+ self.assertEqual(s1.bitpos, 32)
+ self.assertRaises(ValueError, s1.insert, '0b1', -1000)
+ self.assertRaises(ValueError, s1.insert, '0b1', 1000)
+
+ def testInsertNull(self):
+ s = BitStream(hex='0x123').insert(BitStream(), 3)
+ self.assertEqual(s.hex, '123')
+
+ def testInsertBits(self):
+ one = BitStream(bin='1')
+ zero = BitStream(bin='0')
+ s = BitStream(bin='00')
+ s.insert(one, 0)
+ self.assertEqual(s.bin, '100')
+ s.insert(zero, 0)
+ self.assertEqual(s.bin, '0100')
+ s.insert(one, s.len)
+ self.assertEqual(s.bin, '01001')
+ s.insert(s, 2)
+ self.assertEqual(s.bin, '0101001001')
+
+
+class Resetting(unittest.TestCase):
+ def testSetHex(self):
+ s = BitStream()
+ s.hex = '0'
+ self.assertEqual(s.hex, '0')
+ s.hex = '0x010203045'
+ self.assertEqual(s.hex, '010203045')
+ self.assertRaises(bitstring.CreationError, s._sethex, '0x002g')
+
+ def testSetBin(self):
+ s = BitStream(bin="000101101")
+ self.assertEqual(s.bin, '000101101')
+ self.assertEqual(s.len, 9)
+ s.bin = '0'
+ self.assertEqual(s.bin, '0')
+ self.assertEqual(s.len, 1)
+
+ def testSetEmptyBin(self):
+ s = BitStream(hex='0x000001b3')
+ s.bin = ''
+ self.assertEqual(s.len, 0)
+ self.assertEqual(s.bin, '')
+
+ def testSetInvalidBin(self):
+ s = BitStream()
+ self.assertRaises(bitstring.CreationError, s._setbin_safe, '00102')
+
+
+class Overwriting(unittest.TestCase):
+ def testOverwriteBit(self):
+ s = BitStream(bin='0')
+ s.overwrite(BitStream(bin='1'), 0)
+ self.assertEqual(s.bin, '1')
+
+ def testOverwriteLimits(self):
+ s = BitStream(bin='0b11111')
+ s.overwrite(BitStream(bin='000'), 0)
+ self.assertEqual(s.bin, '00011')
+ s.overwrite('0b000', 2)
+ self.assertEqual(s.bin, '00000')
+
+ def testOverwriteNull(self):
+ s = BitStream(hex='342563fedec')
+ s2 = BitStream(s)
+ s.overwrite(BitStream(bin=''), 23)
+ self.assertEqual(s.bin, s2.bin)
+
+ def testOverwritePosition(self):
+ s1 = BitStream(hex='0123456')
+ s2 = BitStream(hex='ff')
+ s1.bytepos = 1
+ s1.overwrite(s2)
+ self.assertEqual((s1.hex, s1.bytepos), ('01ff456', 2))
+ s1.overwrite('0xff', 0)
+ self.assertEqual((s1.hex, s1.bytepos), ('ffff456', 1))
+
+ def testOverwriteWithSelf(self):
+ s = BitStream('0x123')
+ s.overwrite(s)
+ self.assertEqual(s, '0x123')
+
+
+class SplitByteAligned(unittest.TestCase):
+ def testSplitByteAlignedCornerCases(self):
+ s = BitStream()
+ bsl = s.split(BitStream(hex='0xff'))
+ self.assertEqual(next(bsl).hex, '')
+ self.assertRaises(StopIteration, next, bsl)
+ s = BitStream(hex='aabbcceeddff')
+ delimiter = BitStream()
+ bsl = s.split(delimiter)
+ self.assertRaises(ValueError, next, bsl)
+ delimiter = BitStream(hex='11')
+ bsl = s.split(delimiter)
+ self.assertEqual(next(bsl).hex, s.hex)
+
+ def testSplitByteAligned(self):
+ s = BitStream(hex='0x1234aa1234bbcc1234ffff')
+ delimiter = BitStream(hex='1234')
+ bsl = s.split(delimiter)
+ self.assertEqual([b.hex for b in bsl], ['', '1234aa', '1234bbcc', '1234ffff'])
+ self.assertEqual(s.pos, 0)
+
+ def testSplitByteAlignedWithInitialBytes(self):
+ s = BitStream(hex='aa471234fedc43 47112233 47 4723 472314')
+ delimiter = BitStream(hex='47')
+ s.find(delimiter)
+ self.assertEqual(s.bytepos, 1)
+ bsl = s.split(delimiter, start=0)
+ self.assertEqual([b.hex for b in bsl], ['aa', '471234fedc43', '47112233',
+ '47', '4723', '472314'])
+ self.assertEqual(s.bytepos, 1)
+
+ def testSplitByteAlignedWithOverlappingDelimiter(self):
+ s = BitStream(hex='aaffaaffaaffaaffaaff')
+ bsl = s.split(BitStream(hex='aaffaa'))
+ self.assertEqual([b.hex for b in bsl], ['', 'aaffaaff', 'aaffaaffaaff'])
+
+
+class Adding(unittest.TestCase):
+ def testAdding(self):
+ s1 = BitStream(hex='0x0102')
+ s2 = BitStream(hex='0x0304')
+ s3 = s1 + s2
+ self.assertEqual(s1.hex, '0102')
+ self.assertEqual(s2.hex, '0304')
+ self.assertEqual(s3.hex, '01020304')
+ s3 += s1
+ self.assertEqual(s3.hex, '010203040102')
+ self.assertEqual(s2[9:16].bin, '0000100')
+ self.assertEqual(s1[0:9].bin, '000000010')
+ s4 = BitStream(bin='000000010') +\
+ BitStream(bin='0000100')
+ self.assertEqual(s4.bin, '0000000100000100')
+ s2p = s2[9:16]
+ s1p = s1[0:9]
+ s5p = s1p + s2p
+ s5 = s1[0:9] + s2[9:16]
+ self.assertEqual(s5.bin, '0000000100000100')
+
+ def testMoreAdding(self):
+ s = BitStream(bin='00') + BitStream(bin='') + BitStream(bin='11')
+ self.assertEqual(s.bin, '0011')
+ s = '0b01'
+ s += BitStream('0b11')
+ self.assertEqual(s.bin, '0111')
+ s = BitStream('0x00')
+ t = BitStream('0x11')
+ s += t
+ self.assertEqual(s.hex, '0011')
+ self.assertEqual(t.hex, '11')
+ s += s
+ self.assertEqual(s.hex, '00110011')
+
+ def testRadd(self):
+ s = '0xff' + BitStream('0xee')
+ self.assertEqual(s.hex, 'ffee')
+
+ def testTruncateAsserts(self):
+ s = BitStream('0x001122')
+ s.bytepos = 2
+ del s[-s.len:]
+ self.assertEqual(s.bytepos, 0)
+ s.append('0x00')
+ s.append('0x1122')
+ s.bytepos = 2
+ del s[:s.len]
+ self.assertEqual(s.bytepos, 0)
+ s.append('0x00')
+
+ def testOverwriteErrors(self):
+ s = BitStream(bin='11111')
+ self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), -10)
+ self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), 6)
+ self.assertRaises(ValueError, s.overwrite, BitStream(bin='11111'), 1)
+
+ def testDeleteBits(self):
+ s = BitStream(bin='000111100000')
+ s.bitpos = 4
+ del s[4:8]
+ self.assertEqual(s.bin, '00010000')
+ del s[4:1004]
+ self.assertEqual(s.bin, '0001')
+
+ def testDeleteBitsWithPosition(self):
+ s = BitStream(bin='000111100000')
+ del s[4:8]
+ self.assertEqual(s.bin, '00010000')
+
+ def testDeleteBytes(self):
+ s = BitStream('0x00112233')
+ del s[8:8]
+ self.assertEqual(s.hex, '00112233')
+ self.assertEqual(s.pos, 0)
+ del s[8:16]
+ self.assertEqual(s.hex, '002233')
+ self.assertEqual(s.bytepos, 0)
+ del s[:24]
+ self.assertFalse(s)
+ self.assertEqual(s.pos, 0)
+
+ def testGetItemWithPositivePosition(self):
+ s = BitStream(bin='0b1011')
+ self.assertEqual(s[0], True)
+ self.assertEqual(s[1], False)
+ self.assertEqual(s[2], True)
+ self.assertEqual(s[3], True)
+ self.assertRaises(IndexError, s.__getitem__, 4)
+
+ def testGetItemWithNegativePosition(self):
+ s = BitStream(bin='1011')
+ self.assertEqual(s[-1], True)
+ self.assertEqual(s[-2], True)
+ self.assertEqual(s[-3], False)
+ self.assertEqual(s[-4], True)
+ self.assertRaises(IndexError, s.__getitem__, -5)
+
+ def testSlicing(self):
+ s = ConstBitStream(hex='0123456789')
+ self.assertEqual(s[0:8].hex, '01')
+ self.assertFalse(s[0:0])
+ self.assertFalse(s[23:20])
+ self.assertEqual(s[8:12].bin, '0010')
+ self.assertEqual(s[32:80], '0x89')
+
+ def testNegativeSlicing(self):
+ s = ConstBitStream(hex='012345678')
+ self.assertEqual(s[:-8].hex, '0123456')
+ self.assertEqual(s[-16:-8].hex, '56')
+ self.assertEqual(s[-24:].hex, '345678')
+ self.assertEqual(s[-1000:-24], '0x012')
+
+ def testLen(self):
+ s = BitStream()
+ self.assertEqual(len(s), 0)
+ s.append(BitStream(bin='001'))
+ self.assertEqual(len(s), 3)
+
+ def testJoin(self):
+ s1 = BitStream(bin='0')
+ s2 = BitStream(bin='1')
+ s3 = BitStream(bin='000')
+ s4 = BitStream(bin='111')
+ strings = [s1, s2, s1, s3, s4]
+ s = BitStream().join(strings)
+ self.assertEqual(s.bin, '010000111')
+
+ def testJoin2(self):
+ s1 = BitStream(hex='00112233445566778899aabbccddeeff')
+ s2 = BitStream(bin='0b000011')
+ bsl = [s1[0:32], s1[4:12], s2, s2, s2, s2]
+ s = ConstBitStream().join(bsl)
+ self.assertEqual(s.hex, '00112233010c30c3')
+
+ bsl = [BitStream(uint=j, length=12) for j in range(10) for i in range(10)]
+ s = BitStream().join(bsl)
+ self.assertEqual(s.length, 1200)
+
+ def testPos(self):
+ s = BitStream(bin='1')
+ self.assertEqual(s.bitpos, 0)
+ s.read(1)
+ self.assertEqual(s.bitpos, 1)
+
+ def testWritingData(self):
+ strings = [BitStream(bin=x) for x in ['0', '001', '0011010010', '010010', '1011']]
+ s = BitStream().join(strings)
+ s2 = BitStream(bytes=s.bytes)
+ self.assertEqual(s2.bin, '000100110100100100101011')
+ s2.append(BitStream(bin='1'))
+ s3 = BitStream(bytes=s2.tobytes())
+ self.assertEqual(s3.bin, '00010011010010010010101110000000')
+
+ def testWritingDataWithOffsets(self):
+ s1 = BitStream(bytes=b'\x10')
+ s2 = BitStream(bytes=b'\x08\x00', length=8, offset=1)
+ s3 = BitStream(bytes=b'\x04\x00', length=8, offset=2)
+ self.assertTrue(s1 == s2)
+ self.assertTrue(s2 == s3)
+ self.assertTrue(s1.bytes == s2.bytes)
+ self.assertTrue(s2.bytes == s3.bytes)
+
+ def testVariousThings1(self):
+ hexes = ['12345678', '87654321', 'ffffffffff', 'ed', '12ec']
+ bins = ['001010', '1101011', '0010000100101110110110', '11', '011']
+ bsl = []
+ for (hex, bin) in list(zip(hexes, bins)) * 5:
+ bsl.append(BitStream(hex=hex))
+ bsl.append(BitStream(bin=bin))
+ s = BitStream().join(bsl)
+ for (hex, bin) in list(zip(hexes, bins)) * 5:
+ h = s.read(4 * len(hex))
+ b = s.read(len(bin))
+ self.assertEqual(h.hex, hex)
+ self.assertEqual(b.bin, bin)
+
+ def testVariousThings2(self):
+ s1 = BitStream(hex="0x1f08")[:13]
+ self.assertEqual(s1.bin, '0001111100001')
+ s2 = BitStream(bin='0101')
+ self.assertEqual(s2.bin, '0101')
+ s1.append(s2)
+ self.assertEqual(s1.length, 17)
+ self.assertEqual(s1.bin, '00011111000010101')
+ s1 = s1[3:8]
+ self.assertEqual(s1.bin, '11111')
+
+ def testVariousThings3(self):
+ s1 = BitStream(hex='0x012480ff')[2:27]
+ s2 = s1 + s1
+ self.assertEqual(s2.length, 50)
+ s3 = s2[0:25]
+ s4 = s2[25:50]
+ self.assertEqual(s3.bin, s4.bin)
+
+ def testPeekBit(self):
+ s = BitStream(bin='01')
+ self.assertEqual(s.peek(1), [0])
+ self.assertEqual(s.peek(1), [0])
+ self.assertEqual(s.read(1), [0])
+ self.assertEqual(s.peek(1), [1])
+ self.assertEqual(s.peek(1), [1])
+
+ s = BitStream(bytes=b'\x1f', offset=3)
+ self.assertEqual(s.len, 5)
+ self.assertEqual(s.peek(5).bin, '11111')
+ self.assertEqual(s.peek(5).bin, '11111')
+ s.pos += 1
+ self.assertRaises(bitstring.ReadError, s.peek, 5)
+
+ s = BitStream(hex='001122334455')
+ self.assertEqual(s.peek(8).hex, '00')
+ self.assertEqual(s.read(8).hex, '00')
+ s.pos += 33
+ self.assertRaises(bitstring.ReadError, s.peek, 8)
+
+ s = BitStream(hex='001122334455')
+ self.assertEqual(s.peek(8 * 2).hex, '0011')
+ self.assertEqual(s.read(8 * 3).hex, '001122')
+ self.assertEqual(s.peek(8 * 3).hex, '334455')
+ self.assertRaises(bitstring.ReadError, s.peek, 25)
+
+ def testAdvanceBit(self):
+ s = BitStream(hex='0xff')
+ s.bitpos = 6
+ s.pos += 1
+ self.assertEqual(s.bitpos, 7)
+ s.bitpos += 1
+ try:
+ s.pos += 1
+ self.assertTrue(False)
+ except ValueError:
+ pass
+
+ def testAdvanceByte(self):
+ s = BitStream(hex='0x010203')
+ s.bytepos += 1
+ self.assertEqual(s.bytepos, 1)
+ s.bytepos += 1
+ self.assertEqual(s.bytepos, 2)
+ s.bytepos += 1
+ try:
+ s.bytepos += 1
+ self.assertTrue(False)
+ except ValueError:
+ pass
+
+ def testRetreatBit(self):
+ s = BitStream(hex='0xff')
+ try:
+ s.pos -= 1
+ self.assertTrue(False)
+ except ValueError:
+ pass
+ s.pos = 5
+ s.pos -= 1
+ self.assertEqual(s.pos, 4)
+
+ def testRetreatByte(self):
+ s = BitStream(hex='0x010203')
+ try:
+ s.bytepos -= 1
+ self.assertTrue(False)
+ except ValueError:
+ pass
+ s.bytepos = 3
+ s.bytepos -= 1
+ self.assertEqual(s.bytepos, 2)
+ self.assertEqual(s.read(8).hex, '03')
+
+ def testCreationByAuto(self):
+ s = BitStream('0xff')
+ self.assertEqual(s.hex, 'ff')
+ s = BitStream('0b00011')
+ self.assertEqual(s.bin, '00011')
+ self.assertRaises(bitstring.CreationError, BitStream, 'hello')
+ s1 = BitStream(bytes=b'\xf5', length=3, offset=5)
+ s2 = BitStream(s1, length=1, offset=1)
+ self.assertEqual(s2, '0b0')
+ s = BitStream(bytes=b'\xff', offset=2)
+ t = BitStream(s, offset=2)
+ self.assertEqual(t, '0b1111')
+ self.assertRaises(TypeError, BitStream, auto=1.2)
+
+ def testCreationByAuto2(self):
+ s = BitStream('bin=001')
+ self.assertEqual(s.bin, '001')
+ s = BitStream('oct=0o007')
+ self.assertEqual(s.oct, '007')
+ s = BitStream('hex=123abc')
+ self.assertEqual(s, '0x123abc')
+
+ s = BitStream('bin:2=01')
+ self.assertEqual(s, '0b01')
+ for s in ['bin:1=01', 'bits:4=0b1', 'oct:3=000', 'hex:4=0x1234']:
+ self.assertRaises(bitstring.CreationError, BitStream, s)
+
+ def testInsertUsingAuto(self):
+ s = BitStream('0xff')
+ s.insert('0x00', 4)
+ self.assertEqual(s.hex, 'f00f')
+ self.assertRaises(ValueError, s.insert, 'ff')
+
+ def testOverwriteUsingAuto(self):
+ s = BitStream('0x0110')
+ s.overwrite('0b1')
+ self.assertEqual(s.hex, '8110')
+ s.overwrite('')
+ self.assertEqual(s.hex, '8110')
+ self.assertRaises(ValueError, s.overwrite, '0bf')
+
+ def testFindUsingAuto(self):
+ s = BitStream('0b000000010100011000')
+ self.assertTrue(s.find('0b101'))
+ self.assertEqual(s.pos, 7)
+
+ def testFindbytealignedUsingAuto(self):
+ s = BitStream('0x00004700')
+ self.assertTrue(s.find('0b01000111', bytealigned=True))
+ self.assertEqual(s.bytepos, 2)
+
+ def testAppendUsingAuto(self):
+ s = BitStream('0b000')
+ s.append('0b111')
+ self.assertEqual(s.bin, '000111')
+ s.append('0b0')
+ self.assertEqual(s.bin, '0001110')
+
+ def testSplitByteAlignedUsingAuto(self):
+ s = BitStream('0x000143563200015533000123')
+ sections = s.split('0x0001')
+ self.assertEqual(next(sections).hex, '')
+ self.assertEqual(next(sections).hex, '0001435632')
+ self.assertEqual(next(sections).hex, '00015533')
+ self.assertEqual(next(sections).hex, '000123')
+ self.assertRaises(StopIteration, next, sections)
+
+ def testSplitByteAlignedWithSelf(self):
+ s = BitStream('0x1234')
+ sections = s.split(s)
+ self.assertEqual(next(sections).hex, '')
+ self.assertEqual(next(sections).hex, '1234')
+ self.assertRaises(StopIteration, next, sections)
+
+ def testPrepend(self):
+ s = BitStream('0b000')
+ s.prepend('0b11')
+ self.assertEqual(s.bin, '11000')
+ s.prepend(s)
+ self.assertEqual(s.bin, '1100011000')
+ s.prepend('')
+ self.assertEqual(s.bin, '1100011000')
+
+ def testNullSlice(self):
+ s = BitStream('0x111')
+ t = s[1:1]
+ self.assertEqual(t._datastore.bytelength, 0)
+
+ def testMultipleAutos(self):
+ s = BitStream('0xa')
+ s.prepend('0xf')
+ s.append('0xb')
+ self.assertEqual(s, '0xfab')
+ s.prepend(s)
+ s.append('0x100')
+ s.overwrite('0x5', 4)
+ self.assertEqual(s, '0xf5bfab100')
+
+ def testReverse(self):
+ s = BitStream('0b0011')
+ s.reverse()
+ self.assertEqual(s.bin, '1100')
+ s = BitStream('0b10')
+ s.reverse()
+ self.assertEqual(s.bin, '01')
+ s = BitStream()
+ s.reverse()
+ self.assertEqual(s.bin, '')
+
+ def testInitWithConcatenatedStrings(self):
+ s = BitStream('0xff 0Xee 0xd 0xcc')
+ self.assertEqual(s.hex, 'ffeedcc')
+ s = BitStream('0b0 0B111 0b001')
+ self.assertEqual(s.bin, '0111001')
+ s += '0b1' + '0B1'
+ self.assertEqual(s.bin, '011100111')
+ s = BitStream(hex='ff0xee')
+ self.assertEqual(s.hex, 'ffee')
+ s = BitStream(bin='000b0b11')
+ self.assertEqual(s.bin, '0011')
+ s = BitStream(' 0o123 0O 7 0 o1')
+ self.assertEqual(s.oct, '12371')
+ s += ' 0 o 332'
+ self.assertEqual(s.oct, '12371332')
+
+ def testEquals(self):
+ s1 = BitStream('0b01010101')
+ s2 = BitStream('0b01010101')
+ self.assertTrue(s1 == s2)
+ s3 = BitStream()
+ s4 = BitStream()
+ self.assertTrue(s3 == s4)
+ self.assertFalse(s3 != s4)
+ s5 = BitStream(bytes=b'\xff', offset=2, length=3)
+ s6 = BitStream('0b111')
+ self.assertTrue(s5 == s6)
+ class A(object):
+ pass
+ self.assertFalse(s5 == A())
+
+ def testLargeEquals(self):
+ s1 = BitStream(1000000)
+ s2 = BitStream(1000000)
+ s1.set(True, [-1, 55, 53214, 534211, 999999])
+ s2.set(True, [-1, 55, 53214, 534211, 999999])
+ self.assertEqual(s1, s2)
+ s1.set(True, 800000)
+ self.assertNotEqual(s1, s2)
+
+ def testNotEquals(self):
+ s1 = BitStream('0b0')
+ s2 = BitStream('0b1')
+ self.assertTrue(s1 != s2)
+ self.assertFalse(s1 != BitStream('0b0'))
+
+ def testEqualityWithAutoInitialised(self):
+ a = BitStream('0b00110111')
+ self.assertTrue(a == '0b00110111')
+ self.assertTrue(a == '0x37')
+ self.assertTrue('0b0011 0111' == a)
+ self.assertTrue('0x3 0x7' == a)
+ self.assertFalse(a == '0b11001000')
+ self.assertFalse('0x3737' == a)
+
+ def testInvertSpecialMethod(self):
+ s = BitStream('0b00011001')
+ self.assertEqual((~s).bin, '11100110')
+ self.assertEqual((~BitStream('0b0')).bin, '1')
+ self.assertEqual((~BitStream('0b1')).bin, '0')
+ self.assertTrue(~~s == s)
+
+ def testInvertBitPosition(self):
+ s = ConstBitStream('0xefef')
+ s.pos = 8
+ t = ~s
+ self.assertEqual(s.pos, 8)
+ self.assertEqual(t.pos, 0)
+
+ def testInvertSpecialMethodErrors(self):
+ s = BitStream()
+ self.assertRaises(bitstring.Error, s.__invert__)
+
+ def testJoinWithAuto(self):
+ s = BitStream().join(['0xf', '0b00', BitStream(bin='11')])
+ self.assertEqual(s, '0b11110011')
+
+ def testAutoBitStringCopy(self):
+ s = BitStream('0xabcdef')
+ t = BitStream(s)
+ self.assertEqual(t.hex, 'abcdef')
+ del s[-8:]
+ self.assertEqual(t.hex, 'abcdef')
+
+
+class Multiplication(unittest.TestCase):
+
+ def testMultiplication(self):
+ a = BitStream('0xff')
+ b = a * 8
+ self.assertEqual(b, '0xffffffffffffffff')
+ b = 4 * a
+ self.assertEqual(b, '0xffffffff')
+ self.assertTrue(1 * a == a * 1 == a)
+ c = a * 0
+ self.assertFalse(c)
+ a *= 3
+ self.assertEqual(a, '0xffffff')
+ a *= 0
+ self.assertFalse(a)
+ one = BitStream('0b1')
+ zero = BitStream('0b0')
+ mix = one * 2 + 3 * zero + 2 * one * 2
+ self.assertEqual(mix, '0b110001111')
+ q = BitStream()
+ q *= 143
+ self.assertFalse(q)
+ q += [True, True, False]
+ q.pos += 2
+ q *= 0
+ self.assertFalse(q)
+ self.assertEqual(q.bitpos, 0)
+
+ def testMultiplicationWithFiles(self):
+ a = BitStream(filename='test.m1v')
+ b = a.len
+ a *= 3
+ self.assertEqual(a.len, 3 * b)
+
+ def testMultiplicationErrors(self):
+ a = BitStream('0b1')
+ b = BitStream('0b0')
+ self.assertRaises(ValueError, a.__mul__, -1)
+ self.assertRaises(ValueError, a.__imul__, -1)
+ self.assertRaises(ValueError, a.__rmul__, -1)
+ self.assertRaises(TypeError, a.__mul__, 1.2)
+ self.assertRaises(TypeError, a.__rmul__, b)
+ self.assertRaises(TypeError, a.__imul__, b)
+
+ def testFileAndMemEquivalence(self):
+ a = ConstBitStream(filename='smalltestfile')
+ b = BitStream(filename='smalltestfile')
+ self.assertTrue(isinstance(a._datastore._rawarray, bitstring.MmapByteArray))
+ self.assertTrue(isinstance(b._datastore._rawarray, bytearray))
+ self.assertEqual(a._datastore.getbyte(0), b._datastore.getbyte(0))
+ self.assertEqual(a._datastore.getbyteslice(1, 5), bytearray(b._datastore.getbyteslice(1, 5)))
+
+
+class BitWise(unittest.TestCase):
+
+ def testBitwiseAnd(self):
+ a = BitStream('0b01101')
+ b = BitStream('0b00110')
+ self.assertEqual((a & b).bin, '00100')
+ self.assertEqual((a & '0b11111'), a)
+ self.assertRaises(ValueError, a.__and__, '0b1')
+ self.assertRaises(ValueError, b.__and__, '0b110111111')
+ c = BitStream('0b0011011')
+ c.pos = 4
+ d = c & '0b1111000'
+ self.assertEqual(d.pos, 0)
+ self.assertEqual(d.bin, '0011000')
+ d = '0b1111000' & c
+ self.assertEqual(d.bin, '0011000')
+
+ def testBitwiseOr(self):
+ a = BitStream('0b111001001')
+ b = BitStream('0b011100011')
+ self.assertEqual((a | b).bin, '111101011')
+ self.assertEqual((a | '0b000000000'), a)
+ self.assertRaises(ValueError, a.__or__, '0b0000')
+ self.assertRaises(ValueError, b.__or__, a + '0b1')
+ a = '0xff00' | BitStream('0x00f0')
+ self.assertEqual(a.hex, 'fff0')
+
+ def testBitwiseXor(self):
+ a = BitStream('0b111001001')
+ b = BitStream('0b011100011')
+ self.assertEqual((a ^ b).bin, '100101010')
+ self.assertEqual((a ^ '0b111100000').bin, '000101001')
+ self.assertRaises(ValueError, a.__xor__, '0b0000')
+ self.assertRaises(ValueError, b.__xor__, a + '0b1')
+ a = '0o707' ^ BitStream('0o777')
+ self.assertEqual(a.oct, '070')
+
+
+class Split(unittest.TestCase):
+
+ def testSplit(self):
+ a = BitStream('0b0 010100111 010100 0101 010')
+ a.pos = 20
+ subs = [i.bin for i in a.split('0b010')]
+ self.assertEqual(subs, ['0', '010100111', '010100', '0101', '010'])
+ self.assertEqual(a.pos, 20)
+
+ def testSplitCornerCases(self):
+ a = BitStream('0b000000')
+ bsl = a.split('0b1', False)
+ self.assertEqual(next(bsl), a)
+ self.assertRaises(StopIteration, next, bsl)
+ b = BitStream()
+ bsl = b.split('0b001', False)
+ self.assertFalse(next(bsl))
+ self.assertRaises(StopIteration, next, bsl)
+
+ def testSplitErrors(self):
+ a = BitStream('0b0')
+ b = a.split('', False)
+ self.assertRaises(ValueError, next, b)
+
+ def testSliceWithOffset(self):
+ a = BitStream(bytes=b'\x00\xff\x00', offset=7)
+ b = a[7:12]
+ self.assertEqual(b.bin, '11000')
+
+ def testSplitWithMaxsplit(self):
+ a = BitStream('0xaabbccbbccddbbccddee')
+ self.assertEqual(len(list(a.split('0xbb', bytealigned=True))), 4)
+ bsl = list(a.split('0xbb', count=1, bytealigned=True))
+ self.assertEqual((len(bsl), bsl[0]), (1, '0xaa'))
+ bsl = list(a.split('0xbb', count=2, bytealigned=True))
+ self.assertEqual(len(bsl), 2)
+ self.assertEqual(bsl[0], '0xaa')
+ self.assertEqual(bsl[1], '0xbbcc')
+
+ def testSplitMore(self):
+ s = BitStream('0b1100011001110110')
+ for i in range(10):
+ a = list(s.split('0b11', False, count=i))
+ b = list(s.split('0b11', False))[:i]
+ self.assertEqual(a, b)
+ b = s.split('0b11', count=-1)
+ self.assertRaises(ValueError, next, b)
+
+ def testSplitStartbit(self):
+ a = BitStream('0b0010101001000000001111')
+ bsl = a.split('0b001', bytealigned=False, start=1)
+ self.assertEqual([x.bin for x in bsl], ['010101', '001000000', '001111'])
+ b = a.split('0b001', start=-100)
+ self.assertRaises(ValueError, next, b)
+ b = a.split('0b001', start=23)
+ self.assertRaises(ValueError, next, b)
+ b = a.split('0b1', start=10, end=9)
+ self.assertRaises(ValueError, next, b)
+
+ def testSplitStartbitByteAligned(self):
+ a = BitStream('0x00ffffee')
+ bsl = list(a.split('0b111', start=9, bytealigned=True))
+ self.assertEqual([x.bin for x in bsl], ['1111111', '11111111', '11101110'])
+
+ def testSplitEndbit(self):
+ a = BitStream('0b000010001001011')
+ bsl = list(a.split('0b1', bytealigned=False, end=14))
+ self.assertEqual([x.bin for x in bsl], ['0000', '1000', '100', '10', '1'])
+ self.assertEqual(list(a[4:12].split('0b0', False)), list(a.split('0b0', start=4, end=12)))
+ # Shouldn't raise ValueError
+ bsl = list(a.split('0xffee', end=15))
+ # Whereas this one will when we call next()
+ bsl = a.split('0xffee', end=16)
+ self.assertRaises(ValueError, next, bsl)
+
+ def testSplitEndbitByteAligned(self):
+ a = BitStream('0xff00ff')[:22]
+ bsl = list(a.split('0b 0000 0000 111', end=19))
+ self.assertEqual([x.bin for x in bsl], ['11111111', '00000000111'])
+ bsl = list(a.split('0b 0000 0000 111', end=18))
+ self.assertEqual([x.bin for x in bsl], ['111111110000000011'])
+
+ def testSplitMaxSplit(self):
+ a = BitStream('0b1' * 20)
+ for i in range(10):
+ bsl = list(a.split('0b1', count=i))
+ self.assertEqual(len(bsl), i)
+
+ #######################
+
+ def testPositionInSlice(self):
+ a = BitStream('0x00ffff00')
+ a.bytepos = 2
+ b = a[8:24]
+ self.assertEqual(b.bytepos, 0)
+
+ def testFindByteAlignedWithBits(self):
+ a = BitStream('0x00112233445566778899')
+ a.find('0b0001', bytealigned=True)
+ self.assertEqual(a.bitpos, 8)
+
+ def testFindStartbitNotByteAligned(self):
+ a = BitStream('0b0010000100')
+ found = a.find('0b1', start=4)
+ self.assertEqual((found, a.bitpos), ((7,), 7))
+ found = a.find('0b1', start=2)
+ self.assertEqual((found, a.bitpos), ((2,), 2))
+ found = a.find('0b1', bytealigned=False, start=8)
+ self.assertEqual((found, a.bitpos), ((), 2))
+
+ def testFindEndbitNotByteAligned(self):
+ a = BitStream('0b0010010000')
+ found = a.find('0b1', bytealigned=False, end=2)
+ self.assertEqual((found, a.bitpos), ((), 0))
+ found = a.find('0b1', end=3)
+ self.assertEqual((found, a.bitpos), ((2,), 2))
+ found = a.find('0b1', bytealigned=False, start=3, end=5)
+ self.assertEqual((found, a.bitpos), ((), 2))
+ found = a.find('0b1', start=3, end=6)
+ self.assertEqual((found[0], a.bitpos), (5, 5))
+
+ def testFindStartbitByteAligned(self):
+ a = BitStream('0xff001122ff0011ff')
+ a.pos = 40
+ found = a.find('0x22', start=23, bytealigned=True)
+ self.assertEqual((found, a.bytepos), ((24,), 3))
+ a.bytepos = 4
+ found = a.find('0x22', start=24, bytealigned=True)
+ self.assertEqual((found, a.bytepos), ((24,), 3))
+ found = a.find('0x22', start=25, bytealigned=True)
+ self.assertEqual((found, a.pos), ((), 24))
+ found = a.find('0b111', start=40, bytealigned=True)
+ self.assertEqual((found, a.pos), ((56,), 56))
+
+ def testFindEndbitByteAligned(self):
+ a = BitStream('0xff001122ff0011ff')
+ found = a.find('0x22', end=31, bytealigned=True)
+ self.assertFalse(found)
+ self.assertEqual(a.pos, 0)
+ found = a.find('0x22', end=32, bytealigned=True)
+ self.assertTrue(found)
+ self.assertEqual(a.pos, 24)
+ self.assertEqual(found[0], 24)
+
+ def testFindStartEndbitErrors(self):
+ a = BitStream('0b00100')
+ self.assertRaises(ValueError, a.find, '0b1', bytealigned=False, start=-100)
+ self.assertRaises(ValueError, a.find, '0b1', end=6)
+ self.assertRaises(ValueError, a.find, '0b1', start=4, end=3)
+ b = BitStream('0x0011223344')
+ self.assertRaises(ValueError, a.find, '0x22', bytealigned=True, start=-100)
+ self.assertRaises(ValueError, a.find, '0x22', end=41, bytealigned=True)
+
+ def testPrependAndAppendAgain(self):
+ c = BitStream('0x1122334455667788')
+ c.bitpos = 40
+ c.prepend('0b1')
+ self.assertEqual(c.bitpos, 41)
+ c = BitStream()
+ c.prepend('0x1234')
+ self.assertEqual(c.bytepos, 2)
+ c = BitStream()
+ c.append('0x1234')
+ self.assertEqual(c.bytepos, 0)
+ s = BitStream(bytes=b'\xff\xff', offset=2)
+ self.assertEqual(s.length, 14)
+ t = BitStream(bytes=b'\x80', offset=1, length=2)
+ s.prepend(t)
+ self.assertEqual(s, '0x3fff')
+
+ def testFindAll(self):
+ a = BitStream('0b11111')
+ p = a.findall('0b1')
+ self.assertEqual(list(p), [0, 1, 2, 3, 4])
+ p = a.findall('0b11')
+ self.assertEqual(list(p), [0, 1, 2, 3])
+ p = a.findall('0b10')
+ self.assertEqual(list(p), [])
+ a = BitStream('0x4733eeff66554747335832434547')
+ p = a.findall('0x47', bytealigned=True)
+ self.assertEqual(list(p), [0, 6 * 8, 7 * 8, 13 * 8])
+ p = a.findall('0x4733', bytealigned=True)
+ self.assertEqual(list(p), [0, 7 * 8])
+ a = BitStream('0b1001001001001001001')
+ p = a.findall('0b1001', bytealigned=False)
+ self.assertEqual(list(p), [0, 3, 6, 9, 12, 15])
+ self.assertEqual(a.pos, 15)
+
+ def testFindAllGenerator(self):
+ a = BitStream('0xff1234512345ff1234ff12ff')
+ p = a.findall('0xff', bytealigned=True)
+ self.assertEqual(next(p), 0)
+ self.assertEqual(next(p), 6 * 8)
+ self.assertEqual(next(p), 9 * 8)
+ self.assertEqual(next(p), 11 * 8)
+ self.assertRaises(StopIteration, next, p)
+
+ def testFindAllCount(self):
+ s = BitStream('0b1') * 100
+ for i in [0, 1, 23]:
+ self.assertEqual(len(list(s.findall('0b1', count=i))), i)
+ b = s.findall('0b1', bytealigned=True, count=-1)
+ self.assertRaises(ValueError, next, b)
+
+ def testContains(self):
+ a = BitStream('0b1') + '0x0001dead0001'
+ self.assertTrue('0xdead' in a)
+ self.assertEqual(a.pos, 0)
+ self.assertFalse('0xfeed' in a)
+
+ def testRepr(self):
+        max_chars = bitstring.MAX_CHARS
+ bls = ['', '0b1', '0o5', '0x43412424f41', '0b00101001010101']
+ for bs in bls:
+ a = BitStream(bs)
+ b = eval(a.__repr__())
+ self.assertTrue(a == b)
+ for f in [ConstBitStream(filename='test.m1v'),
+ ConstBitStream(filename='test.m1v', length=17),
+ ConstBitStream(filename='test.m1v', length=23, offset=23102)]:
+ f2 = eval(f.__repr__())
+ self.assertEqual(f._datastore._rawarray.source.name, f2._datastore._rawarray.source.name)
+ self.assertTrue(f2.tobytes() == f.tobytes())
+ a = BitStream('0b1')
+ self.assertEqual(repr(a), "BitStream('0b1')")
+ a += '0b11'
+ self.assertEqual(repr(a), "BitStream('0b111')")
+ a += '0b1'
+ self.assertEqual(repr(a), "BitStream('0xf')")
+        a *= max_chars
+        self.assertEqual(repr(a), "BitStream('0x" + "f" * max_chars + "')")
+        a += '0xf'
+        self.assertEqual(repr(a), "BitStream('0x" + "f" * max_chars + "...') # length=%d" % (max_chars * 4 + 4))
+
+ def testPrint(self):
+ s = BitStream(hex='0x00')
+ self.assertEqual('0x' + s.hex, s.__str__())
+ s = BitStream(filename='test.m1v')
+ self.assertEqual('0x' + s[0:bitstring.MAX_CHARS * 4].hex + '...', s.__str__())
+ self.assertEqual(BitStream().__str__(), '')
+ s = BitStream('0b11010')
+ self.assertEqual('0b' + s.bin, s.__str__())
+ s = BitStream('0x12345678901234567890,0b1')
+ self.assertEqual('0x12345678901234567890, 0b1', s.__str__())
+
+ def testIter(self):
+ a = BitStream('0b001010')
+ b = BitStream()
+ for bit in a:
+ b.append(ConstBitStream(bool=bit))
+ self.assertEqual(a, b)
+
+ def testDelitem(self):
+ a = BitStream('0xffee')
+ del a[0:8]
+ self.assertEqual(a.hex, 'ee')
+ del a[0:8]
+ self.assertFalse(a)
+ del a[10:12]
+ self.assertFalse(a)
+
+ def testNonZeroBitsAtStart(self):
+ a = BitStream(bytes=b'\xff', offset=2)
+ b = BitStream('0b00')
+ b += a
+ self.assertTrue(b == '0b0011 1111')
+ #self.assertEqual(a._datastore.rawbytes, b'\xff')
+ self.assertEqual(a.tobytes(), b'\xfc')
+
+ def testNonZeroBitsAtEnd(self):
+ a = BitStream(bytes=b'\xff', length=5)
+ #self.assertEqual(a._datastore.rawbytes, b'\xff')
+ b = BitStream('0b00')
+ a += b
+ self.assertTrue(a == '0b1111100')
+ self.assertEqual(a.tobytes(), b'\xf8')
+ self.assertRaises(ValueError, a._getbytes)
+
+ def testNewOffsetErrors(self):
+ self.assertRaises(bitstring.CreationError, BitStream, hex='ff', offset=-1)
+ self.assertRaises(bitstring.CreationError, BitStream, '0xffffffff', offset=33)
+
+ def testSliceStep(self):
+ a = BitStream('0x3')
+ b = a[::1]
+ self.assertEqual(a, b)
+ self.assertEqual(a[2:4:1], '0b11')
+ self.assertEqual(a[0:2:1], '0b00')
+ self.assertEqual(a[:3], '0o1')
+
+ a = BitStream('0x0011223344556677')
+ self.assertEqual(a[-8:], '0x77')
+ self.assertEqual(a[:-24], '0x0011223344')
+ self.assertEqual(a[-1000:-24], '0x0011223344')
+
+ def testInterestingSliceStep(self):
+ a = BitStream('0b0011000111')
+ self.assertEqual(a[7:3:-1], '0b1000')
+ self.assertEqual(a[9:2:-1], '0b1110001')
+ self.assertEqual(a[8:2:-2], '0b100')
+ self.assertEqual(a[100:-20:-3], '0b1010')
+ self.assertEqual(a[100:-20:-1], '0b1110001100')
+ self.assertEqual(a[10:2:-1], '0b1110001')
+ self.assertEqual(a[100:2:-1], '0b1110001')
+
+ def testInsertionOrderAndBitpos(self):
+ b = BitStream()
+ b[0:0] = '0b0'
+ b[0:0] = '0b1'
+ self.assertEqual(b, '0b10')
+ self.assertEqual(b.bitpos, 1)
+ a = BitStream()
+ a.insert('0b0')
+ a.insert('0b1')
+ self.assertEqual(a, '0b01')
+ self.assertEqual(a.bitpos, 2)
+
+ def testOverwriteOrderAndBitpos(self):
+ a = BitStream('0xff')
+ a.overwrite('0xa')
+ self.assertEqual(a, '0xaf')
+ self.assertEqual(a.bitpos, 4)
+ a.overwrite('0xb')
+ self.assertEqual(a, '0xab')
+ self.assertEqual(a.bitpos, 8)
+ self.assertRaises(ValueError, a.overwrite, '0b1')
+ a.overwrite('0xa', 4)
+ self.assertEqual(a, '0xaa')
+ self.assertEqual(a.bitpos, 8)
+ a.overwrite(a, 0)
+ self.assertEqual(a, '0xaa')
+
+ def testInitSliceWithInt(self):
+ a = BitStream(length=8)
+ a[:] = 100
+ self.assertEqual(a.uint, 100)
+ a[0] = 1
+ self.assertEqual(a.bin, '11100100')
+ a[1] = 0
+ self.assertEqual(a.bin, '10100100')
+ a[-1] = -1
+ self.assertEqual(a.bin, '10100101')
+ a[-3:] = -2
+ self.assertEqual(a.bin, '10100110')
+
+ def testInitSliceWithIntErrors(self):
+ a = BitStream('0b0000')
+ self.assertRaises(ValueError, a.__setitem__, slice(0, 4), 16)
+ self.assertRaises(ValueError, a.__setitem__, slice(0, 4), -9)
+ self.assertRaises(ValueError, a.__setitem__, 0, 2)
+ self.assertRaises(ValueError, a.__setitem__, 0, -2)
+
+ def testReverseWithSlice(self):
+ a = BitStream('0x0012ff')
+ a.reverse()
+ self.assertEqual(a, '0xff4800')
+ a.reverse(8, 16)
+ self.assertEqual(a, '0xff1200')
+ b = a[8:16]
+ b.reverse()
+ a[8:16] = b
+ self.assertEqual(a, '0xff4800')
+
+ def testReverseWithSliceErrors(self):
+ a = BitStream('0x123')
+ self.assertRaises(ValueError, a.reverse, -1, 4)
+ self.assertRaises(ValueError, a.reverse, 10, 9)
+ self.assertRaises(ValueError, a.reverse, 1, 10000)
+
+ def testInitialiseFromList(self):
+ a = BitStream([])
+ self.assertFalse(a)
+ a = BitStream([True, False, [], [0], 'hello'])
+ self.assertEqual(a, '0b10011')
+ a += []
+ self.assertEqual(a, '0b10011')
+ a += [True, False, True]
+ self.assertEqual(a, '0b10011101')
+ a.find([12, 23])
+ self.assertEqual(a.pos, 3)
+ self.assertEqual([1, 0, False, True], BitStream('0b1001'))
+ a = [True] + BitStream('0b1')
+ self.assertEqual(a, '0b11')
+
+ def testInitialiseFromTuple(self):
+ a = BitStream(())
+ self.assertFalse(a)
+ a = BitStream((0, 1, '0', '1'))
+ self.assertEqual('0b0111', a)
+ a.replace((True, True), [])
+ self.assertEqual(a, (False, True))
+
+ def testCut(self):
+ a = BitStream('0x00112233445')
+ b = list(a.cut(8))
+ self.assertEqual(b, ['0x00', '0x11', '0x22', '0x33', '0x44'])
+ b = list(a.cut(4, 8, 16))
+ self.assertEqual(b, ['0x1', '0x1'])
+ b = list(a.cut(4, 0, 44, 4))
+ self.assertEqual(b, ['0x0', '0x0', '0x1', '0x1'])
+ a = BitStream()
+ b = list(a.cut(10))
+ self.assertTrue(not b)
+
+ def testCutErrors(self):
+ a = BitStream('0b1')
+ b = a.cut(1, 1, 2)
+ self.assertRaises(ValueError, next, b)
+ b = a.cut(1, -2, 1)
+ self.assertRaises(ValueError, next, b)
+ b = a.cut(0)
+ self.assertRaises(ValueError, next, b)
+ b = a.cut(1, count=-1)
+ self.assertRaises(ValueError, next, b)
+
+ def testCutProblem(self):
+ s = BitStream('0x1234')
+ for n in list(s.cut(4)):
+ s.prepend(n)
+ self.assertEqual(s, '0x43211234')
+
+ def testJoinFunctions(self):
+ a = BitStream().join(['0xa', '0xb', '0b1111'])
+ self.assertEqual(a, '0xabf')
+ a = BitStream('0b1').join(['0b0' for i in range(10)])
+ self.assertEqual(a, '0b0101010101010101010')
+ a = BitStream('0xff').join([])
+ self.assertFalse(a)
+
+ def testAddingBitpos(self):
+ a = BitStream('0xff')
+ b = BitStream('0x00')
+ a.bitpos = b.bitpos = 8
+ c = a + b
+ self.assertEqual(c.bitpos, 0)
+
+ def testIntelligentRead1(self):
+ a = BitStream(uint=123, length=23)
+ u = a.read('uint:23')
+ self.assertEqual(u, 123)
+ self.assertEqual(a.pos, a.len)
+ b = BitStream(int=-12, length=44)
+ i = b.read('int:44')
+ self.assertEqual(i, -12)
+ self.assertEqual(b.pos, b.len)
+ u2, i2 = (a + b).readlist('uint:23, int:44')
+ self.assertEqual((u2, i2), (123, -12))
+
+ def testIntelligentRead2(self):
+ a = BitStream(ue=822)
+ u = a.read('ue')
+ self.assertEqual(u, 822)
+ self.assertEqual(a.pos, a.len)
+ b = BitStream(se=-1001)
+ s = b.read('se')
+ self.assertEqual(s, -1001)
+ self.assertEqual(b.pos, b.len)
+ s, u1, u2 = (b + 2 * a).readlist('se, ue, ue')
+ self.assertEqual((s, u1, u2), (-1001, 822, 822))
+
+ def testIntelligentRead3(self):
+ a = BitStream('0x123') + '0b11101'
+ h = a.read('hex:12')
+ self.assertEqual(h, '123')
+ b = a.read('bin: 5')
+ self.assertEqual(b, '11101')
+ c = '0b' + b + a
+ b, h = c.readlist('bin:5, hex:12')
+ self.assertEqual((b, h), ('11101', '123'))
+
+ def testIntelligentRead4(self):
+ a = BitStream('0o007')
+ o = a.read('oct:9')
+ self.assertEqual(o, '007')
+ self.assertEqual(a.pos, a.len)
+
+ def testIntelligentRead5(self):
+ a = BitStream('0x00112233')
+ c0, c1, c2 = a.readlist('bits:8, bits:8, bits:16')
+ self.assertEqual((c0, c1, c2), (BitStream('0x00'), BitStream('0x11'), BitStream('0x2233')))
+ a.pos = 0
+ c = a.read('bits:16')
+ self.assertEqual(c, BitStream('0x0011'))
+
+ def testIntelligentRead6(self):
+ a = BitStream('0b000111000')
+ b1, b2, b3 = a.readlist('bin :3, int: 3, int:3')
+ self.assertEqual(b1, '000')
+ self.assertEqual(b2, -1)
+ self.assertEqual(b3, 0)
+
+ def testIntelligentRead7(self):
+ a = BitStream('0x1234')
+ a1, a2, a3, a4 = a.readlist('bin:0, oct:0, hex:0, bits:0')
+ self.assertTrue(a1 == a2 == a3 == '')
+ self.assertFalse(a4)
+ self.assertRaises(ValueError, a.read, 'int:0')
+ self.assertRaises(ValueError, a.read, 'uint:0')
+ self.assertEqual(a.pos, 0)
+
+ def testIntelligentRead8(self):
+ a = BitStream('0x123456')
+ for t in ['hex:1', 'oct:1', 'hex4', '-5', 'fred', 'bin:-2',
+ 'uint:p', 'uint:-2', 'int:u', 'int:-3', 'ses', 'uee', '-14']:
+ self.assertRaises(ValueError, a.read, t)
+
+ def testIntelligentRead9(self):
+ a = BitStream('0xff')
+ self.assertEqual(a.read('intle'), -1)
+
+ def testFillerReads1(self):
+ s = BitStream('0x012345')
+ t = s.read('bits')
+ self.assertEqual(s, t)
+ s.pos = 0
+ a, b = s.readlist('hex:8, hex')
+ self.assertEqual(a, '01')
+ self.assertEqual(b, '2345')
+ self.assertTrue(isinstance(b, str))
+ s.bytepos = 0
+ a, b = s.readlist('bin, hex:20')
+ self.assertEqual(a, '0000')
+ self.assertEqual(b, '12345')
+ self.assertTrue(isinstance(a, str))
+
+ def testFillerReads2(self):
+ s = BitStream('0xabcdef')
+ self.assertRaises(bitstring.Error, s.readlist, 'bits, se')
+ self.assertRaises(bitstring.Error, s.readlist, 'hex:4, bits, ue, bin:4')
+ s.pos = 0
+ self.assertRaises(bitstring.Error, s.readlist, 'bin, bin')
+
+ def testIntelligentPeek(self):
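+        # peeklist decodes values like readlist but must leave .pos unchanged.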
+ a = BitStream('0b01, 0x43, 0o4, uint:23=2, se=5, ue=3')
+ b, c, e = a.peeklist('bin:2, hex:8, oct:3')
+ self.assertEqual((b, c, e), ('01', '43', '4'))
+ self.assertEqual(a.pos, 0)
+ a.pos = 13
+ f, g, h = a.peeklist('uint:23, se, ue')
+ self.assertEqual((f, g, h), (2, 5, 3))
+ self.assertEqual(a.pos, 13)
+
+ def testReadMultipleBits(self):
+ s = BitStream('0x123456789abcdef')
+ a, b = s.readlist([4, 4])
+ self.assertEqual(a, '0x1')
+ self.assertEqual(b, '0x2')
+ c, d, e = s.readlist([8, 16, 8])
+ self.assertEqual(c, '0x34')
+ self.assertEqual(d, '0x5678')
+ self.assertEqual(e, '0x9a')
+
+ def testPeekMultipleBits(self):
+ s = BitStream('0b1101, 0o721, 0x2234567')
+ a, b, c, d = s.peeklist([2, 1, 1, 9])
+ self.assertEqual(a, '0b11')
+ self.assertEqual(bool(b), False)
+ self.assertEqual(bool(c), True)
+ self.assertEqual(d, '0o721')
+ self.assertEqual(s.pos, 0)
+ a, b = s.peeklist([4, 9])
+ self.assertEqual(a, '0b1101')
+ self.assertEqual(b, '0o721')
+ s.pos = 13
+ a, b = s.peeklist([16, 8])
+ self.assertEqual(a, '0x2234')
+ self.assertEqual(b, '0x56')
+ self.assertEqual(s.pos, 13)
+
+ def testDifficultPrepends(self):
+ a = BitStream('0b1101011')
+ b = BitStream()
+ for i in range(10):
+ b.prepend(a)
+ self.assertEqual(b, a * 10)
+
+ def testPackingWrongNumberOfThings(self):
+ self.assertRaises(bitstring.CreationError, pack, 'bin:1')
+ self.assertRaises(bitstring.CreationError, pack, '', 100)
+
+ def testPackWithVariousKeys(self):
+ a = pack('uint10', uint10='0b1')
+ self.assertEqual(a, '0b1')
+ b = pack('0b110', **{'0b110': '0xfff'})
+ self.assertEqual(b, '0xfff')
+
+ def testPackWithVariableLength(self):
+ for i in range(1, 11):
+ a = pack('uint:n', 0, n=i)
+ self.assertEqual(a.bin, '0' * i)
+
+ def testToBytes(self):
+ a = BitStream(bytes=b'\xab\x00')
+ b = a.tobytes()
+ self.assertEqual(a.bytes, b)
+ for i in range(7):
+ del a[-1:]
+ self.assertEqual(a.tobytes(), b'\xab\x00')
+ del a[-1:]
+ self.assertEqual(a.tobytes(), b'\xab')
+
+ def testToFile(self):
+ a = BitStream('0x0000ff')[:17]
+ f = open('temp_bitstring_unit_testing_file', 'wb')
+ a.tofile(f)
+ f.close()
+ b = BitStream(filename='temp_bitstring_unit_testing_file')
+ self.assertEqual(b, '0x000080')
+
+ a = BitStream('0x911111')
+ del a[:1]
+ self.assertEqual(a + '0b0', '0x222222')
+ f = open('temp_bitstring_unit_testing_file', 'wb')
+ a.tofile(f)
+ f.close()
+ b = BitStream(filename='temp_bitstring_unit_testing_file')
+ self.assertEqual(b, '0x222222')
+ os.remove('temp_bitstring_unit_testing_file')
+
+ #def testToFileWithLargerFile(self):
+ # a = BitStream(length=16000000)
+ # a[1] = '0b1'
+ # a[-2] = '0b1'
+ # f = open('temp_bitstring_unit_testing_file' ,'wb')
+ # a.tofile(f)
+ # f.close()
+ # b = BitStream(filename='temp_bitstring_unit_testing_file')
+ # self.assertEqual(b.len, 16000000)
+ # self.assertEqual(b[1], True)
+ #
+ # f = open('temp_bitstring_unit_testing_file' ,'wb')
+ # a[1:].tofile(f)
+ # f.close()
+ # b = BitStream(filename='temp_bitstring_unit_testing_file')
+ # self.assertEqual(b.len, 16000000)
+ # self.assertEqual(b[0], True)
+ # os.remove('temp_bitstring_unit_testing_file')
+
+ def testTokenParser(self):
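+        # tokenparser returns (flag, tokens): each token is a (name, length, value)
+        # triple, and the flag marks formats containing a length-less 'stretchy' token.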
+ tp = bitstring.tokenparser
+ self.assertEqual(tp('hex'), (True, [('hex', None, None)]))
+ self.assertEqual(tp('hex=14'), (True, [('hex', None, '14')]))
+ self.assertEqual(tp('se'), (False, [('se', None, None)]))
+ self.assertEqual(tp('ue=12'), (False, [('ue', None, '12')]))
+ self.assertEqual(tp('0xef'), (False, [('0x', None, 'ef')]))
+ self.assertEqual(tp('uint:12'), (False, [('uint', 12, None)]))
+ self.assertEqual(tp('int:30=-1'), (False, [('int', 30, '-1')]))
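+        # The repeated calls below should also exercise tokenparser's internal cache.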
+ self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)]))
+ self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)]))
+ self.assertEqual(tp('123'), (False, [('uint', 123, None)]))
+ self.assertEqual(tp('123'), (False, [('uint', 123, None)]))
+ self.assertRaises(ValueError, tp, 'hex12')
+ self.assertEqual(tp('hex12', ('hex12',)), (False, [('hex12', None, None)]))
+ self.assertEqual(tp('2*bits:6'), (False, [('bits', 6, None), ('bits', 6, None)]))
+
+ def testAutoFromFileObject(self):
+ with open('test.m1v', 'rb') as f:
+ s = ConstBitStream(f, offset=32, length=12)
+ self.assertEqual(s.uint, 352)
+ t = ConstBitStream('0xf') + f
+ self.assertTrue(t.startswith('0xf000001b3160'))
+ s2 = ConstBitStream(f)
+ t2 = BitStream('0xc')
+ t2.prepend(s2)
+ self.assertTrue(t2.startswith('0x000001b3'))
+ self.assertTrue(t2.endswith('0xc'))
+ with open('test.m1v', 'rb') as b:
+ u = BitStream(bytes=b.read())
+ # TODO: u == s2 is much slower than u.bytes == s2.bytes
+ self.assertEqual(u.bytes, s2.bytes)
+
+ def testFileBasedCopy(self):
+ with open('smalltestfile', 'rb') as f:
+ s = BitStream(f)
+ t = BitStream(s)
+ s.prepend('0b1')
+ self.assertEqual(s[1:], t)
+ s = BitStream(f)
+ t = copy.copy(s)
+ t.append('0b1')
+ self.assertEqual(s, t[:-1])
+
+ def testBigEndianSynonyms(self):
+ s = BitStream('0x12318276ef')
+ self.assertEqual(s.int, s.intbe)
+ self.assertEqual(s.uint, s.uintbe)
+ s = BitStream(intbe=-100, length=16)
+ self.assertEqual(s, 'int:16=-100')
+ s = BitStream(uintbe=13, length=24)
+ self.assertEqual(s, 'int:24=13')
+ s = BitStream('uintbe:32=1000')
+ self.assertEqual(s, 'uint:32=1000')
+ s = BitStream('intbe:8=2')
+ self.assertEqual(s, 'int:8=2')
+ self.assertEqual(s.read('intbe'), 2)
+ s.pos = 0
+ self.assertEqual(s.read('uintbe'), 2)
+
+ def testBigEndianSynonymErrors(self):
+ self.assertRaises(bitstring.CreationError, BitStream, uintbe=100, length=15)
+ self.assertRaises(bitstring.CreationError, BitStream, intbe=100, length=15)
+ self.assertRaises(bitstring.CreationError, BitStream, 'uintbe:17=100')
+ self.assertRaises(bitstring.CreationError, BitStream, 'intbe:7=2')
+ s = BitStream('0b1')
+ self.assertRaises(bitstring.InterpretError, s._getintbe)
+ self.assertRaises(bitstring.InterpretError, s._getuintbe)
+ self.assertRaises(ValueError, s.read, 'uintbe')
+ self.assertRaises(ValueError, s.read, 'intbe')
+
+ def testLittleEndianUint(self):
+ s = BitStream(uint=100, length=16)
+ self.assertEqual(s.uintle, 25600)
+ s = BitStream(uintle=100, length=16)
+ self.assertEqual(s.uint, 25600)
+ self.assertEqual(s.uintle, 100)
+ s.uintle += 5
+ self.assertEqual(s.uintle, 105)
+ s = BitStream('uintle:32=999')
+ self.assertEqual(s.uintle, 999)
+ s.byteswap()
+ self.assertEqual(s.uint, 999)
+ s = pack('uintle:24', 1001)
+ self.assertEqual(s.uintle, 1001)
+ self.assertEqual(s.length, 24)
+ self.assertEqual(s.read('uintle'), 1001)
+
+ def testLittleEndianInt(self):
+ s = BitStream(int=100, length=16)
+ self.assertEqual(s.intle, 25600)
+ s = BitStream(intle=100, length=16)
+ self.assertEqual(s.int, 25600)
+ self.assertEqual(s.intle, 100)
+ s.intle += 5
+ self.assertEqual(s.intle, 105)
+ s = BitStream('intle:32=999')
+ self.assertEqual(s.intle, 999)
+ s.byteswap()
+ self.assertEqual(s.int, 999)
+ s = pack('intle:24', 1001)
+ self.assertEqual(s.intle, 1001)
+ self.assertEqual(s.length, 24)
+ self.assertEqual(s.read('intle'), 1001)
+
+ def testLittleEndianErrors(self):
+ self.assertRaises(bitstring.CreationError, BitStream, 'uintle:15=10')
+ self.assertRaises(bitstring.CreationError, BitStream, 'intle:31=-999')
+ self.assertRaises(bitstring.CreationError, BitStream, uintle=100, length=15)
+ self.assertRaises(bitstring.CreationError, BitStream, intle=100, length=15)
+ s = BitStream('0xfff')
+ self.assertRaises(bitstring.InterpretError, s._getintle)
+ self.assertRaises(bitstring.InterpretError, s._getuintle)
+ self.assertRaises(ValueError, s.read, 'uintle')
+ self.assertRaises(ValueError, s.read, 'intle')
+
+ def testStructTokens1(self):
+ self.assertEqual(pack('<b', 23), BitStream('intle:8=23'))
+ self.assertEqual(pack('<B', 23), BitStream('uintle:8=23'))
+ self.assertEqual(pack('<h', 23), BitStream('intle:16=23'))
+ self.assertEqual(pack('<H', 23), BitStream('uintle:16=23'))
+ self.assertEqual(pack('<l', 23), BitStream('intle:32=23'))
+ self.assertEqual(pack('<L', 23), BitStream('uintle:32=23'))
+ self.assertEqual(pack('<q', 23), BitStream('intle:64=23'))
+ self.assertEqual(pack('<Q', 23), BitStream('uintle:64=23'))
+ self.assertEqual(pack('>b', 23), BitStream('intbe:8=23'))
+ self.assertEqual(pack('>B', 23), BitStream('uintbe:8=23'))
+ self.assertEqual(pack('>h', 23), BitStream('intbe:16=23'))
+ self.assertEqual(pack('>H', 23), BitStream('uintbe:16=23'))
+ self.assertEqual(pack('>l', 23), BitStream('intbe:32=23'))
+ self.assertEqual(pack('>L', 23), BitStream('uintbe:32=23'))
+ self.assertEqual(pack('>q', 23), BitStream('intbe:64=23'))
+ self.assertEqual(pack('>Q', 23), BitStream('uintbe:64=23'))
+ self.assertRaises(bitstring.CreationError, pack, '<B', -1)
+ self.assertRaises(bitstring.CreationError, pack, '<H', -1)
+ self.assertRaises(bitstring.CreationError, pack, '<L', -1)
+ self.assertRaises(bitstring.CreationError, pack, '<Q', -1)
+
+ def testStructTokens2(self):
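+        # sys.byteorder is temporarily overridden so that the native-order '@' tokens
+        # can be checked for both endiannesses on the same machine.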
+ endianness = sys.byteorder
+ sys.byteorder = 'little'
+ self.assertEqual(pack('@b', 23), BitStream('intle:8=23'))
+ self.assertEqual(pack('@B', 23), BitStream('uintle:8=23'))
+ self.assertEqual(pack('@h', 23), BitStream('intle:16=23'))
+ self.assertEqual(pack('@H', 23), BitStream('uintle:16=23'))
+ self.assertEqual(pack('@l', 23), BitStream('intle:32=23'))
+ self.assertEqual(pack('@L', 23), BitStream('uintle:32=23'))
+ self.assertEqual(pack('@q', 23), BitStream('intle:64=23'))
+ self.assertEqual(pack('@Q', 23), BitStream('uintle:64=23'))
+ sys.byteorder = 'big'
+ self.assertEqual(pack('@b', 23), BitStream('intbe:8=23'))
+ self.assertEqual(pack('@B', 23), BitStream('uintbe:8=23'))
+ self.assertEqual(pack('@h', 23), BitStream('intbe:16=23'))
+ self.assertEqual(pack('@H', 23), BitStream('uintbe:16=23'))
+ self.assertEqual(pack('@l', 23), BitStream('intbe:32=23'))
+ self.assertEqual(pack('@L', 23), BitStream('uintbe:32=23'))
+ self.assertEqual(pack('@q', 23), BitStream('intbe:64=23'))
+ self.assertEqual(pack('@Q', 23), BitStream('uintbe:64=23'))
+ sys.byteorder = endianness
+
+ def testNativeEndianness(self):
+ s = pack('@2L', 40, 40)
+ if sys.byteorder == 'little':
+ self.assertEqual(s, pack('<2L', 40, 40))
+ else:
+ self.assertEqual(sys.byteorder, 'big')
+ self.assertEqual(s, pack('>2L', 40, 40))
+
+    def testStructTokens3(self):
+ s = pack('>hhl', 1, 2, 3)
+ a, b, c = s.unpack('>hhl')
+ self.assertEqual((a, b, c), (1, 2, 3))
+ s = pack('<QL, >Q \tL', 1001, 43, 21, 9999)
+ self.assertEqual(s.unpack('<QL, >QL'), [1001, 43, 21, 9999])
+
+ def testStructTokensMultiplicativeFactors(self):
+ s = pack('<2h', 1, 2)
+ a, b = s.unpack('<2h')
+ self.assertEqual((a, b), (1, 2))
+ s = pack('<100q', *range(100))
+ self.assertEqual(s.len, 100 * 64)
+ self.assertEqual(s[44*64:45*64].uintle, 44)
+ s = pack('@L0B2h', 5, 5, 5)
+ self.assertEqual(s.unpack('@Lhh'), [5, 5, 5])
+
+ def testStructTokensErrors(self):
+ for f in ['>>q', '<>q', 'q>', '2q', 'q', '>-2q', '@a', '>int:8', '>q2']:
+ self.assertRaises(bitstring.CreationError, pack, f, 100)
+
+ def testImmutableBitStreams(self):
+ a = ConstBitStream('0x012345')
+ self.assertEqual(a, '0x012345')
+ b = BitStream('0xf') + a
+ self.assertEqual(b, '0xf012345')
+ try:
+ a.append(b)
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ try:
+ a.prepend(b)
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ try:
+ a[0] = '0b1'
+ self.assertTrue(False)
+ except TypeError:
+ pass
+ try:
+ del a[5]
+ self.assertTrue(False)
+ except TypeError:
+ pass
+ try:
+ a.replace('0b1', '0b0')
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ try:
+ a.insert('0b11', 4)
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ try:
+ a.reverse()
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ try:
+ a.reversebytes()
+ self.assertTrue(False)
+ except AttributeError:
+ pass
+ self.assertEqual(a, '0x012345')
+ self.assertTrue(isinstance(a, ConstBitStream))
+
+ def testReverseBytes(self):
+ a = BitStream('0x123456')
+ a.byteswap()
+ self.assertEqual(a, '0x563412')
+ b = a + '0b1'
+ b.byteswap()
+ self.assertEqual('0x123456, 0b1', b)
+ a = BitStream('0x54')
+ a.byteswap()
+ self.assertEqual(a, '0x54')
+ a = BitStream()
+ a.byteswap()
+ self.assertFalse(a)
+
+ def testReverseBytes2(self):
+ a = BitStream()
+ a.byteswap()
+ self.assertFalse(a)
+ a = BitStream('0x00112233')
+ a.byteswap(0, 0, 16)
+ self.assertEqual(a, '0x11002233')
+ a.byteswap(0, 4, 28)
+ self.assertEqual(a, '0x12302103')
+ a.byteswap(start=0, end=18)
+ self.assertEqual(a, '0x30122103')
+ self.assertRaises(ValueError, a.byteswap, 0, 10, 2)
+ self.assertRaises(ValueError, a.byteswap, 0, -4, 4)
+ self.assertRaises(ValueError, a.byteswap, 0, 24, 48)
+ a.byteswap(0, 24)
+ self.assertEqual(a, '0x30122103')
+ a.byteswap(0, 11, 11)
+ self.assertEqual(a, '0x30122103')
+
+ def testCapitalsInPack(self):
+ a = pack('A', A='0b1')
+ self.assertEqual(a, '0b1')
+ format = 'bits:4=BL_OFFT, uint:12=width, uint:12=height'
+ d = {'BL_OFFT': '0b1011', 'width': 352, 'height': 288}
+ s = bitstring.pack(format, **d)
+ self.assertEqual(s, '0b1011, uint:12=352, uint:12=288')
+ a = pack('0X0, uint:8, hex', 45, '0XABcD')
+ self.assertEqual(a, '0x0, uint:8=45, 0xabCD')
+
+ def testOtherCapitals(self):
+ a = ConstBitStream('0XABC, 0O0, 0B11')
+ self.assertEqual(a, 'hex=0Xabc, oct=0, bin=0B11')
+
+ def testEfficientOverwrite(self):
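+        # A billion-bit zeroed stream: overwriting small pieces should not need to copy it all.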
+ a = BitStream(1000000000)
+ a.overwrite([1], 123456)
+ self.assertEqual(a[123456], True)
+ a.overwrite('0xff', 1)
+ self.assertEqual(a[0:32:1], '0x7f800000')
+ b = BitStream('0xffff')
+ b.overwrite('0x0000')
+ self.assertEqual(b, '0x0000')
+ self.assertEqual(b.pos, 16)
+ c = BitStream(length=1000)
+ c.overwrite('0xaaaaaaaaaaaa', 81)
+ self.assertEqual(c[81:81 + 6 * 8], '0xaaaaaaaaaaaa')
+ self.assertEqual(len(list(c.findall('0b1'))), 24)
+ s = BitStream(length=1000)
+ s = s[5:]
+ s.overwrite('0xffffff', 500)
+ s.pos = 500
+ self.assertEqual(s.read(4 * 8), '0xffffff00')
+ s.overwrite('0xff', 502)
+ self.assertEqual(s[502:518], '0xffff')
+
+ def testPeekAndReadListErrors(self):
+ a = BitStream('0x123456')
+ self.assertRaises(ValueError, a.read, 'hex:8, hex:8')
+ self.assertRaises(ValueError, a.peek, 'hex:8, hex:8')
+ self.assertRaises(TypeError, a.read, 10, 12)
+ self.assertRaises(TypeError, a.peek, 12, 14)
+ self.assertRaises(TypeError, a.read, 8, 8)
+ self.assertRaises(TypeError, a.peek, 80, 80)
+
+ def testStartswith(self):
+ a = BitStream()
+ self.assertTrue(a.startswith(BitStream()))
+ self.assertFalse(a.startswith('0b0'))
+ a = BitStream('0x12ff')
+ self.assertTrue(a.startswith('0x1'))
+ self.assertTrue(a.startswith('0b0001001'))
+ self.assertTrue(a.startswith('0x12ff'))
+ self.assertFalse(a.startswith('0x12ff, 0b1'))
+ self.assertFalse(a.startswith('0x2'))
+
+ def testStartswithStartEnd(self):
+ s = BitStream('0x123456')
+ self.assertTrue(s.startswith('0x234', 4))
+ self.assertFalse(s.startswith('0x123', end=11))
+ self.assertTrue(s.startswith('0x123', end=12))
+ self.assertTrue(s.startswith('0x34', 8, 16))
+ self.assertFalse(s.startswith('0x34', 7, 16))
+ self.assertFalse(s.startswith('0x34', 9, 16))
+ self.assertFalse(s.startswith('0x34', 8, 15))
+
+ def testEndswith(self):
+ a = BitStream()
+ self.assertTrue(a.endswith(''))
+ self.assertFalse(a.endswith(BitStream('0b1')))
+ a = BitStream('0xf2341')
+ self.assertTrue(a.endswith('0x41'))
+ self.assertTrue(a.endswith('0b001'))
+ self.assertTrue(a.endswith('0xf2341'))
+ self.assertFalse(a.endswith('0x1f2341'))
+ self.assertFalse(a.endswith('0o34'))
+
+ def testEndswithStartEnd(self):
+ s = BitStream('0x123456')
+ self.assertTrue(s.endswith('0x234', end=16))
+ self.assertFalse(s.endswith('0x456', start=13))
+ self.assertTrue(s.endswith('0x456', start=12))
+ self.assertTrue(s.endswith('0x34', 8, 16))
+ self.assertTrue(s.endswith('0x34', 7, 16))
+ self.assertFalse(s.endswith('0x34', 9, 16))
+ self.assertFalse(s.endswith('0x34', 8, 15))
+
+ def testUnhashability(self):
+ s = BitStream('0xf')
+ self.assertRaises(TypeError, set, [s])
+        self.assertRaises(TypeError, hash, s)
+
+ def testConstBitStreamSetCreation(self):
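+        # ConstBitStream is hashable, so equal values collapse in a set; the mutable BitStream can't be added.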
+ sl = [ConstBitStream(uint=i, length=7) for i in range(15)]
+ s = set(sl)
+ self.assertEqual(len(s), 15)
+ s.add(ConstBitStream('0b0000011'))
+ self.assertEqual(len(s), 15)
+ self.assertRaises(TypeError, s.add, BitStream('0b0000011'))
+
+ def testConstBitStreamFunctions(self):
+ s = ConstBitStream('0xf, 0b1')
+ self.assertEqual(type(s), ConstBitStream)
+ t = copy.copy(s)
+ self.assertEqual(type(t), ConstBitStream)
+ a = s + '0o3'
+ self.assertEqual(type(a), ConstBitStream)
+ b = a[0:4]
+ self.assertEqual(type(b), ConstBitStream)
+ b = a[4:3]
+ self.assertEqual(type(b), ConstBitStream)
+ b = a[5:2:-1]
+ self.assertEqual(type(b), ConstBitStream)
+ b = ~a
+ self.assertEqual(type(b), ConstBitStream)
+ b = a << 2
+ self.assertEqual(type(b), ConstBitStream)
+ b = a >> 2
+ self.assertEqual(type(b), ConstBitStream)
+ b = a * 2
+ self.assertEqual(type(b), ConstBitStream)
+ b = a * 0
+ self.assertEqual(type(b), ConstBitStream)
+ b = a & ~a
+ self.assertEqual(type(b), ConstBitStream)
+ b = a | ~a
+ self.assertEqual(type(b), ConstBitStream)
+ b = a ^ ~a
+ self.assertEqual(type(b), ConstBitStream)
+ b = a._slice(4, 4)
+ self.assertEqual(type(b), ConstBitStream)
+ b = a.read(4)
+ self.assertEqual(type(b), ConstBitStream)
+
+    def testConstBitStreamProperties(self):
+        a = ConstBitStream('0x123123')
+        # Every interpretation property should be read-only on a ConstBitStream.
+        for name, value in [('hex', '0x234'), ('oct', '0o234'), ('bin', '0b101'),
+                            ('ue', 3453), ('se', -123), ('int', 432), ('uint', 4412),
+                            ('intle', 123), ('uintle', 4412), ('intbe', 123),
+                            ('uintbe', 4412), ('intne', 123), ('uintne', 4412),
+                            ('bytes', b'hello')]:
+            self.assertRaises(AttributeError, setattr, a, name, value)
+
+ def testConstBitStreamMisc(self):
+ a = ConstBitStream('0xf')
+ b = a
+ a += '0xe'
+ self.assertEqual(b, '0xf')
+ self.assertEqual(a, '0xfe')
+ c = BitStream(a)
+ self.assertEqual(a, c)
+ a = ConstBitStream('0b1')
+ a._append(a)
+ self.assertEqual(a, '0b11')
+ self.assertEqual(type(a), ConstBitStream)
+ a._prepend(a)
+ self.assertEqual(a, '0b1111')
+ self.assertEqual(type(a), ConstBitStream)
+
+    def testConstBitStreamHashability(self):
+ a = ConstBitStream('0x1')
+ b = ConstBitStream('0x2')
+ c = ConstBitStream('0x1')
+ c.pos = 3
+ s = set((a, b, c))
+ self.assertEqual(len(s), 2)
+ self.assertEqual(hash(a), hash(c))
+
+ def testConstBitStreamCopy(self):
+ a = ConstBitStream('0xabc')
+ a.pos = 11
+ b = copy.copy(a)
+ b.pos = 4
+ self.assertEqual(id(a._datastore), id(b._datastore))
+ self.assertEqual(a.pos, 11)
+ self.assertEqual(b.pos, 4)
+
+ def testPython26stuff(self):
+ s = BitStream('0xff')
+ self.assertTrue(isinstance(s.tobytes(), bytes))
+ self.assertTrue(isinstance(s.bytes, bytes))
+
+ def testReadFromBits(self):
+ a = ConstBitStream('0xaabbccdd')
+ b = a.read(8)
+ self.assertEqual(b, '0xaa')
+ self.assertEqual(a[0:8], '0xaa')
+ self.assertEqual(a[-1], True)
+ a.pos = 0
+ self.assertEqual(a.read(4).uint, 10)
+
+
+class Set(unittest.TestCase):
+ def testSet(self):
+ a = BitStream(length=16)
+ a.set(True, 0)
+ self.assertEqual(a, '0b10000000 00000000')
+ a.set(1, 15)
+ self.assertEqual(a, '0b10000000 00000001')
+ b = a[4:12]
+ b.set(True, 1)
+ self.assertEqual(b, '0b01000000')
+ b.set(True, -1)
+ self.assertEqual(b, '0b01000001')
+ b.set(1, -8)
+ self.assertEqual(b, '0b11000001')
+ self.assertRaises(IndexError, b.set, True, -9)
+ self.assertRaises(IndexError, b.set, True, 8)
+
+ def testSetNegativeIndex(self):
+ a = BitStream(10)
+ a.set(1, -1)
+ self.assertEqual(a.bin, '0000000001')
+ a.set(1, [-1, -10])
+ self.assertEqual(a.bin, '1000000001')
+ self.assertRaises(IndexError, a.set, 1, [-11])
+
+ def testFileBasedSetUnset(self):
+ a = BitStream(filename='test.m1v')
+ a.set(True, (0, 1, 2, 3, 4))
+ self.assertEqual(a[0:32], '0xf80001b3')
+ a = BitStream(filename='test.m1v')
+ a.set(False, (28, 29, 30, 31))
+ self.assertTrue(a.startswith('0x000001b0'))
+
+ def testSetList(self):
+ a = BitStream(length=18)
+ a.set(True, range(18))
+ self.assertEqual(a.int, -1)
+ a.set(False, range(18))
+ self.assertEqual(a.int, 0)
+
+ def testUnset(self):
+ a = BitStream(length=16, int=-1)
+ a.set(False, 0)
+ self.assertEqual(~a, '0b10000000 00000000')
+ a.set(0, 15)
+ self.assertEqual(~a, '0b10000000 00000001')
+ b = a[4:12]
+ b.set(False, 1)
+ self.assertEqual(~b, '0b01000000')
+ b.set(False, -1)
+ self.assertEqual(~b, '0b01000001')
+ b.set(False, -8)
+ self.assertEqual(~b, '0b11000001')
+ self.assertRaises(IndexError, b.set, False, -9)
+ self.assertRaises(IndexError, b.set, False, 8)
+
+ def testSetWholeBitStream(self):
+ a = BitStream(14)
+ a.set(1)
+ self.assertTrue(a.all(1))
+ a.set(0)
+ self.assertTrue(a.all(0))
+
+
+class Invert(unittest.TestCase):
+ def testInvertBits(self):
+ a = BitStream('0b111000')
+ a.invert(range(a.len))
+ self.assertEqual(a, '0b000111')
+ a.invert([0, 1, -1])
+ self.assertEqual(a, '0b110110')
+
+ def testInvertWholeBitStream(self):
+ a = BitStream('0b11011')
+ a.invert()
+ self.assertEqual(a, '0b00100')
+
+ def testInvertSingleBit(self):
+ a = BitStream('0b000001')
+ a.invert(0)
+ self.assertEqual(a.bin, '100001')
+ a.invert(-1)
+ self.assertEqual(a.bin, '100000')
+
+ def testInvertErrors(self):
+ a = BitStream(10)
+ self.assertRaises(IndexError, a.invert, 10)
+ self.assertRaises(IndexError, a.invert, -11)
+ self.assertRaises(IndexError, a.invert, [1, 2, 10])
+
+
+ #######################
+
+ def testIor(self):
+ a = BitStream('0b1101001')
+ a |= '0b1110000'
+ self.assertEqual(a, '0b1111001')
+ b = a[2:]
+ c = a[1:-1]
+ b |= c
+ self.assertEqual(c, '0b11100')
+ self.assertEqual(b, '0b11101')
+
+ def testIand(self):
+ a = BitStream('0b0101010101000')
+ a &= '0b1111110000000'
+ self.assertEqual(a, '0b0101010000000')
+ s = BitStream(filename='test.m1v', offset=26, length=24)
+ s &= '0xff00ff'
+ self.assertEqual(s, '0xcc0004')
+
+ def testIxor(self):
+ a = BitStream('0b11001100110011')
+ a ^= '0b11111100000010'
+ self.assertEqual(a, '0b00110000110001')
+
+ def testLogicalInplaceErrors(self):
+ a = BitStream(4)
+ self.assertRaises(ValueError, a.__ior__, '0b111')
+ self.assertRaises(ValueError, a.__iand__, '0b111')
+ self.assertRaises(ValueError, a.__ixor__, '0b111')
+
+
+class AllAndAny(unittest.TestCase):
+ def testAll(self):
+ a = BitStream('0b0111')
+ self.assertTrue(a.all(True, (1, 3)))
+ self.assertFalse(a.all(True, (0, 1, 2)))
+ self.assertTrue(a.all(True, [-1]))
+ self.assertFalse(a.all(True, [0]))
+
+ def testFileBasedAll(self):
+ a = BitStream(filename='test.m1v')
+ self.assertTrue(a.all(True, [31]))
+ a = BitStream(filename='test.m1v')
+ self.assertTrue(a.all(False, (0, 1, 2, 3, 4)))
+
+ def testFileBasedAny(self):
+ a = BitStream(filename='test.m1v')
+ self.assertTrue(a.any(True, (31, 12)))
+ a = BitStream(filename='test.m1v')
+ self.assertTrue(a.any(False, (0, 1, 2, 3, 4)))
+
+ def testAny(self):
+ a = BitStream('0b10011011')
+ self.assertTrue(a.any(True, (1, 2, 3, 5)))
+ self.assertFalse(a.any(True, (1, 2, 5)))
+ self.assertTrue(a.any(True, (-1,)))
+ self.assertFalse(a.any(True, (1,)))
+
+ def testAllFalse(self):
+ a = BitStream('0b0010011101')
+ self.assertTrue(a.all(False, (0, 1, 3, 4)))
+ self.assertFalse(a.all(False, (0, 1, 2, 3, 4)))
+
+ def testAnyFalse(self):
+ a = BitStream('0b01001110110111111111111111111')
+ self.assertTrue(a.any(False, (4, 5, 6, 2)))
+ self.assertFalse(a.any(False, (1, 15, 20)))
+
+ def testAnyEmptyBitstring(self):
+ a = ConstBitStream()
+ self.assertFalse(a.any(True))
+ self.assertFalse(a.any(False))
+
+ def testAllEmptyBitStream(self):
+ a = ConstBitStream()
+ self.assertTrue(a.all(True))
+ self.assertTrue(a.all(False))
+
+ def testAnyWholeBitstring(self):
+ a = ConstBitStream('0xfff')
+ self.assertTrue(a.any(True))
+ self.assertFalse(a.any(False))
+
+ def testAllWholeBitstring(self):
+ a = ConstBitStream('0xfff')
+ self.assertTrue(a.all(True))
+ self.assertFalse(a.all(False))
+
+ def testErrors(self):
+ a = BitStream('0xf')
+ self.assertRaises(IndexError, a.all, True, [5])
+ self.assertRaises(IndexError, a.all, True, [-5])
+ self.assertRaises(IndexError, a.any, True, [5])
+ self.assertRaises(IndexError, a.any, True, [-5])
+
+ ###################
+
+ def testFloatInitialisation(self):
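+        # .pos is set to a non-zero value before each check to confirm the float properties ignore it.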
+ for f in (0.0000001, -1.0, 1.0, 0.2, -3.1415265, 1.331e32):
+ a = BitStream(float=f, length=64)
+ a.pos = 6
+ self.assertEqual(a.float, f)
+ a = BitStream('float:64=%s' % str(f))
+ a.pos = 6
+ self.assertEqual(a.float, f)
+ a = BitStream('floatbe:64=%s' % str(f))
+ a.pos = 6
+ self.assertEqual(a.floatbe, f)
+ a = BitStream('floatle:64=%s' % str(f))
+ a.pos = 6
+ self.assertEqual(a.floatle, f)
+ a = BitStream('floatne:64=%s' % str(f))
+ a.pos = 6
+ self.assertEqual(a.floatne, f)
+ b = BitStream(float=f, length=32)
+ b.pos = 6
+ self.assertAlmostEqual(b.float / f, 1.0)
+ b = BitStream('float:32=%s' % str(f))
+ b.pos = 6
+ self.assertAlmostEqual(b.float / f, 1.0)
+ b = BitStream('floatbe:32=%s' % str(f))
+ b.pos = 6
+ self.assertAlmostEqual(b.floatbe / f, 1.0)
+ b = BitStream('floatle:32=%s' % str(f))
+ b.pos = 6
+ self.assertAlmostEqual(b.floatle / f, 1.0)
+ b = BitStream('floatne:32=%s' % str(f))
+ b.pos = 6
+ self.assertAlmostEqual(b.floatne / f, 1.0)
+ a = BitStream('0x12345678')
+ a.pos = 6
+ a.float = 23
+ self.assertEqual(a.float, 23.0)
+
+ def testFloatInitStrings(self):
+ for s in ('5', '+0.0001', '-1e101', '4.', '.2', '-.65', '43.21E+32'):
+ a = BitStream('float:64=%s' % s)
+ self.assertEqual(a.float, float(s))
+
+ def testFloatPacking(self):
+ a = pack('>d', 0.01)
+ self.assertEqual(a.float, 0.01)
+ self.assertEqual(a.floatbe, 0.01)
+ a.byteswap()
+ self.assertEqual(a.floatle, 0.01)
+ b = pack('>f', 1e10)
+ self.assertAlmostEqual(b.float / 1e10, 1.0)
+ c = pack('<f', 10.3)
+ self.assertAlmostEqual(c.floatle / 10.3, 1.0)
+ d = pack('>5d', 10.0, 5.0, 2.5, 1.25, 0.1)
+ self.assertEqual(d.unpack('>5d'), [10.0, 5.0, 2.5, 1.25, 0.1])
+
+ def testFloatReading(self):
+ a = BitStream('floatle:64=12, floatbe:64=-0.01, floatne:64=3e33')
+ x, y, z = a.readlist('floatle:64, floatbe:64, floatne:64')
+ self.assertEqual(x, 12.0)
+ self.assertEqual(y, -0.01)
+ self.assertEqual(z, 3e33)
+ a = BitStream('floatle:32=12, floatbe:32=-0.01, floatne:32=3e33')
+ x, y, z = a.readlist('floatle:32, floatbe:32, floatne:32')
+ self.assertAlmostEqual(x / 12.0, 1.0)
+ self.assertAlmostEqual(y / -0.01, 1.0)
+ self.assertAlmostEqual(z / 3e33, 1.0)
+ a = BitStream('0b11, floatle:64=12, 0xfffff')
+ a.pos = 2
+ self.assertEqual(a.read('floatle:64'), 12.0)
+ b = BitStream(floatle=20, length=32)
+ b.floatle = 10.0
+ b = [0] + b
+ self.assertEqual(b[1:].floatle, 10.0)
+
+ def testNonAlignedFloatReading(self):
+ s = BitStream('0b1, float:32 = 10.0')
+ x, y = s.readlist('1, float:32')
+ self.assertEqual(y, 10.0)
+ s[1:] = 'floatle:32=20.0'
+ x, y = s.unpack('1, floatle:32')
+ self.assertEqual(y, 20.0)
+
+ def testFloatErrors(self):
+ a = BitStream('0x3')
+ self.assertRaises(bitstring.InterpretError, a._getfloat)
+ self.assertRaises(bitstring.CreationError, a._setfloat, -0.2)
+ for l in (8, 10, 12, 16, 30, 128, 200):
+ self.assertRaises(ValueError, BitStream, float=1.0, length=l)
+ self.assertRaises(bitstring.CreationError, BitStream, floatle=0.3, length=0)
+ self.assertRaises(bitstring.CreationError, BitStream, floatle=0.3, length=1)
+ self.assertRaises(bitstring.CreationError, BitStream, float=2)
+ self.assertRaises(bitstring.InterpretError, a.read, 'floatle:2')
+
+ def testReadErrorChangesPos(self):
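+        # A read that fails should leave .pos where it started.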
+ a = BitStream('0x123123')
+ try:
+ a.read('10, 5')
+ except ValueError:
+ pass
+ self.assertEqual(a.pos, 0)
+
+ def testRor(self):
+ a = BitStream('0b11001')
+ a.ror(0)
+ self.assertEqual(a, '0b11001')
+ a.ror(1)
+ self.assertEqual(a, '0b11100')
+ a.ror(5)
+ self.assertEqual(a, '0b11100')
+ a.ror(101)
+ self.assertEqual(a, '0b01110')
+ a = BitStream('0b1')
+ a.ror(1000000)
+ self.assertEqual(a, '0b1')
+
+ def testRorErrors(self):
+ a = BitStream()
+ self.assertRaises(bitstring.Error, a.ror, 0)
+ a += '0b001'
+ self.assertRaises(ValueError, a.ror, -1)
+
+ def testRol(self):
+ a = BitStream('0b11001')
+ a.rol(0)
+ self.assertEqual(a, '0b11001')
+ a.rol(1)
+ self.assertEqual(a, '0b10011')
+ a.rol(5)
+ self.assertEqual(a, '0b10011')
+ a.rol(101)
+ self.assertEqual(a, '0b00111')
+ a = BitStream('0b1')
+ a.rol(1000000)
+ self.assertEqual(a, '0b1')
+
+ def testRolFromFile(self):
+ a = BitStream(filename='test.m1v')
+ l = a.len
+ a.rol(1)
+ self.assertTrue(a.startswith('0x000003'))
+ self.assertEqual(a.len, l)
+ self.assertTrue(a.endswith('0x0036e'))
+
+ def testRorFromFile(self):
+ a = BitStream(filename='test.m1v')
+ l = a.len
+ a.ror(1)
+ self.assertTrue(a.startswith('0x800000'))
+ self.assertEqual(a.len, l)
+ self.assertTrue(a.endswith('0x000db'))
+
+ def testRolErrors(self):
+ a = BitStream()
+ self.assertRaises(bitstring.Error, a.rol, 0)
+ a += '0b001'
+ self.assertRaises(ValueError, a.rol, -1)
+
+ def testBytesToken(self):
+ a = BitStream('0x010203')
+ b = a.read('bytes:1')
+ self.assertTrue(isinstance(b, bytes))
+ self.assertEqual(b, b'\x01')
+ x, y, z = a.unpack('4, bytes:2, uint')
+ self.assertEqual(x, 0)
+ self.assertEqual(y, b'\x10\x20')
+ self.assertEqual(z, 3)
+ s = pack('bytes:4', b'abcd')
+ self.assertEqual(s.bytes, b'abcd')
+
+ def testBytesTokenMoreThoroughly(self):
+ a = BitStream('0x0123456789abcdef')
+ a.pos += 16
+ self.assertEqual(a.read('bytes:1'), b'\x45')
+ self.assertEqual(a.read('bytes:3'), b'\x67\x89\xab')
+ x, y, z = a.unpack('bits:28, bytes, bits:12')
+ self.assertEqual(y, b'\x78\x9a\xbc')
+
+ def testDedicatedReadFunctions(self):
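+        # The private _read* helpers take (length, start) and must not advance .pos.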
+ a = BitStream('0b11, uint:43=98798798172, 0b11111')
+ x = a._readuint(43, 2)
+ self.assertEqual(x, 98798798172)
+ self.assertEqual(a.pos, 0)
+ x = a._readint(43, 2)
+ self.assertEqual(x, 98798798172)
+ self.assertEqual(a.pos, 0)
+
+ a = BitStream('0b11, uintbe:48=98798798172, 0b11111')
+ x = a._readuintbe(48, 2)
+ self.assertEqual(x, 98798798172)
+ self.assertEqual(a.pos, 0)
+ x = a._readintbe(48, 2)
+ self.assertEqual(x, 98798798172)
+ self.assertEqual(a.pos, 0)
+
+ a = BitStream('0b111, uintle:40=123516, 0b111')
+ self.assertEqual(a._readuintle(40, 3), 123516)
+ b = BitStream('0xff, uintle:800=999, 0xffff')
+ self.assertEqual(b._readuintle(800, 8), 999)
+
+ a = BitStream('0b111, intle:48=999999999, 0b111111111111')
+ self.assertEqual(a._readintle(48, 3), 999999999)
+ b = BitStream('0xff, intle:200=918019283740918263512351235, 0xfffffff')
+ self.assertEqual(b._readintle(200, 8), 918019283740918263512351235)
+
+ a = BitStream('0b111, floatbe:64=-5.32, 0xffffffff')
+ self.assertEqual(a._readfloat(64, 3), -5.32)
+
+ a = BitStream('0b111, floatle:64=9.9998, 0b111')
+ self.assertEqual(a._readfloatle(64, 3), 9.9998)
+
+ def testAutoInitWithInt(self):
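+        # Auto-initialising with a plain int n gives an n-bit stream of zero bits.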
+ a = BitStream(0)
+ self.assertFalse(a)
+ a = BitStream(1)
+ self.assertEqual(a, '0b0')
+ a = BitStream(1007)
+ self.assertEqual(a, BitStream(length=1007))
+ self.assertRaises(bitstring.CreationError, BitStream, -1)
+
+ a = 6 + ConstBitStream('0b1') + 3
+ self.assertEqual(a, '0b0000001000')
+ a += 1
+ self.assertEqual(a, '0b00000010000')
+ self.assertEqual(ConstBitStream(13), 13)
+
+ def testReadingProblems(self):
+ a = BitStream('0x000001')
+ b = a.read('uint:24')
+ self.assertEqual(b, 1)
+ a.pos = 0
+ self.assertRaises(bitstring.ReadError, a.read, 'bytes:4')
+
+    def testAddVersusInPlaceAdd(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 += '0xdef'
+ self.assertEqual(a1, '0xabcdef')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 + '0x0'
+ a2 += '0xdef'
+ self.assertEqual(a2, '0xabcdef')
+ self.assertEqual(b2, '0xabcdef')
+ self.assertEqual(c2, '0xabc0')
+
+    def testAndVersusInPlaceAnd(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 &= '0xf0f'
+ self.assertEqual(a1, '0xa0c')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 & '0x00f'
+ a2 &= '0xf0f'
+ self.assertEqual(a2, '0xa0c')
+ self.assertEqual(b2, '0xa0c')
+ self.assertEqual(c2, '0x00c')
+
+    def testOrVersusInPlaceOr(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 |= '0xf0f'
+ self.assertEqual(a1, '0xfbf')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 | '0x00f'
+ a2 |= '0xf0f'
+ self.assertEqual(a2, '0xfbf')
+ self.assertEqual(b2, '0xfbf')
+ self.assertEqual(c2, '0xabf')
+
+    def testXorVersusInPlaceXor(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 ^= '0xf0f'
+ self.assertEqual(a1, '0x5b3')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 ^ '0x00f'
+ a2 ^= '0xf0f'
+ self.assertEqual(a2, '0x5b3')
+ self.assertEqual(b2, '0x5b3')
+ self.assertEqual(c2, '0xab3')
+
+    def testMulVersusInPlaceMul(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 *= 3
+ self.assertEqual(a1, '0xabcabcabc')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 * 2
+ a2 *= 3
+ self.assertEqual(a2, '0xabcabcabc')
+ self.assertEqual(b2, '0xabcabcabc')
+ self.assertEqual(c2, '0xabcabc')
+
+    def testLshiftVersusInPlaceLshift(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 <<= 4
+ self.assertEqual(a1, '0xbc0')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 << 8
+ a2 <<= 4
+ self.assertEqual(a2, '0xbc0')
+ self.assertEqual(b2, '0xbc0')
+ self.assertEqual(c2, '0xc00')
+
+    def testRshiftVersusInPlaceRshift(self):
+ a1 = ConstBitStream('0xabc')
+ b1 = a1
+ a1 >>= 4
+ self.assertEqual(a1, '0x0ab')
+ self.assertEqual(b1, '0xabc')
+
+ a2 = BitStream('0xabc')
+ b2 = a2
+ c2 = a2 >> 8
+ a2 >>= 4
+ self.assertEqual(a2, '0x0ab')
+ self.assertEqual(b2, '0x0ab')
+ self.assertEqual(c2, '0x00a')
+
+ def testAutoFromBool(self):
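+        # True and False are currently treated as the ints 1 and 0, giving a one-bit and an empty zero stream.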
+ a = ConstBitStream() + True + False + True
+ self.assertEqual(a, '0b00')
+ # self.assertEqual(a, '0b101')
+ # b = ConstBitStream(False)
+ # self.assertEqual(b, '0b0')
+ # c = ConstBitStream(True)
+ # self.assertEqual(c, '0b1')
+ # self.assertEqual(b, False)
+ # self.assertEqual(c, True)
+ # self.assertEqual(b & True, False)
+
+
+class Bugs(unittest.TestCase):
+ def testBugInReplace(self):
+ s = BitStream('0x00112233')
+ l = list(s.split('0x22', start=8, bytealigned=True))
+ self.assertEqual(l, ['0x11', '0x2233'])
+ s = BitStream('0x00112233')
+ s.replace('0x22', '0xffff', start=8, bytealigned=True)
+ self.assertEqual(s, '0x0011ffff33')
+ s = BitStream('0x0123412341234')
+ s.replace('0x23', '0xf', start=9, bytealigned=True)
+ self.assertEqual(s, '0x012341f41f4')
+
+ def testTruncateStartBug(self):
+ a = BitStream('0b000000111')[2:]
+ a._truncatestart(6)
+ self.assertEqual(a, '0b1')
+
+ def testNullBits(self):
+ s = ConstBitStream(bin='')
+ t = ConstBitStream(oct='')
+ u = ConstBitStream(hex='')
+ v = ConstBitStream(bytes=b'')
+ self.assertFalse(s)
+ self.assertFalse(t)
+ self.assertFalse(u)
+ self.assertFalse(v)
+
+ def testMultiplicativeFactorsCreation(self):
+ s = BitStream('1*0b1')
+ self.assertEqual(s, '0b1')
+ s = BitStream('4*0xc')
+ self.assertEqual(s, '0xcccc')
+ s = BitStream('0b1, 0*0b0')
+ self.assertEqual(s, '0b1')
+ s = BitStream('0b1, 3*uint:8=34, 2*0o755')
+ self.assertEqual(s, '0b1, uint:8=34, uint:8=34, uint:8=34, 0o755755')
+ s = BitStream('0*0b1001010')
+ self.assertFalse(s)
+
+ def testMultiplicativeFactorsReading(self):
+ s = BitStream('0xc') * 5
+ a, b, c, d, e = s.readlist('5*4')
+ self.assertTrue(a == b == c == d == e == 12)
+ s = ConstBitStream('2*0b101, 4*uint:7=3')
+ a, b, c, d, e = s.readlist('2*bin:3, 3*uint:7')
+ self.assertTrue(a == b == '101')
+ self.assertTrue(c == d == e == 3)
+
+ def testMultiplicativeFactorsPacking(self):
+ s = pack('3*bin', '1', '001', '101')
+ self.assertEqual(s, '0b1001101')
+ s = pack('hex, 2*se=-56, 3*uint:37', '34', 1, 2, 3)
+ a, b, c, d, e, f = s.unpack('hex:8, 2*se, 3*uint:37')
+ self.assertEqual(a, '34')
+ self.assertEqual(b, -56)
+ self.assertEqual(c, -56)
+ self.assertEqual((d, e, f), (1, 2, 3))
+ # This isn't allowed yet. See comment in tokenparser.
+ #s = pack('fluffy*uint:8', *range(3), fluffy=3)
+ #a, b, c = s.readlist('2*uint:8, 1*uint:8, 0*uint:8')
+ #self.assertEqual((a, b, c), (0, 1, 2))
+
+ def testMultiplicativeFactorsUnpacking(self):
+ s = ConstBitStream('0b10111')
+ a, b, c, d = s.unpack('3*bool, bin')
+ self.assertEqual((a, b, c), (True, False, True))
+ self.assertEqual(d, '11')
+
+ def testPackingDefaultIntWithKeyword(self):
+ s = pack('12', 100)
+ self.assertEqual(s.unpack('12')[0], 100)
+ s = pack('oh_no_not_the_eyes=33', oh_no_not_the_eyes=17)
+ self.assertEqual(s.uint, 33)
+ self.assertEqual(s.len, 17)
+
+ def testInitFromIterable(self):
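+        # Each item contributes one bit from its truth value; only the leading 0 of range(12) is falsy.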
+ self.assertTrue(isinstance(range(10), collections.Iterable))
+ s = ConstBitStream(range(12))
+ self.assertEqual(s, '0x7ff')
+
+ def testFunctionNegativeIndices(self):
+ # insert
+ s = BitStream('0b0111')
+ s.insert('0b0', -1)
+ self.assertEqual(s, '0b01101')
+ self.assertRaises(ValueError, s.insert, '0b0', -1000)
+
+ # reverse
+ s.reverse(-2)
+ self.assertEqual(s, '0b01110')
+ t = BitStream('0x778899abcdef')
+ t.reverse(-12, -4)
+ self.assertEqual(t, '0x778899abc7bf')
+
+ # reversebytes
+ t.byteswap(0, -40, -16)
+ self.assertEqual(t, '0x77ab9988c7bf')
+
+ # overwrite
+ t.overwrite('0x666', -20)
+ self.assertEqual(t, '0x77ab998666bf')
+
+ # find
+ found = t.find('0x998', bytealigned=True, start=-31)
+ self.assertFalse(found)
+ found = t.find('0x998', bytealigned=True, start=-32)
+ self.assertTrue(found)
+ self.assertEqual(t.pos, 16)
+ t.pos = 0
+ found = t.find('0x988', bytealigned=True, end=-21)
+ self.assertFalse(found)
+ found = t.find('0x998', bytealigned=True, end=-20)
+ self.assertTrue(found)
+ self.assertEqual(t.pos, 16)
+
+ #findall
+ s = BitStream('0x1234151f')
+ l = list(s.findall('0x1', bytealigned=True, start=-15))
+ self.assertEqual(l, [24])
+ l = list(s.findall('0x1', bytealigned=True, start=-16))
+ self.assertEqual(l, [16, 24])
+ l = list(s.findall('0x1', bytealigned=True, end=-5))
+ self.assertEqual(l, [0, 16])
+ l = list(s.findall('0x1', bytealigned=True, end=-4))
+ self.assertEqual(l, [0, 16, 24])
+
+ # rfind
+ found = s.rfind('0x1f', end=-1)
+ self.assertFalse(found)
+ found = s.rfind('0x12', start=-31)
+ self.assertFalse(found)
+
+ # cut
+ s = BitStream('0x12345')
+ l = list(s.cut(4, start=-12, end=-4))
+ self.assertEqual(l, ['0x3', '0x4'])
+
+ # split
+ s = BitStream('0xfe0012fe1200fe')
+ l = list(s.split('0xfe', bytealigned=True, end=-1))
+ self.assertEqual(l, ['', '0xfe0012', '0xfe1200f, 0b111'])
+ l = list(s.split('0xfe', bytealigned=True, start=-8))
+ self.assertEqual(l, ['', '0xfe'])
+
+ # startswith
+ self.assertTrue(s.startswith('0x00f', start=-16))
+ self.assertTrue(s.startswith('0xfe00', end=-40))
+ self.assertFalse(s.startswith('0xfe00', end=-41))
+
+ # endswith
+ self.assertTrue(s.endswith('0x00fe', start=-16))
+ self.assertFalse(s.endswith('0x00fe', start=-15))
+ self.assertFalse(s.endswith('0x00fe', end=-1))
+ self.assertTrue(s.endswith('0x00f', end=-4))
+
+ # replace
+ s.replace('0xfe', '', end=-1)
+ self.assertEqual(s, '0x00121200fe')
+ s.replace('0x00', '', start=-24)
+ self.assertEqual(s, '0x001212fe')
+
+ def testRotateStartAndEnd(self):
+ a = BitStream('0b110100001')
+ a.rol(1, 3, 6)
+ self.assertEqual(a, '0b110001001')
+ a.ror(1, start=-4)
+ self.assertEqual(a, '0b110001100')
+ a.rol(202, end=-5)
+ self.assertEqual(a, '0b001101100')
+ a.ror(3, end=4)
+ self.assertEqual(a, '0b011001100')
+ self.assertRaises(ValueError, a.rol, 5, start=-4, end=-6)
+
+ def testByteSwapInt(self):
+ s = pack('5*uintle:16', *range(10, 15))
+ self.assertEqual(list(range(10, 15)), s.unpack('5*uintle:16'))
+ swaps = s.byteswap(2)
+ self.assertEqual(list(range(10, 15)), s.unpack('5*uintbe:16'))
+ self.assertEqual(swaps, 5)
+ s = BitStream('0xf234567f')
+ swaps = s.byteswap(1, start=4)
+ self.assertEqual(swaps, 3)
+ self.assertEqual(s, '0xf234567f')
+ s.byteswap(2, start=4)
+ self.assertEqual(s, '0xf452367f')
+ s.byteswap(2, start=4, end=-4)
+ self.assertEqual(s, '0xf234567f')
+ s.byteswap(3)
+ self.assertEqual(s, '0x5634f27f')
+ s.byteswap(2, repeat=False)
+ self.assertEqual(s, '0x3456f27f')
+ swaps = s.byteswap(5)
+ self.assertEqual(swaps, 0)
+ swaps = s.byteswap(4, repeat=False)
+ self.assertEqual(swaps, 1)
+ self.assertEqual(s, '0x7ff25634')
+
+ def testByteSwapPackCode(self):
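+        # byteswap also accepts struct-style format strings and returns how many times the pattern was applied.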
+ s = BitStream('0x0011223344556677')
+ swaps = s.byteswap('b')
+ self.assertEqual(s, '0x0011223344556677')
+ self.assertEqual(swaps, 8)
+ swaps = s.byteswap('>3h', repeat=False)
+ self.assertEqual(s, '0x1100332255446677')
+ self.assertEqual(swaps, 1)
+
+ def testByteSwapIterable(self):
+ s = BitStream('0x0011223344556677')
+ swaps = s.byteswap(range(1, 4), repeat=False)
+ self.assertEqual(swaps, 1)
+ self.assertEqual(s, '0x0022115544336677')
+ swaps = s.byteswap([2], start=8)
+ self.assertEqual(s, '0x0011224455663377')
+ self.assertEqual(3, swaps)
+ swaps = s.byteswap([2, 3], start=4)
+ self.assertEqual(swaps, 1)
+ self.assertEqual(s, '0x0120156452463377')
+
+ def testByteSwapErrors(self):
+ s = BitStream('0x0011223344556677')
+ self.assertRaises(ValueError, s.byteswap, 'z')
+ self.assertRaises(ValueError, s.byteswap, -1)
+ self.assertRaises(ValueError, s.byteswap, [-1])
+ self.assertRaises(ValueError, s.byteswap, [1, 'e'])
+ self.assertRaises(ValueError, s.byteswap, '!h')
+ self.assertRaises(ValueError, s.byteswap, 2, start=-1000)
+ self.assertRaises(TypeError, s.byteswap, 5.4)
+
+ def testByteSwapFromFile(self):
+ s = BitStream(filename='smalltestfile')
+ swaps = s.byteswap('2bh')
+ self.assertEqual(s, '0x0123674589abefcd')
+ self.assertEqual(swaps, 2)
+
+ def testBracketExpander(self):
+ be = bitstring.expand_brackets
+ self.assertEqual(be('hello'), 'hello')
+ self.assertEqual(be('(hello)'), 'hello')
+ self.assertEqual(be('1*(hello)'), 'hello')
+ self.assertEqual(be('2*(hello)'), 'hello,hello')
+ self.assertEqual(be('1*(a, b)'), 'a,b')
+ self.assertEqual(be('2*(a, b)'), 'a,b,a,b')
+ self.assertEqual(be('2*(a), 3*(b)'), 'a,a,b,b,b')
+ self.assertEqual(be('2*(a, b, 3*(c, d), e)'), 'a,b,c,d,c,d,c,d,e,a,b,c,d,c,d,c,d,e')
+
+ def testBracketTokens(self):
+ s = BitStream('3*(0x0, 0b1)')
+ self.assertEqual(s, '0x0, 0b1, 0x0, 0b1, 0x0, 0b1')
+ s = pack('2*(uint:12, 3*(7, 6))', *range(3, 17))
+ a = s.unpack('12, 7, 6, 7, 6, 7, 6, 12, 7, 6, 7, 6, 7, 6')
+ self.assertEqual(a, list(range(3, 17)))
+ b = s.unpack('2*(12,3*(7,6))')
+ self.assertEqual(a, b)
+
+ def testPackCodeDicts(self):
+ self.assertEqual(sorted(bitstring.REPLACEMENTS_BE.keys()),
+ sorted(bitstring.REPLACEMENTS_LE.keys()))
+ self.assertEqual(sorted(bitstring.REPLACEMENTS_BE.keys()),
+ sorted(bitstring.PACK_CODE_SIZE.keys()))
+ for key in bitstring.PACK_CODE_SIZE:
+ be = pack(bitstring.REPLACEMENTS_BE[key], 0)
+ le = pack(bitstring.REPLACEMENTS_LE[key], 0)
+ self.assertEqual(be.len, bitstring.PACK_CODE_SIZE[key] * 8)
+ self.assertEqual(le.len, be.len)
+
+ # These tests don't compile for Python 3, so they're commented out to save me stress.
+ #def testUnicode(self):
+ #a = ConstBitStream(u'uint:12=34')
+ #self.assertEqual(a.uint, 34)
+ #a += u'0xfe'
+ #self.assertEqual(a[12:], '0xfe')
+ #a = BitStream('0x1122')
+ #c = a.byteswap(u'h')
+ #self.assertEqual(c, 1)
+ #self.assertEqual(a, u'0x2211')
+
+ #def testLongInt(self):
+ #a = BitStream(4L)
+ #self.assertEqual(a, '0b0000')
+ #a[1:3] = -1L
+ #self.assertEqual(a, '0b0110')
+ #a[0] = 1L
+ #self.assertEqual(a, '0b1110')
+ #a *= 4L
+ #self.assertEqual(a, '0xeeee')
+ #c = a.byteswap(2L)
+ #self.assertEqual(c, 1)
+ #a = BitStream('0x11223344')
+ #a.byteswap([1, 2L])
+ #self.assertEqual(a, '0x11332244')
+ #b = a*2L
+ #self.assertEqual(b, '0x1133224411332244')
+ #s = pack('uint:12', 46L)
+ #self.assertEqual(s.uint, 46)
+
+
+class UnpackWithDict(unittest.TestCase):
+ def testLengthKeywords(self):
+ a = ConstBitStream('2*13=100, 0b111')
+ x, y, z = a.unpack('n, uint:m, bin:q', n=13, m=13, q=3)
+ self.assertEqual(x, 100)
+ self.assertEqual(y, 100)
+ self.assertEqual(z, '111')
+
+ def testLengthKeywordsWithStretch(self):
+ a = ConstBitStream('0xff, 0b000, 0xf')
+ x, y, z = a.unpack('hex:a, bin, hex:b', a=8, b=4)
+ self.assertEqual(y, '000')
+
+ def testUnusedKeyword(self):
+ a = ConstBitStream('0b110')
+ x, = a.unpack('bin:3', notused=33)
+ self.assertEqual(x, '110')
+
+ def testLengthKeywordErrors(self):
+ a = pack('uint:p=33', p=12)
+ self.assertRaises(ValueError, a.unpack, 'uint:p')
+ self.assertRaises(ValueError, a.unpack, 'uint:p', p='a_string')
+
+
+class ReadWithDict(unittest.TestCase):
+ def testLengthKeywords(self):
+ s = BitStream('0x0102')
+ x, y = s.readlist('a, hex:b', a=8, b=4)
+ self.assertEqual((x, y), (1, '0'))
+ self.assertEqual(s.pos, 12)
+
+ def testBytesKeywordProblem(self):
+ s = BitStream('0x01')
+ x, = s.unpack('bytes:a', a=1)
+ self.assertEqual(x, b'\x01')
+
+ s = BitStream('0x000ff00a')
+ x, y, z = s.unpack('12, bytes:x, bits', x=2)
+ self.assertEqual((x, y, z), (0, b'\xff\x00', '0xa'))
+
+
+class PeekWithDict(unittest.TestCase):
+ def testLengthKeywords(self):
+ s = BitStream('0x0102')
+ x, y = s.peeklist('a, hex:b', a=8, b=4)
+ self.assertEqual((x, y), (1, '0'))
+ self.assertEqual(s.pos, 0)
+
+##class Miscellany(unittest.TestCase):
+##
+## def testNumpyInt(self):
+## try:
+## import numpy
+## a = ConstBitStream(uint=numpy.uint8(5), length=3)
+## self.assertEqual(a.uint, 5)
+## except ImportError:
+## # Not to worry
+## pass
+
+class BoolToken(unittest.TestCase):
+ def testInterpretation(self):
+ a = ConstBitStream('0b1')
+ self.assertEqual(a.bool, True)
+ self.assertEqual(a.read('bool'), True)
+ self.assertEqual(a.unpack('bool')[0], True)
+ b = ConstBitStream('0b0')
+ self.assertEqual(b.bool, False)
+ self.assertEqual(b.peek('bool'), False)
+ self.assertEqual(b.unpack('bool')[0], False)
+
+ def testPack(self):
+ a = pack('bool=True')
+ b = pack('bool=False')
+ self.assertEqual(a.bool, True)
+ self.assertEqual(b.bool, False)
+ c = pack('4*bool', False, True, 'False', 'True')
+ self.assertEqual(c, '0b0101')
+
+ def testAssignment(self):
+ a = BitStream()
+ a.bool = True
+ self.assertEqual(a.bool, True)
+ a.hex = 'ee'
+ a.bool = False
+ self.assertEqual(a.bool, False)
+ a.bool = 'False'
+ self.assertEqual(a.bool, False)
+ a.bool = 'True'
+ self.assertEqual(a.bool, True)
+ a.bool = 0
+ self.assertEqual(a.bool, False)
+ a.bool = 1
+ self.assertEqual(a.bool, True)
+
+ def testErrors(self):
+ self.assertRaises(bitstring.CreationError, pack, 'bool', 'hello')
+ self.assertRaises(bitstring.CreationError, pack, 'bool=true')
+ self.assertRaises(bitstring.CreationError, pack, 'True')
+ self.assertRaises(bitstring.CreationError, pack, 'bool', 2)
+ a = BitStream('0b11')
+ self.assertRaises(bitstring.InterpretError, a._getbool)
+ b = BitStream()
+        self.assertRaises(bitstring.InterpretError, b._getbool)
+ self.assertRaises(bitstring.CreationError, a._setbool, 'false')
+
+ def testLengthWithBoolRead(self):
+ a = ConstBitStream('0xf')
+ self.assertRaises(ValueError, a.read, 'bool:0')
+ self.assertRaises(ValueError, a.read, 'bool:1')
+ self.assertRaises(ValueError, a.read, 'bool:2')
+
+
+class ReadWithIntegers(unittest.TestCase):
+ def testReadInt(self):
+ a = ConstBitStream('0xffeedd')
+ b = a.read(8)
+ self.assertEqual(b.hex, 'ff')
+ self.assertEqual(a.pos, 8)
+ b = a.peek(8)
+ self.assertEqual(b.hex, 'ee')
+ self.assertEqual(a.pos, 8)
+ b = a.peek(1)
+ self.assertEqual(b, '0b1')
+ b = a.read(1)
+ self.assertEqual(b, '0b1')
+
+ def testReadIntList(self):
+ a = ConstBitStream('0xab, 0b110')
+ b, c = a.readlist([8, 3])
+ self.assertEqual(b.hex, 'ab')
+ self.assertEqual(c.bin, '110')
+
+
+class FileReadingStrategy(unittest.TestCase):
+ def testBitStreamIsAlwaysRead(self):
+ a = BitStream(filename='smalltestfile')
+ self.assertTrue(isinstance(a._datastore, bitstring.ByteStore))
+ f = open('smalltestfile', 'rb')
+ b = BitStream(f)
+ self.assertTrue(isinstance(b._datastore, bitstring.ByteStore))
+
+ def testBitsIsNeverRead(self):
+ a = ConstBitStream(filename='smalltestfile')
+ self.assertTrue(isinstance(a._datastore._rawarray, bitstring.MmapByteArray))
+ f = open('smalltestfile', 'rb')
+ b = ConstBitStream(f)
+ self.assertTrue(isinstance(b._datastore._rawarray, bitstring.MmapByteArray))
+
+
+class Count(unittest.TestCase):
+ def testCount(self):
+ a = ConstBitStream('0xf0f')
+ self.assertEqual(a.count(True), 8)
+ self.assertEqual(a.count(False), 4)
+
+ b = BitStream()
+ self.assertEqual(b.count(True), 0)
+ self.assertEqual(b.count(False), 0)
+
+ def testCountWithOffsetData(self):
+ a = ConstBitStream('0xff0120ff')
+ b = a[1:-1]
+ self.assertEqual(b.count(1), 16)
+ self.assertEqual(b.count(0), 14)
+
+
+class ZeroBitReads(unittest.TestCase):
+ def testInteger(self):
+ a = ConstBitStream('0x123456')
+ self.assertRaises(bitstring.InterpretError, a.read, 'uint:0')
+ self.assertRaises(bitstring.InterpretError, a.read, 'float:0')
+
+#class EfficientBitsCopies(unittest.TestCase):
+#
+# def testBitsCopy(self):
+# a = ConstBitStream('0xff')
+# b = ConstBitStream(a)
+# c = a[:]
+# d = copy.copy(a)
+# self.assertTrue(a._datastore is b._datastore)
+# self.assertTrue(a._datastore is c._datastore)
+# self.assertTrue(a._datastore is d._datastore)
+
+class InitialiseFromBytes(unittest.TestCase):
+ def testBytesBehaviour(self):
+ a = ConstBitStream(b'uint:5=2')
+ b = ConstBitStream(b'')
+ c = ConstBitStream(bytes=b'uint:5=2')
+ if b'' == '':
+ # Python 2
+ self.assertEqual(a, 'uint:5=2')
+ self.assertFalse(b)
+ self.assertEqual(c.bytes, b'uint:5=2')
+ else:
+ self.assertEqual(a.bytes, b'uint:5=2')
+ self.assertFalse(b)
+ self.assertEqual(c, b'uint:5=2')
+
+ def testBytearrayBehaviour(self):
+ a = ConstBitStream(bytearray(b'uint:5=2'))
+ b = ConstBitStream(bytearray(4))
+ c = ConstBitStream(bytes=bytearray(b'uint:5=2'))
+ self.assertEqual(a.bytes, b'uint:5=2')
+ self.assertEqual(b, '0x00000000')
+ self.assertEqual(c.bytes, b'uint:5=2')
+
+
+class CoverageCompletionTests(unittest.TestCase):
+ def testUeReadError(self):
+ s = ConstBitStream('0b000000001')
+ self.assertRaises(bitstring.ReadError, s.read, 'ue')
+
+ def testOverwriteWithSelf(self):
+ s = BitStream('0b1101')
+ s.overwrite(s)
+ self.assertEqual(s, '0b1101')
+
+
+class Subclassing(unittest.TestCase):
+
+ def testIsInstance(self):
+ class SubBits(BitStream): pass
+ a = SubBits()
+ self.assertTrue(isinstance(a, SubBits))
+
+ def testClassType(self):
+ class SubBits(BitStream): pass
+ self.assertEqual(SubBits().__class__, SubBits)
+
+
+class BytesProblems(unittest.TestCase):
+
+ def testOffsetButNoLength(self):
+ b = BitStream(bytes=b'\x00\xaa', offset=8)
+ self.assertEqual(b.hex, 'aa')
+ b = BitStream(bytes=b'\x00\xaa', offset=4)
+ self.assertEqual(b.hex, '0aa')
+
+ def testInvert(self):
+ b = BitStream(bytes=b'\x00\xaa', offset=8, length=8)
+ self.assertEqual(b.hex, 'aa')
+ b.invert()
+ self.assertEqual(b.hex, '55')
+
+ def testPrepend(self):
+ b = BitStream(bytes=b'\xaa\xbb', offset=8, length=4)
+ self.assertEqual(b.hex, 'b')
+ b.prepend('0xe')
+ self.assertEqual(b.hex, 'eb')
+ b = BitStream(bytes=b'\x00\xaa', offset=8, length=8)
+ b.prepend('0xee')
+ self.assertEqual(b.hex, 'eeaa')
+
+ def testByteSwap(self):
+ b = BitStream(bytes=b'\x01\x02\x03\x04', offset=8)
+ b.byteswap()
+ self.assertEqual(b, '0x040302')
+
+ def testBinProperty(self):
+ b = BitStream(bytes=b'\x00\xaa', offset=8, length=4)
+        self.assertEqual(b.bin, '1010')
\ No newline at end of file
diff --git a/python/bitstring/test/test_bitstring.py b/python/bitstring/test/test_bitstring.py
new file mode 100644
index 000000000..1b52b7b80
--- /dev/null
+++ b/python/bitstring/test/test_bitstring.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+"""
+Module-level unit tests.
+"""
+
+import unittest
+import sys
+sys.path.insert(0, '..')
+import bitstring
+import copy
+
+
+class ModuleData(unittest.TestCase):
+ def testVersion(self):
+ self.assertEqual(bitstring.__version__, '3.1.3')
+
+ def testAll(self):
+ exported = ['ConstBitArray', 'ConstBitStream', 'BitStream', 'BitArray',
+ 'Bits', 'BitString', 'pack', 'Error', 'ReadError',
+ 'InterpretError', 'ByteAlignError', 'CreationError', 'bytealigned']
+ self.assertEqual(set(bitstring.__all__), set(exported))
+
+ def testReverseDict(self):
+ d = bitstring.BYTE_REVERSAL_DICT
+ for i in range(256):
+ a = bitstring.Bits(uint=i, length=8)
+ b = d[i]
+ self.assertEqual(a.bin[::-1], bitstring.Bits(bytes=b).bin)
+
+ def testAliases(self):
+ self.assertTrue(bitstring.Bits is bitstring.ConstBitArray)
+ self.assertTrue(bitstring.BitStream is bitstring.BitString)
+
+
+class MemoryUsage(unittest.TestCase):
+ def testBaselineMemory(self):
+ try:
+            from pympler.asizeof import asizeof as size
+ except ImportError:
+ return
+ # These values might be platform dependent, so don't fret too much.
+ self.assertEqual(size(bitstring.ConstBitStream([0])), 64)
+ self.assertEqual(size(bitstring.Bits([0])), 64)
+ self.assertEqual(size(bitstring.BitStream([0])), 64)
+ self.assertEqual(size(bitstring.BitArray([0])), 64)
+ from bitstring.bitstore import ByteStore
+ self.assertEqual(size(ByteStore(bytearray())), 100)
+
+
+class Copy(unittest.TestCase):
+ def testConstBitArrayCopy(self):
+ import copy
+ cba = bitstring.Bits(100)
+ cba_copy = copy.copy(cba)
+ self.assertTrue(cba is cba_copy)
+
+ def testBitArrayCopy(self):
+ ba = bitstring.BitArray(100)
+ ba_copy = copy.copy(ba)
+ self.assertFalse(ba is ba_copy)
+ self.assertFalse(ba._datastore is ba_copy._datastore)
+ self.assertTrue(ba == ba_copy)
+
+ def testConstBitStreamCopy(self):
+ cbs = bitstring.ConstBitStream(100)
+ cbs.pos = 50
+ cbs_copy = copy.copy(cbs)
+ self.assertEqual(cbs_copy.pos, 0)
+ self.assertTrue(cbs._datastore is cbs_copy._datastore)
+ self.assertTrue(cbs == cbs_copy)
+
+ def testBitStreamCopy(self):
+ bs = bitstring.BitStream(100)
+ bs.pos = 50
+ bs_copy = copy.copy(bs)
+ self.assertEqual(bs_copy.pos, 0)
+ self.assertFalse(bs._datastore is bs_copy._datastore)
+ self.assertTrue(bs == bs_copy)
+
+
+class Interning(unittest.TestCase):
+ def testBits(self):
+ a = bitstring.Bits('0xf')
+ b = bitstring.Bits('0xf')
+ self.assertTrue(a is b)
+ c = bitstring.Bits('0b1111')
+ self.assertFalse(a is c)
+
+ def testCBS(self):
+ a = bitstring.ConstBitStream('0b11000')
+ b = bitstring.ConstBitStream('0b11000')
+ self.assertFalse(a is b)
+ # self.assertTrue(a._datastore is b._datastore)
+
+
+
+
\ No newline at end of file
diff --git a/python/bitstring/test/test_constbitstream.py b/python/bitstring/test/test_constbitstream.py
new file mode 100644
index 000000000..a1bef743f
--- /dev/null
+++ b/python/bitstring/test/test_constbitstream.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+
+import unittest
+import sys
+sys.path.insert(0, '..')
+import bitstring
+from bitstring import ConstBitStream as CBS
+
+class All(unittest.TestCase):
+ def testFromFile(self):
+ s = CBS(filename='test.m1v')
+ self.assertEqual(s[0:32].hex, '000001b3')
+ self.assertEqual(s.read(8 * 4).hex, '000001b3')
+ width = s.read(12).uint
+ height = s.read(12).uint
+ self.assertEqual((width, height), (352, 288))
+
+
+class InterleavedExpGolomb(unittest.TestCase):
+ def testReading(self):
+ s = CBS(uie=333)
+ a = s.read('uie')
+ self.assertEqual(a, 333)
+ s = CBS('uie=12, sie=-9, sie=9, uie=1000000')
+ u = s.unpack('uie, 2*sie, uie')
+ self.assertEqual(u, [12, -9, 9, 1000000])
+
+ def testReadingErrors(self):
+ s = CBS(10)
+ self.assertRaises(bitstring.ReadError, s.read, 'uie')
+ self.assertEqual(s.pos, 0)
+ self.assertRaises(bitstring.ReadError, s.read, 'sie')
+ self.assertEqual(s.pos, 0)
+
+
+class ReadTo(unittest.TestCase):
+ def testByteAligned(self):
+ a = CBS('0xaabb00aa00bb')
+ b = a.readto('0x00', bytealigned=True)
+ self.assertEqual(b, '0xaabb00')
+ self.assertEqual(a.bytepos, 3)
+ b = a.readto('0xaa', bytealigned=True)
+ self.assertEqual(b, '0xaa')
+ self.assertRaises(bitstring.ReadError, a.readto, '0xcc', bytealigned=True)
+
+ def testNotAligned(self):
+ a = CBS('0b00111001001010011011')
+ a.pos = 1
+ self.assertEqual(a.readto('0b00'), '0b011100')
+ self.assertEqual(a.readto('0b110'), '0b10010100110')
+ self.assertRaises(ValueError, a.readto, '')
+
+ def testDisallowIntegers(self):
+ a = CBS('0x0f')
+ self.assertRaises(ValueError, a.readto, 4)
+
+ def testReadingLines(self):
+ s = b"This is a test\nof reading lines\nof text\n"
+ b = CBS(bytes=s)
+ n = bitstring.Bits(bytes=b'\n')
+ self.assertEqual(b.readto(n).bytes, b'This is a test\n')
+ self.assertEqual(b.readto(n).bytes, b'of reading lines\n')
+ self.assertEqual(b.readto(n).bytes, b'of text\n')
+
+
+class Subclassing(unittest.TestCase):
+
+ def testIsInstance(self):
+ class SubBits(CBS): pass
+ a = SubBits()
+ self.assertTrue(isinstance(a, SubBits))
+
+ def testClassType(self):
+ class SubBits(CBS): pass
+ self.assertEqual(SubBits().__class__, SubBits)
+
+
+class PadToken(unittest.TestCase):
+
+ def testRead(self):
+ s = CBS('0b100011110001')
+ a = s.read('pad:1')
+ self.assertEqual(a, None)
+ self.assertEqual(s.pos, 1)
+ a = s.read(3)
+ self.assertEqual(a, CBS('0b000'))
+ a = s.read('pad:0')
+ self.assertEqual(a, None)
+ self.assertEqual(s.pos, 4)
+
+ def testReadList(self):
+ s = CBS('0b10001111001')
+ t = s.readlist('pad:1, uint:3, pad:4, uint:3')
+ self.assertEqual(t, [0, 1])
+ s.pos = 0
+ t = s.readlist('pad:1, pad:5')
+ self.assertEqual(t, [])
+ self.assertEqual(s.pos, 6)
+ s.pos = 0
+ t = s.readlist('pad:1, bin, pad:4, uint:3')
+ self.assertEqual(t, ['000', 1])
+ s.pos = 0
+ t = s.readlist('pad, bin:3, pad:4, uint:3')
+ self.assertEqual(t, ['000', 1])
+
+class ReadingBytes(unittest.TestCase):
+
+ def testUnpackingBytes(self):
+ s = CBS(80)
+ t = s.unpack('bytes:1')
+ self.assertEqual(t[0], b'\x00')
+ a, b, c = s.unpack('bytes:1, bytes, bytes:2')
+ self.assertEqual(a, b'\x00')
+ self.assertEqual(b, b'\x00'*7)
+ self.assertEqual(c, b'\x00'*2)
+
+ def testUnpackingBytesWithKeywords(self):
+ s = CBS('0x55'*10)
+ t = s.unpack('pad:a, bytes:b, bytes, pad:a', a=4, b=6)
+ self.assertEqual(t, [b'\x55'*6, b'\x55'*3])
+
diff --git a/python/blessings/LICENSE b/python/blessings/LICENSE
new file mode 100644
index 000000000..3d3a44e65
--- /dev/null
+++ b/python/blessings/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2011 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/python/blessings/MANIFEST.in b/python/blessings/MANIFEST.in
new file mode 100644
index 000000000..3f4fbd708
--- /dev/null
+++ b/python/blessings/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.rst
+include LICENSE
+include tox.ini
diff --git a/python/blessings/PKG-INFO b/python/blessings/PKG-INFO
new file mode 100644
index 000000000..c52ca3cf9
--- /dev/null
+++ b/python/blessings/PKG-INFO
@@ -0,0 +1,426 @@
+Metadata-Version: 1.0
+Name: blessings
+Version: 1.3
+Summary: A thin, practical wrapper around terminal formatting, positioning, and more
+Home-page: https://github.com/erikrose/blessings
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Description: =========
+ Blessings
+ =========
+
+ Coding with Blessings looks like this... ::
+
+ from blessings import Terminal
+
+ t = Terminal()
+
+ print t.bold('Hi there!')
+ print t.bold_red_on_bright_green('It hurts my eyes!')
+
+ with t.location(0, t.height - 1):
+ print 'This is at the bottom.'
+
+ Or, for byte-level control, you can drop down and play with raw terminal
+ capabilities::
+
+ print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t)
+ print t.wingo(2)
+
+ The Pitch
+ =========
+
+ Blessings lifts several of curses_' limiting assumptions, and it makes your
+ code pretty, too:
+
+ * Use styles, color, and maybe a little positioning without clearing the whole
+ screen first.
+ * Leave more than one screenful of scrollback in the buffer after your program
+ exits, like a well-behaved command-line app should.
+ * Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so
+ your code doesn't get crowded out by terminal bookkeeping.
+ * Act intelligently when somebody redirects your output to a file, omitting the
+ terminal control codes the user doesn't want to see (optional).
+
+ .. _curses: http://docs.python.org/library/curses.html
+
+ Before And After
+ ----------------
+
+ Without Blessings, this is how you'd print some underlined text at the bottom
+ of the screen::
+
+ from curses import tigetstr, setupterm, tparm
+ from fcntl import ioctl
+ from os import isatty
+ import struct
+ import sys
+ from termios import TIOCGWINSZ
+
+ # If we want to tolerate having our output piped to other commands or
+ # files without crashing, we need to do all this branching:
+ if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()):
+ setupterm()
+ sc = tigetstr('sc')
+ cup = tigetstr('cup')
+ rc = tigetstr('rc')
+ underline = tigetstr('smul')
+ normal = tigetstr('sgr0')
+ else:
+ sc = cup = rc = underline = normal = ''
+ print sc # Save cursor position.
+ if cup:
+ # tigetnum('lines') doesn't always update promptly, hence this:
+ height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0]
+ print tparm(cup, height - 1, 0) # Move cursor to bottom.
+ print 'This is {under}underlined{normal}!'.format(under=underline,
+ normal=normal)
+ print rc # Restore cursor position.
+
+ Phew! That was long and full of incomprehensible trash! Let's try it again,
+ this time with Blessings::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'This is', term.underline('pretty!')
+
+ Much better.
+
+ What It Provides
+ ================
+
+ Blessings provides just one top-level object: ``Terminal``. Instantiating a
+ ``Terminal`` figures out whether you're on a terminal at all and, if so, does
+ any necessary terminal setup. After that, you can proceed to ask it all sorts
+ of things about the terminal. Terminal terminal terminal.
+
+ Simple Formatting
+ -----------------
+
+ Lots of handy formatting codes ("capabilities" in low-level parlance) are
+ available as attributes on a ``Terminal``. For example::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print 'I am ' + term.bold + 'bold' + term.normal + '!'
+
+ You can also use them as wrappers so you don't have to say ``normal``
+ afterward::
+
+ print 'I am', term.bold('bold') + '!'
+
+ Or, if you want fine-grained control while maintaining some semblance of
+ brevity, you can combine it with Python's string formatting, which makes
+ attributes easy to access::
+
+ print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term)
+
+ Simple capabilities of interest include...
+
+ * ``bold``
+ * ``reverse``
+ * ``underline``
+ * ``no_underline`` (which turns off underlining)
+ * ``blink``
+ * ``normal`` (which turns off everything, even colors)
+ * ``clear_eol`` (clear to the end of the line)
+ * ``clear_bol`` (clear to beginning of line)
+ * ``clear_eos`` (clear to end of screen)
+
+ Here are a few more which are less likely to work on all terminals:
+
+ * ``dim``
+ * ``italic`` and ``no_italic``
+ * ``shadow`` and ``no_shadow``
+ * ``standout`` and ``no_standout``
+ * ``subscript`` and ``no_subscript``
+ * ``superscript`` and ``no_superscript``
+ * ``flash`` (which flashes the screen once)
+
+ Note that, while the inverse of ``underline`` is ``no_underline``, the only way
+ to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any
+ custom colors. This is because there's no way to tell the terminal to undo
+ certain pieces of formatting, even at the lowest level.
+
+ You might notice that the above aren't the typical incomprehensible terminfo
+ capability names; we alias a few of the harder-to-remember ones for
+ readability. However, you aren't limited to these: you can reference any
+ string-returning capability listed on the `terminfo man page`_ by the name
+ under the "Cap-name" column: for example, ``term.rum``.
+
+ .. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+ Color
+ -----
+
+ 16 colors, both foreground and background, are available as easy-to-remember
+ attributes::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.red + term.on_green + 'Red on green? Ick!' + term.normal
+ print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal
+
+ You can also call them as wrappers, which sets everything back to normal at the
+ end::
+
+ print term.red_on_green('Red on green? Ick!')
+ print term.yellow('I can barely see it.')
+
+ The available colors are...
+
+ * ``black``
+ * ``red``
+ * ``green``
+ * ``yellow``
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white``
+
+ You can set the background color instead of the foreground by prepending
+ ``on_``, as in ``on_blue``. There is also a ``bright`` version of each color:
+ for example, ``on_bright_blue``.
+
+ There is also a numerical interface to colors, which takes an integer from
+ 0-15::
+
+ term.color(5) + 'Hello' + term.normal
+ term.on_color(3) + 'Hello' + term.normal
+
+ term.color(5)('Hello')
+ term.on_color(3)('Hello')
+
+ If some color is unsupported (for instance, if only the normal colors are
+ available, not the bright ones), trying to use it will, on most terminals, have
+ no effect: the foreground and background colors will stay as they were. You can
+ get fancy and do different things depending on the supported colors by checking
+ `number_of_colors`_.
+
+ .. _`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors
+
+ Compound Formatting
+ -------------------
+
+ If you want to do lots of crazy formatting all at once, you can just mash it
+ all together::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.bold_underline_green_on_yellow + 'Woo' + term.normal
+
+ Or you can use your newly coined attribute as a wrapper, which implicitly sets
+ everything back to normal afterward::
+
+ print term.bold_underline_green_on_yellow('Woo')
+
+ This compound notation comes in handy if you want to allow users to customize
+ the formatting of your app: just have them pass in a format specifier like
+ "bold_green" on the command line, and do a quick ``getattr(term,
+ that_option)('Your text')`` when you do your formatting.
+
+ I'd be remiss if I didn't credit couleur_, where I probably got the idea for
+ all this mashing.
+
+ .. _couleur: http://pypi.python.org/pypi/couleur
+
+ Parametrized Capabilities
+ -------------------------
+
+ Some capabilities take parameters. Rather than making you dig up ``tparm()``
+ all the time, we simply make such capabilities into callable strings. You can
+ pass the parameters right in::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.move(10, 1)
+
+ Here are some of interest:
+
+ ``move``
+ Position the cursor elsewhere. Parameters are y coordinate, then x
+ coordinate.
+ ``move_x``
+ Move the cursor to the given column.
+ ``move_y``
+ Move the cursor to the given row.
+
+ You can also reference any other string-returning capability listed on the
+ `terminfo man page`_ by its name under the "Cap-name" column.
+
+ .. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+ Height and Width
+ ----------------
+
+ It's simple to get the height and width of the terminal, in characters::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ height = term.height
+ width = term.width
+
+ These are newly updated each time you ask for them, so they're safe to use from
+ SIGWINCH handlers.
+
+ Temporary Repositioning
+ -----------------------
+
+ Sometimes you need to flit to a certain location, print something, and then
+ return: for example, when updating a progress bar at the bottom of the screen.
+ ``Terminal`` provides a context manager for doing this concisely::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'Here is the bottom.'
+ print 'This is back where I came from.'
+
+ Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass
+ just one of them, leaving the other alone. For example... ::
+
+ with term.location(y=10):
+ print 'We changed just the row.'
+
+ If you want to reposition permanently, see ``move``, in an example above.
+
+ Pipe Savvy
+ ----------
+
+ If your program isn't attached to a terminal, like if it's being piped to
+ another command or redirected to a file, all the capability attributes on
+ ``Terminal`` will return empty strings. You'll get a nice-looking file without
+ any formatting codes gumming up the works.
+
+ If you want to override this--like if you anticipate your program being piped
+ through ``less -r``, which handles terminal escapes just fine--pass
+ ``force_styling=True`` to the ``Terminal`` constructor.
+
+ In any case, there is an ``is_a_tty`` attribute on ``Terminal`` that lets you
+ see whether the attached stream seems to be a terminal. If it's false, you
+ might refrain from drawing progress bars and other frippery, since you're
+ apparently headed into a pipe::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ if term.is_a_tty:
+ with term.location(0, term.height - 1):
+ print 'Progress: [=======> ]'
+ print term.bold('Important stuff')
+
+ Shopping List
+ =============
+
+ There are decades of legacy tied up in terminal interaction, so attention to
+ detail and behavior in edge cases make a difference. Here are some ways
+ Blessings has your back:
+
+ * Uses the terminfo database so it works with any terminal type
+ * Provides up-to-the-moment terminal height and width, so you can respond to
+ terminal size changes (SIGWINCH signals). (Most other libraries query the
+ ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines``
+ terminal capabilities, which don't update promptly, if at all.)
+ * Avoids making a mess if the output gets piped to a non-terminal
+ * Works great with standard Python string templating
+ * Provides convenient access to all terminal capabilities, not just a sugared
+ few
+ * Outputs to any file-like object, not just stdout
+ * Keeps a minimum of internal state, so you can feel free to mix and match with
+ calls to curses or whatever other terminal libraries you like
+
+ Blessings does not provide...
+
+ * Native color support on the Windows command prompt. However, it should work
+ when used in concert with colorama_.
+
+ .. _colorama: http://pypi.python.org/pypi/colorama/0.2.4
+
+ Bugs
+ ====
+
+ Bugs or suggestions? Visit the `issue tracker`_.
+
+ .. _`issue tracker`: https://github.com/erikrose/blessings/issues/new
+
+ License
+ =======
+
+ Blessings is under the MIT License. See the LICENSE file.
+
+ Version History
+ ===============
+
+ 1.3
+ * Add ``number_of_colors``, which tells you how many colors the terminal
+ supports.
+ * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the
+ named colors can. Also, make them both fall back to the ``setf`` and
+ ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and
+ ``setab`` aren't available.
+ * Allow ``color`` attr to act as an unparametrized string, not just a
+ callable.
+ * Make ``height`` and ``width`` examine any passed-in stream before falling
+ back to stdout. (This rarely if ever affects actual behavior; it's mostly
+ philosophical.)
+ * Make caching simpler and slightly more efficient.
+ * Get rid of a reference cycle between Terminals and FormattingStrings.
+ * Update docs to reflect that terminal addressing (as in ``location()``) is
+ 0-based.
+
+ 1.2
+ * Added support for Python 3! We need 3.2.3 or greater, because the curses
+ library couldn't decide whether to accept strs or bytes before that
+ (http://bugs.python.org/issue10570).
+ * Everything that comes out of the library is now unicode. This lets us
+ support Python 3 without making a mess of the code, and Python 2 should
+ continue to work unless you were testing types (and badly). Please file a
+ bug if this causes trouble for you.
+ * Changed to the MIT License for better world domination.
+ * Added Sphinx docs.
+
+ 1.1
+ * Added nicely named attributes for colors.
+ * Introduced compound formatting.
+ * Added wrapper behavior for styling and colors.
+ * Let you force capabilities to be non-empty, even if the output stream is
+ not a terminal.
+ * Added the ``is_a_tty`` attribute for telling whether the output stream is a
+ terminal.
+ * Sugared the remaining interesting string capabilities.
+ * Let ``location()`` operate on just an x *or* y coordinate.
+
+ 1.0
+ * Extracted Blessings from nose-progressive, my `progress-bar-having,
+ traceback-shortcutting, rootin', tootin' testrunner`_. It provided the
+ tootin' functionality.
+
+ .. _`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/
+
+Keywords: terminal,tty,curses,ncurses,formatting,style,color,console
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: Console :: Curses
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: User Interfaces
+Classifier: Topic :: Terminals
diff --git a/python/blessings/README.rst b/python/blessings/README.rst
new file mode 100644
index 000000000..59983de86
--- /dev/null
+++ b/python/blessings/README.rst
@@ -0,0 +1,399 @@
+=========
+Blessings
+=========
+
+Coding with Blessings looks like this... ::
+
+ from blessings import Terminal
+
+ t = Terminal()
+
+ print t.bold('Hi there!')
+ print t.bold_red_on_bright_green('It hurts my eyes!')
+
+ with t.location(0, t.height - 1):
+ print 'This is at the bottom.'
+
+Or, for byte-level control, you can drop down and play with raw terminal
+capabilities::
+
+ print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t)
+ print t.wingo(2)
+
+The Pitch
+=========
+
+Blessings lifts several of curses_' limiting assumptions, and it makes your
+code pretty, too:
+
+* Use styles, color, and maybe a little positioning without clearing the whole
+ screen first.
+* Leave more than one screenful of scrollback in the buffer after your program
+ exits, like a well-behaved command-line app should.
+* Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so
+ your code doesn't get crowded out by terminal bookkeeping.
+* Act intelligently when somebody redirects your output to a file, omitting the
+ terminal control codes the user doesn't want to see (optional).
+
+.. _curses: http://docs.python.org/library/curses.html
+
+Before And After
+----------------
+
+Without Blessings, this is how you'd print some underlined text at the bottom
+of the screen::
+
+ from curses import tigetstr, setupterm, tparm
+ from fcntl import ioctl
+ from os import isatty
+ import struct
+ import sys
+ from termios import TIOCGWINSZ
+
+ # If we want to tolerate having our output piped to other commands or
+ # files without crashing, we need to do all this branching:
+ if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()):
+ setupterm()
+ sc = tigetstr('sc')
+ cup = tigetstr('cup')
+ rc = tigetstr('rc')
+ underline = tigetstr('smul')
+ normal = tigetstr('sgr0')
+ else:
+ sc = cup = rc = underline = normal = ''
+ print sc # Save cursor position.
+ if cup:
+ # tigetnum('lines') doesn't always update promptly, hence this:
+ height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0]
+ print tparm(cup, height - 1, 0) # Move cursor to bottom.
+ print 'This is {under}underlined{normal}!'.format(under=underline,
+ normal=normal)
+ print rc # Restore cursor position.
+
+Phew! That was long and full of incomprehensible trash! Let's try it again,
+this time with Blessings::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'This is', term.underline('pretty!')
+
+Much better.
+
+What It Provides
+================
+
+Blessings provides just one top-level object: ``Terminal``. Instantiating a
+``Terminal`` figures out whether you're on a terminal at all and, if so, does
+any necessary terminal setup. After that, you can proceed to ask it all sorts
+of things about the terminal. Terminal terminal terminal.
+
+Simple Formatting
+-----------------
+
+Lots of handy formatting codes ("capabilities" in low-level parlance) are
+available as attributes on a ``Terminal``. For example::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print 'I am ' + term.bold + 'bold' + term.normal + '!'
+
+You can also use them as wrappers so you don't have to say ``normal``
+afterward::
+
+ print 'I am', term.bold('bold') + '!'
+
+Or, if you want fine-grained control while maintaining some semblance of
+brevity, you can combine it with Python's string formatting, which makes
+attributes easy to access::
+
+ print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term)
+
+Simple capabilities of interest include...
+
+* ``bold``
+* ``reverse``
+* ``underline``
+* ``no_underline`` (which turns off underlining)
+* ``blink``
+* ``normal`` (which turns off everything, even colors)
+* ``clear_eol`` (clear to the end of the line)
+* ``clear_bol`` (clear to beginning of line)
+* ``clear_eos`` (clear to end of screen)
+
+Here are a few more which are less likely to work on all terminals:
+
+* ``dim``
+* ``italic`` and ``no_italic``
+* ``shadow`` and ``no_shadow``
+* ``standout`` and ``no_standout``
+* ``subscript`` and ``no_subscript``
+* ``superscript`` and ``no_superscript``
+* ``flash`` (which flashes the screen once)
+
+Note that, while the inverse of ``underline`` is ``no_underline``, the only way
+to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any
+custom colors. This is because there's no way to tell the terminal to undo
+certain pieces of formatting, even at the lowest level.
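+
+A quick sketch to make the difference concrete (illustrative only)::
+
+    from blessings import Terminal
+
+    term = Terminal()
+    # Underlining can be switched off on its own...
+    print term.underline + 'underlined' + term.no_underline + ' plain'
+    # ...but there is no no_bold; only normal resets bold (and colors):
+    print term.bold + term.red + 'bold red' + term.normal + ' plain'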
+
+You might notice that the above aren't the typical incomprehensible terminfo
+capability names; we alias a few of the harder-to-remember ones for
+readability. However, you aren't limited to these: you can reference any
+string-returning capability listed on the `terminfo man page`_ by the name
+under the "Cap-name" column: for example, ``term.rum``.
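+
+As another sketch, ``civis`` and ``cnorm`` hide and restore the cursor on
+terminals that support them (support varies, so treat this as illustrative)::
+
+    print term.civis + 'Working...' + term.cnorm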
+
+.. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+Color
+-----
+
+16 colors, both foreground and background, are available as easy-to-remember
+attributes::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.red + term.on_green + 'Red on green? Ick!' + term.normal
+ print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal
+
+You can also call them as wrappers, which sets everything back to normal at the
+end::
+
+ print term.red_on_green('Red on green? Ick!')
+ print term.yellow('I can barely see it.')
+
+The available colors are...
+
+* ``black``
+* ``red``
+* ``green``
+* ``yellow``
+* ``blue``
+* ``magenta``
+* ``cyan``
+* ``white``
+
+You can set the background color instead of the foreground by prepending
+``on_``, as in ``on_blue``. There is also a ``bright`` version of each color:
+for example, ``on_bright_blue``.
+
+There is also a numerical interface to colors, which takes an integer from
+0-15::
+
+ term.color(5) + 'Hello' + term.normal
+ term.on_color(3) + 'Hello' + term.normal
+
+ term.color(5)('Hello')
+ term.on_color(3)('Hello')
+
+If some color is unsupported (for instance, if only the normal colors are
+available, not the bright ones), trying to use it will, on most terminals, have
+no effect: the foreground and background colors will stay as they were. You can
+get fancy and do different things depending on the supported colors by checking
+`number_of_colors`_.
+
+.. _`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors
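+
+For example, a minimal sketch of such a check (the threshold of 16 is only
+for illustration)::
+
+    from blessings import Terminal
+
+    term = Terminal()
+    if term.number_of_colors >= 16:
+        print term.bright_red('Fancy!')
+    else:
+        print term.red('Plain, but portable.')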
+
+Compound Formatting
+-------------------
+
+If you want to do lots of crazy formatting all at once, you can just mash it
+all together::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.bold_underline_green_on_yellow + 'Woo' + term.normal
+
+Or you can use your newly coined attribute as a wrapper, which implicitly sets
+everything back to normal afterward::
+
+ print term.bold_underline_green_on_yellow('Woo')
+
+This compound notation comes in handy if you want to allow users to customize
+the formatting of your app: just have them pass in a format specifier like
+"bold_green" on the command line, and do a quick ``getattr(term,
+that_option)('Your text')`` when you do your formatting.
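+
+Hypothetically, that might look like this (the ``--style`` option and the
+``bold_green`` default are made up for the example)::
+
+    from blessings import Terminal
+
+    term = Terminal()
+    style = 'bold_green'  # imagine this came from a --style option
+    print getattr(term, style)('Your text')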
+
+I'd be remiss if I didn't credit couleur_, where I probably got the idea for
+all this mashing.
+
+.. _couleur: http://pypi.python.org/pypi/couleur
+
+Parametrized Capabilities
+-------------------------
+
+Some capabilities take parameters. Rather than making you dig up ``tparm()``
+all the time, we simply make such capabilities into callable strings. You can
+pass the parameters right in::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ print term.move(10, 1)
+
+Here are some of interest:
+
+``move``
+ Position the cursor elsewhere. Parameters are y coordinate, then x
+ coordinate.
+``move_x``
+ Move the cursor to the given column.
+``move_y``
+ Move the cursor to the given row.
+
+You can also reference any other string-returning capability listed on the
+`terminfo man page`_ by its name under the "Cap-name" column.
+
+.. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/
+
+Height and Width
+----------------
+
+It's simple to get the height and width of the terminal, in characters::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ height = term.height
+ width = term.width
+
+These are newly updated each time you ask for them, so they're safe to use from
+SIGWINCH handlers.
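+
+For instance, a sketch of a resize handler (assuming a Unix-like platform
+where ``SIGWINCH`` exists)::
+
+    import signal
+
+    from blessings import Terminal
+
+    term = Terminal()
+
+    def on_resize(signum, frame):
+        # height and width are re-measured on each access, so these
+        # values reflect the terminal's new size.
+        print 'Now %d by %d.' % (term.height, term.width)
+
+    signal.signal(signal.SIGWINCH, on_resize)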
+
+Temporary Repositioning
+-----------------------
+
+Sometimes you need to flit to a certain location, print something, and then
+return: for example, when updating a progress bar at the bottom of the screen.
+``Terminal`` provides a context manager for doing this concisely::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ with term.location(0, term.height - 1):
+ print 'Here is the bottom.'
+ print 'This is back where I came from.'
+
+Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass
+just one of them, leaving the other alone. For example... ::
+
+ with term.location(y=10):
+ print 'We changed just the row.'
+
+If you want to reposition permanently, see ``move``, in an example above.
+
+Pipe Savvy
+----------
+
+If your program isn't attached to a terminal, like if it's being piped to
+another command or redirected to a file, all the capability attributes on
+``Terminal`` will return empty strings. You'll get a nice-looking file without
+any formatting codes gumming up the works.
+
+If you want to override this--like if you anticipate your program being piped
+through ``less -r``, which handles terminal escapes just fine--pass
+``force_styling=True`` to the ``Terminal`` constructor.
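+
+A common arrangement is to wire that up to a command-line flag;
+hypothetically::
+
+    import sys
+
+    from blessings import Terminal
+
+    # '--force-color' is an invented flag name; pick whatever fits your app.
+    term = Terminal(force_styling='--force-color' in sys.argv)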
+
+In any case, there is an ``is_a_tty`` attribute on ``Terminal`` that lets you
+see whether the attached stream seems to be a terminal. If it's false, you
+might refrain from drawing progress bars and other frippery, since you're
+apparently headed into a pipe::
+
+ from blessings import Terminal
+
+ term = Terminal()
+ if term.is_a_tty:
+ with term.location(0, term.height - 1):
+ print 'Progress: [=======> ]'
+ print term.bold('Important stuff')
+
+Shopping List
+=============
+
+There are decades of legacy tied up in terminal interaction, so attention to
+detail and behavior in edge cases make a difference. Here are some ways
+Blessings has your back:
+
+* Uses the terminfo database so it works with any terminal type
+* Provides up-to-the-moment terminal height and width, so you can respond to
+ terminal size changes (SIGWINCH signals). (Most other libraries query the
+ ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines``
+ terminal capabilities, which don't update promptly, if at all.)
+* Avoids making a mess if the output gets piped to a non-terminal
+* Works great with standard Python string templating
+* Provides convenient access to all terminal capabilities, not just a sugared
+ few
+* Outputs to any file-like object, not just stdout
+* Keeps a minimum of internal state, so you can feel free to mix and match with
+ calls to curses or whatever other terminal libraries you like
+
+Blessings does not provide...
+
+* Native color support on the Windows command prompt. However, it should work
+ when used in concert with colorama_.
+
+.. _colorama: http://pypi.python.org/pypi/colorama/0.2.4
+
+Bugs
+====
+
+Bugs or suggestions? Visit the `issue tracker`_.
+
+.. _`issue tracker`: https://github.com/erikrose/blessings/issues/new
+
+License
+=======
+
+Blessings is under the MIT License. See the LICENSE file.
+
+Version History
+===============
+
+1.3
+ * Add ``number_of_colors``, which tells you how many colors the terminal
+ supports.
+ * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the
+ named colors can. Also, make them both fall back to the ``setf`` and
+ ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and
+ ``setab`` aren't available.
+ * Allow ``color`` attr to act as an unparametrized string, not just a
+ callable.
+ * Make ``height`` and ``width`` examine any passed-in stream before falling
+ back to stdout. (This rarely if ever affects actual behavior; it's mostly
+ philosophical.)
+ * Make caching simpler and slightly more efficient.
+ * Get rid of a reference cycle between Terminals and FormattingStrings.
+ * Update docs to reflect that terminal addressing (as in ``location()``) is
+ 0-based.
+
+1.2
+ * Added support for Python 3! We need 3.2.3 or greater, because the curses
+ library couldn't decide whether to accept strs or bytes before that
+ (http://bugs.python.org/issue10570).
+ * Everything that comes out of the library is now unicode. This lets us
+ support Python 3 without making a mess of the code, and Python 2 should
+ continue to work unless you were testing types (and badly). Please file a
+ bug if this causes trouble for you.
+ * Changed to the MIT License for better world domination.
+ * Added Sphinx docs.
+
+1.1
+ * Added nicely named attributes for colors.
+ * Introduced compound formatting.
+ * Added wrapper behavior for styling and colors.
+ * Let you force capabilities to be non-empty, even if the output stream is
+ not a terminal.
+ * Added the ``is_a_tty`` attribute for telling whether the output stream is a
+ terminal.
+ * Sugared the remaining interesting string capabilities.
+ * Let ``location()`` operate on just an x *or* y coordinate.
+
+1.0
+ * Extracted Blessings from nose-progressive, my `progress-bar-having,
+ traceback-shortcutting, rootin', tootin' testrunner`_. It provided the
+ tootin' functionality.
+
+.. _`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/
diff --git a/python/blessings/blessings/__init__.py b/python/blessings/blessings/__init__.py
new file mode 100644
index 000000000..081288ba6
--- /dev/null
+++ b/python/blessings/blessings/__init__.py
@@ -0,0 +1,450 @@
+from collections import defaultdict
+import curses
+from curses import tigetstr, tigetnum, setupterm, tparm
+from fcntl import ioctl
+try:
+ from io import UnsupportedOperation as IOUnsupportedOperation
+except ImportError:
+ class IOUnsupportedOperation(Exception):
+ """A dummy exception to take the place of Python 3's ``io.UnsupportedOperation`` in Python 2"""
+ pass
+import os
+from os import isatty, environ
+from platform import python_version_tuple
+import struct
+import sys
+from termios import TIOCGWINSZ
+
+
+if ('3', '0', '0') <= python_version_tuple() < ('3', '2', '2+'): # Good till 3.2.10
+ # Python 3.x < 3.2.3 has a bug in which tparm() erroneously takes a string.
+ raise ImportError('Blessings needs Python 3.2.3 or greater for Python 3 '
+ 'support due to http://bugs.python.org/issue10570.')
+
+
+__all__ = ['Terminal']
+
+
+class Terminal(object):
+ """An abstraction around terminal capabilities
+
+ Unlike curses, this doesn't require clearing the screen before doing
+ anything, and it's friendlier to use. It keeps the endless calls to
+ ``tigetstr()`` and ``tparm()`` out of your code, and it acts intelligently
+ when somebody pipes your output to a non-terminal.
+
+ Instance attributes:
+
+ ``stream``
+ The stream the terminal outputs to. It's convenient to pass the stream
+ around with the terminal; it's almost always needed when the terminal
+ is and saves sticking lots of extra args on client functions in
+ practice.
+ ``is_a_tty``
+ Whether ``stream`` appears to be a terminal. You can examine this value
+ to decide whether to draw progress bars or other frippery.
+
+ """
+ def __init__(self, kind=None, stream=None, force_styling=False):
+ """Initialize the terminal.
+
+ If ``stream`` is not a tty, I will default to returning an empty
+ Unicode string for all capability values, so things like piping your
+ output to a file won't strew escape sequences all over the place. The
+ ``ls`` command sets a precedent for this: it defaults to columnar
+ output when being sent to a tty and one-item-per-line when not.
+
+ :arg kind: A terminal string as taken by ``setupterm()``. Defaults to
+ the value of the ``TERM`` environment variable.
+ :arg stream: A file-like object representing the terminal. Defaults to
+ the original value of stdout, like ``curses.initscr()`` does.
+ :arg force_styling: Whether to force the emission of capabilities, even
+ if we don't seem to be in a terminal. This comes in handy if users
+ are trying to pipe your output through something like ``less -r``,
+ which supports terminal codes just fine but doesn't appear itself
+ to be a terminal. Just expose a command-line option, and set
+ ``force_styling`` based on it. Terminal initialization sequences
+ will be sent to ``stream`` if it has a file descriptor and to
+ ``sys.__stdout__`` otherwise. (``setupterm()`` demands to send them
+ somewhere, and stdout is probably where the output is ultimately
+ headed. If not, stderr is probably bound to the same terminal.)
+
+ """
+ if stream is None:
+ stream = sys.__stdout__
+ try:
+ stream_descriptor = (stream.fileno() if hasattr(stream, 'fileno')
+ and callable(stream.fileno)
+ else None)
+ except IOUnsupportedOperation:
+ stream_descriptor = None
+
+ self.is_a_tty = stream_descriptor is not None and isatty(stream_descriptor)
+ self._does_styling = self.is_a_tty or force_styling
+
+        # The descriptor to direct terminal initialization sequences to.
+ # sys.__stdout__ seems to always have a descriptor of 1, even if output
+ # is redirected.
+ self._init_descriptor = (sys.__stdout__.fileno()
+ if stream_descriptor is None
+ else stream_descriptor)
+ if self._does_styling:
+ # Make things like tigetstr() work. Explicit args make setupterm()
+ # work even when -s is passed to nosetests. Lean toward sending
+ # init sequences to the stream if it has a file descriptor, and
+ # send them to stdout as a fallback, since they have to go
+ # somewhere.
+ setupterm(kind or environ.get('TERM', 'unknown'),
+ self._init_descriptor)
+
+ self.stream = stream
+
+ # Sugary names for commonly-used capabilities, intended to help avoid trips
+ # to the terminfo man page and comments in your code:
+ _sugar = dict(
+ # Don't use "on" or "bright" as an underscore-separated chunk in any of
+ # these (e.g. on_cology or rock_on) so we don't interfere with
+ # __getattr__.
+ save='sc',
+ restore='rc',
+
+ clear_eol='el',
+ clear_bol='el1',
+ clear_eos='ed',
+ position='cup', # deprecated
+ move='cup',
+ move_x='hpa',
+ move_y='vpa',
+
+ reset_colors='op', # oc doesn't work on my OS X terminal.
+
+ normal='sgr0',
+ reverse='rev',
+ # 'bold' is just 'bold'. Similarly...
+ # blink
+ # dim
+ # flash
+ italic='sitm',
+ no_italic='ritm',
+ shadow='sshm',
+ no_shadow='rshm',
+ standout='smso',
+ no_standout='rmso',
+ subscript='ssubm',
+ no_subscript='rsubm',
+ superscript='ssupm',
+ no_superscript='rsupm',
+ underline='smul',
+ no_underline='rmul')
+
+ def __getattr__(self, attr):
+ """Return parametrized terminal capabilities, like bold.
+
+ For example, you can say ``term.bold`` to get the string that turns on
+ bold formatting and ``term.normal`` to get the string that turns it off
+ again. Or you can take a shortcut: ``term.bold('hi')`` bolds its
+ argument and sets everything to normal afterward. You can even combine
+ things: ``term.bold_underline_red_on_bright_green('yowzers!')``.
+
+ For a parametrized capability like ``cup``, pass the parameters too:
+ ``some_term.cup(line, column)``.
+
+ ``man terminfo`` for a complete list of capabilities.
+
+ Return values are always Unicode.
+
+ """
+ resolution = self._resolve_formatter(attr) if self._does_styling else NullCallableString()
+ setattr(self, attr, resolution) # Cache capability codes.
+ return resolution
+
+ @property
+ def height(self):
+ """The height of the terminal in characters
+
+ If no stream or a stream not representing a terminal was passed in at
+ construction, return the dimension of the controlling terminal so
+ piping to things that eventually display on the terminal (like ``less
+ -R``) work. If a stream representing a terminal was passed in, return
+ the dimensions of that terminal. If there somehow is no controlling
+ terminal, return ``None``. (Thus, you should check that ``is_a_tty`` is
+ true before doing any math on the result.)
+
+ """
+ return self._height_and_width()[0]
+
+ @property
+ def width(self):
+ """The width of the terminal in characters
+
+ See ``height()`` for some corner cases.
+
+ """
+ return self._height_and_width()[1]
+
+ def _height_and_width(self):
+ """Return a tuple of (terminal height, terminal width)."""
+ # tigetnum('lines') and tigetnum('cols') update only if we call
+ # setupterm() again.
+ for descriptor in self._init_descriptor, sys.__stdout__:
+ try:
+ return struct.unpack('hhhh', ioctl(descriptor, TIOCGWINSZ, '\000' * 8))[0:2]
+ except IOError:
+ pass
+ return None, None # Should never get here
+
+ def location(self, x=None, y=None):
+ """Return a context manager for temporarily moving the cursor.
+
+ Move the cursor to a certain position on entry, let you print stuff
+ there, then return the cursor to its original position::
+
+ term = Terminal()
+ with term.location(2, 5):
+ print 'Hello, world!'
+ for x in xrange(10):
+ print 'I can do it %i times!' % x
+
+ Specify ``x`` to move to a certain column, ``y`` to move to a certain
+ row, or both.
+
+ """
+ return Location(self, x, y)
+
+ @property
+ def color(self):
+ """Return a capability that sets the foreground color.
+
+ The capability is unparametrized until called and passed a number
+ (0-15), at which point it returns another string which represents a
+ specific color change. This second string can further be called to
+ color a piece of text and set everything back to normal afterward.
+
+ :arg num: The number, 0-15, of the color
+
+ """
+ return ParametrizingString(self._foreground_color, self.normal)
+
+ @property
+ def on_color(self):
+ """Return a capability that sets the background color.
+
+ See ``color()``.
+
+ """
+ return ParametrizingString(self._background_color, self.normal)
+
+ @property
+ def number_of_colors(self):
+ """Return the number of colors the terminal supports.
+
+ Common values are 0, 8, 16, 88, and 256.
+
+ Though the underlying capability returns -1 when there is no color
+ support, we return 0. This lets you test more Pythonically::
+
+ if term.number_of_colors:
+ ...
+
+ We also return 0 if the terminal won't tell us how many colors it
+ supports, which I think is rare.
+
+ """
+ # This is actually the only remotely useful numeric capability. We
+ # don't name it after the underlying capability, because we deviate
+ # slightly from its behavior, and we might someday wish to give direct
+ # access to it.
+ colors = tigetnum('colors') # Returns -1 if no color support, -2 if no such cap.
+ #self.__dict__['colors'] = ret # Cache it. It's not changing. (Doesn't work.)
+ return colors if colors >= 0 else 0
+
+ def _resolve_formatter(self, attr):
+ """Resolve a sugary or plain capability name, color, or compound formatting function name into a callable capability."""
+ if attr in COLORS:
+ return self._resolve_color(attr)
+ elif attr in COMPOUNDABLES:
+ # Bold, underline, or something that takes no parameters
+ return self._formatting_string(self._resolve_capability(attr))
+ else:
+ formatters = split_into_formatters(attr)
+ if all(f in COMPOUNDABLES for f in formatters):
+ # It's a compound formatter, like "bold_green_on_red". Future
+ # optimization: combine all formatting into a single escape
+ # sequence.
+ return self._formatting_string(
+ u''.join(self._resolve_formatter(s) for s in formatters))
+ else:
+ return ParametrizingString(self._resolve_capability(attr))
+
+ def _resolve_capability(self, atom):
+ """Return a terminal code for a capname or a sugary name, or an empty Unicode.
+
+ The return value is always Unicode, because otherwise it is clumsy
+ (especially in Python 3) to concatenate with real (Unicode) strings.
+
+ """
+ code = tigetstr(self._sugar.get(atom, atom))
+ if code:
+ # We can encode escape sequences as UTF-8 because they never
+ # contain chars > 127, and UTF-8 never changes anything within that
+            # range.
+ return code.decode('utf-8')
+ return u''
+
+ def _resolve_color(self, color):
+ """Resolve a color like red or on_bright_green into a callable capability."""
+ # TODO: Does curses automatically exchange red and blue and cyan and
+ # yellow when a terminal supports setf/setb rather than setaf/setab?
+ # I'll be blasted if I can find any documentation. The following
+ # assumes it does.
+ color_cap = (self._background_color if 'on_' in color else
+ self._foreground_color)
+ # curses constants go up to only 7, so add an offset to get at the
+ # bright colors at 8-15:
+ offset = 8 if 'bright_' in color else 0
+ base_color = color.rsplit('_', 1)[-1]
+ return self._formatting_string(
+ color_cap(getattr(curses, 'COLOR_' + base_color.upper()) + offset))
+
+ @property
+ def _foreground_color(self):
+ return self.setaf or self.setf
+
+ @property
+ def _background_color(self):
+ return self.setab or self.setb
+
+ def _formatting_string(self, formatting):
+ """Return a new ``FormattingString`` which implicitly receives my notion of "normal"."""
+ return FormattingString(formatting, self.normal)
+
+
+def derivative_colors(colors):
+ """Return the names of valid color variants, given the base colors."""
+ return set([('on_' + c) for c in colors] +
+ [('bright_' + c) for c in colors] +
+ [('on_bright_' + c) for c in colors])
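+# For example: derivative_colors(['red']) returns
+# set(['on_red', 'bright_red', 'on_bright_red']).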
+
+
+COLORS = set(['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'])
+COLORS.update(derivative_colors(COLORS))
+COMPOUNDABLES = (COLORS |
+ set(['bold', 'underline', 'reverse', 'blink', 'dim', 'italic',
+ 'shadow', 'standout', 'subscript', 'superscript']))
+
+
+class ParametrizingString(unicode):
+ """A Unicode string which can be called to parametrize it as a terminal capability"""
+ def __new__(cls, formatting, normal=None):
+ """Instantiate.
+
+ :arg normal: If non-None, indicates that, once parametrized, this can
+ be used as a ``FormattingString``. The value is used as the
+ "normal" capability.
+
+ """
+ new = unicode.__new__(cls, formatting)
+ new._normal = normal
+ return new
+
+ def __call__(self, *args):
+ try:
+ # Re-encode the cap, because tparm() takes a bytestring in Python
+ # 3. Then decode the result back to Unicode so it still acts like
+ # a plain Unicode string and concatenations work.
+ parametrized = tparm(self.encode('utf-8'), *args).decode('utf-8')
+ return (parametrized if self._normal is None else
+ FormattingString(parametrized, self._normal))
+ except curses.error:
+ # Catch "must call (at least) setupterm() first" errors, as when
+ # running simply `nosetests` (without progressive) on nose-
+ # progressive. Perhaps the terminal has gone away between calling
+ # tigetstr and calling tparm.
+ return u''
+ except TypeError:
+ # If the first non-int (i.e. incorrect) arg was a string, suggest
+ # something intelligent:
+ if len(args) == 1 and isinstance(args[0], basestring):
+ raise TypeError(
+ 'A native or nonexistent capability template received '
+ '%r when it was expecting ints. You probably misspelled a '
+ 'formatting call like bright_red_on_white(...).' % args)
+ else:
+ # Somebody passed a non-string; I don't feel confident
+ # guessing what they were trying to do.
+ raise
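+
+# A usage sketch (assuming setupterm() has already run): wrapping a
+# parametrized capability such as 'cup' makes it callable with ints,
+# producing the concrete escape sequence:
+#
+# move = ParametrizingString(tigetstr('cup').decode('utf-8'))
+# move(5, 0) # the escape sequence that puts the cursor at row 5, col 0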
+
+
+class FormattingString(unicode):
+ """A Unicode string which can be called upon a piece of text to wrap it in formatting"""
+ def __new__(cls, formatting, normal):
+ new = unicode.__new__(cls, formatting)
+ new._normal = normal
+ return new
+
+ def __call__(self, text):
+ """Return a new string that is ``text`` formatted with my contents.
+
+ At the beginning of the string, I prepend the formatting that is my
+ contents. At the end, I append the "normal" sequence to set everything
+ back to defaults. The return value is always a Unicode.
+
+ """
+ return self + text + self._normal
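+
+# A short sketch with hypothetical escape values: a FormattingString
+# simply brackets its argument, so formatting survives concatenation:
+#
+# bold = FormattingString(u'\x1b[1m', u'\x1b[0m')
+# bold(u'hi') == u'\x1b[1m' + u'hi' + u'\x1b[0m' # True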
+
+
+class NullCallableString(unicode):
+ """A dummy class to stand in for ``FormattingString`` and ``ParametrizingString``
+
+ A callable Unicode string that returns an empty Unicode when called with
+ an int and the arg otherwise. We use this when there is no tty and so all
+ capabilities are blank.
+
+ """
+ def __new__(cls):
+ new = unicode.__new__(cls, u'')
+ return new
+
+ def __call__(self, arg):
+ if isinstance(arg, int):
+ return u''
+ return arg # TODO: Force even strs in Python 2.x to be unicodes? Nah. How would I know what encoding to use to convert it?
+
+
+def split_into_formatters(compound):
+ """Split a possibly compound format string into segments.
+
+ >>> split_into_formatters('bold_underline_bright_blue_on_red')
+ ['bold', 'underline', 'bright_blue', 'on_red']
+
+ """
+ merged_segs = []
+ # These occur only as prefixes, so they can always be merged:
+ mergeable_prefixes = ['on', 'bright', 'on_bright']
+ for s in compound.split('_'):
+ if merged_segs and merged_segs[-1] in mergeable_prefixes:
+ merged_segs[-1] += '_' + s
+ else:
+ merged_segs.append(s)
+ return merged_segs
+
+
+class Location(object):
+ """Context manager for temporarily moving the cursor"""
+ def __init__(self, term, x=None, y=None):
+ self.x, self.y = x, y
+ self.term = term
+
+ def __enter__(self):
+ """Save position and move to the requested column, row, or both."""
+ self.term.stream.write(self.term.save) # save position
+ # Test against None explicitly: 0 is a valid column or row.
+ if self.x is not None and self.y is not None:
+ self.term.stream.write(self.term.move(self.y, self.x))
+ elif self.x is not None:
+ self.term.stream.write(self.term.move_x(self.x))
+ elif self.y is not None:
+ self.term.stream.write(self.term.move_y(self.y))
+
+ def __exit__(self, type, value, tb):
+ """Restore original cursor position."""
+ self.term.stream.write(self.term.restore)
diff --git a/python/blessings/blessings/tests.py b/python/blessings/blessings/tests.py
new file mode 100644
index 000000000..a02a3924a
--- /dev/null
+++ b/python/blessings/blessings/tests.py
@@ -0,0 +1,231 @@
+# -*- coding: utf-8 -*-
+"""Automated tests (as opposed to human-verified test patterns)
+
+It was tempting to mock out curses to get predictable output from ``tigetstr``,
+but there are concrete integration-testing benefits in not doing so. For
+instance, ``tigetstr`` changed its return type in Python 3.2.3. So instead, we
+simply create all our test ``Terminal`` instances with a known terminal type.
+All we require from the host machine is that a standard terminfo definition of
+xterm-256color exists.
+
+"""
+from __future__ import with_statement # Make 2.5-compatible
+from curses import tigetstr, tparm
+from functools import partial
+from StringIO import StringIO
+import sys
+
+from nose import SkipTest
+from nose.tools import eq_
+
+# This tests that __all__ is correct, since below we use everything that it
+# should export:
+from blessings import *
+
+
+TestTerminal = partial(Terminal, kind='xterm-256color')
+
+
+def unicode_cap(cap):
+ """Return the result of ``tigetstr`` except as Unicode."""
+ return tigetstr(cap).decode('utf-8')
+
+
+def unicode_parm(cap, *parms):
+ """Return the result of ``tparm(tigetstr())`` except as Unicode."""
+ return tparm(tigetstr(cap), *parms).decode('utf-8')
+
+
+def test_capability():
+ """Check that a capability lookup works.
+
+ Also test that Terminal grabs a reasonable default stream. This test
+ assumes it will be run from a tty.
+
+ """
+ t = TestTerminal()
+ sc = unicode_cap('sc')
+ eq_(t.save, sc)
+ eq_(t.save, sc) # Make sure caching doesn't screw it up.
+
+
+def test_capability_without_tty():
+ """Assert capability templates are '' when stream is not a tty."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.save, u'')
+ eq_(t.red, u'')
+
+
+def test_capability_with_forced_tty():
+ """If we force styling, capabilities had better not (generally) be empty."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ eq_(t.save, unicode_cap('sc'))
+
+
+def test_parametrization():
+ """Test parametrizing a capability."""
+ eq_(TestTerminal().cup(3, 4), unicode_parm('cup', 3, 4))
+
+
+def test_height_and_width():
+ """Assert that ``height`` and ``width`` are ints."""
+ t = TestTerminal() # kind shouldn't matter.
+ assert isinstance(t.height, int)
+ assert isinstance(t.width, int)
+
+
+def test_stream_attr():
+ """Make sure Terminal exposes a ``stream`` attribute that defaults to something sane."""
+ eq_(Terminal().stream, sys.__stdout__)
+
+
+def test_location():
+ """Make sure ``location()`` does what it claims."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+
+ with t.location(3, 4):
+ t.stream.write(u'hi')
+
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_parm('cup', 4, 3) +
+ u'hi' +
+ unicode_cap('rc'))
+
+
+def test_horizontal_location():
+ """Make sure we can move the cursor horizontally without changing rows."""
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ with t.location(x=5):
+ pass
+ eq_(t.stream.getvalue(), unicode_cap('sc') +
+ unicode_parm('hpa', 5) +
+ unicode_cap('rc'))
+
+
+def test_null_fileno():
+ """Make sure ``Terminal`` works when ``fileno`` is ``None``.
+
+ This simulates piping output to another program.
+
+ """
+ out = StringIO()
+ out.fileno = None
+ t = TestTerminal(stream=out)
+ eq_(t.save, u'')
+
+
+def test_mnemonic_colors():
+ """Make sure color shortcuts work."""
+ def color(num):
+ return unicode_parm('setaf', num)
+
+ def on_color(num):
+ return unicode_parm('setab', num)
+
+ # Avoid testing red, blue, yellow, and cyan, since they might someday
+ # change depending on terminal type.
+ t = TestTerminal()
+ eq_(t.white, color(7))
+ eq_(t.green, color(2)) # Make sure it's different than white.
+ eq_(t.on_black, on_color(0))
+ eq_(t.on_green, on_color(2))
+ eq_(t.bright_black, color(8))
+ eq_(t.bright_green, color(10))
+ eq_(t.on_bright_black, on_color(8))
+ eq_(t.on_bright_green, on_color(10))
+
+
+def test_callable_numeric_colors():
+ """``color(n)`` should return a formatting wrapper."""
+ t = TestTerminal()
+ eq_(t.color(5)('smoo'), t.magenta + 'smoo' + t.normal)
+ eq_(t.color(5)('smoo'), t.color(5) + 'smoo' + t.normal)
+ eq_(t.on_color(2)('smoo'), t.on_green + 'smoo' + t.normal)
+ eq_(t.on_color(2)('smoo'), t.on_color(2) + 'smoo' + t.normal)
+
+
+def test_null_callable_numeric_colors():
+ """``color(n)`` should be a no-op on null terminals."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.color(5)('smoo'), 'smoo')
+ eq_(t.on_color(6)('smoo'), 'smoo')
+
+
+def test_naked_color_cap():
+ """``term.color`` should return a stringlike capability."""
+ t = TestTerminal()
+ eq_(t.color + '', t.setaf + '')
+
+
+def test_number_of_colors_without_tty():
+ """``number_of_colors`` should return 0 when there's no tty."""
+ # Hypothesis: once setupterm() has run and decided the tty supports 256
+ # colors, it never changes its mind.
+ raise SkipTest
+
+ t = TestTerminal(stream=StringIO())
+ eq_(t.number_of_colors, 0)
+ t = TestTerminal(stream=StringIO(), force_styling=True)
+ eq_(t.number_of_colors, 0)
+
+
+def test_number_of_colors_with_tty():
+ """``number_of_colors`` should work."""
+ t = TestTerminal()
+ eq_(t.number_of_colors, 256)
+
+
+def test_formatting_functions():
+ """Test crazy-ass formatting wrappers, both simple and compound."""
+ t = TestTerminal()
+ # By now, it should be safe to use sugared attributes. Other tests test those.
+ eq_(t.bold(u'hi'), t.bold + u'hi' + t.normal)
+ eq_(t.green('hi'), t.green + u'hi' + t.normal) # Plain strs for Python 2.x
+ # Test some non-ASCII chars, probably not necessary:
+ eq_(t.bold_green(u'boö'), t.bold + t.green + u'boö' + t.normal)
+ eq_(t.bold_underline_green_on_red('boo'),
+ t.bold + t.underline + t.green + t.on_red + u'boo' + t.normal)
+ # Don't spell things like this:
+ eq_(t.on_bright_red_bold_bright_green_underline('meh'),
+ t.on_bright_red + t.bold + t.bright_green + t.underline + u'meh' + t.normal)
+
+
+def test_formatting_functions_without_tty():
+ """Test crazy-ass formatting wrappers when there's no tty."""
+ t = TestTerminal(stream=StringIO())
+ eq_(t.bold(u'hi'), u'hi')
+ eq_(t.green('hi'), u'hi')
+ # Test non-ASCII chars, no longer really necessary:
+ eq_(t.bold_green(u'boö'), u'boö')
+ eq_(t.bold_underline_green_on_red('loo'), u'loo')
+ eq_(t.on_bright_red_bold_bright_green_underline('meh'), u'meh')
+
+
+def test_nice_formatting_errors():
+ """Make sure you get nice hints if you misspell a formatting wrapper."""
+ t = TestTerminal()
+ try:
+ t.bold_misspelled('hey')
+ except TypeError, e:
+ assert 'probably misspelled' in e.args[0]
+
+ try:
+ t.bold_misspelled(u'hey') # unicode
+ except TypeError, e:
+ assert 'probably misspelled' in e.args[0]
+
+ try:
+ t.bold_misspelled(None) # an arbitrary non-string
+ except TypeError, e:
+ assert 'probably misspelled' not in e.args[0]
+
+ try:
+ t.bold_misspelled('a', 'b') # >1 string arg
+ except TypeError, e:
+ assert 'probably misspelled' not in e.args[0]
+
+
+def test_init_descriptor_always_initted():
+ """We should be able to get a height and width even on no-tty Terminals."""
+ t = Terminal(stream=StringIO())
+ eq_(type(t.height), int)
diff --git a/python/blessings/setup.cfg b/python/blessings/setup.cfg
new file mode 100644
index 000000000..861a9f554
--- /dev/null
+++ b/python/blessings/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/blessings/setup.py b/python/blessings/setup.py
new file mode 100644
index 000000000..6af55452d
--- /dev/null
+++ b/python/blessings/setup.py
@@ -0,0 +1,42 @@
+import sys
+
+from setuptools import setup, find_packages
+
+
+extra_setup = {}
+if sys.version_info >= (3,):
+ extra_setup['use_2to3'] = True
+
+setup(
+ name='blessings',
+ version='1.3',
+ description='A thin, practical wrapper around terminal formatting, positioning, and more',
+ long_description=open('README.rst').read(),
+ author='Erik Rose',
+ author_email='erikrose@grinchcentral.com',
+ license='MIT',
+ packages=find_packages(exclude=['ez_setup']),
+ tests_require=['Nose'],
+ url='https://github.com/erikrose/blessings',
+ include_package_data=True,
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Environment :: Console :: Curses',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: User Interfaces',
+ 'Topic :: Terminals'
+ ],
+ keywords=['terminal', 'tty', 'curses', 'ncurses', 'formatting', 'style', 'color', 'console'],
+ **extra_setup
+)
diff --git a/python/blessings/tox.ini b/python/blessings/tox.ini
new file mode 100644
index 000000000..e1753f261
--- /dev/null
+++ b/python/blessings/tox.ini
@@ -0,0 +1,7 @@
+[tox]
+envlist = py25, py26, py27, py32
+
+[testenv]
+commands = nosetests blessings
+deps = nose
+changedir = .tox # So Python 3 runs don't pick up incompatible, un-2to3'd source from the cwd
diff --git a/python/compare-locales/compare_locales/__init__.py b/python/compare-locales/compare_locales/__init__.py
new file mode 100644
index 000000000..bad265e4f
--- /dev/null
+++ b/python/compare-locales/compare_locales/__init__.py
@@ -0,0 +1 @@
+version = "1.1"
diff --git a/python/compare-locales/compare_locales/checks.py b/python/compare-locales/compare_locales/checks.py
new file mode 100644
index 000000000..ee3bef03d
--- /dev/null
+++ b/python/compare-locales/compare_locales/checks.py
@@ -0,0 +1,438 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+from difflib import SequenceMatcher
+from xml import sax
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from compare_locales.parser import DTDParser, PropertiesParser
+
+
+class Checker(object):
+ '''Abstract class to implement checks per file type.
+ '''
+ pattern = None
+
+ @classmethod
+ def use(cls, file):
+ return cls.pattern.match(file.file)
+
+ def check(self, refEnt, l10nEnt):
+ '''Given the reference and localized Entities, performs checks.
+
+ This is a generator yielding tuples of
+ - "warning" or "error", depending on what should be reported,
+ - tuple of line, column info for the error within the string,
+ - description string to be shown in the report,
+ - name of the check that raised the issue.
+ '''
+ # The guard keeps the unreachable yield below, so this method
+ # stays a generator like the real checks.
+ if True:
+ raise NotImplementedError("Need to subclass")
+ yield ("error", (0, 0), "This is an example error", "example")
+
+
+class PrintfException(Exception):
+ def __init__(self, msg, pos):
+ self.pos = pos
+ self.msg = msg
+
+
+class PropertiesChecker(Checker):
+ '''Tests to run on .properties files.
+ '''
+ pattern = re.compile('.*\.properties$')
+ printf = re.compile(r'%(?P<good>%|'
+ r'(?:(?P<number>[1-9][0-9]*)\$)?'
+ r'(?P<width>\*|[0-9]+)?'
+ r'(?P<prec>\.(?:\*|[0-9]+)?)?'
+ r'(?P<spec>[duxXosScpfg]))?')
+
+ def check(self, refEnt, l10nEnt):
+ '''Test for the different variable formats.
+ '''
+ refValue, l10nValue = refEnt.val, l10nEnt.val
+ refSpecs = None
+ # check for PluralForm.jsm stuff, should have the docs in the
+ # comment
+ if 'Localization_and_Plurals' in refEnt.pre_comment:
+ # For plurals, common variable pattern is #1. Try that.
+ pats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
+ refValue))
+ if len(pats) == 0:
+ return
+ lpats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)',
+ l10nValue))
+ if pats - lpats:
+ yield ('warning', 0, 'not all variables used in l10n',
+ 'plural')
+ return
+ if lpats - pats:
+ yield ('error', 0, 'unreplaced variables in l10n',
+ 'plural')
+ return
+ return
+ # check for lost escapes
+ raw_val = l10nEnt.raw_val
+ for m in PropertiesParser.escape.finditer(raw_val):
+ if m.group('single') and \
+ m.group('single') not in PropertiesParser.known_escapes:
+ yield ('warning', m.start(),
+ 'unknown escape sequence, \\' + m.group('single'),
+ 'escape')
+ try:
+ refSpecs = self.getPrintfSpecs(refValue)
+ except PrintfException:
+ refSpecs = []
+ if refSpecs:
+ for t in self.checkPrintf(refSpecs, l10nValue):
+ yield t
+ return
+
+ def checkPrintf(self, refSpecs, l10nValue):
+ try:
+ l10nSpecs = self.getPrintfSpecs(l10nValue)
+ except PrintfException, e:
+ yield ('error', e.pos, e.msg, 'printf')
+ return
+ if refSpecs != l10nSpecs:
+ sm = SequenceMatcher()
+ sm.set_seqs(refSpecs, l10nSpecs)
+ msgs = []
+ warn = None
+ for action, i1, i2, j1, j2 in sm.get_opcodes():
+ if action == 'equal':
+ continue
+ if action == 'delete':
+ # missing argument in l10n
+ if i2 == len(refSpecs):
+ # trailing specs missing, that's just a warning
+ warn = ', '.join('trailing argument %d `%s` missing' %
+ (i+1, refSpecs[i])
+ for i in xrange(i1, i2))
+ else:
+ for i in xrange(i1, i2):
+ msgs.append('argument %d `%s` missing' %
+ (i+1, refSpecs[i]))
+ continue
+ if action == 'insert':
+ # obsolete argument in l10n
+ for i in xrange(j1, j2):
+ msgs.append('argument %d `%s` obsolete' %
+ (i+1, l10nSpecs[i]))
+ continue
+ if action == 'replace':
+ for i, j in zip(xrange(i1, i2), xrange(j1, j2)):
+ msgs.append('argument %d `%s` should be `%s`' %
+ (j+1, l10nSpecs[j], refSpecs[i]))
+ if msgs:
+ yield ('error', 0, ', '.join(msgs), 'printf')
+ if warn is not None:
+ yield ('warning', 0, warn, 'printf')
+
+ def getPrintfSpecs(self, val):
+ hasNumber = False
+ specs = []
+ for m in self.printf.finditer(val):
+ if m.group("good") is None:
+ # found just a '%', signal an error
+ raise PrintfException('Found single %', m.start())
+ if m.group("good") == '%':
+ # escaped %
+ continue
+ if ((hasNumber and m.group('number') is None) or
+ (not hasNumber and specs and
+ m.group('number') is not None)):
+ # mixed style, numbered and not
+ raise PrintfException('Mixed ordered and non-ordered args',
+ m.start())
+ hasNumber = m.group('number') is not None
+ if hasNumber:
+ pos = int(m.group('number')) - 1
+ ls = len(specs)
+ if pos >= ls:
+ # pad specs
+ nones = pos - ls
+ specs[ls:pos] = nones*[None]
+ specs.append(m.group('spec'))
+ else:
+ if specs[pos] is not None:
+ raise PrintfException('Double ordered argument %d' %
+ (pos+1),
+ m.start())
+ specs[pos] = m.group('spec')
+ else:
+ specs.append(m.group('spec'))
+ # check for missing args
+ if hasNumber and not all(specs):
+ raise PrintfException('Ordered argument missing', 0)
+ return specs
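+
+# Worked example (a sketch): for u'%1$S wrote to %2$S' the regex above
+# finds numbered specs, so getPrintfSpecs returns ['S', 'S']; mixing
+# u'%1$S' with a bare u'%d' in the same value raises
+# PrintfException('Mixed ordered and non-ordered args', pos).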
+
+
+class DTDChecker(Checker):
+ """Tests to run on DTD files.
+
+ Uses xml.sax for the heavy lifting of xml parsing.
+
+ The code tries to parse until it doesn't find any unresolved entities
+ anymore. If it finds one, it tries to grab the key, and adds an empty
+ <!ENTITY key ""> definition to the header.
+
+ Also checks for some CSS and number heuristics in the values.
+ """
+ pattern = re.compile('.*\.dtd$')
+
+ eref = re.compile('&(%s);' % DTDParser.Name)
+ tmpl = '''<!DOCTYPE elem [%s]>
+<elem>%s</elem>
+'''
+ xmllist = set(('amp', 'lt', 'gt', 'apos', 'quot'))
+
+ def __init__(self, reference):
+ self.reference = reference
+ self.__known_entities = None
+
+ def known_entities(self, refValue):
+ if self.__known_entities is None and self.reference is not None:
+ self.__known_entities = set()
+ for ent in self.reference:
+ self.__known_entities.update(self.entities_for_value(ent.val))
+ return self.__known_entities if self.__known_entities is not None \
+ else self.entities_for_value(refValue)
+
+ def entities_for_value(self, value):
+ reflist = set(m.group(1).encode('utf-8')
+ for m in self.eref.finditer(value))
+ reflist -= self.xmllist
+ return reflist
+
+ # Setup for XML parser, with default and text-only content handler
+ class TextContent(sax.handler.ContentHandler):
+ textcontent = ''
+
+ def characters(self, content):
+ self.textcontent += content
+
+ defaulthandler = sax.handler.ContentHandler()
+ texthandler = TextContent()
+
+ numPattern = r'([0-9]+|[0-9]*\.[0-9]+)'
+ num = re.compile('^%s$' % numPattern)
+ lengthPattern = '%s(em|px|ch|cm|in)' % numPattern
+ length = re.compile('^%s$' % lengthPattern)
+ spec = re.compile(r'((?:min\-)?(?:width|height))\s*:\s*%s' %
+ lengthPattern)
+ style = re.compile(r'^%(spec)s\s*(;\s*%(spec)s\s*)*;?$' %
+ {'spec': spec.pattern})
+
+ processContent = None
+
+ def check(self, refEnt, l10nEnt):
+ """Try to parse the refvalue inside a dummy element, and keep
+ track of entities that we need to define to make that work.
+
+ Return a checker that offers just those entities.
+ """
+ refValue, l10nValue = refEnt.val, l10nEnt.val
+ # find entities the refValue references,
+ # reusing markup from DTDParser.
+ reflist = self.known_entities(refValue)
+ inContext = self.entities_for_value(refValue)
+ entities = ''.join('<!ENTITY %s "">' % s for s in sorted(reflist))
+ parser = sax.make_parser()
+ parser.setFeature(sax.handler.feature_external_ges, False)
+
+ parser.setContentHandler(self.defaulthandler)
+ try:
+ parser.parse(StringIO(self.tmpl %
+ (entities, refValue.encode('utf-8'))))
+ # also catch stray %
+ parser.parse(StringIO(self.tmpl %
+ (refEnt.all.encode('utf-8') + entities,
+ '&%s;' % refEnt.key.encode('utf-8'))))
+ except sax.SAXParseException, e:
+ yield ('warning',
+ (0, 0),
+ "can't parse en-US value", 'xmlparse')
+
+ # find entities the l10nValue references,
+ # reusing markup from DTDParser.
+ l10nlist = self.entities_for_value(l10nValue)
+ missing = sorted(l10nlist - reflist)
+ _entities = entities + ''.join('<!ENTITY %s "">' % s for s in missing)
+ if self.processContent is not None:
+ self.texthandler.textcontent = ''
+ parser.setContentHandler(self.texthandler)
+ try:
+ parser.parse(StringIO(self.tmpl % (_entities,
+ l10nValue.encode('utf-8'))))
+ # also catch stray %
+ # if this fails, we need to subtract the entity definition
+ parser.setContentHandler(self.defaulthandler)
+ parser.parse(StringIO(self.tmpl % (
+ l10nEnt.all.encode('utf-8') + _entities,
+ '&%s;' % l10nEnt.key.encode('utf-8'))))
+ except sax.SAXParseException, e:
+ # xml parse error, yield error
+ # sometimes, the error is reported on our fake closing
+ # element, make that the end of the last line
+ lnr = e.getLineNumber() - 1
+ lines = l10nValue.splitlines()
+ if lnr > len(lines):
+ lnr = len(lines)
+ col = len(lines[lnr-1])
+ else:
+ col = e.getColumnNumber()
+ if lnr == 1:
+ # first line starts with <elem>, subtract
+ col -= len("<elem>")
+ elif lnr == 0:
+ col -= len("<!DOCTYPE elem [") # first line is DOCTYPE
+ yield ('error', (lnr, col), ' '.join(e.args), 'xmlparse')
+
+ warntmpl = u'Referencing unknown entity `%s`'
+ if reflist:
+ if inContext:
+ elsewhere = reflist - inContext
+ warntmpl += ' (%s used in context' % \
+ ', '.join(sorted(inContext))
+ if elsewhere:
+ warntmpl += ', %s known)' % ', '.join(sorted(elsewhere))
+ else:
+ warntmpl += ')'
+ else:
+ warntmpl += ' (%s known)' % ', '.join(sorted(reflist))
+ for key in missing:
+ yield ('warning', (0, 0), warntmpl % key.decode('utf-8'),
+ 'xmlparse')
+ if inContext and l10nlist and l10nlist - inContext - set(missing):
+ mismatch = sorted(l10nlist - inContext - set(missing))
+ for key in mismatch:
+ yield ('warning', (0, 0),
+ 'Entity %s referenced, but %s used in context' % (
+ key.decode('utf-8'),
+ ', '.join(sorted(inContext))
+ ), 'xmlparse')
+
+ # Number check
+ if self.num.match(refValue) and not self.num.match(l10nValue):
+ yield ('warning', 0, 'reference is a number', 'number')
+ # CSS checks
+ # just a length, width="100em"
+ if self.length.match(refValue) and not self.length.match(l10nValue):
+ yield ('error', 0, 'reference is a CSS length', 'css')
+ # real CSS spec, style="width:100px;"
+ if self.style.match(refValue):
+ if not self.style.match(l10nValue):
+ yield ('error', 0, 'reference is a CSS spec', 'css')
+ else:
+ # warn if different properties or units
+ refMap = dict((s, u) for s, _, u in
+ self.spec.findall(refValue))
+ msgs = []
+ for s, _, u in self.spec.findall(l10nValue):
+ if s not in refMap:
+ msgs.insert(0, '%s only in l10n' % s)
+ else:
+ ru = refMap.pop(s)
+ if u != ru:
+ msgs.append("units for %s don't match "
+ "(%s != %s)" % (s, u, ru))
+ for s in refMap.iterkeys():
+ msgs.insert(0, '%s only in reference' % s)
+ if msgs:
+ yield ('warning', 0, ', '.join(msgs), 'css')
+
+ if self.processContent is not None:
+ for t in self.processContent(self.texthandler.textcontent):
+ yield t
+
+
+class PrincessAndroid(DTDChecker):
+ """Checker for the string values that Android puts into an XML container.
+
+ http://developer.android.com/guide/topics/resources/string-resource.html#FormattingAndStyling # noqa
+ has more info. Check for unescaped apostrophes and bad unicode escapes.
+ """
+ quoted = re.compile("(?P<q>[\"']).*(?P=q)$")
+
+ def unicode_escape(self, str):
+ """Helper method to try to decode all unicode escapes in a string.
+
+ This code uses the standard python decode for unicode-escape, but
+ that's somewhat tricky, as its input needs to be ascii. To get to
+ ascii, the unicode string gets converted to ascii with
+ backslashreplace, i.e., all non-ascii unicode chars get unicode
+ escaped. And then we try to roll all of that back.
+ Now, when that hits an error, that's from the original string, and we
+ need to search for the actual error position in the original string,
+ as the backslashreplace code changes string positions quite badly.
+ See also the last check in TestAndroid.test_android_dtd, with a
+ lengthy Chinese string.
+ """
+ val = str.encode('ascii', 'backslashreplace')
+ try:
+ val.decode('unicode-escape')
+ except UnicodeDecodeError, e:
+ args = list(e.args)
+ badstring = args[1][args[2]:args[3]]
+ i = len(args[1][:args[2]].decode('unicode-escape'))
+ args[2] = i
+ args[3] = i + len(badstring)
+ raise UnicodeDecodeError(*args)
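+
+ # Worked example (a sketch): u'bad \\u escape' passes the
+ # backslashreplace encode unchanged but fails the unicode-escape
+ # decode (truncated \uXXXX escape); the handler above rewrites the
+ # error offsets to point into the original string before re-raising.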
+
+ @classmethod
+ def use(cls, file):
+ """Use this Checker only for DTD files in embedding/android."""
+ return (file.module in ("embedding/android",
+ "mobile/android/base") and
+ cls.pattern.match(file.file))
+
+ def processContent(self, val):
+ """Actual check code.
+ Check for unicode escapes and unescaped quotes and apostrophes,
+ if string's not quoted.
+ """
+ # first, try to decode unicode escapes
+ try:
+ self.unicode_escape(val)
+ except UnicodeDecodeError, e:
+ yield ('error', e.args[2], e.args[4], 'android')
+ # check for unescaped single or double quotes.
+ # first, see if the complete string is single or double quoted,
+ # that changes the rules
+ m = self.quoted.match(val)
+ if m:
+ q = m.group('q')
+ offset = 0
+ val = val[1:-1] # strip quotes
+ else:
+ q = "[\"']"
+ offset = -1
+ stray_quot = re.compile(r"[\\\\]*(%s)" % q)
+
+ for m in stray_quot.finditer(val):
+ if len(m.group(0)) % 2:
+ # found an unescaped single or double quote, which message?
+ if m.group(1) == '"':
+ msg = u"Quotes in Android DTDs need escaping with \\\" "\
+ u"or \\u0022, or put string in apostrophes."
+ else:
+ msg = u"Apostrophes in Android DTDs need escaping with "\
+ u"\\' or \\u0027, or use \u2019, or put string in "\
+ u"quotes."
+ yield ('error', m.end(0)+offset, msg, 'android')
+
+
+def getChecker(file, reference=None):
+ if PropertiesChecker.use(file):
+ return PropertiesChecker()
+ if PrincessAndroid.use(file):
+ return PrincessAndroid(reference)
+ if DTDChecker.use(file):
+ return DTDChecker(reference)
+ return None
diff --git a/python/compare-locales/compare_locales/commands.py b/python/compare-locales/compare_locales/commands.py
new file mode 100644
index 000000000..61b58ec4b
--- /dev/null
+++ b/python/compare-locales/compare_locales/commands.py
@@ -0,0 +1,154 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Commands exposed to commandlines'
+
+import logging
+from optparse import OptionParser, make_option
+
+from compare_locales.paths import EnumerateApp
+from compare_locales.compare import compareApp, compareDirs
+from compare_locales.webapps import compare_web_app
+
+
+class BaseCommand(object):
+ """Base class for compare-locales commands.
+ This handles command line parsing, and general sugar for setuptools
+ entry_points.
+ """
+ options = [
+ make_option('-v', '--verbose', action='count', dest='v', default=0,
+ help='Make more noise'),
+ make_option('-q', '--quiet', action='count', dest='q', default=0,
+ help='Make less noise'),
+ make_option('-m', '--merge',
+ help='''Use this directory to stage merged files,
+use {ab_CD} to specify a different directory for each locale'''),
+ ]
+ data_option = make_option('--data', choices=['text', 'exhibit', 'json'],
+ default='text',
+ help='''Choose data and format (one of text,
+exhibit, json); text: (default) Show which files miss which strings, together
+with warnings and errors. Also prints a summary; json: Serialize the internal
+tree, useful for tools. Also always succeeds; exhibit: Serialize the summary
+data in a json useful for Exhibit
+''')
+
+ def __init__(self):
+ self.parser = None
+
+ def get_parser(self):
+ """Get an OptionParser, with class docstring as usage, and
+ self.options.
+ """
+ parser = OptionParser()
+ parser.set_usage(self.__doc__)
+ for option in self.options:
+ parser.add_option(option)
+ return parser
+
+ @classmethod
+ def call(cls):
+ """Entry_point for setuptools.
+ The actual command handling is done in the handle() method of the
+ subclasses.
+ """
+ cmd = cls()
+ cmd.handle_()
+
+ def handle_(self):
+ """The instance part of the classmethod call."""
+ self.parser = self.get_parser()
+ (options, args) = self.parser.parse_args()
+ # log as verbose or quiet as we want, warn by default
+ logging.basicConfig()
+ logging.getLogger().setLevel(logging.WARNING -
+ (options.v - options.q)*10)
+ observer = self.handle(args, options)
+ print observer.serialize(type=options.data).encode('utf-8', 'replace')
+
+ def handle(self, args, options):
+ """Subclasses need to implement this method for the actual
+ command handling.
+ """
+ raise NotImplementedError
+
+
+class CompareLocales(BaseCommand):
+ """usage: %prog [options] l10n.ini l10n_base_dir [locale ...]
+
+Check the localization status of a gecko application.
+The first argument is a path to the l10n.ini file for the application,
+followed by the base directory of the localization repositories.
+Then you pass in the list of locale codes you want to compare. If no
+locales are given, the list of locales will be taken from the all-locales file
+of the application\'s l10n.ini."""
+
+ options = BaseCommand.options + [
+ make_option('--clobber-merge', action="store_true", default=False,
+ dest='clobber',
+ help="""WARNING: DATALOSS.
+Use this option with care. If specified, the merge directory will
+be clobbered for each module. That means, the subdirectory will
+be completely removed, any files that were there are lost.
+Be careful to specify the right merge directory when using this option."""),
+ make_option('-r', '--reference', default='en-US', dest='reference',
+ help='Explicitly set the reference '
+ 'localization. [default: en-US]'),
+ BaseCommand.data_option
+ ]
+
+ def handle(self, args, options):
+ if len(args) < 2:
+ self.parser.error('Need to pass in list of languages')
+ inipath, l10nbase = args[:2]
+ locales = args[2:]
+ app = EnumerateApp(inipath, l10nbase, locales)
+ app.reference = options.reference
+ try:
+ observer = compareApp(app, merge_stage=options.merge,
+ clobber=options.clobber)
+ except (OSError, IOError), exc:
+ print "FAIL: " + str(exc)
+ self.parser.exit(2)
+ return observer
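+
+# Typical command line (a sketch; paths and locales are hypothetical):
+#
+# compare-locales browser/locales/l10n.ini ~/l10n-central de fr
+#
+# This reads the l10n.ini, enumerates the de and fr repositories under
+# the l10n base directory, and prints the text summary from handle_().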
+
+
+class CompareDirs(BaseCommand):
+ """usage: %prog [options] reference localization
+
+Check the localization status of a directory tree.
+The first argument is a path to the reference data, the second is the
+localization to be tested."""
+
+ options = BaseCommand.options + [
+ BaseCommand.data_option
+ ]
+
+ def handle(self, args, options):
+ if len(args) != 2:
+ self.parser.error('Reference and localization required')
+ reference, locale = args
+ observer = compareDirs(reference, locale, merge_stage=options.merge)
+ return observer
+
+
+class CompareWebApp(BaseCommand):
+ """usage: %prog [options] webapp [locale locale]
+
+Check the localization status of a gaia-style web app.
+The first argument is the directory of the web app.
+Following arguments explicitly state the locales to test.
+If none are given, test all locales in manifest.webapp or files."""
+
+ options = BaseCommand.options[:-1] + [
+ BaseCommand.data_option]
+
+ def handle(self, args, options):
+ if len(args) < 1:
+ self.parser.error('Webapp directory required')
+ basedir = args[0]
+ locales = args[1:]
+ observer = compare_web_app(basedir, locales)
+ return observer
diff --git a/python/compare-locales/compare_locales/compare.py b/python/compare-locales/compare_locales/compare.py
new file mode 100644
index 000000000..4f71c46f8
--- /dev/null
+++ b/python/compare-locales/compare_locales/compare.py
@@ -0,0 +1,638 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'Mozilla l10n compare locales tool'
+
+import codecs
+import os
+import os.path
+import shutil
+import re
+from difflib import SequenceMatcher
+from collections import defaultdict
+
+try:
+ from json import dumps
+except ImportError:
+ from simplejson import dumps
+
+from compare_locales import parser
+from compare_locales import paths
+from compare_locales.checks import getChecker
+
+
+class Tree(object):
+ def __init__(self, valuetype):
+ self.branches = dict()
+ self.valuetype = valuetype
+ self.value = None
+
+ def __getitem__(self, leaf):
+ parts = []
+ if isinstance(leaf, paths.File):
+ parts = [p for p in [leaf.locale, leaf.module] if p] + \
+ leaf.file.split('/')
+ else:
+ parts = leaf.split('/')
+ return self.__get(parts)
+
+ def __get(self, parts):
+ common = None
+ old = None
+ new = tuple(parts)
+ t = self
+ for k, v in self.branches.iteritems():
+ for i, part in enumerate(zip(k, parts)):
+ if part[0] != part[1]:
+ i -= 1
+ break
+ if i < 0:
+ continue
+ i += 1
+ common = tuple(k[:i])
+ old = tuple(k[i:])
+ new = tuple(parts[i:])
+ break
+ if old:
+ self.branches.pop(k)
+ t = Tree(self.valuetype)
+ t.branches[old] = v
+ self.branches[common] = t
+ elif common:
+ t = self.branches[common]
+ if new:
+ if common:
+ return t.__get(new)
+ t2 = t
+ t = Tree(self.valuetype)
+ t2.branches[new] = t
+ if t.value is None:
+ t.value = t.valuetype()
+ return t.value
+
+ indent = ' '
+
+ def getContent(self, depth=0):
+ '''
+ Returns iterator of (depth, flag, key_or_value) tuples.
+ If flag is 'value', key_or_value is a value object, otherwise
+ (flag is 'key') it's a key string.
+ '''
+ keys = self.branches.keys()
+ keys.sort()
+ if self.value is not None:
+ yield (depth, 'value', self.value)
+ for key in keys:
+ yield (depth, 'key', key)
+ for child in self.branches[key].getContent(depth + 1):
+ yield child
+
+ def toJSON(self):
+ '''
+ Returns this Tree as a JSON-able tree of hashes.
+ Only the values need to take care that they're JSON-able.
+ '''
+ json = {}
+ keys = self.branches.keys()
+ keys.sort()
+ if self.value is not None:
+ json['value'] = self.value
+ children = [('/'.join(key), self.branches[key].toJSON())
+ for key in keys]
+ if children:
+ json['children'] = children
+ return json
+
+ def getStrRows(self):
+ def tostr(t):
+ if t[1] == 'key':
+ return self.indent * t[0] + '/'.join(t[2])
+ return self.indent * (t[0] + 1) + str(t[2])
+
+ return map(tostr, self.getContent())
+
+ def __str__(self):
+ return '\n'.join(self.getStrRows())
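+
+# Usage sketch (hypothetical paths and keys): Tree collapses common
+# path prefixes into shared branches, so two files in one directory
+# nest under a single key:
+#
+# t = Tree(dict)
+# t['browser/app/a.dtd']['missingEntity'] = ['foo']
+# t['browser/app/b.dtd']['missingEntity'] = ['bar']
+# print t # shows 'browser/app' once, with a.dtd and b.dtd below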
+
+
+class AddRemove(SequenceMatcher):
+ def __init__(self):
+ SequenceMatcher.__init__(self, None, None, None)
+
+ def set_left(self, left):
+ if not isinstance(left, list):
+ left = [l for l in left]
+ self.set_seq1(left)
+
+ def set_right(self, right):
+ if not isinstance(right, list):
+ right = [l for l in right]
+ self.set_seq2(right)
+
+ def __iter__(self):
+ for tag, i1, i2, j1, j2 in self.get_opcodes():
+ if tag == 'equal':
+ for pair in zip(self.a[i1:i2], self.b[j1:j2]):
+ yield ('equal', pair)
+ elif tag == 'delete':
+ for item in self.a[i1:i2]:
+ yield ('delete', item)
+ elif tag == 'insert':
+ for item in self.b[j1:j2]:
+ yield ('add', item)
+ else:
+ # tag == 'replace'
+ for item in self.a[i1:i2]:
+ yield ('delete', item)
+ for item in self.b[j1:j2]:
+ yield ('add', item)
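+
+# Sketch of the diff contract: given two sorted key lists, iteration
+# yields one ('equal', pair), ('delete', item) or ('add', item) each:
+#
+# ar = AddRemove()
+# ar.set_left(['a', 'b', 'c'])
+# ar.set_right(['a', 'c', 'd'])
+# list(ar) # [('equal', ('a', 'a')), ('delete', 'b'),
+#          #  ('equal', ('c', 'c')), ('add', 'd')]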
+
+
+class DirectoryCompare(SequenceMatcher):
+ def __init__(self, reference):
+ SequenceMatcher.__init__(self, None, [i for i in reference],
+ [])
+ self.watcher = None
+
+ def setWatcher(self, watcher):
+ self.watcher = watcher
+
+ def compareWith(self, other):
+ if not self.watcher:
+ return
+ self.set_seq2([i for i in other])
+ for tag, i1, i2, j1, j2 in self.get_opcodes():
+ if tag == 'equal':
+ for i, j in zip(xrange(i1, i2), xrange(j1, j2)):
+ self.watcher.compare(self.a[i], self.b[j])
+ elif tag == 'delete':
+ for i in xrange(i1, i2):
+ self.watcher.add(self.a[i], other.cloneFile(self.a[i]))
+ elif tag == 'insert':
+ for j in xrange(j1, j2):
+ self.watcher.remove(self.b[j])
+ else:
+ for j in xrange(j1, j2):
+ self.watcher.remove(self.b[j])
+ for i in xrange(i1, i2):
+ self.watcher.add(self.a[i], other.cloneFile(self.a[i]))
+
+
+class Observer(object):
+ stat_cats = ['missing', 'obsolete', 'missingInFiles', 'report',
+ 'changed', 'unchanged', 'keys']
+
+ def __init__(self):
+ class intdict(defaultdict):
+ def __init__(self):
+ defaultdict.__init__(self, int)
+
+ self.summary = defaultdict(intdict)
+ self.details = Tree(dict)
+ self.filter = None
+
+ # support pickling
+ def __getstate__(self):
+ return dict(summary=self.getSummary(), details=self.details)
+
+ def __setstate__(self, state):
+ class intdict(defaultdict):
+ def __init__(self):
+ defaultdict.__init__(self, int)
+
+ self.summary = defaultdict(intdict)
+ if 'summary' in state:
+ for loc, stats in state['summary'].iteritems():
+ self.summary[loc].update(stats)
+ self.details = state['details']
+ self.filter = None
+
+ def getSummary(self):
+ plaindict = {}
+ for k, v in self.summary.iteritems():
+ plaindict[k] = dict(v)
+ return plaindict
+
+ def toJSON(self):
+ return dict(summary=self.getSummary(), details=self.details.toJSON())
+
+ def notify(self, category, file, data):
+ rv = "error"
+ if category in self.stat_cats:
+ # these get called post reporting just for stats
+ # return "error" to forward them to other other_observers
+ self.summary[file.locale][category] += data
+ # keep track of how many strings are in a missing file
+ # we got the {'missingFile': 'error'} from the first pass
+ if category == 'missingInFiles':
+ self.details[file]['strings'] = data
+ return "error"
+ if category in ['missingFile', 'obsoleteFile']:
+ if self.filter is not None:
+ rv = self.filter(file)
+ if rv != "ignore":
+ self.details[file][category] = rv
+ return rv
+ if category in ['missingEntity', 'obsoleteEntity']:
+ if self.filter is not None:
+ rv = self.filter(file, data)
+ if rv == "ignore":
+ return rv
+ v = self.details[file]
+ try:
+ v[category].append(data)
+ except KeyError:
+ v[category] = [data]
+ return rv
+ if category == 'error':
+ try:
+ self.details[file][category].append(data)
+ except KeyError:
+ self.details[file][category] = [data]
+ self.summary[file.locale]['errors'] += 1
+ elif category == 'warning':
+ try:
+ self.details[file][category].append(data)
+ except KeyError:
+ self.details[file][category] = [data]
+ self.summary[file.locale]['warnings'] += 1
+ return rv
+
+ def toExhibit(self):
+ items = []
+ for locale in sorted(self.summary.iterkeys()):
+ summary = self.summary[locale]
+ if locale is not None:
+ item = {'id': 'xxx/' + locale,
+ 'label': locale,
+ 'locale': locale}
+ else:
+ item = {'id': 'xxx',
+ 'label': 'xxx',
+ 'locale': 'xxx'}
+ item['type'] = 'Build'
+ total = sum([summary[k]
+ for k in ('changed', 'unchanged', 'report', 'missing',
+ 'missingInFiles')
+ if k in summary])
+ rate = (('changed' in summary and summary['changed'] * 100) or
+ 0) / total
+ item.update((k, summary.get(k, 0))
+ for k in ('changed', 'unchanged'))
+ item.update((k, summary[k])
+ for k in ('report', 'errors', 'warnings')
+ if k in summary)
+ item['missing'] = summary.get('missing', 0) + \
+ summary.get('missingInFiles', 0)
+ item['completion'] = rate
+ item['total'] = total
+ result = 'success'
+ if item.get('warnings', 0):
+ result = 'warning'
+ if item.get('errors', 0) or item.get('missing', 0):
+ result = 'failure'
+ item['result'] = result
+ items.append(item)
+ data = {
+ "properties": dict.fromkeys(
+ ("completion", "errors", "warnings", "missing", "report",
+ "unchanged", "changed", "obsolete"),
+ {"valueType": "number"}),
+ "types": {
+ "Build": {"pluralLabel": "Builds"}
+ }}
+ data['items'] = items
+ return dumps(data, indent=2)
+
+ def serialize(self, type="text"):
+ if type == "exhibit":
+ return self.toExhibit()
+ if type == "json":
+ return dumps(self.toJSON())
+
+ def tostr(t):
+ if t[1] == 'key':
+ return ' ' * t[0] + '/'.join(t[2])
+ o = []
+ indent = ' ' * (t[0] + 1)
+ if 'error' in t[2]:
+ o += [indent + 'ERROR: ' + e for e in t[2]['error']]
+ if 'warning' in t[2]:
+ o += [indent + 'WARNING: ' + e for e in t[2]['warning']]
+ if 'missingEntity' in t[2] or 'obsoleteEntity' in t[2]:
+ missingEntities = ('missingEntity' in t[2] and
+ t[2]['missingEntity']) or []
+ obsoleteEntities = ('obsoleteEntity' in t[2] and
+ t[2]['obsoleteEntity']) or []
+ entities = missingEntities + obsoleteEntities
+ entities.sort()
+ for entity in entities:
+ op = '+'
+ if entity in obsoleteEntities:
+ op = '-'
+ o.append(indent + op + entity)
+ elif 'missingFile' in t[2]:
+ o.append(indent + '// add and localize this file')
+ elif 'obsoleteFile' in t[2]:
+ o.append(indent + '// remove this file')
+ return '\n'.join(o)
+
+ out = []
+ for locale, summary in sorted(self.summary.iteritems()):
+ if locale is not None:
+ out.append(locale + ':')
+ out += [k + ': ' + str(v) for k, v in sorted(summary.iteritems())]
+ total = sum([summary[k]
+ for k in ['changed', 'unchanged', 'report', 'missing',
+ 'missingInFiles']
+ if k in summary])
+ rate = 0
+ if total:
+ rate = (('changed' in summary and summary['changed'] * 100) or
+ 0) / total
+ out.append('%d%% of entries changed' % rate)
+ return '\n'.join(map(tostr, self.details.getContent()) + out)
+
+ def __str__(self):
+ return 'observer'
+
+
+class ContentComparer:
+ keyRE = re.compile('[kK]ey')
+ nl = re.compile('\n', re.M)
+
+ def __init__(self):
+ '''Create a ContentComparer.
+ observer is usually an instance of Observer. The return values
+ of the notify method are used to control the handling of missing
+ entities.
+ '''
+ self.reference = dict()
+ self.observer = Observer()
+ self.other_observers = []
+ self.merge_stage = None
+
+ def add_observer(self, obs):
+ '''Add a non-filtering observer.
+ Results from the notify calls are ignored.
+ '''
+ self.other_observers.append(obs)
+
+ def set_merge_stage(self, merge_stage):
+ self.merge_stage = merge_stage
+
+ def merge(self, ref_entities, ref_map, ref_file, l10n_file, missing,
+ skips, p):
+ outfile = os.path.join(self.merge_stage, l10n_file.module,
+ l10n_file.file)
+ outdir = os.path.dirname(outfile)
+ if not os.path.isdir(outdir):
+ os.makedirs(outdir)
+ if not p.canMerge:
+ shutil.copyfile(ref_file.fullpath, outfile)
+ print "copied reference to " + outfile
+ return
+ if skips:
+ # skips come in ordered by key name, we need them in file order
+ skips.sort(key=lambda s: s.span[0])
+ trailing = (['\n'] +
+ [ref_entities[ref_map[key]].all for key in missing] +
+ [ref_entities[ref_map[skip.key]].all for skip in skips
+ if not isinstance(skip, parser.Junk)])
+ if skips:
+ # we need to skip a few erroneous blocks in the input, copy by hand
+ f = codecs.open(outfile, 'wb', p.encoding)
+ offset = 0
+ for skip in skips:
+ chunk = skip.span
+ f.write(p.contents[offset:chunk[0]])
+ offset = chunk[1]
+ f.write(p.contents[offset:])
+ else:
+ shutil.copyfile(l10n_file.fullpath, outfile)
+ f = codecs.open(outfile, 'ab', p.encoding)
+ print "adding to " + outfile
+
+ def ensureNewline(s):
+ if not s.endswith('\n'):
+ return s + '\n'
+ return s
+
+ f.write(''.join(map(ensureNewline, trailing)))
+ f.close()
+
+ def notify(self, category, file, data):
+ """Check observer for the found data, and if it's
+ not to ignore, notify other_observers.
+ """
+ rv = self.observer.notify(category, file, data)
+ if rv == 'ignore':
+ return rv
+ for obs in self.other_observers:
+ # non-filtering other_observers, ignore results
+ obs.notify(category, file, data)
+ return rv
+
+ def remove(self, obsolete):
+ self.notify('obsoleteFile', obsolete, None)
+ pass
+
+ def compare(self, ref_file, l10n):
+ try:
+ p = parser.getParser(ref_file.file)
+ except UserWarning:
+ # no comparison, XXX report?
+ return
+ if ref_file not in self.reference:
+ # we didn't parse this before
+ try:
+ p.readContents(ref_file.getContents())
+ except Exception, e:
+ self.notify('error', ref_file, str(e))
+ return
+ self.reference[ref_file] = p.parse()
+ ref = self.reference[ref_file]
+ ref_list = ref[1].keys()
+ ref_list.sort()
+ try:
+ p.readContents(l10n.getContents())
+ l10n_entities, l10n_map = p.parse()
+ except Exception, e:
+ self.notify('error', l10n, str(e))
+ return
+ lines = []
+
+ def _getLine(offset):
+ if not lines:
+ lines.append(0)
+ for m in self.nl.finditer(p.contents):
+ lines.append(m.end())
+ for i in xrange(len(lines), 0, -1):
+ if offset >= lines[i - 1]:
+ return (i, offset - lines[i - 1])
+ return (1, offset)
+
+ l10n_list = l10n_map.keys()
+ l10n_list.sort()
+ ar = AddRemove()
+ ar.set_left(ref_list)
+ ar.set_right(l10n_list)
+ report = missing = obsolete = changed = unchanged = keys = 0
+ missings = []
+ skips = []
+ checker = getChecker(l10n, reference=ref[0])
+ for action, item_or_pair in ar:
+ if action == 'delete':
+ # missing entity
+ _rv = self.notify('missingEntity', l10n, item_or_pair)
+ if _rv == "ignore":
+ continue
+ if _rv == "error":
+ # only add to missing entities for l10n-merge on error,
+ # not report
+ missings.append(item_or_pair)
+ missing += 1
+ else:
+ # just report
+ report += 1
+ elif action == 'add':
+ # obsolete entity or junk
+ if isinstance(l10n_entities[l10n_map[item_or_pair]],
+ parser.Junk):
+ junk = l10n_entities[l10n_map[item_or_pair]]
+ params = (junk.val,) + junk.span
+ self.notify('error', l10n,
+ 'Unparsed content "%s" at %d-%d' % params)
+ if self.merge_stage is not None:
+ skips.append(junk)
+ elif self.notify('obsoleteEntity', l10n,
+ item_or_pair) != 'ignore':
+ obsolete += 1
+ else:
+ # entity found in both ref and l10n, check for changed
+ entity = item_or_pair[0]
+ refent = ref[0][ref[1][entity]]
+ l10nent = l10n_entities[l10n_map[entity]]
+ if self.keyRE.search(entity):
+ keys += 1
+ else:
+ if refent.val == l10nent.val:
+ self.doUnchanged(l10nent)
+ unchanged += 1
+ else:
+ self.doChanged(ref_file, refent, l10nent)
+ changed += 1
+ # run checks:
+ if checker:
+ for tp, pos, msg, cat in checker.check(refent, l10nent):
+ # compute real src position, if first line,
+ # col needs adjustment
+ _l, _offset = _getLine(l10nent.val_span[0])
+ if isinstance(pos, tuple):
+ # line, column
+ if pos[0] == 1:
+ col = pos[1] + _offset
+ else:
+ col = pos[1]
+ _l += pos[0] - 1
+ else:
+ _l, col = _getLine(l10nent.val_span[0] + pos)
+ # skip error entities when merging
+ if tp == 'error' and self.merge_stage is not None:
+ skips.append(l10nent)
+ self.notify(tp, l10n,
+ u"%s at line %d, column %d for %s" %
+ (msg, _l, col, refent.key))
+ pass
+ if missing:
+ self.notify('missing', l10n, missing)
+ if self.merge_stage is not None and (missings or skips):
+ self.merge(ref[0], ref[1], ref_file, l10n, missings, skips, p)
+ if report:
+ self.notify('report', l10n, report)
+ if obsolete:
+ self.notify('obsolete', l10n, obsolete)
+ if changed:
+ self.notify('changed', l10n, changed)
+ if unchanged:
+ self.notify('unchanged', l10n, unchanged)
+ if keys:
+ self.notify('keys', l10n, keys)
+ pass
+
+ def add(self, orig, missing):
+ if self.notify('missingFile', missing, None) == "ignore":
+ # filter said that we don't need this file, don't count it
+ return
+ f = orig
+ try:
+ p = parser.getParser(f.file)
+ except UserWarning:
+ return
+ try:
+ p.readContents(f.getContents())
+ entities, map = p.parse()
+ except Exception, e:
+ self.notify('error', f, str(e))
+ return
+ self.notify('missingInFiles', missing, len(map))
+
+ def doUnchanged(self, entity):
+ # overload this if needed
+ pass
+
+ def doChanged(self, file, ref_entity, l10n_entity):
+ # overload this if needed
+ pass
+
+
+def compareApp(app, other_observer=None, merge_stage=None, clobber=False):
+ '''Compare locales set in app.
+
+ Optional arguments are:
+ - other_observer. An object implementing
+ notify(category, _file, data)
+ The return values of that callback are ignored.
+ - merge_stage. A directory to be used for staging the output of
+ l10n-merge.
+ - clobber. Clobber the module subdirectories of the merge dir as we go.
+ Use wisely, as it might cause data loss.
+ '''
+ comparer = ContentComparer()
+ if other_observer is not None:
+ comparer.add_observer(other_observer)
+ comparer.observer.filter = app.filter
+ for module, reference, locales in app:
+ dir_comp = DirectoryCompare(reference)
+ dir_comp.setWatcher(comparer)
+ for _, localization in locales:
+ if merge_stage is not None:
+ locale_merge = merge_stage.format(ab_CD=localization.locale)
+ comparer.set_merge_stage(locale_merge)
+ if clobber:
+ # if clobber, remove the stage for the module if it exists
+ clobberdir = os.path.join(locale_merge, module)
+ if os.path.exists(clobberdir):
+ shutil.rmtree(clobberdir)
+ print "clobbered " + clobberdir
+ dir_comp.compareWith(localization)
+ return comparer.observer
+
+
+def compareDirs(reference, locale, other_observer=None, merge_stage=None):
+ '''Compare reference and locale dir.
+
+ Optional arguments are:
+ - other_observer. An object implementing
+ notify(category, _file, data)
+ The return values of that callback are ignored.
+ '''
+ comparer = ContentComparer()
+ if other_observer is not None:
+ comparer.add_observer(other_observer)
+ comparer.set_merge_stage(merge_stage)
+ dir_comp = DirectoryCompare(paths.EnumerateDir(reference))
+ dir_comp.setWatcher(comparer)
+ dir_comp.compareWith(paths.EnumerateDir(locale))
+ return comparer.observer
diff --git a/python/compare-locales/compare_locales/parser.py b/python/compare-locales/compare_locales/parser.py
new file mode 100644
index 000000000..a97cf201b
--- /dev/null
+++ b/python/compare-locales/compare_locales/parser.py
@@ -0,0 +1,521 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+import codecs
+import logging
+from HTMLParser import HTMLParser
+
+__constructors = []
+
+
+class Entity(object):
+ '''
+ Abstraction layer for a localizable entity.
+ Currently supported are grammars of the form:
+
+ 1: pre white space
+ 2: pre comments
+ 3: entity definition
+ 4: entity key (name)
+ 5: entity value
+ 6: post comment (and white space) in the same line (dtd only)
+ <--[1]
+ <!-- pre comments --> <--[2]
+ <!ENTITY key "value"> <!-- comment -->
+
+ <-------[3]---------><------[6]------>
+ '''
+ def __init__(self, contents, pp,
+ span, pre_ws_span, pre_comment_span, def_span,
+ key_span, val_span, post_span):
+ self.contents = contents
+ self.span = span
+ self.pre_ws_span = pre_ws_span
+ self.pre_comment_span = pre_comment_span
+ self.def_span = def_span
+ self.key_span = key_span
+ self.val_span = val_span
+ self.post_span = post_span
+ self.pp = pp
+ pass
+
+ # getter helpers
+
+ def get_all(self):
+ return self.contents[self.span[0]:self.span[1]]
+
+ def get_pre_ws(self):
+ return self.contents[self.pre_ws_span[0]:self.pre_ws_span[1]]
+
+ def get_pre_comment(self):
+ return self.contents[self.pre_comment_span[0]:
+ self.pre_comment_span[1]]
+
+ def get_def(self):
+ return self.contents[self.def_span[0]:self.def_span[1]]
+
+ def get_key(self):
+ return self.contents[self.key_span[0]:self.key_span[1]]
+
+ def get_val(self):
+ return self.pp(self.contents[self.val_span[0]:self.val_span[1]])
+
+ def get_raw_val(self):
+ return self.contents[self.val_span[0]:self.val_span[1]]
+
+ def get_post(self):
+ return self.contents[self.post_span[0]:self.post_span[1]]
+
+ # getters
+
+ all = property(get_all)
+ pre_ws = property(get_pre_ws)
+ pre_comment = property(get_pre_comment)
+ definition = property(get_def)
+ key = property(get_key)
+ val = property(get_val)
+ raw_val = property(get_raw_val)
+ post = property(get_post)
+
+ def __repr__(self):
+ return self.key
+
+
+class Junk(object):
+ '''
+ An almost-Entity, representing junk data that we didn't parse.
+ This way, we can signal bad content as stuff we don't understand.
+ And the either fix that, or report real bugs in localizations.
+ '''
+ junkid = 0
+
+ def __init__(self, contents, span):
+ self.contents = contents
+ self.span = span
+ self.pre_ws = self.pre_comment = self.definition = self.post = ''
+ self.__class__.junkid += 1
+ self.key = '_junk_%d_%d-%d' % (self.__class__.junkid, span[0], span[1])
+
+ # getter helpers
+ def get_all(self):
+ return self.contents[self.span[0]:self.span[1]]
+
+ # getters
+ all = property(get_all)
+ val = property(get_all)
+
+ def __repr__(self):
+ return self.key
+
+
+class Parser:
+ canMerge = True
+
+ def __init__(self):
+ if not hasattr(self, 'encoding'):
+ self.encoding = 'utf-8'
+ pass
+
+ def readFile(self, file):
+ f = codecs.open(file, 'r', self.encoding)
+ try:
+ self.contents = f.read()
+ except UnicodeDecodeError, e:
+ (logging.getLogger('locales')
+ .error("Can't read file: " + file + '; ' + str(e)))
+ self.contents = u''
+ f.close()
+
+ def readContents(self, contents):
+ (self.contents, length) = codecs.getdecoder(self.encoding)(contents)
+
+ def parse(self):
+ l = []
+ m = {}
+ for e in self:
+ m[e.key] = len(l)
+ l.append(e)
+ return (l, m)
+
+ def postProcessValue(self, val):
+ return val
+
+ def __iter__(self):
+ contents = self.contents
+ offset = 0
+ self.header, offset = self.getHeader(contents, offset)
+ self.footer = ''
+ entity, offset = self.getEntity(contents, offset)
+ while entity:
+ yield entity
+ entity, offset = self.getEntity(contents, offset)
+ f = self.reFooter.match(contents, offset)
+ if f:
+ self.footer = f.group()
+ offset = f.end()
+ if len(contents) > offset:
+ yield Junk(contents, (offset, len(contents)))
+ pass
+
+ def getHeader(self, contents, offset):
+ header = ''
+ h = self.reHeader.match(contents)
+ if h:
+ header = h.group()
+ offset = h.end()
+ return (header, offset)
+
+ def getEntity(self, contents, offset):
+ m = self.reKey.match(contents, offset)
+ if m:
+ offset = m.end()
+ entity = self.createEntity(contents, m)
+ return (entity, offset)
+ # first check if footer has a non-empty match,
+ # 'cause then we don't find junk
+ m = self.reFooter.match(contents, offset)
+ if m and m.end() > offset:
+ return (None, offset)
+ m = self.reKey.search(contents, offset)
+ if m:
+            # match() failed but search() succeeded, so there's junk
+            # between offset and the match start; match() succeeds next call
+ junkend = m.start()
+ return (Junk(contents, (offset, junkend)), junkend)
+ return (None, offset)
+
+ def createEntity(self, contents, m):
+ return Entity(contents, self.postProcessValue,
+ *[m.span(i) for i in xrange(7)])
+
+
+def getParser(path):
+ for item in __constructors:
+ if re.search(item[0], path):
+ return item[1]
+ raise UserWarning("Cannot find Parser")
+
+
+# Subgroups of the match will:
+# 1: pre white space
+# 2: pre comments
+# 3: entity definition
+# 4: entity key (name)
+# 5: entity value
+# 6: post comment (and white space) in the same line (dtd only)
+# <--[1]
+# <!-- pre comments --> <--[2]
+# <!ENTITY key "value"> <!-- comment -->
+#
+# <-------[3]---------><------[6]------>
+
+
+class DTDParser(Parser):
+ # http://www.w3.org/TR/2006/REC-xml11-20060816/#NT-NameStartChar
+ # ":" | [A-Z] | "_" | [a-z] |
+ # [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF]
+ # | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] |
+ # [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] |
+ # [#x10000-#xEFFFF]
+ CharMinusDash = u'\x09\x0A\x0D\u0020-\u002C\u002E-\uD7FF\uE000-\uFFFD'
+ XmlComment = '<!--(?:-?[%s])*?-->' % CharMinusDash
+ NameStartChar = u':A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF' + \
+ u'\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F' + \
+ u'\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD'
+ # + \U00010000-\U000EFFFF seems to be unsupported in python
+
+ # NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 |
+ # [#x0300-#x036F] | [#x203F-#x2040]
+ NameChar = NameStartChar + ur'\-\.0-9' + u'\xB7\u0300-\u036F\u203F-\u2040'
+ Name = '[' + NameStartChar + '][' + NameChar + ']*'
+ reKey = re.compile('(?:(?P<pre>\s*)(?P<precomment>(?:' + XmlComment +
+ '\s*)*)(?P<entity><!ENTITY\s+(?P<key>' + Name +
+ ')\s+(?P<val>\"[^\"]*\"|\'[^\']*\'?)\s*>)'
+ '(?P<post>[ \t]*(?:' + XmlComment + '\s*)*\n?)?)',
+ re.DOTALL)
+ # add BOM to DTDs, details in bug 435002
+ reHeader = re.compile(u'^\ufeff?'
+ u'(\s*<!--.*(http://mozilla.org/MPL/2.0/|'
+ u'LICENSE BLOCK)([^-]+-)*[^-]+-->)?', re.S)
+ reFooter = re.compile('\s*(<!--([^-]+-)*[^-]+-->\s*)*$')
+ rePE = re.compile('(?:(\s*)((?:' + XmlComment + '\s*)*)'
+ '(<!ENTITY\s+%\s+(' + Name +
+ ')\s+SYSTEM\s+(\"[^\"]*\"|\'[^\']*\')\s*>\s*%' + Name +
+ ';)([ \t]*(?:' + XmlComment + '\s*)*\n?)?)')
+
+ def getEntity(self, contents, offset):
+ '''
+        Override Parser.getEntity to special-case parsed entities.
+ Just check for a parsed entity if that method claims junk.
+
+ <!ENTITY % foo SYSTEM "url">
+ %foo;
+ '''
+ entity, inneroffset = Parser.getEntity(self, contents, offset)
+ if (entity and isinstance(entity, Junk)) or entity is None:
+ m = self.rePE.match(contents, offset)
+ if m:
+ inneroffset = m.end()
+ entity = Entity(contents, self.postProcessValue,
+ *[m.span(i) for i in xrange(7)])
+ return (entity, inneroffset)
+
+ def createEntity(self, contents, m):
+ valspan = m.span('val')
+ valspan = (valspan[0]+1, valspan[1]-1)
+ return Entity(contents, self.postProcessValue, m.span(),
+ m.span('pre'), m.span('precomment'),
+ m.span('entity'), m.span('key'), valspan,
+ m.span('post'))
+
+
+class PropertiesParser(Parser):
+ escape = re.compile(r'\\((?P<uni>u[0-9a-fA-F]{1,4})|'
+ '(?P<nl>\n\s*)|(?P<single>.))', re.M)
+ known_escapes = {'n': '\n', 'r': '\r', 't': '\t', '\\': '\\'}
+
+ def __init__(self):
+ self.reKey = re.compile('^(\s*)'
+ '((?:[#!].*?\n\s*)*)'
+ '([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', re.M)
+ self.reHeader = re.compile('^\s*([#!].*\s*)+')
+ self.reFooter = re.compile('\s*([#!].*\s*)*$')
+ self._escapedEnd = re.compile(r'\\+$')
+ self._trailingWS = re.compile(r'[ \t]*$')
+ Parser.__init__(self)
+
+ def getHeader(self, contents, offset):
+ header = ''
+ h = self.reHeader.match(contents, offset)
+ if h:
+ candidate = h.group()
+ if 'http://mozilla.org/MPL/2.0/' in candidate or \
+ 'LICENSE BLOCK' in candidate:
+ header = candidate
+ offset = h.end()
+ return (header, offset)
+
+ def getEntity(self, contents, offset):
+        # overridden to parse values line by line
+ m = self.reKey.match(contents, offset)
+ if m:
+ offset = m.end()
+ while True:
+ endval = nextline = contents.find('\n', offset)
+ if nextline == -1:
+ endval = offset = len(contents)
+ break
+ # is newline escaped?
+ _e = self._escapedEnd.search(contents, offset, nextline)
+ offset = nextline + 1
+ if _e is None:
+ break
+ # backslashes at end of line, if 2*n, not escaped
+ if len(_e.group()) % 2 == 0:
+ break
+ # strip trailing whitespace
+ ws = self._trailingWS.search(contents, m.end(), offset)
+ if ws:
+ endval -= ws.end() - ws.start()
+ entity = Entity(contents, self.postProcessValue,
+ (m.start(), offset), # full span
+ m.span(1), # leading whitespan
+ m.span(2), # leading comment span
+ (m.start(3), offset), # entity def span
+ m.span(3), # key span
+ (m.end(), endval), # value span
+ (offset, offset)) # post comment span, empty
+ return (entity, offset)
+ m = self.reKey.search(contents, offset)
+ if m:
+            # match() failed but search() succeeded, so there's junk
+            # between offset and the match start; match() succeeds next call
+ junkend = m.start()
+ return (Junk(contents, (offset, junkend)), junkend)
+ return (None, offset)
+
+ def postProcessValue(self, val):
+
+ def unescape(m):
+ found = m.groupdict()
+ if found['uni']:
+ return unichr(int(found['uni'][1:], 16))
+ if found['nl']:
+ return ''
+ return self.known_escapes.get(found['single'], found['single'])
+ val = self.escape.sub(unescape, val)
+ return val
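+
+    # For illustration (hedged, not exercised by the parser itself):
+    # postProcessValue resolves the escapes matched above, e.g.
+    #     r'\tfoo\u0041'  ->  '\tfooA'
+    # and a backslash-newline continuation collapses to nothing.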
+
+
+class DefinesParser(Parser):
+ # can't merge, #unfilter needs to be the last item, which we don't support
+ canMerge = False
+
+ def __init__(self):
+ self.reKey = re.compile('^(\s*)((?:^#(?!define\s).*\s*)*)'
+ '(#define[ \t]+(\w+)[ \t]+(.*?))([ \t]*$\n?)',
+ re.M)
+ self.reHeader = re.compile('^\s*(#(?!define\s).*\s*)*')
+ self.reFooter = re.compile('\s*(#(?!define\s).*\s*)*$', re.M)
+ Parser.__init__(self)
+
+
+class IniParser(Parser):
+ '''
+ Parse files of the form:
+ # initial comment
+ [cat]
+ whitespace*
+ #comment
+ string=value
+ ...
+ '''
+ def __init__(self):
+ self.reHeader = re.compile('^((?:\s*|[;#].*)\n)*\[.+?\]\n', re.M)
+ self.reKey = re.compile('(\s*)((?:[;#].*\n\s*)*)((.+?)=(.*))(\n?)')
+ self.reFooter = re.compile('\s*([;#].*\s*)*$')
+ Parser.__init__(self)
+
+
+DECL, COMMENT, START, END, CONTENT = range(5)
+
+
+class BookmarksParserInner(HTMLParser):
+
+ class Token(object):
+ _type = None
+ content = ''
+
+ def __str__(self):
+ return self.content
+
+ class DeclToken(Token):
+ _type = DECL
+
+ def __init__(self, decl):
+ self.content = decl
+ pass
+
+ def __str__(self):
+ return '<!%s>' % self.content
+ pass
+
+ class CommentToken(Token):
+ _type = COMMENT
+
+ def __init__(self, comment):
+ self.content = comment
+ pass
+
+ def __str__(self):
+ return '<!--%s-->' % self.content
+ pass
+
+ class StartToken(Token):
+ _type = START
+
+ def __init__(self, tag, attrs, content):
+ self.tag = tag
+ self.attrs = dict(attrs)
+ self.content = content
+ pass
+ pass
+
+ class EndToken(Token):
+ _type = END
+
+ def __init__(self, tag):
+ self.tag = tag
+ pass
+
+ def __str__(self):
+ return '</%s>' % self.tag.upper()
+ pass
+
+ class ContentToken(Token):
+ _type = CONTENT
+
+ def __init__(self, content):
+ self.content = content
+ pass
+ pass
+
+ def __init__(self):
+ HTMLParser.__init__(self)
+ self.tokens = []
+
+ def parse(self, contents):
+ self.tokens = []
+ self.feed(contents)
+ self.close()
+ return self.tokens
+
+    # Called when a declaration like <!DOCTYPE ...> is encountered
+ def handle_decl(self, decl):
+ self.tokens.append(self.DeclToken(decl))
+
+    # Called when a comment is encountered
+ def handle_comment(self, comment):
+ self.tokens.append(self.CommentToken(comment))
+
+ def handle_starttag(self, tag, attrs):
+ self.tokens.append(self.StartToken(tag, attrs,
+ self.get_starttag_text()))
+
+ # Called when text data is encountered
+ def handle_data(self, data):
+        if self.tokens and self.tokens[-1]._type == CONTENT:
+ self.tokens[-1].content += data
+ else:
+ self.tokens.append(self.ContentToken(data))
+
+ def handle_charref(self, data):
+ self.handle_data('&#%s;' % data)
+
+ def handle_entityref(self, data):
+ self.handle_data('&%s;' % data)
+
+    # Called when an end tag is encountered
+ def handle_endtag(self, tag):
+ self.tokens.append(self.EndToken(tag))
+
+
+class BookmarksParser(Parser):
+ canMerge = False
+
+ class BMEntity(object):
+ def __init__(self, key, val):
+ self.key = key
+ self.val = val
+
+ def __iter__(self):
+ p = BookmarksParserInner()
+ tks = p.parse(self.contents)
+ i = 0
+ k = []
+ for i in xrange(len(tks)):
+ t = tks[i]
+ if t._type == START:
+ k.append(t.tag)
+ keys = t.attrs.keys()
+ keys.sort()
+ for attrname in keys:
+ yield self.BMEntity('.'.join(k) + '.@' + attrname,
+ t.attrs[attrname])
+ if i + 1 < len(tks) and tks[i+1]._type == CONTENT:
+ i += 1
+ t = tks[i]
+ v = t.content.strip()
+ if v:
+ yield self.BMEntity('.'.join(k), v)
+ elif t._type == END:
+ k.pop()
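+
+    # For illustration (hypothetical markup): feeding
+    # '<dl><a href="u">Name</a></dl>' yields BMEntities keyed
+    # 'dl.a.@href' (value 'u') and 'dl.a' (value 'Name').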
+
+
+__constructors = [('\\.dtd$', DTDParser()),
+ ('\\.properties$', PropertiesParser()),
+ ('\\.ini$', IniParser()),
+ ('\\.inc$', DefinesParser()),
+ ('bookmarks\\.html$', BookmarksParser())]
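+
+
+# A minimal usage sketch (assuming a hypothetical app.dtd on disk): the
+# path suffix picks the parser instance, and iterating it yields Entity
+# and Junk objects.
+#
+#     p = getParser('app.dtd')
+#     p.readFile('app.dtd')
+#     for entity in p:
+#         print entity.key, entity.val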
diff --git a/python/compare-locales/compare_locales/paths.py b/python/compare-locales/compare_locales/paths.py
new file mode 100644
index 000000000..f72b3a2e7
--- /dev/null
+++ b/python/compare-locales/compare_locales/paths.py
@@ -0,0 +1,398 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os.path
+import os
+from ConfigParser import ConfigParser, NoSectionError, NoOptionError
+from urlparse import urlparse, urljoin
+from urllib import pathname2url, url2pathname
+from urllib2 import urlopen
+from collections import defaultdict
+from compare_locales import util
+
+
+class L10nConfigParser(object):
+ '''Helper class to gather application information from ini files.
+
+    This class uses synchronous I/O to read files or web data.
+    Subclass it and override loadConfigs and addChild if you need async.
+ '''
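+    # An illustrative l10n.ini (hypothetical content), in the shape
+    # consumed by onLoadConfig below:
+    #     [general]
+    #     depth = ../..
+    #     all = browser/locales/all-locales
+    #     [compare]
+    #     dirs = browser other-licenses/branding/firefox
+    #     [includes]
+    #     toolkit = toolkit/locales/l10n.ini
+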
+ def __init__(self, inipath, **kwargs):
+ """Constructor for L10nConfigParsers
+
+ inipath -- l10n.ini path
+    Optional keyword arguments are forwarded to the inner ConfigParser as
+ defaults.
+ """
+ if os.path.isabs(inipath):
+ self.inipath = 'file:%s' % pathname2url(inipath)
+ else:
+ pwdurl = 'file:%s/' % pathname2url(os.getcwd())
+ self.inipath = urljoin(pwdurl, inipath)
+ # l10n.ini files can import other l10n.ini files, store the
+ # corresponding L10nConfigParsers
+ self.children = []
+ # we really only care about the l10n directories described in l10n.ini
+ self.dirs = []
+ # optional defaults to be passed to the inner ConfigParser (unused?)
+ self.defaults = kwargs
+
+ def getDepth(self, cp):
+ '''Get the depth for the comparison from the parsed l10n.ini.
+
+        Subclasses may override this to get the source depth for
+        fennec and friends.
+ '''
+ try:
+ depth = cp.get('general', 'depth')
+ except:
+ depth = '.'
+ return depth
+
+ def getFilters(self):
+ '''Get the test functions from this ConfigParser and all children.
+
+ Only works with synchronous loads, used by compare-locales, which
+ is local anyway.
+ '''
+ filterurl = urljoin(self.inipath, 'filter.py')
+ try:
+ l = {}
+ execfile(url2pathname(urlparse(filterurl).path), {}, l)
+ if 'test' in l and callable(l['test']):
+ filters = [l['test']]
+ else:
+ filters = []
+ except:
+ filters = []
+
+ for c in self.children:
+ filters += c.getFilters()
+
+ return filters
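+
+    # A filter.py is expected to define a callable named test; an assumed
+    # minimal example:
+    #     def test(module, path, entity=None):
+    #         if path.endswith('.inc'):
+    #             return 'ignore'
+    #         return 'error'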
+
+ def loadConfigs(self):
+ """Entry point to load the l10n.ini file this Parser refers to.
+
+        This implementation uses synchronous loads; subclasses may override
+        this behaviour. If you do, make sure to pass a file-like object
+ to onLoadConfig.
+ """
+ self.onLoadConfig(urlopen(self.inipath))
+
+ def onLoadConfig(self, inifile):
+ """Parse a file-like object for the loaded l10n.ini file."""
+ cp = ConfigParser(self.defaults)
+ cp.readfp(inifile)
+ depth = self.getDepth(cp)
+ self.baseurl = urljoin(self.inipath, depth)
+ # create child loaders for any other l10n.ini files to be included
+ try:
+ for title, path in cp.items('includes'):
+ # skip default items
+ if title in self.defaults:
+ continue
+ # add child config parser
+ self.addChild(title, path, cp)
+ except NoSectionError:
+ pass
+ # try to load the "dirs" defined in the "compare" section
+ try:
+ self.dirs.extend(cp.get('compare', 'dirs').split())
+ except (NoOptionError, NoSectionError):
+ pass
+ # try getting a top level compare dir, as used for fennec
+ try:
+ self.tld = cp.get('compare', 'tld')
+ # remove tld from comparison dirs
+ if self.tld in self.dirs:
+ self.dirs.remove(self.tld)
+ except (NoOptionError, NoSectionError):
+ self.tld = None
+ # try to set "all_path" and "all_url"
+ try:
+ self.all_path = cp.get('general', 'all')
+ self.all_url = urljoin(self.baseurl, self.all_path)
+ except (NoOptionError, NoSectionError):
+ self.all_path = None
+ self.all_url = None
+ return cp
+
+ def addChild(self, title, path, orig_cp):
+ """Create a child L10nConfigParser and load it.
+
+ title -- indicates the module's name
+ path -- indicates the path to the module's l10n.ini file
+ orig_cp -- the configuration parser of this l10n.ini
+ """
+ cp = L10nConfigParser(urljoin(self.baseurl, path), **self.defaults)
+ cp.loadConfigs()
+ self.children.append(cp)
+
+ def getTLDPathsTuple(self, basepath):
+ """Given the basepath, return the path fragments to be used for
+ self.tld. For build runs, this is (basepath, self.tld), for
+ source runs, just (basepath,).
+
+        @see the overriding method in SourceTreeConfigParser.
+ """
+ return (basepath, self.tld)
+
+ def dirsIter(self):
+ """Iterate over all dirs and our base path for this l10n.ini"""
+ url = urlparse(self.baseurl)
+ basepath = url2pathname(url.path)
+ if self.tld is not None:
+ yield self.tld, self.getTLDPathsTuple(basepath)
+ for dir in self.dirs:
+ yield dir, (basepath, dir)
+
+ def directories(self):
+ """Iterate over all dirs and base paths for this l10n.ini as well
+ as the included ones.
+ """
+ for t in self.dirsIter():
+ yield t
+ for child in self.children:
+ for t in child.directories():
+ yield t
+
+ def allLocales(self):
+ """Return a list of all the locales of this project"""
+ return util.parseLocales(urlopen(self.all_url).read())
+
+
+class SourceTreeConfigParser(L10nConfigParser):
+ '''Subclassing L10nConfigParser to work with just the repos
+ checked out next to each other instead of intermingled like
+ we do for real builds.
+ '''
+
+    def __init__(self, inipath, basepath, **kwargs):
+        '''Add the additional argument basepath.
+
+        basepath is used to resolve local paths via branch names.
+        Keyword arguments are forwarded to L10nConfigParser.
+        '''
+        L10nConfigParser.__init__(self, inipath, **kwargs)
+ self.basepath = basepath
+ self.tld = None
+
+ def getDepth(self, cp):
+ '''Get the depth for the comparison from the parsed l10n.ini.
+
+        Overridden to get the source depth for fennec and friends.
+ '''
+ try:
+ depth = cp.get('general', 'source-depth')
+ except:
+ try:
+ depth = cp.get('general', 'depth')
+ except:
+ depth = '.'
+ return depth
+
+ def addChild(self, title, path, orig_cp):
+ # check if there's a section with details for this include
+ # we might have to check a different repo, or even VCS
+ # for example, projects like "mail" indicate in
+ # an "include_" section where to find the l10n.ini for "toolkit"
+ details = 'include_' + title
+ if orig_cp.has_section(details):
+ branch = orig_cp.get(details, 'mozilla')
+ inipath = orig_cp.get(details, 'l10n.ini')
+ path = self.basepath + '/' + branch + '/' + inipath
+ else:
+ path = urljoin(self.baseurl, path)
+ cp = SourceTreeConfigParser(path, self.basepath, **self.defaults)
+ cp.loadConfigs()
+ self.children.append(cp)
+
+ def getTLDPathsTuple(self, basepath):
+ """Overwrite L10nConfigParser's getTLDPathsTuple to just return
+ the basepath.
+ """
+ return (basepath, )
+
+
+class File(object):
+
+ def __init__(self, fullpath, file, module=None, locale=None):
+ self.fullpath = fullpath
+ self.file = file
+ self.module = module
+ self.locale = locale
+ pass
+
+ def getContents(self):
+ # open with universal line ending support and read
+ return open(self.fullpath, 'rU').read()
+
+ def __hash__(self):
+ f = self.file
+ if self.module:
+ f = self.module + '/' + f
+ return hash(f)
+
+ def __str__(self):
+ return self.fullpath
+
+ def __cmp__(self, other):
+ if not isinstance(other, File):
+ raise NotImplementedError
+ rv = cmp(self.module, other.module)
+ if rv != 0:
+ return rv
+ return cmp(self.file, other.file)
+
+
+class EnumerateDir(object):
+ ignore_dirs = ['CVS', '.svn', '.hg', '.git']
+
+ def __init__(self, basepath, module='', locale=None, ignore_subdirs=[]):
+ self.basepath = basepath
+ self.module = module
+ self.locale = locale
+ self.ignore_subdirs = ignore_subdirs
+ pass
+
+ def cloneFile(self, other):
+ '''
+ Return a File object that this enumerator would return, if it had it.
+ '''
+ return File(os.path.join(self.basepath, other.file), other.file,
+ self.module, self.locale)
+
+ def __iter__(self):
+ # our local dirs are given as a tuple of path segments, starting off
+ # with an empty sequence for the basepath.
+ dirs = [()]
+ while dirs:
+ dir = dirs.pop(0)
+ fulldir = os.path.join(self.basepath, *dir)
+ try:
+ entries = os.listdir(fulldir)
+ except OSError:
+ # we probably just started off in a non-existing dir, ignore
+ continue
+ entries.sort()
+ for entry in entries:
+ leaf = os.path.join(fulldir, entry)
+ if os.path.isdir(leaf):
+ if entry not in self.ignore_dirs and \
+ leaf not in [os.path.join(self.basepath, d)
+ for d in self.ignore_subdirs]:
+ dirs.append(dir + (entry,))
+ continue
+ yield File(leaf, '/'.join(dir + (entry,)),
+ self.module, self.locale)
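+
+    # For illustration (hypothetical tree): EnumerateDir('/l10n/de/browser',
+    # 'browser', 'de') walks directories breadth-first and yields File
+    # objects whose .file is relative to basepath, e.g. 'chrome/browser.dtd'.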
+
+
+class LocalesWrap(object):
+
+ def __init__(self, base, module, locales, ignore_subdirs=[]):
+ self.base = base
+ self.module = module
+ self.locales = locales
+ self.ignore_subdirs = ignore_subdirs
+
+ def __iter__(self):
+ for locale in self.locales:
+ path = os.path.join(self.base, locale, self.module)
+ yield (locale, EnumerateDir(path, self.module, locale,
+ self.ignore_subdirs))
+
+
+class EnumerateApp(object):
+ reference = 'en-US'
+
+ def __init__(self, inipath, l10nbase, locales=None):
+ self.setupConfigParser(inipath)
+ self.modules = defaultdict(dict)
+ self.l10nbase = os.path.abspath(l10nbase)
+ self.filters = []
+ drive, tail = os.path.splitdrive(inipath)
+ self.addFilters(*self.config.getFilters())
+ self.locales = locales or self.config.allLocales()
+ self.locales.sort()
+
+ def setupConfigParser(self, inipath):
+ self.config = L10nConfigParser(inipath)
+ self.config.loadConfigs()
+
+ def addFilters(self, *args):
+ self.filters += args
+
+ value_map = {None: None, 'error': 0, 'ignore': 1, 'report': 2}
+
+ def filter(self, l10n_file, entity=None):
+ '''Go through all added filters, and,
+ - map "error" -> 0, "ignore" -> 1, "report" -> 2
+ - if filter.test returns a bool, map that to
+ False -> "ignore" (1), True -> "error" (0)
+ - take the max of all reported
+ '''
+ rv = 0
+ for f in reversed(self.filters):
+ try:
+ _r = f(l10n_file.module, l10n_file.file, entity)
+ except:
+ # XXX error handling
+ continue
+ if isinstance(_r, bool):
+ _r = [1, 0][_r]
+ else:
+ # map string return value to int, default to 'error',
+ # None is None
+ _r = self.value_map.get(_r, 0)
+ if _r is not None:
+ rv = max(rv, _r)
+ return ['error', 'ignore', 'report'][rv]
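+
+    # e.g. one filter returning False maps to 1 ('ignore'); if another
+    # filter returns 'report' (2), the max wins and 'report' is returned.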
+
+ def __iter__(self):
+ '''
+        Iterate over all modules; per module, return the en-US directory
+        enumerator and an iterator over all locales. Per locale, the
+        locale code and a directory enumerator are given.
+ '''
+ dirmap = dict(self.config.directories())
+ mods = dirmap.keys()
+ mods.sort()
+ for mod in mods:
+ if self.reference == 'en-US':
+ base = os.path.join(*(dirmap[mod] + ('locales', 'en-US')))
+ else:
+ base = os.path.join(self.l10nbase, self.reference, mod)
+ yield (mod, EnumerateDir(base, mod, self.reference),
+ LocalesWrap(self.l10nbase, mod, self.locales,
+ [m[len(mod)+1:] for m in mods if m.startswith(mod+'/')]))
+
+
+class EnumerateSourceTreeApp(EnumerateApp):
+ '''Subclass EnumerateApp to work on side-by-side checked out
+    repos, and to pay no attention to how the source would actually
+ be checked out for building.
+
+    It also supports applications like Fennec, which have
+ 'locales/en-US/...' in their root dir, but claim to be 'mobile'.
+ '''
+
+ def __init__(self, inipath, basepath, l10nbase, locales=None):
+ self.basepath = basepath
+ EnumerateApp.__init__(self, inipath, l10nbase, locales)
+
+ def setupConfigParser(self, inipath):
+ self.config = SourceTreeConfigParser(inipath, self.basepath)
+ self.config.loadConfigs()
+
+
+def get_base_path(mod, loc):
+ 'statics for path patterns and conversion'
+ __l10n = 'l10n/%(loc)s/%(mod)s'
+ __en_US = 'mozilla/%(mod)s/locales/en-US'
+ if loc == 'en-US':
+ return __en_US % {'mod': mod}
+ return __l10n % {'mod': mod, 'loc': loc}
+
+
+def get_path(mod, loc, leaf):
+ return get_base_path(mod, loc) + '/' + leaf
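+
+
+# For illustration (module and locale names hypothetical):
+#     get_base_path('browser', 'en-US')     # 'mozilla/browser/locales/en-US'
+#     get_base_path('browser', 'de')        # 'l10n/de/browser'
+#     get_path('browser', 'de', 'foo.dtd')  # 'l10n/de/browser/foo.dtd'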
diff --git a/python/compare-locales/compare_locales/tests/__init__.py b/python/compare-locales/compare_locales/tests/__init__.py
new file mode 100644
index 000000000..8808d78f4
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/__init__.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Mixins for parser tests.
+'''
+
+from itertools import izip_longest
+from pkg_resources import resource_string
+import re
+
+from compare_locales.parser import getParser
+
+
+class ParserTestMixin():
+ '''Utility methods used by the parser tests.
+ '''
+ filename = None
+
+ def setUp(self):
+ '''Create a parser for this test.
+ '''
+ self.parser = getParser(self.filename)
+
+ def tearDown(self):
+ 'tear down this test'
+ del self.parser
+
+ def resource(self, name):
+ testcontent = resource_string(__name__, 'data/' + name)
+ # fake universal line endings
+ testcontent = re.sub('\r\n?', lambda m: '\n', testcontent)
+ return testcontent
+
+ def _test(self, content, refs):
+ '''Helper to test the parser.
+ Compares the result of parsing content with the given list
+ of reference keys and values.
+ '''
+ self.parser.readContents(content)
+ entities = [entity for entity in self.parser]
+ for entity, ref in izip_longest(entities, refs):
+ self.assertTrue(entity, 'excess reference entity')
+ self.assertTrue(ref, 'excess parsed entity')
+ self.assertEqual(entity.val, ref[1])
+ if ref[0].startswith('_junk'):
+ self.assertTrue(re.match(ref[0], entity.key))
+ else:
+ self.assertEqual(entity.key, ref[0])
diff --git a/python/compare-locales/compare_locales/tests/data/bug121341.properties b/python/compare-locales/compare_locales/tests/data/bug121341.properties
new file mode 100644
index 000000000..b45fc9698
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/data/bug121341.properties
@@ -0,0 +1,68 @@
+# simple check
+1=abc
+# test whitespace trimming in key and value
+ 2 = xy
+# test parsing of escaped values
+3 = \u1234\t\r\n\uAB\
+\u1\n
+# test multiline properties
+4 = this is \
+multiline property
+5 = this is \
+ another multiline property
+# property with DOS EOL
+6 = test\u0036
+# test multiline property with with DOS EOL
+7 = yet another multi\
+ line propery
+# trimming should not trim escaped whitespaces
+8 = \ttest5\u0020
+# another variant of #8
+9 = \ test6\t
+# test UTF-8 encoded property/value
+10aሴb = c췯d
+# next property should test unicode escaping at the boundary of parsing buffer
+# buffer size is expected to be 4096 so add comments to get to this offset
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+################################################################################
+###############################################################################
+11 = \uABCD
diff --git a/python/compare-locales/compare_locales/tests/data/test.properties b/python/compare-locales/compare_locales/tests/data/test.properties
new file mode 100644
index 000000000..19cae9702
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/data/test.properties
@@ -0,0 +1,14 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+1=1
+ 2=2
+3 =3
+ 4 =4
+5=5
+6= 6
+7=7
+8= 8
+# this is a comment
+9=this is the first part of a continued line \
+ and here is the 2nd part
diff --git a/python/compare-locales/compare_locales/tests/data/triple-license.dtd b/python/compare-locales/compare_locales/tests/data/triple-license.dtd
new file mode 100644
index 000000000..4a28b17a6
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/data/triple-license.dtd
@@ -0,0 +1,38 @@
+<!-- ***** BEGIN LICENSE BLOCK *****
+#if 0
+ - Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ -
+ - The contents of this file are subject to the Mozilla Public License Version
+ - 1.1 (the "License"); you may not use this file except in compliance with
+ - the License. You may obtain a copy of the License at
+ - http://www.mozilla.org/MPL/
+ -
+ - Software distributed under the License is distributed on an "AS IS" basis,
+ - WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ - for the specific language governing rights and limitations under the
+ - License.
+ -
+ - The Original Code is mozilla.org Code.
+ -
+ - The Initial Developer of the Original Code is dummy.
+ - Portions created by the Initial Developer are Copyright (C) 2005
+ - the Initial Developer. All Rights Reserved.
+ -
+ - Contributor(s):
+ -
+ - Alternatively, the contents of this file may be used under the terms of
+ - either the GNU General Public License Version 2 or later (the "GPL"), or
+ - the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ - in which case the provisions of the GPL or the LGPL are applicable instead
+ - of those above. If you wish to allow use of your version of this file only
+ - under the terms of either the GPL or the LGPL, and not to allow others to
+ - use your version of this file under the terms of the MPL, indicate your
+ - decision by deleting the provisions above and replace them with the notice
+ - and other provisions required by the LGPL or the GPL. If you do not delete
+ - the provisions above, a recipient may use your version of this file under
+ - the terms of any one of the MPL, the GPL or the LGPL.
+ -
+#endif
+ - ***** END LICENSE BLOCK ***** -->
+
+<!ENTITY foo "value">
diff --git a/python/compare-locales/compare_locales/tests/test_checks.py b/python/compare-locales/compare_locales/tests/test_checks.py
new file mode 100644
index 000000000..b995d43f9
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_checks.py
@@ -0,0 +1,403 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.checks import getChecker
+from compare_locales.parser import getParser, Entity
+from compare_locales.paths import File
+
+
+class BaseHelper(unittest.TestCase):
+ file = None
+ refContent = None
+
+ def setUp(self):
+ p = getParser(self.file.file)
+ p.readContents(self.refContent)
+ self.refList, self.refMap = p.parse()
+
+ def _test(self, content, refWarnOrErrors, with_ref_file=False):
+ p = getParser(self.file.file)
+ p.readContents(content)
+ l10n = [e for e in p]
+ assert len(l10n) == 1
+ l10n = l10n[0]
+ if with_ref_file:
+ kwargs = {
+ 'reference': self.refList
+ }
+ else:
+ kwargs = {}
+ checker = getChecker(self.file, **kwargs)
+ ref = self.refList[self.refMap[l10n.key]]
+ found = tuple(checker.check(ref, l10n))
+ self.assertEqual(found, refWarnOrErrors)
+
+
+class TestProperties(BaseHelper):
+ file = File('foo.properties', 'foo.properties')
+ refContent = '''some = value
+'''
+
+ def testGood(self):
+ self._test('''some = localized''',
+ tuple())
+
+ def testMissedEscape(self):
+ self._test(r'''some = \u67ood escape, bad \escape''',
+ (('warning', 20, r'unknown escape sequence, \e',
+ 'escape'),))
+
+
+class TestPlurals(BaseHelper):
+ file = File('foo.properties', 'foo.properties')
+ refContent = '''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2
+'''
+
+ def testGood(self):
+ self._test('''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 filers
+''',
+ tuple())
+
+ def testNotUsed(self):
+ self._test('''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - Downloads;#1 filers
+''',
+ (('warning', 0, 'not all variables used in l10n',
+ 'plural'),))
+
+ def testNotDefined(self):
+ self._test('''\
+# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
+# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
+# #1 number of files
+# example: 111 files - Downloads
+downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 #3
+''',
+ (('error', 0, 'unreplaced variables in l10n', 'plural'),))
+
+
+class TestDTDs(BaseHelper):
+ file = File('foo.dtd', 'foo.dtd')
+ refContent = '''<!ENTITY foo "This is &apos;good&apos;">
+<!ENTITY width "10ch">
+<!ENTITY style "width: 20ch; height: 280px;">
+<!ENTITY minStyle "min-height: 50em;">
+<!ENTITY ftd "0">
+<!ENTITY formatPercent "This is 100&#037; correct">
+<!ENTITY some.key "K">
+'''
+
+ def testWarning(self):
+ self._test('''<!ENTITY foo "This is &not; good">
+''',
+ (('warning', (0, 0), 'Referencing unknown entity `not`',
+ 'xmlparse'),))
+ # make sure we only handle translated entity references
+ self._test(u'''<!ENTITY foo "This is &ƞǿŧ; good">
+'''.encode('utf-8'),
+ (('warning', (0, 0), u'Referencing unknown entity `ƞǿŧ`',
+ 'xmlparse'),))
+
+ def testErrorFirstLine(self):
+ self._test('''<!ENTITY foo "This is </bad> stuff">
+''',
+ (('error', (1, 10), 'mismatched tag', 'xmlparse'),))
+
+ def testErrorSecondLine(self):
+ self._test('''<!ENTITY foo "This is
+ </bad>
+stuff">
+''',
+ (('error', (2, 4), 'mismatched tag', 'xmlparse'),))
+
+ def testKeyErrorSingleAmpersand(self):
+ self._test('''<!ENTITY some.key "&">
+''',
+ (('error', (1, 1), 'not well-formed (invalid token)',
+ 'xmlparse'),))
+
+ def testXMLEntity(self):
+ self._test('''<!ENTITY foo "This is &quot;good&quot;">
+''',
+ tuple())
+
+ def testPercentEntity(self):
+ self._test('''<!ENTITY formatPercent "Another 100&#037;">
+''',
+ tuple())
+ self._test('''<!ENTITY formatPercent "Bad 100% should fail">
+''',
+ (('error', (0, 32), 'not well-formed (invalid token)',
+ 'xmlparse'),))
+
+ def testNoNumber(self):
+ self._test('''<!ENTITY ftd "foo">''',
+ (('warning', 0, 'reference is a number', 'number'),))
+
+ def testNoLength(self):
+ self._test('''<!ENTITY width "15miles">''',
+ (('error', 0, 'reference is a CSS length', 'css'),))
+
+ def testNoStyle(self):
+ self._test('''<!ENTITY style "15ch">''',
+ (('error', 0, 'reference is a CSS spec', 'css'),))
+ self._test('''<!ENTITY style "junk">''',
+ (('error', 0, 'reference is a CSS spec', 'css'),))
+
+ def testStyleWarnings(self):
+ self._test('''<!ENTITY style "width:15ch">''',
+ (('warning', 0, 'height only in reference', 'css'),))
+ self._test('''<!ENTITY style "width:15em;height:200px;">''',
+ (('warning', 0, "units for width don't match (em != ch)",
+ 'css'),))
+
+ def testNoWarning(self):
+ self._test('''<!ENTITY width "12em">''', tuple())
+ self._test('''<!ENTITY style "width:12ch;height:200px;">''', tuple())
+ self._test('''<!ENTITY ftd "0">''', tuple())
+
+
+class TestEntitiesInDTDs(BaseHelper):
+ file = File('foo.dtd', 'foo.dtd')
+ refContent = '''<!ENTITY short "This is &brandShortName;">
+<!ENTITY shorter "This is &brandShorterName;">
+<!ENTITY ent.start "Using &brandShorterName; start to">
+<!ENTITY ent.end " end">
+'''
+
+ def testOK(self):
+ self._test('''<!ENTITY ent.start "Mit &brandShorterName;">''', tuple(),
+ with_ref_file=True)
+
+ def testMismatch(self):
+ self._test('''<!ENTITY ent.start "Mit &brandShortName;">''',
+ (('warning', (0, 0),
+ 'Entity brandShortName referenced, '
+ 'but brandShorterName used in context',
+ 'xmlparse'),),
+ with_ref_file=True)
+
+ def testAcross(self):
+ self._test('''<!ENTITY ent.end "Mit &brandShorterName;">''',
+ tuple(),
+ with_ref_file=True)
+
+ def testAcrossWithMismatch(self):
+ '''If we could tell that ent.start and ent.end are one string,
+ we should warn. Sadly, we can't, so this goes without warning.'''
+ self._test('''<!ENTITY ent.end "Mit &brandShortName;">''',
+ tuple(),
+ with_ref_file=True)
+
+ def testUnknownWithRef(self):
+ self._test('''<!ENTITY ent.start "Mit &foopy;">''',
+ (('warning',
+ (0, 0),
+ 'Referencing unknown entity `foopy` '
+ '(brandShorterName used in context, '
+ 'brandShortName known)',
+ 'xmlparse'),),
+ with_ref_file=True)
+
+ def testUnknown(self):
+ self._test('''<!ENTITY ent.end "Mit &foopy;">''',
+ (('warning',
+ (0, 0),
+ 'Referencing unknown entity `foopy`'
+ ' (brandShortName, brandShorterName known)',
+ 'xmlparse'),),
+ with_ref_file=True)
+
+
+class TestAndroid(unittest.TestCase):
+ """Test Android checker
+
+ Make sure we're hitting our extra rules only if
+ we're passing in a DTD file in the embedding/android module.
+ """
+ apos_msg = u"Apostrophes in Android DTDs need escaping with \\' or " + \
+ u"\\u0027, or use \u2019, or put string in quotes."
+ quot_msg = u"Quotes in Android DTDs need escaping with \\\" or " + \
+ u"\\u0022, or put string in apostrophes."
+
+ def getEntity(self, v):
+ return Entity(v, lambda s: s, (0, len(v)), (), (0, 0), (), (),
+ (0, len(v)), ())
+
+ def getDTDEntity(self, v):
+ v = v.replace('"', '&quot;')
+ return Entity('<!ENTITY foo "%s">' % v,
+ lambda s: s,
+ (0, len(v) + 16), (), (0, 0), (), (9, 12),
+ (14, len(v) + 14), ())
+
+ def test_android_dtd(self):
+ """Testing the actual android checks. The logic is involved,
+ so this is a lot of nitty gritty detail tests.
+ """
+ f = File("embedding/android/strings.dtd", "strings.dtd",
+ "embedding/android")
+ checker = getChecker(f)
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref`', 'xmlparse'),))
+        # stray apostrophes and quotes are errors unless escaped, or unless
+        # the whole string is wrapped in the other kind of quote
+ for i in xrange(3):
+ # make sure we're catching unescaped apostrophes,
+            # try 0..5 backslashes
+ l10n = self.getDTDEntity("\\"*(2*i) + "'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2*i, self.apos_msg, 'android'),))
+ l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # make sure we don't report if apos string is quoted
+ l10n = self.getDTDEntity('"' + "\\"*(2*i) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s"
+ % (l10n.val, str(tpl)))
+ l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s"
+ % (l10n.val, str(tpl)))
+            # make sure we're catching unescaped quotes, try 0..5 backslashes
+ l10n = self.getDTDEntity("\\"*(2*i) + "\"")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2*i, self.quot_msg, 'android'),))
+ l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # make sure we don't report if quote string is single quoted
+ l10n = self.getDTDEntity("'" + "\\"*(2*i) + "\"'")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s" %
+ (l10n.val, str(tpl)))
+ l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
+ tpl = tuple(checker.check(ref, l10n))
+ self.assertEqual(tpl, (),
+ "`%s` shouldn't fail but got %s" %
+ (l10n.val, str(tpl)))
+ # check for mixed quotes and ampersands
+ l10n = self.getDTDEntity("'\"")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 0, self.apos_msg, 'android'),
+ ('error', 1, self.quot_msg, 'android')))
+ l10n = self.getDTDEntity("''\"'")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 1, self.apos_msg, 'android'),))
+ l10n = self.getDTDEntity('"\'""')
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 2, self.quot_msg, 'android'),))
+
+ # broken unicode escape
+ l10n = self.getDTDEntity("Some broken \u098 unicode")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 12, 'truncated \\uXXXX escape',
+ 'android'),))
+ # broken unicode escape, try to set the error off
+ l10n = self.getDTDEntity(u"\u9690"*14+"\u006"+" "+"\u0064")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 14, 'truncated \\uXXXX escape',
+ 'android'),))
+
+ def test_android_prop(self):
+ f = File("embedding/android/strings.properties", "strings.properties",
+ "embedding/android")
+ checker = getChecker(f)
+ # good plain string
+ ref = self.getEntity("plain string")
+ l10n = self.getEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # no dtd warning
+ ref = self.getEntity("plain string")
+ l10n = self.getEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # no report on stray ampersand
+ ref = self.getEntity("plain string")
+ l10n = self.getEntity("plain localized string with apos: '")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # report on bad printf
+ ref = self.getEntity("string with %s")
+ l10n = self.getEntity("string with %S")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('error', 0, 'argument 1 `S` should be `s`',
+ 'printf'),))
+
+ def test_non_android_dtd(self):
+ f = File("browser/strings.dtd", "strings.dtd", "browser")
+ checker = getChecker(f)
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref`', 'xmlparse'),))
+ # no report on stray ampersand
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string with apos: '")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+
+ def test_entities_across_dtd(self):
+ f = File("browser/strings.dtd", "strings.dtd", "browser")
+ p = getParser(f.file)
+ p.readContents('<!ENTITY other "some &good.ref;">')
+ ref = p.parse()
+ checker = getChecker(f, reference=ref[0])
+ # good string
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+ # dtd warning
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string &ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ (('warning', (0, 0),
+ 'Referencing unknown entity `ref` (good.ref known)',
+ 'xmlparse'),))
+ # no report on stray ampersand
+ ref = self.getDTDEntity("plain string")
+ l10n = self.getDTDEntity("plain localized string with &good.ref;")
+ self.assertEqual(tuple(checker.check(ref, l10n)),
+ ())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/compare-locales/compare_locales/tests/test_compare.py b/python/compare-locales/compare_locales/tests/test_compare.py
new file mode 100644
index 000000000..51ba7cd8c
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_compare.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales import compare
+
+
+class TestTree(unittest.TestCase):
+ '''Test the Tree utility class
+
+ Tree value classes need to be in-place editable
+ '''
+
+ def test_empty_dict(self):
+ tree = compare.Tree(dict)
+ self.assertEqual(list(tree.getContent()), [])
+ self.assertDictEqual(
+ tree.toJSON(),
+ {}
+ )
+
+ def test_disjoint_dict(self):
+ tree = compare.Tree(dict)
+ tree['one/entry']['leaf'] = 1
+ tree['two/other']['leaf'] = 2
+ self.assertEqual(
+ list(tree.getContent()),
+ [
+ (0, 'key', ('one', 'entry')),
+ (1, 'value', {'leaf': 1}),
+ (0, 'key', ('two', 'other')),
+ (1, 'value', {'leaf': 2})
+ ]
+ )
+ self.assertDictEqual(
+ tree.toJSON(),
+ {
+ 'children': [
+ ('one/entry',
+ {'value': {'leaf': 1}}
+ ),
+ ('two/other',
+ {'value': {'leaf': 2}}
+ )
+ ]
+ }
+ )
+ self.assertMultiLineEqual(
+ str(tree),
+ '''\
+one/entry
+ {'leaf': 1}
+two/other
+ {'leaf': 2}\
+'''
+ )
+
+ def test_overlapping_dict(self):
+ tree = compare.Tree(dict)
+ tree['one/entry']['leaf'] = 1
+ tree['one/other']['leaf'] = 2
+ self.assertEqual(
+ list(tree.getContent()),
+ [
+ (0, 'key', ('one',)),
+ (1, 'key', ('entry',)),
+ (2, 'value', {'leaf': 1}),
+ (1, 'key', ('other',)),
+ (2, 'value', {'leaf': 2})
+ ]
+ )
+ self.assertDictEqual(
+ tree.toJSON(),
+ {
+ 'children': [
+ ('one', {
+ 'children': [
+ ('entry',
+ {'value': {'leaf': 1}}
+ ),
+ ('other',
+ {'value': {'leaf': 2}}
+ )
+ ]
+ })
+ ]
+ }
+ )
diff --git a/python/compare-locales/compare_locales/tests/test_dtd.py b/python/compare-locales/compare_locales/tests/test_dtd.py
new file mode 100644
index 000000000..87ddcde30
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_dtd.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''Tests for the DTD parser.
+'''
+
+import unittest
+import re
+
+from compare_locales.parser import getParser
+from compare_locales.tests import ParserTestMixin
+
+
+class TestDTD(ParserTestMixin, unittest.TestCase):
+ '''Tests for the DTD Parser.'''
+ filename = 'foo.dtd'
+
+ def test_one_entity(self):
+ self._test('''<!ENTITY foo.label "stuff">''',
+ (('foo.label', 'stuff'),))
+
+ quoteContent = '''<!ENTITY good.one "one">
+<!ENTITY bad.one "bad " quote">
+<!ENTITY good.two "two">
+<!ENTITY bad.two "bad "quoted" word">
+<!ENTITY good.three "three">
+<!ENTITY good.four "good ' quote">
+<!ENTITY good.five "good 'quoted' word">
+'''
+ quoteRef = (
+ ('good.one', 'one'),
+ ('_junk_\\d_25-56$', '<!ENTITY bad.one "bad " quote">'),
+ ('good.two', 'two'),
+ ('_junk_\\d_82-119$', '<!ENTITY bad.two "bad "quoted" word">'),
+ ('good.three', 'three'),
+ ('good.four', 'good \' quote'),
+ ('good.five', 'good \'quoted\' word'),)
+
+ def test_quotes(self):
+ self._test(self.quoteContent, self.quoteRef)
+
+ def test_apos(self):
+ qr = re.compile('[\'"]', re.M)
+
+ def quot2apos(s):
+ return qr.sub(lambda m: m.group(0) == '"' and "'" or '"', s)
+
+ self._test(quot2apos(self.quoteContent),
+ map(lambda t: (t[0], quot2apos(t[1])), self.quoteRef))
+
+ def test_parsed_ref(self):
+ self._test('''<!ENTITY % fooDTD SYSTEM "chrome://brand.dtd">
+ %fooDTD;
+''',
+ (('fooDTD', '"chrome://brand.dtd"'),))
+
+ def test_trailing_comment(self):
+ self._test('''<!ENTITY first "string">
+<!ENTITY second "string">
+<!--
+<!ENTITY commented "out">
+-->
+''',
+ (('first', 'string'), ('second', 'string')))
+
+ def test_license_header(self):
+ p = getParser('foo.dtd')
+ p.readContents(self.resource('triple-license.dtd'))
+ for e in p:
+ self.assertEqual(e.key, 'foo')
+ self.assertEqual(e.val, 'value')
+ self.assert_('MPL' in p.header)
+ p.readContents('''\
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ - You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<!ENTITY foo "value">
+''')
+ for e in p:
+ self.assertEqual(e.key, 'foo')
+ self.assertEqual(e.val, 'value')
+ self.assert_('MPL' in p.header)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/compare-locales/compare_locales/tests/test_ini.py b/python/compare-locales/compare_locales/tests/test_ini.py
new file mode 100644
index 000000000..4c8cc03e1
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_ini.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+
+
+mpl2 = '''\
+; This Source Code Form is subject to the terms of the Mozilla Public
+; License, v. 2.0. If a copy of the MPL was not distributed with this file,
+; You can obtain one at http://mozilla.org/MPL/2.0/.
+'''
+
+
+class TestIniParser(ParserTestMixin, unittest.TestCase):
+
+ filename = 'foo.ini'
+
+ def testSimpleHeader(self):
+ self._test('''; This file is in the UTF-8 encoding
+[Strings]
+TitleText=Some Title
+''', (('TitleText', 'Some Title'),))
+ self.assert_('UTF-8' in self.parser.header)
+
+ def testMPL2_Space_UTF(self):
+ self._test(mpl2 + '''
+; This file is in the UTF-8 encoding
+[Strings]
+TitleText=Some Title
+''', (('TitleText', 'Some Title'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def testMPL2_Space(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+''', (('TitleText', 'Some Title'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def testMPL2_MultiSpace(self):
+ self._test(mpl2 + '''\
+
+; more comments
+
+[Strings]
+TitleText=Some Title
+''', (('TitleText', 'Some Title'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def testMPL2_JunkBeforeCategory(self):
+ self._test(mpl2 + '''\
+Junk
+[Strings]
+TitleText=Some Title
+''', (('_junk_\\d+_0-213$', mpl2 + '''\
+Junk
+[Strings]'''), ('TitleText', 'Some Title')))
+ self.assert_('MPL' not in self.parser.header)
+
+ def test_TrailingComment(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+;Stray trailing comment
+''', (('TitleText', 'Some Title'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def test_SpacedTrailingComments(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+
+;Stray trailing comment
+;Second stray comment
+
+''', (('TitleText', 'Some Title'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def test_TrailingCommentsAndJunk(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+
+;Stray trailing comment
+Junk
+;Second stray comment
+
+''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-284$', '''\
+
+;Stray trailing comment
+Junk
+;Second stray comment
+
+''')))
+ self.assert_('MPL' in self.parser.header)
+
+ def test_JunkInbetweenEntries(self):
+ self._test(mpl2 + '''
+[Strings]
+TitleText=Some Title
+
+Junk
+
+Good=other string
+''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-236$', '''\
+
+Junk'''), ('Good', 'other string')))
+ self.assert_('MPL' in self.parser.header)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/compare-locales/compare_locales/tests/test_merge.py b/python/compare-locales/compare_locales/tests/test_merge.py
new file mode 100644
index 000000000..c006edbb5
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_merge.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import os
+from tempfile import mkdtemp
+import shutil
+
+from compare_locales.parser import getParser
+from compare_locales.paths import File
+from compare_locales.compare import ContentComparer
+
+
+class ContentMixin(object):
+ maxDiff = None # we got big dictionaries to compare
+ extension = None # OVERLOAD
+
+ def reference(self, content):
+ self.ref = os.path.join(self.tmp, "en-reference" + self.extension)
+ open(self.ref, "w").write(content)
+
+ def localized(self, content):
+ self.l10n = os.path.join(self.tmp, "l10n" + self.extension)
+ open(self.l10n, "w").write(content)
+
+
+class TestProperties(unittest.TestCase, ContentMixin):
+ extension = '.properties'
+
+ def setUp(self):
+ self.tmp = mkdtemp()
+ os.mkdir(os.path.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def testGood(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal""")
+ self.localized("""foo = lFoo
+bar = lBar
+eff = lEff
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 3
+ }},
+ 'details': {}
+ }
+ )
+ self.assert_(not os.path.exists(os.path.join(cc.merge_stage,
+ 'l10n.properties')))
+
+ def testMissing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = barVal
+eff = effVal""")
+ self.localized("""bar = lBar
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 1, 'missing': 2
+ }},
+ 'details': {
+ 'children': [
+ ('l10n.properties',
+ {'value': {'missingEntity': [u'eff', u'foo']}}
+ )
+ ]}
+ }
+ )
+ mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ [m, n] = p.parse()
+ self.assertEqual(map(lambda e: e.key, m), ["bar", "eff", "foo"])
+
+ def testError(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+bar = %d barVal
+eff = effVal""")
+ self.localized("""bar = %S lBar
+eff = leffVal
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 2, 'errors': 1, 'missing': 1
+ }},
+ 'details': {
+ 'children': [
+ ('l10n.properties',
+ {'value': {
+ 'error': [u'argument 1 `S` should be `d` '
+ u'at line 1, column 6 for bar'],
+ 'missingEntity': [u'foo']}}
+ )
+ ]}
+ }
+ )
+ mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ [m, n] = p.parse()
+ self.assertEqual([e.key for e in m], ["eff", "foo", "bar"])
+ self.assertEqual(m[n['bar']].val, '%d barVal')
+
+ def testObsolete(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""foo = fooVal
+eff = effVal""")
+ self.localized("""foo = fooVal
+other = obsolete
+eff = leffVal
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.properties", ""),
+ File(self.l10n, "l10n.properties", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 1, 'obsolete': 1, 'unchanged': 1
+ }},
+ 'details': {
+ 'children': [
+ ('l10n.properties',
+ {'value': {'obsoleteEntity': [u'other']}})]},
+ }
+ )
+
+
+class TestDTD(unittest.TestCase, ContentMixin):
+ extension = '.dtd'
+
+ def setUp(self):
+ self.tmp = mkdtemp()
+ os.mkdir(os.path.join(self.tmp, "merge"))
+
+ def tearDown(self):
+ shutil.rmtree(self.tmp)
+ del self.tmp
+
+ def testGood(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'lFoo'>
+<!ENTITY bar 'lBar'>
+<!ENTITY eff 'lEff'>
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 3
+ }},
+ 'details': {}
+ }
+ )
+ self.assert_(
+ not os.path.exists(os.path.join(cc.merge_stage, 'l10n.dtd')))
+
+ def testMissing(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY bar 'lBar'>
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'changed': 1, 'missing': 2
+ }},
+ 'details': {
+ 'children': [
+ ('l10n.dtd',
+ {'value': {'missingEntity': [u'eff', u'foo']}}
+ )
+ ]}
+ }
+ )
+ mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ [m, n] = p.parse()
+        self.assertEqual([e.key for e in m], ["bar", "eff", "foo"])
+
+ def testJunk(self):
+ self.assertTrue(os.path.isdir(self.tmp))
+ self.reference("""<!ENTITY foo 'fooVal'>
+<!ENTITY bar 'barVal'>
+<!ENTITY eff 'effVal'>""")
+ self.localized("""<!ENTITY foo 'fooVal'>
+<!ENTY bar 'gimmick'>
+<!ENTITY eff 'effVal'>
+""")
+ cc = ContentComparer()
+ cc.set_merge_stage(os.path.join(self.tmp, "merge"))
+ cc.compare(File(self.ref, "en-reference.dtd", ""),
+ File(self.l10n, "l10n.dtd", ""))
+ self.assertDictEqual(
+ cc.observer.toJSON(),
+ {'summary':
+ {None: {
+ 'errors': 1, 'missing': 1, 'unchanged': 2
+ }},
+ 'details': {
+ 'children': [
+ ('l10n.dtd',
+ {'value': {
+ 'error': [u'Unparsed content "<!ENTY bar '
+ u'\'gimmick\'>" at 23-44'],
+ 'missingEntity': [u'bar']}}
+ )
+ ]}
+ }
+ )
+ mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
+ self.assertTrue(os.path.isfile(mergefile))
+ p = getParser(mergefile)
+ p.readFile(mergefile)
+ [m, n] = p.parse()
+        self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/compare-locales/compare_locales/tests/test_properties.py b/python/compare-locales/compare_locales/tests/test_properties.py
new file mode 100644
index 000000000..331a1a57c
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_properties.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales.tests import ParserTestMixin
+
+
+class TestPropertiesParser(ParserTestMixin, unittest.TestCase):
+
+ filename = 'foo.properties'
+
+ def testBackslashes(self):
+ self._test(r'''one_line = This is one line
+two_line = This is the first \
+of two lines
+one_line_trailing = This line ends in \\
+and has junk
+two_lines_triple = This line is one of two and ends in \\\
+and still has another line coming
+''', (
+ ('one_line', 'This is one line'),
+ ('two_line', u'This is the first of two lines'),
+ ('one_line_trailing', u'This line ends in \\'),
+ ('_junk_\\d+_113-126$', 'and has junk\n'),
+ ('two_lines_triple', 'This line is one of two and ends in \\'
+ 'and still has another line coming')))
+
+ def testProperties(self):
+ # port of netwerk/test/PropertiesTest.cpp
+ self.parser.readContents(self.resource('test.properties'))
+ ref = ['1', '2', '3', '4', '5', '6', '7', '8',
+ 'this is the first part of a continued line '
+ 'and here is the 2nd part']
+ i = iter(self.parser)
+ for r, e in zip(ref, i):
+ self.assertEqual(e.val, r)
+
+ def test_bug121341(self):
+ # port of xpcom/tests/unit/test_bug121341.js
+ self.parser.readContents(self.resource('bug121341.properties'))
+ ref = ['abc', 'xy', u"\u1234\t\r\n\u00AB\u0001\n",
+ "this is multiline property",
+ "this is another multiline property", u"test\u0036",
+ "yet another multiline propery", u"\ttest5\u0020", " test6\t",
+ u"c\uCDEFd", u"\uABCD"]
+ i = iter(self.parser)
+ for r, e in zip(ref, i):
+ self.assertEqual(e.val, r)
+
+ def test_comment_in_multi(self):
+ self._test(r'''bar=one line with a \
+# part that looks like a comment \
+and an end''', (('bar', 'one line with a # part that looks like a comment '
+ 'and an end'),))
+
+ def test_license_header(self):
+ self._test('''\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+foo=value
+''', (('foo', 'value'),))
+ self.assert_('MPL' in self.parser.header)
+
+ def test_escapes(self):
+ self.parser.readContents(r'''
+# unicode escapes
+zero = some \unicode
+one = \u0
+two = \u41
+three = \u042
+four = \u0043
+five = \u0044a
+six = \a
+seven = \n\r\t\\
+''')
+ ref = ['some unicode', chr(0), 'A', 'B', 'C', 'Da', 'a', '\n\r\t\\']
+ for r, e in zip(ref, self.parser):
+ self.assertEqual(e.val, r)
+
+ def test_trailing_comment(self):
+ self._test('''first = string
+second = string
+
+#
+#commented out
+''', (('first', 'string'), ('second', 'string')))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/compare-locales/compare_locales/tests/test_util.py b/python/compare-locales/compare_locales/tests/test_util.py
new file mode 100644
index 000000000..fd2d2c92b
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_util.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales import util
+
+
+class ParseLocalesTest(unittest.TestCase):
+ def test_empty(self):
+ self.assertEquals(util.parseLocales(''), [])
+
+ def test_all(self):
+ self.assertEquals(util.parseLocales('''af
+de'''), ['af', 'de'])
+
+ def test_shipped(self):
+ self.assertEquals(util.parseLocales('''af
+ja win mac
+de'''), ['af', 'de', 'ja'])
+
+ def test_sparse(self):
+ self.assertEquals(util.parseLocales('''
+af
+
+de
+
+'''), ['af', 'de'])
diff --git a/python/compare-locales/compare_locales/tests/test_webapps.py b/python/compare-locales/compare_locales/tests/test_webapps.py
new file mode 100644
index 000000000..2f1223649
--- /dev/null
+++ b/python/compare-locales/compare_locales/tests/test_webapps.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from compare_locales import webapps
+
+
+class TestFileComparison(unittest.TestCase):
+
+ def mock_FileComparison(self, mock_listdir):
+ class Target(webapps.FileComparison):
+ def _listdir(self):
+ return mock_listdir()
+ return Target('.', 'en-US')
+
+ def test_just_reference(self):
+ def _listdir():
+ return ['my_app.en-US.properties']
+ filecomp = self.mock_FileComparison(_listdir)
+ filecomp.files()
+ self.assertEqual(filecomp.locales(), [])
+ self.assertEqual(filecomp._reference.keys(), ['my_app'])
+ file_ = filecomp._reference['my_app']
+ self.assertEqual(file_.file, 'locales/my_app.en-US.properties')
+
+ def test_just_locales(self):
+ def _listdir():
+ return ['my_app.ar.properties',
+ 'my_app.sr-Latn.properties',
+ 'my_app.sv-SE.properties',
+ 'my_app.po_SI.properties']
+ filecomp = self.mock_FileComparison(_listdir)
+ filecomp.files()
+ self.assertEqual(filecomp.locales(),
+ ['ar', 'sr-Latn', 'sv-SE'])
+ self.assertEqual(filecomp._files['ar'].keys(), ['my_app'])
+ file_ = filecomp._files['ar']['my_app']
+ self.assertEqual(file_.file, 'locales/my_app.ar.properties')
diff --git a/python/compare-locales/compare_locales/util.py b/python/compare-locales/compare_locales/util.py
new file mode 100644
index 000000000..71eadd874
--- /dev/null
+++ b/python/compare-locales/compare_locales/util.py
@@ -0,0 +1,11 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file is shared between compare-locales and locale-inspector
+# test_util is in compare-locales only, for the sake of easy
+# development.
+
+
+def parseLocales(content):
+ return sorted(l.split()[0] for l in content.splitlines() if l)
diff --git a/python/compare-locales/compare_locales/webapps.py b/python/compare-locales/compare_locales/webapps.py
new file mode 100644
index 000000000..42f5b5657
--- /dev/null
+++ b/python/compare-locales/compare_locales/webapps.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''gaia-style web apps support
+
+This variant supports manifest.webapp localization as well as
+.properties files with a naming scheme of locales/foo.*.properties.
+'''
+
+from collections import defaultdict
+import json
+import os
+import os.path
+import re
+
+from compare_locales.paths import File, EnumerateDir
+from compare_locales.compare import AddRemove, ContentComparer
+
+
+class WebAppCompare(object):
+ '''For a given directory, analyze
+ /manifest.webapp
+ /locales/*.*.properties
+
+ Deduce the present locale codes.
+ '''
+ ignore_dirs = EnumerateDir.ignore_dirs
+ reference_locale = 'en-US'
+
+ def __init__(self, basedir):
+ '''Constructor
+ :param basedir: Directory of the web app to inspect
+ '''
+ self.basedir = basedir
+ self.manifest = Manifest(basedir, self.reference_locale)
+ self.files = FileComparison(basedir, self.reference_locale)
+ self.watcher = None
+
+ def compare(self, locales):
+ '''Compare the manifest.webapp and the locales/*.*.properties
+ '''
+ if not locales:
+ locales = self.locales()
+ self.manifest.compare(locales)
+ self.files.compare(locales)
+
+ def setWatcher(self, watcher):
+ self.watcher = watcher
+ self.manifest.watcher = watcher
+ self.files.watcher = watcher
+
+ def locales(self):
+ '''Inspect files on disk to find present languages.
+ :rtype: List of locales, sorted, including reference.
+ '''
+ locales = set(self.manifest.strings.keys())
+ locales.update(self.files.locales())
+ locales = list(sorted(locales))
+ return locales
+
+
+class Manifest(object):
+ '''Class that helps with parsing and inspection of manifest.webapp.
+ '''
+
+ def __init__(self, basedir, reference_locale):
+ self.file = File(os.path.join(basedir, 'manifest.webapp'),
+ 'manifest.webapp')
+ self.reference_locale = reference_locale
+ self._strings = None
+ self.watcher = None
+
+ @property
+ def strings(self):
+ if self._strings is None:
+ self._strings = self.load_and_parse()
+ return self._strings
+
+ def load_and_parse(self):
+ try:
+ manifest = json.load(open(self.file.fullpath))
+ except (ValueError, IOError), e:
+ if self.watcher:
+ self.watcher.notify('error', self.file, str(e))
+ return False
+ return self.extract_manifest_strings(manifest)
+
+ def extract_manifest_strings(self, manifest_fragment):
+ '''Extract localizable strings from a manifest dict.
+ This method is recursive, and returns a two-level dict,
+ first level being locale codes, second level being generated
+ key and localized value. Keys are generated by concatenating
+ each level in the json with a ".".
+ '''
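+        # For illustration (hypothetical fragment, not from the source):
+        #     {'name': 'App', 'locales': {'de': {'name': 'Anwendung'}}}
+        # yields {'de': {'locales.AB_CD.name': 'Anwendung'}}; compare_strings
+        # later substitutes the real locale code for the AB_CD placeholder.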
+ rv = defaultdict(dict)
+ localizable = manifest_fragment.pop('locales', {})
+ if localizable:
+ for locale, keyvalue in localizable.iteritems():
+ for key, value in keyvalue.iteritems():
+ key = '.'.join(['locales', 'AB_CD', key])
+ rv[locale][key] = value
+ for key, sub_manifest in manifest_fragment.iteritems():
+ if not isinstance(sub_manifest, dict):
+ continue
+ subdict = self.extract_manifest_strings(sub_manifest)
+ if subdict:
+                for locale, keyvalue in subdict.iteritems():
+ rv[locale].update((key + '.' + subkey, value)
+ for subkey, value
+ in keyvalue.iteritems())
+ return rv
+
+ def compare(self, locales):
+ strings = self.strings
+ if not strings:
+ return
+ # create a copy so that we can mock around with it
+ strings = strings.copy()
+ reference = strings.pop(self.reference_locale)
+ for locale in locales:
+ if locale == self.reference_locale:
+ continue
+ self.compare_strings(reference,
+ strings.get(locale, {}),
+ locale)
+
+ def compare_strings(self, reference, l10n, locale):
+ add_remove = AddRemove()
+ add_remove.set_left(sorted(reference.keys()))
+ add_remove.set_right(sorted(l10n.keys()))
+ missing = obsolete = changed = unchanged = 0
+ for op, item_or_pair in add_remove:
+ if op == 'equal':
+ if reference[item_or_pair[0]] == l10n[item_or_pair[1]]:
+ unchanged += 1
+ else:
+ changed += 1
+ else:
+ key = item_or_pair.replace('.AB_CD.',
+ '.%s.' % locale)
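+                # e.g. 'locales.AB_CD.name' -> 'locales.de.name'
+                # for locale 'de'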
+ if op == 'add':
+ # obsolete entry
+ obsolete += 1
+ self.watcher.notify('obsoleteEntity', self.file, key)
+ else:
+ # missing entry
+ missing += 1
+ self.watcher.notify('missingEntity', self.file, key)
+
+
+class FileComparison(object):
+ '''Compare the locales/*.*.properties files inside a webapp.
+ '''
+ prop = re.compile('(?P<base>.*)\\.'
+ '(?P<locale>[a-zA-Z]+(?:-[a-zA-Z]+)*)'
+ '\\.properties$')
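+    # e.g. 'my_app.sr-Latn.properties' matches with base 'my_app' and
+    # locale 'sr-Latn'; 'my_app.po_SI.properties' does not match.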
+
+ def __init__(self, basedir, reference_locale):
+ self.basedir = basedir
+ self.reference_locale = reference_locale
+ self.watcher = None
+ self._reference = self._files = None
+
+ def locales(self):
+ '''Get the locales present in the webapp
+ '''
+ self.files()
+ locales = self._files.keys()
+ locales.sort()
+ return locales
+
+ def compare(self, locales):
+ self.files()
+ for locale in locales:
+ l10n = self._files[locale]
+ filecmp = AddRemove()
+ filecmp.set_left(sorted(self._reference.keys()))
+ filecmp.set_right(sorted(l10n.keys()))
+ for op, item_or_pair in filecmp:
+ if op == 'equal':
+ self.watcher.compare(self._reference[item_or_pair[0]],
+ l10n[item_or_pair[1]])
+ elif op == 'add':
+ # obsolete file
+ self.watcher.remove(l10n[item_or_pair])
+ else:
+ # missing file
+ _path = '.'.join([item_or_pair, locale, 'properties'])
+ missingFile = File(
+ os.path.join(self.basedir, 'locales', _path),
+ 'locales/' + _path)
+ self.watcher.add(self._reference[item_or_pair],
+ missingFile)
+
+ def files(self):
+ '''Read the list of locales from disk.
+ '''
+ if self._reference:
+ return
+ self._reference = {}
+ self._files = defaultdict(dict)
+ path_list = self._listdir()
+ for path in path_list:
+ match = self.prop.match(path)
+ if match is None:
+ continue
+ locale = match.group('locale')
+ if locale == self.reference_locale:
+ target = self._reference
+ else:
+ target = self._files[locale]
+ fullpath = os.path.join(self.basedir, 'locales', path)
+ target[match.group('base')] = File(fullpath, 'locales/' + path)
+
+ def _listdir(self):
+ 'Monkey-patch this for testing.'
+ return os.listdir(os.path.join(self.basedir, 'locales'))
+
+
+def compare_web_app(basedir, locales, other_observer=None):
+    '''Compare a gaia-style web app.
+
+ Optional arguments are:
+    - other_observer. An object implementing
+ notify(category, _file, data)
+ The return values of that callback are ignored.
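+
+    A usage sketch (hypothetical basedir):
+
+      observer = compare_web_app('/apps/myapp', ['de', 'fr'])
+      print observer.serialize()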
+ '''
+ comparer = ContentComparer()
+ if other_observer is not None:
+ comparer.add_observer(other_observer)
+ webapp_comp = WebAppCompare(basedir)
+ webapp_comp.setWatcher(comparer)
+ webapp_comp.compare(locales)
+ return comparer.observer
diff --git a/python/compare-locales/docs/glossary.rst b/python/compare-locales/docs/glossary.rst
new file mode 100644
index 000000000..e89839b16
--- /dev/null
+++ b/python/compare-locales/docs/glossary.rst
@@ -0,0 +1,26 @@
+========
+Glossary
+========
+
+.. glossary::
+ :sorted:
+
+ Localization
+ The process of creating content in a native language, including
+ translation, but also customizations like Search.
+
+ Localizability
+ Enabling a piece of software to be localized. This is mostly
+ externalizing English strings, and writing build support to
+ pick up localized search engines etc.
+
+ L10n
+ *Numeronym* for Localization, *L*, 10 chars, *n*
+
+ L12y
+ Numeronym for Localizability
+
+ l10n-merge
+        Nickname for the process of merging ``en-US`` and a particular
+        localization into one joint artifact without any missing strings,
+        and without technical errors, as far as possible.
diff --git a/python/compare-locales/docs/index.rst b/python/compare-locales/docs/index.rst
new file mode 100644
index 000000000..925ca0f88
--- /dev/null
+++ b/python/compare-locales/docs/index.rst
@@ -0,0 +1,191 @@
+============
+Localization
+============
+
+.. toctree::
+ :maxdepth: 1
+
+ glossary
+
+The documentation here is targeted at developers writing localizable code
+for Firefox and Firefox for Android, as well as Thunderbird and SeaMonkey.
+
+If you haven't dealt with localization in gecko code before, it's a good
+idea to check the :doc:`./glossary` for what localization is, and which terms
+we use for what.
+
+Exposing strings
+----------------
+
+Localizers only handle a few file formats in well-known locations in the
+source tree.
+
+The locations are in directories like
+
+ :file:`browser/`\ ``locales/en-US/``\ :file:`subdir/file.ext`
+
+The first thing to note is that only files beneath :file:`locales/en-US` are
+exposed to localizers. The second thing to note is that only a few directories
+are exposed. Which directories are exposed is defined in files called
+``l10n.ini``, which are at a
+`few places <https://dxr.mozilla.org/mozilla-central/search?q=path%3Al10n.ini&redirect=true>`_
+in the source code.
+
+An example looks like this
+
+.. code-block:: ini
+
+ [general]
+ depth = ../..
+
+ [compare]
+ dirs = browser
+ browser/branding/official
+
+ [includes]
+ toolkit = toolkit/locales/l10n.ini
+
+This tells the l10n infrastructure three things: Resolve the paths against the
+directory two levels up, include files in :file:`browser/locales/en-US` and
+:file:`browser/branding/official/locales/en-US`, and load more data from
+:file:`toolkit/locales/l10n.ini`.
+
+For projects like Thunderbird and SeaMonkey in ``comm-central``, additional
+data needs to be provided when including an ``l10n.ini`` from a different
+repository:
+
+.. code-block:: ini
+
+ [include_toolkit]
+ type = hg
+ mozilla = mozilla-central
+ repo = http://hg.mozilla.org/
+ l10n.ini = toolkit/locales/l10n.ini
+
+This tells the l10n pieces where to find the repository, and where inside
+that repository the ``l10n.ini`` file is. This is needed because for local
+builds, :file:`mail/locales/l10n.ini` references
+:file:`mozilla/toolkit/locales/l10n.ini`, which is where the comm-central
+build setup expects toolkit to be.
+
+Now that the directories exposed to l10n are known, we can talk about the
+supported file formats.
+
+File formats
+------------
+
+This is just a quick overview; please check the
+`XUL Tutorial <https://developer.mozilla.org/docs/Mozilla/Tech/XUL/Tutorial/Localization>`_
+for an in-depth tour.
+
+The following file formats are known to the l10n tool chains:
+
+DTD
+ Used in XUL and XHTML. Also for Android native strings.
+Properties
+ Used from JavaScript and C++. When used from js, also comes with
+ `plural support <https://developer.mozilla.org/docs/Mozilla/Localization/Localization_and_Plurals>`_.
+ini
+ Used by the crashreporter and updater, avoid if possible.
+foo.defines
+  Used during builds, for example to create :file:`install.rdf` for
+  language packs.
+
+Adding new formats involves changing various different tools, and is strongly
+discouraged.
+
+Exceptions
+----------
+Generally, anything that exists in ``en-US`` needs a one-to-one mapping in
+all localizations. There are a few cases where that's not wanted, notably
+around search settings and spell-checking dictionaries.
+
+To enable tools to adjust to those exceptions, there's a python-coded
+:py:mod:`filter.py`, implementing :py:func:`test`, with the following
+signature
+
+.. code-block:: python
+
+ def test(mod, path, entity = None):
+ if does_not_matter:
+ return "ignore"
+ if show_but_do_not_merge:
+ return "report"
+ # default behavior, localizer or build need to do something
+ return "error"
+
+For any missing file, this function is called with ``mod`` being
+the *module*, and ``path`` being the relative path inside
+:file:`locales/en-US`. The module is the top-level dir as referenced in
+:file:`l10n.ini`.
+
+For missing strings, the :py:data:`entity` parameter is the key of the string
+in the en-US file.
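+
+As an illustration, a hypothetical :py:mod:`filter.py` (a sketch, not taken
+from an actual in-tree file) could ignore locale-specific dictionaries and
+merely report search engine lists:
+
+.. code-block:: python
+
+    def test(mod, path, entity=None):
+        if mod == 'extensions/spellcheck':
+            # locales ship their own dictionaries
+            return "ignore"
+        if path == 'searchplugins/list.txt':
+            # show in reports, but don't merge the en-US engines
+            return "report"
+        return "error"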
+
+l10n-merge
+----------
+
+Gecko doesn't support fallback from a localization to ``en-US`` at runtime.
+Thus, the build needs to ensure that the localization as it's built into
+the package has all required strings, and that the strings don't contain
+errors. To ensure that, we're *merging* the localization and ``en-US``
+at build time, nicknamed :term:`l10n-merge`.
+
+The process is usually triggered via
+
+.. code-block:: bash
+
+ $obj-dir/browser/locales> make merge-de LOCALE_MERGEDIR=$PWD/merge-de
+
+It creates another directory in the object dir, :file:`merge-ab-CD`, in
+which the modified files are stored. The actual repackaging process looks
+for localized files in the merge dir first, then in the localization, and
+finally in ``en-US``. Thus, for the ``de`` localization of
+:file:`browser/locales/en-US/chrome/browser/browser.dtd`, it checks
+
+1. :file:`$objdir/browser/locales/merge-de/browser/chrome/browser/browser.dtd`
+2. :file:`$(LOCALE_BASEDIR)/de/browser/chrome/browser/browser.dtd`
+3. :file:`browser/locales/en-US/chrome/browser/browser.dtd`
+
+and will include the first of those files it finds.
+
+l10n-merge modifies a file if it supports the particular file type, and there
+are missing strings which are not filtered out, or if an existing string
+shows an error. See the Checks section below for details.
+
+Checks
+------
+
+As part of the build and other localization tool chains, we run a variety
+of source-based checks. Think of them as linters.
+
+The suite of checks is usually determined by file type, i.e., there's a
+suite of checks for DTD files, one for properties files, and so on. The
+Android-specific checks are an exception.
+
+Android
+^^^^^^^
+
+For Android, we need to localize :file:`strings.xml`. We do so via DTD
+files, which is mostly OK. But the strings inside the XML file have to
+satisfy additional constraints about quotes etc. that are not part of XML.
+There's probably some historic background on why things are the way they are.
+
+The Android-specific checks are enabled for DTD files that are in
+:file:`mobile/android/base/locales/en-US/`.
+
+Localizations
+-------------
+
+Now that we've talked in depth about how to expose content to localizers,
+where are the localizations?
+
+We host a mercurial repository per locale and per branch. Most of our
+localizations only work starting with aurora, so the bulk of the localizations
+is found on https://hg.mozilla.org/releases/l10n/mozilla-aurora/. We have
+several localizations continuously working with mozilla-central, those
+repositories are on https://hg.mozilla.org/l10n-central/.
+
+You can search inside our localized files on
+`Transvision <https://transvision.mozfr.org/>`_ and
+http://dxr.mozilla.org/l10n-mozilla-aurora/.
diff --git a/python/compare-locales/mach_commands.py b/python/compare-locales/mach_commands.py
new file mode 100644
index 000000000..7be6a50e7
--- /dev/null
+++ b/python/compare-locales/mach_commands.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from mozbuild.base import (
+ MachCommandBase,
+)
+
+import mozpack.path as mozpath
+
+
+MERGE_HELP = '''Directory to merge to. Will be removed before running
+the comparison. Default: $(OBJDIR)/$(MOZ_BUILD_APP)/locales/merge-$(AB_CD)
+'''.lstrip()
+
+
+@CommandProvider
+class CompareLocales(MachCommandBase):
+ """Run compare-locales."""
+
+ @Command('compare-locales', category='testing',
+ description='Run source checks on a localization.')
+ @CommandArgument('--l10n-ini',
+ help='l10n.ini describing the app. ' +
+ 'Default: $(MOZ_BUILD_APP)/locales/l10n.ini')
+ @CommandArgument('--l10n-base',
+ help='Directory with the localizations. ' +
+ 'Default: $(L10NBASEDIR)')
+ @CommandArgument('--merge-dir',
+ help=MERGE_HELP)
+ @CommandArgument('locales', nargs='+', metavar='ab_CD',
+ help='Locale codes to compare')
+ def compare(self, l10n_ini=None, l10n_base=None, merge_dir=None,
+ locales=None):
+ from compare_locales.paths import EnumerateApp
+ from compare_locales.compare import compareApp
+
+ # check if we're configured and use defaults from there
+ # otherwise, error early
+ try:
+ self.substs # explicitly check
+ if not l10n_ini:
+ l10n_ini = mozpath.join(
+ self.topsrcdir,
+ self.substs['MOZ_BUILD_APP'],
+ 'locales', 'l10n.ini'
+ )
+ if not l10n_base:
+ l10n_base = mozpath.join(
+ self.topsrcdir,
+ self.substs['L10NBASEDIR']
+ )
+ except Exception:
+ if not l10n_ini or not l10n_base:
+ print('Specify --l10n-ini and --l10n-base or run configure.')
+ return 1
+
+ if not merge_dir:
+ try:
+ # self.substs is raising an Exception if we're not configured
+ # don't merge if we're not
+ merge_dir = mozpath.join(
+ self.topobjdir,
+ self.substs['MOZ_BUILD_APP'],
+ 'locales', 'merge-dir-{ab_CD}'
+ )
+ except Exception:
+ pass
+
+ app = EnumerateApp(l10n_ini, l10n_base, locales)
+ observer = compareApp(app, merge_stage=merge_dir,
+ clobber=True)
+ print(observer.serialize())
diff --git a/python/compare-locales/moz.build b/python/compare-locales/moz.build
new file mode 100644
index 000000000..f772ab620
--- /dev/null
+++ b/python/compare-locales/moz.build
@@ -0,0 +1,16 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files('compare_locales/**'):
+ BUG_COMPONENT = ('Localization Infrastructure and Tools', 'compare-locales')
+with Files('docs/**'):
+ BUG_COMPONENT = ('Mozilla Localizations', 'Documentation')
+
+# SPHINX_PYTHON_PACKAGE_DIRS += [
+# 'compare_locales',
+# ]
+
+SPHINX_TREES['.'] = 'docs'
diff --git a/python/configobj/PKG-INFO b/python/configobj/PKG-INFO
new file mode 100644
index 000000000..71c47b907
--- /dev/null
+++ b/python/configobj/PKG-INFO
@@ -0,0 +1,47 @@
+Metadata-Version: 1.0
+Name: configobj
+Version: 4.7.2
+Summary: Config file reading, writing and validation.
+Home-page: http://www.voidspace.org.uk/python/configobj.html
+Author: Michael Foord & Nicola Larosa
+Author-email: fuzzyman@voidspace.org.uk
+License: UNKNOWN
+Download-URL: http://www.voidspace.org.uk/downloads/configobj-4.7.2.zip
+Description: **ConfigObj** is a simple but powerful config file reader and writer: an *ini
+ file round tripper*. Its main feature is that it is very easy to use, with a
+ straightforward programmer's interface and a simple syntax for config files.
+ It has lots of other features though :
+
+ * Nested sections (subsections), to any level
+ * List values
+ * Multiple line values
+ * Full Unicode support
+ * String interpolation (substitution)
+ * Integrated with a powerful validation system
+
+ - including automatic type checking/conversion
+ - and allowing default values
+ - repeated sections
+
+ * All comments in the file are preserved
+ * The order of keys/sections is preserved
+ * Powerful ``unrepr`` mode for storing/retrieving Python data-types
+
+ | Release 4.7.2 fixes several bugs in 4.7.1
+ | Release 4.7.1 fixes a bug with the deprecated options keyword in
+ | 4.7.0.
+ | Release 4.7.0 improves performance adds features for validation and
+ | fixes some bugs.
+Keywords: config,ini,dictionary,application,admin,sysadmin,configuration,validation
+Platform: UNKNOWN
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.3
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/python/configobj/configobj.py b/python/configobj/configobj.py
new file mode 100644
index 000000000..c1f6e6df8
--- /dev/null
+++ b/python/configobj/configobj.py
@@ -0,0 +1,2468 @@
+# configobj.py
+# A config file reader/writer that supports nested sections in config files.
+# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# nico AT tekNico DOT net
+
+# ConfigObj 4
+# http://www.voidspace.org.uk/python/configobj.html
+
+# Released subject to the BSD License
+# Please see http://www.voidspace.org.uk/python/license.shtml
+
+# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
+# For information about bugfixes, updates and support, please join the
+# ConfigObj mailing list:
+# http://lists.sourceforge.net/lists/listinfo/configobj-develop
+# Comments, suggestions and bug reports welcome.
+
+from __future__ import generators
+
+import os
+import re
+import sys
+
+from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
+
+
+# imported lazily to avoid startup performance hit if it isn't used
+compiler = None
+
+# A dictionary mapping BOM to
+# the encoding to decode with, and what to set the
+# encoding attribute to.
+BOMS = {
+ BOM_UTF8: ('utf_8', None),
+ BOM_UTF16_BE: ('utf16_be', 'utf_16'),
+ BOM_UTF16_LE: ('utf16_le', 'utf_16'),
+ BOM_UTF16: ('utf_16', 'utf_16'),
+ }
+# All legal variants of the BOM codecs.
+# TODO: the list of aliases is not meant to be exhaustive, is there a
+# better way?
+BOM_LIST = {
+ 'utf_16': 'utf_16',
+ 'u16': 'utf_16',
+ 'utf16': 'utf_16',
+ 'utf-16': 'utf_16',
+ 'utf16_be': 'utf16_be',
+ 'utf_16_be': 'utf16_be',
+ 'utf-16be': 'utf16_be',
+ 'utf16_le': 'utf16_le',
+ 'utf_16_le': 'utf16_le',
+ 'utf-16le': 'utf16_le',
+ 'utf_8': 'utf_8',
+ 'u8': 'utf_8',
+ 'utf': 'utf_8',
+ 'utf8': 'utf_8',
+ 'utf-8': 'utf_8',
+ }
+
+# Map of encodings to the BOM to write.
+BOM_SET = {
+ 'utf_8': BOM_UTF8,
+ 'utf_16': BOM_UTF16,
+ 'utf16_be': BOM_UTF16_BE,
+ 'utf16_le': BOM_UTF16_LE,
+ None: BOM_UTF8
+ }
+
+
+def match_utf8(encoding):
+ return BOM_LIST.get(encoding.lower()) == 'utf_8'
+
+
+# Quote strings used for writing values
+squot = "'%s'"
+dquot = '"%s"'
+noquot = "%s"
+wspace_plus = ' \r\n\v\t\'"'
+tsquot = '"""%s"""'
+tdquot = "'''%s'''"
+
+# Sentinel for use in getattr calls to replace hasattr
+MISSING = object()
+
+__version__ = '4.7.2'
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for entry in iterable:
+ if entry:
+ return True
+ return False
+
+
+__all__ = (
+ '__version__',
+ 'DEFAULT_INDENT_TYPE',
+ 'DEFAULT_INTERPOLATION',
+ 'ConfigObjError',
+ 'NestingError',
+ 'ParseError',
+ 'DuplicateError',
+ 'ConfigspecError',
+ 'ConfigObj',
+ 'SimpleVal',
+ 'InterpolationError',
+ 'InterpolationLoopError',
+ 'MissingInterpolationOption',
+ 'RepeatSectionError',
+ 'ReloadError',
+ 'UnreprError',
+ 'UnknownType',
+ 'flatten_errors',
+ 'get_extra_values'
+)
+
+DEFAULT_INTERPOLATION = 'configparser'
+DEFAULT_INDENT_TYPE = ' '
+MAX_INTERPOL_DEPTH = 10
+
+OPTION_DEFAULTS = {
+ 'interpolation': True,
+ 'raise_errors': False,
+ 'list_values': True,
+ 'create_empty': False,
+ 'file_error': False,
+ 'configspec': None,
+ 'stringify': True,
+ # option may be set to one of ('', ' ', '\t')
+ 'indent_type': None,
+ 'encoding': None,
+ 'default_encoding': None,
+ 'unrepr': False,
+ 'write_empty_values': False,
+}
+
+
+
+def getObj(s):
+ global compiler
+ if compiler is None:
+ import compiler
+ s = "a=" + s
+ p = compiler.parse(s)
+ return p.getChildren()[1].getChildren()[0].getChildren()[1]
+
+
+class UnknownType(Exception):
+ pass
+
+
+class Builder(object):
+
+ def build(self, o):
+ m = getattr(self, 'build_' + o.__class__.__name__, None)
+ if m is None:
+ raise UnknownType(o.__class__.__name__)
+ return m(o)
+
+ def build_List(self, o):
+ return map(self.build, o.getChildren())
+
+ def build_Const(self, o):
+ return o.value
+
+ def build_Dict(self, o):
+ d = {}
+ i = iter(map(self.build, o.getChildren()))
+ for el in i:
+ d[el] = i.next()
+ return d
+
+ def build_Tuple(self, o):
+ return tuple(self.build_List(o))
+
+ def build_Name(self, o):
+ if o.name == 'None':
+ return None
+ if o.name == 'True':
+ return True
+ if o.name == 'False':
+ return False
+
+ # An undefined Name
+ raise UnknownType('Undefined Name')
+
+ def build_Add(self, o):
+ real, imag = map(self.build_Const, o.getChildren())
+ try:
+ real = float(real)
+ except TypeError:
+ raise UnknownType('Add')
+ if not isinstance(imag, complex) or imag.real != 0.0:
+ raise UnknownType('Add')
+ return real+imag
+
+ def build_Getattr(self, o):
+ parent = self.build(o.expr)
+ return getattr(parent, o.attrname)
+
+ def build_UnarySub(self, o):
+ return -self.build_Const(o.getChildren()[0])
+
+ def build_UnaryAdd(self, o):
+ return self.build_Const(o.getChildren()[0])
+
+
+_builder = Builder()
+
+
+def unrepr(s):
+ if not s:
+ return s
+ return _builder.build(getObj(s))
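+
+# For illustration: unrepr("{'a': 1, 'flags': [True, None]}") returns the
+# corresponding Python dict; values are rebuilt from the compiler module's
+# parse tree by the Builder above.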
+
+
+
+class ConfigObjError(SyntaxError):
+ """
+ This is the base class for all errors that ConfigObj raises.
+ It is a subclass of SyntaxError.
+ """
+ def __init__(self, message='', line_number=None, line=''):
+ self.line = line
+ self.line_number = line_number
+ SyntaxError.__init__(self, message)
+
+
+class NestingError(ConfigObjError):
+ """
+ This error indicates a level of nesting that doesn't match.
+        self.assertTrue('MPL' in self.parser.header)
+
+
+class ParseError(ConfigObjError):
+ """
+ This error indicates that a line is badly written.
+ It is neither a valid ``key = value`` line,
+ nor a valid section marker line.
+ """
+
+
+class ReloadError(IOError):
+ """
+ A 'reload' operation failed.
+ This exception is a subclass of ``IOError``.
+ """
+ def __init__(self):
+ IOError.__init__(self, 'reload failed, filename is not set.')
+
+
+class DuplicateError(ConfigObjError):
+ """
+ The keyword or section specified already exists.
+ """
+
+
+class ConfigspecError(ConfigObjError):
+ """
+    An error occurred whilst parsing a configspec.
+ """
+
+
+class InterpolationError(ConfigObjError):
+ """Base class for the two interpolation errors."""
+
+
+class InterpolationLoopError(InterpolationError):
+ """Maximum interpolation depth exceeded in string interpolation."""
+
+ def __init__(self, option):
+ InterpolationError.__init__(
+ self,
+ 'interpolation loop detected in value "%s".' % option)
+
+
+class RepeatSectionError(ConfigObjError):
+ """
+ This error indicates additional sections in a section with a
+ ``__many__`` (repeated) section.
+ """
+
+
+class MissingInterpolationOption(InterpolationError):
+ """A value specified for interpolation was missing."""
+ def __init__(self, option):
+ msg = 'missing option "%s" in interpolation.' % option
+ InterpolationError.__init__(self, msg)
+
+
+class UnreprError(ConfigObjError):
+ """An error parsing in unrepr mode."""
+
+
+
+class InterpolationEngine(object):
+ """
+ A helper class to help perform string interpolation.
+
+ This class is an abstract base class; its descendants perform
+ the actual work.
+ """
+
+ # compiled regexp to use in self.interpolate()
+ _KEYCRE = re.compile(r"%\(([^)]*)\)s")
+ _cookie = '%'
+
+ def __init__(self, section):
+ # the Section instance that "owns" this engine
+ self.section = section
+
+
+ def interpolate(self, key, value):
+ # short-cut
+ if not self._cookie in value:
+ return value
+
+ def recursive_interpolate(key, value, section, backtrail):
+ """The function that does the actual work.
+
+ ``value``: the string we're trying to interpolate.
+ ``section``: the section in which that string was found
+ ``backtrail``: a dict to keep track of where we've been,
+ to detect and prevent infinite recursion loops
+
+ This is similar to a depth-first-search algorithm.
+ """
+ # Have we been here already?
+ if (key, section.name) in backtrail:
+ # Yes - infinite loop detected
+ raise InterpolationLoopError(key)
+ # Place a marker on our backtrail so we won't come back here again
+ backtrail[(key, section.name)] = 1
+
+ # Now start the actual work
+ match = self._KEYCRE.search(value)
+ while match:
+ # The actual parsing of the match is implementation-dependent,
+ # so delegate to our helper function
+ k, v, s = self._parse_match(match)
+ if k is None:
+ # That's the signal that no further interpolation is needed
+ replacement = v
+ else:
+ # Further interpolation may be needed to obtain final value
+ replacement = recursive_interpolate(k, v, s, backtrail)
+ # Replace the matched string with its final value
+ start, end = match.span()
+ value = ''.join((value[:start], replacement, value[end:]))
+ new_search_start = start + len(replacement)
+ # Pick up the next interpolation key, if any, for next time
+ # through the while loop
+ match = self._KEYCRE.search(value, new_search_start)
+
+ # Now safe to come back here again; remove marker from backtrail
+ del backtrail[(key, section.name)]
+
+ return value
+
+ # Back in interpolate(), all we have to do is kick off the recursive
+ # function with appropriate starting values
+ value = recursive_interpolate(key, value, self.section, {})
+ return value
+
+
+ def _fetch(self, key):
+ """Helper function to fetch values from owning section.
+
+ Returns a 2-tuple: the value, and the section where it was found.
+ """
+        # switch off interpolation before we try and fetch anything!
+ save_interp = self.section.main.interpolation
+ self.section.main.interpolation = False
+
+ # Start at section that "owns" this InterpolationEngine
+ current_section = self.section
+ while True:
+ # try the current section first
+ val = current_section.get(key)
+ if val is not None and not isinstance(val, Section):
+ break
+ # try "DEFAULT" next
+ val = current_section.get('DEFAULT', {}).get(key)
+ if val is not None and not isinstance(val, Section):
+ break
+ # move up to parent and try again
+ # top-level's parent is itself
+ if current_section.parent is current_section:
+ # reached top level, time to give up
+ break
+ current_section = current_section.parent
+
+ # restore interpolation to previous value before returning
+ self.section.main.interpolation = save_interp
+ if val is None:
+ raise MissingInterpolationOption(key)
+ return val, current_section
+
+
+ def _parse_match(self, match):
+ """Implementation-dependent helper function.
+
+ Will be passed a match object corresponding to the interpolation
+ key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
+ key in the appropriate config file section (using the ``_fetch()``
+ helper function) and return a 3-tuple: (key, value, section)
+
+ ``key`` is the name of the key we're looking for
+ ``value`` is the value found for that key
+ ``section`` is a reference to the section where it was found
+
+ ``key`` and ``section`` should be None if no further
+ interpolation should be performed on the resulting value
+ (e.g., if we interpolated "$$" and returned "$").
+ """
+ raise NotImplementedError()
+
+
+
+class ConfigParserInterpolation(InterpolationEngine):
+ """Behaves like ConfigParser."""
+ _cookie = '%'
+ _KEYCRE = re.compile(r"%\(([^)]*)\)s")
+
+ def _parse_match(self, match):
+ key = match.group(1)
+ value, section = self._fetch(key)
+ return key, value, section
+
+
+
+class TemplateInterpolation(InterpolationEngine):
+ """Behaves like string.Template."""
+ _cookie = '$'
+ _delimiter = '$'
+ _KEYCRE = re.compile(r"""
+ \$(?:
+ (?P<escaped>\$) | # Two $ signs
+ (?P<named>[_a-z][_a-z0-9]*) | # $name format
+ {(?P<braced>[^}]*)} # ${name} format
+ )
+ """, re.IGNORECASE | re.VERBOSE)
+
+ def _parse_match(self, match):
+ # Valid name (in or out of braces): fetch value from section
+ key = match.group('named') or match.group('braced')
+ if key is not None:
+ value, section = self._fetch(key)
+ return key, value, section
+ # Escaped delimiter (e.g., $$): return single delimiter
+ if match.group('escaped') is not None:
+ # Return None for key and section to indicate it's time to stop
+ return None, self._delimiter, None
+ # Anything else: ignore completely, just return it unchanged
+ return None, match.group(), None
+
+
+interpolation_engines = {
+ 'configparser': ConfigParserInterpolation,
+ 'template': TemplateInterpolation,
+}
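+
+# Usage sketch: the engine is picked by name via the 'interpolation'
+# option, e.g. (hypothetical values):
+#   cfg = ConfigObj(['name = World', 'greeting = Hello $name'],
+#                   interpolation='template')
+#   cfg['greeting']  # -> 'Hello World'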
+
+
+def __newobj__(cls, *args):
+ # Hack for pickle
+ return cls.__new__(cls, *args)
+
+class Section(dict):
+ """
+ A dictionary-like object that represents a section in a config file.
+
+ It does string interpolation if the 'interpolation' attribute
+ of the 'main' object is set to True.
+
+ Interpolation is tried first from this object, then from the 'DEFAULT'
+ section of this object, next from the parent and its 'DEFAULT' section,
+ and so on until the main object is reached.
+
+ A Section will behave like an ordered dictionary - following the
+ order of the ``scalars`` and ``sections`` attributes.
+ You can use this to change the order of members.
+
+ Iteration follows the order: scalars, then sections.
+ """
+
+
+ def __setstate__(self, state):
+ dict.update(self, state[0])
+ self.__dict__.update(state[1])
+
+ def __reduce__(self):
+ state = (dict(self), self.__dict__)
+ return (__newobj__, (self.__class__,), state)
+
+
+ def __init__(self, parent, depth, main, indict=None, name=None):
+ """
+ * parent is the section above
+ * depth is the depth level of this section
+ * main is the main ConfigObj
+ * indict is a dictionary to initialise the section with
+ """
+ if indict is None:
+ indict = {}
+ dict.__init__(self)
+ # used for nesting level *and* interpolation
+ self.parent = parent
+ # used for the interpolation attribute
+ self.main = main
+ # level of nesting depth of this Section
+ self.depth = depth
+ # purely for information
+ self.name = name
+ #
+ self._initialise()
+ # we do this explicitly so that __setitem__ is used properly
+ # (rather than just passing to ``dict.__init__``)
+ for entry, value in indict.iteritems():
+ self[entry] = value
+
+
+ def _initialise(self):
+ # the sequence of scalar values in this Section
+ self.scalars = []
+ # the sequence of sections in this Section
+ self.sections = []
+ # for comments :-)
+ self.comments = {}
+ self.inline_comments = {}
+ # the configspec
+ self.configspec = None
+ # for defaults
+ self.defaults = []
+ self.default_values = {}
+ self.extra_values = []
+ self._created = False
+
+
+ def _interpolate(self, key, value):
+ try:
+ # do we already have an interpolation engine?
+ engine = self._interpolation_engine
+ except AttributeError:
+ # not yet: first time running _interpolate(), so pick the engine
+ name = self.main.interpolation
+ if name == True: # note that "if name:" would be incorrect here
+ # backwards-compatibility: interpolation=True means use default
+ name = DEFAULT_INTERPOLATION
+ name = name.lower() # so that "Template", "template", etc. all work
+ class_ = interpolation_engines.get(name, None)
+ if class_ is None:
+ # invalid value for self.main.interpolation
+ self.main.interpolation = False
+ return value
+ else:
+ # save reference to engine so we don't have to do this again
+ engine = self._interpolation_engine = class_(self)
+ # let the engine do the actual work
+ return engine.interpolate(key, value)
+
+
+ def __getitem__(self, key):
+ """Fetch the item and do string interpolation."""
+ val = dict.__getitem__(self, key)
+ if self.main.interpolation:
+ if isinstance(val, basestring):
+ return self._interpolate(key, val)
+ if isinstance(val, list):
+ def _check(entry):
+ if isinstance(entry, basestring):
+ return self._interpolate(key, entry)
+ return entry
+ new = [_check(entry) for entry in val]
+ if new != val:
+ return new
+ return val
+
+
+ def __setitem__(self, key, value, unrepr=False):
+ """
+ Correctly set a value.
+
+ Making dictionary values Section instances.
+ (We have to special case 'Section' instances - which are also dicts)
+
+ Keys must be strings.
+ Values need only be strings (or lists of strings) if
+ ``main.stringify`` is set.
+
+ ``unrepr`` must be set when setting a value to a dictionary, without
+ creating a new sub-section.
+ """
+ if not isinstance(key, basestring):
+ raise ValueError('The key "%s" is not a string.' % key)
+
+ # add the comment
+ if key not in self.comments:
+ self.comments[key] = []
+ self.inline_comments[key] = ''
+ # remove the entry from defaults
+ if key in self.defaults:
+ self.defaults.remove(key)
+ #
+ if isinstance(value, Section):
+ if key not in self:
+ self.sections.append(key)
+ dict.__setitem__(self, key, value)
+ elif isinstance(value, dict) and not unrepr:
+ # First create the new depth level,
+ # then create the section
+ if key not in self:
+ self.sections.append(key)
+ new_depth = self.depth + 1
+ dict.__setitem__(
+ self,
+ key,
+ Section(
+ self,
+ new_depth,
+ self.main,
+ indict=value,
+ name=key))
+ else:
+ if key not in self:
+ self.scalars.append(key)
+ if not self.main.stringify:
+ if isinstance(value, basestring):
+ pass
+ elif isinstance(value, (list, tuple)):
+ for entry in value:
+ if not isinstance(entry, basestring):
+ raise TypeError('Value is not a string "%s".' % entry)
+ else:
+ raise TypeError('Value is not a string "%s".' % value)
+ dict.__setitem__(self, key, value)
+
+
+ def __delitem__(self, key):
+ """Remove items from the sequence when deleting."""
+ dict. __delitem__(self, key)
+ if key in self.scalars:
+ self.scalars.remove(key)
+ else:
+ self.sections.remove(key)
+ del self.comments[key]
+ del self.inline_comments[key]
+
+
+ def get(self, key, default=None):
+ """A version of ``get`` that doesn't bypass string interpolation."""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+
+ def update(self, indict):
+ """
+ A version of update that uses our ``__setitem__``.
+ """
+ for entry in indict:
+ self[entry] = indict[entry]
+
+
+ def pop(self, key, default=MISSING):
+ """
+ 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised'
+ """
+ try:
+ val = self[key]
+ except KeyError:
+ if default is MISSING:
+ raise
+ val = default
+ else:
+ del self[key]
+ return val
+
+
+ def popitem(self):
+ """Pops the first (key,val)"""
+ sequence = (self.scalars + self.sections)
+ if not sequence:
+ raise KeyError(": 'popitem(): dictionary is empty'")
+ key = sequence[0]
+ val = self[key]
+ del self[key]
+ return key, val
+
+
+ def clear(self):
+ """
+ A version of clear that also affects scalars/sections
+ Also clears comments and configspec.
+
+        Leaves other attributes alone:
+        depth/main/parent are not affected
+ """
+ dict.clear(self)
+ self.scalars = []
+ self.sections = []
+ self.comments = {}
+ self.inline_comments = {}
+ self.configspec = None
+ self.defaults = []
+ self.extra_values = []
+
+
+ def setdefault(self, key, default=None):
+ """A version of setdefault that sets sequence if appropriate."""
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return self[key]
+
+
+ def items(self):
+ """D.items() -> list of D's (key, value) pairs, as 2-tuples"""
+ return zip((self.scalars + self.sections), self.values())
+
+
+ def keys(self):
+ """D.keys() -> list of D's keys"""
+ return (self.scalars + self.sections)
+
+
+ def values(self):
+ """D.values() -> list of D's values"""
+ return [self[key] for key in (self.scalars + self.sections)]
+
+
+ def iteritems(self):
+ """D.iteritems() -> an iterator over the (key, value) items of D"""
+ return iter(self.items())
+
+
+ def iterkeys(self):
+ """D.iterkeys() -> an iterator over the keys of D"""
+ return iter((self.scalars + self.sections))
+
+ __iter__ = iterkeys
+
+
+ def itervalues(self):
+ """D.itervalues() -> an iterator over the values of D"""
+ return iter(self.values())
+
+
+ def __repr__(self):
+ """x.__repr__() <==> repr(x)"""
+ def _getval(key):
+ try:
+ return self[key]
+ except MissingInterpolationOption:
+ return dict.__getitem__(self, key)
+ return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
+ for key in (self.scalars + self.sections)])
+
+ __str__ = __repr__
+ __str__.__doc__ = "x.__str__() <==> str(x)"
+
+
+ # Extra methods - not in a normal dictionary
+
+ def dict(self):
+ """
+ Return a deepcopy of self as a dictionary.
+
+ All members that are ``Section`` instances are recursively turned to
+ ordinary dictionaries - by calling their ``dict`` method.
+
+ >>> n = a.dict()
+ >>> n == a
+ 1
+ >>> n is a
+ 0
+ """
+ newdict = {}
+ for entry in self:
+ this_entry = self[entry]
+ if isinstance(this_entry, Section):
+ this_entry = this_entry.dict()
+ elif isinstance(this_entry, list):
+ # create a copy rather than a reference
+ this_entry = list(this_entry)
+ elif isinstance(this_entry, tuple):
+ # create a copy rather than a reference
+ this_entry = tuple(this_entry)
+ newdict[entry] = this_entry
+ return newdict
+
+
+ def merge(self, indict):
+ """
+ A recursive update - useful for merging config files.
+
+ >>> a = '''[section1]
+ ... option1 = True
+ ... [[subsection]]
+ ... more_options = False
+ ... # end of file'''.splitlines()
+ >>> b = '''# File is user.ini
+ ... [section1]
+ ... option1 = False
+ ... # end of file'''.splitlines()
+ >>> c1 = ConfigObj(b)
+ >>> c2 = ConfigObj(a)
+ >>> c2.merge(c1)
+ >>> c2
+ ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
+ """
+ for key, val in indict.items():
+ if (key in self and isinstance(self[key], dict) and
+ isinstance(val, dict)):
+ self[key].merge(val)
+ else:
+ self[key] = val
+
+
+ def rename(self, oldkey, newkey):
+ """
+ Change a keyname to another, without changing position in sequence.
+
+ Implemented so that transformations can be made on keys,
+ as well as on values. (used by encode and decode)
+
+ Also renames comments.
+ """
+ if oldkey in self.scalars:
+ the_list = self.scalars
+ elif oldkey in self.sections:
+ the_list = self.sections
+ else:
+ raise KeyError('Key "%s" not found.' % oldkey)
+ pos = the_list.index(oldkey)
+ #
+ val = self[oldkey]
+ dict.__delitem__(self, oldkey)
+ dict.__setitem__(self, newkey, val)
+ the_list.remove(oldkey)
+ the_list.insert(pos, newkey)
+ comm = self.comments[oldkey]
+ inline_comment = self.inline_comments[oldkey]
+ del self.comments[oldkey]
+ del self.inline_comments[oldkey]
+ self.comments[newkey] = comm
+ self.inline_comments[newkey] = inline_comment
+
+
+ def walk(self, function, raise_errors=True,
+ call_on_sections=False, **keywargs):
+ """
+ Walk every member and call a function on the keyword and value.
+
+ Return a dictionary of the return values
+
+        If the function raises an exception, raise the error
+ unless ``raise_errors=False``, in which case set the return value to
+ ``False``.
+
+        Any unrecognised keyword arguments you pass to walk will be passed
+        on to the function you pass in.
+
+ Note: if ``call_on_sections`` is ``True`` then - on encountering a
+ subsection, *first* the function is called for the *whole* subsection,
+        and then recurses into its members. This means your function must be
+ able to handle strings, dictionaries and lists. This allows you
+ to change the key of subsections as well as for ordinary members. The
+ return value when called on the whole subsection has to be discarded.
+
+ See the encode and decode methods for examples, including functions.
+
+ .. admonition:: caution
+
+ You can use ``walk`` to transform the names of members of a section
+ but you mustn't add or delete members.
+
+ >>> config = '''[XXXXsection]
+ ... XXXXkey = XXXXvalue'''.splitlines()
+ >>> cfg = ConfigObj(config)
+ >>> cfg
+ ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
+ >>> def transform(section, key):
+ ... val = section[key]
+ ... newkey = key.replace('XXXX', 'CLIENT1')
+ ... section.rename(key, newkey)
+ ... if isinstance(val, (tuple, list, dict)):
+ ... pass
+ ... else:
+ ... val = val.replace('XXXX', 'CLIENT1')
+ ... section[newkey] = val
+ >>> cfg.walk(transform, call_on_sections=True)
+ {'CLIENT1section': {'CLIENT1key': None}}
+ >>> cfg
+ ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
+ """
+ out = {}
+ # scalars first
+ for i in range(len(self.scalars)):
+ entry = self.scalars[i]
+ try:
+ val = function(self, entry, **keywargs)
+ # bound again in case name has changed
+ entry = self.scalars[i]
+ out[entry] = val
+ except Exception:
+ if raise_errors:
+ raise
+ else:
+ entry = self.scalars[i]
+ out[entry] = False
+ # then sections
+ for i in range(len(self.sections)):
+ entry = self.sections[i]
+ if call_on_sections:
+ try:
+ function(self, entry, **keywargs)
+ except Exception:
+ if raise_errors:
+ raise
+ else:
+ entry = self.sections[i]
+ out[entry] = False
+ # bound again in case name has changed
+ entry = self.sections[i]
+ # previous result is discarded
+ out[entry] = self[entry].walk(
+ function,
+ raise_errors=raise_errors,
+ call_on_sections=call_on_sections,
+ **keywargs)
+ return out
+
+
+ def as_bool(self, key):
+ """
+ Accepts a key as input. The corresponding value must be a string or
+ the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
+ retain compatibility with Python 2.2.
+
+ If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
+ ``True``.
+
+ If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
+ ``False``.
+
+ ``as_bool`` is not case sensitive.
+
+ Any other input will raise a ``ValueError``.
+
+ >>> a = ConfigObj()
+ >>> a['a'] = 'fish'
+ >>> a.as_bool('a')
+ Traceback (most recent call last):
+ ValueError: Value "fish" is neither True nor False
+ >>> a['b'] = 'True'
+ >>> a.as_bool('b')
+ 1
+ >>> a['b'] = 'off'
+ >>> a.as_bool('b')
+ 0
+ """
+ val = self[key]
+ if val == True:
+ return True
+ elif val == False:
+ return False
+ else:
+ try:
+ if not isinstance(val, basestring):
+ # TODO: Why do we raise a KeyError here?
+ raise KeyError()
+ else:
+ return self.main._bools[val.lower()]
+ except KeyError:
+ raise ValueError('Value "%s" is neither True nor False' % val)
+
+
+ def as_int(self, key):
+ """
+ A convenience method which coerces the specified value to an integer.
+
+ If the value is an invalid literal for ``int``, a ``ValueError`` will
+ be raised.
+
+ >>> a = ConfigObj()
+ >>> a['a'] = 'fish'
+ >>> a.as_int('a')
+ Traceback (most recent call last):
+ ValueError: invalid literal for int() with base 10: 'fish'
+ >>> a['b'] = '1'
+ >>> a.as_int('b')
+ 1
+ >>> a['b'] = '3.2'
+ >>> a.as_int('b')
+ Traceback (most recent call last):
+ ValueError: invalid literal for int() with base 10: '3.2'
+ """
+ return int(self[key])
+
+
+ def as_float(self, key):
+ """
+ A convenience method which coerces the specified value to a float.
+
+ If the value is an invalid literal for ``float``, a ``ValueError`` will
+ be raised.
+
+ >>> a = ConfigObj()
+ >>> a['a'] = 'fish'
+ >>> a.as_float('a')
+ Traceback (most recent call last):
+ ValueError: invalid literal for float(): fish
+ >>> a['b'] = '1'
+ >>> a.as_float('b')
+ 1.0
+ >>> a['b'] = '3.2'
+ >>> a.as_float('b')
+ 3.2000000000000002
+ """
+ return float(self[key])
+
+
+ def as_list(self, key):
+ """
+ A convenience method which fetches the specified value, guaranteeing
+ that it is a list.
+
+ >>> a = ConfigObj()
+ >>> a['a'] = 1
+ >>> a.as_list('a')
+ [1]
+ >>> a['a'] = (1,)
+ >>> a.as_list('a')
+ [1]
+ >>> a['a'] = [1]
+ >>> a.as_list('a')
+ [1]
+ """
+ result = self[key]
+ if isinstance(result, (tuple, list)):
+ return list(result)
+ return [result]
+
+
+ def restore_default(self, key):
+ """
+ Restore (and return) default value for the specified key.
+
+ This method will only work for a ConfigObj that was created
+ with a configspec and has been validated.
+
+ If there is no default value for this key, ``KeyError`` is raised.
+ """
+ default = self.default_values[key]
+ dict.__setitem__(self, key, default)
+ if key not in self.defaults:
+ self.defaults.append(key)
+ return default
+
+
+ def restore_defaults(self):
+ """
+ Recursively restore default values to all members
+ that have them.
+
+ This method will only work for a ConfigObj that was created
+ with a configspec and has been validated.
+
+ It doesn't delete or modify entries without default values.
+ """
+ for key in self.default_values:
+ self.restore_default(key)
+
+ for section in self.sections:
+ self[section].restore_defaults()
+
+
+class ConfigObj(Section):
+ """An object to read, create, and write config files."""
+
+ _keyword = re.compile(r'''^ # line start
+ (\s*) # indentation
+ ( # keyword
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'"=].*?) # no quotes
+ )
+ \s*=\s* # divider
+ (.*) # value (including list values and comments)
+ $ # line end
+ ''',
+ re.VERBOSE)
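+    # e.g. '  key = value  # comment' yields groups
+    # ('  ', 'key', 'value  # comment'); comments and list values in
+    # the third group are handled later by _valueexp.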
+
+ _sectionmarker = re.compile(r'''^
+ (\s*) # 1: indentation
+ ((?:\[\s*)+) # 2: section marker open
+ ( # 3: section name open
+ (?:"\s*\S.*?\s*")| # at least one non-space with double quotes
+ (?:'\s*\S.*?\s*')| # at least one non-space with single quotes
+ (?:[^'"\s].*?) # at least one non-space unquoted
+ ) # section name close
+ ((?:\s*\])+) # 4: section marker close
+ \s*(\#.*)? # 5: optional comment
+ $''',
+ re.VERBOSE)
+
+ # this regexp pulls list values out as a single string
+ # or single values and comments
+ # FIXME: this regex adds a '' to the end of comma terminated lists
+ # workaround in ``_handle_value``
+ _valueexp = re.compile(r'''^
+ (?:
+ (?:
+ (
+ (?:
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\#][^,\#]*?) # unquoted
+ )
+ \s*,\s* # comma
+ )* # match all list items ending in a comma (if any)
+ )
+ (
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\#\s][^,]*?)| # unquoted
+ (?:(?<!,)) # Empty value
+ )? # last item in a list - or string value
+ )|
+ (,) # alternatively a single comma - empty list
+ )
+ \s*(\#.*)? # optional comment
+ $''',
+ re.VERBOSE)
+
+ # use findall to get the members of a list value
+ _listvalueexp = re.compile(r'''
+ (
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\#]?.*?) # unquoted
+ )
+ \s*,\s* # comma
+ ''',
+ re.VERBOSE)
+
+ # this regexp is used for the value
+ # when lists are switched off
+ _nolistvalue = re.compile(r'''^
+ (
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'"\#].*?)| # unquoted
+ (?:) # Empty value
+ )
+ \s*(\#.*)? # optional comment
+ $''',
+ re.VERBOSE)
+
+ # regexes for finding triple quoted values on one line
+ _single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
+ _single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
+ _multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
+ _multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')
+
+ _triple_quote = {
+ "'''": (_single_line_single, _multi_line_single),
+ '"""': (_single_line_double, _multi_line_double),
+ }
+
+ # Used by the ``istrue`` Section method
+ _bools = {
+ 'yes': True, 'no': False,
+ 'on': True, 'off': False,
+ '1': True, '0': False,
+ 'true': True, 'false': False,
+ }
+
+
+ def __init__(self, infile=None, options=None, configspec=None, encoding=None,
+ interpolation=True, raise_errors=False, list_values=True,
+ create_empty=False, file_error=False, stringify=True,
+ indent_type=None, default_encoding=None, unrepr=False,
+ write_empty_values=False, _inspec=False):
+ """
+ Parse a config file or create a config file object.
+
+ ``ConfigObj(infile=None, configspec=None, encoding=None,
+ interpolation=True, raise_errors=False, list_values=True,
+ create_empty=False, file_error=False, stringify=True,
+ indent_type=None, default_encoding=None, unrepr=False,
+ write_empty_values=False, _inspec=False)``
+ """
+ self._inspec = _inspec
+ # init the superclass
+ Section.__init__(self, self, 0, self)
+
+ infile = infile or []
+
+ _options = {'configspec': configspec,
+ 'encoding': encoding, 'interpolation': interpolation,
+ 'raise_errors': raise_errors, 'list_values': list_values,
+ 'create_empty': create_empty, 'file_error': file_error,
+ 'stringify': stringify, 'indent_type': indent_type,
+ 'default_encoding': default_encoding, 'unrepr': unrepr,
+ 'write_empty_values': write_empty_values}
+
+ if options is None:
+ options = _options
+ else:
+ import warnings
+ warnings.warn('Passing in an options dictionary to ConfigObj() is '
+ 'deprecated. Use **options instead.',
+ DeprecationWarning, stacklevel=2)
+
+ # TODO: check the values too.
+ for entry in options:
+ if entry not in OPTION_DEFAULTS:
+ raise TypeError('Unrecognised option "%s".' % entry)
+ for entry, value in OPTION_DEFAULTS.items():
+ if entry not in options:
+ options[entry] = value
+ keyword_value = _options[entry]
+ if value != keyword_value:
+ options[entry] = keyword_value
+
+ # XXXX this ignores an explicit list_values = True in combination
+ # with _inspec. The user should *never* do that anyway, but still...
+ if _inspec:
+ options['list_values'] = False
+
+ self._initialise(options)
+ configspec = options['configspec']
+ self._original_configspec = configspec
+ self._load(infile, configspec)
+
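+    # A brief construction sketch - ConfigObj accepts a filename, a file-like
+    # object, a dict, or (as here) a list of lines:
+    #
+    #     >>> c = ConfigObj(['key = value', '[section]', 'sub = 1, 2'])
+    #     >>> c['key']
+    #     'value'
+    #     >>> c['section']['sub']
+    #     ['1', '2']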
+
+ def _load(self, infile, configspec):
+ if isinstance(infile, basestring):
+ self.filename = infile
+ if os.path.isfile(infile):
+ h = open(infile, 'rb')
+ infile = h.read() or []
+ h.close()
+ elif self.file_error:
+ # raise an error if the file doesn't exist
+ raise IOError('Config file not found: "%s".' % self.filename)
+ else:
+ # file doesn't already exist
+ if self.create_empty:
+ # this is a good test that the filename specified
+ # isn't impossible - like on a non-existent device
+ h = open(infile, 'w')
+ h.write('')
+ h.close()
+ infile = []
+
+ elif isinstance(infile, (list, tuple)):
+ infile = list(infile)
+
+ elif isinstance(infile, dict):
+ # initialise self
+ # the Section class handles creating subsections
+ if isinstance(infile, ConfigObj):
+ # get a copy of our ConfigObj
+ def set_section(in_section, this_section):
+ for entry in in_section.scalars:
+ this_section[entry] = in_section[entry]
+ for section in in_section.sections:
+ this_section[section] = {}
+ set_section(in_section[section], this_section[section])
+ set_section(infile, self)
+
+ else:
+ for entry in infile:
+ self[entry] = infile[entry]
+ del self._errors
+
+ if configspec is not None:
+ self._handle_configspec(configspec)
+ else:
+ self.configspec = None
+ return
+
+ elif getattr(infile, 'read', MISSING) is not MISSING:
+ # This supports file like objects
+ infile = infile.read() or []
+ # needs splitting into lines - but needs doing *after* decoding
+ # in case it's not an 8 bit encoding
+ else:
+ raise TypeError('infile must be a filename, file like object, or list of lines.')
+
+ if infile:
+ # don't do it for the empty ConfigObj
+ infile = self._handle_bom(infile)
+ # infile is now *always* a list
+ #
+ # Set the newlines attribute (first line ending it finds)
+ # and strip trailing '\n' or '\r' from lines
+ for line in infile:
+ if (not line) or (line[-1] not in ('\r', '\n', '\r\n')):
+ continue
+ for end in ('\r\n', '\n', '\r'):
+ if line.endswith(end):
+ self.newlines = end
+ break
+ break
+
+ infile = [line.rstrip('\r\n') for line in infile]
+
+ self._parse(infile)
+ # if we had any errors, now is the time to raise them
+ if self._errors:
+ info = "at line %s." % self._errors[0].line_number
+ if len(self._errors) > 1:
+ msg = "Parsing failed with several errors.\nFirst error %s" % info
+ error = ConfigObjError(msg)
+ else:
+ error = self._errors[0]
+ # set the errors attribute; it's a list of tuples:
+ # (error_type, message, line_number)
+ error.errors = self._errors
+ # set the config attribute
+ error.config = self
+ raise error
+ # delete private attributes
+ del self._errors
+
+ if configspec is None:
+ self.configspec = None
+ else:
+ self._handle_configspec(configspec)
+
+
+ def _initialise(self, options=None):
+ if options is None:
+ options = OPTION_DEFAULTS
+
+ # initialise a few variables
+ self.filename = None
+ self._errors = []
+ self.raise_errors = options['raise_errors']
+ self.interpolation = options['interpolation']
+ self.list_values = options['list_values']
+ self.create_empty = options['create_empty']
+ self.file_error = options['file_error']
+ self.stringify = options['stringify']
+ self.indent_type = options['indent_type']
+ self.encoding = options['encoding']
+ self.default_encoding = options['default_encoding']
+ self.BOM = False
+ self.newlines = None
+ self.write_empty_values = options['write_empty_values']
+ self.unrepr = options['unrepr']
+
+ self.initial_comment = []
+ self.final_comment = []
+ self.configspec = None
+
+ if self._inspec:
+ self.list_values = False
+
+ # Clear section attributes as well
+ Section._initialise(self)
+
+
+ def __repr__(self):
+ def _getval(key):
+ try:
+ return self[key]
+ except MissingInterpolationOption:
+ return dict.__getitem__(self, key)
+ return ('ConfigObj({%s})' %
+ ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
+ for key in (self.scalars + self.sections)]))
+
+
+ def _handle_bom(self, infile):
+ """
+ Handle any BOM, and decode if necessary.
+
+ If an encoding is specified, that *must* be used - but the BOM should
+ still be removed (and the BOM attribute set).
+
+ (If the encoding is wrongly specified, then a BOM for an alternative
+ encoding won't be discovered or removed.)
+
+ If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
+ removed. The BOM attribute will be set. UTF16 will be decoded to
+ unicode.
+
+ NOTE: This method must not be called with an empty ``infile``.
+
+ Specifying the *wrong* encoding is likely to cause a
+ ``UnicodeDecodeError``.
+
+ ``infile`` must always be returned as a list of lines, but may be
+ passed in as a single string.
+ """
+ if ((self.encoding is not None) and
+ (self.encoding.lower() not in BOM_LIST)):
+ # No need to check for a BOM
+ # the encoding specified doesn't have one
+ # just decode
+ return self._decode(infile, self.encoding)
+
+ if isinstance(infile, (list, tuple)):
+ line = infile[0]
+ else:
+ line = infile
+ if self.encoding is not None:
+ # encoding explicitly supplied
+ # And it could have an associated BOM
+ # TODO: if encoding is just UTF16 - we ought to check for both
+ # TODO: big endian and little endian versions.
+ enc = BOM_LIST[self.encoding.lower()]
+ if enc == 'utf_16':
+ # For UTF16 we try big endian and little endian
+ for BOM, (encoding, final_encoding) in BOMS.items():
+ if not final_encoding:
+ # skip UTF8
+ continue
+                    if line.startswith(BOM):
+ ### BOM discovered
+ ##self.BOM = True
+ # Don't need to remove BOM
+ return self._decode(infile, encoding)
+
+                # If we get this far, this will *probably* raise a DecodeError
+                # as it doesn't appear to start with a BOM
+ return self._decode(infile, self.encoding)
+
+ # Must be UTF8
+ BOM = BOM_SET[enc]
+ if not line.startswith(BOM):
+ return self._decode(infile, self.encoding)
+
+ newline = line[len(BOM):]
+
+ # BOM removed
+ if isinstance(infile, (list, tuple)):
+ infile[0] = newline
+ else:
+ infile = newline
+ self.BOM = True
+ return self._decode(infile, self.encoding)
+
+ # No encoding specified - so we need to check for UTF8/UTF16
+ for BOM, (encoding, final_encoding) in BOMS.items():
+ if not line.startswith(BOM):
+ continue
+ else:
+ # BOM discovered
+ self.encoding = final_encoding
+ if not final_encoding:
+ self.BOM = True
+ # UTF8
+ # remove BOM
+ newline = line[len(BOM):]
+ if isinstance(infile, (list, tuple)):
+ infile[0] = newline
+ else:
+ infile = newline
+ # UTF8 - don't decode
+ if isinstance(infile, basestring):
+ return infile.splitlines(True)
+ else:
+ return infile
+ # UTF16 - have to decode
+ return self._decode(infile, encoding)
+
+ # No BOM discovered and no encoding specified, just return
+ if isinstance(infile, basestring):
+ # infile read from a file will be a single string
+ return infile.splitlines(True)
+ return infile
+
+
+ def _a_to_u(self, aString):
+ """Decode ASCII strings to unicode if a self.encoding is specified."""
+ if self.encoding:
+ return aString.decode('ascii')
+ else:
+ return aString
+
+
+ def _decode(self, infile, encoding):
+ """
+        Decode infile to unicode, using the specified encoding.
+
+        If infile is a string, it also needs converting to a list.
+ """
+ if isinstance(infile, basestring):
+ # can't be unicode
+ # NOTE: Could raise a ``UnicodeDecodeError``
+ return infile.decode(encoding).splitlines(True)
+ for i, line in enumerate(infile):
+ if not isinstance(line, unicode):
+ # NOTE: The isinstance test here handles mixed lists of unicode/string
+ # NOTE: But the decode will break on any non-string values
+ # NOTE: Or could raise a ``UnicodeDecodeError``
+ infile[i] = line.decode(encoding)
+ return infile
+
+
+ def _decode_element(self, line):
+ """Decode element to unicode if necessary."""
+ if not self.encoding:
+ return line
+ if isinstance(line, str) and self.default_encoding:
+ return line.decode(self.default_encoding)
+ return line
+
+
+ def _str(self, value):
+ """
+ Used by ``stringify`` within validate, to turn non-string values
+ into strings.
+ """
+ if not isinstance(value, basestring):
+ return str(value)
+ else:
+ return value
+
+
+ def _parse(self, infile):
+ """Actually parse the config file."""
+ temp_list_values = self.list_values
+ if self.unrepr:
+ self.list_values = False
+
+ comment_list = []
+ done_start = False
+ this_section = self
+ maxline = len(infile) - 1
+ cur_index = -1
+ reset_comment = False
+
+ while cur_index < maxline:
+ if reset_comment:
+ comment_list = []
+ cur_index += 1
+ line = infile[cur_index]
+ sline = line.strip()
+ # do we have anything on the line ?
+ if not sline or sline.startswith('#'):
+ reset_comment = False
+ comment_list.append(line)
+ continue
+
+ if not done_start:
+ # preserve initial comment
+ self.initial_comment = comment_list
+ comment_list = []
+ done_start = True
+
+ reset_comment = True
+ # first we check if it's a section marker
+ mat = self._sectionmarker.match(line)
+ if mat is not None:
+ # is a section line
+ (indent, sect_open, sect_name, sect_close, comment) = mat.groups()
+ if indent and (self.indent_type is None):
+ self.indent_type = indent
+ cur_depth = sect_open.count('[')
+ if cur_depth != sect_close.count(']'):
+ self._handle_error("Cannot compute the section depth at line %s.",
+ NestingError, infile, cur_index)
+ continue
+
+ if cur_depth < this_section.depth:
+ # the new section is dropping back to a previous level
+ try:
+ parent = self._match_depth(this_section,
+ cur_depth).parent
+ except SyntaxError:
+ self._handle_error("Cannot compute nesting level at line %s.",
+ NestingError, infile, cur_index)
+ continue
+ elif cur_depth == this_section.depth:
+ # the new section is a sibling of the current section
+ parent = this_section.parent
+ elif cur_depth == this_section.depth + 1:
+                    # the new section is a child of the current section
+ parent = this_section
+ else:
+ self._handle_error("Section too nested at line %s.",
+ NestingError, infile, cur_index)
+
+ sect_name = self._unquote(sect_name)
+ if sect_name in parent:
+ self._handle_error('Duplicate section name at line %s.',
+ DuplicateError, infile, cur_index)
+ continue
+
+ # create the new section
+ this_section = Section(
+ parent,
+ cur_depth,
+ self,
+ name=sect_name)
+ parent[sect_name] = this_section
+ parent.inline_comments[sect_name] = comment
+ parent.comments[sect_name] = comment_list
+ continue
+ #
+ # it's not a section marker,
+ # so it should be a valid ``key = value`` line
+ mat = self._keyword.match(line)
+ if mat is None:
+                # it matched neither as a keyword
+                # nor as a section marker
+ self._handle_error(
+ 'Invalid line at line "%s".',
+ ParseError, infile, cur_index)
+ else:
+ # is a keyword value
+ # value will include any inline comment
+ (indent, key, value) = mat.groups()
+ if indent and (self.indent_type is None):
+ self.indent_type = indent
+ # check for a multiline value
+ if value[:3] in ['"""', "'''"]:
+ try:
+ value, comment, cur_index = self._multiline(
+ value, infile, cur_index, maxline)
+ except SyntaxError:
+ self._handle_error(
+ 'Parse error in value at line %s.',
+ ParseError, infile, cur_index)
+ continue
+ else:
+ if self.unrepr:
+ comment = ''
+ try:
+ value = unrepr(value)
+ except Exception, e:
+ if type(e) == UnknownType:
+ msg = 'Unknown name or type in value at line %s.'
+ else:
+ msg = 'Parse error in value at line %s.'
+ self._handle_error(msg, UnreprError, infile,
+ cur_index)
+ continue
+ else:
+ if self.unrepr:
+ comment = ''
+ try:
+ value = unrepr(value)
+ except Exception, e:
+ if isinstance(e, UnknownType):
+ msg = 'Unknown name or type in value at line %s.'
+ else:
+ msg = 'Parse error in value at line %s.'
+ self._handle_error(msg, UnreprError, infile,
+ cur_index)
+ continue
+ else:
+ # extract comment and lists
+ try:
+ (value, comment) = self._handle_value(value)
+ except SyntaxError:
+ self._handle_error(
+ 'Parse error in value at line %s.',
+ ParseError, infile, cur_index)
+ continue
+ #
+ key = self._unquote(key)
+ if key in this_section:
+ self._handle_error(
+ 'Duplicate keyword name at line %s.',
+ DuplicateError, infile, cur_index)
+ continue
+ # add the key.
+ # we set unrepr because if we have got this far we will never
+ # be creating a new section
+ this_section.__setitem__(key, value, unrepr=True)
+ this_section.inline_comments[key] = comment
+ this_section.comments[key] = comment_list
+ continue
+ #
+ if self.indent_type is None:
+ # no indentation used, set the type accordingly
+ self.indent_type = ''
+
+ # preserve the final comment
+ if not self and not self.initial_comment:
+ self.initial_comment = comment_list
+ elif not reset_comment:
+ self.final_comment = comment_list
+ self.list_values = temp_list_values
+
+
+ def _match_depth(self, sect, depth):
+ """
+        Given a section and a depth level, walk back through the section's
+        parents to see if the depth level matches a previous section.
+
+ Return a reference to the right section,
+ or raise a SyntaxError.
+ """
+ while depth < sect.depth:
+ if sect is sect.parent:
+ # we've reached the top level already
+ raise SyntaxError()
+ sect = sect.parent
+ if sect.depth == depth:
+ return sect
+ # shouldn't get here
+ raise SyntaxError()
+
+
+ def _handle_error(self, text, ErrorClass, infile, cur_index):
+ """
+ Handle an error according to the error settings.
+
+ Either raise the error or store it.
+        The error will have occurred at ``cur_index``.
+ """
+ line = infile[cur_index]
+ cur_index += 1
+ message = text % cur_index
+ error = ErrorClass(message, cur_index, line)
+ if self.raise_errors:
+ # raise the error - parsing stops here
+ raise error
+ # store the error
+ # reraise when parsing has finished
+ self._errors.append(error)
+
+
+ def _unquote(self, value):
+ """Return an unquoted version of a value"""
+ if not value:
+ # should only happen during parsing of lists
+ raise SyntaxError
+ if (value[0] == value[-1]) and (value[0] in ('"', "'")):
+ value = value[1:-1]
+ return value
+
+
+ def _quote(self, value, multiline=True):
+ """
+ Return a safely quoted version of a value.
+
+ Raise a ConfigObjError if the value cannot be safely quoted.
+ If multiline is ``True`` (default) then use triple quotes
+ if necessary.
+
+ * Don't quote values that don't need it.
+ * Recursively quote members of a list and return a comma joined list.
+ * Multiline is ``False`` for lists.
+ * Obey list syntax for empty and single member lists.
+
+ If ``list_values=False`` then the value is only quoted if it contains
+ a ``\\n`` (is multiline) or '#'.
+
+ If ``write_empty_values`` is set, and the value is an empty string, it
+ won't be quoted.
+ """
+ if multiline and self.write_empty_values and value == '':
+ # Only if multiline is set, so that it is used for values not
+ # keys, and not values that are part of a list
+ return ''
+
+ if multiline and isinstance(value, (list, tuple)):
+ if not value:
+ return ','
+ elif len(value) == 1:
+ return self._quote(value[0], multiline=False) + ','
+ return ', '.join([self._quote(val, multiline=False)
+ for val in value])
+ if not isinstance(value, basestring):
+ if self.stringify:
+ value = str(value)
+ else:
+ raise TypeError('Value "%s" is not a string.' % value)
+
+ if not value:
+ return '""'
+
+ no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
+ need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
+ hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
+ check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
+
+ if check_for_single:
+ if not self.list_values:
+ # we don't quote if ``list_values=False``
+ quot = noquot
+ # for normal values either single or double quotes will do
+ elif '\n' in value:
+ # will only happen if multiline is off - e.g. '\n' in key
+ raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
+ elif ((value[0] not in wspace_plus) and
+ (value[-1] not in wspace_plus) and
+ (',' not in value)):
+ quot = noquot
+ else:
+ quot = self._get_single_quote(value)
+ else:
+ # if value has '\n' or "'" *and* '"', it will need triple quotes
+ quot = self._get_triple_quote(value)
+
+ if quot == noquot and '#' in value and self.list_values:
+ quot = self._get_single_quote(value)
+
+ return quot % value
+
+
+ def _get_single_quote(self, value):
+ if ("'" in value) and ('"' in value):
+ raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
+ elif '"' in value:
+ quot = squot
+ else:
+ quot = dquot
+ return quot
+
+
+ def _get_triple_quote(self, value):
+ if (value.find('"""') != -1) and (value.find("'''") != -1):
+ raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
+ if value.find('"""') == -1:
+ quot = tdquot
+ else:
+ quot = tsquot
+ return quot
+
+
+ def _handle_value(self, value):
+ """
+        Given a value string, unquote, remove comment,
+        and handle lists (including empty and single member lists).
+ """
+ if self._inspec:
+ # Parsing a configspec so don't handle comments
+ return (value, '')
+ # do we look for lists in values ?
+ if not self.list_values:
+ mat = self._nolistvalue.match(value)
+ if mat is None:
+ raise SyntaxError()
+ # NOTE: we don't unquote here
+ return mat.groups()
+ #
+ mat = self._valueexp.match(value)
+ if mat is None:
+ # the value is badly constructed, probably badly quoted,
+ # or an invalid list
+ raise SyntaxError()
+ (list_values, single, empty_list, comment) = mat.groups()
+ if (list_values == '') and (single is None):
+ # change this if you want to accept empty values
+ raise SyntaxError()
+        # NOTE: there is no error handling from here if the regex
+        # is wrong: then incorrect values will slip through
+ if empty_list is not None:
+ # the single comma - meaning an empty list
+ return ([], comment)
+ if single is not None:
+ # handle empty values
+ if list_values and not single:
+ # FIXME: the '' is a workaround because our regex now matches
+ # '' at the end of a list if it has a trailing comma
+ single = None
+ else:
+ single = single or '""'
+ single = self._unquote(single)
+ if list_values == '':
+ # not a list value
+ return (single, comment)
+ the_list = self._listvalueexp.findall(list_values)
+ the_list = [self._unquote(val) for val in the_list]
+ if single is not None:
+ the_list += [single]
+ return (the_list, comment)
+
+
+ def _multiline(self, value, infile, cur_index, maxline):
+ """Extract the value, where we are in a multiline situation."""
+ quot = value[:3]
+ newvalue = value[3:]
+ single_line = self._triple_quote[quot][0]
+ multi_line = self._triple_quote[quot][1]
+ mat = single_line.match(value)
+ if mat is not None:
+ retval = list(mat.groups())
+ retval.append(cur_index)
+ return retval
+ elif newvalue.find(quot) != -1:
+ # somehow the triple quote is missing
+ raise SyntaxError()
+ #
+ while cur_index < maxline:
+ cur_index += 1
+ newvalue += '\n'
+ line = infile[cur_index]
+ if line.find(quot) == -1:
+ newvalue += line
+ else:
+ # end of multiline, process it
+ break
+ else:
+ # we've got to the end of the config, oops...
+ raise SyntaxError()
+ mat = multi_line.match(line)
+ if mat is None:
+ # a badly formed line
+ raise SyntaxError()
+ (value, comment) = mat.groups()
+ return (newvalue + value, comment, cur_index)
+
+
+ def _handle_configspec(self, configspec):
+ """Parse the configspec."""
+ # FIXME: Should we check that the configspec was created with the
+ # correct settings ? (i.e. ``list_values=False``)
+ if not isinstance(configspec, ConfigObj):
+ try:
+ configspec = ConfigObj(configspec,
+ raise_errors=True,
+ file_error=True,
+ _inspec=True)
+ except ConfigObjError, e:
+ # FIXME: Should these errors have a reference
+ # to the already parsed ConfigObj ?
+ raise ConfigspecError('Parsing configspec failed: %s' % e)
+ except IOError, e:
+ raise IOError('Reading configspec failed: %s' % e)
+
+ self.configspec = configspec
+
+
+
+ def _set_configspec(self, section, copy):
+ """
+ Called by validate. Handles setting the configspec on subsections
+ including sections to be validated by __many__
+ """
+ configspec = section.configspec
+ many = configspec.get('__many__')
+ if isinstance(many, dict):
+ for entry in section.sections:
+ if entry not in configspec:
+ section[entry].configspec = many
+
+ for entry in configspec.sections:
+ if entry == '__many__':
+ continue
+ if entry not in section:
+ section[entry] = {}
+ section[entry]._created = True
+ if copy:
+ # copy comments
+ section.comments[entry] = configspec.comments.get(entry, [])
+ section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
+
+ # Could be a scalar when we expect a section
+ if isinstance(section[entry], Section):
+ section[entry].configspec = configspec[entry]
+
+
+ def _write_line(self, indent_string, entry, this_entry, comment):
+ """Write an individual line, for the write method"""
+        # NOTE: the calls to self._quote here handle non-StringType values.
+ if not self.unrepr:
+ val = self._decode_element(self._quote(this_entry))
+ else:
+ val = repr(this_entry)
+ return '%s%s%s%s%s' % (indent_string,
+ self._decode_element(self._quote(entry, multiline=False)),
+ self._a_to_u(' = '),
+ val,
+ self._decode_element(comment))
+
+
+ def _write_marker(self, indent_string, depth, entry, comment):
+ """Write a section marker line"""
+ return '%s%s%s%s%s' % (indent_string,
+ self._a_to_u('[' * depth),
+ self._quote(self._decode_element(entry), multiline=False),
+ self._a_to_u(']' * depth),
+ self._decode_element(comment))
+
+
+ def _handle_comment(self, comment):
+ """Deal with a comment."""
+ if not comment:
+ return ''
+ start = self.indent_type
+ if not comment.startswith('#'):
+ start += self._a_to_u(' # ')
+ return (start + comment)
+
+
+ # Public methods
+
+ def write(self, outfile=None, section=None):
+ """
+ Write the current ConfigObj as a file
+
+ tekNico: FIXME: use StringIO instead of real files
+
+ >>> filename = a.filename
+ >>> a.filename = 'test.ini'
+ >>> a.write()
+ >>> a.filename = filename
+ >>> a == ConfigObj('test.ini', raise_errors=True)
+ 1
+ >>> import os
+ >>> os.remove('test.ini')
+ """
+ if self.indent_type is None:
+ # this can be true if initialised from a dictionary
+ self.indent_type = DEFAULT_INDENT_TYPE
+
+ out = []
+ cs = self._a_to_u('#')
+ csp = self._a_to_u('# ')
+ if section is None:
+ int_val = self.interpolation
+ self.interpolation = False
+ section = self
+ for line in self.initial_comment:
+ line = self._decode_element(line)
+ stripped_line = line.strip()
+ if stripped_line and not stripped_line.startswith(cs):
+ line = csp + line
+ out.append(line)
+
+ indent_string = self.indent_type * section.depth
+ for entry in (section.scalars + section.sections):
+ if entry in section.defaults:
+ # don't write out default values
+ continue
+ for comment_line in section.comments[entry]:
+ comment_line = self._decode_element(comment_line.lstrip())
+ if comment_line and not comment_line.startswith(cs):
+ comment_line = csp + comment_line
+ out.append(indent_string + comment_line)
+ this_entry = section[entry]
+ comment = self._handle_comment(section.inline_comments[entry])
+
+ if isinstance(this_entry, dict):
+ # a section
+ out.append(self._write_marker(
+ indent_string,
+ this_entry.depth,
+ entry,
+ comment))
+ out.extend(self.write(section=this_entry))
+ else:
+ out.append(self._write_line(
+ indent_string,
+ entry,
+ this_entry,
+ comment))
+
+ if section is self:
+ for line in self.final_comment:
+ line = self._decode_element(line)
+ stripped_line = line.strip()
+ if stripped_line and not stripped_line.startswith(cs):
+ line = csp + line
+ out.append(line)
+ self.interpolation = int_val
+
+ if section is not self:
+ return out
+
+ if (self.filename is None) and (outfile is None):
+ # output a list of lines
+ # might need to encode
+ # NOTE: This will *screw* UTF16, each line will start with the BOM
+ if self.encoding:
+ out = [l.encode(self.encoding) for l in out]
+ if (self.BOM and ((self.encoding is None) or
+ (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
+ # Add the UTF8 BOM
+ if not out:
+ out.append('')
+ out[0] = BOM_UTF8 + out[0]
+ return out
+
+ # Turn the list to a string, joined with correct newlines
+ newline = self.newlines or os.linesep
+ if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
+ and sys.platform == 'win32' and newline == '\r\n'):
+ # Windows specific hack to avoid writing '\r\r\n'
+ newline = '\n'
+ output = self._a_to_u(newline).join(out)
+ if self.encoding:
+ output = output.encode(self.encoding)
+ if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
+ # Add the UTF8 BOM
+ output = BOM_UTF8 + output
+
+ if not output.endswith(newline):
+ output += newline
+ if outfile is not None:
+ outfile.write(output)
+ else:
+ h = open(self.filename, 'wb')
+ h.write(output)
+ h.close()
+
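+    # A short sketch: with no ``filename`` and no ``outfile``, ``write``
+    # returns the output as a list of lines; any object with a ``write``
+    # method (e.g. a StringIO) can be passed as ``outfile``:
+    #
+    #     >>> c = ConfigObj()
+    #     >>> c['key'] = 'value'
+    #     >>> c.write()
+    #     ['key = value']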
+
+ def validate(self, validator, preserve_errors=False, copy=False,
+ section=None):
+ """
+ Test the ConfigObj against a configspec.
+
+ It uses the ``validator`` object from *validate.py*.
+
+ To run ``validate`` on the current ConfigObj, call: ::
+
+ test = config.validate(validator)
+
+        (Normally the configspec is passed in when the ConfigObj is created -
+        you can also dynamically assign a dictionary of checks to the
+        ``configspec`` attribute of a section.)
+
+ It returns ``True`` if everything passes, or a dictionary of
+ pass/fails (True/False). If every member of a subsection passes, it
+ will just have the value ``True``. (It also returns ``False`` if all
+ members fail).
+
+ In addition, it converts the values from strings to their native
+ types if their checks pass (and ``stringify`` is set).
+
+ If ``preserve_errors`` is ``True`` (``False`` is default) then instead
+        of marking a fail with a ``False``, it will preserve the actual
+ exception object. This can contain info about the reason for failure.
+ For example the ``VdtValueTooSmallError`` indicates that the value
+ supplied was too small. If a value (or section) is missing it will
+ still be marked as ``False``.
+
+ You must have the validate module to use ``preserve_errors=True``.
+
+ You can then use the ``flatten_errors`` function to turn your nested
+ results dictionary into a flattened list of failures - useful for
+ displaying meaningful error messages.
+ """
+ if section is None:
+ if self.configspec is None:
+ raise ValueError('No configspec supplied.')
+ if preserve_errors:
+                # We do this once to remove a top level dependency on the validate module,
+                # which makes importing configobj faster
+ from validate import VdtMissingValue
+ self._vdtMissingValue = VdtMissingValue
+
+ section = self
+
+ if copy:
+ section.initial_comment = section.configspec.initial_comment
+ section.final_comment = section.configspec.final_comment
+ section.encoding = section.configspec.encoding
+ section.BOM = section.configspec.BOM
+ section.newlines = section.configspec.newlines
+ section.indent_type = section.configspec.indent_type
+
+ #
+ # section.default_values.clear() #??
+ configspec = section.configspec
+ self._set_configspec(section, copy)
+
+
+ def validate_entry(entry, spec, val, missing, ret_true, ret_false):
+ section.default_values.pop(entry, None)
+
+ try:
+ section.default_values[entry] = validator.get_default_value(configspec[entry])
+ except (KeyError, AttributeError, validator.baseErrorClass):
+ # No default, bad default or validator has no 'get_default_value'
+ # (e.g. SimpleVal)
+ pass
+
+ try:
+ check = validator.check(spec,
+ val,
+ missing=missing
+ )
+ except validator.baseErrorClass, e:
+ if not preserve_errors or isinstance(e, self._vdtMissingValue):
+ out[entry] = False
+ else:
+ # preserve the error
+ out[entry] = e
+ ret_false = False
+ ret_true = False
+ else:
+ ret_false = False
+ out[entry] = True
+ if self.stringify or missing:
+ # if we are doing type conversion
+ # or the value is a supplied default
+ if not self.stringify:
+ if isinstance(check, (list, tuple)):
+ # preserve lists
+ check = [self._str(item) for item in check]
+ elif missing and check is None:
+ # convert the None from a default to a ''
+ check = ''
+ else:
+ check = self._str(check)
+ if (check != val) or missing:
+ section[entry] = check
+ if not copy and missing and entry not in section.defaults:
+ section.defaults.append(entry)
+ return ret_true, ret_false
+
+ #
+ out = {}
+ ret_true = True
+ ret_false = True
+
+ unvalidated = [k for k in section.scalars if k not in configspec]
+ incorrect_sections = [k for k in configspec.sections if k in section.scalars]
+ incorrect_scalars = [k for k in configspec.scalars if k in section.sections]
+
+ for entry in configspec.scalars:
+ if entry in ('__many__', '___many___'):
+ # reserved names
+ continue
+ if (not entry in section.scalars) or (entry in section.defaults):
+ # missing entries
+ # or entries from defaults
+ missing = True
+ val = None
+ if copy and entry not in section.scalars:
+ # copy comments
+ section.comments[entry] = (
+ configspec.comments.get(entry, []))
+ section.inline_comments[entry] = (
+ configspec.inline_comments.get(entry, ''))
+ #
+ else:
+ missing = False
+ val = section[entry]
+
+ ret_true, ret_false = validate_entry(entry, configspec[entry], val,
+ missing, ret_true, ret_false)
+
+ many = None
+ if '__many__' in configspec.scalars:
+ many = configspec['__many__']
+ elif '___many___' in configspec.scalars:
+ many = configspec['___many___']
+
+ if many is not None:
+ for entry in unvalidated:
+ val = section[entry]
+ ret_true, ret_false = validate_entry(entry, many, val, False,
+ ret_true, ret_false)
+ unvalidated = []
+
+ for entry in incorrect_scalars:
+ ret_true = False
+ if not preserve_errors:
+ out[entry] = False
+ else:
+ ret_false = False
+ msg = 'Value %r was provided as a section' % entry
+ out[entry] = validator.baseErrorClass(msg)
+ for entry in incorrect_sections:
+ ret_true = False
+ if not preserve_errors:
+ out[entry] = False
+ else:
+ ret_false = False
+ msg = 'Section %r was provided as a single value' % entry
+ out[entry] = validator.baseErrorClass(msg)
+
+ # Missing sections will have been created as empty ones when the
+ # configspec was read.
+ for entry in section.sections:
+ # FIXME: this means DEFAULT is not copied in copy mode
+ if section is self and entry == 'DEFAULT':
+ continue
+ if section[entry].configspec is None:
+ unvalidated.append(entry)
+ continue
+ if copy:
+ section.comments[entry] = configspec.comments.get(entry, [])
+ section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
+ check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
+ out[entry] = check
+ if check == False:
+ ret_true = False
+ elif check == True:
+ ret_false = False
+ else:
+ ret_true = False
+
+ section.extra_values = unvalidated
+ if preserve_errors and not section._created:
+ # If the section wasn't created (i.e. it wasn't missing)
+ # then we can't return False, we need to preserve errors
+ ret_false = False
+ #
+ if ret_false and preserve_errors and out:
+ # If we are preserving errors, but all
+ # the failures are from missing sections / values
+ # then we can return False. Otherwise there is a
+ # real failure that we need to preserve.
+ ret_false = not any(out.values())
+ if ret_true:
+ return True
+ elif ret_false:
+ return False
+ return out
+
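+    # A minimal validation sketch (assumes validate.py is importable; the
+    # spec and values are illustrative):
+    #
+    #     >>> from validate import Validator
+    #     >>> spec = ['port = integer(0, 65535)']
+    #     >>> c = ConfigObj(['port = 8080'], configspec=spec)
+    #     >>> c.validate(Validator())
+    #     True
+    #     >>> c['port']    # converted to an int because stringify is on
+    #     8080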
+
+ def reset(self):
+ """Clear ConfigObj instance and restore to 'freshly created' state."""
+ self.clear()
+ self._initialise()
+ # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
+ # requires an empty dictionary
+ self.configspec = None
+ # Just to be sure ;-)
+ self._original_configspec = None
+
+
+ def reload(self):
+ """
+ Reload a ConfigObj from file.
+
+ This method raises a ``ReloadError`` if the ConfigObj doesn't have
+ a filename attribute pointing to a file.
+ """
+ if not isinstance(self.filename, basestring):
+ raise ReloadError()
+
+ filename = self.filename
+ current_options = {}
+ for entry in OPTION_DEFAULTS:
+ if entry == 'configspec':
+ continue
+ current_options[entry] = getattr(self, entry)
+
+ configspec = self._original_configspec
+ current_options['configspec'] = configspec
+
+ self.clear()
+ self._initialise(current_options)
+ self._load(filename, configspec)
+
+
+
+class SimpleVal(object):
+ """
+ A simple validator.
+    Can be used to check that all expected members are present.
+
+ To use it, provide a configspec with all your members in (the value given
+ will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
+ method of your ``ConfigObj``. ``validate`` will return ``True`` if all
+ members are present, or a dictionary with True/False meaning
+ present/missing. (Whole missing sections will be replaced with ``False``)
+ """
+
+ def __init__(self):
+ self.baseErrorClass = ConfigObjError
+
+ def check(self, check, member, missing=False):
+ """A dummy check method, always returns the value unchanged."""
+ if missing:
+ raise self.baseErrorClass()
+ return member
+
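+# A short SimpleVal sketch (key names are illustrative) - the configspec
+# values are ignored, only the presence of the keys is checked:
+#
+#     >>> spec = ['host = ignored', 'port = ignored']
+#     >>> c = ConfigObj(['host = example.com'], configspec=spec)
+#     >>> result = c.validate(SimpleVal())
+#     >>> result['port']
+#     False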
+
+def flatten_errors(cfg, res, levels=None, results=None):
+ """
+ An example function that will turn a nested dictionary of results
+ (as returned by ``ConfigObj.validate``) into a flat list.
+
+ ``cfg`` is the ConfigObj instance being checked, ``res`` is the results
+ dictionary returned by ``validate``.
+
+ (This is a recursive function, so you shouldn't use the ``levels`` or
+ ``results`` arguments - they are used by the function.)
+
+ Returns a list of keys that failed. Each member of the list is a tuple::
+
+ ([list of sections...], key, result)
+
+ If ``validate`` was called with ``preserve_errors=False`` (the default)
+ then ``result`` will always be ``False``.
+
+ *list of sections* is a flattened list of sections that the key was found
+ in.
+
+ If the section was missing (or a section was expected and a scalar provided
+ - or vice-versa) then key will be ``None``.
+
+ If the value (or section) was missing then ``result`` will be ``False``.
+
+ If ``validate`` was called with ``preserve_errors=True`` and a value
+ was present, but failed the check, then ``result`` will be the exception
+ object returned. You can use this as a string that describes the failure.
+
+ For example *The value "3" is of the wrong type*.
+ """
+ if levels is None:
+ # first time called
+ levels = []
+ results = []
+ if res == True:
+ return results
+ if res == False or isinstance(res, Exception):
+ results.append((levels[:], None, res))
+ if levels:
+ levels.pop()
+ return results
+ for (key, val) in res.items():
+ if val == True:
+ continue
+ if isinstance(cfg.get(key), dict):
+ # Go down one level
+ levels.append(key)
+ flatten_errors(cfg[key], val, levels, results)
+ continue
+ results.append((levels[:], key, val))
+ #
+ # Go up one level
+ if levels:
+ levels.pop()
+ #
+ return results
+
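+# A brief sketch of iterating over ``flatten_errors`` output (``config`` and
+# ``validator`` are assumed to exist; the printed line is illustrative):
+#
+#     >>> res = config.validate(validator, preserve_errors=True)
+#     >>> for sections, key, error in flatten_errors(config, res):
+#     ...     print sections, key, error
+#     ['section'] port the value "-1" is too small.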
+
+def get_extra_values(conf, _prepend=()):
+ """
+ Find all the values and sections not in the configspec from a validated
+ ConfigObj.
+
+ ``get_extra_values`` returns a list of tuples where each tuple represents
+ either an extra section, or an extra value.
+
+ The tuples contain two values, a tuple representing the section the value
+    is in and the name of the extra value. For extra values in the top level
+ section the first member will be an empty tuple. For values in the 'foo'
+ section the first member will be ``('foo',)``. For members in the 'bar'
+ subsection of the 'foo' section the first member will be ``('foo', 'bar')``.
+
+ NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
+ been validated it will return an empty list.
+ """
+ out = []
+
+ out.extend([(_prepend, name) for name in conf.extra_values])
+ for name in conf.sections:
+ if name not in conf.extra_values:
+ out.extend(get_extra_values(conf[name], _prepend + (name,)))
+ return out
+
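+# A minimal sketch (assumes ``from validate import Validator``; the keys are
+# illustrative) - extra entries are reported with their section path:
+#
+#     >>> c = ConfigObj(['a = 1', 'extra = 2'], configspec=['a = integer'])
+#     >>> c.validate(Validator())
+#     True
+#     >>> get_extra_values(c)
+#     [((), 'extra')]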
+
+"""*A programming language is a medium of expression.* - Paul Graham"""
diff --git a/python/configobj/setup.py b/python/configobj/setup.py
new file mode 100644
index 000000000..63d70cc0c
--- /dev/null
+++ b/python/configobj/setup.py
@@ -0,0 +1,83 @@
+# setup.py
+# Install script for ConfigObj
+# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# mark AT la-la DOT com
+# nico AT tekNico DOT net
+
+# This software is licensed under the terms of the BSD license.
+# http://www.voidspace.org.uk/python/license.shtml
+
+import sys
+from distutils.core import setup
+from configobj import __version__ as VERSION
+
+NAME = 'configobj'
+
+MODULES = 'configobj', 'validate'
+
+DESCRIPTION = 'Config file reading, writing and validation.'
+
+URL = 'http://www.voidspace.org.uk/python/configobj.html'
+
+DOWNLOAD_URL = "http://www.voidspace.org.uk/downloads/configobj-%s.zip" % VERSION
+
+LONG_DESCRIPTION = """**ConfigObj** is a simple but powerful config file reader and writer: an *ini
+file round tripper*. Its main feature is that it is very easy to use, with a
+straightforward programmer's interface and a simple syntax for config files.
+It has lots of other features though :
+
+* Nested sections (subsections), to any level
+* List values
+* Multiple line values
+* Full Unicode support
+* String interpolation (substitution)
+* Integrated with a powerful validation system
+
+ - including automatic type checking/conversion
+ - and allowing default values
+ - repeated sections
+
+* All comments in the file are preserved
+* The order of keys/sections is preserved
+* Powerful ``unrepr`` mode for storing/retrieving Python data-types
+
+| Release 4.7.2 fixes several bugs in 4.7.1
+| Release 4.7.1 fixes a bug with the deprecated options keyword in
+| 4.7.0.
+| Release 4.7.0 improves performance adds features for validation and
+| fixes some bugs."""
+
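+# An illustrative taste of the syntax ConfigObj reads (not used by setup):
+#
+#     key = value
+#     list = one, two, three
+#     [section]
+#     inline = value # comment
+#     [[subsection]]
+#     nested = value
+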
+CLASSIFIERS = [
+ 'Development Status :: 6 - Mature',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.3',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Operating System :: OS Independent',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+]
+
+AUTHOR = 'Michael Foord & Nicola Larosa'
+
+AUTHOR_EMAIL = 'fuzzyman@voidspace.org.uk'
+
+KEYWORDS = "config, ini, dictionary, application, admin, sysadmin, configuration, validation".split(', ')
+
+
+setup(name=NAME,
+ version=VERSION,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ download_url=DOWNLOAD_URL,
+ author=AUTHOR,
+ author_email=AUTHOR_EMAIL,
+ url=URL,
+ py_modules=MODULES,
+ classifiers=CLASSIFIERS,
+ keywords=KEYWORDS
+ )
diff --git a/python/configobj/validate.py b/python/configobj/validate.py
new file mode 100644
index 000000000..73dbdb891
--- /dev/null
+++ b/python/configobj/validate.py
@@ -0,0 +1,1450 @@
+# validate.py
+# A Validator object
+# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# mark AT la-la DOT com
+# nico AT tekNico DOT net
+
+# This software is licensed under the terms of the BSD license.
+# http://www.voidspace.org.uk/python/license.shtml
+# Basically you're free to copy, modify, distribute and relicense it,
+# So long as you keep a copy of the license with it.
+
+# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
+# For information about bugfixes, updates and support, please join the
+# ConfigObj mailing list:
+# http://lists.sourceforge.net/lists/listinfo/configobj-develop
+# Comments, suggestions and bug reports welcome.
+
+"""
+ The Validator object is used to check that supplied values
+ conform to a specification.
+
+ The value can be supplied as a string - e.g. from a config file.
+ In this case the check will also *convert* the value to
+ the required type. This allows you to add validation
+ as a transparent layer to access data stored as strings.
+ The validation checks that the data is correct *and*
+ converts it to the expected type.
+
+ Some standard checks are provided for basic data types.
+ Additional checks are easy to write. They can be
+ provided when the ``Validator`` is instantiated or
+ added afterwards.
+
+ The standard functions work with the following basic data types :
+
+ * integers
+ * floats
+ * booleans
+ * strings
+ * ip_addr
+
+ plus lists of these datatypes
+
+ Adding additional checks is done through coding simple functions.
+
+ The full set of standard checks are :
+
+ * 'integer': matches integer values (including negative)
+ Takes optional 'min' and 'max' arguments : ::
+
+ integer()
+ integer(3, 9) # any value from 3 to 9
+ integer(min=0) # any positive value
+ integer(max=9)
+
+ * 'float': matches float values
+ Has the same parameters as the integer check.
+
+ * 'boolean': matches boolean values - ``True`` or ``False``
+ Acceptable string values for True are :
+ true, on, yes, 1
+ Acceptable string values for False are :
+ false, off, no, 0
+
+ Any other value raises an error.
+
+ * 'ip_addr': matches an Internet Protocol address, v.4, represented
+ by a dotted-quad string, i.e. '1.2.3.4'.
+
+ * 'string': matches any string.
+ Takes optional keyword args 'min' and 'max'
+ to specify min and max lengths of the string.
+
+ * 'list': matches any list.
+ Takes optional keyword args 'min', and 'max' to specify min and
+ max sizes of the list. (Always returns a list.)
+
+ * 'tuple': matches any tuple.
+ Takes optional keyword args 'min', and 'max' to specify min and
+ max sizes of the tuple. (Always returns a tuple.)
+
+ * 'int_list': Matches a list of integers.
+ Takes the same arguments as list.
+
+ * 'float_list': Matches a list of floats.
+ Takes the same arguments as list.
+
+ * 'bool_list': Matches a list of boolean values.
+ Takes the same arguments as list.
+
+ * 'ip_addr_list': Matches a list of IP addresses.
+ Takes the same arguments as list.
+
+ * 'string_list': Matches a list of strings.
+ Takes the same arguments as list.
+
+ * 'mixed_list': Matches a list with different types in
+ specific positions. List size must match
+ the number of arguments.
+
+ Each position can be one of :
+ 'integer', 'float', 'ip_addr', 'string', 'boolean'
+
+ So to specify a list with two strings followed
+ by two integers, you write the check as : ::
+
+ mixed_list('string', 'string', 'integer', 'integer')
+
+ * 'pass': This check matches everything ! It never fails
+ and the value is unchanged.
+
+ It is also the default if no check is specified.
+
+ * 'option': This check matches any from a list of options.
+ You specify this check with : ::
+
+ option('option 1', 'option 2', 'option 3')
+
+ You can supply a default value (returned if no value is supplied)
+ using the default keyword argument.
+
+ You specify a list argument for default using a list constructor syntax in
+ the check : ::
+
+ checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3'))
+
+ A badly formatted set of arguments will raise a ``VdtParamError``.
+"""
+
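+# A short usage sketch of the checks documented above (values illustrative):
+#
+#     >>> vtor = Validator()
+#     >>> vtor.check('integer(min=0)', '9')
+#     9
+#     >>> vtor.check('bool_list', ['on', 'off'])
+#     [True, False]
+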
+__version__ = '1.0.1'
+
+
+__all__ = (
+ '__version__',
+ 'dottedQuadToNum',
+ 'numToDottedQuad',
+ 'ValidateError',
+ 'VdtUnknownCheckError',
+ 'VdtParamError',
+ 'VdtTypeError',
+ 'VdtValueError',
+ 'VdtValueTooSmallError',
+ 'VdtValueTooBigError',
+ 'VdtValueTooShortError',
+ 'VdtValueTooLongError',
+ 'VdtMissingValue',
+ 'Validator',
+ 'is_integer',
+ 'is_float',
+ 'is_boolean',
+ 'is_list',
+ 'is_tuple',
+ 'is_ip_addr',
+ 'is_string',
+ 'is_int_list',
+ 'is_bool_list',
+ 'is_float_list',
+ 'is_string_list',
+ 'is_ip_addr_list',
+ 'is_mixed_list',
+ 'is_option',
+ '__docformat__',
+)
+
+
+import re
+
+
+_list_arg = re.compile(r'''
+ (?:
+ ([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
+ (
+ (?:
+ \s*
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s\)][^,\)]*?) # unquoted
+ )
+ \s*,\s*
+ )*
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s\)][^,\)]*?) # unquoted
+ )? # last one
+ )
+ \)
+ )
+''', re.VERBOSE | re.DOTALL) # two groups
+
+_list_members = re.compile(r'''
+ (
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s=][^,=]*?) # unquoted
+ )
+ (?:
+ (?:\s*,\s*)|(?:\s*$) # comma
+ )
+''', re.VERBOSE | re.DOTALL) # one group
+
+_paramstring = r'''
+ (?:
+ (
+ (?:
+ [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
+ (?:
+ \s*
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s\)][^,\)]*?) # unquoted
+ )
+ \s*,\s*
+ )*
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s\)][^,\)]*?) # unquoted
+ )? # last one
+ \)
+ )|
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s=][^,=]*?)| # unquoted
+ (?: # keyword argument
+ [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
+ (?:
+ (?:".*?")| # double quotes
+ (?:'.*?')| # single quotes
+ (?:[^'",\s=][^,=]*?) # unquoted
+ )
+ )
+ )
+ )
+ (?:
+ (?:\s*,\s*)|(?:\s*$) # comma
+ )
+ )
+ '''
+
+_matchstring = '^%s*' % _paramstring
+
+# Python pre 2.2.1 doesn't have bool
+try:
+ bool
+except NameError:
+ def bool(val):
+ """Simple boolean equivalent function. """
+ if val:
+ return 1
+ else:
+ return 0
+
+
+def dottedQuadToNum(ip):
+ """
+ Convert decimal dotted quad string to long integer
+
+ >>> int(dottedQuadToNum('1 '))
+ 1
+ >>> int(dottedQuadToNum(' 1.2'))
+ 16777218
+ >>> int(dottedQuadToNum(' 1.2.3 '))
+ 16908291
+ >>> int(dottedQuadToNum('1.2.3.4'))
+ 16909060
+ >>> dottedQuadToNum('255.255.255.255')
+ 4294967295L
+ >>> dottedQuadToNum('255.255.255.256')
+ Traceback (most recent call last):
+ ValueError: Not a good dotted-quad IP: 255.255.255.256
+ """
+
+ # import here to avoid it when ip_addr values are not used
+ import socket, struct
+
+ try:
+ return struct.unpack('!L',
+ socket.inet_aton(ip.strip()))[0]
+ except socket.error:
+ # bug in inet_aton, corrected in Python 2.4
+ if ip.strip() == '255.255.255.255':
+ return 0xFFFFFFFFL
+ else:
+ raise ValueError('Not a good dotted-quad IP: %s' % ip)
+
+
+def numToDottedQuad(num):
+ """
+ Convert long int to dotted quad string
+
+ >>> numToDottedQuad(-1L)
+ Traceback (most recent call last):
+ ValueError: Not a good numeric IP: -1
+ >>> numToDottedQuad(1L)
+ '0.0.0.1'
+ >>> numToDottedQuad(16777218L)
+ '1.0.0.2'
+ >>> numToDottedQuad(16908291L)
+ '1.2.0.3'
+ >>> numToDottedQuad(16909060L)
+ '1.2.3.4'
+ >>> numToDottedQuad(4294967295L)
+ '255.255.255.255'
+ >>> numToDottedQuad(4294967296L)
+ Traceback (most recent call last):
+ ValueError: Not a good numeric IP: 4294967296
+ """
+
+ # import here to avoid it when ip_addr values are not used
+ import socket, struct
+
+ # no need to intercept here, 4294967295L is fine
+ if num > 4294967295L or num < 0:
+ raise ValueError('Not a good numeric IP: %s' % num)
+ try:
+ return socket.inet_ntoa(
+ struct.pack('!L', long(num)))
+ except (socket.error, struct.error, OverflowError):
+ raise ValueError('Not a good numeric IP: %s' % num)
+
+
+class ValidateError(Exception):
+ """
+ This error indicates that the check failed.
+ It can be the base class for more specific errors.
+
+ Any check function that fails ought to raise this error.
+ (or a subclass)
+
+ >>> raise ValidateError
+ Traceback (most recent call last):
+ ValidateError
+ """
+
+
+class VdtMissingValue(ValidateError):
+ """No value was supplied to a check that needed one."""
+
+
+class VdtUnknownCheckError(ValidateError):
+ """An unknown check function was requested"""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtUnknownCheckError('yoda')
+ Traceback (most recent call last):
+ VdtUnknownCheckError: the check "yoda" is unknown.
+ """
+ ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
+
+
+class VdtParamError(SyntaxError):
+ """An incorrect parameter was passed"""
+
+ def __init__(self, name, value):
+ """
+ >>> raise VdtParamError('yoda', 'jedi')
+ Traceback (most recent call last):
+ VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
+ """
+ SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
+
+
+class VdtTypeError(ValidateError):
+ """The value supplied was of the wrong type"""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtTypeError('jedi')
+ Traceback (most recent call last):
+ VdtTypeError: the value "jedi" is of the wrong type.
+ """
+ ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
+
+
+class VdtValueError(ValidateError):
+ """The value supplied was of the correct type, but was not an allowed value."""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtValueError('jedi')
+ Traceback (most recent call last):
+ VdtValueError: the value "jedi" is unacceptable.
+ """
+ ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
+
+
+class VdtValueTooSmallError(VdtValueError):
+ """The value supplied was of the correct type, but was too small."""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtValueTooSmallError('0')
+ Traceback (most recent call last):
+ VdtValueTooSmallError: the value "0" is too small.
+ """
+ ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
+
+
+class VdtValueTooBigError(VdtValueError):
+ """The value supplied was of the correct type, but was too big."""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtValueTooBigError('1')
+ Traceback (most recent call last):
+ VdtValueTooBigError: the value "1" is too big.
+ """
+ ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
+
+
+class VdtValueTooShortError(VdtValueError):
+ """The value supplied was of the correct type, but was too short."""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtValueTooShortError('jed')
+ Traceback (most recent call last):
+ VdtValueTooShortError: the value "jed" is too short.
+ """
+ ValidateError.__init__(
+ self,
+ 'the value "%s" is too short.' % (value,))
+
+
+class VdtValueTooLongError(VdtValueError):
+ """The value supplied was of the correct type, but was too long."""
+
+ def __init__(self, value):
+ """
+ >>> raise VdtValueTooLongError('jedie')
+ Traceback (most recent call last):
+ VdtValueTooLongError: the value "jedie" is too long.
+ """
+ ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
+
+
+class Validator(object):
+ """
+ Validator is an object that allows you to register a set of 'checks'.
+ These checks take input and test that it conforms to the check.
+
+ This can also involve converting the value from a string into
+ the correct datatype.
+
+ The ``check`` method takes an input string which configures which
+ check is to be used and applies that check to a supplied value.
+
+ An example input string would be:
+ 'int_range(param1, param2)'
+
+ You would then provide something like:
+
+ >>> def int_range_check(value, min, max):
+ ... # turn min and max from strings to integers
+ ... min = int(min)
+ ... max = int(max)
+ ... # check that value is of the correct type.
+ ... # possible valid inputs are integers or strings
+ ... # that represent integers
+ ... if not isinstance(value, (int, long, basestring)):
+ ... raise VdtTypeError(value)
+ ... elif isinstance(value, basestring):
+ ... # if we are given a string
+ ... # attempt to convert to an integer
+ ... try:
+ ... value = int(value)
+ ... except ValueError:
+ ... raise VdtValueError(value)
+ ... # check the value is between our constraints
+ ... if not min <= value:
+ ... raise VdtValueTooSmallError(value)
+ ... if not value <= max:
+ ... raise VdtValueTooBigError(value)
+ ... return value
+
+ >>> fdict = {'int_range': int_range_check}
+ >>> vtr1 = Validator(fdict)
+ >>> vtr1.check('int_range(20, 40)', '30')
+ 30
+ >>> vtr1.check('int_range(20, 40)', '60')
+ Traceback (most recent call last):
+ VdtValueTooBigError: the value "60" is too big.
+
+ New functions can be added with : ::
+
+ >>> vtr2 = Validator()
+ >>> vtr2.functions['int_range'] = int_range_check
+
+ Or by passing in a dictionary of functions when Validator
+ is instantiated.
+
+ Your functions *can* use keyword arguments,
+ but the first argument should always be 'value'.
+
+ If the function doesn't take additional arguments,
+ the parentheses are optional in the check.
+ It can be written with either of : ::
+
+ keyword = function_name
+ keyword = function_name()
+
+ The first program to utilise Validator() was Michael Foord's
+ ConfigObj, an alternative to ConfigParser which supports lists and
+ can validate a config file using a config schema.
+ For more details on using Validator with ConfigObj see:
+ http://www.voidspace.org.uk/python/configobj.html
+ """
+
+ # this regex does the initial parsing of the checks
+ _func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
+
+ # this regex takes apart keyword arguments
+ _key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
+
+
+ # this regex finds keyword=list(....) type values
+ _list_arg = _list_arg
+
+ # this regex takes individual values out of lists - in one pass
+ _list_members = _list_members
+
+ # These regexes check a set of arguments for validity
+ # and then pull the members out
+ _paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
+ _matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
+
+
+ def __init__(self, functions=None):
+ """
+ >>> vtri = Validator()
+ """
+ self.functions = {
+ '': self._pass,
+ 'integer': is_integer,
+ 'float': is_float,
+ 'boolean': is_boolean,
+ 'ip_addr': is_ip_addr,
+ 'string': is_string,
+ 'list': is_list,
+ 'tuple': is_tuple,
+ 'int_list': is_int_list,
+ 'float_list': is_float_list,
+ 'bool_list': is_bool_list,
+ 'ip_addr_list': is_ip_addr_list,
+ 'string_list': is_string_list,
+ 'mixed_list': is_mixed_list,
+ 'pass': self._pass,
+ 'option': is_option,
+ 'force_list': force_list,
+ }
+ if functions is not None:
+ self.functions.update(functions)
+ # tekNico: for use by ConfigObj
+ self.baseErrorClass = ValidateError
+ self._cache = {}
+
+
+ def check(self, check, value, missing=False):
+ """
+ Usage: check(check, value)
+
+ Arguments:
+ check: string representing check to apply (including arguments)
+ value: object to be checked
+ Returns value, converted to correct type if necessary
+
+ If the check fails, raises a ``ValidateError`` subclass.
+
+ >>> vtor.check('yoda', '')
+ Traceback (most recent call last):
+ VdtUnknownCheckError: the check "yoda" is unknown.
+ >>> vtor.check('yoda()', '')
+ Traceback (most recent call last):
+ VdtUnknownCheckError: the check "yoda" is unknown.
+
+ >>> vtor.check('string(default="")', '', missing=True)
+ ''
+ """
+ fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
+
+ if missing:
+ if default is None:
+ # no information needed here - to be handled by caller
+ raise VdtMissingValue()
+ value = self._handle_none(default)
+
+ if value is None:
+ return None
+
+ return self._check_value(value, fun_name, fun_args, fun_kwargs)
+
+
+ def _handle_none(self, value):
+ if value == 'None':
+ return None
+ elif value in ("'None'", '"None"'):
+ # Special case a quoted None
+ value = self._unquote(value)
+ return value
+
+
+ def _parse_with_caching(self, check):
+ if check in self._cache:
+ fun_name, fun_args, fun_kwargs, default = self._cache[check]
+ # We call list and dict below to work with *copies* of the data
+ # rather than the original (which are mutable of course)
+ fun_args = list(fun_args)
+ fun_kwargs = dict(fun_kwargs)
+ else:
+ fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
+ fun_kwargs = dict([(str(key), value) for (key, value) in fun_kwargs.items()])
+ self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
+ return fun_name, fun_args, fun_kwargs, default
+
+
+ def _check_value(self, value, fun_name, fun_args, fun_kwargs):
+ try:
+ fun = self.functions[fun_name]
+ except KeyError:
+ raise VdtUnknownCheckError(fun_name)
+ else:
+ return fun(value, *fun_args, **fun_kwargs)
+
+
+ def _parse_check(self, check):
+ fun_match = self._func_re.match(check)
+ if fun_match:
+ fun_name = fun_match.group(1)
+ arg_string = fun_match.group(2)
+ arg_match = self._matchfinder.match(arg_string)
+ if arg_match is None:
+ # Bad syntax
+ raise VdtParamError('Bad syntax in check "%s".' % check)
+ fun_args = []
+ fun_kwargs = {}
+ # pull out args of group 2
+ for arg in self._paramfinder.findall(arg_string):
+ # args may need whitespace removing (before removing quotes)
+ arg = arg.strip()
+ listmatch = self._list_arg.match(arg)
+ if listmatch:
+ key, val = self._list_handle(listmatch)
+ fun_kwargs[key] = val
+ continue
+ keymatch = self._key_arg.match(arg)
+ if keymatch:
+ val = keymatch.group(2)
+ if val not in ("'None'", '"None"'):
+ # Special case a quoted None
+ val = self._unquote(val)
+ fun_kwargs[keymatch.group(1)] = val
+ continue
+
+ fun_args.append(self._unquote(arg))
+ else:
+ # allows for function names without (args)
+ return check, (), {}, None
+
+ # Default must be deleted if the value is specified too,
+ # otherwise the check function will get a spurious "default" keyword arg
+ default = fun_kwargs.pop('default', None)
+ return fun_name, fun_args, fun_kwargs, default
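+
+ # Worked example (cf. the _test doctests below): the check string
+ # 'test(3, 6, min=1)' parses here to
+ # ('test', ['3', '6'], {'min': '1'}, None) - argument values stay strings,
+ # and a 'default' keyword, when present, is popped into the fourth slot.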
+
+
+ def _unquote(self, val):
+ """Unquote a value if necessary."""
+ if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
+ val = val[1:-1]
+ return val
+
+
+ def _list_handle(self, listmatch):
+ """Take apart a ``keyword=list('val, 'val')`` type string."""
+ out = []
+ name = listmatch.group(1)
+ args = listmatch.group(2)
+ for arg in self._list_members.findall(args):
+ out.append(self._unquote(arg))
+ return name, out
+
+
+ def _pass(self, value):
+ """
+ Dummy check that always passes
+
+ >>> vtor.check('', 0)
+ 0
+ >>> vtor.check('', '0')
+ '0'
+ """
+ return value
+
+
+ def get_default_value(self, check):
+ """
+ Given a check, return the default value for the check
+ (converted to the right type).
+
+ If the check doesn't specify a default value then a
+ ``KeyError`` will be raised.
+ """
+ fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
+ if default is None:
+ raise KeyError('Check "%s" has no default value.' % check)
+ value = self._handle_none(default)
+ if value is None:
+ return value
+ return self._check_value(value, fun_name, fun_args, fun_kwargs)
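+
+ # e.g. get_default_value('integer(default=6)') returns 6 (an int) and
+ # get_default_value('pass') raises KeyError; see the _test doctests below.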
+
+
+def _is_num_param(names, values, to_float=False):
+ """
+ Return numbers from inputs or raise VdtParamError.
+
+ Lets ``None`` pass through.
+ Pass in keyword argument ``to_float=True`` to
+ use float for the conversion rather than int.
+
+ >>> _is_num_param(('', ''), (0, 1.0))
+ [0, 1]
+ >>> _is_num_param(('', ''), (0, 1.0), to_float=True)
+ [0.0, 1.0]
+ >>> _is_num_param(('a'), ('a'))
+ Traceback (most recent call last):
+ VdtParamError: passed an incorrect value "a" for parameter "a".
+ """
+ fun = to_float and float or int
+ out_params = []
+ for (name, val) in zip(names, values):
+ if val is None:
+ out_params.append(val)
+ elif isinstance(val, (int, long, float, basestring)):
+ try:
+ out_params.append(fun(val))
+ except ValueError, e:
+ raise VdtParamError(name, val)
+ else:
+ raise VdtParamError(name, val)
+ return out_params
+
+
+# built in checks
+# you can override these by setting the appropriate name
+# in Validator.functions
+# note: if the params are specified wrongly in your input string,
+# errors will also be raised.
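+#
+# e.g. (sketch, with a hypothetical replacement check):
+#
+#     vtor = Validator()
+#     vtor.functions['integer'] = my_custom_integer_check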
+
+def is_integer(value, min=None, max=None):
+ """
+ A check that tests that a given value is an integer (int or long)
+ and, optionally, that it is between bounds. A negative value is
+ accepted, while a float will fail.
+
+ If the value is a string, then the conversion is done - if possible.
+ Otherwise a VdtError is raised.
+
+ >>> vtor.check('integer', '-1')
+ -1
+ >>> vtor.check('integer', '0')
+ 0
+ >>> vtor.check('integer', 9)
+ 9
+ >>> vtor.check('integer', 'a')
+ Traceback (most recent call last):
+ VdtTypeError: the value "a" is of the wrong type.
+ >>> vtor.check('integer', '2.2')
+ Traceback (most recent call last):
+ VdtTypeError: the value "2.2" is of the wrong type.
+ >>> vtor.check('integer(10)', '20')
+ 20
+ >>> vtor.check('integer(max=20)', '15')
+ 15
+ >>> vtor.check('integer(10)', '9')
+ Traceback (most recent call last):
+ VdtValueTooSmallError: the value "9" is too small.
+ >>> vtor.check('integer(10)', 9)
+ Traceback (most recent call last):
+ VdtValueTooSmallError: the value "9" is too small.
+ >>> vtor.check('integer(max=20)', '35')
+ Traceback (most recent call last):
+ VdtValueTooBigError: the value "35" is too big.
+ >>> vtor.check('integer(max=20)', 35)
+ Traceback (most recent call last):
+ VdtValueTooBigError: the value "35" is too big.
+ >>> vtor.check('integer(0, 9)', False)
+ 0
+ """
+ (min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
+ if not isinstance(value, (int, long, basestring)):
+ raise VdtTypeError(value)
+ if isinstance(value, basestring):
+ # if it's a string - does it represent an integer?
+ try:
+ value = int(value)
+ except ValueError:
+ raise VdtTypeError(value)
+ if (min_val is not None) and (value < min_val):
+ raise VdtValueTooSmallError(value)
+ if (max_val is not None) and (value > max_val):
+ raise VdtValueTooBigError(value)
+ return value
+
+
+def is_float(value, min=None, max=None):
+ """
+ A check that tests that a given value is a float
+ (an integer will be accepted) and, optionally, that it is between bounds.
+
+ If the value is a string, then the conversion is done - if possible.
+ Otherwise a VdtError is raised.
+
+ This can accept negative values.
+
+ >>> vtor.check('float', '2')
+ 2.0
+
+ From now on we multiply the value by 10 to avoid comparing decimals
+
+ >>> vtor.check('float', '-6.8') * 10
+ -68.0
+ >>> vtor.check('float', '12.2') * 10
+ 122.0
+ >>> vtor.check('float', 8.4) * 10
+ 84.0
+ >>> vtor.check('float', 'a')
+ Traceback (most recent call last):
+ VdtTypeError: the value "a" is of the wrong type.
+ >>> vtor.check('float(10.1)', '10.2') * 10
+ 102.0
+ >>> vtor.check('float(max=20.2)', '15.1') * 10
+ 151.0
+ >>> vtor.check('float(10.0)', '9.0')
+ Traceback (most recent call last):
+ VdtValueTooSmallError: the value "9.0" is too small.
+ >>> vtor.check('float(max=20.0)', '35.0')
+ Traceback (most recent call last):
+ VdtValueTooBigError: the value "35.0" is too big.
+ """
+ (min_val, max_val) = _is_num_param(
+ ('min', 'max'), (min, max), to_float=True)
+ if not isinstance(value, (int, long, float, basestring)):
+ raise VdtTypeError(value)
+ if not isinstance(value, float):
+ # if it's a string - does it represent a float?
+ try:
+ value = float(value)
+ except ValueError:
+ raise VdtTypeError(value)
+ if (min_val is not None) and (value < min_val):
+ raise VdtValueTooSmallError(value)
+ if (max_val is not None) and (value > max_val):
+ raise VdtValueTooBigError(value)
+ return value
+
+
+bool_dict = {
+ True: True, 'on': True, '1': True, 'true': True, 'yes': True,
+ False: False, 'off': False, '0': False, 'false': False, 'no': False,
+}
+
+
+def is_boolean(value):
+ """
+ Check if the value represents a boolean.
+
+ >>> vtor.check('boolean', 0)
+ 0
+ >>> vtor.check('boolean', False)
+ 0
+ >>> vtor.check('boolean', '0')
+ 0
+ >>> vtor.check('boolean', 'off')
+ 0
+ >>> vtor.check('boolean', 'false')
+ 0
+ >>> vtor.check('boolean', 'no')
+ 0
+ >>> vtor.check('boolean', 'nO')
+ 0
+ >>> vtor.check('boolean', 'NO')
+ 0
+ >>> vtor.check('boolean', 1)
+ 1
+ >>> vtor.check('boolean', True)
+ 1
+ >>> vtor.check('boolean', '1')
+ 1
+ >>> vtor.check('boolean', 'on')
+ 1
+ >>> vtor.check('boolean', 'true')
+ 1
+ >>> vtor.check('boolean', 'yes')
+ 1
+ >>> vtor.check('boolean', 'Yes')
+ 1
+ >>> vtor.check('boolean', 'YES')
+ 1
+ >>> vtor.check('boolean', '')
+ Traceback (most recent call last):
+ VdtTypeError: the value "" is of the wrong type.
+ >>> vtor.check('boolean', 'up')
+ Traceback (most recent call last):
+ VdtTypeError: the value "up" is of the wrong type.
+
+ """
+ if isinstance(value, basestring):
+ try:
+ return bool_dict[value.lower()]
+ except KeyError:
+ raise VdtTypeError(value)
+ # we do an equality test rather than an identity test
+ # this ensures Python 2.2 compatibility
+ # and allows 0 and 1 to represent True and False
+ if value == False:
+ return False
+ elif value == True:
+ return True
+ else:
+ raise VdtTypeError(value)
+
+
+def is_ip_addr(value):
+ """
+ Check that the supplied value is an Internet Protocol address, v.4,
+ represented by a dotted-quad string, i.e. '1.2.3.4'.
+
+ >>> vtor.check('ip_addr', '1 ')
+ '1'
+ >>> vtor.check('ip_addr', ' 1.2')
+ '1.2'
+ >>> vtor.check('ip_addr', ' 1.2.3 ')
+ '1.2.3'
+ >>> vtor.check('ip_addr', '1.2.3.4')
+ '1.2.3.4'
+ >>> vtor.check('ip_addr', '0.0.0.0')
+ '0.0.0.0'
+ >>> vtor.check('ip_addr', '255.255.255.255')
+ '255.255.255.255'
+ >>> vtor.check('ip_addr', '255.255.255.256')
+ Traceback (most recent call last):
+ VdtValueError: the value "255.255.255.256" is unacceptable.
+ >>> vtor.check('ip_addr', '1.2.3.4.5')
+ Traceback (most recent call last):
+ VdtValueError: the value "1.2.3.4.5" is unacceptable.
+ >>> vtor.check('ip_addr', 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+ """
+ if not isinstance(value, basestring):
+ raise VdtTypeError(value)
+ value = value.strip()
+ try:
+ dottedQuadToNum(value)
+ except ValueError:
+ raise VdtValueError(value)
+ return value
+
+
+def is_list(value, min=None, max=None):
+ """
+ Check that the value is a list of values.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ It performs no check on individual list members.
+
+ >>> vtor.check('list', ())
+ []
+ >>> vtor.check('list', [])
+ []
+ >>> vtor.check('list', (1, 2))
+ [1, 2]
+ >>> vtor.check('list', [1, 2])
+ [1, 2]
+ >>> vtor.check('list(3)', (1, 2))
+ Traceback (most recent call last):
+ VdtValueTooShortError: the value "(1, 2)" is too short.
+ >>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
+ Traceback (most recent call last):
+ VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
+ >>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
+ [1, 2, 3, 4]
+ >>> vtor.check('list', 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+ >>> vtor.check('list', '12')
+ Traceback (most recent call last):
+ VdtTypeError: the value "12" is of the wrong type.
+ """
+ (min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
+ if isinstance(value, basestring):
+ raise VdtTypeError(value)
+ try:
+ num_members = len(value)
+ except TypeError:
+ raise VdtTypeError(value)
+ if min_len is not None and num_members < min_len:
+ raise VdtValueTooShortError(value)
+ if max_len is not None and num_members > max_len:
+ raise VdtValueTooLongError(value)
+ return list(value)
+
+
+def is_tuple(value, min=None, max=None):
+ """
+ Check that the value is a tuple of values.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ It performs no check on individual members.
+
+ >>> vtor.check('tuple', ())
+ ()
+ >>> vtor.check('tuple', [])
+ ()
+ >>> vtor.check('tuple', (1, 2))
+ (1, 2)
+ >>> vtor.check('tuple', [1, 2])
+ (1, 2)
+ >>> vtor.check('tuple(3)', (1, 2))
+ Traceback (most recent call last):
+ VdtValueTooShortError: the value "(1, 2)" is too short.
+ >>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
+ Traceback (most recent call last):
+ VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
+ >>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
+ (1, 2, 3, 4)
+ >>> vtor.check('tuple', 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+ >>> vtor.check('tuple', '12')
+ Traceback (most recent call last):
+ VdtTypeError: the value "12" is of the wrong type.
+ """
+ return tuple(is_list(value, min, max))
+
+
+def is_string(value, min=None, max=None):
+ """
+ Check that the supplied value is a string.
+
+ You can optionally specify the minimum and maximum length of the string.
+
+ >>> vtor.check('string', '0')
+ '0'
+ >>> vtor.check('string', 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+ >>> vtor.check('string(2)', '12')
+ '12'
+ >>> vtor.check('string(2)', '1')
+ Traceback (most recent call last):
+ VdtValueTooShortError: the value "1" is too short.
+ >>> vtor.check('string(min=2, max=3)', '123')
+ '123'
+ >>> vtor.check('string(min=2, max=3)', '1234')
+ Traceback (most recent call last):
+ VdtValueTooLongError: the value "1234" is too long.
+ """
+ if not isinstance(value, basestring):
+ raise VdtTypeError(value)
+ (min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
+ try:
+ num_members = len(value)
+ except TypeError:
+ raise VdtTypeError(value)
+ if min_len is not None and num_members < min_len:
+ raise VdtValueTooShortError(value)
+ if max_len is not None and num_members > max_len:
+ raise VdtValueTooLongError(value)
+ return value
+
+
+def is_int_list(value, min=None, max=None):
+ """
+ Check that the value is a list of integers.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ Each list member is checked that it is an integer.
+
+ >>> vtor.check('int_list', ())
+ []
+ >>> vtor.check('int_list', [])
+ []
+ >>> vtor.check('int_list', (1, 2))
+ [1, 2]
+ >>> vtor.check('int_list', [1, 2])
+ [1, 2]
+ >>> vtor.check('int_list', [1, 'a'])
+ Traceback (most recent call last):
+ VdtTypeError: the value "a" is of the wrong type.
+ """
+ return [is_integer(mem) for mem in is_list(value, min, max)]
+
+
+def is_bool_list(value, min=None, max=None):
+ """
+ Check that the value is a list of booleans.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ Each list member is checked that it is a boolean.
+
+ >>> vtor.check('bool_list', ())
+ []
+ >>> vtor.check('bool_list', [])
+ []
+ >>> check_res = vtor.check('bool_list', (True, False))
+ >>> check_res == [True, False]
+ 1
+ >>> check_res = vtor.check('bool_list', [True, False])
+ >>> check_res == [True, False]
+ 1
+ >>> vtor.check('bool_list', [True, 'a'])
+ Traceback (most recent call last):
+ VdtTypeError: the value "a" is of the wrong type.
+ """
+ return [is_boolean(mem) for mem in is_list(value, min, max)]
+
+
+def is_float_list(value, min=None, max=None):
+ """
+ Check that the value is a list of floats.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ Each list member is checked that it is a float.
+
+ >>> vtor.check('float_list', ())
+ []
+ >>> vtor.check('float_list', [])
+ []
+ >>> vtor.check('float_list', (1, 2.0))
+ [1.0, 2.0]
+ >>> vtor.check('float_list', [1, 2.0])
+ [1.0, 2.0]
+ >>> vtor.check('float_list', [1, 'a'])
+ Traceback (most recent call last):
+ VdtTypeError: the value "a" is of the wrong type.
+ """
+ return [is_float(mem) for mem in is_list(value, min, max)]
+
+
+def is_string_list(value, min=None, max=None):
+ """
+ Check that the value is a list of strings.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ Each list member is checked that it is a string.
+
+ >>> vtor.check('string_list', ())
+ []
+ >>> vtor.check('string_list', [])
+ []
+ >>> vtor.check('string_list', ('a', 'b'))
+ ['a', 'b']
+ >>> vtor.check('string_list', ['a', 1])
+ Traceback (most recent call last):
+ VdtTypeError: the value "1" is of the wrong type.
+ >>> vtor.check('string_list', 'hello')
+ Traceback (most recent call last):
+ VdtTypeError: the value "hello" is of the wrong type.
+ """
+ if isinstance(value, basestring):
+ raise VdtTypeError(value)
+ return [is_string(mem) for mem in is_list(value, min, max)]
+
+
+def is_ip_addr_list(value, min=None, max=None):
+ """
+ Check that the value is a list of IP addresses.
+
+ You can optionally specify the minimum and maximum number of members.
+
+ Each list member is checked that it is an IP address.
+
+ >>> vtor.check('ip_addr_list', ())
+ []
+ >>> vtor.check('ip_addr_list', [])
+ []
+ >>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
+ ['1.2.3.4', '5.6.7.8']
+ >>> vtor.check('ip_addr_list', ['a'])
+ Traceback (most recent call last):
+ VdtValueError: the value "a" is unacceptable.
+ """
+ return [is_ip_addr(mem) for mem in is_list(value, min, max)]
+
+
+def force_list(value, min=None, max=None):
+ """
+ Check that a value is a list, coercing strings into
+ a list with one member. Useful where users forget the
+ trailing comma that turns a single value into a list.
+
+ You can optionally specify the minimum and maximum number of members.
+ A minimum greater than one will fail if the user only supplies a
+ string.
+
+ >>> vtor.check('force_list', ())
+ []
+ >>> vtor.check('force_list', [])
+ []
+ >>> vtor.check('force_list', 'hello')
+ ['hello']
+ """
+ if not isinstance(value, (list, tuple)):
+ value = [value]
+ return is_list(value, min, max)
+
+
+
+fun_dict = {
+ 'integer': is_integer,
+ 'float': is_float,
+ 'ip_addr': is_ip_addr,
+ 'string': is_string,
+ 'boolean': is_boolean,
+}
+
+
+def is_mixed_list(value, *args):
+ """
+ Check that the value is a list.
+ Allow specifying the type of each member.
+ Work on lists of specific lengths.
+
+ You specify each member as a positional argument specifying its type.
+
+ Each type should be one of the following strings:
+ 'integer', 'float', 'ip_addr', 'string', 'boolean'
+
+ So you can specify a list of two strings, followed by
+ two integers as:
+
+ mixed_list('string', 'string', 'integer', 'integer')
+
+ The length of the list must match the number of positional
+ arguments you supply.
+
+ >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
+ >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
+ >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
+ 1
+ >>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
+ >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
+ 1
+ >>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
+ Traceback (most recent call last):
+ VdtTypeError: the value "b" is of the wrong type.
+ >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
+ Traceback (most recent call last):
+ VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
+ >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
+ Traceback (most recent call last):
+ VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
+ >>> vtor.check(mix_str, 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+
+ This test requires an elaborate setup, because of a change in error string
+ output from the interpreter between Python 2.2 and 2.3.
+
+ >>> res_seq = (
+ ... 'passed an incorrect value "',
+ ... 'yoda',
+ ... '" for parameter "mixed_list".',
+ ... )
+ >>> res_str = "'".join(res_seq)
+ >>> try:
+ ... vtor.check('mixed_list("yoda")', ('a'))
+ ... except VdtParamError, err:
+ ... str(err) == res_str
+ 1
+ """
+ try:
+ length = len(value)
+ except TypeError:
+ raise VdtTypeError(value)
+ if length < len(args):
+ raise VdtValueTooShortError(value)
+ elif length > len(args):
+ raise VdtValueTooLongError(value)
+ try:
+ return [fun_dict[arg](val) for arg, val in zip(args, value)]
+ except KeyError, e:
+ raise VdtParamError('mixed_list', e)
+
+
+def is_option(value, *options):
+ """
+ This check matches the value to any of a set of options.
+
+ >>> vtor.check('option("yoda", "jedi")', 'yoda')
+ 'yoda'
+ >>> vtor.check('option("yoda", "jedi")', 'jed')
+ Traceback (most recent call last):
+ VdtValueError: the value "jed" is unacceptable.
+ >>> vtor.check('option("yoda", "jedi")', 0)
+ Traceback (most recent call last):
+ VdtTypeError: the value "0" is of the wrong type.
+ """
+ if not isinstance(value, basestring):
+ raise VdtTypeError(value)
+ if value not in options:
+ raise VdtValueError(value)
+ return value
+
+
+def _test(value, *args, **keywargs):
+ """
+ A function that exists for test purposes.
+
+ >>> checks = [
+ ... '3, 6, min=1, max=3, test=list(a, b, c)',
+ ... '3',
+ ... '3, 6',
+ ... '3,',
+ ... 'min=1, test="a b c"',
+ ... 'min=5, test="a, b, c"',
+ ... 'min=1, max=3, test="a, b, c"',
+ ... 'min=-100, test=-99',
+ ... 'min=1, max=3',
+ ... '3, 6, test="36"',
+ ... '3, 6, test="a, b, c"',
+ ... '3, max=3, test=list("a", "b", "c")',
+ ... '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
+ ... "test='x=fish(3)'",
+ ... ]
+ >>> v = Validator({'test': _test})
+ >>> for entry in checks:
+ ... print v.check(('test(%s)' % entry), 3)
+ (3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'})
+ (3, ('3',), {})
+ (3, ('3', '6'), {})
+ (3, ('3',), {})
+ (3, (), {'test': 'a b c', 'min': '1'})
+ (3, (), {'test': 'a, b, c', 'min': '5'})
+ (3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'})
+ (3, (), {'test': '-99', 'min': '-100'})
+ (3, (), {'max': '3', 'min': '1'})
+ (3, ('3', '6'), {'test': '36'})
+ (3, ('3', '6'), {'test': 'a, b, c'})
+ (3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'})
+ (3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'})
+ (3, (), {'test': 'x=fish(3)'})
+
+ >>> v = Validator()
+ >>> v.check('integer(default=6)', '3')
+ 3
+ >>> v.check('integer(default=6)', None, True)
+ 6
+ >>> v.get_default_value('integer(default=6)')
+ 6
+ >>> v.get_default_value('float(default=6)')
+ 6.0
+ >>> v.get_default_value('pass(default=None)')
+ >>> v.get_default_value("string(default='None')")
+ 'None'
+ >>> v.get_default_value('pass')
+ Traceback (most recent call last):
+ KeyError: 'Check "pass" has no default value.'
+ >>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
+ ['1', '2', '3', '4']
+
+ >>> v = Validator()
+ >>> v.check("pass(default=None)", None, True)
+ >>> v.check("pass(default='None')", None, True)
+ 'None'
+ >>> v.check('pass(default="None")', None, True)
+ 'None'
+ >>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
+ ['1', '2', '3', '4']
+
+ Bug test for unicode arguments
+ >>> v = Validator()
+ >>> v.check(u'string(min=4)', u'test')
+ u'test'
+
+ >>> v = Validator()
+ >>> v.get_default_value(u'string(min=4, default="1234")')
+ u'1234'
+ >>> v.check(u'string(min=4, default="1234")', u'test')
+ u'test'
+
+ >>> v = Validator()
+ >>> default = v.get_default_value('string(default=None)')
+ >>> default == None
+ 1
+ """
+ return (value, args, keywargs)
+
+
+def _test2():
+ """
+ >>>
+ >>> v = Validator()
+ >>> v.get_default_value('string(default="#ff00dd")')
+ '#ff00dd'
+ >>> v.get_default_value('integer(default=3) # comment')
+ 3
+ """
+
+def _test3():
+ r"""
+ >>> vtor.check('string(default="")', '', missing=True)
+ ''
+ >>> vtor.check('string(default="\n")', '', missing=True)
+ '\n'
+ >>> print vtor.check('string(default="\n")', '', missing=True),
+ <BLANKLINE>
+ >>> vtor.check('string()', '\n')
+ '\n'
+ >>> vtor.check('string(default="\n\n\n")', '', missing=True)
+ '\n\n\n'
+ >>> vtor.check('string()', 'random \n text goes here\n\n')
+ 'random \n text goes here\n\n'
+ >>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
+ ... '', missing=True)
+ ' \nrandom text\ngoes \n here\n\n '
+ >>> vtor.check("string(default='\n\n\n')", '', missing=True)
+ '\n\n\n'
+ >>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
+ '\n'
+ >>> vtor.check("string_list()", ['foo', '\n', 'bar'])
+ ['foo', '\n', 'bar']
+ >>> vtor.check("string_list(default=list('\n'))", '', missing=True)
+ ['\n']
+ """
+
+
+if __name__ == '__main__':
+ # run the code tests in doctest format
+ import sys
+ import doctest
+ m = sys.modules.get('__main__')
+ globs = m.__dict__.copy()
+ globs.update({
+ 'vtor': Validator(),
+ })
+ doctest.testmod(m, globs=globs)
diff --git a/python/devtools/migrate-l10n/README.rst b/python/devtools/migrate-l10n/README.rst
new file mode 100644
index 000000000..70f5a6303
--- /dev/null
+++ b/python/devtools/migrate-l10n/README.rst
@@ -0,0 +1,16 @@
+devtools-l10n-migration script
+==============================
+
+For devtools.html, devtools will no longer rely on DTD files. This migration
+script is aimed at localizers and automates the migration of strings from DTD
+to properties files.
+
+How to run this script?
+
+To migrate all configuration files::
+
+ python migrate/main.py path/to/your/l10n/repo/ -c migrate/conf/
+
+To migrate only one configuration file::
+
+ python migrate/main.py path/to/your/l10n/repo/ -c migrate/conf/bug1294186
+
+All configuration files should be named after the bug where specific devtools strings were migrated.
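+
+Each line of a configuration file maps a properties entry to the DTD entity
+it replaces, for instance::
+
+ boxmodel.properties:boxmodel.title = layoutview.dtd:layoutViewTitle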
diff --git a/python/devtools/migrate-l10n/migrate/__init__.py b/python/devtools/migrate-l10n/migrate/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/devtools/migrate-l10n/migrate/__init__.py
diff --git a/python/devtools/migrate-l10n/migrate/conf/bug1294186 b/python/devtools/migrate-l10n/migrate/conf/bug1294186
new file mode 100644
index 000000000..0b91b4d58
--- /dev/null
+++ b/python/devtools/migrate-l10n/migrate/conf/bug1294186
@@ -0,0 +1,22 @@
+font-inspector.properties:fontinspector.seeAll.tooltip = font-inspector.dtd:showAllFonts
+font-inspector.properties:fontinspector.seeAll = font-inspector.dtd:showAllFontsUsed
+font-inspector.properties:fontinspector.usedAs = font-inspector.dtd:usedAs
+font-inspector.properties:fontinspector.system = font-inspector.dtd:system
+font-inspector.properties:fontinspector.remote = font-inspector.dtd:remote
+font-inspector.properties:fontinspector.previewText = font-inspector.dtd:previewHint
+
+inspector.properties:inspector.eyedropper.label = inspector.dtd:inspectorEyeDropper.label
+inspector.properties:inspector.breadcrumbs.label = inspector.dtd:inspectorBreadcrumbsGroup
+
+boxmodel.properties:boxmodel.title = layoutview.dtd:layoutViewTitle
+boxmodel.properties:boxmodel.margin = layoutview.dtd:margin.tooltip
+boxmodel.properties:boxmodel.padding = layoutview.dtd:padding.tooltip
+boxmodel.properties:boxmodel.border = layoutview.dtd:border.tooltip
+boxmodel.properties:boxmodel.content = layoutview.dtd:content.tooltip
+boxmodel.properties:boxmodel.geometryButton.tooltip = layoutview.dtd:geometry.button.tooltip
+
+inspector.properties:inspector.browserStyles.label = styleinspector.dtd:browserStylesLabel
+inspector.properties:inspector.filterStyles.placeholder = styleinspector.dtd:filterStylesPlaceholder
+inspector.properties:inspector.addRule.tooltip = styleinspector.dtd:addRuleButtonTooltip
+inspector.properties:inspector.togglePseudo.tooltip = styleinspector.dtd:togglePseudoClassPanel
+inspector.properties:inspector.noProperties = styleinspector.dtd:noPropertiesFound
diff --git a/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191 b/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191
new file mode 100644
index 000000000..177236b33
--- /dev/null
+++ b/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191
@@ -0,0 +1,97 @@
+netmonitor.properties:netmonitor.perfNotice1 = netmonitor.dtd:netmonitorUI.perfNotice1
+netmonitor.properties:netmonitor.perfNotice2 = netmonitor.dtd:netmonitorUI.perfNotice2
+netmonitor.properties:netmonitor.perfNotice3 = netmonitor.dtd:netmonitorUI.perfNotice3
+netmonitor.properties:netmonitor.reloadNotice1 = netmonitor.dtd:netmonitorUI.reloadNotice1
+netmonitor.properties:netmonitor.reloadNotice2 = netmonitor.dtd:netmonitorUI.reloadNotice2
+netmonitor.properties:netmonitor.reloadNotice3 = netmonitor.dtd:netmonitorUI.reloadNotice3
+netmonitor.properties:netmonitor.toolbar.status3 = netmonitor.dtd:netmonitorUI.toolbar.status3
+netmonitor.properties:netmonitor.toolbar.method = netmonitor.dtd:netmonitorUI.toolbar.method
+netmonitor.properties:netmonitor.toolbar.file = netmonitor.dtd:netmonitorUI.toolbar.file
+netmonitor.properties:netmonitor.toolbar.domain = netmonitor.dtd:netmonitorUI.toolbar.domain
+netmonitor.properties:netmonitor.toolbar.cause = netmonitor.dtd:netmonitorUI.toolbar.cause
+netmonitor.properties:netmonitor.toolbar.type = netmonitor.dtd:netmonitorUI.toolbar.type
+netmonitor.properties:netmonitor.toolbar.transferred = netmonitor.dtd:netmonitorUI.toolbar.transferred
+netmonitor.properties:netmonitor.toolbar.size = netmonitor.dtd:netmonitorUI.toolbar.size
+netmonitor.properties:netmonitor.toolbar.waterfall = netmonitor.dtd:netmonitorUI.toolbar.waterfall
+netmonitor.properties:netmonitor.tab.headers = netmonitor.dtd:netmonitorUI.tab.headers
+netmonitor.properties:netmonitor.tab.cookies = netmonitor.dtd:netmonitorUI.tab.cookies
+netmonitor.properties:netmonitor.tab.params = netmonitor.dtd:netmonitorUI.tab.params
+netmonitor.properties:netmonitor.tab.response = netmonitor.dtd:netmonitorUI.tab.response
+netmonitor.properties:netmonitor.tab.timings = netmonitor.dtd:netmonitorUI.tab.timings
+netmonitor.properties:netmonitor.tab.preview = netmonitor.dtd:netmonitorUI.tab.preview
+netmonitor.properties:netmonitor.tab.security = netmonitor.dtd:netmonitorUI.tab.security
+netmonitor.properties:netmonitor.toolbar.filter.all = netmonitor.dtd:netmonitorUI.footer.filterAll
+netmonitor.properties:netmonitor.toolbar.filter.html = netmonitor.dtd:netmonitorUI.footer.filterHTML
+netmonitor.properties:netmonitor.toolbar.filter.css = netmonitor.dtd:netmonitorUI.footer.filterCSS
+netmonitor.properties:netmonitor.toolbar.filter.js = netmonitor.dtd:netmonitorUI.footer.filterJS
+netmonitor.properties:netmonitor.toolbar.filter.xhr = netmonitor.dtd:netmonitorUI.footer.filterXHR
+netmonitor.properties:netmonitor.toolbar.filter.fonts = netmonitor.dtd:netmonitorUI.footer.filterFonts
+netmonitor.properties:netmonitor.toolbar.filter.images = netmonitor.dtd:netmonitorUI.footer.filterImages
+netmonitor.properties:netmonitor.toolbar.filter.media = netmonitor.dtd:netmonitorUI.footer.filterMedia
+netmonitor.properties:netmonitor.toolbar.filter.flash = netmonitor.dtd:netmonitorUI.footer.filterFlash
+netmonitor.properties:netmonitor.toolbar.filter.ws = netmonitor.dtd:netmonitorUI.footer.filterWS
+netmonitor.properties:netmonitor.toolbar.filter.other = netmonitor.dtd:netmonitorUI.footer.filterOther
+netmonitor.properties:netmonitor.toolbar.filterFreetext.label = netmonitor.dtd:netmonitorUI.footer.filterFreetext.label
+netmonitor.properties:netmonitor.toolbar.clear = netmonitor.dtd:netmonitorUI.footer.clear
+netmonitor.properties:netmonitor.toolbar.perf = netmonitor.dtd:netmonitorUI.footer.perf
+netmonitor.properties:netmonitor.panesButton.tooltip = netmonitor.dtd:netmonitorUI.panesButton.tooltip
+netmonitor.properties:netmonitor.summary.url = netmonitor.dtd:netmonitorUI.summary.url
+netmonitor.properties:netmonitor.summary.method = netmonitor.dtd:netmonitorUI.summary.method
+netmonitor.properties:netmonitor.summary.address = netmonitor.dtd:netmonitorUI.summary.address
+netmonitor.properties:netmonitor.summary.status = netmonitor.dtd:netmonitorUI.summary.status
+netmonitor.properties:netmonitor.summary.version = netmonitor.dtd:netmonitorUI.summary.version
+netmonitor.properties:netmonitor.summary.editAndResend = netmonitor.dtd:netmonitorUI.summary.editAndResend
+netmonitor.properties:netmonitor.summary.rawHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders
+netmonitor.properties:netmonitor.summary.rawHeaders.requestHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders.requestHeaders
+netmonitor.properties:netmonitor.summary.rawHeaders.responseHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders.responseHeaders
+netmonitor.properties:netmonitor.summary.size = netmonitor.dtd:netmonitorUI.summary.size
+netmonitor.properties:netmonitor.response.name = netmonitor.dtd:netmonitorUI.response.name
+netmonitor.properties:netmonitor.response.dimensions = netmonitor.dtd:netmonitorUI.response.dimensions
+netmonitor.properties:netmonitor.response.mime = netmonitor.dtd:netmonitorUI.response.mime
+netmonitor.properties:netmonitor.timings.blocked = netmonitor.dtd:netmonitorUI.timings.blocked
+netmonitor.properties:netmonitor.timings.dns = netmonitor.dtd:netmonitorUI.timings.dns
+netmonitor.properties:netmonitor.timings.connect = netmonitor.dtd:netmonitorUI.timings.connect
+netmonitor.properties:netmonitor.timings.send = netmonitor.dtd:netmonitorUI.timings.send
+netmonitor.properties:netmonitor.timings.wait = netmonitor.dtd:netmonitorUI.timings.wait
+netmonitor.properties:netmonitor.timings.receive = netmonitor.dtd:netmonitorUI.timings.receive
+netmonitor.properties:netmonitor.security.warning.cipher = netmonitor.dtd:netmonitorUI.security.warning.cipher
+netmonitor.properties:netmonitor.security.error = netmonitor.dtd:netmonitorUI.security.error
+netmonitor.properties:netmonitor.security.protocolVersion = netmonitor.dtd:netmonitorUI.security.protocolVersion
+netmonitor.properties:netmonitor.security.cipherSuite = netmonitor.dtd:netmonitorUI.security.cipherSuite
+netmonitor.properties:netmonitor.security.hsts = netmonitor.dtd:netmonitorUI.security.hsts
+netmonitor.properties:netmonitor.security.hpkp = netmonitor.dtd:netmonitorUI.security.hpkp
+netmonitor.properties:netmonitor.security.connection = netmonitor.dtd:netmonitorUI.security.connection
+netmonitor.properties:netmonitor.security.certificate = netmonitor.dtd:netmonitorUI.security.certificate
+netmonitor.properties:netmonitor.context.copyUrl = netmonitor.dtd:netmonitorUI.context.copyUrl
+netmonitor.properties:netmonitor.context.copyUrl.accesskey = netmonitor.dtd:netmonitorUI.context.copyUrl.accesskey
+netmonitor.properties:netmonitor.context.copyUrlParams = netmonitor.dtd:netmonitorUI.context.copyUrlParams
+netmonitor.properties:netmonitor.context.copyUrlParams.accesskey = netmonitor.dtd:netmonitorUI.context.copyUrlParams.accesskey
+netmonitor.properties:netmonitor.context.copyPostData = netmonitor.dtd:netmonitorUI.context.copyPostData
+netmonitor.properties:netmonitor.context.copyPostData.accesskey = netmonitor.dtd:netmonitorUI.context.copyPostData.accesskey
+netmonitor.properties:netmonitor.context.copyAsCurl = netmonitor.dtd:netmonitorUI.context.copyAsCurl
+netmonitor.properties:netmonitor.context.copyAsCurl.accesskey = netmonitor.dtd:netmonitorUI.context.copyAsCurl.accesskey
+netmonitor.properties:netmonitor.context.copyRequestHeaders = netmonitor.dtd:netmonitorUI.context.copyRequestHeaders
+netmonitor.properties:netmonitor.context.copyRequestHeaders.accesskey = netmonitor.dtd:netmonitorUI.context.copyRequestHeaders.accesskey
+netmonitor.properties:netmonitor.context.copyResponseHeaders = netmonitor.dtd:netmonitorUI.context.copyResponseHeaders
+netmonitor.properties:netmonitor.context.copyResponseHeaders.accesskey = netmonitor.dtd:netmonitorUI.context.copyResponseHeaders.accesskey
+netmonitor.properties:netmonitor.context.copyResponse = netmonitor.dtd:netmonitorUI.context.copyResponse
+netmonitor.properties:netmonitor.context.copyResponse.accesskey = netmonitor.dtd:netmonitorUI.context.copyResponse.accesskey
+netmonitor.properties:netmonitor.context.copyImageAsDataUri = netmonitor.dtd:netmonitorUI.context.copyImageAsDataUri
+netmonitor.properties:netmonitor.context.copyImageAsDataUri.accesskey = netmonitor.dtd:netmonitorUI.context.copyImageAsDataUri.accesskey
+netmonitor.properties:netmonitor.context.copyAllAsHar = netmonitor.dtd:netmonitorUI.context.copyAllAsHar
+netmonitor.properties:netmonitor.context.copyAllAsHar.accesskey = netmonitor.dtd:netmonitorUI.context.copyAllAsHar.accesskey
+netmonitor.properties:netmonitor.context.saveAllAsHar = netmonitor.dtd:netmonitorUI.context.saveAllAsHar
+netmonitor.properties:netmonitor.context.saveAllAsHar.accesskey = netmonitor.dtd:netmonitorUI.context.saveAllAsHar.accesskey
+netmonitor.properties:netmonitor.context.editAndResend = netmonitor.dtd:netmonitorUI.summary.editAndResend
+netmonitor.properties:netmonitor.context.editAndResend.accesskey = netmonitor.dtd:netmonitorUI.summary.editAndResend.accesskey
+netmonitor.properties:netmonitor.context.newTab = netmonitor.dtd:netmonitorUI.context.newTab
+netmonitor.properties:netmonitor.context.newTab.accesskey = netmonitor.dtd:netmonitorUI.context.newTab.accesskey
+netmonitor.properties:netmonitor.context.perfTools = netmonitor.dtd:netmonitorUI.context.perfTools
+netmonitor.properties:netmonitor.context.perfTools.accesskey = netmonitor.dtd:netmonitorUI.context.perfTools.accesskey
+netmonitor.properties:netmonitor.custom.newRequest = netmonitor.dtd:netmonitorUI.custom.newRequest
+netmonitor.properties:netmonitor.custom.query = netmonitor.dtd:netmonitorUI.custom.query
+netmonitor.properties:netmonitor.custom.headers = netmonitor.dtd:netmonitorUI.custom.headers
+netmonitor.properties:netmonitor.custom.postData = netmonitor.dtd:netmonitorUI.custom.postData
+netmonitor.properties:netmonitor.custom.send = netmonitor.dtd:netmonitorUI.custom.send
+netmonitor.properties:netmonitor.custom.cancel = netmonitor.dtd:netmonitorUI.custom.cancel
+netmonitor.properties:netmonitor.backButton = netmonitor.dtd:netmonitorUI.backButton
diff --git a/python/devtools/migrate-l10n/migrate/main.py b/python/devtools/migrate-l10n/migrate/main.py
new file mode 100644
index 000000000..0a1d468a8
--- /dev/null
+++ b/python/devtools/migrate-l10n/migrate/main.py
@@ -0,0 +1,261 @@
+import argparse
+import glob
+import HTMLParser
+import logging
+import os
+import re
+import sys
+import urllib2
+
+
+# Import compare-locales parser from parent folder.
+script_path = os.path.dirname(os.path.realpath(__file__))
+compare_locales_path = os.path.join(script_path, '../../../compare-locales')
+sys.path.insert(0, compare_locales_path)
+from compare_locales import parser
+
+
+# Configure logging format and level
+logging.basicConfig(format=' [%(levelname)s] %(message)s', level=logging.INFO)
+
+
+# License header to use when creating new properties files.
+DEFAULT_HEADER = ('# This Source Code Form is subject to the terms of the '
+ 'Mozilla Public\n# License, v. 2.0. If a copy of the MPL '
+ 'was not distributed with this\n# file, You can obtain '
+ 'one at http://mozilla.org/MPL/2.0/.\n')
+
+
+# Base URL used to retrieve properties files from mozilla-central; the files
+# are parsed for localization notes.
+CENTRAL_BASE_URL = ('https://hg.mozilla.org/'
+ 'mozilla-central/raw-file/tip/'
+ 'devtools/client/locales/en-US/')
+
+
+# HTML parser to translate HTML entities in dtd files.
+HTML_PARSER = HTMLParser.HTMLParser()
+
+# Cache to store properties files retrieved over the network.
+central_prop_cache = {}
+
+# Cache the parsed entities from the existing DTD files.
+dtd_entities_cache = {}
+
+
+# Retrieve the content of the current version of a properties file for the
+# provided filename, from devtools/client on mozilla-central. Returns an
+# empty list if the file can't be retrieved or read.
+def get_central_prop_content(prop_filename):
+ if prop_filename in central_prop_cache:
+ return central_prop_cache[prop_filename]
+
+ url = CENTRAL_BASE_URL + prop_filename
+ logging.info('loading localization file from central: {%s}' % url)
+
+ try:
+ central_prop_cache[prop_filename] = urllib2.urlopen(url).readlines()
+ except Exception:
+ logging.error('failed to load properties file from central: {%s}'
+ % url)
+ central_prop_cache[prop_filename] = []
+
+ return central_prop_cache[prop_filename]
+
+
+# Retrieve the current en-US localization notes for the provided prop_name.
+def get_localization_note(prop_name, prop_filename):
+ prop_content = get_central_prop_content(prop_filename)
+
+ comment_buffer = []
+ for i, line in enumerate(prop_content):
+ # Remove line breaks.
+ line = line.strip('\n').strip('\r')
+
+ if line.startswith('#'):
+ # Comment line, add to the current comment buffer.
+ comment_buffer.append(line)
+ elif re.search('(^|\n)' + re.escape(prop_name) + '\s*=', line):
+ # Property found, the current comment buffer is the localization
+ # note.
+ break
+ else:
+ # No match, not a comment, reinitialize the comment buffer.
+ comment_buffer = []
+
+ return '\n'.join(comment_buffer)
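+
+# Illustrative shape of the input this parses (assumed note text, real key):
+#   # LOCALIZATION NOTE (netmonitor.toolbar.method): note text goes here.
+#   netmonitor.toolbar.method=Method
+# For that prop_name, the comment line(s) directly above the entry are
+# returned as the localization note.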
+
+
+# Retrieve the parsed DTD entities for a provided path. Results are cached by
+# dtd path.
+def get_dtd_entities(dtd_path):
+ if dtd_path in dtd_entities_cache:
+ return dtd_entities_cache[dtd_path]
+
+ dtd_parser = parser.getParser('.dtd')
+ dtd_parser.readFile(dtd_path)
+ dtd_entities_cache[dtd_path] = dtd_parser.parse()
+ return dtd_entities_cache[dtd_path]
+
+
+# Extract the value of an entity in a dtd file.
+def get_translation_from_dtd(dtd_path, entity_name):
+ entities, entity_map = get_dtd_entities(dtd_path)
+ if entity_name not in entity_map:
+ # Bail out if translation is missing.
+ return
+
+ key = entity_map[entity_name]
+ entity = entities[key]
+ translation = HTML_PARSER.unescape(entity.val)
+ return translation.encode('utf-8')
+
+
+# Extract the header and file wide comments for the provided properties file
+# filename.
+def get_properties_header(prop_filename):
+ prop_content = get_central_prop_content(prop_filename)
+
+ # if the file content is empty, return the default license header.
+ if len(prop_content) == 0:
+ return DEFAULT_HEADER
+
+ header_buffer = []
+ for i, line in enumerate(prop_content):
+ # remove line breaks.
+ line = line.strip('\n').strip('\r')
+
+ # regexp matching keys, extracted from parser.py.
+ is_entity_line = re.search('^(\s*)'
+ '((?:[#!].*?\n\s*)*)'
+ '([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', line)
+ is_loc_note = re.search('^(\s*)'
+ '\#\s*LOCALIZATION NOTE\s*\([^)]+\)', line)
+ if is_entity_line or is_loc_note:
+ # header finished, break the loop.
+ break
+ else:
+ # header line, add to the current buffer.
+ header_buffer.append(line)
+
+ # concatenate the current buffer and return.
+ return '\n'.join(header_buffer)
+
+
+# Create a new properties file at the provided path.
+def create_properties_file(prop_path):
+ logging.info('creating new *.properties file: {%s}' % prop_path)
+
+ prop_filename = os.path.basename(prop_path)
+ header = get_properties_header(prop_filename)
+
+ with open(prop_path, 'w+') as prop_file:
+ prop_file.write(header)
+
+
+# Migrate a single string entry for a dtd to a properties file.
+def migrate_string(dtd_path, prop_path, dtd_name, prop_name):
+ if not os.path.isfile(dtd_path):
+ logging.error('dtd file can not be found at: {%s}' % dtd_path)
+ return
+
+ translation = get_translation_from_dtd(dtd_path, dtd_name)
+ if not translation:
+ logging.error('translation could not be found for: {%s} in {%s}'
+ % (dtd_name, dtd_path))
+ return
+
+ # Create properties file if missing.
+ if not os.path.isfile(prop_path):
+ create_properties_file(prop_path)
+
+ if not os.path.isfile(prop_path):
+ logging.error('could not create new properties file at: {%s}'
+ % prop_path)
+ return
+
+ prop_line = prop_name + '=' + translation + '\n'
+
+ # Skip the string if it already exists in the destination file.
+ with open(prop_path, 'r') as existing_file:
+ prop_file_content = existing_file.read()
+ if prop_line in prop_file_content:
+ logging.warning('string already migrated, skipping: {%s}' % prop_name)
+ return
+
+ # Skip the string and log an error if an existing entry is found, but with
+ # a different value.
+ if re.search('(^|\n)' + re.escape(prop_name) + '\s*=', prop_file_content):
+ logging.error('existing string found, skipping: {%s}' % prop_name)
+ return
+
+ prop_filename = os.path.basename(prop_path)
+ logging.info('migrating {%s} in {%s}' % (prop_name, prop_filename))
+ with open(prop_path, 'a') as prop_file:
+ localization_note = get_localization_note(prop_name, prop_filename)
+ if len(localization_note):
+ prop_file.write('\n' + localization_note)
+ else:
+ logging.warning('localization notes could not be found for: {%s}'
+ % prop_name)
+ prop_file.write('\n' + prop_line)
+
+
+# Apply the migration instructions in the provided configuration file.
+def migrate_conf(conf_path, l10n_path):
+ with open(conf_path, 'r') as f:
+ lines = f.readlines()
+
+ for i, line in enumerate(lines):
+ # Remove line breaks.
+ line = line.strip('\n').strip('\r')
+
+ # Skip invalid lines.
+ if ' = ' not in line:
+ continue
+
+ # Expected syntax: ${prop_path}:${prop_name} = ${dtd_path}:${dtd_name}.
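+ # e.g. boxmodel.properties:boxmodel.title = layoutview.dtd:layoutViewTitle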
+ prop_info, dtd_info = line.split(' = ')
+ prop_path, prop_name = prop_info.split(':')
+ dtd_path, dtd_name = dtd_info.split(':')
+
+ dtd_path = os.path.join(l10n_path, dtd_path)
+ prop_path = os.path.join(l10n_path, prop_path)
+
+ migrate_string(dtd_path, prop_path, dtd_name, prop_name)
+
+
+def main():
+ # Read command line arguments.
+ arg_parser = argparse.ArgumentParser(
+ description='Migrate devtools localized strings.')
+ arg_parser.add_argument('path', type=str, help='path to l10n repository')
+ arg_parser.add_argument('-c', '--config', type=str,
+ help='path to configuration file or folder')
+ args = arg_parser.parse_args()
+
+ # Retrieve path to devtools localization files in l10n repository.
+ devtools_l10n_path = os.path.join(args.path, 'devtools/client/')
+ if not os.path.exists(devtools_l10n_path):
+ logging.error('l10n path is invalid: {%s}' % devtools_l10n_path)
+ sys.exit(1)
+ logging.info('l10n path is valid: {%s}' % devtools_l10n_path)
+
+ # Retrieve configuration files to apply.
+ if os.path.isdir(args.config):
+ conf_files = glob.glob(os.path.join(args.config, '*'))
+ elif os.path.isfile(args.config):
+ conf_files = [args.config]
+ else:
+ logging.error('config path is invalid: {%s}' % args.config)
+ sys.exit(1)
+
+ # Perform migration for each configuration file.
+ for conf_file in conf_files:
+ logging.info('performing migration for config file: {%s}' % conf_file)
+ migrate_conf(conf_file, devtools_l10n_path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/devtools/migrate-l10n/migrate/tests/__init__.py b/python/devtools/migrate-l10n/migrate/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/devtools/migrate-l10n/migrate/tests/__init__.py
diff --git a/python/eme/gen-eme-voucher.py b/python/eme/gen-eme-voucher.py
new file mode 100644
index 000000000..299bc7146
--- /dev/null
+++ b/python/eme/gen-eme-voucher.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python2.7
+#
+# Copyright 2014 Adobe Systems Incorporated. All Rights Reserved.
+#
+# Adobe permits you to use, modify, and distribute this file in accordance
+# with the terms of the Mozilla Public License, v 2.0 accompanying it. If
+# a copy of the MPL was not distributed with this file, You can obtain one
+# at http://mozilla.org/MPL/2.0/.
+#
+# Creates an Adobe Access signed voucher for x32/x64 windows executables
+# Notes: This is currently python2.7 due to mozilla build system requirements
+
+from __future__ import print_function
+
+import argparse, bitstring, pprint, hashlib, os, subprocess, sys, tempfile, macholib, macholib.MachO
+from pyasn1.codec.der import encoder as der_encoder
+from pyasn1.type import univ, namedtype, namedval, constraint
+
+
+# Defined in WinNT.h from the Windows SDK
+IMAGE_SCN_MEM_EXECUTE = 0x20000000
+IMAGE_REL_BASED_HIGHLOW = 3
+IMAGE_REL_BASED_DIR64 = 10
+
+
+# CodeSectionDigest ::= SEQUENCE {
+# offset INTEGER -- section's file offset in the signed binary
+# digestAlgorithm OBJECT IDENTIFIER -- algorithm identifier for the hash value below. For now only supports SHA256.
+# digestValue OCTET STRING -- hash value of the TEXT segment.
+# }
+class CodeSectionDigest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('offset', univ.Integer()),
+ namedtype.NamedType('digestAlgorithm', univ.ObjectIdentifier()),
+ namedtype.NamedType('digest', univ.OctetString()))
+
+
+# CodeSegmentDigest ::= SEQUENCE {
+# offset INTEGER -- TEXT segment's file offset in the signed binary
+# codeSectionDigests SET OF CodeSectionDigests
+# }
+
+class SetOfCodeSectionDigest(univ.SetOf):
+ componentType = CodeSectionDigest()
+
+
+class CodeSegmentDigest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('offset', univ.Integer()),
+ namedtype.NamedType('codeSectionDigests', SetOfCodeSectionDigest()))
+
+
+# ArchitectureDigest ::= SEQUENCE {
+# cpuType ENUMERATED CpuType
+# cpuSubType ENUMERATED CpuSubType
+# CodeSegmentDigests SET OF CodeSegmentDigests
+# }
+class SetOfCodeSegmentDigest(univ.SetOf):
+ componentType = CodeSegmentDigest()
+
+
+class CPUType(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('IMAGE_FILE_MACHINE_I386', 0x14c),
+ ('IMAGE_FILE_MACHINE_AMD64', 0x8664),
+ ('MACHO_CPU_TYPE_I386', 0x7),
+ ('MACHO_CPU_TYPE_X86_64', 0x1000007),
+ )
+ subtypeSpec = univ.Enumerated.subtypeSpec + \
+ constraint.SingleValueConstraint(0x14c, 0x8664, 0x7, 0x1000007)
+
+
+class CPUSubType(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('IMAGE_UNUSED', 0x0),
+ ('CPU_SUBTYPE_X86_ALL', 0x3),
+ ('CPU_SUBTYPE_X86_64_ALL', 0x80000003)
+ )
+ subtypeSpec = univ.Enumerated.subtypeSpec + \
+ constraint.SingleValueConstraint(0, 0x3, 0x80000003)
+
+
+class ArchitectureDigest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('cpuType', CPUType()),
+ namedtype.NamedType('cpuSubType', CPUSubType()),
+ namedtype.NamedType('CodeSegmentDigests', SetOfCodeSegmentDigest())
+ )
+
+
+# ApplicationDigest ::= SEQUENCE {
+# version INTEGER
+# digests SET OF ArchitectureDigest
+# }
+class SetOfArchitectureDigest(univ.SetOf):
+ componentType = ArchitectureDigest()
+
+
+class ApplicationDigest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer()),
+ namedtype.NamedType('digests', SetOfArchitectureDigest())
+ )
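+
+
+# Sketch (illustrative; not from the original comments): a populated
+# ApplicationDigest is what would ultimately be DER-serialized with the
+# encoder imported above, roughly der_encoder.encode(app_digest), to
+# produce the byte blob that gets signed into the voucher.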
+
+
+def meets_requirements(items, requirements):
+ for r in requirements:
+ for n, v in r.items():
+ if n not in items or items[n] != v: return False
+ return True
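+
+# e.g. meets_requirements({'Magic': 0x10b}, [{'Magic': 0x10b}]) -> True,
+# while meets_requirements({'Magic': 0x20b}, [{'Magic': 0x10b}]) -> False.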
+
+
+# Returns a (bits_read, total_bits_read) tuple; bits_read excludes bits
+# consumed by nested sub-items.
+# TODO: research replacing this with the python built-in struct module
+def parse_items(stream, items_in, items_out):
+ bits_read = 0
+ total_bits_read = 0
+
+ for item in items_in:
+ name = item[0]
+ t = item[1]
+ bits = 1 if ":" not in t else int(t[t.index(":") + 1:])
+
+ if ":" in t and t.find("bytes") >= 0:
+ bits = bits * 8
+
+ if len(item) == 2:
+ items_out[name] = stream.read(t)
+ bits_read += bits
+ total_bits_read += bits
+ elif len(item) == 3 or len(item) == 4:
+ requirements = list(filter(lambda x: isinstance(x, dict), item[2]))
+ sub_items = list(filter(lambda x: isinstance(x, tuple), item[2]))
+
+ if not meets_requirements(items_out, requirements): continue
+
+ # has sub-items based on length
+ items_out[name] = stream.read(t)
+ bits_read += bits
+ total_bits_read += bits
+
+ if len(item) == 4:
+ bit_length = items_out[name] * 8
+
+ if bit_length > 0:
+ sub_read, sub_total_read = parse_items(stream, sub_items, items_out)
+ bit_length -= sub_read
+ total_bits_read += sub_total_read
+
+ if bit_length > 0:
+ items_out[item[3]] = stream.read('bits:' + str(bit_length))
+ bits_read += bit_length
+ total_bits_read += bit_length
+ else:
+ raise Exception("unrecognized item" + pprint.pformat(item))
+
+ return bits_read, total_bits_read
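+
+
+# Usage sketch (mirrors how SectionHeader below consumes this helper):
+#   items_out = {}
+#   parse_items(stream, [('Name', 'bytes:8'), ('VirtualSize', 'uintle:32')],
+#               items_out)
+# reads an 8-byte name plus a little-endian 32-bit size from the bitstring
+# stream into items_out.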
+
+
+# macho stuff
+# Constant for the magic field of the mach_header (32-bit architectures)
+MH_MAGIC = 0xfeedface # the mach magic number
+MH_CIGAM = 0xcefaedfe # NXSwapInt(MH_MAGIC)
+
+MH_MAGIC_64 = 0xfeedfacf # the 64-bit mach magic number
+MH_CIGAM_64 = 0xcffaedfe # NXSwapInt(MH_MAGIC_64)
+
+FAT_CIGAM = 0xbebafeca
+FAT_MAGIC = 0xcafebabe
+
+LC_SEGMENT = 0x1
+LC_SEGMENT_64 = 0x19 # 64-bit segment of this file to be mapped
+
+
+
+# TODO: perhaps switch to pefile module when it officially supports python3
+class SectionHeader:
+ def __init__(self, stream):
+ items = [
+ ('Name', 'bytes:8'),
+ ('VirtualSize', 'uintle:32'),
+ ('VirtualAddress', 'uintle:32'),
+ ('SizeOfRawData', 'uintle:32'),
+ ('PointerToRawData', 'uintle:32'),
+ ('PointerToRelocations', 'uintle:32'),
+ ('PointerToLineNumber', 'uintle:32'),
+ ('NumberOfRelocations', 'uintle:16'),
+ ('NumberOfLineNumbers', 'uintle:16'),
+ ('Characteristics', 'uintle:32')
+ ]
+ self.items = dict()
+ self.relocs = dict()
+
+ _, self.bits_read = parse_items(stream, items, self.items)
+
+ self.sectionName = self.items['Name'].decode('utf-8')
+ self.offset = self.items['PointerToRawData']
+
+COFF_DATA_DIRECTORY_TYPES = [
+ "Export Table",
+ "Import Table",
+ "Resource Table",
+ "Exception Table",
+ "Certificate Tble",
+ "Base Relocation Table",
+ "Debug",
+ "Architecture",
+ "Global Ptr",
+ "TLS Table",
+ "Load Config Table",
+ "Bound Import",
+ "IAT",
+ "Delay Import Descriptor",
+ "CLR Runtime Header",
+ "Reserved",
+]
+
+
+def chained_safe_get(obj, names, default=None):
+ if obj is None: return default
+
+ for n in names:
+ if n in obj:
+ obj = obj[n]
+ else:
+ return default
+
+ return obj
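+
+# e.g. chained_safe_get({'a': {'b': 1}}, ['a', 'b']) -> 1, and
+# chained_safe_get({'a': {}}, ['a', 'b'], default=0) -> 0.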
+
+
+class OptionalHeader:
+ def __init__(self, stream, size):
+ self.items = {}
+ items = []
+
+ if size:
+ items += [
+ ('Magic', 'uintle:16'),
+ ('MajorLinkerVersion', 'uintle:8'),
+ ('MinorLinkerVersion', 'uintle:8'),
+ ('SizeOfCode', 'uintle:32'),
+ ('SizeOfInitializedData', 'uintle:32'),
+ ('SizeOfUninitializedData', 'uintle:32'),
+ ('AddressOfEntryPoint', 'uintle:32'),
+ ('BaseOfCode', 'uintle:32'),
+ ]
+
+ _, self.bits_read = parse_items(stream, items, self.items)
+
+ items = []
+ if self.items['Magic'] == 0x10b: # PE32
+ items += [('BaseOfData', 'uintle:32')]
+
+ address_size = 'uintle:64' if self.items['Magic'] == 0x20b else 'uintle:32'
+
+ items += [
+ ('ImageBase', address_size),
+ ('SectionAlignment', 'uintle:32'),
+ ('FileAlignment', 'uintle:32'),
+ ('MajorOperatingSystemVersion', 'uintle:16'),
+ ('MinorOperatingSystemVersion', 'uintle:16'),
+ ('MajorImageVersion', 'uintle:16'),
+ ('MinorImageVersion', 'uintle:16'),
+ ('MajorSubsystemVersion', 'uintle:16'),
+ ('MinorSubsystemVersion', 'uintle:16'),
+ ('Win32VersionValue', 'uintle:32'),
+ ('SizeOfImage', 'uintle:32'),
+ ('SizeOfHeaders', 'uintle:32'),
+ ('CheckSum', 'uintle:32'),
+ ('Subsystem', 'uintle:16'),
+ ('DllCharacteristics', 'uintle:16'),
+ ('SizeOfStackReserve', address_size),
+ ('SizeOfStackCommit', address_size),
+ ('SizeOfHeapReserve', address_size),
+ ('SizeOfHeapCommit', address_size),
+ ('LoaderFlags', 'uintle:32'),
+ ('NumberOfRvaAndSizes', 'uintle:32'),
+ ]
+
+ if size > 28:
+ _, bits_read = parse_items(stream, items, self.items)
+ self.bits_read += bits_read
+
+ if 'NumberOfRvaAndSizes' in self.items:
+ index = 0
+ self.items['Data Directories'] = dict()
+ while self.bits_read / 8 < size:
+ d = self.items['Data Directories'][COFF_DATA_DIRECTORY_TYPES[index]] = dict()
+
+ _, bits_read = parse_items(stream, [('VirtualAddress', 'uintle:32'), ('Size', 'uintle:32')], d)
+ self.bits_read += bits_read
+ index += 1
+
+
+class COFFFileHeader:
+ def __init__(self, stream):
+ self.items = {}
+ self.section_headers = []
+
+ items = [
+ ('Machine', 'uintle:16'),
+ ('NumberOfSections', 'uintle:16'),
+ ('TimeDateStamp', 'uintle:32'),
+ ('PointerToSymbolTable', 'uintle:32'),
+ ('NumberOfSymbols', 'uintle:32'),
+ ('SizeOfOptionalHeader', 'uintle:16'),
+ ('Characteristics', 'uintle:16')
+ ]
+ _, self.bits_read = parse_items(stream, items, self.items)
+
+ self.OptionalHeader = OptionalHeader(stream, self.items['SizeOfOptionalHeader'])
+ self.bits_read += self.OptionalHeader.bits_read
+
+ # start reading section headers
+ num_sections = self.items['NumberOfSections']
+
+        while num_sections > 0:
+ section_header = SectionHeader(stream)
+ self.bits_read += section_header.bits_read
+ self.section_headers.append(section_header)
+ num_sections -= 1
+
+ self.section_headers.sort(key=lambda header: header.offset)
+
+ # Read Relocations
+ self.process_relocs(stream)
+
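+    # Per the PE/COFF spec, a base-relocation block is an 8-byte header (32-bit
+    # page RVA, 32-bit block size) followed by (block_size - 8) / 2 entries of
+    # 16 bits each: the high 4 bits are the relocation type and the low 12 bits
+    # the offset within the page.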
+ def process_relocs(self, stream):
+ reloc_table = chained_safe_get(self.OptionalHeader.items, ['Data Directories', 'Base Relocation Table'])
+ if reloc_table is None: return
+
+ orig_pos = stream.bitpos
+ _, stream.bytepos = self.get_rva_section(reloc_table['VirtualAddress'])
+ end_pos = stream.bitpos + reloc_table['Size'] * 8
+
+ while stream.bitpos < end_pos:
+ page_rva = stream.read('uintle:32')
+ block_size = stream.read('uintle:32')
+
+ for i in range(0, int((block_size - 8) / 2)):
+ data = stream.read('uintle:16')
+ typ = data >> 12
+ offset = data & 0xFFF
+
+ if offset == 0 and i > 0: continue
+
+                assert typ in (IMAGE_REL_BASED_HIGHLOW, IMAGE_REL_BASED_DIR64)
+
+ cur_pos = stream.bitpos
+ sh, value_bytepos = self.get_rva_section(page_rva + offset)
+ stream.bytepos = value_bytepos
+ value = stream.read('uintle:32' if typ == IMAGE_REL_BASED_HIGHLOW else 'uintle:64')
+
+ # remove BaseAddress
+ value -= self.OptionalHeader.items['ImageBase']
+
+ bit_size = (4 if typ == IMAGE_REL_BASED_HIGHLOW else 8) * 8
+ stream.overwrite(bitstring.BitArray(uint=value, length=bit_size), pos=value_bytepos * 8)
+ stream.pos = cur_pos
+
+ stream.bitpos = orig_pos
+
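+    # Worked example (illustrative): for a section with VirtualAddress 0x1000
+    # and PointerToRawData 0x400, RVA 0x1234 maps to file offset
+    # 0x1234 - 0x1000 + 0x400 = 0x634.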
+ def get_rva_section(self, rva):
+ for sh in self.section_headers:
+ if rva < sh.items['VirtualAddress'] or rva >= sh.items['VirtualAddress'] + sh.items['VirtualSize']:
+ continue
+
+ file_pointer = rva - sh.items['VirtualAddress'] + sh.items['PointerToRawData']
+ return sh, file_pointer
+
+ raise Exception('Could not match RVA to section')
+
+
+def create_temp_file(suffix=""):
+ fd, path = tempfile.mkstemp(suffix=suffix)
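+    # Close the descriptor immediately so external tools (e.g. openssl) can
+    # reopen the file by path, which can otherwise fail on Windows while the
+    # handle is still open.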
+ os.close(fd)
+ return path
+
+
+class ExpandPath(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, os.path.abspath(os.path.expanduser(values)))
+
+
+# Fetches a stored password via the keyring module. On Windows the username is
+# folded into the service name, since the credential store doesn't allow
+# multiple usernames for the same server.
+def get_password(service_name, user_name):
+ try:
+ import keyring
+
+ # windows doesn't allow multiple usernames for the same server, argh
+ if sys.platform == "win32":
+ password = keyring.get_password(service_name + "-" + user_name, user_name)
+ else:
+ password = keyring.get_password(service_name, user_name)
+
+ return password
+    except Exception:
+        # Fall back to manual entry (keyring missing or lookup failed); this
+        # also allows testing without caching the password on the system.
+        print("Missing keyring module...getting password manually")
+
+ return None
+
+
+def openssl_cmd(app_args, args, password_in, password_out):
+ password = get_password(app_args.password_service, app_args.password_user) if (password_in or password_out) else None
+ env = None
+ args = [app_args.openssl_path] + args
+
+ if password is not None:
+ env = os.environ.copy()
+ env["COFF_PW"] = password
+
+ if password_in: args += ["-passin", "env:COFF_PW"]
+ if password_out: args += ["-passout", "env:COFF_PW", "-password", "env:COFF_PW"]
+
+ subprocess.check_call(args, env=env)
+
+
+def processMachoBinary(filename):
+ outDict = dict()
+ outDict['result'] = False
+
+ setOfArchDigests = SetOfArchitectureDigest()
+ archDigestIdx = 0
+
+ parsedMacho = macholib.MachO.MachO(filename)
+
+    for header in parsedMacho.headers:
+ arch_digest = ArchitectureDigest()
+ lc_segment = LC_SEGMENT
+
+ arch_digest.setComponentByName('cpuType', CPUType(header.header.cputype))
+ arch_digest.setComponentByName('cpuSubType', CPUSubType(header.header.cpusubtype))
+
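+        # 0x1000007 is CPU_TYPE_X86_64 (CPU_TYPE_X86 | CPU_ARCH_ABI64), so the
+        # 64-bit segment load command applies.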
+ if header.header.cputype == 0x1000007:
+ lc_segment = LC_SEGMENT_64
+
+
+ segment_commands = list(filter(lambda x: x[0].cmd == lc_segment, header.commands))
+ text_segment_commands = list(filter(lambda x: x[1].segname.decode("utf-8").startswith("__TEXT"), segment_commands))
+
+ code_segment_digests = SetOfCodeSegmentDigest()
+ code_segment_idx = 0
+
+ for text_command in text_segment_commands:
+
+ codeSegmentDigest = CodeSegmentDigest()
+ codeSegmentDigest.setComponentByName('offset', text_command[1].fileoff)
+
+ sectionDigestIdx = 0
+ set_of_digest = SetOfCodeSectionDigest()
+ for section in text_command[2]:
+ digester = hashlib.sha256()
+ digester.update(section.section_data)
+ digest = digester.digest()
+
+ code_section_digest = CodeSectionDigest()
+ code_section_digest.setComponentByName('offset', section.offset)
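+                # 2.16.840.1.101.3.4.2.1 is the NIST OID for SHA-256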
+ code_section_digest.setComponentByName('digestAlgorithm', univ.ObjectIdentifier('2.16.840.1.101.3.4.2.1'))
+ code_section_digest.setComponentByName('digest', univ.OctetString(digest))
+
+ set_of_digest.setComponentByPosition(sectionDigestIdx, code_section_digest)
+ sectionDigestIdx += 1
+
+ codeSegmentDigest.setComponentByName('codeSectionDigests', set_of_digest)
+
+ code_segment_digests.setComponentByPosition(code_segment_idx, codeSegmentDigest)
+
+ code_segment_idx += 1
+
+ arch_digest.setComponentByName('CodeSegmentDigests', code_segment_digests)
+ setOfArchDigests.setComponentByPosition(archDigestIdx, arch_digest)
+ archDigestIdx += 1
+
+ outDict['result'] = True
+
+ if outDict['result']:
+ appDigest = ApplicationDigest()
+ appDigest.setComponentByName('version', 1)
+ appDigest.setComponentByName('digests', setOfArchDigests)
+ outDict['digest'] = appDigest
+
+ return outDict
+
+
+def processCOFFBinary(stream):
+ outDict = dict()
+ outDict['result'] = False
+
+ # find the COFF header.
+ # skip forward past the MSDOS stub header to 0x3c.
+ stream.bytepos = 0x3c
+
+ # read 4 bytes, this is the file offset of the PE signature.
+ pe_sig_offset = stream.read('uintle:32')
+ stream.bytepos = pe_sig_offset
+
+ # read 4 bytes, make sure it's a PE signature.
+ signature = stream.read('uintle:32')
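+    # 0x00004550 is b'PE\x00\x00' read little-endian ('P' = 0x50, 'E' = 0x45)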
+ if signature != 0x00004550:
+ return outDict
+
+ # after signature is the actual COFF file header.
+ coff_header = COFFFileHeader(stream)
+
+ arch_digest = ArchitectureDigest()
+ if coff_header.items['Machine'] == 0x14c:
+ arch_digest.setComponentByName('cpuType', CPUType('IMAGE_FILE_MACHINE_I386'))
+ elif coff_header.items['Machine'] == 0x8664:
+ arch_digest.setComponentByName('cpuType', CPUType('IMAGE_FILE_MACHINE_AMD64'))
+
+ arch_digest.setComponentByName('cpuSubType', CPUSubType('IMAGE_UNUSED'))
+
+ text_section_headers = list(filter(lambda x: (x.items['Characteristics'] & IMAGE_SCN_MEM_EXECUTE) == IMAGE_SCN_MEM_EXECUTE, coff_header.section_headers))
+
+ code_segment_digests = SetOfCodeSegmentDigest()
+ code_segment_idx = 0
+ for code_sect_header in text_section_headers:
+ stream.bytepos = code_sect_header.offset
+ code_sect_bytes = stream.read('bytes:' + str(code_sect_header.items['VirtualSize']))
+
+ digester = hashlib.sha256()
+ digester.update(code_sect_bytes)
+ digest = digester.digest()
+
+ # with open('segment_' + str(code_sect_header.offset) + ".bin", 'wb') as f:
+ # f.write(code_sect_bytes)
+
+ code_section_digest = CodeSectionDigest()
+ code_section_digest.setComponentByName('offset', code_sect_header.offset)
+ code_section_digest.setComponentByName('digestAlgorithm', univ.ObjectIdentifier('2.16.840.1.101.3.4.2.1'))
+ code_section_digest.setComponentByName('digest', univ.OctetString(digest))
+
+ set_of_digest = SetOfCodeSectionDigest()
+ set_of_digest.setComponentByPosition(0, code_section_digest)
+
+ codeSegmentDigest = CodeSegmentDigest()
+ codeSegmentDigest.setComponentByName('offset', code_sect_header.offset)
+ codeSegmentDigest.setComponentByName('codeSectionDigests', set_of_digest)
+
+ code_segment_digests.setComponentByPosition(code_segment_idx, codeSegmentDigest)
+ code_segment_idx += 1
+
+ arch_digest.setComponentByName('CodeSegmentDigests', code_segment_digests)
+
+ setOfArchDigests = SetOfArchitectureDigest()
+ setOfArchDigests.setComponentByPosition(0, arch_digest)
+
+ appDigest = ApplicationDigest()
+
+ appDigest.setComponentByName('version', 1)
+ appDigest.setComponentByName('digests', setOfArchDigests)
+
+ outDict['result'] = True
+ outDict['digest'] = appDigest
+
+ return outDict
+
+def main():
+ parser = argparse.ArgumentParser(description='PE/COFF Signer')
+ parser.add_argument('-input', action=ExpandPath, required=True, help="File to parse.")
+ parser.add_argument('-output', action=ExpandPath, required=True, help="File to write to.")
+ parser.add_argument('-openssl_path', action=ExpandPath, help="Path to OpenSSL to create signed voucher")
+ parser.add_argument('-signer_pfx', action=ExpandPath, help="Path to certificate to use to sign voucher. Must contain full certificate chain.")
+ parser.add_argument('-password_service', help="Name of Keyring/Wallet service/host")
+ parser.add_argument('-password_user', help="Name of Keyring/Wallet user name")
+ parser.add_argument('-verbose', action='store_true', help="Verbose output.")
+ app_args = parser.parse_args()
+
+ # to simplify relocation handling we use a mutable BitStream so we can remove
+ # the BaseAddress from each relocation
+ stream = bitstring.BitStream(filename=app_args.input)
+
+    parse_result = processCOFFBinary(stream)
+
+    if not parse_result['result']:
+        parse_result = processMachoBinary(app_args.input)
+
+    if not parse_result['result']:
+        raise Exception("Invalid File")
+
+    binaryDigest = der_encoder.encode(parse_result['digest'])
+
+ with open(app_args.output, 'wb') as f:
+ f.write(binaryDigest)
+
+ # sign with openssl if specified
+ if app_args.openssl_path is not None:
+ assert app_args.signer_pfx is not None
+
+ out_base, out_ext = os.path.splitext(app_args.output)
+ signed_path = out_base + ".signed" + out_ext
+
+ # http://stackoverflow.com/questions/12507277/how-to-fix-unable-to-write-random-state-in-openssl
+ temp_files = []
+ if sys.platform == "win32" and "RANDFILE" not in os.environ:
+ temp_file = create_temp_file()
+ temp_files += [temp_file]
+ os.environ["RANDFILE"] = temp_file
+
+ try:
+ # create PEM from PFX
+ pfx_pem_path = create_temp_file(".pem")
+ temp_files += [pfx_pem_path]
+ print("Extracting PEM from PFX to:" + pfx_pem_path)
+ openssl_cmd(app_args, ["pkcs12", "-in", app_args.signer_pfx, "-out", pfx_pem_path], True, True)
+
+ # extract CA certs
+ pfx_cert_path = create_temp_file(".cert")
+ temp_files += [pfx_cert_path]
+ print("Extracting cert from PFX to:" + pfx_cert_path)
+ openssl_cmd(app_args, ["pkcs12", "-in", app_args.signer_pfx, "-cacerts", "-nokeys", "-out", pfx_cert_path], True, False)
+
+ # we embed the public keychain for client validation
+ openssl_cmd(app_args, ["cms", "-sign", "-nodetach", "-md", "sha256", "-binary", "-in", app_args.output, "-outform", "der", "-out", signed_path, "-signer", pfx_pem_path, "-certfile", pfx_cert_path], True, False)
+ finally:
+ for t in temp_files:
+ if "RANDFILE" in os.environ and t == os.environ["RANDFILE"]:
+ del os.environ["RANDFILE"]
+ os.unlink(t)
+
+if __name__ == '__main__':
+ main()
diff --git a/python/futures/CHANGES b/python/futures/CHANGES
new file mode 100644
index 000000000..09b1ab183
--- /dev/null
+++ b/python/futures/CHANGES
@@ -0,0 +1,89 @@
+3.0.2
+=====
+
+- Made multiprocessing optional again on implementations other than just Jython
+
+
+3.0.1
+=====
+
+- Made Executor.map() non-greedy
+
+
+3.0.0
+=====
+
+- Dropped Python 2.5 and 3.1 support
+- Removed the deprecated "futures" top level package
+- Applied patch for issue 11777 (Executor.map does not submit futures until
+ iter.next() is called)
+- Applied patch for issue 15015 (accessing a non-existent attribute)
+- Applied patch for issue 16284 (memory leak)
+- Applied patch for issue 20367 (behavior of concurrent.futures.as_completed()
+ for duplicate arguments)
+
+2.2.0
+=====
+
+- Added the set_exception_info() and exception_info() methods to Future
+ to enable extraction of tracebacks on Python 2.x
+- Added support for Future.set_exception_info() to ThreadPoolExecutor
+
+
+2.1.6
+=====
+
+- Fixed a problem with files missing from the source distribution
+
+
+2.1.5
+=====
+
+- Fixed Jython compatibility
+- Added metadata for wheel support
+
+
+2.1.4
+=====
+
+- Ported the library again from Python 3.2.5 to get the latest bug fixes
+
+
+2.1.3
+=====
+
+- Fixed race condition in wait(return_when=ALL_COMPLETED)
+ (http://bugs.python.org/issue14406) -- thanks Ralf Schmitt
+- Added missing setUp() methods to several test classes
+
+
+2.1.2
+=====
+
+- Fixed installation problem on Python 3.1
+
+
+2.1.1
+=====
+
+- Fixed missing 'concurrent' package declaration in setup.py
+
+
+2.1
+===
+
+- Moved the code from the 'futures' package to 'concurrent.futures' to provide
+  a drop-in backport that matches the code in the Python 3.2 standard library
+- Deprecated the old 'futures' package
+
+
+2.0
+===
+
+- Changed implementation to match PEP 3148
+
+
+1.0
+===
+
+Initial release.
diff --git a/python/futures/LICENSE b/python/futures/LICENSE
new file mode 100644
index 000000000..c430db0f1
--- /dev/null
+++ b/python/futures/LICENSE
@@ -0,0 +1,21 @@
+Copyright 2009 Brian Quinlan. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY BRIAN QUINLAN "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+SHALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file
diff --git a/python/futures/MANIFEST.in b/python/futures/MANIFEST.in
new file mode 100644
index 000000000..52860d043
--- /dev/null
+++ b/python/futures/MANIFEST.in
@@ -0,0 +1,5 @@
+recursive-include docs *
+include *.py
+include tox.ini
+include CHANGES
+include LICENSE
diff --git a/python/futures/PKG-INFO b/python/futures/PKG-INFO
new file mode 100644
index 000000000..0f7e6250c
--- /dev/null
+++ b/python/futures/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.0
+Name: futures
+Version: 3.0.2
+Summary: Backport of the concurrent.futures package from Python 3.2
+Home-page: https://github.com/agronholm/pythonfutures
+Author: Alex Gronholm
+Author-email: alex.gronholm+pypi@nextday.fi
+License: BSD
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 2 :: Only
diff --git a/python/futures/concurrent/__init__.py b/python/futures/concurrent/__init__.py
new file mode 100644
index 000000000..b36383a61
--- /dev/null
+++ b/python/futures/concurrent/__init__.py
@@ -0,0 +1,3 @@
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
diff --git a/python/futures/concurrent/futures/__init__.py b/python/futures/concurrent/futures/__init__.py
new file mode 100644
index 000000000..428b14bdf
--- /dev/null
+++ b/python/futures/concurrent/futures/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Execute computations asynchronously using threads or processes."""
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+from concurrent.futures._base import (FIRST_COMPLETED,
+ FIRST_EXCEPTION,
+ ALL_COMPLETED,
+ CancelledError,
+ TimeoutError,
+ Future,
+ Executor,
+ wait,
+ as_completed)
+from concurrent.futures.thread import ThreadPoolExecutor
+
+try:
+ from concurrent.futures.process import ProcessPoolExecutor
+except ImportError:
+ # some platforms don't have multiprocessing
+ pass
diff --git a/python/futures/concurrent/futures/_base.py b/python/futures/concurrent/futures/_base.py
new file mode 100644
index 000000000..fbf027512
--- /dev/null
+++ b/python/futures/concurrent/futures/_base.py
@@ -0,0 +1,605 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+import collections
+import logging
+import threading
+import itertools
+import time
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+FIRST_COMPLETED = 'FIRST_COMPLETED'
+FIRST_EXCEPTION = 'FIRST_EXCEPTION'
+ALL_COMPLETED = 'ALL_COMPLETED'
+_AS_COMPLETED = '_AS_COMPLETED'
+
+# Possible future states (for internal use by the futures package).
+PENDING = 'PENDING'
+RUNNING = 'RUNNING'
+# The future was cancelled by the user...
+CANCELLED = 'CANCELLED'
+# ...and _Waiter.add_cancelled() was called by a worker.
+CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
+FINISHED = 'FINISHED'
+
+_FUTURE_STATES = [
+ PENDING,
+ RUNNING,
+ CANCELLED,
+ CANCELLED_AND_NOTIFIED,
+ FINISHED
+]
+
+_STATE_TO_DESCRIPTION_MAP = {
+ PENDING: "pending",
+ RUNNING: "running",
+ CANCELLED: "cancelled",
+ CANCELLED_AND_NOTIFIED: "cancelled",
+ FINISHED: "finished"
+}
+
+# Logger for internal use by the futures package.
+LOGGER = logging.getLogger("concurrent.futures")
+
+class Error(Exception):
+ """Base class for all future-related exceptions."""
+ pass
+
+class CancelledError(Error):
+ """The Future was cancelled."""
+ pass
+
+class TimeoutError(Error):
+ """The operation exceeded the given deadline."""
+ pass
+
+class _Waiter(object):
+ """Provides the event that wait() and as_completed() block on."""
+ def __init__(self):
+ self.event = threading.Event()
+ self.finished_futures = []
+
+ def add_result(self, future):
+ self.finished_futures.append(future)
+
+ def add_exception(self, future):
+ self.finished_futures.append(future)
+
+ def add_cancelled(self, future):
+ self.finished_futures.append(future)
+
+class _AsCompletedWaiter(_Waiter):
+ """Used by as_completed()."""
+
+ def __init__(self):
+ super(_AsCompletedWaiter, self).__init__()
+ self.lock = threading.Lock()
+
+ def add_result(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _FirstCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_COMPLETED)."""
+
+ def add_result(self, future):
+ super(_FirstCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ super(_FirstCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ super(_FirstCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _AllCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
+
+ def __init__(self, num_pending_calls, stop_on_exception):
+ self.num_pending_calls = num_pending_calls
+ self.stop_on_exception = stop_on_exception
+ self.lock = threading.Lock()
+ super(_AllCompletedWaiter, self).__init__()
+
+ def _decrement_pending_calls(self):
+ with self.lock:
+ self.num_pending_calls -= 1
+ if not self.num_pending_calls:
+ self.event.set()
+
+ def add_result(self, future):
+ super(_AllCompletedWaiter, self).add_result(future)
+ self._decrement_pending_calls()
+
+ def add_exception(self, future):
+ super(_AllCompletedWaiter, self).add_exception(future)
+ if self.stop_on_exception:
+ self.event.set()
+ else:
+ self._decrement_pending_calls()
+
+ def add_cancelled(self, future):
+ super(_AllCompletedWaiter, self).add_cancelled(future)
+ self._decrement_pending_calls()
+
+class _AcquireFutures(object):
+ """A context manager that does an ordered acquire of Future conditions."""
+
+ def __init__(self, futures):
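+        # Sorting by id() imposes a global acquisition order, so two threads
+        # acquiring overlapping sets of futures cannot deadlock.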
+ self.futures = sorted(futures, key=id)
+
+ def __enter__(self):
+ for future in self.futures:
+ future._condition.acquire()
+
+ def __exit__(self, *args):
+ for future in self.futures:
+ future._condition.release()
+
+def _create_and_install_waiters(fs, return_when):
+ if return_when == _AS_COMPLETED:
+ waiter = _AsCompletedWaiter()
+ elif return_when == FIRST_COMPLETED:
+ waiter = _FirstCompletedWaiter()
+ else:
+ pending_count = sum(
+ f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
+
+ if return_when == FIRST_EXCEPTION:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
+ elif return_when == ALL_COMPLETED:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
+ else:
+ raise ValueError("Invalid return condition: %r" % return_when)
+
+ for f in fs:
+ f._waiters.append(waiter)
+
+ return waiter
+
+def as_completed(fs, timeout=None):
+ """An iterator over the given futures that yields each as it completes.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ iterate over.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+ An iterator that yields the given Futures as they complete (finished or
+ cancelled). If any given Futures are duplicated, they will be returned
+ once.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ """
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = set(fs)
+ with _AcquireFutures(fs):
+ finished = set(
+ f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ pending = fs - finished
+ waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
+
+ try:
+ for future in finished:
+ yield future
+
+ while pending:
+ if timeout is None:
+ wait_timeout = None
+ else:
+ wait_timeout = end_time - time.time()
+ if wait_timeout < 0:
+ raise TimeoutError(
+ '%d (of %d) futures unfinished' % (
+ len(pending), len(fs)))
+
+ waiter.event.wait(wait_timeout)
+
+ with waiter.lock:
+ finished = waiter.finished_futures
+ waiter.finished_futures = []
+ waiter.event.clear()
+
+ for future in finished:
+ yield future
+ pending.remove(future)
+
+ finally:
+ for f in fs:
+ f._waiters.remove(waiter)
+
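+# Illustrative usage (assumes an executor `ex` and a callable `work`):
+#   futures = [ex.submit(work, i) for i in range(4)]
+#   for f in as_completed(futures, timeout=10):
+#       print(f.result())
+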
+DoneAndNotDoneFutures = collections.namedtuple(
+ 'DoneAndNotDoneFutures', 'done not_done')
+def wait(fs, timeout=None, return_when=ALL_COMPLETED):
+ """Wait for the futures in the given sequence to complete.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ wait upon.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+ return_when: Indicates when this function should return. The options
+ are:
+
+ FIRST_COMPLETED - Return when any future finishes or is
+ cancelled.
+ FIRST_EXCEPTION - Return when any future finishes by raising an
+ exception. If no future raises an exception
+ then it is equivalent to ALL_COMPLETED.
+ ALL_COMPLETED - Return when all futures finish or are cancelled.
+
+ Returns:
+ A named 2-tuple of sets. The first set, named 'done', contains the
+        futures that completed (finished or were cancelled) before the wait
+ completed. The second set, named 'not_done', contains uncompleted
+ futures.
+ """
+ with _AcquireFutures(fs):
+ done = set(f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ not_done = set(fs) - done
+
+ if (return_when == FIRST_COMPLETED) and done:
+ return DoneAndNotDoneFutures(done, not_done)
+ elif (return_when == FIRST_EXCEPTION) and done:
+ if any(f for f in done
+ if not f.cancelled() and f.exception() is not None):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ if len(done) == len(fs):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ waiter = _create_and_install_waiters(fs, return_when)
+
+ waiter.event.wait(timeout)
+ for f in fs:
+ f._waiters.remove(waiter)
+
+ done.update(waiter.finished_futures)
+ return DoneAndNotDoneFutures(done, set(fs) - done)
+
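+# Illustrative usage: block until the first raised exception (or until all
+# futures finish), then inspect both sets:
+#   done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
+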
+class Future(object):
+ """Represents the result of an asynchronous computation."""
+
+ def __init__(self):
+ """Initializes the future. Should not be called by clients."""
+ self._condition = threading.Condition()
+ self._state = PENDING
+ self._result = None
+ self._exception = None
+ self._traceback = None
+ self._waiters = []
+ self._done_callbacks = []
+
+ def _invoke_callbacks(self):
+ for callback in self._done_callbacks:
+ try:
+ callback(self)
+ except Exception:
+ LOGGER.exception('exception calling callback for %r', self)
+
+ def __repr__(self):
+ with self._condition:
+ if self._state == FINISHED:
+ if self._exception:
+ return '<Future at %s state=%s raised %s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._exception.__class__.__name__)
+ else:
+ return '<Future at %s state=%s returned %s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._result.__class__.__name__)
+ return '<Future at %s state=%s>' % (
+ hex(id(self)),
+ _STATE_TO_DESCRIPTION_MAP[self._state])
+
+ def cancel(self):
+ """Cancel the future if possible.
+
+ Returns True if the future was cancelled, False otherwise. A future
+ cannot be cancelled if it is running or has already completed.
+ """
+ with self._condition:
+ if self._state in [RUNNING, FINISHED]:
+ return False
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ return True
+
+ self._state = CANCELLED
+ self._condition.notify_all()
+
+ self._invoke_callbacks()
+ return True
+
+ def cancelled(self):
+ """Return True if the future has cancelled."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
+
+ def running(self):
+ """Return True if the future is currently executing."""
+ with self._condition:
+ return self._state == RUNNING
+
+ def done(self):
+ """Return True of the future was cancelled or finished executing."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
+
+ def __get_result(self):
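+        # Python 2 three-expression raise: re-raises the stored exception with
+        # its original traceback.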
+ if self._exception:
+ raise type(self._exception), self._exception, self._traceback
+ else:
+ return self._result
+
+ def add_done_callback(self, fn):
+ """Attaches a callable that will be called when the future finishes.
+
+ Args:
+ fn: A callable that will be called with this future as its only
+ argument when the future completes or is cancelled. The callable
+ will always be called by a thread in the same process in which
+ it was added. If the future has already completed or been
+ cancelled then the callable will be called immediately. These
+ callables are called in the order that they were added.
+ """
+ with self._condition:
+ if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
+ self._done_callbacks.append(fn)
+ return
+ fn(self)
+
+ def result(self, timeout=None):
+ """Return the result of the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the result if the future
+ isn't done. If None, then there is no limit on the wait time.
+
+ Returns:
+ The result of the call that the future represents.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ Exception: If the call raised then that exception will be raised.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+ else:
+ raise TimeoutError()
+
+ def exception_info(self, timeout=None):
+ """Return a tuple of (exception, traceback) raised by the call that the
+ future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+            A tuple of the exception raised by the call that the future
+            represents and its traceback, or (None, None) if the call
+            completed without raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+ else:
+ raise TimeoutError()
+
+ def exception(self, timeout=None):
+ """Return the exception raised by the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+ The exception raised by the call that the future represents or None
+ if the call completed without raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ return self.exception_info(timeout)[0]
+
+ # The following methods should only be used by Executors and in tests.
+ def set_running_or_notify_cancel(self):
+ """Mark the future as running or process any cancel notifications.
+
+ Should only be used by Executor implementations and unit tests.
+
+ If the future has been cancelled (cancel() was called and returned
+        True) then any threads waiting on the future completing (through calls
+ to as_completed() or wait()) are notified and False is returned.
+
+ If the future was not cancelled then it is put in the running state
+ (future calls to running() will return True) and True is returned.
+
+ This method should be called by Executor implementations before
+ executing the work associated with this future. If this method returns
+ False then the work should not be executed.
+
+ Returns:
+ False if the Future was cancelled, True otherwise.
+
+ Raises:
+ RuntimeError: if this method was already called or if set_result()
+ or set_exception() was called.
+ """
+ with self._condition:
+ if self._state == CANCELLED:
+ self._state = CANCELLED_AND_NOTIFIED
+ for waiter in self._waiters:
+ waiter.add_cancelled(self)
+ # self._condition.notify_all() is not necessary because
+ # self.cancel() triggers a notification.
+ return False
+ elif self._state == PENDING:
+ self._state = RUNNING
+ return True
+ else:
+ LOGGER.critical('Future %s in unexpected state: %s',
+ id(self),
+ self._state)
+ raise RuntimeError('Future in unexpected state')
+
+ def set_result(self, result):
+ """Sets the return value of work associated with the future.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._result = result
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_result(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception_info(self, exception, traceback):
+ """Sets the result of the future as being the given exception
+ and traceback.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._exception = exception
+ self._traceback = traceback
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_exception(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception(self, exception):
+ """Sets the result of the future as being the given exception.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ self.set_exception_info(exception, None)
+
+class Executor(object):
+ """This is an abstract base class for concrete asynchronous executors."""
+
+ def submit(self, fn, *args, **kwargs):
+ """Submits a callable to be executed with the given arguments.
+
+ Schedules the callable to be executed as fn(*args, **kwargs) and returns
+ a Future instance representing the execution of the callable.
+
+ Returns:
+ A Future representing the given call.
+ """
+ raise NotImplementedError()
+
+ def map(self, fn, *iterables, **kwargs):
+ """Returns a iterator equivalent to map(fn, iter).
+
+ Args:
+ fn: A callable that will take as many arguments as there are
+ passed iterables.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+            An iterator equivalent to map(fn, *iterables), but the calls may
+ be evaluated out-of-order.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ Exception: If fn(*args) raises for any values.
+ """
+ timeout = kwargs.get('timeout')
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
+
+ # Yield must be hidden in closure so that the futures are submitted
+ # before the first iterator value is required.
+ def result_iterator():
+ try:
+ for future in fs:
+ if timeout is None:
+ yield future.result()
+ else:
+ yield future.result(end_time - time.time())
+ finally:
+ for future in fs:
+ future.cancel()
+ return result_iterator()
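+
+    # Illustrative usage: results come back in argument order even though the
+    # underlying calls may complete out of order:
+    #   for sq in executor.map(lambda x: x * x, [1, 2, 3]):
+    #       print(sq)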
+
+ def shutdown(self, wait=True):
+ """Clean-up the resources associated with the Executor.
+
+        It is safe to call this method several times, but no other methods
+        may be called after this one.
+
+ Args:
+ wait: If True then shutdown will not return until all running
+ futures have finished executing and the resources used by the
+ executor have been reclaimed.
+ """
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.shutdown(wait=True)
+ return False
diff --git a/python/futures/concurrent/futures/process.py b/python/futures/concurrent/futures/process.py
new file mode 100644
index 000000000..ee463f181
--- /dev/null
+++ b/python/futures/concurrent/futures/process.py
@@ -0,0 +1,359 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ProcessPoolExecutor.
+
+The following diagram and text describe the data flow through the system:
+
+|======================= In-process =====================|== Out-of-process ==|
+
++----------+ +----------+ +--------+ +-----------+ +---------+
+| | => | Work Ids | => | | => | Call Q | => | |
+| | +----------+ | | +-----------+ | |
+| | | ... | | | | ... | | |
+| | | 6 | | | | 5, call() | | |
+| | | 7 | | | | ... | | |
+| Process | | ... | | Local | +-----------+ | Process |
+| Pool | +----------+ | Worker | | #1..n |
+| Executor | | Thread | | |
+| | +----------- + | | +-----------+ | |
+| | <=> | Work Items | <=> | | <= | Result Q | <= | |
+| | +------------+ | | +-----------+ | |
+| | | 6: call() | | | | ... | | |
+| | | future | | | | 4, result | | |
+| | | ... | | | | 3, except | | |
++----------+ +------------+ +--------+ +-----------+ +---------+
+
+Executor.submit() called:
+- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
+- adds the id of the _WorkItem to the "Work Ids" queue
+
+Local worker thread:
+- reads work ids from the "Work Ids" queue and looks up the corresponding
+ WorkItem from the "Work Items" dict: if the work item has been cancelled then
+ it is simply removed from the dict, otherwise it is repackaged as a
+ _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
+ until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
+ calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
+- reads _ResultItems from "Result Q", updates the future stored in the
+ "Work Items" dict and deletes the dict entry
+
+Process #1..n:
+- reads _CallItems from "Call Q", executes the calls, and puts the resulting
+ _ResultItems in "Request Q"
+"""
+
+import atexit
+from concurrent.futures import _base
+import Queue as queue
+import multiprocessing
+import threading
+import weakref
+import sys
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads and processes. This is done to allow the
+# interpreter to exit when there are still idle processes in a
+# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
+# allowing workers to die with the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads/processes finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items())
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join()
+
+# Controls how many more calls than processes will be queued in the call queue.
+# A smaller number will mean that processes spend more time idle waiting for
+# work while a larger number will make Future.cancel() succeed less frequently
+# (Futures in the call queue cannot be cancelled).
+EXTRA_QUEUED_CALLS = 1
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+class _ResultItem(object):
+ def __init__(self, work_id, exception=None, result=None):
+ self.work_id = work_id
+ self.exception = exception
+ self.result = result
+
+class _CallItem(object):
+ def __init__(self, work_id, fn, args, kwargs):
+ self.work_id = work_id
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+def _process_worker(call_queue, result_queue):
+ """Evaluates calls from call_queue and places the results in result_queue.
+
+ This worker is run in a separate process.
+
+ Args:
+ call_queue: A multiprocessing.Queue of _CallItems that will be read and
+ evaluated by the worker.
+        result_queue: A multiprocessing.Queue of _ResultItems that will be
+            written to by the worker.
+ """
+ while True:
+ call_item = call_queue.get(block=True)
+ if call_item is None:
+ # Wake up queue management thread
+ result_queue.put(None)
+ return
+ try:
+ r = call_item.fn(*call_item.args, **call_item.kwargs)
+ except BaseException:
+ e = sys.exc_info()[1]
+ result_queue.put(_ResultItem(call_item.work_id,
+ exception=e))
+ else:
+ result_queue.put(_ResultItem(call_item.work_id,
+ result=r))
+
+def _add_call_item_to_queue(pending_work_items,
+ work_ids,
+ call_queue):
+ """Fills call_queue with _WorkItems from pending_work_items.
+
+ This function never blocks.
+
+ Args:
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
+ are consumed and the corresponding _WorkItems from
+ pending_work_items are transformed into _CallItems and put in
+ call_queue.
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems.
+ """
+ while True:
+ if call_queue.full():
+ return
+ try:
+ work_id = work_ids.get(block=False)
+ except queue.Empty:
+ return
+ else:
+ work_item = pending_work_items[work_id]
+
+ if work_item.future.set_running_or_notify_cancel():
+ call_queue.put(_CallItem(work_id,
+ work_item.fn,
+ work_item.args,
+ work_item.kwargs),
+ block=True)
+ else:
+ del pending_work_items[work_id]
+ continue
+
+def _queue_management_worker(executor_reference,
+ processes,
+ pending_work_items,
+ work_ids_queue,
+ call_queue,
+ result_queue):
+ """Manages the communication between this process and the worker processes.
+
+ This function is run in a local thread.
+
+ Args:
+ executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
+ this thread. Used to determine if the ProcessPoolExecutor has been
+ garbage collected and that this function can exit.
+        processes: A list of the multiprocessing.Process instances used as
+ workers.
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems for processing by the process workers.
+ result_queue: A multiprocessing.Queue of _ResultItems generated by the
+ process workers.
+ """
+ nb_shutdown_processes = [0]
+ def shutdown_one_process():
+ """Tell a worker to terminate, which will in turn wake us again"""
+ call_queue.put(None)
+ nb_shutdown_processes[0] += 1
+ while True:
+ _add_call_item_to_queue(pending_work_items,
+ work_ids_queue,
+ call_queue)
+
+ result_item = result_queue.get(block=True)
+ if result_item is not None:
+ work_item = pending_work_items[result_item.work_id]
+ del pending_work_items[result_item.work_id]
+
+ if result_item.exception:
+ work_item.future.set_exception(result_item.exception)
+ else:
+ work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
+ # Check whether we should start shutting down.
+ executor = executor_reference()
+ # No more work items can be added if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns this worker has been collected OR
+ # - The executor that owns this worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown_thread:
+ # Since no new work items can be added, it is safe to shutdown
+ # this thread if there are no pending work items.
+ if not pending_work_items:
+ while nb_shutdown_processes[0] < len(processes):
+ shutdown_one_process()
+ # If .join() is not called on the created processes then
+ # some multiprocessing.Queue methods may deadlock on Mac OS
+ # X.
+ for p in processes:
+ p.join()
+ call_queue.close()
+ return
+ del executor
+
+_system_limits_checked = False
+_system_limited = None
+def _check_system_limits():
+ global _system_limits_checked, _system_limited
+ if _system_limits_checked:
+ if _system_limited:
+ raise NotImplementedError(_system_limited)
+ _system_limits_checked = True
+ try:
+ import os
+ nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
+ except (AttributeError, ValueError):
+ # sysconf not available or setting not available
+ return
+ if nsems_max == -1:
+        # indeterminate limit; assume the limit is determined
+ # by available memory only
+ return
+ if nsems_max >= 256:
+ # minimum number of semaphores available
+ # according to POSIX
+ return
+ _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
+ raise NotImplementedError(_system_limited)
+
+class ProcessPoolExecutor(_base.Executor):
+ def __init__(self, max_workers=None):
+ """Initializes a new ProcessPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of processes that can be used to
+ execute the given calls. If None or not given then as many
+ worker processes will be created as the machine has processors.
+ """
+ _check_system_limits()
+
+ if max_workers is None:
+ self._max_workers = multiprocessing.cpu_count()
+ else:
+ self._max_workers = max_workers
+
+ # Make the call queue slightly larger than the number of processes to
+ # prevent the worker processes from idling. But don't make it too big
+ # because futures in the call queue cannot be cancelled.
+ self._call_queue = multiprocessing.Queue(self._max_workers +
+ EXTRA_QUEUED_CALLS)
+ self._result_queue = multiprocessing.Queue()
+ self._work_ids = queue.Queue()
+ self._queue_management_thread = None
+ self._processes = set()
+
+ # Shutdown is a two-step process.
+ self._shutdown_thread = False
+ self._shutdown_lock = threading.Lock()
+ self._queue_count = 0
+ self._pending_work_items = {}
+
+ def _start_queue_management_thread(self):
+ # When the executor gets lost, the weakref callback will wake up
+ # the queue management thread.
+ def weakref_cb(_, q=self._result_queue):
+ q.put(None)
+ if self._queue_management_thread is None:
+ self._queue_management_thread = threading.Thread(
+ target=_queue_management_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._processes,
+ self._pending_work_items,
+ self._work_ids,
+ self._call_queue,
+ self._result_queue))
+ self._queue_management_thread.daemon = True
+ self._queue_management_thread.start()
+ _threads_queues[self._queue_management_thread] = self._result_queue
+
+ def _adjust_process_count(self):
+ for _ in range(len(self._processes), self._max_workers):
+ p = multiprocessing.Process(
+ target=_process_worker,
+ args=(self._call_queue,
+ self._result_queue))
+ p.start()
+ self._processes.add(p)
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._shutdown_thread:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._pending_work_items[self._queue_count] = w
+ self._work_ids.put(self._queue_count)
+ self._queue_count += 1
+ # Wake up queue management thread
+ self._result_queue.put(None)
+
+ self._start_queue_management_thread()
+ self._adjust_process_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown_thread = True
+ if self._queue_management_thread:
+ # Wake up queue management thread
+ self._result_queue.put(None)
+ if wait:
+ self._queue_management_thread.join()
+        # To reduce the risk of opening too many files, remove references to
+ # objects that use file descriptors.
+ self._queue_management_thread = None
+ self._call_queue = None
+ self._result_queue = None
+ self._processes = None
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
+
+atexit.register(_python_exit)
diff --git a/python/futures/concurrent/futures/thread.py b/python/futures/concurrent/futures/thread.py
new file mode 100644
index 000000000..fa5ed0c09
--- /dev/null
+++ b/python/futures/concurrent/futures/thread.py
@@ -0,0 +1,134 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ThreadPoolExecutor."""
+
+import atexit
+from concurrent.futures import _base
+import Queue as queue
+import threading
+import weakref
+import sys
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads. This is done to allow the interpreter
+# to exit when there are still idle threads in a ThreadPoolExecutor's thread
+# pool (i.e. shutdown() was not called). However, allowing workers to die with
+# the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items())
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join()
+
+atexit.register(_python_exit)
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+ def run(self):
+ if not self.future.set_running_or_notify_cancel():
+ return
+
+ try:
+ result = self.fn(*self.args, **self.kwargs)
+ except BaseException:
+ e, tb = sys.exc_info()[1:]
+ self.future.set_exception_info(e, tb)
+ else:
+ self.future.set_result(result)
+
+def _worker(executor_reference, work_queue):
+ try:
+ while True:
+ work_item = work_queue.get(block=True)
+ if work_item is not None:
+ work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
+ continue
+ executor = executor_reference()
+ # Exit if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns the worker has been collected OR
+ # - The executor that owns the worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown:
+                # Notify other workers
+ work_queue.put(None)
+ return
+ del executor
+ except BaseException:
+ _base.LOGGER.critical('Exception in worker', exc_info=True)
+
+class ThreadPoolExecutor(_base.Executor):
+ def __init__(self, max_workers):
+ """Initializes a new ThreadPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of threads that can be used to
+ execute the given calls.
+ """
+ self._max_workers = max_workers
+ self._work_queue = queue.Queue()
+ self._threads = set()
+ self._shutdown = False
+ self._shutdown_lock = threading.Lock()
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._shutdown:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._work_queue.put(w)
+ self._adjust_thread_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def _adjust_thread_count(self):
+ # When the executor gets lost, the weakref callback will wake up
+ # the worker threads.
+ def weakref_cb(_, q=self._work_queue):
+ q.put(None)
+ # TODO(bquinlan): Should avoid creating new threads if there are more
+ # idle threads than items in the work queue.
+ if len(self._threads) < self._max_workers:
+ t = threading.Thread(target=_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._work_queue))
+ t.daemon = True
+ t.start()
+ self._threads.add(t)
+ _threads_queues[t] = self._work_queue
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown = True
+ self._work_queue.put(None)
+ if wait:
+ for t in self._threads:
+ t.join()
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
diff --git a/python/futures/crawl.py b/python/futures/crawl.py
new file mode 100644
index 000000000..86e0af7fe
--- /dev/null
+++ b/python/futures/crawl.py
@@ -0,0 +1,74 @@
+"""Compare the speed of downloading URLs sequentially vs. using futures."""
+
+import functools
+import time
+import timeit
+import sys
+
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+
+from concurrent.futures import (as_completed, ThreadPoolExecutor,
+ ProcessPoolExecutor)
+
+URLS = ['http://www.google.com/',
+ 'http://www.apple.com/',
+ 'http://www.ibm.com',
+ 'http://www.thisurlprobablydoesnotexist.com',
+ 'http://www.slashdot.org/',
+ 'http://www.python.org/',
+ 'http://www.bing.com/',
+ 'http://www.facebook.com/',
+ 'http://www.yahoo.com/',
+ 'http://www.youtube.com/',
+ 'http://www.blogger.com/']
+
+def load_url(url, timeout):
+ kwargs = {'timeout': timeout} if sys.version_info >= (2, 6) else {}
+ return urlopen(url, **kwargs).read()
+
+def download_urls_sequential(urls, timeout=60):
+ url_to_content = {}
+ for url in urls:
+ try:
+ url_to_content[url] = load_url(url, timeout=timeout)
+        except Exception:
+ pass
+ return url_to_content
+
+def download_urls_with_executor(urls, executor, timeout=60):
+ try:
+ url_to_content = {}
+ future_to_url = dict((executor.submit(load_url, url, timeout), url)
+ for url in urls)
+
+ for future in as_completed(future_to_url):
+ try:
+ url_to_content[future_to_url[future]] = future.result()
+            except Exception:
+ pass
+ return url_to_content
+ finally:
+ executor.shutdown()
+
+def main():
+ for name, fn in [('sequential',
+ functools.partial(download_urls_sequential, URLS)),
+ ('processes',
+ functools.partial(download_urls_with_executor,
+ URLS,
+ ProcessPoolExecutor(10))),
+ ('threads',
+ functools.partial(download_urls_with_executor,
+ URLS,
+ ThreadPoolExecutor(10)))]:
+ sys.stdout.write('%s: ' % name.ljust(12))
+ start = time.time()
+ url_map = fn()
+ sys.stdout.write('%.2f seconds (%d of %d downloaded)\n' %
+ (time.time() - start, len(url_map), len(URLS)))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/futures/docs/Makefile b/python/futures/docs/Makefile
new file mode 100644
index 000000000..f69d84035
--- /dev/null
+++ b/python/futures/docs/Makefile
@@ -0,0 +1,88 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf _build/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html
+ @echo
+ @echo "Build finished. The HTML pages are in _build/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in _build/dirhtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in _build/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) _build/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in _build/qthelp, like this:"
+ @echo "# qcollectiongenerator _build/qthelp/futures.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile _build/qthelp/futures.qhc"
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in _build/latex."
+ @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+ "run these through (pdf)latex."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes
+ @echo
+ @echo "The overview file is in _build/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in _build/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in _build/doctest/output.txt."
diff --git a/python/futures/docs/conf.py b/python/futures/docs/conf.py
new file mode 100644
index 000000000..5cea44c88
--- /dev/null
+++ b/python/futures/docs/conf.py
@@ -0,0 +1,194 @@
+# -*- coding: utf-8 -*-
+#
+# futures documentation build configuration file, created by
+# sphinx-quickstart on Wed Jun 3 19:35:34 2009.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'futures'
+copyright = u'2009-2011, Brian Quinlan'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '2.1.6'
+# The full version, including alpha/beta/rc tags.
+release = '2.1.6'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'futuresdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'futures.tex', u'futures Documentation',
+ u'Brian Quinlan', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/python/futures/docs/index.rst b/python/futures/docs/index.rst
new file mode 100644
index 000000000..4103f014b
--- /dev/null
+++ b/python/futures/docs/index.rst
@@ -0,0 +1,347 @@
+:mod:`concurrent.futures` --- Asynchronous computation
+======================================================
+
+.. module:: concurrent.futures
+ :synopsis: Execute computations asynchronously using threads or processes.
+
+The :mod:`concurrent.futures` module provides a high-level interface for
+asynchronously executing callables.
+
+The asynchronous execution can be performed by threads using
+:class:`ThreadPoolExecutor` or by separate processes using
+:class:`ProcessPoolExecutor`. Both implement the same interface, which is
+defined by the abstract :class:`Executor` class.
+
+Executor Objects
+----------------
+
+:class:`Executor` is an abstract class that provides methods to execute calls
+asynchronously. It should not be used directly, but through its two
+subclasses: :class:`ThreadPoolExecutor` and :class:`ProcessPoolExecutor`.
+
+.. method:: Executor.submit(fn, *args, **kwargs)
+
+ Schedules the callable to be executed as *fn*(*\*args*, *\*\*kwargs*) and
+ returns a :class:`Future` representing the execution of the callable.
+
+::
+
+ with ThreadPoolExecutor(max_workers=1) as executor:
+ future = executor.submit(pow, 323, 1235)
+ print(future.result())
+
+.. method:: Executor.map(func, *iterables, timeout=None)
+
+   Equivalent to map(*func*, *\*iterables*) but *func* is executed asynchronously
+ and several calls to *func* may be made concurrently. The returned iterator
+ raises a :exc:`TimeoutError` if :meth:`__next__()` is called and the result
+ isn't available after *timeout* seconds from the original call to
+ :meth:`map()`. *timeout* can be an int or float. If *timeout* is not
+ specified or ``None`` then there is no limit to the wait time. If a call
+ raises an exception then that exception will be raised when its value is
+ retrieved from the iterator.
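+
+   For example, a sketch of how a per-call exception surfaces only when its
+   value is retrieved (the arguments here are illustrative):
+
+::
+
+    with ThreadPoolExecutor(max_workers=2) as executor:
+        # divmod(1, 0) raises ZeroDivisionError, but not at submission time.
+        results = executor.map(divmod, [4, 2, 1], [2, 1, 0])
+        for value in results:
+            # The exception is re-raised here, when the failing call's
+            # value is taken from the iterator.
+            print(value)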
+
+.. method:: Executor.shutdown(wait=True)
+
+ Signal the executor that it should free any resources that it is using when
+ the currently pending futures are done executing. Calls to
+ :meth:`Executor.submit` and :meth:`Executor.map` made after shutdown will
+ raise :exc:`RuntimeError`.
+
+ If *wait* is `True` then this method will not return until all the pending
+ futures are done executing and the resources associated with the executor
+ have been freed. If *wait* is `False` then this method will return
+ immediately and the resources associated with the executor will be freed
+ when all pending futures are done executing. Regardless of the value of
+ *wait*, the entire Python program will not exit until all pending futures
+ are done executing.
+
+ You can avoid having to call this method explicitly if you use the `with`
+   statement, which will shut down the `Executor` (waiting as if
+ `Executor.shutdown` were called with *wait* set to `True`):
+
+::
+
+ import shutil
+ with ThreadPoolExecutor(max_workers=4) as e:
+ e.submit(shutil.copy, 'src1.txt', 'dest1.txt')
+ e.submit(shutil.copy, 'src2.txt', 'dest2.txt')
+ e.submit(shutil.copy, 'src3.txt', 'dest3.txt')
+ e.submit(shutil.copy, 'src3.txt', 'dest4.txt')
+
+
+ThreadPoolExecutor Objects
+--------------------------
+
+The :class:`ThreadPoolExecutor` class is an :class:`Executor` subclass that uses
+a pool of threads to execute calls asynchronously.
+
+Deadlock can occur when the callable associated with a :class:`Future` waits on
+the results of another :class:`Future`. For example:
+
+::
+
+ import time
+ def wait_on_b():
+ time.sleep(5)
+ print(b.result()) # b will never complete because it is waiting on a.
+ return 5
+
+ def wait_on_a():
+ time.sleep(5)
+ print(a.result()) # a will never complete because it is waiting on b.
+ return 6
+
+
+ executor = ThreadPoolExecutor(max_workers=2)
+ a = executor.submit(wait_on_b)
+ b = executor.submit(wait_on_a)
+
+And:
+
+::
+
+ def wait_on_future():
+ f = executor.submit(pow, 5, 2)
+ # This will never complete because there is only one worker thread and
+ # it is executing this function.
+ print(f.result())
+
+ executor = ThreadPoolExecutor(max_workers=1)
+ executor.submit(wait_on_future)
+
+.. class:: ThreadPoolExecutor(max_workers)
+
+   Executes calls asynchronously using a pool of at most *max_workers* threads.
+
+.. _threadpoolexecutor-example:
+
+ThreadPoolExecutor Example
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+::
+
+ from concurrent import futures
+ import urllib.request
+
+ URLS = ['http://www.foxnews.com/',
+ 'http://www.cnn.com/',
+ 'http://europe.wsj.com/',
+ 'http://www.bbc.co.uk/',
+ 'http://some-made-up-domain.com/']
+
+ def load_url(url, timeout):
+ return urllib.request.urlopen(url, timeout=timeout).read()
+
+ with futures.ThreadPoolExecutor(max_workers=5) as executor:
+ future_to_url = dict((executor.submit(load_url, url, 60), url)
+ for url in URLS)
+
+ for future in futures.as_completed(future_to_url):
+ url = future_to_url[future]
+ if future.exception() is not None:
+ print('%r generated an exception: %s' % (url,
+ future.exception()))
+ else:
+ print('%r page is %d bytes' % (url, len(future.result())))
+
+ProcessPoolExecutor Objects
+---------------------------
+
+The :class:`ProcessPoolExecutor` class is an :class:`Executor` subclass that
+uses a pool of processes to execute calls asynchronously.
+:class:`ProcessPoolExecutor` uses the :mod:`multiprocessing` module, which
+allows it to side-step the :term:`Global Interpreter Lock` but also means that
+only picklable objects can be executed and returned.
+
+Calling :class:`Executor` or :class:`Future` methods from a callable submitted
+to a :class:`ProcessPoolExecutor` will result in deadlock.
+
+.. class:: ProcessPoolExecutor(max_workers=None)
+
+ Executes calls asynchronously using a pool of at most *max_workers*
+ processes. If *max_workers* is ``None`` or not given then as many worker
+ processes will be created as the machine has processors.
+
+.. _processpoolexecutor-example:
+
+ProcessPoolExecutor Example
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+::
+
+ import math
+
+ PRIMES = [
+ 112272535095293,
+ 112582705942171,
+ 112272535095293,
+ 115280095190773,
+ 115797848077099,
+ 1099726899285419]
+
+ def is_prime(n):
+ if n % 2 == 0:
+ return False
+
+ sqrt_n = int(math.floor(math.sqrt(n)))
+ for i in range(3, sqrt_n + 1, 2):
+ if n % i == 0:
+ return False
+ return True
+
+ def main():
+ with futures.ProcessPoolExecutor() as executor:
+ for number, prime in zip(PRIMES, executor.map(is_prime, PRIMES)):
+ print('%d is prime: %s' % (number, prime))
+
+ if __name__ == '__main__':
+ main()
+
+Future Objects
+--------------
+
+The :class:`Future` class encapsulates the asynchronous execution of a callable.
+:class:`Future` instances are created by :meth:`Executor.submit`.
+
+.. method:: Future.cancel()
+
+ Attempt to cancel the call. If the call is currently being executed then
+ it cannot be cancelled and the method will return `False`, otherwise the call
+ will be cancelled and the method will return `True`.
+
+.. method:: Future.cancelled()
+
+ Return `True` if the call was successfully cancelled.
+
+.. method:: Future.running()
+
+ Return `True` if the call is currently being executed and cannot be
+ cancelled.
+
+.. method:: Future.done()
+
+ Return `True` if the call was successfully cancelled or finished running.
+
+.. method:: Future.result(timeout=None)
+
+ Return the value returned by the call. If the call hasn't yet completed then
+ this method will wait up to *timeout* seconds. If the call hasn't completed
+ in *timeout* seconds then a :exc:`TimeoutError` will be raised. *timeout* can
+   be an int or float. If *timeout* is not specified or ``None`` then there is no
+ limit to the wait time.
+
+ If the future is cancelled before completing then :exc:`CancelledError` will
+ be raised.
+
+ If the call raised then this method will raise the same exception.
+
+.. method:: Future.exception(timeout=None)
+
+ Return the exception raised by the call. If the call hasn't yet completed
+ then this method will wait up to *timeout* seconds. If the call hasn't
+ completed in *timeout* seconds then a :exc:`TimeoutError` will be raised.
+ *timeout* can be an int or float. If *timeout* is not specified or ``None``
+ then there is no limit to the wait time.
+
+ If the future is cancelled before completing then :exc:`CancelledError` will
+ be raised.
+
+ If the call completed without raising then ``None`` is returned.
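+
+   A short sketch contrasting :meth:`result` and :meth:`exception` for a call
+   that raises, assuming an ``executor`` is available (the callable is
+   illustrative):
+
+::
+
+    future = executor.submit(divmod, 1, 0)
+    print(future.exception(timeout=10))  # returns the ZeroDivisionError
+    future.result(timeout=10)            # re-raises the ZeroDivisionError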
+
+.. method:: Future.add_done_callback(fn)
+
+ Attaches the callable *fn* to the future. *fn* will be called, with the
+ future as its only argument, when the future is cancelled or finishes
+ running.
+
+ Added callables are called in the order that they were added and are always
+ called in a thread belonging to the process that added them. If the callable
+ raises an :exc:`Exception` then it will be logged and ignored. If the
+ callable raises another :exc:`BaseException` then the behavior is not
+ defined.
+
+ If the future has already completed or been cancelled then *fn* will be
+ called immediately.
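+
+   For example, a minimal sketch (the callback name is illustrative):
+
+::
+
+    def report_done(future):
+        # Called with the finished future as its only argument.
+        print('done, result=%r' % future.result())
+
+    with ThreadPoolExecutor(max_workers=1) as executor:
+        future = executor.submit(pow, 2, 10)
+        future.add_done_callback(report_done)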
+
+Internal Future Methods
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The following :class:`Future` methods are meant for use in unit tests and
+:class:`Executor` implementations.
+
+.. method:: Future.set_running_or_notify_cancel()
+
+ This method should only be called by :class:`Executor` implementations before
+ executing the work associated with the :class:`Future` and by unit tests.
+
+ If the method returns `False` then the :class:`Future` was cancelled i.e.
+ :meth:`Future.cancel` was called and returned `True`. Any threads waiting
+ on the :class:`Future` completing (i.e. through :func:`as_completed` or
+ :func:`wait`) will be woken up.
+
+ If the method returns `True` then the :class:`Future` was not cancelled
+ and has been put in the running state i.e. calls to
+ :meth:`Future.running` will return `True`.
+
+ This method can only be called once and cannot be called after
+ :meth:`Future.set_result` or :meth:`Future.set_exception` have been
+ called.
+
+.. method:: Future.set_result(result)
+
+ Sets the result of the work associated with the :class:`Future` to *result*.
+
+ This method should only be used by Executor implementations and unit tests.
+
+.. method:: Future.set_exception(exception)
+
+ Sets the result of the work associated with the :class:`Future` to the
+ :class:`Exception` *exception*.
+
+ This method should only be used by Executor implementations and unit tests.
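+
+A sketch of how an :class:`Executor` implementation might drive these methods
+for a single work item (the helper name is illustrative):
+
+::
+
+    def run_work_item(future, fn, args, kwargs):
+        # Refuse to run work whose future was cancelled while still pending.
+        if not future.set_running_or_notify_cancel():
+            return
+        try:
+            result = fn(*args, **kwargs)
+        except BaseException as e:
+            future.set_exception(e)
+        else:
+            future.set_result(result)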
+
+Module Functions
+----------------
+
+.. function:: wait(fs, timeout=None, return_when=ALL_COMPLETED)
+
+ Wait for the :class:`Future` instances (possibly created by different
+ :class:`Executor` instances) given by *fs* to complete. Returns a named
+ 2-tuple of sets. The first set, named "done", contains the futures that
+ completed (finished or were cancelled) before the wait completed. The second
+ set, named "not_done", contains uncompleted futures.
+
+ *timeout* can be used to control the maximum number of seconds to wait before
+ returning. *timeout* can be an int or float. If *timeout* is not specified or
+ ``None`` then there is no limit to the wait time.
+
+ *return_when* indicates when this function should return. It must be one of
+ the following constants:
+
+ +-----------------------------+----------------------------------------+
+ | Constant | Description |
+ +=============================+========================================+
+ | :const:`FIRST_COMPLETED` | The function will return when any |
+ | | future finishes or is cancelled. |
+ +-----------------------------+----------------------------------------+
+ | :const:`FIRST_EXCEPTION` | The function will return when any |
+ | | future finishes by raising an |
+ | | exception. If no future raises an |
+ | | exception then it is equivalent to |
+ | | `ALL_COMPLETED`. |
+ +-----------------------------+----------------------------------------+
+ | :const:`ALL_COMPLETED` | The function will return when all |
+ | | futures finish or are cancelled. |
+ +-----------------------------+----------------------------------------+
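+
+   For example, a sketch using :const:`FIRST_COMPLETED` (the submitted calls
+   are illustrative):
+
+::
+
+    import time
+    from concurrent import futures
+
+    executor = futures.ThreadPoolExecutor(max_workers=2)
+    fast = executor.submit(pow, 2, 8)
+    slow = executor.submit(time.sleep, 5)
+
+    done, not_done = futures.wait([fast, slow],
+                                  return_when=futures.FIRST_COMPLETED)
+    # 'done' will typically contain only 'fast' at this point.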
+
+.. function:: as_completed(fs, timeout=None)
+
+ Returns an iterator over the :class:`Future` instances (possibly created by
+ different :class:`Executor` instances) given by *fs* that yields futures as
+   they complete (finish or are cancelled). Any futures given by *fs* that
+ are duplicated will be returned once. Any futures that completed
+ before :func:`as_completed` is called will be yielded first. The returned
+ iterator raises a :exc:`TimeoutError` if :meth:`~iterator.__next__` is
+ called and the result isn't available after *timeout* seconds from the
+ original call to :func:`as_completed`. *timeout* can be an int or float.
+ If *timeout* is not specified or ``None``, there is no limit to the wait
+ time.
diff --git a/python/futures/docs/make.bat b/python/futures/docs/make.bat
new file mode 100644
index 000000000..3e8021b56
--- /dev/null
+++ b/python/futures/docs/make.bat
@@ -0,0 +1,112 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+set SPHINXBUILD=sphinx-build
+set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  changes    to make an overview of all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (_build\*) do rmdir /q /s %%i
+ del /q /s _build\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html
+ echo.
+ echo.Build finished. The HTML pages are in _build/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml
+ echo.
+ echo.Build finished. The HTML pages are in _build/dirhtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in _build/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in _build/qthelp, like this:
+ echo.^> qcollectiongenerator _build\qthelp\futures.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile _build\qthelp\futures.qhc
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex
+ echo.
+ echo.Build finished; the LaTeX files are in _build/latex.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes
+ echo.
+ echo.The overview file is in _build/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in _build/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in _build/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/python/futures/primes.py b/python/futures/primes.py
new file mode 100644
index 000000000..0da2b3e64
--- /dev/null
+++ b/python/futures/primes.py
@@ -0,0 +1,50 @@
+from __future__ import with_statement
+import math
+import time
+import sys
+
+from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
+
+PRIMES = [
+ 112272535095293,
+ 112582705942171,
+ 112272535095293,
+ 115280095190773,
+ 115797848077099,
+ 117450548693743,
+ 993960000099397]
+
+def is_prime(n):
+ if n % 2 == 0:
+ return False
+
+ sqrt_n = int(math.floor(math.sqrt(n)))
+ for i in range(3, sqrt_n + 1, 2):
+ if n % i == 0:
+ return False
+ return True
+
+def sequential():
+ return list(map(is_prime, PRIMES))
+
+def with_process_pool_executor():
+ with ProcessPoolExecutor(10) as executor:
+ return list(executor.map(is_prime, PRIMES))
+
+def with_thread_pool_executor():
+ with ThreadPoolExecutor(10) as executor:
+ return list(executor.map(is_prime, PRIMES))
+
+def main():
+ for name, fn in [('sequential', sequential),
+ ('processes', with_process_pool_executor),
+ ('threads', with_thread_pool_executor)]:
+ sys.stdout.write('%s: ' % name.ljust(12))
+ start = time.time()
+ if fn() != [True] * len(PRIMES):
+ sys.stdout.write('failed\n')
+ else:
+ sys.stdout.write('%.2f seconds\n' % (time.time() - start))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/futures/setup.cfg b/python/futures/setup.cfg
new file mode 100644
index 000000000..e04dbabe3
--- /dev/null
+++ b/python/futures/setup.cfg
@@ -0,0 +1,12 @@
+[build_sphinx]
+build-dir = build/sphinx
+source-dir = docs
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
+[upload_docs]
+upload-dir = build/sphinx/html
+
diff --git a/python/futures/setup.py b/python/futures/setup.py
new file mode 100755
index 000000000..7c1d40ee2
--- /dev/null
+++ b/python/futures/setup.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+extras = {}
+try:
+ from setuptools import setup
+ extras['zip_safe'] = False
+except ImportError:
+ from distutils.core import setup
+
+setup(name='futures',
+ version='3.0.2',
+ description='Backport of the concurrent.futures package from Python 3.2',
+ author='Brian Quinlan',
+ author_email='brian@sweetapp.com',
+ maintainer='Alex Gronholm',
+ maintainer_email='alex.gronholm+pypi@nextday.fi',
+ url='https://github.com/agronholm/pythonfutures',
+ packages=['concurrent', 'concurrent.futures'],
+ license='BSD',
+ classifiers=['License :: OSI Approved :: BSD License',
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 2 :: Only'],
+ **extras
+ )
diff --git a/python/futures/test_futures.py b/python/futures/test_futures.py
new file mode 100644
index 000000000..ace340cb0
--- /dev/null
+++ b/python/futures/test_futures.py
@@ -0,0 +1,724 @@
+import os
+import subprocess
+import sys
+import threading
+import functools
+import contextlib
+import logging
+import re
+import time
+from StringIO import StringIO
+from test import test_support
+
+from concurrent import futures
+from concurrent.futures._base import (
+ PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+
+def reap_threads(func):
+ """Use this function when threads are being used. This will
+ ensure that the threads are cleaned up even when the test fails.
+ If threading is unavailable this function does nothing.
+ """
+ @functools.wraps(func)
+ def decorator(*args):
+ key = test_support.threading_setup()
+ try:
+ return func(*args)
+ finally:
+ test_support.threading_cleanup(*key)
+ return decorator
+
+
+# Executing the interpreter in a subprocess
+def _assert_python(expected_success, *args, **env_vars):
+ cmd_line = [sys.executable]
+ if not env_vars:
+ cmd_line.append('-E')
+ # Need to preserve the original environment, for in-place testing of
+ # shared library builds.
+ env = os.environ.copy()
+ # But a special flag that can be set to override -- in this case, the
+ # caller is responsible to pass the full environment.
+ if env_vars.pop('__cleanenv', None):
+ env = {}
+ env.update(env_vars)
+ cmd_line.extend(args)
+ p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ env=env)
+ try:
+ out, err = p.communicate()
+ finally:
+ subprocess._cleanup()
+ p.stdout.close()
+ p.stderr.close()
+ rc = p.returncode
+ err = strip_python_stderr(err)
+ if (rc and expected_success) or (not rc and not expected_success):
+ raise AssertionError(
+ "Process return code is %d, "
+ "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
+ return rc, out, err
+
+
+def assert_python_ok(*args, **env_vars):
+ """
+ Assert that running the interpreter with `args` and optional environment
+ variables `env_vars` is ok and return a (return code, stdout, stderr) tuple.
+ """
+ return _assert_python(True, *args, **env_vars)
+
+
+def strip_python_stderr(stderr):
+ """Strip the stderr of a Python process from potential debug output
+ emitted by the interpreter.
+
+ This will typically be run on the result of the communicate() method
+ of a subprocess.Popen object.
+ """
+ stderr = re.sub(r"\[\d+ refs\]\r?\n?$".encode(), "".encode(), stderr).strip()
+ return stderr
+
+
+@contextlib.contextmanager
+def captured_stderr():
+    """Return a context manager that captures output sent to the root logger
+    by temporarily attaching a StreamHandler backed by a StringIO."""
+ logging_stream = StringIO()
+ handler = logging.StreamHandler(logging_stream)
+ logging.root.addHandler(handler)
+
+ try:
+ yield logging_stream
+ finally:
+ logging.root.removeHandler(handler)
+
+
+def create_future(state=PENDING, exception=None, result=None):
+ f = Future()
+ f._state = state
+ f._exception = exception
+ f._result = result
+ return f
+
+
+PENDING_FUTURE = create_future(state=PENDING)
+RUNNING_FUTURE = create_future(state=RUNNING)
+CANCELLED_FUTURE = create_future(state=CANCELLED)
+CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
+EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
+SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
+
+
+def mul(x, y):
+ return x * y
+
+
+def sleep_and_raise(t):
+ time.sleep(t)
+ raise Exception('this is an exception')
+
+def sleep_and_print(t, msg):
+ time.sleep(t)
+ print(msg)
+ sys.stdout.flush()
+
+
+class ExecutorMixin:
+ worker_count = 5
+
+ def setUp(self):
+ self.t1 = time.time()
+ try:
+ self.executor = self.executor_type(max_workers=self.worker_count)
+ except NotImplementedError:
+ e = sys.exc_info()[1]
+ self.skipTest(str(e))
+ self._prime_executor()
+
+ def tearDown(self):
+ self.executor.shutdown(wait=True)
+ dt = time.time() - self.t1
+ if test_support.verbose:
+ print("%.2fs" % dt)
+ self.assertLess(dt, 60, "synchronization issue: test lasted too long")
+
+ def _prime_executor(self):
+ # Make sure that the executor is ready to do work before running the
+ # tests. This should reduce the probability of timeouts in the tests.
+ futures = [self.executor.submit(time.sleep, 0.1)
+ for _ in range(self.worker_count)]
+
+ for f in futures:
+ f.result()
+
+
+class ThreadPoolMixin(ExecutorMixin):
+ executor_type = futures.ThreadPoolExecutor
+
+
+class ProcessPoolMixin(ExecutorMixin):
+ executor_type = futures.ProcessPoolExecutor
+
+
+class ExecutorShutdownTest(unittest.TestCase):
+ def test_run_after_shutdown(self):
+ self.executor.shutdown()
+ self.assertRaises(RuntimeError,
+ self.executor.submit,
+ pow, 2, 5)
+
+ def test_interpreter_shutdown(self):
+ # Test the atexit hook for shutdown of worker threads and processes
+ rc, out, err = assert_python_ok('-c', """if 1:
+ from concurrent.futures import %s
+ from time import sleep
+ from test_futures import sleep_and_print
+ t = %s(5)
+ t.submit(sleep_and_print, 1.0, "apple")
+ """ % (self.executor_type.__name__, self.executor_type.__name__))
+ # Errors in atexit hooks don't change the process exit code, check
+ # stderr manually.
+ self.assertFalse(err)
+ self.assertEqual(out.strip(), "apple".encode())
+
+ def test_hang_issue12364(self):
+ fs = [self.executor.submit(time.sleep, 0.1) for _ in range(50)]
+ self.executor.shutdown()
+ for f in fs:
+ f.result()
+
+
+class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest):
+ def _prime_executor(self):
+ pass
+
+ def test_threads_terminate(self):
+ self.executor.submit(mul, 21, 2)
+ self.executor.submit(mul, 6, 7)
+ self.executor.submit(mul, 3, 14)
+ self.assertEqual(len(self.executor._threads), 3)
+ self.executor.shutdown()
+ for t in self.executor._threads:
+ t.join()
+
+ def test_context_manager_shutdown(self):
+ with futures.ThreadPoolExecutor(max_workers=5) as e:
+ executor = e
+ self.assertEqual(list(e.map(abs, range(-5, 5))),
+ [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
+
+ for t in executor._threads:
+ t.join()
+
+ def test_del_shutdown(self):
+ executor = futures.ThreadPoolExecutor(max_workers=5)
+ executor.map(abs, range(-5, 5))
+ threads = executor._threads
+ del executor
+
+ for t in threads:
+ t.join()
+
+
+class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
+ def _prime_executor(self):
+ pass
+
+ def test_processes_terminate(self):
+ self.executor.submit(mul, 21, 2)
+ self.executor.submit(mul, 6, 7)
+ self.executor.submit(mul, 3, 14)
+ self.assertEqual(len(self.executor._processes), 5)
+ processes = self.executor._processes
+ self.executor.shutdown()
+
+ for p in processes:
+ p.join()
+
+ def test_context_manager_shutdown(self):
+ with futures.ProcessPoolExecutor(max_workers=5) as e:
+ processes = e._processes
+ self.assertEqual(list(e.map(abs, range(-5, 5))),
+ [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
+
+ for p in processes:
+ p.join()
+
+ def test_del_shutdown(self):
+ executor = futures.ProcessPoolExecutor(max_workers=5)
+ list(executor.map(abs, range(-5, 5)))
+ queue_management_thread = executor._queue_management_thread
+ processes = executor._processes
+ del executor
+
+ queue_management_thread.join()
+ for p in processes:
+ p.join()
+
+
+class WaitTests(unittest.TestCase):
+
+ def test_first_completed(self):
+ future1 = self.executor.submit(mul, 21, 2)
+ future2 = self.executor.submit(time.sleep, 1.5)
+
+ done, not_done = futures.wait(
+ [CANCELLED_FUTURE, future1, future2],
+ return_when=futures.FIRST_COMPLETED)
+
+ self.assertEqual(set([future1]), done)
+ self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)
+
+ def test_first_completed_some_already_completed(self):
+ future1 = self.executor.submit(time.sleep, 1.5)
+
+ finished, pending = futures.wait(
+ [CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
+ return_when=futures.FIRST_COMPLETED)
+
+ self.assertEqual(
+ set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
+ finished)
+ self.assertEqual(set([future1]), pending)
+
+ def test_first_exception(self):
+ future1 = self.executor.submit(mul, 2, 21)
+ future2 = self.executor.submit(sleep_and_raise, 1.5)
+ future3 = self.executor.submit(time.sleep, 3)
+
+ finished, pending = futures.wait(
+ [future1, future2, future3],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([future1, future2]), finished)
+ self.assertEqual(set([future3]), pending)
+
+ def test_first_exception_some_already_complete(self):
+ future1 = self.executor.submit(divmod, 21, 0)
+ future2 = self.executor.submit(time.sleep, 1.5)
+
+ finished, pending = futures.wait(
+ [SUCCESSFUL_FUTURE,
+ CANCELLED_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ future1, future2],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ future1]), finished)
+ self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)
+
+ def test_first_exception_one_already_failed(self):
+ future1 = self.executor.submit(time.sleep, 2)
+
+ finished, pending = futures.wait(
+ [EXCEPTION_FUTURE, future1],
+ return_when=futures.FIRST_EXCEPTION)
+
+ self.assertEqual(set([EXCEPTION_FUTURE]), finished)
+ self.assertEqual(set([future1]), pending)
+
+ def test_all_completed(self):
+ future1 = self.executor.submit(divmod, 2, 0)
+ future2 = self.executor.submit(mul, 2, 21)
+
+ finished, pending = futures.wait(
+ [SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ future1,
+ future2],
+ return_when=futures.ALL_COMPLETED)
+
+ self.assertEqual(set([SUCCESSFUL_FUTURE,
+ CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ future1,
+ future2]), finished)
+ self.assertEqual(set(), pending)
+
+ def test_timeout(self):
+ future1 = self.executor.submit(mul, 6, 7)
+ future2 = self.executor.submit(time.sleep, 3)
+
+ finished, pending = futures.wait(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2],
+ timeout=1.5,
+ return_when=futures.ALL_COMPLETED)
+
+ self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1]), finished)
+ self.assertEqual(set([future2]), pending)
+
+
+class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests):
+
+ def test_pending_calls_race(self):
+ # Issue #14406: multi-threaded race condition when waiting on all
+ # futures.
+ event = threading.Event()
+ def future_func():
+ event.wait()
+ oldswitchinterval = sys.getcheckinterval()
+ sys.setcheckinterval(1)
+ try:
+ fs = set(self.executor.submit(future_func) for i in range(100))
+ event.set()
+ futures.wait(fs, return_when=futures.ALL_COMPLETED)
+ finally:
+ sys.setcheckinterval(oldswitchinterval)
+
+
+class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests):
+ pass
+
+
+class AsCompletedTests(unittest.TestCase):
+ # TODO(brian@sweetapp.com): Should have a test with a non-zero timeout.
+ def test_no_timeout(self):
+ future1 = self.executor.submit(mul, 2, 21)
+ future2 = self.executor.submit(mul, 7, 6)
+
+ completed = set(futures.as_completed(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2]))
+ self.assertEqual(set(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1, future2]),
+ completed)
+
+ def test_zero_timeout(self):
+ future1 = self.executor.submit(time.sleep, 2)
+ completed_futures = set()
+ try:
+ for future in futures.as_completed(
+ [CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE,
+ future1],
+ timeout=0):
+ completed_futures.add(future)
+ except futures.TimeoutError:
+ pass
+
+ self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
+ EXCEPTION_FUTURE,
+ SUCCESSFUL_FUTURE]),
+ completed_futures)
+
+ def test_duplicate_futures(self):
+ # Issue 20367. Duplicate futures should not raise exceptions or give
+ # duplicate responses.
+ future1 = self.executor.submit(time.sleep, 2)
+ completed = [f for f in futures.as_completed([future1,future1])]
+ self.assertEqual(len(completed), 1)
+
+
+class ThreadPoolAsCompletedTests(ThreadPoolMixin, AsCompletedTests):
+ pass
+
+
+class ProcessPoolAsCompletedTests(ProcessPoolMixin, AsCompletedTests):
+ pass
+
+
+class ExecutorTest(unittest.TestCase):
+ # Executor.shutdown() and context manager usage is tested by
+ # ExecutorShutdownTest.
+ def test_submit(self):
+ future = self.executor.submit(pow, 2, 8)
+ self.assertEqual(256, future.result())
+
+ def test_submit_keyword(self):
+ future = self.executor.submit(mul, 2, y=8)
+ self.assertEqual(16, future.result())
+
+ def test_map(self):
+ self.assertEqual(
+ list(self.executor.map(pow, range(10), range(10))),
+ list(map(pow, range(10), range(10))))
+
+ def test_map_exception(self):
+ i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
+ self.assertEqual(next(i), (0, 1))
+ self.assertEqual(next(i), (0, 1))
+ self.assertRaises(ZeroDivisionError, next, i)
+
+ def test_map_timeout(self):
+ results = []
+ try:
+ for i in self.executor.map(time.sleep,
+ [0, 0, 3],
+ timeout=1.5):
+ results.append(i)
+ except futures.TimeoutError:
+ pass
+ else:
+ self.fail('expected TimeoutError')
+
+ self.assertEqual([None, None], results)
+
+
+class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
+ def test_map_submits_without_iteration(self):
+ """Tests verifying issue 11777."""
+ finished = []
+ def record_finished(n):
+ finished.append(n)
+
+ self.executor.map(record_finished, range(10))
+ self.executor.shutdown(wait=True)
+ self.assertEqual(len(finished), 10)
+
+
+class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
+ pass
+
+
+class FutureTests(unittest.TestCase):
+ def test_done_callback_with_result(self):
+ callback_result = [None]
+ def fn(callback_future):
+ callback_result[0] = callback_future.result()
+
+ f = Future()
+ f.add_done_callback(fn)
+ f.set_result(5)
+ self.assertEqual(5, callback_result[0])
+
+ def test_done_callback_with_exception(self):
+ callback_exception = [None]
+ def fn(callback_future):
+ callback_exception[0] = callback_future.exception()
+
+ f = Future()
+ f.add_done_callback(fn)
+ f.set_exception(Exception('test'))
+ self.assertEqual(('test',), callback_exception[0].args)
+
+ def test_done_callback_with_cancel(self):
+ was_cancelled = [None]
+ def fn(callback_future):
+ was_cancelled[0] = callback_future.cancelled()
+
+ f = Future()
+ f.add_done_callback(fn)
+ self.assertTrue(f.cancel())
+ self.assertTrue(was_cancelled[0])
+
+ def test_done_callback_raises(self):
+ with captured_stderr() as stderr:
+ raising_was_called = [False]
+ fn_was_called = [False]
+
+ def raising_fn(callback_future):
+ raising_was_called[0] = True
+ raise Exception('doh!')
+
+ def fn(callback_future):
+ fn_was_called[0] = True
+
+ f = Future()
+ f.add_done_callback(raising_fn)
+ f.add_done_callback(fn)
+ f.set_result(5)
+ self.assertTrue(raising_was_called)
+ self.assertTrue(fn_was_called)
+ self.assertIn('Exception: doh!', stderr.getvalue())
+
+ def test_done_callback_already_successful(self):
+ callback_result = [None]
+ def fn(callback_future):
+ callback_result[0] = callback_future.result()
+
+ f = Future()
+ f.set_result(5)
+ f.add_done_callback(fn)
+ self.assertEqual(5, callback_result[0])
+
+ def test_done_callback_already_failed(self):
+ callback_exception = [None]
+ def fn(callback_future):
+ callback_exception[0] = callback_future.exception()
+
+ f = Future()
+ f.set_exception(Exception('test'))
+ f.add_done_callback(fn)
+ self.assertEqual(('test',), callback_exception[0].args)
+
+ def test_done_callback_already_cancelled(self):
+ was_cancelled = [None]
+ def fn(callback_future):
+ was_cancelled[0] = callback_future.cancelled()
+
+ f = Future()
+ self.assertTrue(f.cancel())
+ f.add_done_callback(fn)
+ self.assertTrue(was_cancelled[0])
+
+ def test_repr(self):
+ self.assertRegexpMatches(repr(PENDING_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=pending>')
+ self.assertRegexpMatches(repr(RUNNING_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=running>')
+ self.assertRegexpMatches(repr(CANCELLED_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=cancelled>')
+ self.assertRegexpMatches(repr(CANCELLED_AND_NOTIFIED_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=cancelled>')
+ self.assertRegexpMatches(
+ repr(EXCEPTION_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=finished raised IOError>')
+ self.assertRegexpMatches(
+ repr(SUCCESSFUL_FUTURE),
+ '<Future at 0x[0-9a-f]+ state=finished returned int>')
+
+ def test_cancel(self):
+ f1 = create_future(state=PENDING)
+ f2 = create_future(state=RUNNING)
+ f3 = create_future(state=CANCELLED)
+ f4 = create_future(state=CANCELLED_AND_NOTIFIED)
+ f5 = create_future(state=FINISHED, exception=IOError())
+ f6 = create_future(state=FINISHED, result=5)
+
+ self.assertTrue(f1.cancel())
+ self.assertEqual(f1._state, CANCELLED)
+
+ self.assertFalse(f2.cancel())
+ self.assertEqual(f2._state, RUNNING)
+
+ self.assertTrue(f3.cancel())
+ self.assertEqual(f3._state, CANCELLED)
+
+ self.assertTrue(f4.cancel())
+ self.assertEqual(f4._state, CANCELLED_AND_NOTIFIED)
+
+ self.assertFalse(f5.cancel())
+ self.assertEqual(f5._state, FINISHED)
+
+ self.assertFalse(f6.cancel())
+ self.assertEqual(f6._state, FINISHED)
+
+ def test_cancelled(self):
+ self.assertFalse(PENDING_FUTURE.cancelled())
+ self.assertFalse(RUNNING_FUTURE.cancelled())
+ self.assertTrue(CANCELLED_FUTURE.cancelled())
+ self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.cancelled())
+ self.assertFalse(EXCEPTION_FUTURE.cancelled())
+ self.assertFalse(SUCCESSFUL_FUTURE.cancelled())
+
+ def test_done(self):
+ self.assertFalse(PENDING_FUTURE.done())
+ self.assertFalse(RUNNING_FUTURE.done())
+ self.assertTrue(CANCELLED_FUTURE.done())
+ self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.done())
+ self.assertTrue(EXCEPTION_FUTURE.done())
+ self.assertTrue(SUCCESSFUL_FUTURE.done())
+
+ def test_running(self):
+ self.assertFalse(PENDING_FUTURE.running())
+ self.assertTrue(RUNNING_FUTURE.running())
+ self.assertFalse(CANCELLED_FUTURE.running())
+ self.assertFalse(CANCELLED_AND_NOTIFIED_FUTURE.running())
+ self.assertFalse(EXCEPTION_FUTURE.running())
+ self.assertFalse(SUCCESSFUL_FUTURE.running())
+
+ def test_result_with_timeout(self):
+ self.assertRaises(futures.TimeoutError,
+ PENDING_FUTURE.result, timeout=0)
+ self.assertRaises(futures.TimeoutError,
+ RUNNING_FUTURE.result, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_FUTURE.result, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
+ self.assertRaises(IOError, EXCEPTION_FUTURE.result, timeout=0)
+ self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
+
+ def test_result_with_success(self):
+ # TODO(brian@sweetapp.com): This test is timing dependant.
+ def notification():
+ # Wait until the main thread is waiting for the result.
+ time.sleep(1)
+ f1.set_result(42)
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertEqual(f1.result(timeout=5), 42)
+
+ def test_result_with_cancel(self):
+ # TODO(brian@sweetapp.com): This test is timing dependant.
+ def notification():
+ # Wait until the main thread is waiting for the result.
+ time.sleep(1)
+ f1.cancel()
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertRaises(futures.CancelledError, f1.result, timeout=5)
+
+ def test_exception_with_timeout(self):
+ self.assertRaises(futures.TimeoutError,
+ PENDING_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.TimeoutError,
+ RUNNING_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_FUTURE.exception, timeout=0)
+ self.assertRaises(futures.CancelledError,
+ CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
+ self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
+ IOError))
+ self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
+
+ def test_exception_with_success(self):
+ def notification():
+ # Wait until the main thread is waiting for the exception.
+ time.sleep(1)
+ with f1._condition:
+ f1._state = FINISHED
+ f1._exception = IOError()
+ f1._condition.notify_all()
+
+ f1 = create_future(state=PENDING)
+ t = threading.Thread(target=notification)
+ t.start()
+
+ self.assertTrue(isinstance(f1.exception(timeout=5), IOError))
+
+@reap_threads
+def test_main():
+ try:
+ test_support.run_unittest(ProcessPoolExecutorTest,
+ ThreadPoolExecutorTest,
+ ProcessPoolWaitTests,
+ ThreadPoolWaitTests,
+ ProcessPoolAsCompletedTests,
+ ThreadPoolAsCompletedTests,
+ FutureTests,
+ ProcessPoolShutdownTest,
+ ThreadPoolShutdownTest)
+ finally:
+ test_support.reap_children()
+
+if __name__ == "__main__":
+ test_main()
diff --git a/python/futures/tox.ini b/python/futures/tox.ini
new file mode 100644
index 000000000..4948bd161
--- /dev/null
+++ b/python/futures/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py26,py27
+
+[testenv]
+commands={envpython} test_futures.py []
+
+[testenv:py26]
+deps=unittest2
diff --git a/python/gdbpp/gdbpp/__init__.py b/python/gdbpp/gdbpp/__init__.py
new file mode 100644
index 000000000..d20de23a7
--- /dev/null
+++ b/python/gdbpp/gdbpp/__init__.py
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+import gdb.printing
+
+class GeckoPrettyPrinter(object):
+ pp = gdb.printing.RegexpCollectionPrettyPrinter('GeckoPrettyPrinters')
+
+ def __init__(self, name, regexp):
+ self.name = name
+ self.regexp = regexp
+
+ def __call__(self, wrapped):
+ GeckoPrettyPrinter.pp.add_printer(self.name, self.regexp, wrapped)
+ return wrapped
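+
+# A hypothetical usage sketch (illustrative only; the real printers live in
+# the modules imported below):
+#
+#   @GeckoPrettyPrinter('nsFoo', '^nsFoo$')
+#   class foo_printer(object):
+#       def __init__(self, value):
+#           self.value = value
+#
+#       def to_string(self):
+#           return str(self.value['mBar'])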
+
+import gdbpp.linkedlist
+import gdbpp.owningthread
+import gdbpp.smartptr
+import gdbpp.string
+import gdbpp.tarray
+import gdbpp.thashtable
+
+gdb.printing.register_pretty_printer(None, GeckoPrettyPrinter.pp)
diff --git a/python/gdbpp/gdbpp/linkedlist.py b/python/gdbpp/gdbpp/linkedlist.py
new file mode 100644
index 000000000..966f9b9c0
--- /dev/null
+++ b/python/gdbpp/gdbpp/linkedlist.py
@@ -0,0 +1,49 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+import itertools
+from gdbpp import GeckoPrettyPrinter
+
+# mfbt's LinkedList<T> is a doubly-linked list where the items in the list store
+# the next/prev pointers as part of themselves rather than the list structure
+# being its own independent data structure. This means:
+# - Every item may belong to at most one LinkedList instance.
+# - For our pretty printer, we only want to pretty-print the LinkedList object
+# itself. We do not want to start printing every item in the list whenever
+# we run into a LinkedListElement<T>.
+@GeckoPrettyPrinter('mozilla::LinkedList', '^mozilla::LinkedList<.*>$')
+class linkedlist_printer(object):
+ def __init__(self, value):
+ self.value = value
+ # mfbt's LinkedList has the elements of the linked list subclass from
+ # LinkedListElement<T>. We want its pointer type for casting purposes.
+ #
+ # (We want to list pointers since we expect all of these objects to be
+ # complex enough that we don't want to automatically expand them. The
+ # LinkedListElement type itself isn't small.)
+ self.t_ptr_type = value.type.template_argument(0).pointer()
+
+ def children(self):
+ # Walk mNext until we loop back around to the sentinel. The sentinel
+ # item always exists and in the zero-length base-case mNext == sentinel,
+ # so extract that immediately and update it throughout the loop.
+ sentinel = self.value['sentinel']
+ pSentinel = sentinel.address
+ pNext = sentinel['mNext']
+ i = 0
+ while pSentinel != pNext:
+ list_elem = pNext.dereference()
+ list_value = pNext.cast(self.t_ptr_type)
+ yield ('%d' % i, list_value)
+ pNext = list_elem['mNext']
+ i += 1
+
+ def to_string(self):
+ return str(self.value.type)
+
+ def display_hint(self):
+ return 'array'
diff --git a/python/gdbpp/gdbpp/owningthread.py b/python/gdbpp/gdbpp/owningthread.py
new file mode 100644
index 000000000..d102bef24
--- /dev/null
+++ b/python/gdbpp/gdbpp/owningthread.py
@@ -0,0 +1,24 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+from gdbpp import GeckoPrettyPrinter
+
+@GeckoPrettyPrinter('nsAutoOwningThread', '^nsAutoOwningThread$')
+class owning_thread_printer(object):
+ def __init__(self, value):
+ self.value = value
+
+ def to_string(self):
+ prthread_type = gdb.lookup_type('PRThread').pointer()
+ prthread = self.value['mThread'].cast(prthread_type)
+ name = prthread['name']
+
+        # if the thread doesn't have a name try to get its thread id (might not
+        # work on !linux)
+        if not name:
+            name = prthread['tid']
+
+ return name if name else '(PRThread *) %s' % prthread
diff --git a/python/gdbpp/gdbpp/smartptr.py b/python/gdbpp/gdbpp/smartptr.py
new file mode 100644
index 000000000..c35215426
--- /dev/null
+++ b/python/gdbpp/gdbpp/smartptr.py
@@ -0,0 +1,55 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+from gdbpp import GeckoPrettyPrinter
+
+@GeckoPrettyPrinter('nsWeakPtr', '^nsCOMPtr<nsIWeakReference>$')
+class weak_ptr_printer(object):
+ def __init__(self, value):
+ self.value = value
+
+ def to_string(self):
+ proxy = self.value['mRawPtr']
+ if not proxy:
+ return '[(%s) 0x0]' % proxy.type
+
+ ref_type = proxy.dynamic_type
+ weak_ptr = proxy.cast(ref_type).dereference()['mReferent']
+ if not weak_ptr:
+ return '[(%s) %s]' % (weak_ptr.type, weak_ptr)
+
+ return '[(%s) %s]' % (weak_ptr.dynamic_type, weak_ptr)
+
+@GeckoPrettyPrinter('mozilla::StaticAutoPtr', '^mozilla::StaticAutoPtr<.*>$')
+@GeckoPrettyPrinter('mozilla::StaticRefPtr', '^mozilla::StaticRefPtr<.*>$')
+@GeckoPrettyPrinter('nsAutoPtr', '^nsAutoPtr<.*>$')
+@GeckoPrettyPrinter('nsCOMPtr', '^nsCOMPtr<.*>$')
+@GeckoPrettyPrinter('RefPtr', '^RefPtr<.*>$')
+class smartptr_printer(object):
+ def __init__(self, value):
+ self.value = value['mRawPtr']
+
+ def to_string(self):
+ if not self.value:
+ type_name = str(self.value.type)
+ else:
+ type_name = str(self.value.dereference().dynamic_type.pointer())
+
+ return '[(%s) %s]' % (type_name, str(self.value))
+
+@GeckoPrettyPrinter('UniquePtr', '^mozilla::UniquePtr<.*>$')
+class uniqueptr_printer(object):
+ def __init__(self, value):
+ self.value = value['mTuple']['mFirstA']
+
+ def to_string(self):
+ if not self.value:
+ type_name = str(self.value.type)
+ else:
+ type_name = str(self.value.dereference().dynamic_type.pointer())
+
+ return '[(%s) %s]' % (type_name, str(self.value))
diff --git a/python/gdbpp/gdbpp/string.py b/python/gdbpp/gdbpp/string.py
new file mode 100644
index 000000000..33d536a02
--- /dev/null
+++ b/python/gdbpp/gdbpp/string.py
@@ -0,0 +1,19 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+from gdbpp import GeckoPrettyPrinter
+
+@GeckoPrettyPrinter('nsString', '^ns.*String$')
+class string_printer(object):
+ def __init__(self, value):
+ self.value = value
+
+ def to_string(self):
+ return self.value['mData']
+
+ def display_hint(self):
+ return 'string'
diff --git a/python/gdbpp/gdbpp/tarray.py b/python/gdbpp/gdbpp/tarray.py
new file mode 100644
index 000000000..66797e4c9
--- /dev/null
+++ b/python/gdbpp/gdbpp/tarray.py
@@ -0,0 +1,30 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+import itertools
+from gdbpp import GeckoPrettyPrinter
+
+@GeckoPrettyPrinter('InfallibleTArray', '^InfallibleTArray<.*>$')
+@GeckoPrettyPrinter('FallibleTArray', '^FallibleTArray<.*>$')
+@GeckoPrettyPrinter('AutoTArray', '^AutoTArray<.*>$')
+@GeckoPrettyPrinter('nsTArray', '^nsTArray<.*>$')
+class tarray_printer(object):
+ def __init__(self, value):
+ self.value = value
+ self.elem_type = value.type.template_argument(0)
+
+ def children(self):
+ length = self.value['mHdr'].dereference()['mLength']
+ data = self.value['mHdr'] + 1
+ elements = data.cast(self.elem_type.pointer())
+ return (('%d' % i, (elements + i).dereference()) for i in range(0, int(length)))
+
+ def to_string(self):
+ return str(self.value.type)
+
+ def display_hint(self):
+ return 'array'
diff --git a/python/gdbpp/gdbpp/thashtable.py b/python/gdbpp/gdbpp/thashtable.py
new file mode 100644
index 000000000..10aee4946
--- /dev/null
+++ b/python/gdbpp/gdbpp/thashtable.py
@@ -0,0 +1,143 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+import itertools
+from gdbpp import GeckoPrettyPrinter
+
+def walk_template_to_given_base(value, desired_tag_prefix):
+ '''Given a value of some template subclass, walk up its ancestry until we
+ hit the desired type, then return the appropriate value (which will then
+ have that type).
+ '''
+ # Base case
+ t = value.type
+ # It's possible that we're dealing with an alias template that looks like:
+ # template<typename Protocol>
+ # using ManagedContainer = nsTHashtable<nsPtrHashKey<Protocol>>;
+ # In which case we want to strip the indirection, and strip_typedefs()
+ # accomplishes this. (Disclaimer: I tried it and it worked and it didn't
+ # break my other use cases, if things start exploding, do reconsider.)
+ t = t.strip_typedefs()
+ if t.tag.startswith(desired_tag_prefix):
+ return value
+ for f in t.fields():
+ # we only care about the inheritance hierarchy
+ if not f.is_base_class:
+ continue
+ # This is the answer or something we're going to need to recurse into.
+ fv = value[f]
+ ft = fv.type
+ # slightly optimize by checking the tag rather than in the recursion
+ if ft.tag.startswith(desired_tag_prefix):
+ # found it!
+ return fv
+ return walk_template_to_given_base(fv, desired_tag_prefix)
+ return None
+
+# The templates and their inheritance hierarchy form an onion of types around
+# the nsTHashtable core at the center. All we care about is that nsTHashtable,
+# but we register for the descendant types in order to avoid the default pretty
+# printers having to unwrap those onion layers, wasting precious lines.
+@GeckoPrettyPrinter('nsClassHashtable', '^nsClassHashtable<.*>$')
+@GeckoPrettyPrinter('nsDataHashtable', '^nsDataHashtable<.*>$')
+@GeckoPrettyPrinter('nsInterfaceHashtable', '^nsInterfaceHashtable<.*>$')
+@GeckoPrettyPrinter('nsRefPtrHashtable', '^nsRefPtrHashtable<.*>$')
+@GeckoPrettyPrinter('nsBaseHashtable', '^nsBaseHashtable<.*>$')
+@GeckoPrettyPrinter('nsTHashtable', '^nsTHashtable<.*>$')
+class thashtable_printer(object):
+ def __init__(self, outer_value):
+ self.outermost_type = outer_value.type
+
+ value = walk_template_to_given_base(outer_value, 'nsTHashtable<')
+ self.value = value
+
+ self.entry_type = value.type.template_argument(0)
+
+ # -- Determine whether we're a hashTABLE or a hashSET
+ # If we're a table, the entry type will be a nsBaseHashtableET template.
+ # If we're a set, it will be something like nsPtrHashKey.
+ #
+ # So, assume we're a set if we're not nsBaseHashtableET<
+ # (It should ideally also be true that the type ends with HashKey, but
+ # since nsBaseHashtableET causes us to assume "mData" exists, let's
+ # pivot based on that.)
+ self.is_table = self.entry_type.tag.startswith('nsBaseHashtableET<')
+
+ # While we know that it has a field `mKeyHash` for the hash-code and
+ # book-keeping, and a DataType field mData for the value (if we're a
+ # table), the key field frustratingly varies by key type.
+ #
+ # So we want to walk its key type to figure out the field name. And we
+ # do mean field name. The field object is no good for subscripting the
+ # value unless the field was directly owned by that value's type. But
+ # by using a string name, we save ourselves all that fanciness.
+
+ if self.is_table:
+ # For nsBaseHashtableET<KeyClass, DataType>, we want the KeyClass
+ key_type = self.entry_type.template_argument(0)
+ else:
+ # If we're a set, our entry type is the key class already!
+ key_type = self.entry_type
+ self.key_field_name = None
+ for f in key_type.fields():
+ # No need to traverse up the type hierarchy...
+ if f.is_base_class:
+ continue
+ # ...just to skip the fields we know exist...
+ if f.name == 'mKeyHash' or f.name == 'mData':
+ continue
+ # ...and assume the first one we find is the key.
+ self.key_field_name = f.name
+ break
+
+ def children(self):
+ table = self.value['mTable']
+
+ # mEntryCount is the number of occupied slots/entries in the table.
+ # We can use this to avoid doing wasted memory reads.
+ entryCount = table['mEntryCount']
+ if entryCount == 0:
+ return
+
+ # The table capacity is tracked "cleverly" in terms of how many bits
+ # the hash needs to be shifted. CapacityFromHashShift calculates the
+ # actual entry capacity via ((uint32_t)1 << (kHashBits - mHashShift));
+ capacity = 1 << (table['kHashBits'] - table['mHashShift'])
+
+ # Pierce generation-tracking EntryStore class to get at buffer. The
+ # class instance always exists, but this char* may be null.
+ store = table['mEntryStore']['mEntryStore']
+
+ key_field_name = self.key_field_name
+
+ seenCount = 0
+ pEntry = store.cast(self.entry_type.pointer())
+ for i in range(0, int(capacity)):
+ entry = (pEntry + i).dereference()
+ # An mKeyHash of 0 means empty, 1 means deleted sentinel, so skip
+ # if that's the case.
+ if entry['mKeyHash'] <= 1:
+ continue
+
+ yield ('%d' % i, entry[key_field_name])
+ if self.is_table:
+ yield ('%d' % i, entry['mData'])
+
+ # Stop iterating if we know there are no more occupied slots.
+ seenCount += 1
+ if seenCount >= entryCount:
+ break
+
+ def to_string(self):
+ # The most specific template type is the most interesting.
+ return str(self.outermost_type)
+
+ def display_hint(self):
+ if self.is_table:
+ return 'map'
+ else:
+ return 'array'
diff --git a/python/jsmin/jsmin/__init__.py b/python/jsmin/jsmin/__init__.py
new file mode 100644
index 000000000..033a08701
--- /dev/null
+++ b/python/jsmin/jsmin/__init__.py
@@ -0,0 +1,238 @@
+# This code is original from jsmin by Douglas Crockford, it was translated to
+# Python by Baruch Even. It was rewritten by Dave St.Germain for speed.
+#
+# The MIT License (MIT)
+#
+# Copyright (c) 2013 Dave St.Germain
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+
+import sys
+is_3 = sys.version_info >= (3, 0)
+if is_3:
+ import io
+else:
+ import StringIO
+ try:
+ import cStringIO
+ except ImportError:
+ cStringIO = None
+
+
+__all__ = ['jsmin', 'JavascriptMinify']
+__version__ = '2.0.11'
+
+
+def jsmin(js, **kwargs):
+ """
+ returns a minified version of the javascript string
+ """
+ if not is_3:
+ if cStringIO and not isinstance(js, unicode):
+ # strings can use cStringIO for a 3x performance
+ # improvement, but unicode (in python2) cannot
+ klass = cStringIO.StringIO
+ else:
+ klass = StringIO.StringIO
+ else:
+ klass = io.StringIO
+ ins = klass(js)
+ outs = klass()
+ JavascriptMinify(ins, outs, **kwargs).minify()
+ return outs.getvalue()
+
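+# Example (illustrative; output follows from the rules implemented below):
+#   jsmin('var x = 1; // comment')  ->  'var x=1;'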
+
+class JavascriptMinify(object):
+ """
+ Minify an input stream of javascript, writing
+ to an output stream
+ """
+
+ def __init__(self, instream=None, outstream=None, quote_chars="'\""):
+ self.ins = instream
+ self.outs = outstream
+ self.quote_chars = quote_chars
+
+ def minify(self, instream=None, outstream=None):
+ if instream and outstream:
+ self.ins, self.outs = instream, outstream
+
+ self.is_return = False
+ self.return_buf = ''
+
+ def write(char):
+ # all of this is to support literal regular expressions.
+ # sigh
+ if char in 'return':
+ self.return_buf += char
+ self.is_return = self.return_buf == 'return'
+ self.outs.write(char)
+ if self.is_return:
+ self.return_buf = ''
+
+ read = self.ins.read
+
+ space_strings = "abcdefghijklmnopqrstuvwxyz"\
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$\\"
+ starters, enders = '{[(+-', '}])+-' + self.quote_chars
+ newlinestart_strings = starters + space_strings
+ newlineend_strings = enders + space_strings
+ do_newline = False
+ do_space = False
+ escape_slash_count = 0
+ doing_single_comment = False
+ previous_before_comment = ''
+ doing_multi_comment = False
+ in_re = False
+ in_quote = ''
+ quote_buf = []
+
+ previous = read(1)
+ if previous == '\\':
+ escape_slash_count += 1
+ next1 = read(1)
+ if previous == '/':
+ if next1 == '/':
+ doing_single_comment = True
+ elif next1 == '*':
+ doing_multi_comment = True
+ previous = next1
+ next1 = read(1)
+ else:
+ in_re = True # literal regex at start of script
+ write(previous)
+ elif not previous:
+ return
+ elif previous >= '!':
+ if previous in self.quote_chars:
+ in_quote = previous
+ write(previous)
+ previous_non_space = previous
+ else:
+ previous_non_space = ' '
+ if not next1:
+ return
+
+ while 1:
+ next2 = read(1)
+ if not next2:
+ last = next1.strip()
+ if not (doing_single_comment or doing_multi_comment)\
+ and last not in ('', '/'):
+ if in_quote:
+ write(''.join(quote_buf))
+ write(last)
+ break
+ if doing_multi_comment:
+ if next1 == '*' and next2 == '/':
+ doing_multi_comment = False
+ if previous_before_comment and previous_before_comment in space_strings:
+ do_space = True
+ next2 = read(1)
+ elif doing_single_comment:
+ if next1 in '\r\n':
+ doing_single_comment = False
+ while next2 in '\r\n':
+ next2 = read(1)
+ if not next2:
+ break
+ if previous_before_comment in ')}]':
+ do_newline = True
+ elif previous_before_comment in space_strings:
+ write('\n')
+ elif in_quote:
+ quote_buf.append(next1)
+
+ if next1 == in_quote:
+ numslashes = 0
+ for c in reversed(quote_buf[:-1]):
+ if c != '\\':
+ break
+ else:
+ numslashes += 1
+ if numslashes % 2 == 0:
+ in_quote = ''
+ write(''.join(quote_buf))
+ elif next1 in '\r\n':
+ if previous_non_space in newlineend_strings \
+ or previous_non_space > '~':
+ while 1:
+ if next2 < '!':
+ next2 = read(1)
+ if not next2:
+ break
+ else:
+ if next2 in newlinestart_strings \
+ or next2 > '~' or next2 == '/':
+ do_newline = True
+ break
+ elif next1 < '!' and not in_re:
+ if (previous_non_space in space_strings \
+ or previous_non_space > '~') \
+ and (next2 in space_strings or next2 > '~'):
+ do_space = True
+ elif previous_non_space in '-+' and next2 == previous_non_space:
+ # protect against + ++ or - -- sequences
+ do_space = True
+ elif self.is_return and next2 == '/':
+ # returning a regex...
+ write(' ')
+ elif next1 == '/':
+ if do_space:
+ write(' ')
+ if in_re:
+ if previous != '\\' or (not escape_slash_count % 2) or next2 in 'gimy':
+ in_re = False
+ write('/')
+ elif next2 == '/':
+ doing_single_comment = True
+ previous_before_comment = previous_non_space
+ elif next2 == '*':
+ doing_multi_comment = True
+ previous_before_comment = previous_non_space
+ previous = next1
+ next1 = next2
+ next2 = read(1)
+ else:
+ in_re = previous_non_space in '(,=:[?!&|;' or self.is_return # literal regular expression
+ write('/')
+ else:
+ if do_space:
+ do_space = False
+ write(' ')
+ if do_newline:
+ write('\n')
+ do_newline = False
+
+ write(next1)
+ if not in_re and next1 in self.quote_chars:
+ in_quote = next1
+ quote_buf = []
+
+ previous = next1
+ next1 = next2
+
+ if previous >= '!':
+ previous_non_space = previous
+
+ if previous == '\\':
+ escape_slash_count += 1
+ else:
+ escape_slash_count = 0
diff --git a/python/jsmin/jsmin/test.py b/python/jsmin/jsmin/test.py
new file mode 100644
index 000000000..6f7f627fd
--- /dev/null
+++ b/python/jsmin/jsmin/test.py
@@ -0,0 +1,394 @@
+import unittest
+import jsmin
+import sys
+
+class JsTests(unittest.TestCase):
+ def _minify(self, js):
+ return jsmin.jsmin(js)
+
+ def assertEqual(self, thing1, thing2):
+ if thing1 != thing2:
+ print(repr(thing1), repr(thing2))
+ raise AssertionError
+ return True
+
+ def assertMinified(self, js_input, expected, **kwargs):
+ minified = jsmin.jsmin(js_input, **kwargs)
+ assert minified == expected, "%r != %r" % (minified, expected)
+
+ def testQuoted(self):
+ js = r'''
+ Object.extend(String, {
+ interpret: function(value) {
+ return value == null ? '' : String(value);
+ },
+ specialChar: {
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '\\': '\\\\'
+ }
+ });
+
+ '''
+ expected = r"""Object.extend(String,{interpret:function(value){return value==null?'':String(value);},specialChar:{'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','\\':'\\\\'}});"""
+ self.assertMinified(js, expected)
+
+ def testSingleComment(self):
+ js = r'''// use native browser JS 1.6 implementation if available
+ if (Object.isFunction(Array.prototype.forEach))
+ Array.prototype._each = Array.prototype.forEach;
+
+ if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) {
+
+ // hey there
+ function() {// testing comment
+ foo;
+ //something something
+
+ location = 'http://foo.com;'; // goodbye
+ }
+ //bye
+ '''
+ expected = r"""
+if(Object.isFunction(Array.prototype.forEach))
+Array.prototype._each=Array.prototype.forEach;if(!Array.prototype.indexOf)Array.prototype.indexOf=function(item,i){ function(){ foo; location='http://foo.com;';}"""
+ # print expected
+ self.assertMinified(js, expected)
+
+ def testEmpty(self):
+ self.assertMinified('', '')
+ self.assertMinified(' ', '')
+ self.assertMinified('\n', '')
+ self.assertMinified('\r\n', '')
+ self.assertMinified('\t', '')
+
+
+ def testMultiComment(self):
+ js = r"""
+ function foo() {
+ print('hey');
+ }
+ /*
+ if(this.options.zindex) {
+ this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0);
+ this.element.style.zIndex = this.options.zindex;
+ }
+ */
+ another thing;
+ """
+ expected = r"""function foo(){print('hey');}
+another thing;"""
+ self.assertMinified(js, expected)
+
+ def testLeadingComment(self):
+ js = r"""/* here is a comment at the top
+
+ it ends here */
+ function foo() {
+ alert('crud');
+ }
+
+ """
+ expected = r"""function foo(){alert('crud');}"""
+ self.assertMinified(js, expected)
+
+ def testBlockCommentStartingWithSlash(self):
+ self.assertMinified('A; /*/ comment */ B', 'A;B')
+
+ def testBlockCommentEndingWithSlash(self):
+ self.assertMinified('A; /* comment /*/ B', 'A;B')
+
+ def testLeadingBlockCommentStartingWithSlash(self):
+ self.assertMinified('/*/ comment */ A', 'A')
+
+ def testLeadingBlockCommentEndingWithSlash(self):
+ self.assertMinified('/* comment /*/ A', 'A')
+
+ def testEmptyBlockComment(self):
+ self.assertMinified('/**/ A', 'A')
+
+ def testBlockCommentMultipleOpen(self):
+ self.assertMinified('/* A /* B */ C', 'C')
+
+ def testJustAComment(self):
+ self.assertMinified(' // a comment', '')
+
+ def test_issue_10(self):
+ js = '''
+ files = [{name: value.replace(/^.*\\\\/, '')}];
+ // comment
+ A
+ '''
+ expected = '''files=[{name:value.replace(/^.*\\\\/,'')}]; A'''
+ self.assertMinified(js, expected)
+
+ def testRe(self):
+ js = r'''
+ var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, '');
+ return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);
+ });'''
+ expected = r"""var str=this.replace(/\\./g,'@').replace(/"[^"\\\n\r]*"/g,'');return(/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);});"""
+ self.assertMinified(js, expected)
+
+ def testIgnoreComment(self):
+ js = r"""
+ var options_for_droppable = {
+ overlap: options.overlap,
+ containment: options.containment,
+ tree: options.tree,
+ hoverclass: options.hoverclass,
+ onHover: Sortable.onHover
+ }
+
+ var options_for_tree = {
+ onHover: Sortable.onEmptyHover,
+ overlap: options.overlap,
+ containment: options.containment,
+ hoverclass: options.hoverclass
+ }
+
+ // fix for gecko engine
+ Element.cleanWhitespace(element);
+ """
+ expected = r"""var options_for_droppable={overlap:options.overlap,containment:options.containment,tree:options.tree,hoverclass:options.hoverclass,onHover:Sortable.onHover}
+var options_for_tree={onHover:Sortable.onEmptyHover,overlap:options.overlap,containment:options.containment,hoverclass:options.hoverclass}
+Element.cleanWhitespace(element);"""
+ self.assertMinified(js, expected)
+
+ def testHairyRe(self):
+ js = r"""
+ inspect: function(useDoubleQuotes) {
+ var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) {
+ var character = String.specialChar[match[0]];
+ return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16);
+ });
+ if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"';
+ return "'" + escapedString.replace(/'/g, '\\\'') + "'";
+ },
+
+ toJSON: function() {
+ return this.inspect(true);
+ },
+
+ unfilterJSON: function(filter) {
+ return this.sub(filter || Prototype.JSONFilter, '#{1}');
+ },
+ """
+ expected = r"""inspect:function(useDoubleQuotes){var escapedString=this.gsub(/[\x00-\x1f\\]/,function(match){var character=String.specialChar[match[0]];return character?character:'\\u00'+match[0].charCodeAt().toPaddedString(2,16);});if(useDoubleQuotes)return'"'+escapedString.replace(/"/g,'\\"')+'"';return"'"+escapedString.replace(/'/g,'\\\'')+"'";},toJSON:function(){return this.inspect(true);},unfilterJSON:function(filter){return this.sub(filter||Prototype.JSONFilter,'#{1}');},"""
+ self.assertMinified(js, expected)
+
+ def testLiteralRe(self):
+ js = r"""
+ myString.replace(/\\/g, '/');
+ console.log("hi");
+ """
+ expected = r"""myString.replace(/\\/g,'/');console.log("hi");"""
+ self.assertMinified(js, expected)
+
+ js = r''' return /^data:image\//i.test(url) ||
+ /^(https?|ftp|file|about|chrome|resource):/.test(url);
+ '''
+ expected = r'''return /^data:image\//i.test(url)||/^(https?|ftp|file|about|chrome|resource):/.test(url);'''
+ self.assertMinified(js, expected)
+
+ def testNoBracesWithComment(self):
+ js = r"""
+ onSuccess: function(transport) {
+ var js = transport.responseText.strip();
+ if (!/^\[.*\]$/.test(js)) // TODO: improve sanity check
+ throw 'Server returned an invalid collection representation.';
+ this._collection = eval(js);
+ this.checkForExternalText();
+ }.bind(this),
+ onFailure: this.onFailure
+ });
+ """
+ expected = r"""onSuccess:function(transport){var js=transport.responseText.strip();if(!/^\[.*\]$/.test(js))
+throw'Server returned an invalid collection representation.';this._collection=eval(js);this.checkForExternalText();}.bind(this),onFailure:this.onFailure});"""
+ self.assertMinified(js, expected)
+
+ def testSpaceInRe(self):
+ js = r"""
+ num = num.replace(/ /g,'');
+ """
+ self.assertMinified(js, "num=num.replace(/ /g,'');")
+
+ def testEmptyString(self):
+ js = r'''
+ function foo('') {
+
+ }
+ '''
+ self.assertMinified(js, "function foo(''){}")
+
+ def testDoubleSpace(self):
+ js = r'''
+var foo = "hey";
+ '''
+ self.assertMinified(js, 'var foo="hey";')
+
+ def testLeadingRegex(self):
+ js = r'/[d]+/g '
+ self.assertMinified(js, js.strip())
+
+ def testLeadingString(self):
+ js = r"'a string in the middle of nowhere'; // and a comment"
+ self.assertMinified(js, "'a string in the middle of nowhere';")
+
+ def testSingleCommentEnd(self):
+ js = r'// a comment\n'
+ self.assertMinified(js, '')
+
+ def testInputStream(self):
+ try:
+ from StringIO import StringIO
+ except ImportError:
+ from io import StringIO
+
+ ins = StringIO(r'''
+ function foo('') {
+
+ }
+ ''')
+ outs = StringIO()
+ m = jsmin.JavascriptMinify()
+ m.minify(ins, outs)
+ output = outs.getvalue()
+ assert output == "function foo(''){}"
+
+ def testUnicode(self):
+ instr = u'\u4000 //foo'
+ expected = u'\u4000'
+ output = jsmin.jsmin(instr)
+ self.assertEqual(output, expected)
+
+ def testCommentBeforeEOF(self):
+ self.assertMinified("//test\r\n", "")
+
+ def testCommentInObj(self):
+ self.assertMinified("""{
+ a: 1,//comment
+ }""", "{a:1,}")
+
+ def testCommentInObj2(self):
+ self.assertMinified("{a: 1//comment\r\n}", "{a:1\n}")
+
+ def testImplicitSemicolon(self):
+        # return \n 1 is equivalent to return; 1
+ # so best make sure jsmin retains the newline
+ self.assertMinified("return;//comment\r\na", "return;a")
+
+ def testImplicitSemicolon2(self):
+ self.assertMinified("return//comment...\r\na", "return\na")
+
+ def testSingleComment2(self):
+ self.assertMinified('x.replace(/\//, "_")// slash to underscore',
+ 'x.replace(/\//,"_")')
+
+ def testSlashesNearComments(self):
+ original = '''
+ { a: n / 2, }
+ // comment
+ '''
+ expected = '''{a:n/2,}'''
+ self.assertMinified(original, expected)
+
+ def testReturn(self):
+ original = '''
+ return foo;//comment
+ return bar;'''
+ expected = 'return foo; return bar;'
+ self.assertMinified(original, expected)
+
+ def test_space_plus(self):
+ original = '"s" + ++e + "s"'
+ expected = '"s"+ ++e+"s"'
+ self.assertMinified(original, expected)
+
+ def test_no_final_newline(self):
+ original = '"s"'
+ expected = '"s"'
+ self.assertMinified(original, expected)
+
+ def test_space_with_regex_repeats(self):
+ original = '/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
+ self.assertMinified(original, original) # there should be nothing jsmin can do here
+
+ def test_space_with_regex_repeats_not_at_start(self):
+ original = 'aaa;/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
+ self.assertMinified(original, original) # there should be nothing jsmin can do here
+
+ def test_space_in_regex(self):
+ original = '/a (a)/.test("a")'
+ self.assertMinified(original, original)
+
+ def test_angular_1(self):
+ original = '''var /** holds major version number for IE or NaN for real browsers */
+ msie,
+ jqLite, // delay binding since jQuery could be loaded after us.'''
+ minified = jsmin.jsmin(original)
+ self.assertTrue('var msie' in minified)
+
+ def test_angular_2(self):
+ original = 'var/* comment */msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_3(self):
+ original = 'var /* comment */msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_4(self):
+ original = 'var /* comment */ msie;'
+ expected = 'var msie;'
+ self.assertMinified(original, expected)
+
+ def test_angular_5(self):
+ original = 'a/b'
+ self.assertMinified(original, original)
+
+ def testBackticks(self):
+ original = '`test`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '` test with leading whitespace`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '`test with trailing whitespace `'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''`test
+with a new line`'''
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''dumpAvStats: function(stats) {
+ var statsString = "";
+ if (stats.mozAvSyncDelay) {
+ statsString += `A/V sync: ${stats.mozAvSyncDelay} ms `;
+ }
+ if (stats.mozJitterBufferDelay) {
+ statsString += `Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;
+ }
+
+ return React.DOM.div(null, statsString);'''
+ expected = 'dumpAvStats:function(stats){var statsString="";if(stats.mozAvSyncDelay){statsString+=`A/V sync: ${stats.mozAvSyncDelay} ms `;}\nif(stats.mozJitterBufferDelay){statsString+=`Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;}\nreturn React.DOM.div(null,statsString);'
+ self.assertMinified(original, expected, quote_chars="'\"`")
+
+ def testBackticksExpressions(self):
+ original = '`Fifteen is ${a + b} and not ${2 * a + b}.`'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ original = '''`Fifteen is ${a +
+b} and not ${2 * a + "b"}.`'''
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+ def testBackticksTagged(self):
+ original = 'tag`Hello ${ a + b } world ${ a * b}`;'
+ self.assertMinified(original, original, quote_chars="'\"`")
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/jsmin/setup.cfg b/python/jsmin/setup.cfg
new file mode 100644
index 000000000..861a9f554
--- /dev/null
+++ b/python/jsmin/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/jsmin/setup.py b/python/jsmin/setup.py
new file mode 100644
index 000000000..8fff56602
--- /dev/null
+++ b/python/jsmin/setup.py
@@ -0,0 +1,42 @@
+from setuptools import setup
+
+import os, sys, re
+
+os.environ['COPYFILE_DISABLE'] = 'true' # this disables including resource forks in tar files on os x
+
+
+extra = {}
+if sys.version_info >= (3,0):
+ extra['use_2to3'] = True
+
+setup(
+ name="jsmin",
+ version=re.search(r'__version__ = ["\']([^"\']+)', open('jsmin/__init__.py').read()).group(1),
+ packages=['jsmin'],
+ description='JavaScript minifier.\nPLEASE UPDATE TO VERSION >= 2.0.6. Older versions have a serious bug related to comments.',
+ long_description=open('README.rst').read(),
+ author='Dave St.Germain',
+ author_email='dave@st.germa.in',
+ maintainer='Tikitu de Jager',
+ maintainer_email='tikitu+jsmin@logophile.org',
+ test_suite='jsmin.test.JsTests',
+ license='MIT License',
+ url='https://bitbucket.org/dcs/jsmin/',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Web Environment',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+ 'Topic :: Software Development :: Pre-processors',
+ 'Topic :: Text Processing :: Filters',
+ ],
+ **extra
+)
diff --git a/python/lldbutils/README.txt b/python/lldbutils/README.txt
new file mode 100644
index 000000000..a8db723f1
--- /dev/null
+++ b/python/lldbutils/README.txt
@@ -0,0 +1,221 @@
+lldb debugging functionality for Gecko
+======================================
+
+This directory contains a module, lldbutils, which is imported by the
+in-tree .lldbinit file. The lldbutils modules define some lldb commands
+that are handy for debugging Gecko.
+
+If you want to add a new command or Python-implemented type summary, either add
+it to one of the existing broad area Python files (such as lldbutils/layout.py
+for layout-related commands) or create a new file if none of the existing files
+is appropriate. If you add a new file, make sure you add it to __all__ in
+lldbutils/__init__.py.
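+
+For example, a minimal new module (the file name and command name here are
+hypothetical) would look like this:
+
+  import lldb
+
+  def hello(debugger, command, result, dict):
+      """Prints a greeting."""
+      print "hello from lldbutils"
+
+  def init(debugger):
+      debugger.HandleCommand(
+          "command script add -f lldbutils.example.hello hello")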
+
+
+Supported commands
+------------------
+
+Most commands below that can take a pointer to an object also support being
+called with a smart pointer like nsRefPtr or nsCOMPtr.
+
+
+* frametree EXPR, ft EXPR
+ frametreelimited EXPR, ftl EXPR
+
+ Shows information about a frame tree. EXPR is an expression that
+ is evaluated, and must be an nsIFrame*. frametree displays the
+ entire frame tree that contains the given frame. frametreelimited
+ displays a subtree of the frame tree rooted at the given frame.
+
+ (lldb) p this
+ (nsBlockFrame *) $4 = 0x000000011687fcb8
+ (lldb) ftl this
+ Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
+ line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} vis-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
+ Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
+ >
+ >
+ (lldb) ft this
+ Viewport(-1)@0x116017430 [view=0x115efe190] {0,0,60,60} [state=000b063000002623] [sc=0x1160170f8:-moz-viewport]<
+ HTMLScroll(html)(-1)@0x1160180d0 {0,0,0,0} [state=000b020000000403] [content=0x115e4d640] [sc=0x116017768:-moz-viewport-scroll]<
+ ...
+ Canvas(html)(-1)@0x116017e08 {0,0,60,60} vis-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b002000000601] [content=0x115e4d640] [sc=0x11687e0f8:-moz-scrolled-canvas]<
+ Block(html)(-1)@0x11687e578 {0,0,60,2196} vis-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b100000d00601] [content=0x115e4d640] [sc=0x11687e4b8,parent=0x0]<
+ line 0x11687ec48: count=1 state=block,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x48] bm=480 {480,480,0,1236} vis-overflow=360,426,7980,1410 scr-overflow=480,480,7740,1236 <
+ Block(body)(1)@0x11687ebb0 {480,480,0,1236} vis-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 [state=000b120000100601] [content=0x115ed8980] [sc=0x11687e990]<
+ line 0x116899170: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x0] {0,0,7740,1236} vis-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 <
+ nsTextControlFrame@0x11687f068 {0,66,7740,1170} vis-overflow=-120,-120,7980,1410 scr-overflow=0,0,7740,1170 [state=0002000000004621] [content=0x115ca2c50] [sc=0x11687ea40]<
+ HTMLScroll(div)(-1)@0x11687f6b0 {180,240,7380,690} [state=0002000000084409] [content=0x11688c0c0] [sc=0x11687eb00]<
+ Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
+ line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} vis-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
+ Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
+ ...
+
+
+* js
+
+ Dumps the current JS stack.
+
+ (lldb) js
+ 0 anonymous(aForce = false) ["chrome://browser/content/browser.js":13414]
+ this = [object Object]
+ 1 updateAppearance() ["chrome://browser/content/browser.js":13326]
+ this = [object Object]
+ 2 handleEvent(aEvent = [object Event]) ["chrome://browser/content/tabbrowser.xml":3811]
+ this = [object XULElement]
+
+
+* prefcnt EXPR
+
+ Shows the refcount of a given object. EXPR is an expression that is
+  evaluated, and can be either a pointer to a refcounted object or the
+  object itself. The object can be a standard nsISupports-like refcounted
+ object, a cycle-collected object or a mozilla::RefCounted<T> object.
+
+ (lldb) p this
+ (nsHTMLDocument *) $1 = 0x0000000116e9d800
+ (lldb) prefcnt this
+ 20
+ (lldb) p mDocumentURI
+ (nsCOMPtr<nsIURI>) $3 = {
+ mRawPtr = 0x0000000117163e50
+ }
+ (lldb) prefcnt mDocumentURI
+ 11
+
+
+* pstate EXPR
+
+ Shows the frame state bits (using their symbolic names) of a given frame.
+ EXPR is an expression that is evaluated, and must be an nsIFrame*.
+
+ (lldb) p this
+ (nsTextFrame *) $1 = 0x000000011f470b10
+ (lldb) p/x mState
+ (nsFrameState) $2 = 0x0000004080604000
+ (lldb) pstate this
+ TEXT_HAS_NONCOLLAPSED_CHARACTERS | TEXT_END_OF_LINE | TEXT_START_OF_LINE | NS_FRAME_PAINTED_THEBES | NS_FRAME_INDEPENDENT_SELECTION
+
+
+* ptag EXPR
+
+ Shows the DOM tag name of a node. EXPR is an expression that is
+ evaluated, and can be either an nsINode pointer or a concrete DOM
+ object.
+
+ (lldb) p this
+ (nsHTMLDocument *) $0 = 0x0000000116e9d800
+ (lldb) ptag this
+ (PermanentAtomImpl *) $1 = 0x0000000110133ac0 u"#document"
+ (lldb) p this->GetRootElement()
+ (mozilla::dom::HTMLSharedElement *) $2 = 0x0000000118429780
+ (lldb) ptag $2
+ (PermanentAtomImpl *) $3 = 0x0000000110123b80 u"html"
+
+
+Supported type summaries and synthetic children
+-----------------------------------------------
+
+In lldb terminology, type summaries are rules for how to display a value
+when using the "expression" command (or its familiar-to-gdb-users "p" alias),
+and synthetic children are fake member variables or array elements also
+added by custom rules.
+
+For objects that do have synthetic children defined for them, like nsTArray,
+the "expr -R -- EXPR" command can be used to show its actual member variables.
+
+
+* nsAString, nsACString,
+ nsFixedString, nsFixedCString,
+ nsAutoString, nsAutoCString
+
+ Strings have a type summary that shows the actual string.
+
+ (lldb) frame info
+ frame #0: 0x000000010400cfea XUL`nsCSSParser::ParseProperty(this=0x00007fff5fbf5248, aPropID=eCSSProperty_margin_top, aPropValue=0x00007fff5fbf53f8, aSheetURI=0x0000000115ae8c00, aBaseURI=0x0000000115ae8c00, aSheetPrincipal=0x000000010ff9e040, aDeclaration=0x00000001826fd580, aChanged=0x00007fff5fbf5247, aIsImportant=false, aIsSVGMode=false) + 74 at nsCSSParser.cpp:12851
+ (lldb) p aPropValue
+ (const nsAString_internal) $16 = u"-25px"
+
+ (lldb) p this
+ (nsHTMLDocument *) $18 = 0x0000000115b56000
+ (lldb) p mContentType
+ (nsCString) $19 = {
+ nsACString_internal = "text/html"
+ }
+
+* nscolor
+
+ nscolors (32-bit RGBA colors) have a type summary that shows the color as
+ one of the CSS 2.1 color keywords, a six digit hex color, an rgba() color,
+ or the "transparent" keyword.
+
+ (lldb) p this
+ (nsTextFrame *) $0 = 0x00000001168245e0
+ (lldb) p *this->StyleColor()
+ (const nsStyleColor) $1 = {
+ mColor = lime
+ }
+ (lldb) expr -R -- *this->StyleColor()
+ (const nsStyleColor) $2 = {
+ mColor = 4278255360
+ }
+
+* nsIAtom
+
+ Atoms have a type summary that shows the string value inside the atom.
+
+ (lldb) frame info
+ frame #0: 0x00000001028b8c49 XUL`mozilla::dom::Element::GetBoolAttr(this=0x0000000115ca1c50, aAttr=0x000000011012a640) const + 25 at Element.h:907
+ (lldb) p aAttr
+ (PermanentAtomImpl *) $1 = 0x000000011012a640 u"readonly"
+
+* nsTArray and friends
+
+ nsTArrays and their auto and fallible varieties have synthetic children
+ for their elements. This means when displaying them with "expr" (or "p"),
+ they will be shown like regular arrays, rather than showing the mHdr and
+ other fields.
+
+ (lldb) frame info
+ frame #0: 0x00000001043eb8a8 XUL`SVGTextFrame::DoGlyphPositioning(this=0x000000012f3f8778) + 248 at SVGTextFrame.cpp:4940
+ (lldb) p charPositions
+ (nsTArray<nsPoint>) $5 = {
+ [0] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 0
+ y = 816
+ }
+ }
+ [1] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 426
+ y = 816
+ }
+ }
+ [2] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 906
+ y = 816
+ }
+ }
+ }
+ (lldb) expr -R -- charPositions
+ (nsTArray<nsPoint>) $4 = {
+ nsTArray_Impl<nsPoint, nsTArrayInfallibleAllocator> = {
+ nsTArray_base<nsTArrayInfallibleAllocator, nsTArray_CopyWithMemutils> = {
+ mHdr = 0x000000012f3f1b80
+ }
+ }
+ }
+
+* nsTextNode, nsTextFragment
+
+ Text nodes have a type summary that shows the nsTextFragment in the
+ nsTextNode, which itself has a type summary that shows the text
+ content.
+
+ (lldb) p this
+ (nsTextFrame *) $14 = 0x000000011811bb10
+ (lldb) p mContent
+ (nsTextNode *) $15 = 0x0000000118130110 "Search or enter address"
+
diff --git a/python/lldbutils/lldbutils/__init__.py b/python/lldbutils/lldbutils/__init__.py
new file mode 100644
index 000000000..f27fa7297
--- /dev/null
+++ b/python/lldbutils/lldbutils/__init__.py
@@ -0,0 +1,13 @@
+import lldb
+
+__all__ = ['content', 'general', 'gfx', 'layout', 'utils']
+
+def init():
+ for name in __all__:
+ init = None
+ try:
+ init = __import__('lldbutils.' + name, globals(), locals(), ['init']).init
+ except AttributeError:
+ pass
+ if init:
+ init(lldb.debugger)
diff --git a/python/lldbutils/lldbutils/content.py b/python/lldbutils/lldbutils/content.py
new file mode 100644
index 000000000..93199001b
--- /dev/null
+++ b/python/lldbutils/lldbutils/content.py
@@ -0,0 +1,21 @@
+import lldb
+from lldbutils import utils
+
+def summarize_text_fragment(valobj, internal_dict):
+ content_union = valobj.GetChildAtIndex(0)
+ state_union = valobj.GetChildAtIndex(1).GetChildMemberWithName("mState")
+ length = state_union.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
+ if state_union.GetChildMemberWithName("mIs2b").GetValueAsUnsigned(0):
+ field = "m2b"
+ else:
+ field = "m1b"
+ ptr = content_union.GetChildMemberWithName(field)
+ return utils.format_string(ptr, length)
+
+def ptag(debugger, command, result, dict):
+ """Displays the tag name of a content node."""
+ debugger.HandleCommand("expr (" + command + ")->mNodeInfo.mRawPtr->mInner.mName")
+
+def init(debugger):
+ debugger.HandleCommand("type summary add nsTextFragment -F lldbutils.content.summarize_text_fragment")
+ debugger.HandleCommand("command script add -f lldbutils.content.ptag ptag")
diff --git a/python/lldbutils/lldbutils/general.py b/python/lldbutils/lldbutils/general.py
new file mode 100644
index 000000000..27cf19aab
--- /dev/null
+++ b/python/lldbutils/lldbutils/general.py
@@ -0,0 +1,105 @@
+import lldb
+from lldbutils import utils
+
+def summarize_string(valobj, internal_dict):
+ data = valobj.GetChildMemberWithName("mData")
+ length = valobj.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
+ return utils.format_string(data, length)
+
+class TArraySyntheticChildrenProvider:
+ def __init__(self, valobj, internal_dict):
+ self.valobj = valobj
+ self.header = self.valobj.GetChildMemberWithName("mHdr")
+ self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
+ self.element_size = self.element_type.GetByteSize()
+ header_size = self.header.GetType().GetPointeeType().GetByteSize()
+ self.element_base_addr = self.header.GetValueAsUnsigned(0) + header_size
+
+ def num_children(self):
+ return self.header.Dereference().GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
+
+    def get_child_index(self, name):
+        try:
+            index = int(name)
+            if index >= self.num_children():
+                return None
+            return index
+        except:
+            return None
+
+ def get_child_at_index(self, index):
+ if index >= self.num_children():
+ return None
+ addr = self.element_base_addr + index * self.element_size
+ return self.valobj.CreateValueFromAddress("[%d]" % index, addr, self.element_type)
+
+def prefcnt(debugger, command, result, dict):
+ """Displays the refcount of an object."""
+    # We handle regular nsISupports-like refcounted objects and
+    # cycle-collected objects.
+ target = debugger.GetSelectedTarget()
+ process = target.GetProcess()
+ thread = process.GetSelectedThread()
+ frame = thread.GetSelectedFrame()
+ obj = frame.EvaluateExpression(command)
+ if obj.GetError().Fail():
+ print "could not evaluate expression"
+ return
+ obj = utils.dereference(obj)
+ field = obj.GetChildMemberWithName("mRefCnt")
+ if field.GetError().Fail():
+ field = obj.GetChildMemberWithName("refCnt")
+ if field.GetError().Fail():
+ print "not a refcounted object"
+ return
+ refcnt_type = field.GetType().GetCanonicalType().GetName()
+ if refcnt_type == "nsAutoRefCnt":
+ print field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+ elif refcnt_type == "nsCycleCollectingAutoRefCnt":
+ print field.GetChildMemberWithName("mRefCntAndFlags").GetValueAsUnsigned(0) >> 2
+ elif refcnt_type == "mozilla::ThreadSafeAutoRefCnt":
+ print field.GetChildMemberWithName("mValue").GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+ elif refcnt_type == "int": # non-atomic mozilla::RefCounted object
+ print field.GetValueAsUnsigned(0)
+ elif refcnt_type == "mozilla::Atomic<int>": # atomic mozilla::RefCounted object
+ print field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+ else:
+ print "unknown mRefCnt type " + refcnt_type
+
+# Used to work around http://llvm.org/bugs/show_bug.cgi?id=22211
+def callfunc(debugger, command, result, dict):
+ """Calls a function for which debugger information is unavailable by getting its address from the symbol table.
+ The function is assumed to return void."""
+
+ if '(' not in command:
+ print 'Usage: callfunc your_function(args)'
+ return
+
+ command_parts = command.split('(')
+ funcname = command_parts[0].strip()
+ args = command_parts[1]
+
+ target = debugger.GetSelectedTarget()
+ symbols = target.FindFunctions(funcname).symbols
+ if not symbols:
+ print 'Could not find a function symbol for a function called "%s"' % funcname
+ return
+
+ sym = symbols[0]
+ arg_types = '()'
+ if sym.name and sym.name.startswith(funcname + '('):
+ arg_types = sym.name[len(funcname):]
+ debugger.HandleCommand('print ((void(*)%s)0x%0x)(%s' % (arg_types, sym.addr.GetLoadAddress(target), args))
+
+def init(debugger):
+ debugger.HandleCommand("type summary add nsAString_internal -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type summary add nsACString_internal -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type summary add nsFixedString -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type summary add nsFixedCString -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type summary add nsAutoString -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type summary add nsAutoCString -F lldbutils.general.summarize_string")
+ debugger.HandleCommand("type synthetic add -x \"nsTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
+ debugger.HandleCommand("type synthetic add -x \"AutoTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
+ debugger.HandleCommand("type synthetic add -x \"FallibleTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
+ debugger.HandleCommand("command script add -f lldbutils.general.prefcnt -f lldbutils.general.prefcnt prefcnt")
+ debugger.HandleCommand("command script add -f lldbutils.general.callfunc -f lldbutils.general.callfunc callfunc")
diff --git a/python/lldbutils/lldbutils/gfx.py b/python/lldbutils/lldbutils/gfx.py
new file mode 100644
index 000000000..1ad9a37a7
--- /dev/null
+++ b/python/lldbutils/lldbutils/gfx.py
@@ -0,0 +1,130 @@
+import lldb
+
+def summarize_nscolor(valobj, internal_dict):
+ colors = {
+ "#800000": "maroon",
+ "#ff0000": "red",
+ "#ffa500": "orange",
+ "#ffff00": "yellow",
+ "#808000": "olive",
+ "#800080": "purple",
+ "#ff00ff": "fuchsia",
+ "#ffffff": "white",
+ "#00ff00": "lime",
+ "#008000": "green",
+ "#000080": "navy",
+ "#0000ff": "blue",
+ "#00ffff": "aqua",
+ "#008080": "teal",
+ "#000000": "black",
+ "#c0c0c0": "silver",
+ "#808080": "gray"
+ }
+ value = valobj.GetValueAsUnsigned(0)
+ if value == 0:
+ return "transparent"
+ if value & 0xff000000 != 0xff000000:
+ return "rgba(%d, %d, %d, %f)" % (value & 0xff, (value >> 8) & 0xff, (value >> 16) & 0xff, ((value >> 24) & 0xff) / 255.0)
+ color = "#%02x%02x%02x" % (value & 0xff, (value >> 8) & 0xff, (value >> 16) & 0xff)
+ if color in colors:
+ return colors[color]
+ return color
+
+class RegionSyntheticChildrenProvider:
+
+ def __init__(self, valobj, internal_dict, rect_type = "nsRect"):
+ self.rect_type = rect_type
+ self.valobj = valobj
+ self.pixman_region = self.valobj.GetChildMemberWithName("mImpl")
+ self.pixman_data = self.pixman_region.GetChildMemberWithName("data")
+ self.pixman_extents = self.pixman_region.GetChildMemberWithName("extents")
+ self.num_rects = self.pixman_region_num_rects()
+ self.box_type = self.pixman_extents.GetType()
+ self.box_type_size = self.box_type.GetByteSize()
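+        # The box array is stored immediately after the pixman data
+        # header, so skip past sizeof(*data) to reach the first box.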
+ self.box_list_base_ptr = self.pixman_data.GetValueAsUnsigned(0) + self.pixman_data.GetType().GetPointeeType().GetByteSize()
+
+ def pixman_region_num_rects(self):
+ if self.pixman_data.GetValueAsUnsigned(0):
+ return self.pixman_data.Dereference().GetChildMemberWithName("numRects").GetValueAsUnsigned(0)
+ return 1
+
+ def num_children(self):
+ return 2 + self.num_rects
+
+ def get_child_index(self, name):
+ if name == "numRects":
+ return 0
+ if name == "bounds":
+ return 1
+ return None
+
+ def convert_pixman_box_to_rect(self, valobj, name):
+ x1 = valobj.GetChildMemberWithName("x1").GetValueAsSigned()
+ x2 = valobj.GetChildMemberWithName("x2").GetValueAsSigned()
+ y1 = valobj.GetChildMemberWithName("y1").GetValueAsSigned()
+ y2 = valobj.GetChildMemberWithName("y2").GetValueAsSigned()
+ return valobj.CreateValueFromExpression(name,
+ '%s(%d, %d, %d, %d)' % (self.rect_type, x1, y1, x2 - x1, y2 - y1))
+
+ def get_child_at_index(self, index):
+ if index == 0:
+ return self.pixman_data.CreateValueFromExpression('numRects', '(uint32_t)%d' % self.num_rects)
+ if index == 1:
+ return self.convert_pixman_box_to_rect(self.pixman_extents, 'bounds')
+
+ rect_index = index - 2
+ if rect_index >= self.num_rects:
+ return None
+ if self.num_rects == 1:
+ return self.convert_pixman_box_to_rect(self.pixman_extents, 'bounds')
+ box_address = self.box_list_base_ptr + rect_index * self.box_type_size
+ box = self.pixman_data.CreateValueFromAddress('', box_address, self.box_type)
+ return self.convert_pixman_box_to_rect(box, "[%d]" % rect_index)
+
+class IntRegionSyntheticChildrenProvider:
+ def __init__(self, valobj, internal_dict):
+ wrapped_region = valobj.GetChildMemberWithName("mImpl")
+ self.wrapped_provider = RegionSyntheticChildrenProvider(wrapped_region, internal_dict, "nsIntRect")
+
+ def num_children(self):
+ return self.wrapped_provider.num_children()
+
+ def get_child_index(self, name):
+ return self.wrapped_provider.get_child_index(name)
+
+ def get_child_at_index(self, index):
+ return self.wrapped_provider.get_child_at_index(index)
+
+def summarize_rect(valobj, internal_dict):
+ x = valobj.GetChildMemberWithName("x").GetValue()
+ y = valobj.GetChildMemberWithName("y").GetValue()
+ width = valobj.GetChildMemberWithName("width").GetValue()
+ height = valobj.GetChildMemberWithName("height").GetValue()
+ return "%s, %s, %s, %s" % (x, y, width, height)
+
+def rect_is_empty(valobj):
+ width = valobj.GetChildMemberWithName("width").GetValueAsSigned()
+ height = valobj.GetChildMemberWithName("height").GetValueAsSigned()
+ return width <= 0 or height <= 0
+
+def summarize_region(valobj, internal_dict):
+ # This function makes use of the synthetic children generated for ns(Int)Regions.
+ bounds = valobj.GetChildMemberWithName("bounds")
+ bounds_summary = summarize_rect(bounds, internal_dict)
+ num_rects = valobj.GetChildMemberWithName("numRects").GetValueAsUnsigned(0)
+ if num_rects <= 1:
+ if rect_is_empty(bounds):
+ return "empty"
+ if num_rects == 1:
+ return "one rect: " + bounds_summary
+ return str(num_rects) + " rects, bounds: " + bounds_summary
+
+def init(debugger):
+ debugger.HandleCommand("type summary add nscolor -v -F lldbutils.gfx.summarize_nscolor")
+ debugger.HandleCommand("type summary add nsRect -v -F lldbutils.gfx.summarize_rect")
+ debugger.HandleCommand("type summary add nsIntRect -v -F lldbutils.gfx.summarize_rect")
+ debugger.HandleCommand("type summary add gfxRect -v -F lldbutils.gfx.summarize_rect")
+ debugger.HandleCommand("type synthetic add nsRegion -l lldbutils.gfx.RegionSyntheticChildrenProvider")
+ debugger.HandleCommand("type synthetic add nsIntRegion -l lldbutils.gfx.IntRegionSyntheticChildrenProvider")
+ debugger.HandleCommand("type summary add nsRegion -v -F lldbutils.gfx.summarize_region")
+ debugger.HandleCommand("type summary add nsIntRegion -v -F lldbutils.gfx.summarize_region")
diff --git a/python/lldbutils/lldbutils/layout.py b/python/lldbutils/lldbutils/layout.py
new file mode 100644
index 000000000..a4894699c
--- /dev/null
+++ b/python/lldbutils/lldbutils/layout.py
@@ -0,0 +1,20 @@
+import lldb
+
+def frametree(debugger, command, result, dict):
+ """Dumps the frame tree containing the given nsIFrame*."""
+ debugger.HandleCommand('expr (' + command + ')->DumpFrameTree()')
+
+def frametreelimited(debugger, command, result, dict):
+ """Dumps the subtree of a frame tree rooted at the given nsIFrame*."""
+ debugger.HandleCommand('expr (' + command + ')->DumpFrameTreeLimited()')
+
+def pstate(debugger, command, result, dict):
+ """Displays a frame's state bits symbolically."""
+ debugger.HandleCommand('expr mozilla::PrintFrameState(' + command + ')')
+
+def init(debugger):
+ debugger.HandleCommand('command script add -f lldbutils.layout.frametree frametree')
+ debugger.HandleCommand('command script add -f lldbutils.layout.frametreelimited frametreelimited')
+ debugger.HandleCommand('command alias ft frametree')
+ debugger.HandleCommand('command alias ftl frametreelimited')
+    debugger.HandleCommand('command script add -f lldbutils.layout.pstate pstate')
diff --git a/python/lldbutils/lldbutils/utils.py b/python/lldbutils/lldbutils/utils.py
new file mode 100644
index 000000000..4e038f630
--- /dev/null
+++ b/python/lldbutils/lldbutils/utils.py
@@ -0,0 +1,70 @@
+def format_char(c):
+ if c == 0:
+ return "\\0"
+ elif c == 0x07:
+ return "\\a"
+ elif c == 0x08:
+ return "\\b"
+ elif c == 0x0c:
+ return "\\f"
+ elif c == 0x0a:
+ return "\\n"
+ elif c == 0x0d:
+ return "\\r"
+ elif c == 0x09:
+ return "\\t"
+ elif c == 0x0b:
+ return "\\v"
+ elif c == 0x5c:
+ return "\\"
+ elif c == 0x22:
+ return "\\\""
+ elif c == 0x27:
+ return "\\'"
+    elif c < 0x20 or (0x80 <= c <= 0xff):
+ return "\\x%02x" % c
+ elif c >= 0x0100:
+ return "\\u%04x" % c
+ else:
+ return chr(c)
+
+# Take an SBValue that is either a char* or char16_t* and formats it like lldb
+# would when printing it.
+def format_string(lldb_value, length=100):
+ ptr = lldb_value.GetValueAsUnsigned(0)
+ char_type = lldb_value.GetType().GetPointeeType()
+ if char_type.GetByteSize() == 1:
+ s = "\""
+ size = 1
+ mask = 0xff
+ elif char_type.GetByteSize() == 2:
+ s = "u\""
+ size = 2
+ mask = 0xffff
+ else:
+ return "(cannot format string with char type %s)" % char_type.GetName()
+ i = 0
+ terminated = False
+ while i < length:
+ c = lldb_value.CreateValueFromAddress("x", ptr + i * size, char_type).GetValueAsUnsigned(0) & mask
+ if c == 0:
+ terminated = True
+ break
+ s += format_char(c)
+ i = i + 1
+ s += "\""
+ if not terminated and i != length:
+ s += "..."
+ return s
+
+# Dereferences a raw pointer, nsCOMPtr, RefPtr, nsAutoPtr, already_AddRefed or
+# mozilla::RefPtr; otherwise returns the value unchanged.
+def dereference(lldb_value):
+ if lldb_value.TypeIsPointerType():
+ return lldb_value.Dereference()
+ name = lldb_value.GetType().GetUnqualifiedType().GetName()
+ if name.startswith("nsCOMPtr<") or name.startswith("RefPtr<") or name.startswith("nsAutoPtr<") or name.startswith("already_AddRefed<"):
+ return lldb_value.GetChildMemberWithName("mRawPtr")
+ if name.startswith("mozilla::RefPtr<"):
+ return lldb_value.GetChildMemberWithName("ptr")
+ return lldb_value
diff --git a/python/mach/README.rst b/python/mach/README.rst
new file mode 100644
index 000000000..7c2e00bec
--- /dev/null
+++ b/python/mach/README.rst
@@ -0,0 +1,13 @@
+====
+mach
+====
+
+Mach (German for *do*) is a generic command dispatcher for the command
+line.
+
+To use mach, you install the mach core (a Python package), create an
+executable *driver* script (named whatever you want), and write mach
+commands. When the *driver* is executed, mach dispatches to the
+requested command handler automatically.
+
+To learn more, read the docs in ``docs/``.
diff --git a/python/mach/bash-completion.sh b/python/mach/bash-completion.sh
new file mode 100644
index 000000000..e4b151f24
--- /dev/null
+++ b/python/mach/bash-completion.sh
@@ -0,0 +1,29 @@
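+# Bash completion for a mach driver installed as "mach". Source this file
+# (e.g. from your .bashrc); the candidate commands come from running
+# `mach mach-commands`.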
+function _mach()
+{
+ local cur cmds c subcommand
+ COMPREPLY=()
+
+ # Load the list of commands
+ cmds=`"${COMP_WORDS[0]}" mach-commands`
+
+ # Look for the subcommand.
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ subcommand=""
+ c=1
+ while [ $c -lt $COMP_CWORD ]; do
+ word="${COMP_WORDS[c]}"
+ for cmd in $cmds; do
+ if [ "$cmd" = "$word" ]; then
+ subcommand="$word"
+ fi
+ done
+ c=$((++c))
+ done
+
+ if [[ "$subcommand" == "help" || -z "$subcommand" ]]; then
+ COMPREPLY=( $(compgen -W "$cmds" -- ${cur}) )
+ fi
+
+ return 0
+}
+complete -o default -F _mach mach
diff --git a/python/mach/docs/commands.rst b/python/mach/docs/commands.rst
new file mode 100644
index 000000000..af2973dd7
--- /dev/null
+++ b/python/mach/docs/commands.rst
@@ -0,0 +1,145 @@
+.. _mach_commands:
+
+=====================
+Implementing Commands
+=====================
+
+Mach commands are defined via Python decorators.
+
+All the relevant decorators are defined in the *mach.decorators* module.
+The important decorators are as follows:
+
+:py:func:`CommandProvider <mach.decorators.CommandProvider>`
+ A class decorator that denotes that a class contains mach
+ commands. The decorator takes no arguments.
+
+:py:func:`Command <mach.decorators.Command>`
+ A method decorator that denotes that the method should be called when
+ the specified command is requested. The decorator takes a command name
+ as its first argument and a number of additional arguments to
+ configure the behavior of the command.
+
+:py:func:`CommandArgument <mach.decorators.CommandArgument>`
+ A method decorator that defines an argument to the command. Its
+ arguments are essentially proxied to ArgumentParser.add_argument()
+
+:py:func:`SubCommand <mach.decorators.SubCommand>`
+ A method decorator that denotes that the method should be a
+ sub-command to an existing ``@Command``. The decorator takes the
+ parent command name as its first argument and the sub-command name
+ as its second argument.
+
+ ``@CommandArgument`` can be used on ``@SubCommand`` instances just
+ like they can on ``@Command`` instances.
+
+Classes with the ``@CommandProvider`` decorator **must** have an
+``__init__`` method that accepts 1 or 2 arguments. If it accepts 2
+arguments, the 2nd argument will be a
+:py:class:`mach.base.CommandContext` instance.
+
+Here is a complete example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+ )
+
+ @CommandProvider
+ class MyClass(object):
+ @Command('doit', help='Do ALL OF THE THINGS.')
+ @CommandArgument('--force', '-f', action='store_true',
+ help='Force doing it.')
+ def doit(self, force=False):
+            # Do stuff here.
+            pass
+
+When the module is loaded, the decorators tell mach about all handlers.
+When mach runs, it takes the assembled metadata from these handlers and
+hooks it up to the command line driver. Under the hood, arguments passed
+to the decorators are being used to help mach parse command arguments,
+formulate arguments to the methods, etc. See the documentation in the
+:py:mod:`mach.base` module for more.
+
+The Python modules defining mach commands do not need to live inside the
+main mach source tree.
+
+Conditionally Filtering Commands
+================================
+
+Sometimes it might only make sense to run a command given a certain
+context. For example, running tests only makes sense if the product
+they are testing has been built, and said build is available. To make
+sure a command is only runnable from within a correct context, you can
+define a series of conditions on the
+:py:func:`Command <mach.decorators.Command>` decorator.
+
+A condition is simply a function that takes an instance of a class
+decorated with :py:func:`CommandProvider <mach.decorators.CommandProvider>`
+as an argument, and
+returns ``True`` or ``False``. If any of the conditions defined on a
+command return ``False``, the command will not be runnable. The
+docstring of a condition function is used in error messages, to explain
+why the command cannot currently be run.
+
+Here is an example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ CommandProvider,
+ Command,
+ )
+
+ def build_available(cls):
+ """The build needs to be available."""
+ return cls.build_path is not None
+
+ @CommandProvider
+ class MyClass(MachCommandBase):
+ def __init__(self, build_path=None):
+ self.build_path = build_path
+
+ @Command('run_tests', conditions=[build_available])
+ def run_tests(self):
+ # Do stuff here.
+
+It is important to make sure that any state needed by the condition is
+available to instances of the command provider.
+
+By default all commands without any conditions applied will be runnable,
+but it is possible to change this behaviour by setting
+``require_conditions`` to ``True``:
+
+.. code-block:: python
+
+ m = mach.main.Mach()
+ m.require_conditions = True
+
+Minimizing Code in Commands
+===========================
+
+Mach command modules, classes, and methods work best when they are
+minimal dispatchers. The reason is import bloat. Currently, the mach
+core needs to import every Python file potentially containing mach
+commands for every command invocation. If you have dozens of commands or
+commands in modules that import a lot of Python code, these imports
+could slow mach down and waste memory.
+
+It is thus recommended that mach modules, classes, and methods do as
+little work as possible. Ideally the module should only import from
+the :py:mod:`mach` package. If you need external modules, you should
+import them from within the command method.
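+
+For example (the command, module, and tool names are hypothetical,
+shown only to illustrate the deferred import):
+
+.. code-block:: python
+
+    from mach.decorators import (
+        CommandProvider,
+        Command,
+    )
+
+    @CommandProvider
+    class MyClass(object):
+        @Command('hello', help='Run the hello tool.')
+        def hello(self):
+            # Deferred import: merely loading this module stays cheap.
+            import expensive_tool
+            expensive_tool.run()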
+
+To keep code size small, the body of a command method should be limited
+to:
+
+1. Obtaining user input (parsing arguments, prompting, etc)
+2. Calling into some other Python package
+3. Formatting output
+
+Of course, these recommendations can be ignored if you want to risk
+slower performance.
+
+In the future, the mach driver may cache the dispatching information or
+have it intelligently loaded to facilitate lazy loading.
diff --git a/python/mach/docs/driver.rst b/python/mach/docs/driver.rst
new file mode 100644
index 000000000..022ebe657
--- /dev/null
+++ b/python/mach/docs/driver.rst
@@ -0,0 +1,51 @@
+.. _mach_driver:
+
+=======
+Drivers
+=======
+
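+At its simplest, a driver instantiates the mach core, tells it where
+commands live, and hands over control. A minimal sketch (the command
+file path is hypothetical):
+
+.. code-block:: python
+
+    #!/usr/bin/env python
+    import os
+    import sys
+
+    import mach.main
+
+    if __name__ == '__main__':
+        m = mach.main.Mach(os.getcwd())
+        m.load_commands_from_file('path/to/mach_commands.py')
+        sys.exit(m.run(sys.argv[1:]))
+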
+Entry Points
+============
+
+It is possible to use setuptools' entry points to load commands
+directly from python packages. A mach entry point is a function which
+returns a list of files or directories containing mach command
+providers. e.g.:
+
+.. code-block:: python
+
+ def list_providers():
+ providers = []
+ here = os.path.abspath(os.path.dirname(__file__))
+ for p in os.listdir(here):
+ if p.endswith('.py'):
+ providers.append(os.path.join(here, p))
+ return providers
+
+See http://pythonhosted.org/setuptools/setuptools.html#dynamic-discovery-of-services-and-plugins
+for more information on creating an entry point. To search for entry
+point plugins, you can call
+:py:meth:`mach.main.Mach.load_commands_from_entry_point`. e.g.:
+
+.. code-block:: python
+
+ mach.load_commands_from_entry_point("mach.external.providers")
+
+Adding Global Arguments
+=======================
+
+Arguments to mach commands are usually command-specific. However,
+mach ships with a handful of global arguments that apply to all
+commands.
+
+It is possible to extend the list of global arguments. In your
+*mach driver*, simply call
+:py:meth:`mach.main.Mach.add_global_argument`. e.g.:
+
+.. code-block:: python
+
+ mach = mach.main.Mach(os.getcwd())
+
+ # Will allow --example to be specified on every mach command.
+ mach.add_global_argument('--example', action='store_true',
+ help='Demonstrate an example global argument.')
diff --git a/python/mach/docs/index.rst b/python/mach/docs/index.rst
new file mode 100644
index 000000000..cd2056333
--- /dev/null
+++ b/python/mach/docs/index.rst
@@ -0,0 +1,75 @@
+====
+mach
+====
+
+Mach (German for *do*) is a generic command dispatcher for the command
+line.
+
+To use mach, you install the mach core (a Python package), create an
+executable *driver* script (named whatever you want), and write mach
+commands. When the *driver* is executed, mach dispatches to the
+requested command handler automatically.
+
+Features
+========
+
+On a high level, mach is similar to using argparse with subparsers (for
+command handling). When you dig deeper, mach offers a number of
+additional features:
+
+Distributed command definitions
+ With optparse/argparse, you have to define your commands on a central
+ parser instance. With mach, you annotate your command methods with
+ decorators and mach finds and dispatches to them automatically.
+
+Command categories
+ Mach commands can be grouped into categories when displayed in help.
+ This is currently not possible with argparse.
+
+Logging management
+ Mach provides a facility for logging (both classical text and
+ structured) that is available to any command handler.
+
+Settings files
+ Mach provides a facility for reading settings from an ini-like file
+ format.
+
+Components
+==========
+
+Mach is conceptually composed of the following components:
+
+core
+  The mach core is the Python package that contains all the business
+  logic making mach work. The core is common to all mach deployments.
+
+commands
+ These are what mach dispatches to. Commands are simply Python methods
+ registered as command names. The set of commands is unique to the
+ environment mach is deployed in.
+
+driver
+  The *driver* is the entry-point to mach. It is simply an executable
+  script that loads the mach core, tells it where commands can be
+  found, then asks the mach core to handle the current request. The
+  driver is unique to the deployed environment, but it is typically
+  based on an example from this source tree.
+
+Project State
+=============
+
+mach was originally written as a command dispatching framework to aid
+Firefox development. While the code is mostly generic, there are still
+some pieces that closely tie it to Mozilla/Firefox. The goal is for
+these to eventually be removed and replaced with generic features so
+mach is suitable for anybody to use. Until then, mach may not be the
+best fit for you.
+
+.. toctree::
+ :maxdepth: 1
+
+ commands
+ driver
+ logging
+ settings
diff --git a/python/mach/docs/logging.rst b/python/mach/docs/logging.rst
new file mode 100644
index 000000000..ff245cf03
--- /dev/null
+++ b/python/mach/docs/logging.rst
@@ -0,0 +1,100 @@
+.. _mach_logging:
+
+=======
+Logging
+=======
+
+Mach configures a built-in logging facility so commands can easily log
+data.
+
+What sets the logging facility apart from most loggers you've seen is
+that it encourages structured logging. Instead of conventional logging
+where simple strings are logged, the internal logging mechanism logs all
+events with the following pieces of information:
+
+* A string *action*
+* A dict of log message fields
+* A formatting string
+
+Essentially, instead of assembling a human-readable string at
+logging-time, you create an object holding all the pieces of data that
+will constitute your logged event. For each unique type of logged event,
+you assign an *action* name.
+
+Depending on how logging is configured, your logged event could get
+written a couple of different ways.
+
+JSON Logging
+============
+
+Where machines are the intended target of the logging data, a JSON
+logger is configured. The JSON logger assembles an array consisting of
+the following elements:
+
+* Decimal wall clock time in seconds since UNIX epoch
+* String *action* of message
+* Object with structured message data
+
+The JSON-serialized array is written to a configured file handle.
+Consumers of this logging stream can just perform a readline() then feed
+that into a JSON deserializer to reconstruct the original logged
+message. They can key off the *action* element to determine how to
+process individual events. There is no need to invent a parser.
+Convenient, isn't it?
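+
+For instance, a consumer might look like this minimal sketch, where
+``stream`` is assumed to be a file object attached to the JSON log:
+
+.. code-block:: python
+
+    import json
+
+    def consume(stream):
+        # Each line is a JSON array: [time, action, fields].
+        for line in stream:
+            created, action, params = json.loads(line)
+            if action == 'my_name':
+                print(params['name'])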
+
+Logging for Humans
+==================
+
+Where humans are the intended consumer of a log message, the structured
+log message is converted to a more human-friendly form. This is done by
+utilizing the *formatting* string provided at log time. The logger
+simply calls the *format* method of the formatting string, passing the
+dict containing the message's fields.
+
+When *mach* is used in a terminal that supports it, the logging facility
+also supports terminal features such as colorization. This is done
+automatically in the logging layer - there is no need to control this at
+logging time.
+
+In addition, output intended for humans typically prefixes every line
+with the time elapsed since the application started.
+
+Logging HOWTO
+=============
+
+Structured logging piggybacks on top of Python's built-in logging
+infrastructure provided by the *logging* package. We accomplish this by
+taking advantage of *logging.Logger.log()*'s *extra* argument. To this
+argument, we pass a dict with the fields *action* and *params*. These
+are the string *action* and dict of message fields, respectively. The
+formatting string is passed as the *msg* argument, like normal.
+
+If you were logging to a logger directly, you would do something like:
+
+.. code-block:: python
+
+ logger.log(logging.INFO, 'My name is {name}',
+ extra={'action': 'my_name', 'params': {'name': 'Gregory'}})
+
+The JSON logging would produce something like::
+
+ [1339985554.306338, "my_name", {"name": "Gregory"}]
+
+Human logging would produce something like::
+
+ 0.52 My name is Gregory
+
+Since using logger.log directly involves a lot of complexity, it is
+recommended to go through a wrapping layer that hides part of the
+complexity for you. The easiest way to do this is to use the
+LoggingMixin:
+
+.. code-block:: python
+
+ import logging
+ from mach.mixin.logging import LoggingMixin
+
+ class MyClass(LoggingMixin):
+ def foo(self):
+ self.log(logging.INFO, 'foo_start', {'bar': True},
+ 'Foo performed. Bar: {bar}')
diff --git a/python/mach/docs/settings.rst b/python/mach/docs/settings.rst
new file mode 100644
index 000000000..b51dc54a2
--- /dev/null
+++ b/python/mach/docs/settings.rst
@@ -0,0 +1,140 @@
+.. _mach_settings:
+
+========
+Settings
+========
+
+Mach can read settings in from a set of configuration files. These
+configuration files are either named ``machrc`` or ``.machrc`` and
+are specified by the bootstrap script. In mozilla-central, these files
+can live in ``~/.mozbuild`` and/or ``topsrcdir``.
+
+Settings can be specified anywhere, and are used by both the mach core
+and individual commands.
+
+
+Core Settings
+=============
+
+These settings are implemented by mach core.
+
+* alias - Create a command alias. This is useful if you want to alias a
+  command to something else, optionally including some defaults. It can
+  either be used to create an entirely new command, or to provide
+  defaults for an existing one. For example:
+
+.. parsed-literal::
+
+ [alias]
+ mochitest = mochitest -f browser
+ browser-test = mochitest -f browser
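+
+With the above, running ``mach browser-test`` is equivalent to running
+``mach mochitest -f browser``.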
+
+
+Defining Settings
+=================
+
+Settings need to be explicitly defined, along with their type,
+otherwise mach will raise an error when they are accessed.
+
+To define settings, use the :func:`~decorators.SettingsProvider`
+decorator in an existing mach command module. E.g:
+
+.. code-block:: python
+
+ from mach.decorators import SettingsProvider
+
+ @SettingsProvider
+ class ArbitraryClassName(object):
+ config_settings = [
+ ('foo.bar', 'string'),
+            ('foo.baz', 'int', 0, {'choices': set([0,1,2])}),
+ ]
+
+Classes decorated with ``@SettingsProvider`` must define a
+``config_settings`` attribute that is a list of tuples. Alternatively,
+``config_settings`` may be a callable that returns a list of tuples.
+
+Each tuple is of the form:
+
+.. code-block:: python
+
+ ('<section>.<option>', '<type>', default, extra)
+
+``type`` is a string and can be one of: ``string``, ``boolean``,
+``int``, ``pos_int``, ``path``.
+
+``default`` is optional, and provides a default value in case none was
+specified by any of the configuration files.
+
+``extra`` is also optional and is a dict containing additional key/value
+pairs to add to the setting's metadata. The following keys may be
+specified in the ``extra`` dict:
+
+* ``choices`` - A set of allowed values for the setting.
+
+Wildcards
+---------
+
+Sometimes a section should allow arbitrarily defined options from the user, such
+as the ``alias`` section mentioned above. To define a section like this, use ``*``
+as the option name. For example:
+
+.. parsed-literal::
+
+ ('foo.*', 'string')
+
+This allows configuration files like this:
+
+.. parsed-literal::
+
+ [foo]
+ arbitrary1 = some string
+ arbitrary2 = some other string
+
+
+Documenting Settings
+====================
+
+All settings must at least be documented in the en_US locale; otherwise,
+running ``mach settings`` will raise an error. Mach uses gettext to
+perform localization.
+
+A handy command exists to generate the localization files:
+
+.. parsed-literal::
+
+ mach settings locale-gen <section>
+
+You'll be prompted to add documentation for all options in the given
+sections using the en_US locale. To add documentation in another
+locale, pass in ``--locale``.
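+
+The generated ``.po`` file contains a ``msgid``/``msgstr`` pair for each
+option's short and full description, keyed as
+``<section>.<option>.short`` and ``<section>.<option>.full``. For
+example (strings here are illustrative):
+
+.. parsed-literal::
+
+    msgid "foo.bar.short"
+    msgstr "A terse description of foo.bar"
+
+    msgid "foo.bar.full"
+    msgstr "A longer explanation of what foo.bar controls."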
+
+
+Accessing Settings
+==================
+
+Now that the settings are defined and documented, they're accessible from
+individual mach commands if the command receives a context in its constructor.
+For example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ Command,
+ CommandProvider,
+ SettingsProvider,
+ )
+
+ @SettingsProvider
+ class ExampleSettings(object):
+ config_settings = [
+ ('a.b', 'string', 'default'),
+ ('foo.bar', 'string'),
+ ('foo.baz', 'int', 0, {'choices': set([0,1,2])}),
+ ]
+
+ @CommandProvider
+ class Commands(object):
+ def __init__(self, context):
+ self.settings = context.settings
+
+ @Command('command', category='misc',
+ description='Prints a setting')
+ def command(self):
+ print(self.settings.a.b)
+ for option in self.settings.foo:
+ print(self.settings.foo[option])
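+
+With a configuration file like the following, ``mach command`` would
+print ``default`` (the fallback for ``a.b``) followed by the value of
+each option set in the ``foo`` section (the values here are
+illustrative):
+
+.. parsed-literal::
+
+    [foo]
+    bar = hello
+    baz = 2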
diff --git a/python/mach/mach/__init__.py b/python/mach/mach/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mach/mach/__init__.py
diff --git a/python/mach/mach/base.py b/python/mach/mach/base.py
new file mode 100644
index 000000000..3556dc6e5
--- /dev/null
+++ b/python/mach/mach/base.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+
+class CommandContext(object):
+ """Holds run-time state so it can easily be passed to command providers."""
+ def __init__(self, cwd=None, settings=None, log_manager=None,
+ commands=None, **kwargs):
+ self.cwd = cwd
+ self.settings = settings
+ self.log_manager = log_manager
+ self.commands = commands
+
+        for k, v in kwargs.items():
+ setattr(self, k, v)
+
+
+class MachError(Exception):
+ """Base class for all errors raised by mach itself."""
+
+
+class NoCommandError(MachError):
+ """No command was passed into mach."""
+
+
+class UnknownCommandError(MachError):
+ """Raised when we attempted to execute an unknown command."""
+
+ def __init__(self, command, verb, suggested_commands=None):
+ MachError.__init__(self)
+
+ self.command = command
+ self.verb = verb
+ self.suggested_commands = suggested_commands or []
+
+
+class UnrecognizedArgumentError(MachError):
+ """Raised when an unknown argument is passed to mach."""
+
+ def __init__(self, command, arguments):
+ MachError.__init__(self)
+
+ self.command = command
+ self.arguments = arguments
diff --git a/python/mach/mach/commands/__init__.py b/python/mach/mach/commands/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mach/mach/commands/__init__.py
diff --git a/python/mach/mach/commands/commandinfo.py b/python/mach/mach/commands/commandinfo.py
new file mode 100644
index 000000000..cce85f859
--- /dev/null
+++ b/python/mach/mach/commands/commandinfo.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from mach.decorators import (
+ CommandProvider,
+ Command,
+ CommandArgument,
+)
+
+
+@CommandProvider
+class BuiltinCommands(object):
+ def __init__(self, context):
+ self.context = context
+
+ @property
+ def command_keys(self):
+ # NOTE 'REMOVED' is a function in testing/mochitest/mach_commands.py
+ return (k for k, v in self.context.commands.command_handlers.items()
+ if not v.conditions or v.conditions[0].__name__ != 'REMOVED')
+
+ @Command('mach-commands', category='misc',
+ description='List all mach commands.')
+ def commands(self):
+ print("\n".join(self.command_keys))
+
+ @Command('mach-debug-commands', category='misc',
+ description='Show info about available mach commands.')
+ @CommandArgument('match', metavar='MATCH', default=None, nargs='?',
+ help='Only display commands containing given substring.')
+ def debug_commands(self, match=None):
+ import inspect
+
+ handlers = self.context.commands.command_handlers
+ for command in sorted(self.command_keys):
+ if match and match not in command:
+ continue
+
+ handler = handlers[command]
+ cls = handler.cls
+            method = getattr(cls, handler.method)
+
+ print(command)
+ print('=' * len(command))
+ print('')
+ print('File: %s' % inspect.getsourcefile(method))
+ print('Class: %s' % cls.__name__)
+ print('Method: %s' % handler.method)
+ print('')
+
diff --git a/python/mach/mach/commands/settings.py b/python/mach/mach/commands/settings.py
new file mode 100644
index 000000000..d5bb807d8
--- /dev/null
+++ b/python/mach/mach/commands/settings.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+from textwrap import TextWrapper
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+ SubCommand,
+)
+
+POLIB_NOT_FOUND = """
+Could not detect the 'polib' package on the local system.
+Please run:
+
+ pip install polib
+""".lstrip()
+
+
+@CommandProvider
+class Settings(object):
+ """Interact with settings for mach.
+
+ Currently, we only provide functionality to view what settings are
+ available. In the future, this module will be used to modify settings, help
+ people create configs via a wizard, etc.
+ """
+ def __init__(self, context):
+ self._settings = context.settings
+
+ @Command('settings', category='devenv',
+ description='Show available config settings.')
+ @CommandArgument('-l', '--list', dest='short', action='store_true',
+ help='Show settings in a concise list')
+ def settings(self, short=None):
+ """List available settings."""
+ if short:
+ for section in sorted(self._settings):
+ for option in sorted(self._settings[section]._settings):
+ short, full = self._settings.option_help(section, option)
+ print('%s.%s -- %s' % (section, option, short))
+ return
+
+ wrapper = TextWrapper(initial_indent='# ', subsequent_indent='# ')
+ for section in sorted(self._settings):
+ print('[%s]' % section)
+
+ for option in sorted(self._settings[section]._settings):
+ short, full = self._settings.option_help(section, option)
+ print(wrapper.fill(full))
+
+ if option != '*':
+ print(';%s =' % option)
+ print('')
+
+ @SubCommand('settings', 'locale-gen',
+ description='Generate or update gettext .po and .mo locale files.')
+ @CommandArgument('sections', nargs='*',
+ help='A list of strings in the form of either <section> or '
+ '<section>.<option> to translate. By default, prompt to '
+ 'translate all applicable options.')
+ @CommandArgument('--locale', default='en_US',
+ help='Locale to generate, defaults to en_US.')
+ @CommandArgument('--overwrite', action='store_true', default=False,
+ help='Overwrite pre-existing entries in .po files.')
+ def locale_gen(self, sections, **kwargs):
+ try:
+ import polib
+ except ImportError:
+ print(POLIB_NOT_FOUND)
+ return 1
+
+ self.was_prompted = False
+
+ sections = sections or self._settings
+ for section in sections:
+ self._gen_section(section, **kwargs)
+
+ if not self.was_prompted:
+ print("All specified options already have an {} translation. "
+ "To overwrite existing translations, pass --overwrite."
+ .format(kwargs['locale']))
+
+ def _gen_section(self, section, **kwargs):
+ if '.' in section:
+ section, option = section.split('.')
+ return self._gen_option(section, option, **kwargs)
+
+ for option in sorted(self._settings[section]._settings):
+ self._gen_option(section, option, **kwargs)
+
+ def _gen_option(self, section, option, locale, overwrite):
+ import polib
+
+ meta = self._settings[section]._settings[option]
+
+ localedir = os.path.join(meta['localedir'], locale, 'LC_MESSAGES')
+ if not os.path.isdir(localedir):
+ os.makedirs(localedir)
+
+ path = os.path.join(localedir, '{}.po'.format(section))
+ if os.path.isfile(path):
+ po = polib.pofile(path)
+ else:
+ po = polib.POFile()
+
+ optionid = '{}.{}'.format(section, option)
+ for name in ('short', 'full'):
+ msgid = '{}.{}'.format(optionid, name)
+ entry = po.find(msgid)
+ if not entry:
+ entry = polib.POEntry(msgid=msgid)
+ po.append(entry)
+
+ if entry in po.translated_entries() and not overwrite:
+ continue
+
+ self.was_prompted = True
+
+ msgstr = raw_input("Translate {} to {}:\n"
+ .format(msgid, locale))
+ entry.msgstr = msgstr
+
+ if self.was_prompted:
+ mopath = os.path.join(localedir, '{}.mo'.format(section))
+ po.save(path)
+ po.save_as_mofile(mopath)
diff --git a/python/mach/mach/config.py b/python/mach/mach/config.py
new file mode 100644
index 000000000..26c9a4482
--- /dev/null
+++ b/python/mach/mach/config.py
@@ -0,0 +1,461 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""
+This file defines classes for representing config data/settings.
+
+Config data is modeled as key-value pairs. Keys are grouped together into named
+sections. Individual config settings (options) have metadata associated with
+them. This metadata includes type, default value, valid values, etc.
+
+The main interface to config data is the ConfigSettings class. One or
+more ConfigProvider classes are associated with ConfigSettings and
+define what settings are available.
+
+Descriptions of individual config options can be translated to multiple
+languages using gettext. Each option has associated with it a domain and locale
+directory. By default, the domain is the section the option is in and the
+locale directory is the "locale" directory beneath the directory containing the
+module that defines it.
+
+People implementing ConfigProvider instances are expected to define a complete
+gettext .po and .mo file for the en_US locale. The |mach settings locale-gen|
+command can be used to populate these files.
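+
+Typical usage looks like the following sketch, where MyProvider stands
+in for any class exposing config_settings metadata:
+
+    settings = ConfigSettings()
+    settings.register_provider(MyProvider)
+    settings.load_files(['/path/to/machrc'])
+    value = settings.mysection.myoption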
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import gettext
+import os
+import sys
+from functools import wraps
+
+if sys.version_info[0] == 3:
+ from configparser import RawConfigParser, NoSectionError
+ str_type = str
+else:
+ from ConfigParser import RawConfigParser, NoSectionError
+ str_type = basestring
+
+
+TRANSLATION_NOT_FOUND = """
+No translation files detected for {section}, there must at least be a
+translation for the 'en_US' locale. To generate these files, run:
+
+ mach settings locale-gen {section}
+""".lstrip()
+
+
+class ConfigException(Exception):
+ pass
+
+
+class ConfigType(object):
+ """Abstract base class for config values."""
+
+ @staticmethod
+ def validate(value):
+ """Validates a Python value conforms to this type.
+
+ Raises a TypeError or ValueError if it doesn't conform. Does not do
+ anything if the value is valid.
+ """
+
+ @staticmethod
+ def from_config(config, section, option):
+ """Obtain the value of this type from a RawConfigParser.
+
+ Receives a RawConfigParser instance, a str section name, and the str
+ option in that section to retrieve.
+
+ The implementation may assume the option exists in the RawConfigParser
+ instance.
+
+ Implementations are not expected to validate the value. But, they
+ should return the appropriate Python type.
+ """
+
+ @staticmethod
+ def to_config(value):
+ return value
+
+
+class StringType(ConfigType):
+ @staticmethod
+ def validate(value):
+ if not isinstance(value, str_type):
+ raise TypeError()
+
+ @staticmethod
+ def from_config(config, section, option):
+ return config.get(section, option)
+
+
+class BooleanType(ConfigType):
+ @staticmethod
+ def validate(value):
+ if not isinstance(value, bool):
+ raise TypeError()
+
+ @staticmethod
+ def from_config(config, section, option):
+ return config.getboolean(section, option)
+
+ @staticmethod
+ def to_config(value):
+ return 'true' if value else 'false'
+
+
+class IntegerType(ConfigType):
+ @staticmethod
+ def validate(value):
+ if not isinstance(value, int):
+ raise TypeError()
+
+ @staticmethod
+ def from_config(config, section, option):
+ return config.getint(section, option)
+
+
+class PositiveIntegerType(IntegerType):
+ @staticmethod
+ def validate(value):
+ if not isinstance(value, int):
+ raise TypeError()
+
+ if value < 0:
+ raise ValueError()
+
+
+class PathType(StringType):
+ @staticmethod
+ def validate(value):
+ if not isinstance(value, str_type):
+ raise TypeError()
+
+ @staticmethod
+ def from_config(config, section, option):
+ return config.get(section, option)
+
+
+TYPE_CLASSES = {
+ 'string': StringType,
+ 'boolean': BooleanType,
+ 'int': IntegerType,
+ 'pos_int': PositiveIntegerType,
+ 'path': PathType,
+}
+
+
+class DefaultValue(object):
+ pass
+
+
+def reraise_attribute_error(func):
+ """Used to make sure __getattr__ wrappers around __getitem__
+ raise AttributeError instead of KeyError.
+ """
+ @wraps(func)
+ def _(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+        except KeyError as e:
+            # Convert to AttributeError so getattr()/hasattr() callers
+            # see the expected exception type on both Python 2 and 3.
+            raise AttributeError(*e.args)
+ return _
+
+
+class ConfigSettings(collections.Mapping):
+ """Interface for configuration settings.
+
+ This is the main interface to the configuration.
+
+ A configuration is a collection of sections. Each section contains
+ key-value pairs.
+
+ When an instance is created, the caller first registers ConfigProvider
+ instances with it. This tells the ConfigSettings what individual settings
+ are available and defines extra metadata associated with those settings.
+ This is used for validation, etc.
+
+ Once ConfigProvider instances are registered, a config is populated. It can
+ be loaded from files or populated by hand.
+
+ ConfigSettings instances are accessed like dictionaries or by using
+ attributes. e.g. the section "foo" is accessed through either
+ settings.foo or settings['foo'].
+
+ Sections are modeled by the ConfigSection class which is defined inside
+ this one. They look just like dicts or classes with attributes. To access
+ the "bar" option in the "foo" section:
+
+ value = settings.foo.bar
+ value = settings['foo']['bar']
+ value = settings.foo['bar']
+
+ Assignment is similar:
+
+ settings.foo.bar = value
+ settings['foo']['bar'] = value
+ settings['foo'].bar = value
+
+ You can even delete user-assigned values:
+
+ del settings.foo.bar
+ del settings['foo']['bar']
+
+ If there is a default, it will be returned.
+
+ When settings are mutated, they are validated against the registered
+ providers. Setting unknown settings or setting values to illegal values
+ will result in exceptions being raised.
+ """
+
+ class ConfigSection(collections.MutableMapping, object):
+ """Represents an individual config section."""
+ def __init__(self, config, name, settings):
+ object.__setattr__(self, '_config', config)
+ object.__setattr__(self, '_name', name)
+ object.__setattr__(self, '_settings', settings)
+
+ wildcard = any(s == '*' for s in self._settings)
+ object.__setattr__(self, '_wildcard', wildcard)
+
+ @property
+ def options(self):
+ try:
+ return self._config.options(self._name)
+ except NoSectionError:
+ return []
+
+ def get_meta(self, option):
+ if option in self._settings:
+ return self._settings[option]
+ if self._wildcard:
+ return self._settings['*']
+ raise KeyError('Option not registered with provider: %s' % option)
+
+ def _validate(self, option, value):
+ meta = self.get_meta(option)
+ meta['type_cls'].validate(value)
+
+ if 'choices' in meta and value not in meta['choices']:
+ raise ValueError("Value '%s' must be one of: %s" % (
+ value, ', '.join(sorted(meta['choices']))))
+
+ # MutableMapping interface
+ def __len__(self):
+ return len(self.options)
+
+ def __iter__(self):
+ return iter(self.options)
+
+ def __contains__(self, k):
+ return self._config.has_option(self._name, k)
+
+ def __getitem__(self, k):
+ meta = self.get_meta(k)
+
+ if self._config.has_option(self._name, k):
+ v = meta['type_cls'].from_config(self._config, self._name, k)
+ else:
+ v = meta.get('default', DefaultValue)
+
+            if v is DefaultValue:
+ raise KeyError('No default value registered: %s' % k)
+
+ self._validate(k, v)
+ return v
+
+ def __setitem__(self, k, v):
+ self._validate(k, v)
+ meta = self.get_meta(k)
+
+ if not self._config.has_section(self._name):
+ self._config.add_section(self._name)
+
+ self._config.set(self._name, k, meta['type_cls'].to_config(v))
+
+ def __delitem__(self, k):
+ self._config.remove_option(self._name, k)
+
+ # Prune empty sections.
+ if not len(self._config.options(self._name)):
+ self._config.remove_section(self._name)
+
+ @reraise_attribute_error
+ def __getattr__(self, k):
+ return self.__getitem__(k)
+
+ @reraise_attribute_error
+ def __setattr__(self, k, v):
+ self.__setitem__(k, v)
+
+ @reraise_attribute_error
+ def __delattr__(self, k):
+ self.__delitem__(k)
+
+
+ def __init__(self):
+ self._config = RawConfigParser()
+
+ self._settings = {}
+ self._sections = {}
+ self._finalized = False
+ self.loaded_files = set()
+
+ def load_file(self, filename):
+ self.load_files([filename])
+
+ def load_files(self, filenames):
+ """Load a config from files specified by their paths.
+
+ Files are loaded in the order given. Subsequent files will overwrite
+ values from previous files. If a file does not exist, it will be
+ ignored.
+ """
+ filtered = [f for f in filenames if os.path.exists(f)]
+
+ fps = [open(f, 'rt') for f in filtered]
+ self.load_fps(fps)
+ self.loaded_files.update(set(filtered))
+ for fp in fps:
+ fp.close()
+
+ def load_fps(self, fps):
+ """Load config data by reading file objects."""
+
+ for fp in fps:
+ self._config.readfp(fp)
+
+ def write(self, fh):
+ """Write the config to a file object."""
+ self._config.write(fh)
+
+ @classmethod
+ def _format_metadata(cls, provider, section, option, type_cls,
+ default=DefaultValue, extra=None):
+ """Formats and returns the metadata for a setting.
+
+ Each setting must have:
+
+ section -- str section to which the setting belongs. This is how
+ settings are grouped.
+
+ option -- str id for the setting. This must be unique within the
+ section it appears.
+
+ type -- a ConfigType-derived type defining the type of the setting.
+
+ Each setting has the following optional parameters:
+
+        default -- The default value for the setting. If omitted, there
+            is no default.
+
+ extra -- A dict of additional key/value pairs to add to the
+ setting metadata.
+ """
+        if isinstance(type_cls, str_type):
+ type_cls = TYPE_CLASSES[type_cls]
+
+ meta = {
+ 'short': '%s.short' % option,
+ 'full': '%s.full' % option,
+ 'type_cls': type_cls,
+ 'domain': section,
+ 'localedir': provider.config_settings_locale_directory,
+ }
+
+ if default != DefaultValue:
+ meta['default'] = default
+
+ if extra:
+ meta.update(extra)
+
+ return meta
+
+ def register_provider(self, provider):
+ """Register a SettingsProvider with this settings interface."""
+
+ if self._finalized:
+ raise ConfigException('Providers cannot be registered after finalized.')
+
+ settings = provider.config_settings
+ if callable(settings):
+ settings = settings()
+
+ config_settings = collections.defaultdict(dict)
+ for setting in settings:
+ section, option = setting[0].split('.')
+
+ if option in config_settings[section]:
+ raise ConfigException('Setting has already been registered: %s.%s' % (
+ section, option))
+
+ meta = self._format_metadata(provider, section, option, *setting[1:])
+ config_settings[section][option] = meta
+
+ for section_name, settings in config_settings.items():
+ section = self._settings.get(section_name, {})
+
+ for k, v in settings.items():
+ if k in section:
+                    raise ConfigException('Setting already registered: %s.%s' %
+                                          (section_name, k))
+
+ section[k] = v
+
+ self._settings[section_name] = section
+
+ def option_help(self, section, option):
+ """Obtain the translated help messages for an option."""
+
+ meta = self[section].get_meta(option)
+
+ # Providers should always have an en_US translation. If they don't,
+ # they are coded wrong and this will raise.
+ default = gettext.translation(meta['domain'], meta['localedir'],
+ ['en_US'])
+
+ t = gettext.translation(meta['domain'], meta['localedir'],
+ fallback=True)
+ t.add_fallback(default)
+
+        # Python 3's gettext translations have no ugettext(); fall back
+        # to gettext(), which also returns text there.
+        ugettext = getattr(t, 'ugettext', t.gettext)
+        short = ugettext('%s.%s.short' % (section, option))
+        full = ugettext('%s.%s.full' % (section, option))
+
+ return (short, full)
+
+ def _finalize(self):
+ if self._finalized:
+ return
+
+ for section, settings in self._settings.items():
+ s = ConfigSettings.ConfigSection(self._config, section, settings)
+ self._sections[section] = s
+
+ self._finalized = True
+
+ # Mapping interface.
+ def __len__(self):
+ return len(self._settings)
+
+ def __iter__(self):
+ self._finalize()
+
+ return iter(self._sections.keys())
+
+ def __contains__(self, k):
+ return k in self._settings
+
+ def __getitem__(self, k):
+ self._finalize()
+
+ return self._sections[k]
+
+ # Allow attribute access because it looks nice.
+ @reraise_attribute_error
+ def __getattr__(self, k):
+ return self.__getitem__(k)
diff --git a/python/mach/mach/decorators.py b/python/mach/mach/decorators.py
new file mode 100644
index 000000000..6c1713d7f
--- /dev/null
+++ b/python/mach/mach/decorators.py
@@ -0,0 +1,353 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import argparse
+import collections
+import inspect
+import os
+import types
+
+from .base import MachError
+from .registrar import Registrar
+
+
+class _MachCommand(object):
+ """Container for mach command metadata.
+
+ Mach commands contain lots of attributes. This class exists to capture them
+ in a sane way so tuples, etc aren't used instead.
+ """
+ __slots__ = (
+ # Content from decorator arguments to define the command.
+ 'name',
+ 'subcommand',
+ 'category',
+ 'description',
+ 'conditions',
+ '_parser',
+ 'arguments',
+ 'argument_group_names',
+
+ # Describes how dispatch is performed.
+
+ # The Python class providing the command. This is the class type not
+ # an instance of the class. Mach will instantiate a new instance of
+ # the class if the command is executed.
+ 'cls',
+
+ # Whether the __init__ method of the class should receive a mach
+ # context instance. This should only affect the mach driver and how
+ # it instantiates classes.
+ 'pass_context',
+
+ # The name of the method providing the command. In other words, this
+ # is the str name of the attribute on the class type corresponding to
+ # the name of the function.
+ 'method',
+
+ # Dict of string to _MachCommand defining sub-commands for this
+ # command.
+ 'subcommand_handlers',
+ )
+
+ def __init__(self, name=None, subcommand=None, category=None,
+ description=None, conditions=None, parser=None):
+ self.name = name
+ self.subcommand = subcommand
+ self.category = category
+ self.description = description
+ self.conditions = conditions or []
+ self._parser = parser
+ self.arguments = []
+ self.argument_group_names = []
+
+ self.cls = None
+ self.pass_context = None
+ self.method = None
+ self.subcommand_handlers = {}
+
+ @property
+ def parser(self):
+ # Creating CLI parsers at command dispatch time can be expensive. Make
+ # it possible to lazy load them by using functions.
+ if callable(self._parser):
+ self._parser = self._parser()
+
+ return self._parser
+
+ @property
+ def docstring(self):
+ return self.cls.__dict__[self.method].__doc__
+
+ def __ior__(self, other):
+ if not isinstance(other, _MachCommand):
+ raise ValueError('can only operate on _MachCommand instances')
+
+ for a in self.__slots__:
+ if not getattr(self, a):
+ setattr(self, a, getattr(other, a))
+
+ return self
+
+
+def CommandProvider(cls):
+    """Class decorator to denote that a class provides mach commands.
+
+ When this decorator is present, mach looks for commands being defined by
+ methods inside the class.
+ """
+
+ # The implementation of this decorator relies on the parse-time behavior of
+ # decorators. When the module is imported, the method decorators (like
+ # @Command and @CommandArgument) are called *before* this class decorator.
+ # The side-effect of the method decorators is to store specifically-named
+ # attributes on the function types. We just scan over all functions in the
+ # class looking for the side-effects of the method decorators.
+
+ # Tell mach driver whether to pass context argument to __init__.
+ pass_context = False
+
+ if inspect.ismethod(cls.__init__):
+ spec = inspect.getargspec(cls.__init__)
+
+ if len(spec.args) > 2:
+ msg = 'Mach @CommandProvider class %s implemented incorrectly. ' + \
+ '__init__() must take 1 or 2 arguments. From %s'
+ msg = msg % (cls.__name__, inspect.getsourcefile(cls))
+ raise MachError(msg)
+
+ if len(spec.args) == 2:
+ pass_context = True
+
+ seen_commands = set()
+
+ # We scan __dict__ because we only care about the classes own attributes,
+ # not inherited ones. If we did inherited attributes, we could potentially
+ # define commands multiple times. We also sort keys so commands defined in
+ # the same class are grouped in a sane order.
+ for attr in sorted(cls.__dict__.keys()):
+ value = cls.__dict__[attr]
+
+ if not isinstance(value, types.FunctionType):
+ continue
+
+ command = getattr(value, '_mach_command', None)
+ if not command:
+ continue
+
+ # Ignore subcommands for now: we handle them later.
+ if command.subcommand:
+ continue
+
+ seen_commands.add(command.name)
+
+ if not command.conditions and Registrar.require_conditions:
+ continue
+
+ msg = 'Mach command \'%s\' implemented incorrectly. ' + \
+ 'Conditions argument must take a list ' + \
+ 'of functions. Found %s instead.'
+
+ if not isinstance(command.conditions, collections.Iterable):
+ msg = msg % (command.name, type(command.conditions))
+ raise MachError(msg)
+
+ for c in command.conditions:
+ if not hasattr(c, '__call__'):
+ msg = msg % (command.name, type(c))
+ raise MachError(msg)
+
+ command.cls = cls
+ command.method = attr
+ command.pass_context = pass_context
+
+ Registrar.register_command_handler(command)
+
+ # Now do another pass to get sub-commands. We do this in two passes so
+ # we can check the parent command existence without having to hold
+ # state and reconcile after traversal.
+ for attr in sorted(cls.__dict__.keys()):
+ value = cls.__dict__[attr]
+
+ if not isinstance(value, types.FunctionType):
+ continue
+
+ command = getattr(value, '_mach_command', None)
+ if not command:
+ continue
+
+ # It is a regular command.
+ if not command.subcommand:
+ continue
+
+ if command.name not in seen_commands:
+ raise MachError('Command referenced by sub-command does not '
+ 'exist: %s' % command.name)
+
+ if command.name not in Registrar.command_handlers:
+ continue
+
+ command.cls = cls
+ command.method = attr
+ command.pass_context = pass_context
+ parent = Registrar.command_handlers[command.name]
+
+ if parent._parser:
+ raise MachError('cannot declare sub commands against a command '
+ 'that has a parser installed: %s' % command)
+ if command.subcommand in parent.subcommand_handlers:
+ raise MachError('sub-command already defined: %s' % command.subcommand)
+
+ parent.subcommand_handlers[command.subcommand] = command
+
+ return cls
+
+
+class Command(object):
+ """Decorator for functions or methods that provide a mach command.
+
+ The decorator accepts arguments that define basic attributes of the
+ command. The following arguments are recognized:
+
+ category -- The string category to which this command belongs. Mach's
+ help will group commands by category.
+
+ description -- A brief description of what the command does.
+
+ parser -- an optional argparse.ArgumentParser instance or callable
+ that returns an argparse.ArgumentParser instance to use as the
+ basis for the command arguments.
+
+ For example:
+
+ @Command('foo', category='misc', description='Run the foo action')
+ def foo(self):
+ pass
+ """
+ def __init__(self, name, **kwargs):
+ self._mach_command = _MachCommand(name=name, **kwargs)
+
+ def __call__(self, func):
+ if not hasattr(func, '_mach_command'):
+ func._mach_command = _MachCommand()
+
+ func._mach_command |= self._mach_command
+
+ return func
+
+
+class SubCommand(object):
+ """Decorator for functions or methods that provide a sub-command.
+
+ Mach commands can have sub-commands. e.g. ``mach command foo`` or
+ ``mach command bar``. Each sub-command has its own parser and is
+ effectively its own mach command.
+
+ The decorator accepts arguments that define basic attributes of the
+ sub command:
+
+ command -- The string of the command this sub command should be
+ attached to.
+
+ subcommand -- The string name of the sub command to register.
+
+ description -- A textual description for this sub command.
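+
+    For example (command names are illustrative):
+
+        @Command('foo', category='misc', description='Run the foo action')
+        def foo(self):
+            pass
+
+        @SubCommand('foo', 'bar', description='Run the bar sub-action')
+        def foo_bar(self):
+            pass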
+ """
+ def __init__(self, command, subcommand, description=None):
+ self._mach_command = _MachCommand(name=command, subcommand=subcommand,
+ description=description)
+
+ def __call__(self, func):
+ if not hasattr(func, '_mach_command'):
+ func._mach_command = _MachCommand()
+
+ func._mach_command |= self._mach_command
+
+ return func
+
+
+class CommandArgument(object):
+ """Decorator for additional arguments to mach subcommands.
+
+ This decorator should be used to add arguments to mach commands. Arguments
+ to the decorator are proxied to ArgumentParser.add_argument().
+
+ For example:
+
+        @Command('foo', category='misc', description='Run the foo action')
+ @CommandArgument('-b', '--bar', action='store_true', default=False,
+ help='Enable bar mode.')
+ def foo(self):
+ pass
+ """
+ def __init__(self, *args, **kwargs):
+ if kwargs.get('nargs') == argparse.REMAINDER:
+ # These are the assertions we make in dispatcher.py about
+ # those types of CommandArguments.
+ assert len(args) == 1
+ assert all(k in ('default', 'nargs', 'help', 'group') for k in kwargs)
+ self._command_args = (args, kwargs)
+
+ def __call__(self, func):
+ if not hasattr(func, '_mach_command'):
+ func._mach_command = _MachCommand()
+
+ func._mach_command.arguments.insert(0, self._command_args)
+
+ return func
+
+
+class CommandArgumentGroup(object):
+ """Decorator for additional argument groups to mach commands.
+
+ This decorator should be used to add arguments groups to mach commands.
+ Arguments to the decorator are proxied to
+ ArgumentParser.add_argument_group().
+
+ For example:
+
+        @Command('foo', category='misc', description='Run the foo action')
+ @CommandArgumentGroup('group1')
+ @CommandArgument('-b', '--bar', group='group1', action='store_true',
+ default=False, help='Enable bar mode.')
+ def foo(self):
+ pass
+
+ The name should be chosen so that it makes sense as part of the phrase
+ 'Command Arguments for <name>' because that's how it will be shown in the
+ help message.
+ """
+ def __init__(self, group_name):
+ self._group_name = group_name
+
+ def __call__(self, func):
+ if not hasattr(func, '_mach_command'):
+ func._mach_command = _MachCommand()
+
+ func._mach_command.argument_group_names.insert(0, self._group_name)
+
+ return func
+
+
+def SettingsProvider(cls):
+ """Class decorator to denote that this class provides Mach settings.
+
+ When this decorator is encountered, the underlying class will automatically
+ be registered with the Mach registrar and will (likely) be hooked up to the
+ mach driver.
+ """
+ if not hasattr(cls, 'config_settings'):
+ raise MachError('@SettingsProvider must contain a config_settings attribute. It '
+ 'may either be a list of tuples, or a callable that returns a list '
+ 'of tuples. Each tuple must be of the form:\n'
+ '(<section>.<option>, <type_cls>, <default>, <choices>)\n'
+ 'as specified by ConfigSettings._format_metadata.')
+
+ if not hasattr(cls, 'config_settings_locale_directory'):
+ cls_dir = os.path.dirname(inspect.getfile(cls))
+ cls.config_settings_locale_directory = os.path.join(cls_dir, 'locale')
+
+ Registrar.register_settings_provider(cls)
+ return cls
+
diff --git a/python/mach/mach/dispatcher.py b/python/mach/mach/dispatcher.py
new file mode 100644
index 000000000..5604d981d
--- /dev/null
+++ b/python/mach/mach/dispatcher.py
@@ -0,0 +1,453 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import argparse
+import difflib
+import shlex
+import sys
+
+from operator import itemgetter
+
+from .base import (
+ NoCommandError,
+ UnknownCommandError,
+ UnrecognizedArgumentError,
+)
+from .decorators import SettingsProvider
+
+
+@SettingsProvider
+class DispatchSettings(object):
+ config_settings = [
+ ('alias.*', 'string'),
+ ]
+
+
+class CommandFormatter(argparse.HelpFormatter):
+ """Custom formatter to format just a subcommand."""
+
+ def add_usage(self, *args):
+ pass
+
+
+class CommandAction(argparse.Action):
+ """An argparse action that handles mach commands.
+
+ This class is essentially a reimplementation of argparse's sub-parsers
+ feature. We first tried to use sub-parsers. However, they were missing
+ features like grouping of commands (http://bugs.python.org/issue14037).
+
+ The way this works involves light magic and a partial understanding of how
+ argparse works.
+
+ Arguments registered with an argparse.ArgumentParser have an action
+ associated with them. An action is essentially a class that when called
+ does something with the encountered argument(s). This class is one of those
+ action classes.
+
+ An instance of this class is created doing something like:
+
+ parser.add_argument('command', action=CommandAction, registrar=r)
+
+ Note that a mach.registrar.Registrar instance is passed in. The Registrar
+ holds information on all the mach commands that have been registered.
+
+ When this argument is registered with the ArgumentParser, an instance of
+ this class is instantiated. One of the subtle but important things it does
+ is tell the argument parser that it's interested in *all* of the remaining
+ program arguments. So, when the ArgumentParser calls this action, we will
+ receive the command name plus all of its arguments.
+
+ For more, read the docs in __call__.
+ """
+ def __init__(self, option_strings, dest, required=True, default=None,
+ registrar=None, context=None):
+ # A proper API would have **kwargs here. However, since we are a little
+ # hacky, we intentionally omit it as a way of detecting potentially
+ # breaking changes with argparse's implementation.
+ #
+ # In a similar vein, default is passed in but is not needed, so we drop
+ # it.
+ argparse.Action.__init__(self, option_strings, dest, required=required,
+ help=argparse.SUPPRESS, nargs=argparse.REMAINDER)
+
+ self._mach_registrar = registrar
+ self._context = context
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ """This is called when the ArgumentParser has reached our arguments.
+
+ Since we always register ourselves with nargs=argparse.REMAINDER,
+ values should be a list of remaining arguments to parse. The first
+ argument should be the name of the command to invoke and all remaining
+ arguments are arguments for that command.
+
+ The gist of the flow is that we look at the command being invoked. If
+ it's *help*, we handle that specially (because argparse's default help
+ handler isn't satisfactory). Else, we create a new, independent
+ ArgumentParser instance for just the invoked command (based on the
+ information contained in the command registrar) and feed the arguments
+ into that parser. We then merge the results with the main
+ ArgumentParser.
+ """
+ if namespace.help:
+ # -h or --help is in the global arguments.
+ self._handle_main_help(parser, namespace.verbose)
+ sys.exit(0)
+ elif values:
+ command = values[0].lower()
+ args = values[1:]
+ if command == 'help':
+ if args and args[0] not in ['-h', '--help']:
+ # Make sure args[0] is indeed a command.
+ self._handle_command_help(parser, args[0])
+ else:
+ self._handle_main_help(parser, namespace.verbose)
+ sys.exit(0)
+ elif '-h' in args or '--help' in args:
+ # -h or --help is in the command arguments.
+ if '--' in args:
+ # -- is in command arguments
+ if '-h' in args[:args.index('--')] or '--help' in args[:args.index('--')]:
+ # Honor -h or --help only if it appears before --
+ self._handle_command_help(parser, command)
+ sys.exit(0)
+ else:
+ self._handle_command_help(parser, command)
+ sys.exit(0)
+ else:
+ raise NoCommandError()
+
+        # First see if this is a user-defined alias.
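+        # e.g. the machrc entry "browser-test = mochitest -f browser"
+        # makes "mach browser-test" dispatch to mochitest with those
+        # default arguments inserted before the user-supplied ones.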
+ if command in self._context.settings.alias:
+ alias = self._context.settings.alias[command]
+ defaults = shlex.split(alias)
+ command = defaults.pop(0)
+ args = defaults + args
+
+ if command not in self._mach_registrar.command_handlers:
+ # Try to find similar commands, may raise UnknownCommandError.
+ command = self._suggest_command(command)
+
+ handler = self._mach_registrar.command_handlers.get(command)
+
+ usage = '%(prog)s [global arguments] ' + command + \
+ ' [command arguments]'
+
+ subcommand = None
+
+ # If there are sub-commands, parse the intent out immediately.
+ if handler.subcommand_handlers and args:
+ if len(args) == 1 and args[0] in ('help', '--help'):
+ self._handle_subcommand_main_help(parser, handler)
+ sys.exit(0)
+ # mach <command> help <subcommand>
+ elif len(args) == 2 and args[0] == 'help':
+ subcommand = args[1]
+ subhandler = handler.subcommand_handlers[subcommand]
+ self._handle_subcommand_help(parser, command, subcommand, subhandler)
+ sys.exit(0)
+ # We are running a sub command.
+ elif args[0] in handler.subcommand_handlers:
+ subcommand = args[0]
+ handler = handler.subcommand_handlers[subcommand]
+ usage = '%(prog)s [global arguments] ' + command + ' ' + \
+ subcommand + ' [command arguments]'
+ args.pop(0)
+
+ # We create a new parser, populate it with the command's arguments,
+ # then feed all remaining arguments to it, merging the results
+ # with ourselves. This is essentially what argparse subparsers
+ # do.
+
+ parser_args = {
+ 'add_help': False,
+ 'usage': usage,
+ }
+
+ remainder = None
+
+ if handler.parser:
+ subparser = handler.parser
+ subparser.context = self._context
+ for arg in subparser._actions[:]:
+ if arg.nargs == argparse.REMAINDER:
+ subparser._actions.remove(arg)
+ remainder = (arg.dest,), {'default': arg.default,
+ 'nargs': arg.nargs,
+ 'help': arg.help}
+ else:
+ subparser = argparse.ArgumentParser(**parser_args)
+
+ for arg in handler.arguments:
+ # Remove our group keyword; it's not needed here.
+ group_name = arg[1].get('group')
+ if group_name:
+ del arg[1]['group']
+
+ if arg[1].get('nargs') == argparse.REMAINDER:
+ # parse_known_args expects all argparse.REMAINDER ('...')
+ # arguments to be all stuck together. Instead, we want them to
+ # pick any extra argument, wherever they are.
+ # Assume a limited CommandArgument for those arguments.
+ assert len(arg[0]) == 1
+ assert all(k in ('default', 'nargs', 'help') for k in arg[1])
+ remainder = arg
+ else:
+ subparser.add_argument(*arg[0], **arg[1])
+
+ # We define the command information on the main parser result so as to
+ # not interfere with arguments passed to the command.
+ setattr(namespace, 'mach_handler', handler)
+ setattr(namespace, 'command', command)
+ setattr(namespace, 'subcommand', subcommand)
+
+ command_namespace, extra = subparser.parse_known_args(args)
+ setattr(namespace, 'command_args', command_namespace)
+ if remainder:
+ (name,), options = remainder
+ # parse_known_args usefully puts all arguments after '--' in
+ # extra, but also puts '--' there. We don't want to pass it down
+ # to the command handler. Note that if multiple '--' are on the
+ # command line, only the first one is removed, so that subsequent
+ # ones are passed down.
+ if '--' in extra:
+ extra.remove('--')
+
+ # Commands with argparse.REMAINDER arguments used to force the
+ # other arguments to be '+' prefixed. If a user now passes such
+            # an argument, it will silently end up in extra. So, check if any
+ # of the allowed arguments appear in a '+' prefixed form, and error
+ # out if that's the case.
+ for args, _ in handler.arguments:
+ for arg in args:
+ arg = arg.replace('-', '+', 1)
+ if arg in extra:
+ raise UnrecognizedArgumentError(command, [arg])
+
+ if extra:
+ setattr(command_namespace, name, extra)
+ else:
+ setattr(command_namespace, name, options.get('default', []))
+ elif extra and handler.cls.__name__ != 'DeprecatedCommands':
+ raise UnrecognizedArgumentError(command, extra)
+
+ def _handle_main_help(self, parser, verbose):
+ # Since we don't need full sub-parser support for the main help output,
+ # we create groups in the ArgumentParser and populate each group with
+ # arguments corresponding to command names. This has the side-effect
+ # that argparse renders it nicely.
+ r = self._mach_registrar
+ disabled_commands = []
+
+ cats = [(k, v[2]) for k, v in r.categories.items()]
+ sorted_cats = sorted(cats, key=itemgetter(1), reverse=True)
+ for category, priority in sorted_cats:
+ group = None
+
+ for command in sorted(r.commands_by_category[category]):
+ handler = r.command_handlers[command]
+
+ # Instantiate a handler class to see if it should be filtered
+ # out for the current context or not. Condition functions can be
+ # applied to the command's decorator.
+ if handler.conditions:
+ if handler.pass_context:
+ instance = handler.cls(self._context)
+ else:
+ instance = handler.cls()
+
+ is_filtered = False
+ for c in handler.conditions:
+ if not c(instance):
+ is_filtered = True
+ break
+ if is_filtered:
+ description = handler.description
+ disabled_command = {'command': command, 'description': description}
+ disabled_commands.append(disabled_command)
+ continue
+
+ if group is None:
+ title, description, _priority = r.categories[category]
+ group = parser.add_argument_group(title, description)
+
+ description = handler.description
+ group.add_argument(command, help=description,
+ action='store_true')
+
+ if disabled_commands and 'disabled' in r.categories:
+ title, description, _priority = r.categories['disabled']
+ group = parser.add_argument_group(title, description)
+ if verbose:
+ for c in disabled_commands:
+ group.add_argument(c['command'], help=c['description'],
+ action='store_true')
+
+ parser.print_help()
+
+ def _populate_command_group(self, parser, handler, group):
+ extra_groups = {}
+ for group_name in handler.argument_group_names:
+ group_full_name = 'Command Arguments for ' + group_name
+ extra_groups[group_name] = \
+ parser.add_argument_group(group_full_name)
+
+ for arg in handler.arguments:
+ # Apply our group keyword.
+ group_name = arg[1].get('group')
+ if group_name:
+ del arg[1]['group']
+ group = extra_groups[group_name]
+ group.add_argument(*arg[0], **arg[1])
+
+ def _handle_command_help(self, parser, command):
+ handler = self._mach_registrar.command_handlers.get(command)
+
+ if not handler:
+ raise UnknownCommandError(command, 'query')
+
+ if handler.subcommand_handlers:
+ self._handle_subcommand_main_help(parser, handler)
+ return
+
+ # This code is worth explaining. Because we are doing funky things with
+ # argument registration to allow the same option in both global and
+ # command arguments, we can't simply put all arguments on the same
+ # parser instance because argparse would complain. We can't register an
+ # argparse subparser here because it won't properly show help for
+ # global arguments. So, we employ a strategy similar to command
+ # execution where we construct a 2nd, independent ArgumentParser for
+ # just the command data then supplement the main help's output with
+ # this 2nd parser's. We use a custom formatter class to ignore some of
+ # the help output.
+ parser_args = {
+ 'formatter_class': CommandFormatter,
+ 'add_help': False,
+ }
+
+ if handler.parser:
+ c_parser = handler.parser
+ c_parser.context = self._context
+ c_parser.formatter_class = NoUsageFormatter
+ # Accessing _action_groups is a bit shady. We are highly dependent
+ # on the argparse implementation not changing. We fail fast to
+ # detect upstream changes so we can intelligently react to them.
+ group = c_parser._action_groups[1]
+
+ # By default argparse adds two groups called "positional arguments"
+ # and "optional arguments". We want to rename these to reflect standard
+ # mach terminology.
+ c_parser._action_groups[0].title = 'Command Parameters'
+ c_parser._action_groups[1].title = 'Command Arguments'
+
+ if not handler.description:
+ handler.description = c_parser.description
+ c_parser.description = None
+ else:
+ c_parser = argparse.ArgumentParser(**parser_args)
+ group = c_parser.add_argument_group('Command Arguments')
+
+ self._populate_command_group(c_parser, handler, group)
+
+ # Set the long help of the command to the docstring (if present) or
+ # the command decorator description argument (if present).
+ if handler.docstring:
+ parser.description = format_docstring(handler.docstring)
+ elif handler.description:
+ parser.description = handler.description
+
+ parser.usage = '%(prog)s [global arguments] ' + command + \
+ ' [command arguments]'
+
+ # This is needed to preserve line endings in the description field,
+ # which may be populated from a docstring.
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.print_help()
+ print('')
+ c_parser.print_help()
+
+ def _handle_subcommand_main_help(self, parser, handler):
+ parser.usage = '%(prog)s [global arguments] ' + handler.name + \
+ ' subcommand [subcommand arguments]'
+ group = parser.add_argument_group('Sub Commands')
+
+        for subcommand, subhandler in sorted(handler.subcommand_handlers.items()):
+ group.add_argument(subcommand, help=subhandler.description,
+ action='store_true')
+
+ if handler.docstring:
+ parser.description = format_docstring(handler.docstring)
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+
+ parser.print_help()
+
+ def _handle_subcommand_help(self, parser, command, subcommand, handler):
+ parser.usage = '%(prog)s [global arguments] ' + command + \
+ ' ' + subcommand + ' [command arguments]'
+
+ c_parser = argparse.ArgumentParser(add_help=False,
+ formatter_class=CommandFormatter)
+ group = c_parser.add_argument_group('Sub Command Arguments')
+ self._populate_command_group(c_parser, handler, group)
+
+ if handler.docstring:
+ parser.description = format_docstring(handler.docstring)
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+
+ parser.print_help()
+ print('')
+ c_parser.print_help()
+
+ def _suggest_command(self, command):
+ # Make sure we don't suggest any deprecated commands.
+ names = [h.name for h in self._mach_registrar.command_handlers.values()
+ if h.cls.__name__ != 'DeprecatedCommands']
+ # We first try to look for a valid command that is very similar to the given command.
+ suggested_commands = difflib.get_close_matches(command, names, cutoff=0.8)
+ # If we find more than one matching command, or no command at all,
+ # we give command suggestions instead (with a lower matching threshold).
+ # All commands that start with the given command (for instance:
+ # 'mochitest-plain', 'mochitest-chrome', etc. for 'mochitest-')
+ # are also included.
+ if len(suggested_commands) != 1:
+ suggested_commands = set(difflib.get_close_matches(command, names, cutoff=0.5))
+ suggested_commands |= {cmd for cmd in names if cmd.startswith(command)}
+ raise UnknownCommandError(command, 'run', suggested_commands)
+ sys.stderr.write("We're assuming the '%s' command is '%s' and we're "
+ "executing it for you.\n\n" % (command, suggested_commands[0]))
+ return suggested_commands[0]
+
+
+class NoUsageFormatter(argparse.HelpFormatter):
+ def _format_usage(self, *args, **kwargs):
+ return ""
+
+
+def format_docstring(docstring):
+ """Format a raw docstring into something suitable for presentation.
+
+ This function is based on the example function in PEP-0257.
+ """
+ if not docstring:
+ return ''
+ lines = docstring.expandtabs().splitlines()
+    indent = sys.maxsize
+ for line in lines[1:]:
+ stripped = line.lstrip()
+ if stripped:
+ indent = min(indent, len(line) - len(stripped))
+ trimmed = [lines[0].strip()]
+    if indent < sys.maxsize:
+ for line in lines[1:]:
+ trimmed.append(line[indent:].rstrip())
+ while trimmed and not trimmed[-1]:
+ trimmed.pop()
+ while trimmed and not trimmed[0]:
+ trimmed.pop(0)
+ return '\n'.join(trimmed)
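+
+# Illustrative behavior (assumed example, not from the original source):
+# common leading indentation after the first line is stripped and surrounding
+# blank lines are dropped, so
+#     format_docstring('Do.\n\n        Detail.\n    ')
+# returns 'Do.\n\nDetail.'.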
diff --git a/python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo b/python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo
new file mode 100644
index 000000000..663180841
--- /dev/null
+++ b/python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo
Binary files differ
diff --git a/python/mach/mach/locale/en_US/LC_MESSAGES/alias.po b/python/mach/mach/locale/en_US/LC_MESSAGES/alias.po
new file mode 100644
index 000000000..8ea0e2f86
--- /dev/null
+++ b/python/mach/mach/locale/en_US/LC_MESSAGES/alias.po
@@ -0,0 +1,9 @@
+#
+msgid ""
+msgstr ""
+
+msgid "alias.*.short"
+msgstr "Create a command alias"
+
+msgid "alias.*.full"
+msgstr "Create a command alias of the form `<alias> = <command> <args>`."
diff --git a/python/mach/mach/logging.py b/python/mach/mach/logging.py
new file mode 100644
index 000000000..729e6cb3d
--- /dev/null
+++ b/python/mach/mach/logging.py
@@ -0,0 +1,256 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains logging functionality for mach. It essentially provides
+# support for a structured logging framework built on top of Python's built-in
+# logging framework.
+
+from __future__ import absolute_import, unicode_literals
+
+try:
+ import blessings
+except ImportError:
+ blessings = None
+
+import json
+import logging
+import sys
+import time
+
+
+def format_seconds(total):
+ """Format number of seconds to MM:SS.DD form."""
+
+ minutes, seconds = divmod(total, 60)
+
+ return '%2d:%05.2f' % (minutes, seconds)
+
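+# e.g. format_seconds(83.5) -> ' 1:23.50' (illustrative value).
+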
+
+class ConvertToStructuredFilter(logging.Filter):
+ """Filter that converts unstructured records into structured ones."""
+ def filter(self, record):
+ if hasattr(record, 'action') and hasattr(record, 'params'):
+ return True
+
+ record.action = 'unstructured'
+ record.params = {'msg': record.getMessage()}
+ record.msg = '{msg}'
+
+ return True
+
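+# For example, a plain logging.info('hello') record passes through with
+# action='unstructured', params={'msg': 'hello'} and msg='{msg}', so the
+# structured formatters below can render it like any other event.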
+
+class StructuredJSONFormatter(logging.Formatter):
+ """Log formatter that writes a structured JSON entry."""
+
+ def format(self, record):
+ action = getattr(record, 'action', 'UNKNOWN')
+ params = getattr(record, 'params', {})
+
+ return json.dumps([record.created, action, params])
+
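+# A rendered line looks like (values illustrative):
+#
+#     [1469484815.73, "unstructured", {"msg": "hello"}]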
+
+class StructuredHumanFormatter(logging.Formatter):
+ """Log formatter that writes structured messages for humans.
+
+ It is important that this formatter never be added to a logger that
+ produces unstructured/classic log messages. If it is, the call to format()
+ could fail because the string could contain things (like JSON) that look
+ like formatting character sequences.
+
+ Because of this limitation, format() will fail with a KeyError if an
+ unstructured record is passed or if the structured message is malformed.
+ """
+ def __init__(self, start_time, write_interval=False, write_times=True):
+ self.start_time = start_time
+ self.write_interval = write_interval
+ self.write_times = write_times
+ self.last_time = None
+
+ def format(self, record):
+ f = record.msg.format(**record.params)
+
+ if not self.write_times:
+ return f
+
+ elapsed = self._time(record)
+
+ return '%s %s' % (format_seconds(elapsed), f)
+
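+    # _time returns seconds elapsed since logging started or, when
+    # write_interval is set, the gap since the previous record (so two
+    # messages 0.25s apart would be prefixed ' 0:00.25').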
+ def _time(self, record):
+ t = record.created - self.start_time
+
+ if self.write_interval and self.last_time is not None:
+ t = record.created - self.last_time
+
+ self.last_time = record.created
+
+ return t
+
+
+class StructuredTerminalFormatter(StructuredHumanFormatter):
+ """Log formatter for structured messages writing to a terminal."""
+
+ def set_terminal(self, terminal):
+ self.terminal = terminal
+
+ def format(self, record):
+ f = record.msg.format(**record.params)
+
+ if not self.write_times:
+ return f
+
+ t = self.terminal.blue(format_seconds(self._time(record)))
+
+ return '%s %s' % (t, self._colorize(f))
+
+ def _colorize(self, s):
+ if not self.terminal:
+ return s
+
+ result = s
+
+ reftest = s.startswith('REFTEST ')
+ if reftest:
+ s = s[8:]
+
+ if s.startswith('TEST-PASS'):
+ result = self.terminal.green(s[0:9]) + s[9:]
+ elif s.startswith('TEST-UNEXPECTED'):
+ result = self.terminal.red(s[0:20]) + s[20:]
+ elif s.startswith('TEST-START'):
+ result = self.terminal.yellow(s[0:10]) + s[10:]
+ elif s.startswith('TEST-INFO'):
+ result = self.terminal.yellow(s[0:9]) + s[9:]
+
+ if reftest:
+ result = 'REFTEST ' + result
+
+ return result
+
+
+class LoggingManager(object):
+ """Holds and controls global logging state.
+
+ An application should instantiate one of these and configure it as needed.
+
+ This class provides a mechanism to configure the output of logging data
+ both from mach and from the overall logging system (e.g. from other
+ modules).
+ """
+
+ def __init__(self):
+ self.start_time = time.time()
+
+ self.json_handlers = []
+ self.terminal_handler = None
+ self.terminal_formatter = None
+
+ self.root_logger = logging.getLogger()
+ self.root_logger.setLevel(logging.DEBUG)
+
+ # Installing NullHandler on the root logger ensures that *all* log
+ # messages have at least one handler. This prevents Python from
+ # complaining about "no handlers could be found for logger XXX."
+ self.root_logger.addHandler(logging.NullHandler())
+
+ self.mach_logger = logging.getLogger('mach')
+ self.mach_logger.setLevel(logging.DEBUG)
+
+ self.structured_filter = ConvertToStructuredFilter()
+
+ self.structured_loggers = [self.mach_logger]
+
+ self._terminal = None
+
+ @property
+ def terminal(self):
+ if not self._terminal and blessings:
+ # Sometimes blessings fails to set up the terminal. In that case,
+ # silently fail.
+ try:
+ terminal = blessings.Terminal(stream=sys.stdout)
+
+ if terminal.is_a_tty:
+ self._terminal = terminal
+ except Exception:
+ pass
+
+ return self._terminal
+
+ def add_json_handler(self, fh):
+ """Enable JSON logging on the specified file object."""
+
+ # Configure the consumer of structured messages.
+ handler = logging.StreamHandler(stream=fh)
+ handler.setFormatter(StructuredJSONFormatter())
+ handler.setLevel(logging.DEBUG)
+
+ # And hook it up.
+ for logger in self.structured_loggers:
+ logger.addHandler(handler)
+
+ self.json_handlers.append(handler)
+
+ def add_terminal_logging(self, fh=sys.stdout, level=logging.INFO,
+ write_interval=False, write_times=True):
+ """Enable logging to the terminal."""
+
+ formatter = StructuredHumanFormatter(self.start_time,
+ write_interval=write_interval, write_times=write_times)
+
+ if self.terminal:
+ formatter = StructuredTerminalFormatter(self.start_time,
+ write_interval=write_interval, write_times=write_times)
+ formatter.set_terminal(self.terminal)
+
+ handler = logging.StreamHandler(stream=fh)
+ handler.setFormatter(formatter)
+ handler.setLevel(level)
+
+ for logger in self.structured_loggers:
+ logger.addHandler(handler)
+
+ self.terminal_handler = handler
+ self.terminal_formatter = formatter
+
+ def replace_terminal_handler(self, handler):
+ """Replace the installed terminal handler.
+
+ Returns the old handler or None if none was configured.
+ If the new handler is None, removes any existing handler and disables
+ logging to the terminal.
+ """
+ old = self.terminal_handler
+
+ if old:
+ for logger in self.structured_loggers:
+ logger.removeHandler(old)
+
+ if handler:
+ for logger in self.structured_loggers:
+ logger.addHandler(handler)
+
+ self.terminal_handler = handler
+
+ return old
+
+ def enable_unstructured(self):
+ """Enable logging of unstructured messages."""
+ if self.terminal_handler:
+ self.terminal_handler.addFilter(self.structured_filter)
+ self.root_logger.addHandler(self.terminal_handler)
+
+ def disable_unstructured(self):
+ """Disable logging of unstructured messages."""
+ if self.terminal_handler:
+ self.terminal_handler.removeFilter(self.structured_filter)
+ self.root_logger.removeHandler(self.terminal_handler)
+
+ def register_structured_logger(self, logger):
+ """Register a structured logger.
+
+ This needs to be called for all structured loggers that don't chain up
+ to the mach logger in order for their output to be captured.
+ """
+ self.structured_loggers.append(logger)
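+
+
+# Minimal wiring sketch (assumed usage; mirrors how mach drives this class):
+#
+#     manager = LoggingManager()
+#     manager.add_terminal_logging()
+#     logger = logging.getLogger('mach.example')  # chains up to 'mach'
+#     logger.info('doing {thing}',
+#         extra={'action': 'example', 'params': {'thing': 'work'}})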
diff --git a/python/mach/mach/main.py b/python/mach/mach/main.py
new file mode 100644
index 000000000..dec3d7dfd
--- /dev/null
+++ b/python/mach/mach/main.py
@@ -0,0 +1,594 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for the command-line build tool
+# (mach). It is packaged as a module because everything is a library.
+
+from __future__ import absolute_import, print_function, unicode_literals
+from collections import Iterable
+
+import argparse
+import codecs
+import imp
+import logging
+import os
+import sys
+import traceback
+import uuid
+
+from .base import (
+ CommandContext,
+ MachError,
+ NoCommandError,
+ UnknownCommandError,
+ UnrecognizedArgumentError,
+)
+
+from .decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from .config import ConfigSettings
+from .dispatcher import CommandAction
+from .logging import LoggingManager
+from .registrar import Registrar
+
+
+MACH_ERROR = r'''
+The error occurred in mach itself. This is likely a bug in mach or a
+fundamental problem with a loaded module.
+
+Please consider filing a bug against mach by going to the URL:
+
+ https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=mach
+
+'''.lstrip()
+
+ERROR_FOOTER = r'''
+If filing a bug, please include the full output of mach, including this error
+message.
+
+The details of the failure are as follows:
+'''.lstrip()
+
+COMMAND_ERROR = r'''
+The error occurred in the implementation of the invoked mach command.
+
+This should never occur and is likely a bug in the implementation of that
+command. Consider filing a bug for this issue.
+'''.lstrip()
+
+MODULE_ERROR = r'''
+The error occurred in code that was called by the mach command. This is either
+a bug in the called code itself or in the way that mach is calling it.
+
+You should consider filing a bug for this issue.
+'''.lstrip()
+
+NO_COMMAND_ERROR = r'''
+It looks like you tried to run mach without a command.
+
+Run |mach help| to show a list of commands.
+'''.lstrip()
+
+UNKNOWN_COMMAND_ERROR = r'''
+It looks like you are trying to %s an unknown mach command: %s
+%s
+Run |mach help| to show a list of commands.
+'''.lstrip()
+
+SUGGESTED_COMMANDS_MESSAGE = r'''
+Did you want to %s any of these commands instead: %s?
+'''
+
+UNRECOGNIZED_ARGUMENT_ERROR = r'''
+It looks like you passed an unrecognized argument into mach.
+
+The %s command does not accept the arguments: %s
+'''.lstrip()
+
+INVALID_ENTRY_POINT = r'''
+Entry points should return a list of command providers or directories
+containing command providers. The following entry point is invalid:
+
+ %s
+
+You are seeing this because there is an error in an external module attempting
+to implement a mach command. Please fix the error, or uninstall the module from
+your system.
+'''.lstrip()
+
+class ArgumentParser(argparse.ArgumentParser):
+    """Custom argument parser implementation to make things look pretty."""
+
+ def error(self, message):
+ """Custom error reporter to give more helpful text on bad commands."""
+ if not message.startswith('argument command: invalid choice'):
+ argparse.ArgumentParser.error(self, message)
+ assert False
+
+ print('Invalid command specified. The list of commands is below.\n')
+ self.print_help()
+ sys.exit(1)
+
+ def format_help(self):
+ text = argparse.ArgumentParser.format_help(self)
+
+        # Strip out the silly command list that would precede the pretty list.
+ #
+ # Commands:
+ # {foo,bar}
+ # foo Do foo.
+ # bar Do bar.
+ search = 'Commands:\n {'
+ start = text.find(search)
+
+ if start != -1:
+ end = text.find('}\n', start)
+ assert end != -1
+
+ real_start = start + len('Commands:\n')
+ real_end = end + len('}\n')
+
+ text = text[0:real_start] + text[real_end:]
+
+ return text
+
+
+class ContextWrapper(object):
+ def __init__(self, context, handler):
+ object.__setattr__(self, '_context', context)
+ object.__setattr__(self, '_handler', handler)
+
+ def __getattribute__(self, key):
+ try:
+ return getattr(object.__getattribute__(self, '_context'), key)
+ except AttributeError as e:
+ try:
+ ret = object.__getattribute__(self, '_handler')(self, key)
+ except (AttributeError, TypeError):
+ # TypeError is in case the handler comes from old code not
+ # taking a key argument.
+ raise e
+ setattr(self, key, ret)
+ return ret
+
+ def __setattr__(self, key, value):
+ setattr(object.__getattribute__(self, '_context'), key, value)
+
+
+@CommandProvider
+class Mach(object):
+ """Main mach driver type.
+
+ This type is responsible for holding global mach state and dispatching
+ a command from arguments.
+
+ The following attributes may be assigned to the instance to influence
+ behavior:
+
+ populate_context_handler -- If defined, it must be a callable. The
+ callable signature is the following:
+ populate_context_handler(context, key=None)
+ It acts as a fallback getter for the mach.base.CommandContext
+ instance.
+        This allows augmenting the context instance with arbitrary data
+ for use in command handlers.
+ For backwards compatibility, it is also called before command
+ dispatch without a key, allowing the context handler to add
+ attributes to the context instance.
+
+ require_conditions -- If True, commands that do not have any condition
+ functions applied will be skipped. Defaults to False.
+
+ settings_paths -- A list of files or directories in which to search
+ for settings files to load.
+
+ """
+
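+    # A populate_context_handler might look like this (illustrative; the
+    # 'topdir' key is invented for the example):
+    #
+    #     def populate_context(context, key=None):
+    #         if key is None:
+    #             return  # legacy pre-dispatch call
+    #         if key == 'topdir':
+    #             return '/path/to/source'
+    #         raise AttributeError(key)
+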
+ USAGE = """%(prog)s [global arguments] command [command arguments]
+
+mach (German for "do") is the main interface to the Mozilla build system and
+common developer tasks.
+
+You tell mach the command you want to perform and it does it for you.
+
+Some common commands are:
+
+ %(prog)s build Build/compile the source tree.
+ %(prog)s help Show full help, including the list of all commands.
+
+To see more help for a specific command, run:
+
+ %(prog)s help <command>
+"""
+
+ def __init__(self, cwd):
+ assert os.path.isdir(cwd)
+
+ self.cwd = cwd
+ self.log_manager = LoggingManager()
+ self.logger = logging.getLogger(__name__)
+ self.settings = ConfigSettings()
+ self.settings_paths = []
+
+ if 'MACHRC' in os.environ:
+ self.settings_paths.append(os.environ['MACHRC'])
+
+ self.log_manager.register_structured_logger(self.logger)
+ self.global_arguments = []
+ self.populate_context_handler = None
+
+ def add_global_argument(self, *args, **kwargs):
+ """Register a global argument with the argument parser.
+
+ Arguments are proxied to ArgumentParser.add_argument()
+ """
+
+ self.global_arguments.append((args, kwargs))
+
+ def load_commands_from_directory(self, path):
+ """Scan for mach commands from modules in a directory.
+
+ This takes a path to a directory, loads the .py files in it, and
+        registers any found mach command providers with this mach instance.
+ """
+ for f in sorted(os.listdir(path)):
+ if not f.endswith('.py') or f == '__init__.py':
+ continue
+
+ full_path = os.path.join(path, f)
+ module_name = 'mach.commands.%s' % f[0:-3]
+
+ self.load_commands_from_file(full_path, module_name=module_name)
+
+ def load_commands_from_file(self, path, module_name=None):
+ """Scan for mach commands from a file.
+
+ This takes a path to a file and loads it as a Python module under the
+ module name specified. If no name is specified, a random one will be
+ chosen.
+ """
+ if module_name is None:
+ # Ensure parent module is present otherwise we'll (likely) get
+ # an error due to unknown parent.
+ if b'mach.commands' not in sys.modules:
+ mod = imp.new_module(b'mach.commands')
+ sys.modules[b'mach.commands'] = mod
+
+ module_name = 'mach.commands.%s' % uuid.uuid1().get_hex()
+
+ imp.load_source(module_name, path)
+
+    def load_commands_from_entry_point(self, group='mach.providers'):
+        """Scan installed packages for mach command provider entry points.
+
+        An entry point is a function that returns a list of paths to files
+        or directories containing command providers.
+
+ This takes an optional group argument which specifies the entry point
+ group to use. If not specified, it defaults to 'mach.providers'.
+ """
+ try:
+ import pkg_resources
+ except ImportError:
+ print("Could not find setuptools, ignoring command entry points",
+ file=sys.stderr)
+ return
+
+ for entry in pkg_resources.iter_entry_points(group=group, name=None):
+ paths = entry.load()()
+ if not isinstance(paths, Iterable):
+ print(INVALID_ENTRY_POINT % entry)
+ sys.exit(1)
+
+ for path in paths:
+ if os.path.isfile(path):
+ self.load_commands_from_file(path)
+ elif os.path.isdir(path):
+ self.load_commands_from_directory(path)
+ else:
+ print("command provider '%s' does not exist" % path)
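+
+    # An external package can expose providers through a setuptools entry
+    # point in its setup.py (names here are illustrative):
+    #
+    #     entry_points={
+    #         'mach.providers': ['example = example.commands:provider_paths'],
+    #     }
+    #
+    # where provider_paths() returns a list of files or directories.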
+
+ def define_category(self, name, title, description, priority=50):
+ """Provide a description for a named command category."""
+
+ Registrar.register_category(name, title, description, priority)
+
+ @property
+ def require_conditions(self):
+ return Registrar.require_conditions
+
+ @require_conditions.setter
+ def require_conditions(self, value):
+ Registrar.require_conditions = value
+
+ def run(self, argv, stdin=None, stdout=None, stderr=None):
+ """Runs mach with arguments provided from the command line.
+
+ Returns the integer exit code that should be used. 0 means success. All
+ other values indicate failure.
+ """
+
+ # If no encoding is defined, we default to UTF-8 because without this
+ # Python 2.7 will assume the default encoding of ASCII. This will blow
+ # up with UnicodeEncodeError as soon as it encounters a non-ASCII
+ # character in a unicode instance. We simply install a wrapper around
+ # the streams and restore once we have finished.
+ stdin = sys.stdin if stdin is None else stdin
+ stdout = sys.stdout if stdout is None else stdout
+ stderr = sys.stderr if stderr is None else stderr
+
+ orig_stdin = sys.stdin
+ orig_stdout = sys.stdout
+ orig_stderr = sys.stderr
+
+ sys.stdin = stdin
+ sys.stdout = stdout
+ sys.stderr = stderr
+
+ orig_env = dict(os.environ)
+
+ try:
+ if stdin.encoding is None:
+ sys.stdin = codecs.getreader('utf-8')(stdin)
+
+ if stdout.encoding is None:
+ sys.stdout = codecs.getwriter('utf-8')(stdout)
+
+ if stderr.encoding is None:
+ sys.stderr = codecs.getwriter('utf-8')(stderr)
+
+ # Allow invoked processes (which may not have a handle on the
+ # original stdout file descriptor) to know if the original stdout
+ # is a TTY. This provides a mechanism to allow said processes to
+            # enable emitting color codes, for example.
+ if os.isatty(orig_stdout.fileno()):
+ os.environ[b'MACH_STDOUT_ISATTY'] = b'1'
+
+ return self._run(argv)
+ except KeyboardInterrupt:
+ print('mach interrupted by signal or user action. Stopping.')
+ return 1
+
+ except Exception as e:
+ # _run swallows exceptions in invoked handlers and converts them to
+ # a proper exit code. So, the only scenario where we should get an
+ # exception here is if _run itself raises. If _run raises, that's a
+ # bug in mach (or a loaded command module being silly) and thus
+ # should be reported differently.
+ self._print_error_header(argv, sys.stdout)
+ print(MACH_ERROR)
+
+ exc_type, exc_value, exc_tb = sys.exc_info()
+ stack = traceback.extract_tb(exc_tb)
+
+ self._print_exception(sys.stdout, exc_type, exc_value, stack)
+
+ return 1
+
+ finally:
+ os.environ.clear()
+ os.environ.update(orig_env)
+
+ sys.stdin = orig_stdin
+ sys.stdout = orig_stdout
+ sys.stderr = orig_stderr
+
+ def _run(self, argv):
+ # Load settings as early as possible so things in dispatcher.py
+ # can use them.
+ for provider in Registrar.settings_providers:
+ self.settings.register_provider(provider)
+ self.load_settings(self.settings_paths)
+
+ context = CommandContext(cwd=self.cwd,
+ settings=self.settings, log_manager=self.log_manager,
+ commands=Registrar)
+
+ if self.populate_context_handler:
+ self.populate_context_handler(context)
+ context = ContextWrapper(context, self.populate_context_handler)
+
+ parser = self.get_argument_parser(context)
+
+ if not len(argv):
+ # We don't register the usage until here because if it is globally
+ # registered, argparse always prints it. This is not desired when
+ # running with --help.
+ parser.usage = Mach.USAGE
+ parser.print_usage()
+ return 0
+
+ try:
+ args = parser.parse_args(argv)
+ except NoCommandError:
+ print(NO_COMMAND_ERROR)
+ return 1
+ except UnknownCommandError as e:
+            if e.suggested_commands:
+                suggestion_message = SUGGESTED_COMMANDS_MESSAGE % (
+                    e.verb, ', '.join(e.suggested_commands))
+            else:
+                suggestion_message = ''
+ print(UNKNOWN_COMMAND_ERROR % (e.verb, e.command, suggestion_message))
+ return 1
+ except UnrecognizedArgumentError as e:
+ print(UNRECOGNIZED_ARGUMENT_ERROR % (e.command,
+ ' '.join(e.arguments)))
+ return 1
+
+ # Add JSON logging to a file if requested.
+ if args.logfile:
+ self.log_manager.add_json_handler(args.logfile)
+
+ # Up the logging level if requested.
+ log_level = logging.INFO
+ if args.verbose:
+ log_level = logging.DEBUG
+
+ self.log_manager.register_structured_logger(logging.getLogger('mach'))
+
+ write_times = True
+ if args.log_no_times or 'MACH_NO_WRITE_TIMES' in os.environ:
+ write_times = False
+
+        # Always enable terminal logging. The log manager figures out whether
+        # stdout is a TTY or a pipe and does the right thing.
+ self.log_manager.add_terminal_logging(level=log_level,
+ write_interval=args.log_interval, write_times=write_times)
+
+ if args.settings_file:
+ # Argument parsing has already happened, so settings that apply
+            # to command line handling (e.g. alias, defaults) will be ignored.
+ self.load_settings(args.settings_file)
+
+ if not hasattr(args, 'mach_handler'):
+ raise MachError('ArgumentParser result missing mach handler info.')
+
+ handler = getattr(args, 'mach_handler')
+
+ try:
+ return Registrar._run_command_handler(handler, context=context,
+ debug_command=args.debug_command, **vars(args.command_args))
+ except KeyboardInterrupt as ki:
+ raise ki
+ except Exception as e:
+ exc_type, exc_value, exc_tb = sys.exc_info()
+
+ # The first two frames are us and are never used.
+ stack = traceback.extract_tb(exc_tb)[2:]
+
+ # If we have nothing on the stack, the exception was raised as part
+ # of calling the @Command method itself. This likely means a
+ # mismatch between @CommandArgument and arguments to the method.
+ # e.g. there exists a @CommandArgument without the corresponding
+ # argument on the method. We handle that here until the module
+ # loader grows the ability to validate better.
+ if not len(stack):
+ print(COMMAND_ERROR)
+ self._print_exception(sys.stdout, exc_type, exc_value,
+ traceback.extract_tb(exc_tb))
+ return 1
+
+ # Split the frames into those from the module containing the
+ # command and everything else.
+ command_frames = []
+ other_frames = []
+
+ initial_file = stack[0][0]
+
+ for frame in stack:
+ if frame[0] == initial_file:
+ command_frames.append(frame)
+ else:
+ other_frames.append(frame)
+
+ # If the exception was in the module providing the command, it's
+ # likely the bug is in the mach command module, not something else.
+ # If there are other frames, the bug is likely not the mach
+ # command's fault.
+ self._print_error_header(argv, sys.stdout)
+
+ if len(other_frames):
+ print(MODULE_ERROR)
+ else:
+ print(COMMAND_ERROR)
+
+ self._print_exception(sys.stdout, exc_type, exc_value, stack)
+
+ return 1
+
+ def log(self, level, action, params, format_str):
+ """Helper method to record a structured log event."""
+ self.logger.log(level, format_str,
+ extra={'action': action, 'params': params})
+
+ def _print_error_header(self, argv, fh):
+ fh.write('Error running mach:\n\n')
+ fh.write(' ')
+ fh.write(repr(argv))
+ fh.write('\n\n')
+
+ def _print_exception(self, fh, exc_type, exc_value, stack):
+ fh.write(ERROR_FOOTER)
+ fh.write('\n')
+
+ for l in traceback.format_exception_only(exc_type, exc_value):
+ fh.write(l)
+
+ fh.write('\n')
+ for l in traceback.format_list(stack):
+ fh.write(l)
+
+ def load_settings(self, paths):
+ """Load the specified settings files.
+
+ If a directory is specified, the following basenames will be
+ searched for in this order:
+
+ machrc, .machrc
+ """
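+        # A typical settings file might contain (contents illustrative; the
+        # [alias] section matches what the dispatcher tests exercise):
+        #
+        #     [alias]
+        #     unittest = test --flavor unit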
+ if isinstance(paths, basestring):
+ paths = [paths]
+
+ valid_names = ('machrc', '.machrc')
+ def find_in_dir(base):
+ if os.path.isfile(base):
+ return base
+
+ for name in valid_names:
+ path = os.path.join(base, name)
+ if os.path.isfile(path):
+ return path
+
+        # Honor the paths we were given (which may be the instance's
+        # settings_paths or an explicitly passed --settings file).
+        files = map(find_in_dir, paths)
+ files = filter(bool, files)
+
+ self.settings.load_files(files)
+
+ def get_argument_parser(self, context):
+ """Returns an argument parser for the command-line interface."""
+
+ parser = ArgumentParser(add_help=False,
+ usage='%(prog)s [global arguments] command [command arguments]')
+
+ # Order is important here as it dictates the order the auto-generated
+ # help messages are printed.
+ global_group = parser.add_argument_group('Global Arguments')
+
+ global_group.add_argument('-v', '--verbose', dest='verbose',
+ action='store_true', default=False,
+ help='Print verbose output.')
+ global_group.add_argument('-l', '--log-file', dest='logfile',
+ metavar='FILENAME', type=argparse.FileType('ab'),
+ help='Filename to write log data to.')
+ global_group.add_argument('--log-interval', dest='log_interval',
+ action='store_true', default=False,
+ help='Prefix log line with interval from last message rather '
+ 'than relative time. Note that this is NOT execution time '
+ 'if there are parallel operations.')
+ suppress_log_by_default = False
+ if 'INSIDE_EMACS' in os.environ:
+ suppress_log_by_default = True
+ global_group.add_argument('--log-no-times', dest='log_no_times',
+ action='store_true', default=suppress_log_by_default,
+ help='Do not prefix log lines with times. By default, mach will '
+ 'prefix each output line with the time since command start.')
+ global_group.add_argument('-h', '--help', dest='help',
+ action='store_true', default=False,
+ help='Show this help message.')
+ global_group.add_argument('--debug-command', action='store_true',
+ help='Start a Python debugger when command is dispatched.')
+ global_group.add_argument('--settings', dest='settings_file',
+ metavar='FILENAME', default=None,
+ help='Path to settings file.')
+
+ for args, kwargs in self.global_arguments:
+ global_group.add_argument(*args, **kwargs)
+
+ # We need to be last because CommandAction swallows all remaining
+ # arguments and argparse parses arguments in the order they were added.
+ parser.add_argument('command', action=CommandAction,
+ registrar=Registrar, context=context)
+
+ return parser
diff --git a/python/mach/mach/mixin/__init__.py b/python/mach/mach/mixin/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mach/mach/mixin/__init__.py
diff --git a/python/mach/mach/mixin/logging.py b/python/mach/mach/mixin/logging.py
new file mode 100644
index 000000000..5c37b54f1
--- /dev/null
+++ b/python/mach/mach/mixin/logging.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import logging
+
+
+class LoggingMixin(object):
+ """Provides functionality to control logging."""
+
+ def populate_logger(self, name=None):
+ """Ensure this class instance has a logger associated with it.
+
+ Users of this mixin that call log() will need to ensure self._logger is
+ a logging.Logger instance before they call log(). This function ensures
+ self._logger is defined by populating it if it isn't.
+ """
+ if hasattr(self, '_logger'):
+ return
+
+ if name is None:
+ name = '.'.join([self.__module__, self.__class__.__name__])
+
+ self._logger = logging.getLogger(name)
+
+ def log(self, level, action, params, format_str):
+ """Log a structured log event.
+
+ A structured log event consists of a logging level, a string action, a
+ dictionary of attributes, and a formatting string.
+
+ The logging level is one of the logging.* constants, such as
+ logging.INFO.
+
+ The action string is essentially the enumeration of the event. Each
+ different type of logged event should have a different action.
+
+ The params dict is the metadata constituting the logged event.
+
+ The formatting string is used to convert the structured message back to
+ human-readable format. Conversion back to human-readable form is
+ performed by calling format() on this string, feeding into it the dict
+ of attributes constituting the event.
+
+ Example Usage
+ -------------
+
+ self.log(logging.DEBUG, 'login', {'username': 'johndoe'},
+ 'User login: {username}')
+ """
+ self._logger.log(level, format_str,
+ extra={'action': action, 'params': params})
+
diff --git a/python/mach/mach/mixin/process.py b/python/mach/mach/mixin/process.py
new file mode 100644
index 000000000..a6d3a2a1b
--- /dev/null
+++ b/python/mach/mach/mixin/process.py
@@ -0,0 +1,175 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides mixins to perform process execution.
+
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import subprocess
+import sys
+
+from mozprocess.processhandler import ProcessHandlerMixin
+
+from .logging import LoggingMixin
+
+
+# Detect the operating system environment. This is used by command execution.
+# We only do this once to avoid redundant work. Yes, this can fail module
+# loading. That is arguably OK.
+if 'SHELL' in os.environ:
+ _current_shell = os.environ['SHELL']
+elif 'MOZILLABUILD' in os.environ:
+ _current_shell = os.environ['MOZILLABUILD'] + '/msys/bin/sh.exe'
+elif 'COMSPEC' in os.environ:
+ _current_shell = os.environ['COMSPEC']
+else:
+ raise Exception('Could not detect environment shell!')
+
+_in_msys = False
+
+if os.environ.get('MSYSTEM', None) in ('MINGW32', 'MINGW64'):
+ _in_msys = True
+
+ if not _current_shell.lower().endswith('.exe'):
+ _current_shell += '.exe'
+
+
+class ProcessExecutionMixin(LoggingMixin):
+ """Mix-in that provides process execution functionality."""
+
+ def run_process(self, args=None, cwd=None, append_env=None,
+ explicit_env=None, log_name=None, log_level=logging.INFO,
+ line_handler=None, require_unix_environment=False,
+ ensure_exit_code=0, ignore_children=False, pass_thru=False):
+ """Runs a single process to completion.
+
+ Takes a list of arguments to run where the first item is the
+ executable. Runs the command in the specified directory and
+ with optional environment variables.
+
+ append_env -- Dict of environment variables to append to the current
+ set of environment variables.
+ explicit_env -- Dict of environment variables to set for the new
+ process. Any existing environment variables will be ignored.
+
+ require_unix_environment if True will ensure the command is executed
+ within a UNIX environment. Basically, if we are on Windows, it will
+ execute the command via an appropriate UNIX-like shell.
+
+ ignore_children is proxied to mozprocess's ignore_children.
+
+ ensure_exit_code is used to ensure the exit code of a process matches
+ what is expected. If it is an integer, we raise an Exception if the
+ exit code does not match this value. If it is True, we ensure the exit
+ code is 0. If it is False, we don't perform any exit code validation.
+
+ pass_thru is a special execution mode where the child process inherits
+ this process's standard file handles (stdin, stdout, stderr) as well as
+ additional file descriptors. It should be used for interactive processes
+ where buffering from mozprocess could be an issue. pass_thru does not
+ use mozprocess. Therefore, arguments like log_name, line_handler,
+ and ignore_children have no effect.
+ """
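+        # Typical call (illustrative; 'objdir' is a made-up variable):
+        #
+        #     status = self.run_process(['make', '-j4'], cwd=objdir,
+        #         log_name='make', ensure_exit_code=True)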
+ args = self._normalize_command(args, require_unix_environment)
+
+ self.log(logging.INFO, 'new_process', {'args': ' '.join(args)}, '{args}')
+
+ def handleLine(line):
+ # Converts str to unicode on Python 2 and bytes to str on Python 3.
+ if isinstance(line, bytes):
+ line = line.decode(sys.stdout.encoding or 'utf-8', 'replace')
+
+ if line_handler:
+ line_handler(line)
+
+ if not log_name:
+ return
+
+ self.log(log_level, log_name, {'line': line.rstrip()}, '{line}')
+
+ use_env = {}
+ if explicit_env:
+ use_env = explicit_env
+ else:
+ use_env.update(os.environ)
+
+ if append_env:
+ use_env.update(append_env)
+
+ self.log(logging.DEBUG, 'process', {'env': use_env}, 'Environment: {env}')
+
+ # There is a bug in subprocess where it doesn't like unicode types in
+ # environment variables. Here, ensure all unicode are converted to
+ # binary. utf-8 is our globally assumed default. If the caller doesn't
+ # want UTF-8, they shouldn't pass in a unicode instance.
+ normalized_env = {}
+ for k, v in use_env.items():
+ if isinstance(k, unicode):
+ k = k.encode('utf-8', 'strict')
+
+ if isinstance(v, unicode):
+ v = v.encode('utf-8', 'strict')
+
+ normalized_env[k] = v
+
+ use_env = normalized_env
+
+ if pass_thru:
+ proc = subprocess.Popen(args, cwd=cwd, env=use_env)
+ status = None
+ # Leave it to the subprocess to handle Ctrl+C. If it terminates as
+ # a result of Ctrl+C, proc.wait() will return a status code, and,
+ # we get out of the loop. If it doesn't, like e.g. gdb, we continue
+ # waiting.
+ while status is None:
+ try:
+ status = proc.wait()
+ except KeyboardInterrupt:
+ pass
+ else:
+ p = ProcessHandlerMixin(args, cwd=cwd, env=use_env,
+ processOutputLine=[handleLine], universal_newlines=True,
+ ignore_children=ignore_children)
+ p.run()
+ p.processOutput()
+ status = p.wait()
+
+ if ensure_exit_code is False:
+ return status
+
+ if ensure_exit_code is True:
+ ensure_exit_code = 0
+
+ if status != ensure_exit_code:
+ raise Exception('Process executed with non-0 exit code %d: %s' % (status, args))
+
+ return status
+
+ def _normalize_command(self, args, require_unix_environment):
+ """Adjust command arguments to run in the necessary environment.
+
+ This exists mainly to facilitate execution of programs requiring a *NIX
+ shell when running on Windows. The caller specifies whether a shell
+ environment is required. If it is and we are running on Windows but
+ aren't running in the UNIX-like msys environment, then we rewrite the
+ command to execute via a shell.
+ """
+ assert isinstance(args, list) and len(args)
+
+ if not require_unix_environment or not _in_msys:
+ return args
+
+ # Always munge Windows-style into Unix style for the command.
+ prog = args[0].replace('\\', '/')
+
+ # PyMake removes the C: prefix. But, things seem to work here
+ # without it. Not sure what that's about.
+
+ # We run everything through the msys shell. We need to use
+ # '-c' and pass all the arguments as one argument because that is
+ # how sh works.
+ cline = subprocess.list2cmdline([prog] + args[1:])
+ return [_current_shell, '-c', cline]
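+
+    # For illustration (assumed values): in an MSYS environment,
+    # _normalize_command(['C:\\tools\\grep.exe', '-r', 'foo'], True) yields
+    # [_current_shell, '-c', 'C:/tools/grep.exe -r foo'].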
diff --git a/python/mach/mach/registrar.py b/python/mach/mach/registrar.py
new file mode 100644
index 000000000..63c9099c0
--- /dev/null
+++ b/python/mach/mach/registrar.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+from .base import MachError
+
+INVALID_COMMAND_CONTEXT = r'''
+It looks like you tried to run a mach command from an invalid context. The %s
+command failed to meet the following conditions: %s
+
+Run |mach help| to show a list of all commands available to the current context.
+'''.lstrip()
+
+
+class MachRegistrar(object):
+ """Container for mach command and config providers."""
+
+ def __init__(self):
+ self.command_handlers = {}
+ self.commands_by_category = {}
+ self.settings_providers = set()
+ self.categories = {}
+ self.require_conditions = False
+
+ def register_command_handler(self, handler):
+ name = handler.name
+
+ if not handler.category:
+ raise MachError('Cannot register a mach command without a '
+ 'category: %s' % name)
+
+ if handler.category not in self.categories:
+ raise MachError('Cannot register a command to an undefined '
+ 'category: %s -> %s' % (name, handler.category))
+
+ self.command_handlers[name] = handler
+ self.commands_by_category[handler.category].add(name)
+
+ def register_settings_provider(self, cls):
+ self.settings_providers.add(cls)
+
+ def register_category(self, name, title, description, priority=50):
+ self.categories[name] = (title, description, priority)
+ self.commands_by_category[name] = set()
+
+ @classmethod
+ def _condition_failed_message(cls, name, conditions):
+ msg = ['\n']
+ for c in conditions:
+ part = [' %s' % c.__name__]
+ if c.__doc__ is not None:
+ part.append(c.__doc__)
+ msg.append(' - '.join(part))
+ return INVALID_COMMAND_CONTEXT % (name, '\n'.join(msg))
+
+ def _run_command_handler(self, handler, context=None, debug_command=False, **kwargs):
+ cls = handler.cls
+
+ if handler.pass_context and not context:
+ raise Exception('mach command class requires context.')
+
+ if context:
+ prerun = getattr(context, 'pre_dispatch_handler', None)
+ if prerun:
+ prerun(context, handler, args=kwargs)
+
+ if handler.pass_context:
+ instance = cls(context)
+ else:
+ instance = cls()
+
+ if handler.conditions:
+ fail_conditions = []
+ for c in handler.conditions:
+ if not c(instance):
+ fail_conditions.append(c)
+
+ if fail_conditions:
+ print(self._condition_failed_message(handler.name, fail_conditions))
+ return 1
+
+ fn = getattr(instance, handler.method)
+
+ if debug_command:
+ import pdb
+ result = pdb.runcall(fn, **kwargs)
+ else:
+ result = fn(**kwargs)
+
+ result = result or 0
+ assert isinstance(result, (int, long))
+
+ if context:
+ postrun = getattr(context, 'post_dispatch_handler', None)
+ if postrun:
+ postrun(context, handler, args=kwargs)
+
+ return result
+
+ def dispatch(self, name, context=None, argv=None, subcommand=None, **kwargs):
+ """Dispatch/run a command.
+
+ Commands can use this to call other commands.
+ """
+ handler = self.command_handlers[name]
+
+ if subcommand:
+ handler = handler.subcommand_handlers[subcommand]
+
+ if handler.parser:
+ parser = handler.parser
+
+ # save and restore existing defaults so **kwargs don't persist across
+ # subsequent invocations of Registrar.dispatch()
+ old_defaults = parser._defaults.copy()
+ parser.set_defaults(**kwargs)
+ kwargs, _ = parser.parse_known_args(argv or [])
+ kwargs = vars(kwargs)
+ parser._defaults = old_defaults
+
+ return self._run_command_handler(handler, context=context, **kwargs)
+
+
+Registrar = MachRegistrar()
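+
+# Commands can re-enter the registrar to invoke each other, e.g. (sketch;
+# 'build' is a hypothetical registered command):
+#
+#     from mach.registrar import Registrar
+#     Registrar.dispatch('build', context=context, jobs=4)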
diff --git a/python/mach/mach/terminal.py b/python/mach/mach/terminal.py
new file mode 100644
index 000000000..9115211e0
--- /dev/null
+++ b/python/mach/mach/terminal.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""This file contains code for interacting with terminals.
+
+All the terminal interaction code is consolidated so the complexity can be in
+one place, away from code that is commonly looked at.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import sys
+
+
+class LoggingHandler(logging.Handler):
+ """Custom logging handler that works with terminal window dressing.
+
+ This is alternative terminal logging handler which contains smarts for
+ emitting terminal control characters properly. Currently, it has generic
+ support for "footer" elements at the bottom of the screen. Functionality
+ can be added when needed.
+ """
+ def __init__(self):
+ logging.Handler.__init__(self)
+
+ self.fh = sys.stdout
+ self.footer = None
+
+ def flush(self):
+ self.acquire()
+
+ try:
+ self.fh.flush()
+ finally:
+ self.release()
+
+ def emit(self, record):
+ msg = self.format(record)
+
+ if self.footer:
+ self.footer.clear()
+
+ self.fh.write(msg)
+ self.fh.write('\n')
+
+ if self.footer:
+ self.footer.draw()
+
+ # If we don't flush, the footer may not get drawn.
+ self.flush()
+
+
+class TerminalFooter(object):
+ """Represents something drawn on the bottom of a terminal."""
+ def __init__(self, terminal):
+ self.t = terminal
+ self.fh = sys.stdout
+
+ def _clear_lines(self, n):
+ for i in xrange(n):
+ self.fh.write(self.t.move_x(0))
+ self.fh.write(self.t.clear_eol())
+ self.fh.write(self.t.move_up())
+
+ self.fh.write(self.t.move_down())
+ self.fh.write(self.t.move_x(0))
+
+ def clear(self):
+ raise Exception('clear() must be implemented.')
+
+ def draw(self):
+ raise Exception('draw() must be implemented.')
+
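+# Sketch of a concrete footer (hypothetical; not part of this module):
+#
+#     class StatusFooter(TerminalFooter):
+#         def clear(self):
+#             self._clear_lines(1)
+#
+#         def draw(self):
+#             self.fh.write(self.t.bold('mach: running...'))
+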
diff --git a/python/mach/mach/test/__init__.py b/python/mach/mach/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mach/mach/test/__init__.py
diff --git a/python/mach/mach/test/common.py b/python/mach/mach/test/common.py
new file mode 100644
index 000000000..f68ff5095
--- /dev/null
+++ b/python/mach/mach/test/common.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from StringIO import StringIO
+import os
+import unittest
+
+from mach.main import Mach
+from mach.base import CommandContext
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+class TestBase(unittest.TestCase):
+ provider_dir = os.path.join(here, 'providers')
+
+ def get_mach(self, provider_file=None, entry_point=None, context_handler=None):
+ m = Mach(os.getcwd())
+ m.define_category('testing', 'Mach unittest', 'Testing for mach core', 10)
+ m.populate_context_handler = context_handler
+
+ if provider_file:
+ m.load_commands_from_file(os.path.join(self.provider_dir, provider_file))
+
+ if entry_point:
+ m.load_commands_from_entry_point(entry_point)
+
+ return m
+
+ def _run_mach(self, argv, *args, **kwargs):
+ m = self.get_mach(*args, **kwargs)
+
+ stdout = StringIO()
+ stderr = StringIO()
+ stdout.encoding = 'UTF-8'
+ stderr.encoding = 'UTF-8'
+
+ try:
+ result = m.run(argv, stdout=stdout, stderr=stderr)
+ except SystemExit:
+ result = None
+
+ return (result, stdout.getvalue(), stderr.getvalue())
diff --git a/python/mach/mach/test/providers/__init__.py b/python/mach/mach/test/providers/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mach/mach/test/providers/__init__.py
diff --git a/python/mach/mach/test/providers/basic.py b/python/mach/mach/test/providers/basic.py
new file mode 100644
index 000000000..f2e64e6d6
--- /dev/null
+++ b/python/mach/mach/test/providers/basic.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import unicode_literals
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+
+@CommandProvider
+class ConditionsProvider(object):
+ @Command('cmd_foo', category='testing')
+ def run_foo(self):
+ pass
+
+ @Command('cmd_bar', category='testing')
+ @CommandArgument('--baz', action="store_true",
+ help='Run with baz')
+ def run_bar(self, baz=None):
+ pass
diff --git a/python/mach/mach/test/providers/conditions.py b/python/mach/mach/test/providers/conditions.py
new file mode 100644
index 000000000..a95429752
--- /dev/null
+++ b/python/mach/mach/test/providers/conditions.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from mach.decorators import (
+ CommandProvider,
+ Command,
+)
+
+def is_foo(cls):
+ """Foo must be true"""
+ return cls.foo
+
+def is_bar(cls):
+ """Bar must be true"""
+ return cls.bar
+
+@CommandProvider
+class ConditionsProvider(object):
+ foo = True
+ bar = False
+
+ @Command('cmd_foo', category='testing', conditions=[is_foo])
+ def run_foo(self):
+ pass
+
+ @Command('cmd_bar', category='testing', conditions=[is_bar])
+ def run_bar(self):
+ pass
+
+ @Command('cmd_foobar', category='testing', conditions=[is_foo, is_bar])
+ def run_foobar(self):
+ pass
+
+@CommandProvider
+class ConditionsContextProvider(object):
+ def __init__(self, context):
+ self.foo = context.foo
+ self.bar = context.bar
+
+ @Command('cmd_foo_ctx', category='testing', conditions=[is_foo])
+ def run_foo(self):
+ pass
+
+ @Command('cmd_bar_ctx', category='testing', conditions=[is_bar])
+ def run_bar(self):
+ pass
+
+ @Command('cmd_foobar_ctx', category='testing', conditions=[is_foo, is_bar])
+ def run_foobar(self):
+ pass
diff --git a/python/mach/mach/test/providers/conditions_invalid.py b/python/mach/mach/test/providers/conditions_invalid.py
new file mode 100644
index 000000000..22284d4dc
--- /dev/null
+++ b/python/mach/mach/test/providers/conditions_invalid.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from mach.decorators import (
+ CommandProvider,
+ Command,
+)
+
+@CommandProvider
+class ConditionsProvider(object):
+ @Command('cmd_foo', category='testing', conditions=["invalid"])
+ def run_foo(self):
+ pass
diff --git a/python/mach/mach/test/providers/throw.py b/python/mach/mach/test/providers/throw.py
new file mode 100644
index 000000000..06bee01ee
--- /dev/null
+++ b/python/mach/mach/test/providers/throw.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import time
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from mach.test.providers import throw2
+
+
+@CommandProvider
+class TestCommandProvider(object):
+ @Command('throw', category='testing')
+ @CommandArgument('--message', '-m', default='General Error')
+ def throw(self, message):
+ raise Exception(message)
+
+ @Command('throw_deep', category='testing')
+ @CommandArgument('--message', '-m', default='General Error')
+ def throw_deep(self, message):
+ throw2.throw_deep(message)
+
diff --git a/python/mach/mach/test/providers/throw2.py b/python/mach/mach/test/providers/throw2.py
new file mode 100644
index 000000000..af0a23fcf
--- /dev/null
+++ b/python/mach/mach/test/providers/throw2.py
@@ -0,0 +1,13 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file exists to trigger the differences in mach error reporting between
+# exceptions that occur in mach command modules themselves and in the things
+# they call.
+
+def throw_deep(message):
+ return throw_real(message)
+
+def throw_real(message):
+ raise Exception(message)
diff --git a/python/mach/mach/test/test_conditions.py b/python/mach/mach/test/test_conditions.py
new file mode 100644
index 000000000..20080687e
--- /dev/null
+++ b/python/mach/mach/test/test_conditions.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+
+from mach.base import MachError
+from mach.main import Mach
+from mach.registrar import Registrar
+from mach.test.common import TestBase
+
+from mozunit import main
+
+
+def _populate_context(context, key=None):
+ if key is None:
+ return
+ if key == 'foo':
+ return True
+ if key == 'bar':
+ return False
+ raise AttributeError(key)
+
+class TestConditions(TestBase):
+ """Tests for conditionally filtering commands."""
+
+ def _run_mach(self, args, context_handler=None):
+ return TestBase._run_mach(self, args, 'conditions.py',
+ context_handler=context_handler)
+
+
+ def test_conditions_pass(self):
+ """Test that a command which passes its conditions is runnable."""
+
+ self.assertEquals((0, '', ''), self._run_mach(['cmd_foo']))
+ self.assertEquals((0, '', ''), self._run_mach(['cmd_foo_ctx'], _populate_context))
+
+ def test_invalid_context_message(self):
+ """Test that commands which do not pass all their conditions
+ print the proper failure message."""
+
+ def is_bar():
+ """Bar must be true"""
+ fail_conditions = [is_bar]
+
+ for name in ('cmd_bar', 'cmd_foobar'):
+ result, stdout, stderr = self._run_mach([name])
+ self.assertEquals(1, result)
+
+ fail_msg = Registrar._condition_failed_message(name, fail_conditions)
+ self.assertEquals(fail_msg.rstrip(), stdout.rstrip())
+
+ for name in ('cmd_bar_ctx', 'cmd_foobar_ctx'):
+ result, stdout, stderr = self._run_mach([name], _populate_context)
+ self.assertEquals(1, result)
+
+ fail_msg = Registrar._condition_failed_message(name, fail_conditions)
+ self.assertEquals(fail_msg.rstrip(), stdout.rstrip())
+
+ def test_invalid_type(self):
+ """Test that a condition which is not callable raises an exception."""
+
+ m = Mach(os.getcwd())
+ m.define_category('testing', 'Mach unittest', 'Testing for mach core', 10)
+ self.assertRaises(MachError, m.load_commands_from_file,
+ os.path.join(self.provider_dir, 'conditions_invalid.py'))
+
+ def test_help_message(self):
+ """Test that commands that are not runnable do not show up in help."""
+
+ result, stdout, stderr = self._run_mach(['help'], _populate_context)
+ self.assertIn('cmd_foo', stdout)
+ self.assertNotIn('cmd_bar', stdout)
+ self.assertNotIn('cmd_foobar', stdout)
+ self.assertIn('cmd_foo_ctx', stdout)
+ self.assertNotIn('cmd_bar_ctx', stdout)
+ self.assertNotIn('cmd_foobar_ctx', stdout)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/mach/test/test_config.py b/python/mach/mach/test/test_config.py
new file mode 100644
index 000000000..d48dff67b
--- /dev/null
+++ b/python/mach/mach/test/test_config.py
@@ -0,0 +1,297 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import unicode_literals
+
+import sys
+import unittest
+
+from mozfile.mozfile import NamedTemporaryFile
+
+from mach.config import (
+ BooleanType,
+ ConfigException,
+ ConfigSettings,
+ IntegerType,
+ PathType,
+ PositiveIntegerType,
+ StringType,
+)
+from mach.decorators import SettingsProvider
+
+from mozunit import main
+
+
+if sys.version_info[0] == 3:
+ str_type = str
+else:
+ str_type = basestring
+
+CONFIG1 = r"""
+[foo]
+
+bar = bar_value
+baz = /baz/foo.c
+"""
+
+CONFIG2 = r"""
+[foo]
+
+bar = value2
+"""
+
+@SettingsProvider
+class Provider1(object):
+ config_settings = [
+ ('foo.bar', StringType),
+ ('foo.baz', PathType),
+ ]
+
+
+@SettingsProvider
+class ProviderDuplicate(object):
+ config_settings = [
+ ('dupesect.foo', StringType),
+ ('dupesect.foo', StringType),
+ ]
+
+
+@SettingsProvider
+class Provider2(object):
+ config_settings = [
+ ('a.string', StringType),
+ ('a.boolean', BooleanType),
+ ('a.pos_int', PositiveIntegerType),
+ ('a.int', IntegerType),
+ ('a.path', PathType),
+ ]
+
+
+@SettingsProvider
+class Provider3(object):
+ @classmethod
+ def config_settings(cls):
+ return [
+ ('a.string', 'string'),
+ ('a.boolean', 'boolean'),
+ ('a.pos_int', 'pos_int'),
+ ('a.int', 'int'),
+ ('a.path', 'path'),
+ ]
+
+
+@SettingsProvider
+class Provider4(object):
+ config_settings = [
+ ('foo.abc', StringType, 'a', {'choices': set('abc')}),
+ ('foo.xyz', StringType, 'w', {'choices': set('xyz')}),
+ ]
+
+
+@SettingsProvider
+class Provider5(object):
+ config_settings = [
+ ('foo.*', 'string'),
+ ('foo.bar', 'string'),
+ ]
+
+
+class TestConfigSettings(unittest.TestCase):
+ def test_empty(self):
+ s = ConfigSettings()
+
+ self.assertEqual(len(s), 0)
+ self.assertNotIn('foo', s)
+
+ def test_duplicate_option(self):
+ s = ConfigSettings()
+
+ with self.assertRaises(ConfigException):
+ s.register_provider(ProviderDuplicate)
+
+ def test_simple(self):
+ s = ConfigSettings()
+ s.register_provider(Provider1)
+
+ self.assertEqual(len(s), 1)
+ self.assertIn('foo', s)
+
+ foo = s['foo']
+ foo = s.foo
+
+ self.assertEqual(len(foo), 0)
+ self.assertEqual(len(foo._settings), 2)
+
+ self.assertIn('bar', foo._settings)
+ self.assertIn('baz', foo._settings)
+
+ self.assertNotIn('bar', foo)
+ foo['bar'] = 'value1'
+ self.assertIn('bar', foo)
+
+ self.assertEqual(foo['bar'], 'value1')
+ self.assertEqual(foo.bar, 'value1')
+
+ def test_assignment_validation(self):
+ s = ConfigSettings()
+ s.register_provider(Provider2)
+
+ a = s.a
+
+ # Assigning an undeclared setting raises.
+ with self.assertRaises(AttributeError):
+ a.undefined = True
+
+ with self.assertRaises(KeyError):
+ a['undefined'] = True
+
+ # Basic type validation.
+ a.string = 'foo'
+ a.string = 'foo'
+
+ with self.assertRaises(TypeError):
+ a.string = False
+
+ a.boolean = True
+ a.boolean = False
+
+ with self.assertRaises(TypeError):
+ a.boolean = 'foo'
+
+ a.pos_int = 5
+ a.pos_int = 0
+
+ with self.assertRaises(ValueError):
+ a.pos_int = -1
+
+ with self.assertRaises(TypeError):
+ a.pos_int = 'foo'
+
+ a.int = 5
+ a.int = 0
+ a.int = -5
+
+ with self.assertRaises(TypeError):
+ a.int = 1.24
+
+ with self.assertRaises(TypeError):
+ a.int = 'foo'
+
+ a.path = '/home/gps'
+ a.path = 'foo.c'
+ a.path = 'foo/bar'
+ a.path = './foo'
+
+ def retrieval_type_helper(self, provider):
+ s = ConfigSettings()
+ s.register_provider(provider)
+
+ a = s.a
+
+ a.string = 'foo'
+ a.boolean = True
+ a.pos_int = 12
+ a.int = -4
+ a.path = './foo/bar'
+
+ self.assertIsInstance(a.string, str_type)
+ self.assertIsInstance(a.boolean, bool)
+ self.assertIsInstance(a.pos_int, int)
+ self.assertIsInstance(a.int, int)
+ self.assertIsInstance(a.path, str_type)
+
+ def test_retrieval_type(self):
+ self.retrieval_type_helper(Provider2)
+ self.retrieval_type_helper(Provider3)
+
+ def test_choices_validation(self):
+ s = ConfigSettings()
+ s.register_provider(Provider4)
+
+ foo = s.foo
+ foo.abc
+ with self.assertRaises(ValueError):
+ foo.xyz
+
+ with self.assertRaises(ValueError):
+ foo.abc = 'e'
+
+ foo.abc = 'b'
+ foo.xyz = 'y'
+
+ def test_wildcard_options(self):
+ s = ConfigSettings()
+ s.register_provider(Provider5)
+
+ foo = s.foo
+
+ self.assertIn('*', foo._settings)
+ self.assertNotIn('*', foo)
+
+ foo.baz = 'value1'
+ foo.bar = 'value2'
+
+ self.assertIn('baz', foo)
+ self.assertEqual(foo.baz, 'value1')
+
+ self.assertIn('bar', foo)
+ self.assertEqual(foo.bar, 'value2')
+
+ def test_file_reading_single(self):
+ temp = NamedTemporaryFile(mode='wt')
+ temp.write(CONFIG1)
+ temp.flush()
+
+ s = ConfigSettings()
+ s.register_provider(Provider1)
+
+ s.load_file(temp.name)
+
+ self.assertEqual(s.foo.bar, 'bar_value')
+
+ def test_file_reading_multiple(self):
+ """Loading multiple files has proper overwrite behavior."""
+ temp1 = NamedTemporaryFile(mode='wt')
+ temp1.write(CONFIG1)
+ temp1.flush()
+
+ temp2 = NamedTemporaryFile(mode='wt')
+ temp2.write(CONFIG2)
+ temp2.flush()
+
+ s = ConfigSettings()
+ s.register_provider(Provider1)
+
+ s.load_files([temp1.name, temp2.name])
+
+ self.assertEqual(s.foo.bar, 'value2')
+
+ def test_file_reading_missing(self):
+ """Missing files should silently be ignored."""
+
+ s = ConfigSettings()
+
+ s.load_file('/tmp/foo.ini')
+
+ def test_file_writing(self):
+ s = ConfigSettings()
+ s.register_provider(Provider2)
+
+ s.a.string = 'foo'
+ s.a.boolean = False
+
+ temp = NamedTemporaryFile('wt')
+ s.write(temp)
+ temp.flush()
+
+ s2 = ConfigSettings()
+ s2.register_provider(Provider2)
+
+ s2.load_file(temp.name)
+
+ self.assertEqual(s.a.string, s2.a.string)
+ self.assertEqual(s.a.boolean, s2.a.boolean)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/mach/test/test_dispatcher.py b/python/mach/mach/test/test_dispatcher.py
new file mode 100644
index 000000000..3d689a4a2
--- /dev/null
+++ b/python/mach/mach/test/test_dispatcher.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+from cStringIO import StringIO
+
+from mach.base import CommandContext
+from mach.registrar import Registrar
+from mach.test.common import TestBase
+
+from mozunit import main
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+class TestDispatcher(TestBase):
+ """Tests dispatch related code"""
+
+ def get_parser(self, config=None):
+ mach = self.get_mach('basic.py')
+
+ for provider in Registrar.settings_providers:
+ mach.settings.register_provider(provider)
+
+ if config:
+ if isinstance(config, basestring):
+ config = StringIO(config)
+ mach.settings.load_fps([config])
+
+ context = CommandContext(settings=mach.settings)
+ return mach.get_argument_parser(context)
+
+ def test_command_aliases(self):
+ config = """
+[alias]
+foo = cmd_foo
+bar = cmd_bar
+baz = cmd_bar --baz
+cmd_bar = cmd_bar --baz
+"""
+ parser = self.get_parser(config=config)
+
+ args = parser.parse_args(['foo'])
+ self.assertEqual(args.command, 'cmd_foo')
+
+ def assert_bar_baz(argv):
+ args = parser.parse_args(argv)
+ self.assertEqual(args.command, 'cmd_bar')
+ self.assertTrue(args.command_args.baz)
+
+ # The following should all result in |cmd_bar --baz|
+ assert_bar_baz(['bar', '--baz'])
+ assert_bar_baz(['baz'])
+ assert_bar_baz(['cmd_bar'])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/mach/test/test_entry_point.py b/python/mach/mach/test/test_entry_point.py
new file mode 100644
index 000000000..7aea91e5e
--- /dev/null
+++ b/python/mach/mach/test/test_entry_point.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import unicode_literals
+
+import imp
+import os
+import sys
+
+from mach.base import MachError
+from mach.test.common import TestBase
+from mock import patch
+
+from mozunit import main
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+class Entry():
+ """Stub replacement for pkg_resources.EntryPoint"""
+ def __init__(self, providers):
+ self.providers = providers
+
+ def load(self):
+ def _providers():
+ return self.providers
+ return _providers
+
+
+class TestEntryPoints(TestBase):
+ """Test integrating with setuptools entry points"""
+ provider_dir = os.path.join(here, 'providers')
+
+ def _run_mach(self):
+ return TestBase._run_mach(self, ['help'], entry_point='mach.providers')
+
+ @patch('pkg_resources.iter_entry_points')
+ def test_load_entry_point_from_directory(self, mock):
+ # Ensure the parent module is present; otherwise we'll (likely) get
+ # an error due to the unknown parent.
+ if b'mach.commands' not in sys.modules:
+ mod = imp.new_module(b'mach.commands')
+ sys.modules[b'mach.commands'] = mod
+
+ mock.return_value = [Entry([self.provider_dir])]
+ # Mach error raised due to conditions_invalid.py
+ with self.assertRaises(MachError):
+ self._run_mach()
+
+ @patch('pkg_resources.iter_entry_points')
+ def test_load_entry_point_from_file(self, mock):
+ mock.return_value = [Entry([os.path.join(self.provider_dir, 'basic.py')])]
+
+ result, stdout, stderr = self._run_mach()
+ self.assertIsNone(result)
+ self.assertIn('cmd_foo', stdout)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/mach/test/test_error_output.py b/python/mach/mach/test/test_error_output.py
new file mode 100644
index 000000000..25553f96b
--- /dev/null
+++ b/python/mach/mach/test/test_error_output.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from mach.main import (
+ COMMAND_ERROR,
+ MODULE_ERROR
+)
+from mach.test.common import TestBase
+
+from mozunit import main
+
+
+class TestErrorOutput(TestBase):
+
+ def _run_mach(self, args):
+ return TestBase._run_mach(self, args, 'throw.py')
+
+ def test_command_error(self):
+ result, stdout, stderr = self._run_mach(['throw', '--message',
+ 'Command Error'])
+
+ self.assertEqual(result, 1)
+
+ self.assertIn(COMMAND_ERROR, stdout)
+
+ def test_invoked_error(self):
+ result, stdout, stderr = self._run_mach(['throw_deep', '--message',
+ 'Deep stack'])
+
+ self.assertEqual(result, 1)
+
+ self.assertIn(MODULE_ERROR, stdout)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/mach/test/test_logger.py b/python/mach/mach/test/test_logger.py
new file mode 100644
index 000000000..05592845e
--- /dev/null
+++ b/python/mach/mach/test/test_logger.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import time
+import unittest
+
+from mach.logging import StructuredHumanFormatter
+
+from mozunit import main
+
+
+class DummyLogger(logging.Logger):
+ def __init__(self, cb):
+ logging.Logger.__init__(self, 'test')
+
+ self._cb = cb
+
+ def handle(self, record):
+ self._cb(record)
+
+
+class TestStructuredHumanFormatter(unittest.TestCase):
+ def test_non_ascii_logging(self):
+ # Ensures the formatter doesn't choke when non-ASCII characters are
+ # present in printed parameters.
+ formatter = StructuredHumanFormatter(time.time())
+
+ def on_record(record):
+ result = formatter.format(record)
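+ # Skip the leading elapsed-time column the formatter prepends
+ # (assumed to be nine characters wide here).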
+ relevant = result[9:]
+
+ self.assertEqual(relevant, 'Test: s\xe9curit\xe9')
+
+ logger = DummyLogger(on_record)
+
+ value = 's\xe9curit\xe9'
+
+ logger.log(logging.INFO, 'Test: {utf}',
+ extra={'action': 'action', 'params': {'utf': value}})
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mach/setup.py b/python/mach/setup.py
new file mode 100644
index 000000000..50065546e
--- /dev/null
+++ b/python/mach/setup.py
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+VERSION = '0.6'
+
+README = open('README.rst').read()
+
+setup(
+ name='mach',
+ description='Generic command line command dispatching framework.',
+ long_description=README,
+ license='MPL 2.0',
+ author='Gregory Szorc',
+ author_email='gregory.szorc@gmail.com',
+ url='https://developer.mozilla.org/en-US/docs/Developer_Guide/mach',
+ packages=['mach'],
+ version=VERSION,
+ classifiers=[
+ 'Environment :: Console',
+ 'Development Status :: 3 - Alpha',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ ],
+ install_requires=[
+ 'blessings',
+ 'mozfile',
+ 'mozprocess',
+ ],
+ tests_require=['mock'],
+)
+
diff --git a/python/mach_commands.py b/python/mach_commands.py
new file mode 100644
index 000000000..5ff0b1876
--- /dev/null
+++ b/python/mach_commands.py
@@ -0,0 +1,158 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import __main__
+import argparse
+import logging
+import mozpack.path as mozpath
+import os
+
+from mozbuild.base import (
+ MachCommandBase,
+)
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+
+@CommandProvider
+class MachCommands(MachCommandBase):
+ @Command('python', category='devenv',
+ description='Run Python.')
+ @CommandArgument('args', nargs=argparse.REMAINDER)
+ def python(self, args):
+ # Avoid logging the command
+ self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
+
+ self._activate_virtualenv()
+
+ return self.run_process([self.virtualenv_manager.python_path] + args,
+ pass_thru=True, # Allow user to run Python interactively.
+ ensure_exit_code=False, # Don't throw on non-zero exit code.
+ # Note: subprocess requires native strings in os.environ on Windows
+ append_env={b'PYTHONDONTWRITEBYTECODE': str('1')})
+
+ @Command('python-test', category='testing',
+ description='Run Python unit tests with an appropriate test runner.')
+ @CommandArgument('--verbose',
+ default=False,
+ action='store_true',
+ help='Verbose output.')
+ @CommandArgument('--stop',
+ default=False,
+ action='store_true',
+ help='Stop running tests after the first error or failure.')
+ @CommandArgument('--path-only',
+ default=False,
+ action='store_true',
+ help=('Collect all tests under given path instead of default '
+ 'test resolution. Supports pytest-style tests.'))
+ @CommandArgument('tests', nargs='*',
+ metavar='TEST',
+ help=('Tests to run. Each test can be a single file or a directory. '
+ 'Default test resolution relies on PYTHON_UNIT_TESTS.'))
+ def python_test(self,
+ tests=[],
+ test_objects=None,
+ subsuite=None,
+ verbose=False,
+ path_only=False,
+ stop=False):
+ self._activate_virtualenv()
+
+ def find_tests_by_path():
+ import glob
+ files = []
+ for t in tests:
+ if t.endswith('.py') and os.path.isfile(t):
+ files.append(t)
+ elif os.path.isdir(t):
+ for root, _, _ in os.walk(t):
+ files += glob.glob(mozpath.join(root, 'test*.py'))
+ files += glob.glob(mozpath.join(root, 'unit*.py'))
+ else:
+ self.log(logging.WARN, 'python-test',
+ {'test': t},
+ 'TEST-UNEXPECTED-FAIL | Invalid test: {test}')
+ if stop:
+ break
+ return files
+
+ # Python's unittest, and in particular discover, has problems with
+ # clashing namespaces when importing multiple test modules. What follows
+ # is a simple way to keep environments separate, at the price of
+ # launching Python multiple times. Most tests are run via mozunit,
+ # which produces output in the format Mozilla infrastructure expects.
+ # Some tests are run via pytest.
+ return_code = 0
+ found_tests = False
+ if test_objects is None:
+ # If we're not being called from `mach test`, do our own
+ # test resolution.
+ if path_only:
+ if tests:
+ test_objects = [{'path': p} for p in find_tests_by_path()]
+ else:
+ self.log(logging.WARN, 'python-test', {},
+ 'TEST-UNEXPECTED-FAIL | No tests specified')
+ test_objects = []
+ else:
+ from mozbuild.testing import TestResolver
+ resolver = self._spawn(TestResolver)
+ if tests:
+ # If we were given test paths, try to find tests matching them.
+ test_objects = resolver.resolve_tests(paths=tests,
+ flavor='python')
+ else:
+ # Otherwise just run everything in PYTHON_UNIT_TESTS
+ test_objects = resolver.resolve_tests(flavor='python')
+
+ for test in test_objects:
+ found_tests = True
+ f = test['path']
+ file_displayed_test = [] # Used as a boolean.
+
+ def _line_handler(line):
+ if not file_displayed_test:
+ output = ('Ran' in line or 'collected' in line or
+ line.startswith('TEST-'))
+ if output:
+ file_displayed_test.append(True)
+
+ inner_return_code = self.run_process(
+ [self.virtualenv_manager.python_path, f],
+ ensure_exit_code=False, # Don't throw on non-zero exit code.
+ log_name='python-test',
+ # subprocess requires native strings in os.environ on Windows
+ append_env={b'PYTHONDONTWRITEBYTECODE': str('1')},
+ line_handler=_line_handler)
+ return_code += inner_return_code
+
+ if not file_displayed_test:
+ self.log(logging.WARN, 'python-test', {'file': f},
+ 'TEST-UNEXPECTED-FAIL | No test output (missing mozunit.main() call?): {file}')
+
+ if verbose:
+ if inner_return_code != 0:
+ self.log(logging.INFO, 'python-test', {'file': f},
+ 'Test failed: {file}')
+ else:
+ self.log(logging.INFO, 'python-test', {'file': f},
+ 'Test passed: {file}')
+ if stop and return_code > 0:
+ return 1
+
+ if not found_tests:
+ message = 'TEST-UNEXPECTED-FAIL | No tests collected'
+ if not path_only:
+ message += ' (Not in PYTHON_UNIT_TESTS? Try --path-only?)'
+ self.log(logging.WARN, 'python-test', {}, message)
+ return 1
+
+ return 0 if return_code == 0 else 1
diff --git a/python/macholib/MANIFEST.in b/python/macholib/MANIFEST.in
new file mode 100644
index 000000000..f39832106
--- /dev/null
+++ b/python/macholib/MANIFEST.in
@@ -0,0 +1,8 @@
+include *.txt MANIFEST.in *.py
+graft doc
+graft doc/_static
+graft doc/_templates
+graft macholib_tests
+global-exclude .DS_Store
+global-exclude *.pyc
+global-exclude *.so
diff --git a/python/macholib/PKG-INFO b/python/macholib/PKG-INFO
new file mode 100644
index 000000000..33408d200
--- /dev/null
+++ b/python/macholib/PKG-INFO
@@ -0,0 +1,275 @@
+Metadata-Version: 1.1
+Name: macholib
+Version: 1.7
+Summary: Mach-O header analysis and editing
+Home-page: http://bitbucket.org/ronaldoussoren/macholib
+Author: Ronald Oussoren
+Author-email: ronaldoussoren@mac.com
+License: MIT
+Download-URL: http://pypi.python.org/pypi/macholib
+Description: macholib can be used to analyze and edit Mach-O headers, the executable
+ format used by Mac OS X.
+
+ It's typically used as a dependency analysis tool, and also to rewrite dylib
+ references in Mach-O headers to be @executable_path relative.
+
+ Though this tool targets a platform-specific file format, it is pure Python
+ code that is platform- and endian-independent.
+
+
+ Release history
+ ===============
+
+ macholib 1.7
+ ------------
+
+ * Added support for ARM64, LC_ENCRYPTION_INFO_64 and LC_LINKER_OPTION
+
+ Patch by Matthias Ringwald.
+
+ * Load commands now have a "describe" method that returns more information
+ about the command.
+
+ Patch by David Dorsey.
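+
+ For example (a sketch; the iteration shape and ``describe``
+ usage are assumed from the macholib 1.7 sources)::
+
+     from macholib.MachO import MachO
+
+     m = MachO('/bin/ls')
+     for header in m.headers:
+         # Each command is a (load_command, command, data) triple.
+         for load_cmd, cmd, data in header.commands:
+             print(load_cmd.describe())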
+
+ * The MAGIC value in the header was always represented in the native
+ byte order, instead of as the value read from the binary.
+
+ Patch by David Dorsey.
+
+ * Added various new constants to "macholib.mach_o".
+
+ Patch by David Dorsey.
+
+ macholib 1.6.1
+ --------------
+
+ * ?
+
+ macholib 1.6
+ ------------
+
+ * Add support for '@loader_path' link command in
+ macholib.dyld:
+
+ - Added function ``macholib.dyld.dyld_loader_search``
+
+ - This function is used by ``macholib.dyld.dyld_find``,
+ and that function now has a new (optional) argument
+ with the path to the loader.
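+
+ A hypothetical call (the argument name ``loader_path`` is an
+ assumption, not taken from this changelog)::
+
+     from macholib.dyld import dyld_find
+
+     # Resolve an @loader_path reference relative to the loading dylib.
+     path = dyld_find('@loader_path/libfoo.dylib',
+                      loader_path='/usr/local/lib')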
+
+ * Also add support for '@loader_path' to macholib.MachoGraph,
+ using the newly added '@loader_path' support in the
+ dyld module.
+
+ Due to this support the *macho_standalone* tool can
+ now rewrite binaries that contain an '@loader_path' load
+ command.
+
+
+ macholib 1.5.2
+ --------------
+
+ * Issue #93: Show the name of the affected file in the exception message
+ for Mach-O headers that are too large to relocate.
+
+
+ macholib 1.5.1
+ --------------
+
+ * There were no 'classifiers' in the package metadata due to
+ a bug in setup.py.
+
+ macholib 1.5
+ --------------
+
+ macholib 1.5 is a minor feature release
+
+ * No longer use 2to3 to provide Python 3 support
+
+ As a side-effect of this macholib no longer supports
+ Python 2.5 and earlier.
+
+ * Adds support for some new Mach-O load commands
+
+ * Fix for py3k problem in macho_standalone.py
+
+ Patch by Guanqun Lu.
+
+ * Fix for some issues in macho_dump.py
+
+ Patch by Nam Nguyen
+
+ * Issue #10: Fix for LC_DATA_IN_CODE linker commands; without
+ this fix py2app cannot build application bundles when
+ the source binaries have been compiled with Xcode 4.5.
+
+ * Issue #6: Fix for LC_ENCRYPTION_INFO linker commands
+
+ * Use the mach header information to print the cpu type of a
+ binary, instead of trying to deduce that from pointer width
+ and endianness.
+
+ Changed the code because of issue #6, in which a user tried to
+ dump an iOS binary, which resulted in bogus output in previous
+ releases.
+
+ * The mapping ``macholib.macho_dump.ARCH_MAP`` is undocumented
+ and no longer used by macholib itself. It will be removed
+ in the next release.
+
+
+ * The command-line tools ``macho_find``, ``macho_dump`` and
+ ``macho_standalone`` are deprecated. Use "python -mmacholib"
+ instead. That is::
+
+ $ python -mmacholib dump /usr/bin/grep
+
+ $ python -mmacholib find ~
+
+ $ python -mmacholib standalone myapp.app
+
+ This makes it clearer which version of the tools is used.
+
+ macholib 1.4.3
+ --------------
+
+ macholib 1.4.3 is a minor feature release
+
+ * Added strings for 'x86_64' and 'ppc64' to
+ macholib.mach_o.CPU_TYPE_NAMES.
+
+ * macho_find and macho_dump were broken in the 1.4.2 release
+
+ * Added 'macholib.util.NOT_SYSTEM_FILES', a list of
+ files that aren't system paths even though they are
+ located in system locations.
+
+ Needed to work around a bug in PySide (see issue #32 in the
+ py2app tracker)
+
+
+
+ macholib 1.4.2
+ --------------
+
+ macholib 1.4.2 is a minor bugfix release
+
+ * The support for new load commands that was added in 1.4.1
+ contained a typo that caused problems on OSX 10.7 (Lion).
+
+ macholib 1.4.1
+ --------------
+
+ macholib 1.4.1 is a minor feature release
+
+ Features:
+
+ - Add support for a number of new MachO load commands that were added
+ during the lifetime of OSX 10.6: ``LC_LOAD_UPWARD_DYLIB``,
+ ``LC_VERSION_MIN_MACOSX``, ``LC_VERSION_MIN_IPHONEOS`` and
+ ``LC_FUNCTION_STARTS``.
+
+ macholib 1.4
+ -------------
+
+ macholib 1.4 is a feature release
+
+ Features:
+
+ - Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/macholib>.
+
+ - The repository has moved to bitbucket
+
+ - There is now a test suite
+
+ - Private functionality inside modules was renamed to
+ a name starting with an underscore.
+
+ .. note:: If this change affects your code, you are relying on undefined
+ implementation features; please stop using private functions.
+
+ - The basic packable types in ``macholib.ptypes`` were renamed to better
+ represent the corresponding C type. The table below lists the old
+ an new names (the old names are still available, but are deprecated and
+ will be removed in a future release).
+
+ +--------------+--------------+
+ | **Old name** | **New name** |
+ +==============+==============+
+ | p_byte | p_int8 |
+ +--------------+--------------+
+ | p_ubyte | p_uint8 |
+ +--------------+--------------+
+ | p_short | p_int16 |
+ +--------------+--------------+
+ | p_ushort | p_uint16 |
+ +--------------+--------------+
+ | p_int | p_int32 |
+ +--------------+--------------+
+ | p_uint | p_uint32 |
+ +--------------+--------------+
+ | p_long | p_int32 |
+ +--------------+--------------+
+ | p_ulong | p_uint32 |
+ +--------------+--------------+
+ | p_longlong | p_int64 |
+ +--------------+--------------+
+ | p_ulonglong | p_uint64 |
+ +--------------+--------------+
+
+ ``macholib.ptypes.p_ptr`` is no longer present as it had an unclear
+ definition and isn't actually used in the codebase.
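+
+ For example, a mechanical migration to the new names (a sketch)::
+
+     # Old (deprecated) spelling:
+     #   from macholib.ptypes import p_ulong
+     # New spelling:
+     from macholib.ptypes import p_uint32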
+
+
+ Bug fixes:
+
+ - The semantics of ``dyld.dyld_default_search`` were changed a bit;
+ it now first searches the framework path (if appropriate) and then
+ the linker path, irrespective of the value of the ``DYLD_FALLBACK*``
+ environment variables.
+
+ Previous versions would change the search order when those variables
+ were set, which is odd and doesn't correspond to the documented
+ behaviour of the system dyld.
+
+ - It is once again possible to install using python2.5
+
+ - The source distribution includes all files; this was broken
+ due to the switch to mercurial (which confused setuptools)
+
+ macholib 1.3
+ ------------
+
+ macholib 1.3 is a feature release.
+
+ Features:
+
+ - Experimental Python 3.x support
+
+ This version contains lightly tested support for Python 3.
+
+ macholib 1.2.2
+ --------------
+
+ macholib 1.2.2 is a bugfix release.
+
+ Bug fixes:
+
+ - Macholib should work better with 64-bit code
+ (patch by Marc-Antoine Parent)
+
+Keywords: Mach-O,dyld
+Platform: any
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Build Tools
diff --git a/python/macholib/README.txt b/python/macholib/README.txt
new file mode 100644
index 000000000..62dbdb709
--- /dev/null
+++ b/python/macholib/README.txt
@@ -0,0 +1,8 @@
+macholib can be used to analyze and edit Mach-O headers, the executable
+format used by Mac OS X.
+
+It's typically used as a dependency analysis tool, and also to rewrite dylib
+references in Mach-O headers to be @executable_path relative.
+
+Though this tool targets a platform-specific file format, it is pure Python
+code that is platform- and endian-independent.
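+
+For example, the bundled command-line interface can be used as follows
+(commands as documented in the macholib changelog):
+
+    $ python -mmacholib dump /usr/bin/grep
+    $ python -mmacholib find ~
+    $ python -mmacholib standalone myapp.app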
diff --git a/python/macholib/doc/MachO.rst b/python/macholib/doc/MachO.rst
new file mode 100644
index 000000000..204e04087
--- /dev/null
+++ b/python/macholib/doc/MachO.rst
@@ -0,0 +1,19 @@
+:mod:`macholib.MachO` --- Utilities for reading and writing Mach-O headers
+==========================================================================
+
+.. module:: macholib.MachO
+ :synopsis: Utilities for reading and writing Mach-O headers
+
+This module defines a class :class:`MachO`, which enables reading
+and writing the Mach-O header of an executable file or dynamic
+library on MacOS X.
+
+.. class:: MachO(filename)
+
+ Creates a MachO object by reading the Mach-O headers from
+ *filename*.
+
+ The *filename* should refer to an existing file in Mach-O
+ format, and can refer to fat (universal) binaries.
+
+.. note:: more information will be added later
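+
+Example (a minimal sketch; attribute names are assumed from the
+macholib sources, not from this page)::
+
+    from macholib.MachO import MachO
+
+    m = MachO('/bin/ls')
+    for header in m.headers:
+        # A fat binary yields one MachOHeader per architecture.
+        print(header.header.magic)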
diff --git a/python/macholib/doc/MachoOGraph.rst b/python/macholib/doc/MachoOGraph.rst
new file mode 100644
index 000000000..256a4137f
--- /dev/null
+++ b/python/macholib/doc/MachoOGraph.rst
@@ -0,0 +1,14 @@
+:mod:`macholib.MachoGraph` --- Graph data structure of Mach-O dependencies
+===============================================================================
+
+.. module:: macholib.MachOGraph
+ :synopsis: Graph data structure of Mach-O dependencies
+
+This module defines the class :class:`MachOGraph` which represents the
+direct and indirect dependencies of one or more Mach-O files on
+other (library) files.
+
+.. class:: MachOGraph(...)
+
+ To be discussed.
+
diff --git a/python/macholib/doc/MachoOStandalone.rst b/python/macholib/doc/MachoOStandalone.rst
new file mode 100644
index 000000000..d1a8052f6
--- /dev/null
+++ b/python/macholib/doc/MachoOStandalone.rst
@@ -0,0 +1,13 @@
+:mod:`macholib.MachOStandalone` --- Create standalone application bundles
+==========================================================================
+
+.. module:: macholib.MachOStandalone
+ :synopsis: Create standalone application bundles
+
+This module defines class :class:`MachOStandalone` which locates
+all Mach-O files in a directory (assumed to be the root of an
+application or plugin bundle) and then copies all non-system
+dependencies for the located files into the bundle.
+
+.. class:: MachOStandalone(base[, dest[, graph[, env[, executable_path]]]])
+
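+A possible usage sketch (the ``run`` method is an assumption, based on
+the ``macho_standalone`` command-line tool)::
+
+    from macholib.MachOStandalone import MachOStandalone
+
+    # Copy non-system dependencies into the bundle and rewrite
+    # the load commands of the copied files.
+    MachOStandalone('/path/to/MyApp.app').run()
+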
diff --git a/python/macholib/doc/Makefile b/python/macholib/doc/Makefile
new file mode 100644
index 000000000..cca691360
--- /dev/null
+++ b/python/macholib/doc/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/macholib.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/macholib.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/macholib"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/macholib"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/python/macholib/doc/SymbolTable.rst b/python/macholib/doc/SymbolTable.rst
new file mode 100644
index 000000000..5417e315c
--- /dev/null
+++ b/python/macholib/doc/SymbolTable.rst
@@ -0,0 +1,24 @@
+:mod:`macholib.SymbolTable` --- Class to read the symbol table from a Mach-O header
+===================================================================================
+
+.. module:: macholib.SymbolTable
+ :synopsis: Class to read the symbol table from a Mach-O header
+
+This module is deprecated because it is not used by the author
+and likely contains bugs. It also does not work for 64-bit binaries.
+
+.. class:: SymbolTable(macho[, openfile])
+
+ Reads the SymbolTable for the given Mach-O object.
+
+ The optional argument *openfile* specifies the
+ function to use to open the file, defaulting to
+ the builtin :func:`open` function.
+
+ .. warning:: As far as we know this class is not used
+ by any user of the modulegraph package, and the code
+ has not been updated after the initial implementation.
+
+ The end result of this is that the code does not
+ support 64-bit code at all and likely doesn't work
+ properly for 32-bit code either.
diff --git a/python/macholib/doc/_build/doctrees/MachO.doctree b/python/macholib/doc/_build/doctrees/MachO.doctree
new file mode 100644
index 000000000..0aed18b42
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/MachO.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/MachoOGraph.doctree b/python/macholib/doc/_build/doctrees/MachoOGraph.doctree
new file mode 100644
index 000000000..7ed639427
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/MachoOGraph.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/MachoOStandalone.doctree b/python/macholib/doc/_build/doctrees/MachoOStandalone.doctree
new file mode 100644
index 000000000..f19f00e8e
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/MachoOStandalone.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/SymbolTable.doctree b/python/macholib/doc/_build/doctrees/SymbolTable.doctree
new file mode 100644
index 000000000..0ff1c997c
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/SymbolTable.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/changelog.doctree b/python/macholib/doc/_build/doctrees/changelog.doctree
new file mode 100644
index 000000000..ffb3f138a
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/changelog.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/dyld.doctree b/python/macholib/doc/_build/doctrees/dyld.doctree
new file mode 100644
index 000000000..1692b9834
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/dyld.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/dylib.doctree b/python/macholib/doc/_build/doctrees/dylib.doctree
new file mode 100644
index 000000000..0c501fc6b
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/dylib.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/environment.pickle b/python/macholib/doc/_build/doctrees/environment.pickle
new file mode 100644
index 000000000..126177dfd
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/environment.pickle
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/framework.doctree b/python/macholib/doc/_build/doctrees/framework.doctree
new file mode 100644
index 000000000..9db55ed0c
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/framework.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/index.doctree b/python/macholib/doc/_build/doctrees/index.doctree
new file mode 100644
index 000000000..db4b86e65
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/index.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/license.doctree b/python/macholib/doc/_build/doctrees/license.doctree
new file mode 100644
index 000000000..231522cfc
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/license.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/macho_o.doctree b/python/macholib/doc/_build/doctrees/macho_o.doctree
new file mode 100644
index 000000000..2e4a8aa2c
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/macho_o.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/ptypes.doctree b/python/macholib/doc/_build/doctrees/ptypes.doctree
new file mode 100644
index 000000000..53b528782
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/ptypes.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/doctrees/scripts.doctree b/python/macholib/doc/_build/doctrees/scripts.doctree
new file mode 100644
index 000000000..6c9413c02
--- /dev/null
+++ b/python/macholib/doc/_build/doctrees/scripts.doctree
Binary files differ
diff --git a/python/macholib/doc/_build/html/.buildinfo b/python/macholib/doc/_build/html/.buildinfo
new file mode 100644
index 000000000..97c8fd2fd
--- /dev/null
+++ b/python/macholib/doc/_build/html/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: b80b1cc6a748fe1205999d2fef6726e1
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/python/macholib/doc/_build/html/MachO.html b/python/macholib/doc/_build/html/MachO.html
new file mode 100644
index 000000000..61c8bf3a6
--- /dev/null
+++ b/python/macholib/doc/_build/html/MachO.html
@@ -0,0 +1,132 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.MachO — Utilities for reading and writing Mach-O headers &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies" href="MachoOGraph.html" />
+ <link rel="prev" title="Command-line tools" href="scripts.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachoOGraph.html" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="scripts.html" title="Command-line tools"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.MachO">
+<span id="macholib-macho-utilities-for-reading-and-writing-mach-o-headers"></span><h1><a class="reference internal" href="#module-macholib.MachO" title="macholib.MachO: Utilities for reading and writing Mach-O headers"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.MachO</span></tt></a> &#8212; Utilities for reading and writing Mach-O headers<a class="headerlink" href="#module-macholib.MachO" title="Permalink to this headline">¶</a></h1>
+<p>This module defines a class <tt class="xref py py-class docutils literal"><span class="pre">MachO</span></tt>, which enables reading
+and writing the Mach-O header of an executable file or dynamic
+library on MacOS X.</p>
+<dl class="class">
+<dt id="macholib.MachO.MachO">
+<em class="property">class </em><tt class="descclassname">macholib.MachO.</tt><tt class="descname">MachO</tt><big>(</big><em>filename</em><big>)</big><a class="headerlink" href="#macholib.MachO.MachO" title="Permalink to this definition">¶</a></dt>
+<dd><p>Creates a MachO object by reading the Mach-O headers from
+<em>filename</em>.</p>
+<p>The <em>filename</em> should refer to an existing file in Mach-O
+format, and can refer to fat (universal) binaries.</p>
+</dd></dl>
+
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">more information will be added later</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="scripts.html"
+ title="previous chapter">Command-line tools</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="MachoOGraph.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.MachoGraph</span></tt> &#8212; Graph data structure of Mach-O dependencies</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/MachO.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachoOGraph.html" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="scripts.html" title="Command-line tools"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/MachoOGraph.html b/python/macholib/doc/_build/html/MachoOGraph.html
new file mode 100644
index 000000000..aeba004d3
--- /dev/null
+++ b/python/macholib/doc/_build/html/MachoOGraph.html
@@ -0,0 +1,125 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.MachoGraph — Graph data structure of Mach-O dependencies &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.MachOStandalone — Create standalone application bundles" href="MachoOStandalone.html" />
+ <link rel="prev" title="macholib.MachO — Utilities for reading and writing Mach-O headers" href="MachO.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachoOStandalone.html" title="macholib.MachOStandalone — Create standalone application bundles"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="MachO.html" title="macholib.MachO — Utilities for reading and writing Mach-O headers"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.MachOGraph">
+<span id="macholib-machograph-graph-data-structure-of-mach-o-dependencies"></span><h1><tt class="xref py py-mod docutils literal"><span class="pre">macholib.MachoGraph</span></tt> &#8212; Graph data structure of Mach-O dependencies<a class="headerlink" href="#module-macholib.MachOGraph" title="Permalink to this headline">¶</a></h1>
+<p>This module defines the class <a class="reference internal" href="#macholib.MachOGraph.MachOGraph" title="macholib.MachOGraph.MachOGraph"><tt class="xref py py-class docutils literal"><span class="pre">MachOGraph</span></tt></a> which represents the
+direct and indirect dependencies of one or more Mach-O files on
+other (library) files.</p>
+<dl class="class">
+<dt id="macholib.MachOGraph.MachOGraph">
+<em class="property">class </em><tt class="descclassname">macholib.MachOGraph.</tt><tt class="descname">MachOGraph</tt><big>(</big><em>...</em><big>)</big><a class="headerlink" href="#macholib.MachOGraph.MachOGraph" title="Permalink to this definition">¶</a></dt>
+<dd><p>To be discussed.</p>
+</dd></dl>
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="MachO.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.MachO</span></tt> &#8212; Utilities for reading and writing Mach-O headers</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="MachoOStandalone.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.MachOStandalone</span></tt> &#8212; Create standalone application bundles</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/MachoOGraph.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachoOStandalone.html" title="macholib.MachOStandalone — Create standalone application bundles"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="MachO.html" title="macholib.MachO — Utilities for reading and writing Mach-O headers"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/MachoOStandalone.html b/python/macholib/doc/_build/html/MachoOStandalone.html
new file mode 100644
index 000000000..4b94201b6
--- /dev/null
+++ b/python/macholib/doc/_build/html/MachoOStandalone.html
@@ -0,0 +1,125 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.MachOStandalone — Create standalone application bundles &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header" href="SymbolTable.html" />
+ <link rel="prev" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies" href="MachoOGraph.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="SymbolTable.html" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="MachoOGraph.html" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.MachOStandalone">
+<span id="macholib-machostandalone-create-standalone-application-bundles"></span><h1><a class="reference internal" href="#module-macholib.MachOStandalone" title="macholib.MachOStandalone: Create standalone application bundles"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.MachOStandalone</span></tt></a> &#8212; Create standalone application bundles<a class="headerlink" href="#module-macholib.MachOStandalone" title="Permalink to this headline">¶</a></h1>
+<p>This module defines class <a class="reference internal" href="#macholib.MachOStandalone.MachOStandalone" title="macholib.MachOStandalone.MachOStandalone"><tt class="xref py py-class docutils literal"><span class="pre">MachOStandalone</span></tt></a> which locates
+all Mach-O files in a directory (assumed to be the root of an
+application or plugin bundle) and then copies all non-system
+dependencies for the located files into the bundle.</p>
+<dl class="class">
+<dt id="macholib.MachOStandalone.MachOStandalone">
+<em class="property">class </em><tt class="descclassname">macholib.MachOStandalone.</tt><tt class="descname">MachOStandalone</tt><big>(</big><em>base</em><span class="optional">[</span>, <em>dest</em><span class="optional">[</span>, <em>graph</em><span class="optional">[</span>, <em>env</em><span class="optional">[</span>, <em>executable_path</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.MachOStandalone.MachOStandalone" title="Permalink to this definition">¶</a></dt>
+<dd></dd></dl>
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="MachoOGraph.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.MachoGraph</span></tt> &#8212; Graph data structure of Mach-O dependencies</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="SymbolTable.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.SymbolTable</span></tt> &#8212; Class to read the symbol table from a Mach-O header</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/MachoOStandalone.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="SymbolTable.html" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="MachoOGraph.html" title="macholib.MachoGraph — Graph data structure of Mach-O dependencies"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/SymbolTable.html b/python/macholib/doc/_build/html/SymbolTable.html
new file mode 100644
index 000000000..f6847d612
--- /dev/null
+++ b/python/macholib/doc/_build/html/SymbolTable.html
@@ -0,0 +1,136 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.SymbolTable — Class to read the symbol table from a Mach-O header &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.dyld — Dyld emulation" href="dyld.html" />
+ <link rel="prev" title="macholib.MachOStandalone — Create standalone application bundles" href="MachoOStandalone.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dyld.html" title="macholib.dyld — Dyld emulation"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="MachoOStandalone.html" title="macholib.MachOStandalone — Create standalone application bundles"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.SymbolTable">
+<span id="macholib-symboltable-class-to-read-the-symbol-table-from-a-mach-o-header"></span><h1><a class="reference internal" href="#module-macholib.SymbolTable" title="macholib.SymbolTable: Class to read the symbol table from a Mach-O header"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.SymbolTable</span></tt></a> &#8212; Class to read the symbol table from a Mach-O header<a class="headerlink" href="#module-macholib.SymbolTable" title="Permalink to this headline">¶</a></h1>
+<p>This module is deprecated because it is not used by the author
+and likely contains bugs. It also does not work for 64-bit binaries.</p>
+<dl class="class">
+<dt id="macholib.SymbolTable.SymbolTable">
+<em class="property">class </em><tt class="descclassname">macholib.SymbolTable.</tt><tt class="descname">SymbolTable</tt><big>(</big><em>macho</em><span class="optional">[</span>, <em>openfile</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.SymbolTable.SymbolTable" title="Permalink to this definition">¶</a></dt>
+<dd><p>Reads the SymbolTable for the given Mach-O object.</p>
+<p>The optional argument <em>openfile</em> specifies the
+function to use to open the file, defaulting to
+the builtin <a class="reference external" href="http://docs.python.org/library/functions.html#open" title="(in Python v2.7)"><tt class="xref py py-func docutils literal"><span class="pre">open()</span></tt></a> function.</p>
+<div class="admonition warning">
+<p class="first admonition-title">Warning</p>
+<p>As far as we know this class is not used
+by any user of the modulegraph package, and the code
+has not been updated after the initial implementation.</p>
+<p class="last">The end result of this is that the code does not
+support 64-bit code at all and likely doesn&#8217;t work
+properly for 32-bit code either.</p>
+</div>
+</dd></dl>
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="MachoOStandalone.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.MachOStandalone</span></tt> &#8212; Create standalone application bundles</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="dyld.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.dyld</span></tt> &#8212; Dyld emulation</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/SymbolTable.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dyld.html" title="macholib.dyld — Dyld emulation"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="MachoOStandalone.html" title="macholib.MachOStandalone — Create standalone application bundles"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_sources/MachO.txt b/python/macholib/doc/_build/html/_sources/MachO.txt
new file mode 100644
index 000000000..204e04087
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/MachO.txt
@@ -0,0 +1,19 @@
+:mod:`macholib.MachO` --- Utilities for reading and writing Mach-O headers
+==========================================================================
+
+.. module:: macholib.MachO
+ :synopsis: Utilities for reading and writing Mach-O headers
+
+This module defines a class :class:`MachO`, which enables reading
+and writing the Mach-O header of an executable file or dynamic
+library on Mac OS X.
+
+.. class:: MachO(filename)
+
+ Creates a MachO object by reading the Mach-O headers from
+ *filename*.
+
+ The *filename* should refer to an existing file in Mach-O
+ format, and can refer to fat (universal) binaries.
+
+.. note:: more information will be added later
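+
+A minimal usage sketch (illustrative only; the ``headers`` attribute
+holds one entry per architecture in a possibly fat binary)::
+
+    from macholib.MachO import MachO
+
+    m = MachO('/bin/ls')
+    for header in m.headers:
+        # header.header is the parsed mach_header structure
+        print(header.header.magic)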
diff --git a/python/macholib/doc/_build/html/_sources/MachoOGraph.txt b/python/macholib/doc/_build/html/_sources/MachoOGraph.txt
new file mode 100644
index 000000000..256a4137f
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/MachoOGraph.txt
@@ -0,0 +1,14 @@
+:mod:`macholib.MachOGraph` --- Graph data structure of Mach-O dependencies
+===============================================================================
+
+.. module:: macholib.MachOGraph
+ :synopsis: Graph data structure of Mach-O dependencies
+
+This module defines the class :class:`MachOGraph`, which represents the
+direct and indirect dependencies of one or more Mach-O files on
+other (library) files.
+
+.. class:: MachOGraph(...)
+
+ To be discussed.
+
diff --git a/python/macholib/doc/_build/html/_sources/MachoOStandalone.txt b/python/macholib/doc/_build/html/_sources/MachoOStandalone.txt
new file mode 100644
index 000000000..d1a8052f6
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/MachoOStandalone.txt
@@ -0,0 +1,13 @@
+:mod:`macholib.MachOStandalone` --- Create standalone application bundles
+==========================================================================
+
+.. module:: macholib.MachOStandalone
+ :synopsis: Create standalone application bundles
+
+This module defines the class :class:`MachOStandalone`, which locates
+all Mach-O files in a directory (assumed to be the root of an
+application or plugin bundle) and then copies all non-system
+dependencies for the located files into the bundle.
+
+.. class:: MachOStandalone(base[, dest[, graph[, env[, executable_path]]]])
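+
+ A minimal usage sketch (assuming the ``run()`` method performs the
+ copying and returns the list of copied files, as the
+ ``macho_standalone`` tool uses it)::
+
+    from macholib.MachOStandalone import MachOStandalone
+
+    # copy non-system dependencies into MyApp.app and rewrite
+    # the load commands to point at the copied files
+    copied = MachOStandalone('MyApp.app').run()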
+
diff --git a/python/macholib/doc/_build/html/_sources/SymbolTable.txt b/python/macholib/doc/_build/html/_sources/SymbolTable.txt
new file mode 100644
index 000000000..5417e315c
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/SymbolTable.txt
@@ -0,0 +1,24 @@
+:mod:`macholib.SymbolTable` --- Class to read the symbol table from a Mach-O header
+===================================================================================
+
+.. module:: macholib.SymbolTable
+ :synopsis: Class to read the symbol table from a Mach-O header
+
+This module is deprecated because it is not maintained by the author
+and likely contains bugs. It also does not work for 64-bit binaries.
+
+.. class:: SymbolTable(macho[, openfile])
+
+ Reads the SymbolTable for the given Mach-O object.
+
+ The optional argument *openfile* specifies the
+ function to use to open the file, defaulting to
+ the builtin :func:`open` function.
+
+ .. warning:: As far as we know this class is not used
+ by any user of the macholib package, and the code
+ has not been updated after the initial implementation.
+
+ As a result, the code does not
+ support 64-bit code at all and likely doesn't work
+ properly for 32-bit code either.
diff --git a/python/macholib/doc/_build/html/_sources/changelog.txt b/python/macholib/doc/_build/html/_sources/changelog.txt
new file mode 100644
index 000000000..d73299f91
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/changelog.txt
@@ -0,0 +1,242 @@
+Release history
+===============
+
+macholib 1.7
+------------
+
+* Added support for ARM64, LC_ENCRYPTION_INFO_64 and LC_LINKER_OPTION
+
+ Patch by Matthias Ringwald.
+
+* Load commands now have a "describe" method that returns more information
+ about the command.
+
+ Patch by David Dorsey.
+
+* The MAGIC value in the header was always represented in the native
+ byte order, instead of as the value read from the binary.
+
+ Patch by David Dorsey.
+
+* Added various new constants to "macholib.mach_o".
+
+ Patch by David Dorsey.
+
+macholib 1.6.1
+--------------
+
+* ?
+
+macholib 1.6
+------------
+
+* Add support for '@loader_path' link command in
+ macholib.dyld:
+
+ - Added function ``macholib.dyld.dyld_loader_search``
+
+ - This function is used by ``macholib.dyld.dyld_find``,
+ and that function now has a new (optional) argument
+ with the path to the loader.
+
+* Also add support for '@loader_path' to macholib.MachOGraph,
+ using the newly added '@loader_path' support in the
+ dyld module.
+
+ Due to this support the *macho_standalone* tool can
+ now rewrite binaries that contain an '@loader_path' load
+ command.
+
+
+macholib 1.5.2
+--------------
+
+* Issue #93: Show the name of the affected file in the exception message
+ for Mach-O headers that are too large to relocate.
+
+
+macholib 1.5.1
+--------------
+
+* There were no 'classifiers' in the package metadata due to
+ a bug in setup.py.
+
+macholib 1.5
+--------------
+
+macholib 1.5 is a minor feature release.
+
+* No longer use 2to3 to provide Python 3 support
+
+ As a side-effect of this macholib no longer supports
+ Python 2.5 and earlier.
+
+* Adds support for some new Mach-O load commands
+
+* Fix for py3k problem in macho_standalone.py
+
+ Patch by Guanqun Lu.
+
+* Fix for some issues in macho_dump.py
+
+ Patch by Nam Nguyen.
+
+* Issue #10: Fix for LC_DATA_IN_CODE linker commands, without
+ this fix py2app cannot build application bundles when
+ the source binaries have been compiled with Xcode 4.5.
+
+* Issue #6: Fix for LC_ENCRYPTION_INFO linker commands
+
+* Use the mach header information to print the cpu type of a
+ binary, instead of trying to deduce that from pointer width
+ and endianness.
+
+ Changed the code because of issue #6, in which a user tried to
+ dump an iOS binary, which resulted in bogus output in previous
+ releases.
+
+* The mapping ``macholib.macho_dump.ARCH_MAP`` is undocumented
+ and no longer used by macholib itself. It will be removed
+ in the next release.
+
+
+* The command-line tools ``macho_find``, ``macho_dump`` and
+ ``macho_standalone`` are deprecated. Use "python -mmacholib"
+ instead. That is::
+
+ $ python -mmacholib dump /usr/bin/grep
+
+ $ python -mmacholib find ~
+
+ $ python -mmacholib standalone myapp.app
+
+ This makes it clearer which version of the tools is used.
+
+macholib 1.4.3
+--------------
+
+macholib 1.4.3 is a minor feature release.
+
+* Added strings for 'x86_64' and 'ppc64' to
+ macholib.mach_o.CPU_TYPE_NAMES.
+
+* macho_find and macho_dump were broken in the 1.4.2 release.
+
+* Added ``macholib.util.NOT_SYSTEM_FILES``, a list of
+ files that aren't system files even though they are
+ located in system locations.
+
+ Needed to work around a bug in PySide (see issue #32 in the
+ py2app tracker).
+
+
+
+macholib 1.4.2
+--------------
+
+macholib 1.4.2 is a minor bugfix release.
+
+* The support for new load commands that was added in 1.4.1
+ contained a typo that caused problems on OSX 10.7 (Lion).
+
+macholib 1.4.1
+--------------
+
+macholib 1.4.1 is a minor feature release.
+
+Features:
+
+- Add support for a number of new MachO load commands that were added
+ during the lifetime of OSX 10.6: ``LC_LOAD_UPWARD_DYLIB``,
+ ``LC_VERSION_MIN_MACOSX``, ``LC_VERSION_MIN_IPHONEOS`` and
+ ``LC_FUNCTION_STARTS``.
+
+macholib 1.4
+-------------
+
+macholib 1.4 is a feature release.
+
+Features:
+
+- Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/macholib>.
+
+- The repository has moved to bitbucket
+
+- There is now a test suite
+
+- Private functionality inside modules was renamed to
+ a name starting with an underscore.
+
+ .. note:: If this change affects your code, you were relying on undefined
+ implementation features; please stop using private functions.
+
+- The basic packable types in ``macholib.ptypes`` were renamed to better
+ represent the corresponding C type. The table below lists the old
+ and new names (the old names are still available, but are deprecated and
+ will be removed in a future release).
+
+ +--------------+--------------+
+ | **Old name** | **New name** |
+ +==============+==============+
+ | p_byte | p_int8 |
+ +--------------+--------------+
+ | p_ubyte | p_uint8 |
+ +--------------+--------------+
+ | p_short | p_int16 |
+ +--------------+--------------+
+ | p_ushort | p_uint16 |
+ +--------------+--------------+
+ | p_int | p_int32 |
+ +--------------+--------------+
+ | p_uint | p_uint32 |
+ +--------------+--------------+
+ | p_long | p_int32 |
+ +--------------+--------------+
+ | p_ulong | p_uint32 |
+ +--------------+--------------+
+ | p_longlong | p_int64 |
+ +--------------+--------------+
+ | p_ulonglong | p_uint64 |
+ +--------------+--------------+
+
+ ``macholib.ptypes.p_ptr`` is no longer present, as it had an unclear
+ definition and isn't actually used in the codebase.
+
+
+Bug fixes:
+
+- The semantics of ``dyld.dyld_default_search`` were changed slightly:
+ it now first searches the framework path (if appropriate) and then
+ the linker path, irrespective of the value of the ``DYLD_FALLBACK*``
+ environment variables.
+
+ Previous versions would change the search order when those variables
+ were set, which is odd and doesn't correspond to the documented
+ behaviour of the system dyld.
+
+- It is once again possible to install using python2.5
+
+- The source distribution includes all files; this was broken
+ due to the switch to Mercurial (which confused setuptools).
+
+macholib 1.3
+------------
+
+macholib 1.3 is a feature release.
+
+Features:
+
+- Experimental Python 3.x support
+
+ This version contains lightly tested support for Python 3.
+
+macholib 1.2.2
+--------------
+
+macholib 1.2.2 is a bugfix release.
+
+Bug fixes:
+
+- Macholib should work better with 64-bit code
+ (patch by Marc-Antoine Parent).
diff --git a/python/macholib/doc/_build/html/_sources/dyld.txt b/python/macholib/doc/_build/html/_sources/dyld.txt
new file mode 100644
index 000000000..14895b6b0
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/dyld.txt
@@ -0,0 +1,159 @@
+:mod:`macholib.dyld` --- Dyld emulation
+=======================================
+
+.. module:: macholib.dyld
+ :synopsis: Emulation of functionality of the dynamic linker
+
+This module defines a number of functions that can be used
+to emulate the functionality of the dynamic linker (``dyld``)
+with respect to looking for library files and frameworks.
+
+.. function:: dyld_image_suffix([env])
+
+ Looks up the suffix to append to shared library and
+ framework names and returns this value when found.
+ Returns ``None`` when no suffix should be appended.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_IMAGE_SUFFIX`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_framework_path([env])
+
+ Returns a user-specified framework search path,
+ or an empty list when only the default search path
+ should be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FRAMEWORK_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_library_path([env])
+
+ Returns a user-specified library search path,
+ or an empty list when only the default search path
+ should be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_LIBRARY_PATH`` in the
+ manual page for dyld(1) for more information.
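+
+ For example, a sketch (assuming the value is split on colons, as
+ dyld(1) does; only the given *env* mapping is consulted)::
+
+    from macholib.dyld import dyld_library_path
+
+    env = {'DYLD_LIBRARY_PATH': '/opt/lib:/usr/local/lib'}
+    print(dyld_library_path(env=env))
+    # expected: ['/opt/lib', '/usr/local/lib']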
+
+.. function:: dyld_fallback_framework_path([env])
+
+ Returns a user-specified list of directories in which
+ to look for frameworks that aren't in their install path,
+ or an empty list when the default fallback path should
+ be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FALLBACK_FRAMEWORK_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_fallback_library_path([env])
+
+ Returns a user-specified list of directories in which
+ to look for libraries that aren't in their install path,
+ or an empty list when the default fallback path should
+ be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FALLBACK_LIBRARY_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_image_suffix_search(iterator[, env])
+
+ Yields all items in *iterator*; when an image suffix
+ is specified, each item is preceded by a variant of the
+ name with that suffix applied.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+.. function:: dyld_override_search(name[, env])
+
+ If *name* is a framework name, yield the filesystem
+ paths relative to the entries in the framework
+ search path.
+
+ Always yield the filesystem paths relative to the
+ entries in the library search path.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+.. function:: dyld_executable_path_search(name, executable_path)
+
+ If *name* is a path starting with ``@executable_path/``, yield
+ the path relative to the specified *executable_path*.
+
+ If *executable_path* is ``None``, nothing is yielded.
+
+.. function:: dyld_loader_search(name, loader_path)
+
+ If *name* is a path starting with ``@loader_path/``, yield
+ the path relative to the specified *loader_path*.
+
+ If *loader_path* is ``None``, nothing is yielded.
+
+ .. versionadded:: 1.6
+
+.. function:: dyld_default_search(name[, env])
+
+ Yield the filesystem locations to look for a dynamic
+ library or framework using the default locations
+ used by the system dynamic linker.
+
+ This function will look in ``~/Library/Frameworks``
+ for frameworks, even though the system dynamic linker
+ doesn't.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+.. function:: dyld_find(name[, executable_path[, env[, loader_path]]])
+
+ Returns the path of the requested dynamic library,
+ raises :exc:`ValueError` when the library cannot be found.
+
+ This function searches for the library in the same
+ locations as the system dynamic linker.
+
+ The *executable_path* should be the filesystem path
+ of the executable to which the library is linked (either
+ directly or indirectly).
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ The *loader_path* argument is an optional filesystem path for
+ the object file (binary or shared library) that references
+ *name*.
+
+ .. versionchanged:: 1.6
+
+ Added the *loader_path* argument.
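+
+ For example, a sketch of resolving a library the way the system
+ dynamic linker would (the resulting path depends on the system)::
+
+    from macholib.dyld import dyld_find
+
+    # raises ValueError when the library cannot be found
+    print(dyld_find('libSystem.B.dylib'))
+    # e.g. '/usr/lib/libSystem.B.dylib'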
+
+.. function:: framework_find(fn[, executable_path[, env]])
+
+ Finds a framework using the same semantics as the
+ system dynamic linker, but accepts looser names
+ than the system linker.
+
+ This function will return a correct result for input
+ values like:
+
+ * Python
+
+ * Python.framework
+
+ * Python.framework/Versions/Current
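+
+ For example, a sketch using the loosest form (the resulting path
+ depends on the installed frameworks)::
+
+    from macholib.dyld import framework_find
+
+    # all of the forms listed above resolve to the framework binary
+    print(framework_find('Python'))
+    # e.g. '/System/Library/Frameworks/Python.framework/Versions/2.7/Python'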
diff --git a/python/macholib/doc/_build/html/_sources/dylib.txt b/python/macholib/doc/_build/html/_sources/dylib.txt
new file mode 100644
index 000000000..969680212
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/dylib.txt
@@ -0,0 +1,33 @@
+:mod:`macholib.dylib` --- Generic dylib path manipulation
+=========================================================
+
+.. module:: macholib.dylib
+ :synopsis: Generic dylib path manipulation
+
+This module defines a function :func:`dylib_info` that can extract
+useful information from the name of a dynamic library.
+
+.. function:: dylib_info(filename)
+
+ A dylib name can take one of the following four forms:
+
+ * ``Location/Name.SomeVersion_Suffix.dylib``
+
+ * ``Location/Name.SomeVersion.dylib``
+
+ * ``Location/Name_Suffix.dylib``
+
+ * ``Location/Name.dylib``
+
+ Returns ``None`` if not found, or a mapping equivalent to::
+
+ dict(
+ location='Location',
+ name='Name.SomeVersion_Suffix.dylib',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
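+ For example, a sketch (the field values follow the forms above)::
+
+    from macholib.dylib import dylib_info
+
+    info = dylib_info('/usr/lib/libSystem.B.dylib')
+    # info['location'] == '/usr/lib'
+    # info['shortname'] == 'libSystem'
+    # info['version'] == 'B'
+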
+ .. note:: *SomeVersion* and *Suffix* are optional and may be ``None``
+ if not present.
diff --git a/python/macholib/doc/_build/html/_sources/framework.txt b/python/macholib/doc/_build/html/_sources/framework.txt
new file mode 100644
index 000000000..b58b5751c
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/framework.txt
@@ -0,0 +1,34 @@
+:mod:`macholib.framework` --- Generic framework path manipulation
+==========================================================================
+
+.. module:: macholib.framework
+ :synopsis: Generic framework path manipulation
+
+
+This module defines a function :func:`framework_info` that can extract
+useful information from the name of a dynamic library in a framework.
+
+.. function:: framework_info(filename)
+
+ A framework name can take one of the following four forms:
+
+ * ``Location/Name.framework/Versions/SomeVersion/Name_Suffix``
+
+ * ``Location/Name.framework/Versions/SomeVersion/Name``
+
+ * ``Location/Name.framework/Name_Suffix``
+
+ * ``Location/Name.framework/Name``
+
+ Returns ``None`` if not found, or a mapping equivalent to::
+
+ dict(
+ location='Location',
+ name='Name.framework/Versions/SomeVersion/Name_Suffix',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
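+ For example, a sketch (the field values follow the forms above)::
+
+    from macholib.framework import framework_info
+
+    info = framework_info(
+        '/Library/Frameworks/Python.framework/Versions/2.7/Python')
+    # info['shortname'] == 'Python'
+    # info['version'] == '2.7'
+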
+ .. note:: *SomeVersion* and *Suffix* are optional and may be ``None``
+ if not present.
diff --git a/python/macholib/doc/_build/html/_sources/index.txt b/python/macholib/doc/_build/html/_sources/index.txt
new file mode 100644
index 000000000..3067926a8
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/index.txt
@@ -0,0 +1,59 @@
+Macholib - Analyze and edit Mach-O headers
+==========================================
+
+macholib can be used to analyze and edit Mach-O headers, the executable
+format used by Mac OS X.
+
+It's typically used as a dependency analysis tool, and also to rewrite dylib
+references in Mach-O headers to be ``@executable_path`` relative.
+
+Though this tool targets a platform-specific file format, it is pure Python
+code that is platform- and endian-independent.
+
+General documentation
+---------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ changelog
+ license
+ scripts
+
+Reference Guide
+---------------
+
+.. toctree::
+ :maxdepth: 1
+
+ MachO
+ MachoOGraph
+ MachoOStandalone
+ SymbolTable
+ dyld
+ dylib
+ framework
+ macho_o
+ ptypes
+
+Online Resources
+----------------
+
+* `Source code repository on Bitbucket <http://bitbucket.org/ronaldoussoren/macholib/>`_
+
+* `The issue tracker <http://bitbucket.org/ronaldoussoren/macholib/issues>`_
+
+* `Mac OS X ABI Mach-O File Format Reference at Apple <http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html>`_
+
+Contributors
+------------
+
+Macholib was written by Bob Ippolito and is currently maintained by Ronald Oussoren <ronaldoussoren@mac.com>.
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/python/macholib/doc/_build/html/_sources/license.txt b/python/macholib/doc/_build/html/_sources/license.txt
new file mode 100644
index 000000000..f9c8cc50e
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/license.txt
@@ -0,0 +1,23 @@
+License
+=======
+
+Copyright (c) Bob Ippolito
+
+Parts are copyright (c) 2010-2014 Ronald Oussoren
+
+MIT License
+...........
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/python/macholib/doc/_build/html/_sources/macho_o.txt b/python/macholib/doc/_build/html/_sources/macho_o.txt
new file mode 100644
index 000000000..dfa347e8f
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/macho_o.txt
@@ -0,0 +1,13 @@
+:mod:`macholib.mach_o` --- Low-level definitions
+================================================
+
+.. module:: macholib.mach_o
+ :synopsis: Low-level definitions of elements in a Mach-O file
+
+This module defines constants and packable structure types
+that correspond to elements of a Mach-O file.
+
+The names of classes and constants are the same as those in
+the Mach-O header files and
+`Apple's documentation <http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html>`_. This document therefore
+doesn't explicitly document the names in this module.
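+
+For example, a short sketch using two of the exported names
+(``CPU_TYPE_NAMES`` is the mapping mentioned in the release history)::
+
+    from macholib.mach_o import MH_MAGIC, CPU_TYPE_NAMES
+
+    print(hex(MH_MAGIC))  # 0xfeedface, the 32-bit Mach-O magic number
+    # CPU_TYPE_NAMES maps CPU type values to names, e.g. 7 -> 'i386'
+    print(CPU_TYPE_NAMES.get(7))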
diff --git a/python/macholib/doc/_build/html/_sources/ptypes.txt b/python/macholib/doc/_build/html/_sources/ptypes.txt
new file mode 100644
index 000000000..ade2ac847
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/ptypes.txt
@@ -0,0 +1,157 @@
+:mod:`macholib.ptypes` --- Packable types
+=========================================
+
+.. module:: macholib.ptypes
+ :synopsis: Serializable types
+
+The module :mod:`macholib.ptypes` defines types that can be serialized into
+byte arrays, both for basic types and structured types (C ``struct`` values).
+
+Utility functions
+-----------------
+
+.. function:: sizeof(value)
+
+ Returns the size in bytes of an object when packed, raises :exc:`ValueError`
+ for inappropriate values.
+
+.. function:: pypackable(name, pytype, format)
+
+ Returns a packable type that is a subclass of the Python type
+ *pytype*. The value is converted to and from the packed format using
+ the struct *format*.
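+
+ For example, a sketch that defines a 32-bit unsigned integer type
+ similar to the built-in :class:`p_uint32` (``'I'`` is the struct
+ format for a 4-byte unsigned integer)::
+
+    from macholib.ptypes import pypackable
+
+    my_uint32 = pypackable('my_uint32', int, 'I')
+    value = my_uint32(42)
+    packed = value.to_str()  # 4 bytes in the type's byte order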
+
+
+
+Packable types
+--------------
+
+
+.. class:: BasePackable
+
+ All packable types are a subclass of :class:`BasePackable`, which defines
+ the basic interface but is itself an abstract base class.
+
+ .. data:: _endian_
+
+ The byte order of a packed value. This will be ``"<"`` for
+ little-endian values and ``">"`` for big-endian ones.
+
+ .. note:: the endianness attribute is public in order to
+ support both big- and little-endian file formats.
+
+ The name suggests that this attribute is private; this
+ is partially for historical reasons and partially to
+ avoid conflicts with field names in C structs.
+
+ .. method:: from_mmap(mmap, ptr, \**kw)
+
+ This class method constructs the value from a subview of a
+ :class:`mmap.mmap` object. It uses the bytes starting at offset *ptr*,
+ reading just enough to reconstruct the entire object.
+
+ .. method:: from_fileobj(fp, \**kw)
+
+ This class method constructs the value by reading just enough bytes
+ from a file-like object.
+
+ .. note:: The file must be opened in binary mode, that is, read calls
+ should return byte strings rather than unicode strings.
+
+ .. method:: from_str(value, \**kw)
+
+ This class method constructs the value by using the struct module
+ to parse the given bytes.
+
+ .. note:: contrary to what the name suggests, the argument to this
+ method is a byte string, not a unicode string.
+
+ .. method:: from_tuple(tpl, \**kw)
+
+ This class method constructs the object from the tuple *tpl*,
+ which contains a value for every field.
+
+
+ .. method:: to_str()
+
+ Returns a byte representation of the value.
+
+ .. note:: there is no default implementation for this method
+
+ .. method:: to_fileobj(fp)
+
+ Write a byte representation of the value to the given file-like
+ object. The file should be opened in binary mode.
+
+ .. method:: to_mmap(mmap, ptr)
+
+ Write the byte representation of the value to a :class:`mmap.mmap`
+ object, starting at offset *ptr*.
+
+
+.. class:: Structure(...)
+
+ .. data:: _fields_
+
+ This class attribute is a list that contains the fields of the
+ structure in the right order. Every item of this list is a tuple
+ with two elements: the first is the name of the field, and
+ the second is the packable type for the field.
+
+ Every subclass of :class:`Structure` must define *_fields_* to be
+ useful, and the value of *_fields_* should not be changed after
+ class construction.
+
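+ A sketch of defining and packing a structure (the field layout is
+ illustrative, not a real Mach-O structure, and keyword-argument
+ initialization is assumed)::
+
+    from macholib.ptypes import Structure, p_uint32
+
+    class example_header(Structure):
+        _fields_ = [
+            ('magic', p_uint32),
+            ('ncmds', p_uint32),
+        ]
+
+    hdr = example_header(magic=0xfeedface, ncmds=0)
+    packed = hdr.to_str()  # fields are packed in _fields_ order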
+
+Basic packables
+---------------
+
+Other than the core functionality this module defines a number of
+:func:`pypackable` types that correspond to useful basic C types.
+
+.. class:: p_char([value])
+
+ A byte string of length 1
+
+.. class:: p_int8
+
+ An 8-bit signed integer
+
+.. class:: p_uint8
+
+ An 8-bit unsigned integer
+
+.. class:: p_int16
+
+ A 16-bit signed integer
+
+.. class:: p_uint16
+
+ A 16-bit unsigned integer
+
+.. class:: p_int32
+
+ A 32-bit signed integer
+
+.. class:: p_uint32
+
+ A 32-bit unsigned integer
+
+.. class:: p_int64
+
+ A 64-bit signed integer
+
+.. class:: p_uint64
+
+ A 64-bit unsigned integer
+
+.. class:: p_float
+
+ A floating-point value of type ``float``
+
+.. class:: p_double
+
+ A floating-point value of type ``double``
+
+.. note:: the module exports a number of other types with
+ names starting with ``p_``, such as ``p_int``. Those types
+ are deprecated and should not be used.
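+
+For example, a round-trip sketch using one of the basic packables and
+the ``to_str()`` / ``from_str()`` methods documented above::
+
+    from macholib.ptypes import p_uint32
+
+    value = p_uint32(0xfeedface)
+    packed = value.to_str()  # bytes in the type's byte order
+    assert p_uint32.from_str(packed) == value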
diff --git a/python/macholib/doc/_build/html/_sources/scripts.txt b/python/macholib/doc/_build/html/_sources/scripts.txt
new file mode 100644
index 000000000..50cf38028
--- /dev/null
+++ b/python/macholib/doc/_build/html/_sources/scripts.txt
@@ -0,0 +1,35 @@
+Command-line tools
+==================
+
+python -m macholib find
+-----------------------
+
+Usage::
+
+ $ python -mmacholib find dir...
+
+Prints the paths of all Mach-O binaries
+in the specified directories.
+
+python -m macholib standalone
+-----------------------------
+
+Usage::
+
+ $ python -m macholib standalone appbundle...
+
+Converts one or more application bundles into
+standalone bundles. That is, copies all non-system
+shared libraries and frameworks used by the bundle
+into the bundle and rewrites the load commands.
+
+python -m macholib dump
+-----------------------
+
+Usage::
+
+ $ python -mmacholib dump dir...
+
+Prints information about all architectures in a
+Mach-O file as well as all libraries it links
+to.
diff --git a/python/macholib/doc/_build/html/_static/ajax-loader.gif b/python/macholib/doc/_build/html/_static/ajax-loader.gif
new file mode 100644
index 000000000..61faf8cab
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/ajax-loader.gif
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/basic.css b/python/macholib/doc/_build/html/_static/basic.css
new file mode 100644
index 000000000..c959cf0db
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/basic.css
@@ -0,0 +1,537 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+ width: 100%;
+ font-size: 90%;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.sphinxsidebar ul {
+ list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+ width: 170px;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+ width: 30px;
+}
+
+img {
+ border: 0;
+ max-width: 100%;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+ width: 100%;
+}
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+div.modindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+ clear: left;
+ float: left;
+ margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+ clear: right;
+ float: right;
+ margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left;
+}
+
+.align-center {
+ text-align: center;
+}
+
+.align-right {
+ text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border: 0;
+ border-collapse: collapse;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 5px;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px;
+}
+
+table.citation td {
+ border-bottom: none;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+dt:target, .highlighted {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa;
+}
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+ font-family: sans-serif;
+}
+
+.accelerator {
+ text-decoration: underline;
+}
+
+.classifier {
+ font-style: oblique;
+}
+
+abbr, acronym {
+ border-bottom: dotted 1px;
+ cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ overflow-y: hidden; /* fixes display issues on Chrome browsers */
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0 !important;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+} \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_static/comment-bright.png b/python/macholib/doc/_build/html/_static/comment-bright.png
new file mode 100644
index 000000000..551517b8c
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/comment-bright.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/comment-close.png b/python/macholib/doc/_build/html/_static/comment-close.png
new file mode 100644
index 000000000..09b54be46
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/comment-close.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/comment.png b/python/macholib/doc/_build/html/_static/comment.png
new file mode 100644
index 000000000..92feb52b8
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/comment.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/doctools.js b/python/macholib/doc/_build/html/_static/doctools.js
new file mode 100644
index 000000000..2036e5f5f
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/doctools.js
@@ -0,0 +1,238 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug like debugger
+if (!window.console || !console.firebug) {
+ var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+ "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+ "profile", "profileEnd"];
+ window.console = {};
+ for (var i = 0; i < names.length; ++i)
+ window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+ return decodeURIComponent(x).replace(/\+/g, ' ');
+};
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+ if (typeof s == 'undefined')
+ s = document.location.search;
+ var parts = s.substr(s.indexOf('?') + 1).split('&');
+ var result = {};
+ for (var i = 0; i < parts.length; i++) {
+ var tmp = parts[i].split('=', 2);
+ var key = jQuery.urldecode(tmp[0]);
+ var value = jQuery.urldecode(tmp[1]);
+ if (key in result)
+ result[key].push(value);
+ else
+ result[key] = [value];
+ }
+ return result;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+ function highlight(node) {
+ if (node.nodeType == 3) {
+ var val = node.nodeValue;
+ var pos = val.toLowerCase().indexOf(text);
+ if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
+ var span = document.createElement("span");
+ span.className = className;
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+ document.createTextNode(val.substr(pos + text.length)),
+ node.nextSibling));
+ node.nodeValue = val.substr(0, pos);
+ }
+ }
+ else if (!jQuery(node).is("button, select, textarea")) {
+ jQuery.each(node.childNodes, function() {
+ highlight(this);
+ });
+ }
+ }
+ return this.each(function() {
+ highlight(this);
+ });
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+ init : function() {
+ this.fixFirefoxAnchorBug();
+ this.highlightSearchWords();
+ this.initIndexTable();
+ },
+
+ /**
+ * i18n support
+ */
+ TRANSLATIONS : {},
+ PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+ LOCALE : 'unknown',
+
+ // gettext and ngettext don't access this so that the functions
+ // can safely bound to a different name (_ = Documentation.gettext)
+ gettext : function(string) {
+ var translated = Documentation.TRANSLATIONS[string];
+ if (typeof translated == 'undefined')
+ return string;
+ return (typeof translated == 'string') ? translated : translated[0];
+ },
+
+ ngettext : function(singular, plural, n) {
+ var translated = Documentation.TRANSLATIONS[singular];
+ if (typeof translated == 'undefined')
+ return (n == 1) ? singular : plural;
+ return translated[Documentation.PLURALEXPR(n)];
+ },
+
+ addTranslations : function(catalog) {
+ for (var key in catalog.messages)
+ this.TRANSLATIONS[key] = catalog.messages[key];
+ this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+ this.LOCALE = catalog.locale;
+ },
+
+ /**
+ * add context elements like header anchor links
+ */
+ addContextElements : function() {
+ $('div[id] > :header:first').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this headline')).
+ appendTo(this);
+ });
+ $('dt[id]').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this definition')).
+ appendTo(this);
+ });
+ },
+
+ /**
+ * workaround a firefox stupidity
+ */
+ fixFirefoxAnchorBug : function() {
+ if (document.location.hash && $.browser.mozilla)
+ window.setTimeout(function() {
+ document.location.href += '';
+ }, 10);
+ },
+
+ /**
+ * highlight the search words provided in the url in the text
+ */
+ highlightSearchWords : function() {
+ var params = $.getQueryParameters();
+ var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+ if (terms.length) {
+ var body = $('div.body');
+ if (!body.length) {
+ body = $('body');
+ }
+ window.setTimeout(function() {
+ $.each(terms, function() {
+ body.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ }, 10);
+ $('<p class="highlight-link"><a href="javascript:Documentation.' +
+ 'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+ .appendTo($('#searchbox'));
+ }
+ },
+
+ /**
+ * init the domain index toggle buttons
+ */
+ initIndexTable : function() {
+ var togglers = $('img.toggler').click(function() {
+ var src = $(this).attr('src');
+ var idnum = $(this).attr('id').substr(7);
+ $('tr.cg-' + idnum).toggle();
+ if (src.substr(-9) == 'minus.png')
+ $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+ else
+ $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+ }).css('display', '');
+ if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+ togglers.click();
+ }
+ },
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords : function() {
+ $('#searchbox .highlight-link').fadeOut(300);
+ $('span.highlighted').removeClass('highlighted');
+ },
+
+ /**
+ * make the url absolute
+ */
+ makeURL : function(relativeURL) {
+ return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+ },
+
+ /**
+ * get the current relative url
+ */
+ getCurrentURL : function() {
+ var path = document.location.pathname;
+ var parts = path.split(/\//);
+ $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+ if (this == '..')
+ parts.pop();
+ });
+ var url = parts.join('/');
+ return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+ }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+ Documentation.init();
+});
diff --git a/python/macholib/doc/_build/html/_static/down-pressed.png b/python/macholib/doc/_build/html/_static/down-pressed.png
new file mode 100644
index 000000000..6f7ad7827
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/down-pressed.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/down.png b/python/macholib/doc/_build/html/_static/down.png
new file mode 100644
index 000000000..3003a8877
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/down.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/file.png b/python/macholib/doc/_build/html/_static/file.png
new file mode 100644
index 000000000..d18082e39
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/file.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/jquery.js b/python/macholib/doc/_build/html/_static/jquery.js
new file mode 100644
index 000000000..83589daa7
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/jquery.js
@@ -0,0 +1,2 @@
+/*! jQuery v1.8.3 jquery.com | jquery.org/license */
+(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r<i;r++)v.event.add(t,n,u[n][r])}o.data&&(o.data=v.extend({},o.data))}function Ot(e,t){var n;if(t.nodeType!==1)return;t.clearAttributes&&t.clearAttributes(),t.mergeAttributes&&t.mergeAttributes(e),n=t.nodeName.toLowerCase(),n==="object"?(t.parentNode&&(t.outerHTML=e.outerHTML),v.support.html5Clone&&e.innerHTML&&!v.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):n==="input"&&Et.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):n==="option"?t.selected=e.defaultSelected:n==="input"||n==="textarea"?t.defaultValue=e.defaultValue:n==="script"&&t.text!==e.text&&(t.text=e.text),t.removeAttribute(v.expando)}function Mt(e){return typeof e.getElementsByTagName!="undefined"?e.getElementsByTagName("*"):typeof e.querySelectorAll!="undefined"?e.querySelectorAll("*"):[]}function _t(e){Et.test(e.type)&&(e.defaultChecked=e.checked)}function Qt(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=Jt.length;while(i--){t=Jt[i]+n;if(t in e)return t}return r}function Gt(e,t){return e=t||e,v.css(e,"display")==="none"||!v.contains(e.ownerDocument,e)}function Yt(e,t){var n,r,i=[],s=0,o=e.length;for(;s<o;s++){n=e[s];if(!n.style)continue;i[s]=v._data(n,"olddisplay"),t?(!i[s]&&n.style.display==="none"&&(n.style.display=""),n.style.display===""&&Gt(n)&&(i[s]=v._data(n,"olddisplay",nn(n.nodeName)))):(r=Dt(n,"display"),!i[s]&&r!=="none"&&v._data(n,"olddisplay",r))}for(s=0;s<o;s++){n=e[s];if(!n.style)continue;if(!t||n.style.display==="none"||n.style.display==="")n.style.display=t?i[s]||"":"none"}return e}function Zt(e,t,n){var r=Rt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function en(e,t,n,r){var i=n===(r?"border":"content")?4:t==="width"?1:0,s=0;for(;i<4;i+=2)n==="margin"&&(s+=v.css(e,n+$t[i],!0)),r?(n==="content"&&(s-=parseFloat(Dt(e,"padding"+$t[i]))||0),n!=="margin"&&(s-=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0)):(s+=parseFloat(Dt(e,"padding"+$t[i]))||0,n!=="padding"&&(s+=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0));return s}function tn(e,t,n){var 
r=t==="width"?e.offsetWidth:e.offsetHeight,i=!0,s=v.support.boxSizing&&v.css(e,"boxSizing")==="border-box";if(r<=0||r==null){r=Dt(e,t);if(r<0||r==null)r=e.style[t];if(Ut.test(r))return r;i=s&&(v.support.boxSizingReliable||r===e.style[t]),r=parseFloat(r)||0}return r+en(e,t,n||(s?"border":"content"),i)+"px"}function nn(e){if(Wt[e])return Wt[e];var t=v("<"+e+">").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write("<!doctype html><html><body>"),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u<a;u++)r=o[u],s=/^\+/.test(r),s&&(r=r.substr(1)||"*"),i=e[r]=e[r]||[],i[s?"unshift":"push"](n)}}function kn(e,n,r,i,s,o){s=s||n.dataTypes[0],o=o||{},o[s]=!0;var u,a=e[s],f=0,l=a?a.length:0,c=e===Sn;for(;f<l&&(c||!u);f++)u=a[f](n,r,i),typeof u=="string"&&(!c||o[u]?u=t:(n.dataTypes.unshift(u),u=kn(e,n,r,i,u,o)));return(c||!u)&&!o["*"]&&(u=kn(e,n,r,i,"*",o)),u}function Ln(e,n){var r,i,s=v.ajaxSettings.flatOptions||{};for(r in n)n[r]!==t&&((s[r]?e:i||(i={}))[r]=n[r]);i&&v.extend(!0,e,i)}function An(e,n,r){var i,s,o,u,a=e.contents,f=e.dataTypes,l=e.responseFields;for(s in l)s in r&&(n[l[s]]=r[s]);while(f[0]==="*")f.shift(),i===t&&(i=e.mimeType||n.getResponseHeader("content-type"));if(i)for(s in a)if(a[s]&&a[s].test(i)){f.unshift(s);break}if(f[0]in r)o=f[0];else{for(s in r){if(!f[0]||e.converters[s+" "+f[0]]){o=s;break}u||(u=s)}o=o||u}if(o)return o!==f[0]&&f.unshift(o),r[o]}function On(e,t){var n,r,i,s,o=e.dataTypes.slice(),u=o[0],a={},f=0;e.dataFilter&&(t=e.dataFilter(t,e.dataType));if(o[1])for(n in e.converters)a[n.toLowerCase()]=e.converters[n];for(;i=o[++f];)if(i!=="*"){if(u!=="*"&&u!==i){n=a[u+" "+i]||a["* "+i];if(!n)for(r in a){s=r.split(" ");if(s[1]===i){n=a[u+" "+s[0]]||a["* "+s[0]];if(n){n===!0?n=a[r]:a[r]!==!0&&(i=s[0],o.splice(f--,0,i));break}}}if(n!==!0)if(n&&e["throws"])t=n(t);else try{t=n(t)}catch(l){return{state:"parsererror",error:n?l:"No conversion from "+u+" to "+i}}}u=i}return{state:"success",data:t}}function Fn(){try{return new e.XMLHttpRequest}catch(t){}}function In(){try{return new e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}function $n(){return setTimeout(function(){qn=t},0),qn=v.now()}function Jn(e,t){v.each(t,function(t,n){var r=(Vn[t]||[]).concat(Vn["*"]),i=0,s=r.length;for(;i<s;i++)if(r[i].call(e,t,n))return})}function Kn(e,t,n){var r,i=0,s=0,o=Xn.length,u=v.Deferred().always(function(){delete a.elem}),a=function(){var t=qn||$n(),n=Math.max(0,f.startTime+f.duration-t),r=n/f.duration||0,i=1-r,s=0,o=f.tweens.length;for(;s<o;s++)f.tweens[s].run(i);return u.notifyWith(e,[f,i,n]),i<1&&o?n:(u.resolveWith(e,[f]),!1)},f=u.promise({elem:e,props:v.extend({},t),opts:v.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:qn||$n(),duration:n.duration,tweens:[],createTween:function(t,n,r){var i=v.Tween(e,f.opts,t,n,f.opts.specialEasing[t]||f.opts.easing);return f.tweens.push(i),i},stop:function(t){var 
[minified jQuery v1.8.3 — machine-generated, single-line vendored library source (bundled documentation static asset), elided. The excerpt spans jQuery core init/extend/ready, fx/animation tweens, Callbacks, Deferred, feature-support tests, data, queue, attr/prop hooks, the event system, the Sizzle selector engine, traversal, DOM manipulation, CSS hooks, serialization, and the start of the ajax module.]
\t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("<div>").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return 
this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in 
Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r<i;r++)n=e[r],Vn[n]=Vn[n]||[],Vn[n].unshift(t)},prefilter:function(e,t){t?Xn.unshift(e):Xn.push(e)}}),v.Tween=Yn,Yn.prototype={constructor:Yn,init:function(e,t,n,r,i,s){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=s||(v.cssNumber[n]?"":"px")},cur:function(){var e=Yn.propHooks[this.prop];return e&&e.get?e.get(this):Yn.propHooks._default.get(this)},run:function(e){var t,n=Yn.propHooks[this.prop];return this.options.duration?this.pos=t=v.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):Yn.propHooks._default.set(this),this}},Yn.prototype.init.prototype=Yn.prototype,Yn.propHooks={_default:{get:function(e){var t;return e.elem[e.prop]==null||!!e.elem.style&&e.elem.style[e.prop]!=null?(t=v.css(e.elem,e.prop,!1,""),!t||t==="auto"?0:t):e.elem[e.prop]},set:function(e){v.fx.step[e.prop]?v.fx.step[e.prop](e):e.elem.style&&(e.elem.style[v.cssProps[e.prop]]!=null||v.cssHooks[e.prop])?v.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},Yn.propHooks.scrollTop=Yn.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},v.each(["toggle","show","hide"],function(e,t){var n=v.fn[t];v.fn[t]=function(r,i,s){return r==null||typeof r=="boolean"||!e&&v.isFunction(r)&&v.isFunction(i)?n.apply(this,arguments):this.animate(Zn(t,!0),r,i,s)}}),v.fn.extend({fadeTo:function(e,t,n,r){return this.filter(Gt).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var 
i=v.isEmptyObject(e),s=v.speed(t,n,r),o=function(){var t=Kn(this,v.extend({},e),s);i&&t.stop(!0)};return i||s.queue===!1?this.each(o):this.queue(s.queue,o)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return typeof e!="string"&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=e!=null&&e+"queueHooks",s=v.timers,o=v._data(this);if(n)o[n]&&o[n].stop&&i(o[n]);else for(n in o)o[n]&&o[n].stop&&Wn.test(n)&&i(o[n]);for(n=s.length;n--;)s[n].elem===this&&(e==null||s[n].queue===e)&&(s[n].anim.stop(r),t=!1,s.splice(n,1));(t||!r)&&v.dequeue(this,e)})}}),v.each({slideDown:Zn("show"),slideUp:Zn("hide"),slideToggle:Zn("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){v.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),v.speed=function(e,t,n){var r=e&&typeof e=="object"?v.extend({},e):{complete:n||!n&&t||v.isFunction(e)&&e,duration:e,easing:n&&t||t&&!v.isFunction(t)&&t};r.duration=v.fx.off?0:typeof r.duration=="number"?r.duration:r.duration in v.fx.speeds?v.fx.speeds[r.duration]:v.fx.speeds._default;if(r.queue==null||r.queue===!0)r.queue="fx";return r.old=r.complete,r.complete=function(){v.isFunction(r.old)&&r.old.call(this),r.queue&&v.dequeue(this,r.queue)},r},v.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},v.timers=[],v.fx=Yn.prototype.init,v.fx.tick=function(){var e,n=v.timers,r=0;qn=v.now();for(;r<n.length;r++)e=n[r],!e()&&n[r]===e&&n.splice(r--,1);n.length||v.fx.stop(),qn=t},v.fx.timer=function(e){e()&&v.timers.push(e)&&!Rn&&(Rn=setInterval(v.fx.tick,v.fx.interval))},v.fx.interval=13,v.fx.stop=function(){clearInterval(Rn),Rn=null},v.fx.speeds={slow:600,fast:200,_default:400},v.fx.step={},v.expr&&v.expr.filters&&(v.expr.filters.animated=function(e){return v.grep(v.timers,function(t){return e===t.elem}).length});var er=/^(?:body|html)$/i;v.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){v.offset.setOffset(this,e,t)});var n,r,i,s,o,u,a,f={top:0,left:0},l=this[0],c=l&&l.ownerDocument;if(!c)return;return(r=c.body)===l?v.offset.bodyOffset(l):(n=c.documentElement,v.contains(n,l)?(typeof l.getBoundingClientRect!="undefined"&&(f=l.getBoundingClientRect()),i=tr(c),s=n.clientTop||r.clientTop||0,o=n.clientLeft||r.clientLeft||0,u=i.pageYOffset||n.scrollTop,a=i.pageXOffset||n.scrollLeft,{top:f.top+u-s,left:f.left+a-o}):f)},v.offset={bodyOffset:function(e){var t=e.offsetTop,n=e.offsetLeft;return v.support.doesNotIncludeMarginInBodyOffset&&(t+=parseFloat(v.css(e,"marginTop"))||0,n+=parseFloat(v.css(e,"marginLeft"))||0),{top:t,left:n}},setOffset:function(e,t,n){var r=v.css(e,"position");r==="static"&&(e.style.position="relative");var i=v(e),s=i.offset(),o=v.css(e,"top"),u=v.css(e,"left"),a=(r==="absolute"||r==="fixed")&&v.inArray("auto",[o,u])>-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var 
e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_static/minus.png b/python/macholib/doc/_build/html/_static/minus.png
new file mode 100644
index 000000000..da1c5620d
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/minus.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/nature.css b/python/macholib/doc/_build/html/_static/nature.css
new file mode 100644
index 000000000..f46081870
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/nature.css
@@ -0,0 +1,245 @@
+/*
+ * nature.css_t
+ * ~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- nature theme.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: Arial, sans-serif;
+ font-size: 100%;
+ background-color: #111;
+ color: #555;
+ margin: 0;
+ padding: 0;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+hr {
+ border: 1px solid #B1B4B6;
+}
+
+div.document {
+ background-color: #eee;
+}
+
+div.body {
+ background-color: #ffffff;
+ color: #3E4349;
+ padding: 0 30px 30px 30px;
+ font-size: 0.9em;
+}
+
+div.footer {
+ color: #555;
+ width: 100%;
+ padding: 13px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #444;
+ text-decoration: underline;
+}
+
+div.related {
+ background-color: #6BA81E;
+ line-height: 32px;
+ color: #fff;
+ text-shadow: 0px 1px 0 #444;
+ font-size: 0.9em;
+}
+
+div.related a {
+ color: #E2F3CC;
+}
+
+div.sphinxsidebar {
+ font-size: 0.75em;
+ line-height: 1.5em;
+}
+
+div.sphinxsidebarwrapper{
+ padding: 20px 0;
+}
+
+div.sphinxsidebar h3,
+div.sphinxsidebar h4 {
+ font-family: Arial, sans-serif;
+ color: #222;
+ font-size: 1.2em;
+ font-weight: normal;
+ margin: 0;
+ padding: 5px 10px;
+ background-color: #ddd;
+    text-shadow: 1px 1px 0 white;
+}
+
+div.sphinxsidebar h4{
+ font-size: 1.1em;
+}
+
+div.sphinxsidebar h3 a {
+ color: #444;
+}
+
+
+div.sphinxsidebar p {
+ color: #888;
+ padding: 5px 20px;
+}
+
+div.sphinxsidebar p.topless {
+}
+
+div.sphinxsidebar ul {
+ margin: 10px 20px;
+ padding: 0;
+ color: #000;
+}
+
+div.sphinxsidebar a {
+ color: #444;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #ccc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar input[type=text]{
+ margin-left: 20px;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+a {
+ color: #005B81;
+ text-decoration: none;
+}
+
+a:hover {
+ color: #E32E00;
+ text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: Arial, sans-serif;
+ background-color: #BED4EB;
+ font-weight: normal;
+ color: #212224;
+ margin: 30px 0px 10px 0px;
+ padding: 5px 0 5px 10px;
+    text-shadow: 0px 1px 0 white;
+}
+
+div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 150%; background-color: #C8D5E3; }
+div.body h3 { font-size: 120%; background-color: #D8DEE3; }
+div.body h4 { font-size: 110%; background-color: #D8DEE3; }
+div.body h5 { font-size: 100%; background-color: #D8DEE3; }
+div.body h6 { font-size: 100%; background-color: #D8DEE3; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ line-height: 1.5em;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.highlight{
+ background-color: white;
+}
+
+div.note {
+ background-color: #eee;
+ border: 1px solid #ccc;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.topic {
+ background-color: #eee;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+p.admonition-title {
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+pre {
+ padding: 10px;
+ background-color: White;
+ color: #222;
+ line-height: 1.2em;
+ border: 1px solid #C6C9CB;
+ font-size: 1.1em;
+ margin: 1.5em 0 1.5em 0;
+ -webkit-box-shadow: 1px 1px 1px #d8d8d8;
+ -moz-box-shadow: 1px 1px 1px #d8d8d8;
+}
+
+tt {
+ background-color: #ecf0f3;
+ color: #222;
+ /* padding: 1px 2px; */
+ font-size: 1.1em;
+ font-family: monospace;
+}
+
+.viewcode-back {
+ font-family: Arial, sans-serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+} \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_static/plus.png b/python/macholib/doc/_build/html/_static/plus.png
new file mode 100644
index 000000000..b3cb37425
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/plus.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/pygments.css b/python/macholib/doc/_build/html/_static/pygments.css
new file mode 100644
index 000000000..d79caa151
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/pygments.css
@@ -0,0 +1,62 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #eeffcc; }
+.highlight .c { color: #408090; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #333333 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0044DD } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #208050 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #208050 } /* Literal.Number.Float */
+.highlight .mh { color: #208050 } /* Literal.Number.Hex */
+.highlight .mi { color: #208050 } /* Literal.Number.Integer */
+.highlight .mo { color: #208050 } /* Literal.Number.Oct */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_static/searchtools.js b/python/macholib/doc/_build/html/_static/searchtools.js
new file mode 100644
index 000000000..f5c7e5fee
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/searchtools.js
@@ -0,0 +1,622 @@
+/*
+ * searchtools.js_t
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
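+
+  // Editorial note: in Porter's notation a word has the form [C](VC)^m[V],
+  // and the regexps above test the measure m. For example, "tree" and "by"
+  // have m=0, "trouble" and "oats" have m=1, "private" and "oaten" have m=2.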
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
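+
+// Editorial sketch (not part of the original file): the stemmer maps
+// inflected forms onto a common root so query terms and indexed terms
+// align. Sample outputs under the Porter rules implemented above:
+//   var stemmer = new Stemmer();
+//   stemmer.stemWord("caresses");    // -> "caress"  (Step 1a: sses -> ss)
+//   stemmer.stemWord("ponies");      // -> "poni"    (Step 1a: ies  -> i)
+//   stemmer.stemWord("relational");  // -> "relat"   (Steps 2 and 5a)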
+
+
+
+/**
+ * Simple result scoring code.
+ */
+var Scorer = {
+ // Implement the following function to further tweak the score for each result
+ // The function takes a result array [filename, title, anchor, descr, score]
+ // and returns the new score.
+ /*
+ score: function(result) {
+ return result[4];
+ },
+ */
+
+ // query matches the full name of an object
+ objNameMatch: 11,
+ // or matches in the last dotted part of the object name
+ objPartialMatch: 6,
+ // Additive scores depending on the priority of the object
+ objPrio: {0: 15, // used to be importantResults
+ 1: 5, // used to be objectResults
+ 2: -5}, // used to be unimportantResults
+ // Used when the priority is not in the mapping.
+ objPrioDefault: 0,
+
+ // query found in title
+ title: 15,
+ // query found in terms
+ term: 5
+};
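+
+// Editorial sketch: with the defaults above, an exact hit on an object's
+// full name with priority 1 scores objNameMatch + objPrio[1] = 11 + 5 = 16,
+// while a query term found only in a page title scores Scorer.title = 15.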
+
+
+/**
+ * Search Module
+ */
+var Search = {
+
+ _index : null,
+ _queued_query : null,
+ _pulse_status : -1,
+
+ init : function() {
+ var params = $.getQueryParameters();
+ if (params.q) {
+ var query = params.q[0];
+ $('input[name="q"]')[0].value = query;
+ this.performSearch(query);
+ }
+ },
+
+ loadIndex : function(url) {
+ $.ajax({type: "GET", url: url, data: null,
+ dataType: "script", cache: true,
+ complete: function(jqxhr, textstatus) {
+ if (textstatus != "success") {
+ document.getElementById("searchindexloader").src = url;
+ }
+ }});
+ },
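+
+  // Editorial note: in a built Sphinx site, url is typically
+  // DOCUMENTATION_OPTIONS.URL_ROOT + 'searchindex.js', a script that
+  // invokes Search.setIndex({...}) as soon as it is evaluated.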
+
+ setIndex : function(index) {
+ var q;
+ this._index = index;
+ if ((q = this._queued_query) !== null) {
+ this._queued_query = null;
+ Search.query(q);
+ }
+ },
+
+ hasIndex : function() {
+ return this._index !== null;
+ },
+
+ deferQuery : function(query) {
+ this._queued_query = query;
+ },
+
+ stopPulse : function() {
+ this._pulse_status = 0;
+ },
+
+ startPulse : function() {
+ if (this._pulse_status >= 0)
+ return;
+ function pulse() {
+ var i;
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ var dotString = '';
+ for (i = 0; i < Search._pulse_status; i++)
+ dotString += '.';
+ Search.dots.text(dotString);
+ if (Search._pulse_status > -1)
+ window.setTimeout(pulse, 500);
+ }
+ pulse();
+ },
+
+ /**
+ * perform a search for something (or wait until index is loaded)
+ */
+ performSearch : function(query) {
+ // create the required interface elements
+ this.out = $('#search-results');
+ this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+ this.dots = $('<span></span>').appendTo(this.title);
+ this.status = $('<p style="display: none"></p>').appendTo(this.out);
+ this.output = $('<ul class="search"/>').appendTo(this.out);
+
+ $('#search-progress').text(_('Preparing search...'));
+ this.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (this.hasIndex())
+ this.query(query);
+ else
+ this.deferQuery(query);
+ },
+
+ /**
+ * execute search (requires search index to be loaded)
+ */
+ query : function(query) {
+ var i;
+ var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];
+
+ // stem the searchterms and add them to the correct list
+ var stemmer = new Stemmer();
+ var searchterms = [];
+ var excluded = [];
+ var hlterms = [];
+ var tmp = query.split(/\s+/);
+ var objectterms = [];
+ for (i = 0; i < tmp.length; i++) {
+ if (tmp[i] !== "") {
+ objectterms.push(tmp[i].toLowerCase());
+ }
+
+ if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
+ tmp[i] === "") {
+ // skip this "word"
+ continue;
+ }
+ // stem the word
+ var word = stemmer.stemWord(tmp[i].toLowerCase());
+ var toAppend;
+ // select the correct list
+ if (word[0] == '-') {
+ toAppend = excluded;
+ word = word.substr(1);
+ }
+ else {
+ toAppend = searchterms;
+ hlterms.push(tmp[i].toLowerCase());
+ }
+ // only add if not already in the list
+ if (!$u.contains(toAppend, word))
+ toAppend.push(word);
+ }
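+
+    // Editorial example: for the query "sphinx -build", "sphinx" is stemmed
+    // into searchterms (and kept verbatim in hlterms), while the leading "-"
+    // routes "build" into excluded, so matching files must not contain it.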
+ var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+ // console.debug('SEARCH: searching for:');
+ // console.info('required: ', searchterms);
+ // console.info('excluded: ', excluded);
+
+ // prepare search
+ var terms = this._index.terms;
+ var titleterms = this._index.titleterms;
+
+ // array of [filename, title, anchor, descr, score]
+ var results = [];
+ $('#search-progress').empty();
+
+ // lookup as object
+ for (i = 0; i < objectterms.length; i++) {
+ var others = [].concat(objectterms.slice(0, i),
+ objectterms.slice(i+1, objectterms.length));
+ results = results.concat(this.performObjectSearch(objectterms[i], others));
+ }
+
+ // lookup as search terms in fulltext
+ results = results.concat(this.performTermsSearch(searchterms, excluded, terms, Scorer.term))
+ .concat(this.performTermsSearch(searchterms, excluded, titleterms, Scorer.title));
+
+ // let the scorer override scores with a custom scoring function
+ if (Scorer.score) {
+ for (i = 0; i < results.length; i++)
+ results[i][4] = Scorer.score(results[i]);
+ }
+
+    // now sort the results ascending by score (the display function below
+    // uses pop() to retrieve items, so the best match is shown first) and,
+    // for equal scores, reverse-alphabetically by title
+ results.sort(function(a, b) {
+ var left = a[4];
+ var right = b[4];
+ if (left > right) {
+ return 1;
+ } else if (left < right) {
+ return -1;
+ } else {
+        // same score: sort reverse-alphabetically, so pop() yields A-Z order
+ left = a[1].toLowerCase();
+ right = b[1].toLowerCase();
+ return (left > right) ? -1 : ((left < right) ? 1 : 0);
+ }
+ });
+
+ // for debugging
+ //Search.lastresults = results.slice(); // a copy
+ //console.info('search results:', Search.lastresults);
+
+ // print the results
+ var resultCount = results.length;
+ function displayNextItem() {
+ // results left, load the summary and display it
+ if (results.length) {
+ var item = results.pop();
+ var listItem = $('<li style="display:none"></li>');
+ if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
+ // dirhtml builder
+ var dirname = item[0] + '/';
+ if (dirname.match(/\/index\/$/)) {
+ dirname = dirname.substring(0, dirname.length-6);
+ } else if (dirname == 'index/') {
+ dirname = '';
+ }
+ listItem.append($('<a/>').attr('href',
+ DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+ highlightstring + item[2]).html(item[1]));
+ } else {
+ // normal html builders
+ listItem.append($('<a/>').attr('href',
+ item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+ highlightstring + item[2]).html(item[1]));
+ }
+ if (item[3]) {
+ listItem.append($('<span> (' + item[3] + ')</span>'));
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+ $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[0] + '.txt',
+ dataType: "text",
+ complete: function(jqxhr, textstatus) {
+ var data = jqxhr.responseText;
+ if (data !== '') {
+ listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
+ }
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }});
+ } else {
+ // no source available, just display title
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }
+ }
+ // search finished, update title and status message
+ else {
+ Search.stopPulse();
+ Search.title.text(_('Search Results'));
+ if (!resultCount)
+ Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+ else
+ Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+ Search.status.fadeIn(500);
+ }
+ }
+ displayNextItem();
+ },
+
+ /**
+ * search for object names
+ */
+ performObjectSearch : function(object, otherterms) {
+ var filenames = this._index.filenames;
+ var objects = this._index.objects;
+ var objnames = this._index.objnames;
+ var titles = this._index.titles;
+
+ var i;
+ var results = [];
+
+ for (var prefix in objects) {
+ for (var name in objects[prefix]) {
+ var fullname = (prefix ? prefix + '.' : '') + name;
+ if (fullname.toLowerCase().indexOf(object) > -1) {
+ var score = 0;
+ var parts = fullname.split('.');
+ // check for different match types: exact matches of full name or
+ // "last name" (i.e. last dotted part)
+ if (fullname == object || parts[parts.length - 1] == object) {
+ score += Scorer.objNameMatch;
+ // matches in last name
+ } else if (parts[parts.length - 1].indexOf(object) > -1) {
+ score += Scorer.objPartialMatch;
+ }
+ var match = objects[prefix][name];
+ var objname = objnames[match[1]][2];
+ var title = titles[match[0]];
+          // if more than one term was searched for, require the other
+          // words to appear in the name/title/description as well
+ if (otherterms.length > 0) {
+ var haystack = (prefix + ' ' + name + ' ' +
+ objname + ' ' + title).toLowerCase();
+ var allfound = true;
+ for (i = 0; i < otherterms.length; i++) {
+ if (haystack.indexOf(otherterms[i]) == -1) {
+ allfound = false;
+ break;
+ }
+ }
+ if (!allfound) {
+ continue;
+ }
+ }
+ var descr = objname + _(', in ') + title;
+
+ var anchor = match[3];
+ if (anchor === '')
+ anchor = fullname;
+ else if (anchor == '-')
+ anchor = objnames[match[1]][1] + '-' + fullname;
+ // add custom score for some objects according to scorer
+ if (Scorer.objPrio.hasOwnProperty(match[2])) {
+ score += Scorer.objPrio[match[2]];
+ } else {
+ score += Scorer.objPrioDefault;
+ }
+ results.push([filenames[match[0]], fullname, '#'+anchor, descr, score]);
+ }
+ }
+ }
+
+ return results;
+ },
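+
+  // Editorial sketch (hypothetical index entry): querying "init" against an
+  // object named "Search.init" matches the last dotted part exactly and adds
+  // Scorer.objNameMatch (11) before any objPrio adjustment for its type.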
+
+ /**
+ * search for full-text terms in the index
+ */
+ performTermsSearch : function(searchterms, excluded, terms, score) {
+ var filenames = this._index.filenames;
+ var titles = this._index.titles;
+
+ var i, j, file, files;
+ var fileMap = {};
+ var results = [];
+
+ // perform the search on the required terms
+ for (i = 0; i < searchterms.length; i++) {
+ var word = searchterms[i];
+ // no match but word was a required one
+ if ((files = terms[word]) === undefined)
+ break;
+ if (files.length === undefined) {
+ files = [files];
+ }
+ // create the mapping
+ for (j = 0; j < files.length; j++) {
+ file = files[j];
+ if (file in fileMap)
+ fileMap[file].push(word);
+ else
+ fileMap[file] = [word];
+ }
+ }
+
+    // keep only files that matched every required term and none of the
+    // excluded ones
+ for (file in fileMap) {
+ var valid = true;
+
+ // check if all requirements are matched
+ if (fileMap[file].length != searchterms.length)
+ continue;
+
+ // ensure that none of the excluded terms is in the search result
+ for (i = 0; i < excluded.length; i++) {
+ if (terms[excluded[i]] == file ||
+ $u.contains(terms[excluded[i]] || [], file)) {
+ valid = false;
+ break;
+ }
+ }
+
+      // if the result is still valid, add it to the result list
+ if (valid) {
+ results.push([filenames[file], titles[file], '', null, score]);
+ }
+ }
+ return results;
+ },
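+
+  // Editorial sketch: a file qualifies only if every required stemmed term
+  // maps to it and no excluded term does; e.g. with searchterms ["sphinx"]
+  // and excluded ["build"], a page indexed under both terms is dropped.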
+
+ /**
+ * Helper function returning a node that contains the search
+ * summary for a given text. `keywords` is a list of stemmed
+ * words and `hlwords` is the list of normal, unstemmed words;
+ * the former locate the occurrence, the latter are used to
+ * highlight it.
+ */
+ makeSearchSummary : function(text, keywords, hlwords) {
+ var textLower = text.toLowerCase();
+ var start = 0;
+ $.each(keywords, function() {
+ var i = textLower.indexOf(this.toLowerCase());
+ if (i > -1)
+ start = i;
+ });
+ start = Math.max(start - 120, 0);
+ var excerpt = ((start > 0) ? '...' : '') +
+ $.trim(text.substr(start, 240)) +
+      // append trailing dots only when the excerpt was actually cut short
+      ((start + 240 < text.length) ? '...' : '');
+ var rv = $('<div class="context"></div>').text(excerpt);
+ $.each(hlwords, function() {
+ rv = rv.highlightText(this, 'highlighted');
+ });
+ return rv;
+ }
+};
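+
+// Editorial sketch: makeSearchSummary() locates an occurrence of the stemmed
+// keywords in the raw page text, excerpts roughly 240 characters around it,
+// and highlights the user's original (unstemmed) terms, e.g.:
+//   Search.makeSearchSummary(pageText, ["search"], ["searching"]);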
+
+$(document).ready(function() {
+ Search.init();
+}); \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/_static/underscore.js b/python/macholib/doc/_build/html/_static/underscore.js
new file mode 100644
index 000000000..5b55f32be
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/underscore.js
@@ -0,0 +1,31 @@
+// Underscore.js 1.3.1
+// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the MIT license.
+// Portions of Underscore are inspired or borrowed from Prototype,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore
+(function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source==
+c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,
+h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each=
+b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e<f;e++){if(e in a&&c.call(d,a[e],e,a)===n)break}else for(e in a)if(b.has(a,e)&&c.call(d,a[e],e,a)===n)break};b.map=b.collect=function(a,c,b){var e=[];if(a==null)return e;if(x&&a.map===x)return a.map(c,b);j(a,function(a,g,h){e[e.length]=c.call(b,a,g,h)});if(a.length===+a.length)e.length=a.length;return e};b.reduce=b.foldl=b.inject=function(a,c,d,e){var f=arguments.length>2;a==
+null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=
+function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e=
+e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck=
+function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b<e.computed&&(e={value:a,computed:b})});
+return e.value};b.shuffle=function(a){var b=[],d;j(a,function(a,f){f==0?b[0]=a:(d=Math.floor(Math.random()*(f+1)),b[f]=b[d],b[d]=a)});return b};b.sortBy=function(a,c,d){return b.pluck(b.map(a,function(a,b,g){return{value:a,criteria:c.call(d,a,b,g)}}).sort(function(a,b){var c=a.criteria,d=b.criteria;return c<d?-1:c>d?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,
+c,d){d||(d=b.identity);for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?e=g+1:f=g}return e};b.toArray=function(a){return!a?[]:a.toArray?a.toArray():b.isArray(a)?i.call(a):b.isArguments(a)?i.call(a):b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=b.head=function(a,b,d){return b!=null&&!d?i.call(a,0,b):a[0]};b.initial=function(a,b,d){return i.call(a,0,a.length-(b==null||d?1:b))};b.last=function(a,b,d){return b!=null&&!d?i.call(a,Math.max(a.length-b,0)):a[a.length-1]};b.rest=
+b.tail=function(a,b,d){return i.call(a,b==null||d?1:b)};b.compact=function(a){return b.filter(a,function(a){return!!a})};b.flatten=function(a,c){return b.reduce(a,function(a,e){if(b.isArray(e))return a.concat(c?e:b.flatten(e));a[a.length]=e;return a},[])};b.without=function(a){return b.difference(a,i.call(arguments,1))};b.uniq=b.unique=function(a,c,d){var d=d?b.map(a,d):a,e=[];b.reduce(d,function(d,g,h){if(0==h||(c===true?b.last(d)!=g:!b.include(d,g)))d[d.length]=g,e[e.length]=a[h];return d},[]);
+return e};b.union=function(){return b.uniq(b.flatten(arguments,true))};b.intersection=b.intersect=function(a){var c=i.call(arguments,1);return b.filter(b.uniq(a),function(a){return b.every(c,function(c){return b.indexOf(c,a)>=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e<c;e++)d[e]=b.pluck(a,""+e);return d};b.indexOf=function(a,c,
+d){if(a==null)return-1;var e;if(d)return d=b.sortedIndex(a,c),a[d]===c?d:-1;if(p&&a.indexOf===p)return a.indexOf(c);for(d=0,e=a.length;d<e;d++)if(d in a&&a[d]===c)return d;return-1};b.lastIndexOf=function(a,b){if(a==null)return-1;if(D&&a.lastIndexOf===D)return a.lastIndexOf(b);for(var d=a.length;d--;)if(d in a&&a[d]===b)return d;return-1};b.range=function(a,b,d){arguments.length<=1&&(b=a||0,a=0);for(var d=arguments[2]||1,e=Math.max(Math.ceil((b-a)/d),0),f=0,g=Array(e);f<e;)g[f++]=a,a+=d;return g};
+var F=function(){};b.bind=function(a,c){var d,e;if(a.bind===s&&s)return s.apply(a,i.call(arguments,1));if(!b.isFunction(a))throw new TypeError;e=i.call(arguments,2);return d=function(){if(!(this instanceof d))return a.apply(c,e.concat(i.call(arguments)));F.prototype=a.prototype;var b=new F,g=a.apply(b,e.concat(i.call(arguments)));return Object(g)===g?g:b}};b.bindAll=function(a){var c=i.call(arguments,1);c.length==0&&(c=b.functions(a));j(c,function(c){a[c]=b.bind(a[c],a)});return a};b.memoize=function(a,
+c){var d={};c||(c=b.identity);return function(){var e=c.apply(this,arguments);return b.has(d,e)?d[e]:d[e]=a.apply(this,arguments)}};b.delay=function(a,b){var d=i.call(arguments,2);return setTimeout(function(){return a.apply(a,d)},b)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(i.call(arguments,1)))};b.throttle=function(a,c){var d,e,f,g,h,i=b.debounce(function(){h=g=false},c);return function(){d=this;e=arguments;var b;f||(f=setTimeout(function(){f=null;h&&a.apply(d,e);i()},c));g?h=true:
+a.apply(d,e);i();g=true}};b.debounce=function(a,b){var d;return function(){var e=this,f=arguments;clearTimeout(d);d=setTimeout(function(){d=null;a.apply(e,f)},b)}};b.once=function(a){var b=false,d;return function(){if(b)return d;b=true;return d=a.apply(this,arguments)}};b.wrap=function(a,b){return function(){var d=[a].concat(i.call(arguments,0));return b.apply(this,d)}};b.compose=function(){var a=arguments;return function(){for(var b=arguments,d=a.length-1;d>=0;d--)b=[a[d].apply(this,b)];return b[0]}};
+b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments,
+1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};
+b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};
+b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e<a;e++)b.call(d,e)};b.escape=function(a){return(""+a).replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/\//g,"&#x2F;")};b.mixin=function(a){j(b.functions(a),
+function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+
+u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]=
+function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=
+true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
diff --git a/python/macholib/doc/_build/html/_static/up-pressed.png b/python/macholib/doc/_build/html/_static/up-pressed.png
new file mode 100644
index 000000000..8bd587afe
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/up-pressed.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/up.png b/python/macholib/doc/_build/html/_static/up.png
new file mode 100644
index 000000000..b94625680
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/up.png
Binary files differ
diff --git a/python/macholib/doc/_build/html/_static/websupport.js b/python/macholib/doc/_build/html/_static/websupport.js
new file mode 100644
index 000000000..19fcda564
--- /dev/null
+++ b/python/macholib/doc/_build/html/_static/websupport.js
@@ -0,0 +1,808 @@
+/*
+ * websupport.js
+ * ~~~~~~~~~~~~~
+ *
+ * sphinx.websupport utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+(function($) {
+ $.fn.autogrow = function() {
+ return this.each(function() {
+ var textarea = this;
+
+ $.fn.autogrow.resize(textarea);
+
+ $(textarea)
+ .focus(function() {
+ textarea.interval = setInterval(function() {
+ $.fn.autogrow.resize(textarea);
+ }, 500);
+ })
+ .blur(function() {
+ clearInterval(textarea.interval);
+ });
+ });
+ };
+
+ $.fn.autogrow.resize = function(textarea) {
+ var lineHeight = parseInt($(textarea).css('line-height'), 10);
+ var lines = textarea.value.split('\n');
+ var columns = textarea.cols;
+ var lineCount = 0;
+ $.each(lines, function() {
+ lineCount += Math.ceil(this.length / columns) || 1;
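+      // an empty line still occupies one row, hence the "|| 1"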
+ });
+ var height = lineHeight * (lineCount + 1);
+ $(textarea).css('height', height);
+ };
+})(jQuery);
+
+(function($) {
+ var comp, by;
+
+ function init() {
+ initEvents();
+ initComparator();
+ }
+
+ function initEvents() {
+ $('a.comment-close').live("click", function(event) {
+ event.preventDefault();
+ hide($(this).attr('id').substring(2));
+ });
+ $('a.vote').live("click", function(event) {
+ event.preventDefault();
+ handleVote($(this));
+ });
+ $('a.reply').live("click", function(event) {
+ event.preventDefault();
+ openReply($(this).attr('id').substring(2));
+ });
+ $('a.close-reply').live("click", function(event) {
+ event.preventDefault();
+ closeReply($(this).attr('id').substring(2));
+ });
+ $('a.sort-option').live("click", function(event) {
+ event.preventDefault();
+ handleReSort($(this));
+ });
+ $('a.show-proposal').live("click", function(event) {
+ event.preventDefault();
+ showProposal($(this).attr('id').substring(2));
+ });
+ $('a.hide-proposal').live("click", function(event) {
+ event.preventDefault();
+ hideProposal($(this).attr('id').substring(2));
+ });
+ $('a.show-propose-change').live("click", function(event) {
+ event.preventDefault();
+ showProposeChange($(this).attr('id').substring(2));
+ });
+ $('a.hide-propose-change').live("click", function(event) {
+ event.preventDefault();
+ hideProposeChange($(this).attr('id').substring(2));
+ });
+ $('a.accept-comment').live("click", function(event) {
+ event.preventDefault();
+ acceptComment($(this).attr('id').substring(2));
+ });
+ $('a.delete-comment').live("click", function(event) {
+ event.preventDefault();
+ deleteComment($(this).attr('id').substring(2));
+ });
+ $('a.comment-markup').live("click", function(event) {
+ event.preventDefault();
+ toggleCommentMarkupBox($(this).attr('id').substring(2));
+ });
+ }
+
+ /**
+ * Set comp, which is a comparator function used for sorting and
+ * inserting comments into the list.
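+   *
+   * e.g. by == 'ascage' gives comp = function(a, b) { return a.age - b.age; }
+   * (ascending age, i.e. newest first), while the default by == 'rating'
+   * takes the descending branch.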
+ */
+ function setComparator() {
+ // If the first three letters are "asc", sort in ascending order
+ // and remove the prefix.
+ if (by.substring(0,3) == 'asc') {
+ var i = by.substring(3);
+ comp = function(a, b) { return a[i] - b[i]; };
+ } else {
+ // Otherwise sort in descending order.
+ comp = function(a, b) { return b[by] - a[by]; };
+ }
+
+ // Reset link styles and format the selected sort option.
+ $('a.sel').attr('href', '#').removeClass('sel');
+ $('a.by' + by).removeAttr('href').addClass('sel');
+ }
+
+ /**
+ * Create a comp function. If the user has preferences stored in
+ * the sortBy cookie, use those, otherwise use the default.
+ */
+ function initComparator() {
+ by = 'rating'; // Default to sort by rating.
+ // If the sortBy cookie is set, use that instead.
+ if (document.cookie.length > 0) {
+ var start = document.cookie.indexOf('sortBy=');
+ if (start != -1) {
+ start = start + 7;
+ var end = document.cookie.indexOf(";", start);
+        if (end == -1) {
+          end = document.cookie.length;
+        }
+        // Read the value even when sortBy isn't the last cookie entry
+        // (i.e. when a ";" terminator was found after it).
+        by = unescape(document.cookie.substring(start, end));
+ }
+ }
+ setComparator();
+ }
+
+ /**
+ * Show a comment div.
+ */
+ function show(id) {
+ $('#ao' + id).hide();
+ $('#ah' + id).show();
+ var context = $.extend({id: id}, opts);
+ var popup = $(renderTemplate(popupTemplate, context)).hide();
+ popup.find('textarea[name="proposal"]').hide();
+ popup.find('a.by' + by).addClass('sel');
+ var form = popup.find('#cf' + id);
+ form.submit(function(event) {
+ event.preventDefault();
+ addComment(form);
+ });
+ $('#s' + id).after(popup);
+ popup.slideDown('fast', function() {
+ getComments(id);
+ });
+ }
+
+ /**
+ * Hide a comment div.
+ */
+ function hide(id) {
+ $('#ah' + id).hide();
+ $('#ao' + id).show();
+ var div = $('#sc' + id);
+ div.slideUp('fast', function() {
+ div.remove();
+ });
+ }
+
+ /**
+ * Perform an ajax request to get comments for a node
+ * and insert the comments into the comments tree.
+ */
+ function getComments(id) {
+ $.ajax({
+ type: 'GET',
+ url: opts.getCommentsURL,
+ data: {node: id},
+ success: function(data, textStatus, request) {
+ var ul = $('#cl' + id);
+ var speed = 100;
+ $('#cf' + id)
+ .find('textarea[name="proposal"]')
+ .data('source', data.source);
+
+ if (data.comments.length === 0) {
+ ul.html('<li>No comments yet.</li>');
+ ul.data('empty', true);
+ } else {
+ // If there are comments, sort them and put them in the list.
+ var comments = sortComments(data.comments);
+ speed = data.comments.length * 100;
+ appendComments(comments, ul);
+ ul.data('empty', false);
+ }
+ $('#cn' + id).slideUp(speed + 200);
+ ul.slideDown(speed);
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem retrieving the comments.');
+ },
+ dataType: 'json'
+ });
+ }
+
+ /**
+ * Add a comment via ajax and insert the comment into the comment tree.
+ */
+ function addComment(form) {
+ var node_id = form.find('input[name="node"]').val();
+ var parent_id = form.find('input[name="parent"]').val();
+ var text = form.find('textarea[name="comment"]').val();
+ var proposal = form.find('textarea[name="proposal"]').val();
+
+ if (text == '') {
+ showError('Please enter a comment.');
+ return;
+ }
+
+ // Disable the form that is being submitted.
+ form.find('textarea,input').attr('disabled', 'disabled');
+
+ // Send the comment to the server.
+ $.ajax({
+ type: "POST",
+ url: opts.addCommentURL,
+ dataType: 'json',
+ data: {
+ node: node_id,
+ parent: parent_id,
+ text: text,
+ proposal: proposal
+ },
+ success: function(data, textStatus, error) {
+ // Reset the form.
+ if (node_id) {
+ hideProposeChange(node_id);
+ }
+ form.find('textarea')
+ .val('')
+ .add(form.find('input'))
+ .removeAttr('disabled');
+ var ul = $('#cl' + (node_id || parent_id));
+ if (ul.data('empty')) {
+ $(ul).empty();
+ ul.data('empty', false);
+ }
+ insertComment(data.comment);
+ var ao = $('#ao' + node_id);
+ ao.find('img').attr({'src': opts.commentBrightImage});
+ if (node_id) {
+ // if this was a "root" comment, remove the commenting box
+ // (the user can get it back by reopening the comment popup)
+ $('#ca' + node_id).slideUp();
+ }
+ },
+ error: function(request, textStatus, error) {
+ form.find('textarea,input').removeAttr('disabled');
+ showError('Oops, there was a problem adding the comment.');
+ }
+ });
+ }
+
+ /**
+ * Recursively append comments to the main comment list and children
+ * lists, creating the comment tree.
+ */
+ function appendComments(comments, ul) {
+ $.each(comments, function() {
+ var div = createCommentDiv(this);
+ ul.append($(document.createElement('li')).html(div));
+ appendComments(this.children, div.find('ul.comment-children'));
+      // To avoid stale data, don't store the comment's children in data.
+ this.children = null;
+ div.data('comment', this);
+ });
+ }
+
+ /**
+ * After adding a new comment, it must be inserted in the correct
+ * location in the comment tree.
+ */
+ function insertComment(comment) {
+ var div = createCommentDiv(comment);
+
+    // To avoid stale data, don't store the comment's children in data.
+ comment.children = null;
+ div.data('comment', comment);
+
+ var ul = $('#cl' + (comment.node || comment.parent));
+ var siblings = getChildren(ul);
+
+ var li = $(document.createElement('li'));
+ li.hide();
+
+    // Determine where in the parent's children list to insert this comment.
+    for (var i = 0; i < siblings.length; i++) {
+ if (comp(comment, siblings[i]) <= 0) {
+ $('#cd' + siblings[i].id)
+ .parent()
+ .before(li.html(div));
+ li.slideDown('fast');
+ return;
+ }
+ }
+
+ // If we get here, this comment rates lower than all the others,
+ // or it is the only comment in the list.
+ ul.append(li.html(div));
+ li.slideDown('fast');
+ }
+
+ function acceptComment(id) {
+ $.ajax({
+ type: 'POST',
+ url: opts.acceptCommentURL,
+ data: {id: id},
+ success: function(data, textStatus, request) {
+ $('#cm' + id).fadeOut('fast');
+ $('#cd' + id).removeClass('moderate');
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem accepting the comment.');
+ }
+ });
+ }
+
+ function deleteComment(id) {
+ $.ajax({
+ type: 'POST',
+ url: opts.deleteCommentURL,
+ data: {id: id},
+ success: function(data, textStatus, request) {
+ var div = $('#cd' + id);
+ if (data == 'delete') {
+ // Moderator mode: remove the comment and all children immediately
+ div.slideUp('fast', function() {
+ div.remove();
+ });
+ return;
+ }
+ // User mode: only mark the comment as deleted
+ div
+ .find('span.user-id:first')
+ .text('[deleted]').end()
+ .find('div.comment-text:first')
+ .text('[deleted]').end()
+ .find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
+ ', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
+ .remove();
+ var comment = div.data('comment');
+ comment.username = '[deleted]';
+ comment.text = '[deleted]';
+ div.data('comment', comment);
+ },
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem deleting the comment.');
+ }
+ });
+ }
+
+ function showProposal(id) {
+ $('#sp' + id).hide();
+ $('#hp' + id).show();
+ $('#pr' + id).slideDown('fast');
+ }
+
+ function hideProposal(id) {
+ $('#hp' + id).hide();
+ $('#sp' + id).show();
+ $('#pr' + id).slideUp('fast');
+ }
+
+ function showProposeChange(id) {
+ $('#pc' + id).hide();
+ $('#hc' + id).show();
+ var textarea = $('#pt' + id);
+ textarea.val(textarea.data('source'));
+ $.fn.autogrow.resize(textarea[0]);
+ textarea.slideDown('fast');
+ }
+
+ function hideProposeChange(id) {
+ $('#hc' + id).hide();
+ $('#pc' + id).show();
+ var textarea = $('#pt' + id);
+ textarea.val('').removeAttr('disabled');
+ textarea.slideUp('fast');
+ }
+
+ function toggleCommentMarkupBox(id) {
+ $('#mb' + id).toggle();
+ }
+
+ /** Handle when the user clicks on a sort by link. */
+ function handleReSort(link) {
+ var classes = link.attr('class').split(/\s+/);
+ for (var i=0; i<classes.length; i++) {
+ if (classes[i] != 'sort-option') {
+ by = classes[i].substring(2);
+ }
+ }
+ setComparator();
+ // Save/update the sortBy cookie.
+ var expiration = new Date();
+ expiration.setDate(expiration.getDate() + 365);
+    document.cookie = 'sortBy=' + escape(by) +
+ ';expires=' + expiration.toUTCString();
+ $('ul.comment-ul').each(function(index, ul) {
+ var comments = getChildren($(ul), true);
+ comments = sortComments(comments);
+ appendComments(comments, $(ul).empty());
+ });
+ }
+
+ /**
+ * Function to process a vote when a user clicks an arrow.
+ */
+ function handleVote(link) {
+ if (!opts.voting) {
+ showError("You'll need to login to vote.");
+ return;
+ }
+
+ var id = link.attr('id');
+ if (!id) {
+ // Didn't click on one of the voting arrows.
+ return;
+ }
+ // If it is an unvote, the new vote value is 0,
+ // Otherwise it's 1 for an upvote, or -1 for a downvote.
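+    // e.g. id 'uv42' (up-vote arrow on comment 42) yields value 1,
+    // 'dv42' yields -1, and 'uu42'/'du42' (un-votes) leave value at 0.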
+ var value = 0;
+ if (id.charAt(1) != 'u') {
+ value = id.charAt(0) == 'u' ? 1 : -1;
+ }
+ // The data to be sent to the server.
+ var d = {
+ comment_id: id.substring(2),
+ value: value
+ };
+
+ // Swap the vote and unvote links.
+ link.hide();
+ $('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
+ .show();
+
+ // The div the comment is displayed in.
+ var div = $('div#cd' + d.comment_id);
+ var data = div.data('comment');
+
+ // If this is not an unvote, and the other vote arrow has
+ // already been pressed, unpress it.
+ if ((d.value !== 0) && (data.vote === d.value * -1)) {
+ $('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
+ $('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
+ }
+
+    // Update the comment's rating in the local data.
+ data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
+ data.vote = d.value;
+ div.data('comment', data);
+
+ // Change the rating text.
+ div.find('.rating:first')
+ .text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));
+
+ // Send the vote information to the server.
+ $.ajax({
+ type: "POST",
+ url: opts.processVoteURL,
+ data: d,
+ error: function(request, textStatus, error) {
+ showError('Oops, there was a problem casting that vote.');
+ }
+ });
+ }
+
+ /**
+ * Open a reply form used to reply to an existing comment.
+ */
+ function openReply(id) {
+ // Swap out the reply link for the hide link
+ $('#rl' + id).hide();
+ $('#cr' + id).show();
+
+ // Add the reply li to the children ul.
+ var div = $(renderTemplate(replyTemplate, {id: id})).hide();
+ $('#cl' + id)
+ .prepend(div)
+ // Setup the submit handler for the reply form.
+ .find('#rf' + id)
+ .submit(function(event) {
+ event.preventDefault();
+ addComment($('#rf' + id));
+ closeReply(id);
+ })
+ .find('input[type=button]')
+ .click(function() {
+ closeReply(id);
+ });
+ div.slideDown('fast', function() {
+ $('#rf' + id).find('textarea').focus();
+ });
+ }
+
+ /**
+ * Close the reply form opened with openReply.
+ */
+ function closeReply(id) {
+ // Remove the reply div from the DOM.
+ $('#rd' + id).slideUp('fast', function() {
+ $(this).remove();
+ });
+
+ // Swap out the hide link for the reply link
+ $('#cr' + id).hide();
+ $('#rl' + id).show();
+ }
+
+ /**
+ * Recursively sort a tree of comments using the comp comparator.
+ */
+ function sortComments(comments) {
+ comments.sort(comp);
+ $.each(comments, function() {
+ this.children = sortComments(this.children);
+ });
+ return comments;
+ }
+
+ /**
+   * Get the child comments from a ul. If recursive is true,
+   * recursively include the children's children.
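+   *
+   * e.g. getChildren($('#cl5'), true) returns the comments in the
+   * list with id 'cl5', with nested replies attached (illustrative id).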
+ */
+ function getChildren(ul, recursive) {
+ var children = [];
+ ul.children().children("[id^='cd']")
+ .each(function() {
+ var comment = $(this).data('comment');
+ if (recursive)
+ comment.children = getChildren($(this).find('#cl' + comment.id), true);
+ children.push(comment);
+ });
+ return children;
+ }
+
+ /** Create a div to display a comment in. */
+ function createCommentDiv(comment) {
+ if (!comment.displayed && !opts.moderator) {
+ return $('<div class="moderate">Thank you! Your comment will show up '
+        + 'once it has been approved by a moderator.</div>');
+ }
+ // Prettify the comment rating.
+ comment.pretty_rating = comment.rating + ' point' +
+ (comment.rating == 1 ? '' : 's');
+ // Make a class (for displaying not yet moderated comments differently)
+ comment.css_class = comment.displayed ? '' : ' moderate';
+ // Create a div for this comment.
+ var context = $.extend({}, opts, comment);
+ var div = $(renderTemplate(commentTemplate, context));
+
+ // If the user has voted on this comment, highlight the correct arrow.
+ if (comment.vote) {
+ var direction = (comment.vote == 1) ? 'u' : 'd';
+ div.find('#' + direction + 'v' + comment.id).hide();
+ div.find('#' + direction + 'u' + comment.id).show();
+ }
+
+ if (opts.moderator || comment.text != '[deleted]') {
+ div.find('a.reply').show();
+ if (comment.proposal_diff)
+ div.find('#sp' + comment.id).show();
+ if (opts.moderator && !comment.displayed)
+ div.find('#cm' + comment.id).show();
+ if (opts.moderator || (opts.username == comment.username))
+ div.find('#dc' + comment.id).show();
+ }
+ return div;
+ }
+
+ /**
+ * A simple template renderer. Placeholders such as <%id%> are replaced
+ * by context['id'] with items being escaped. Placeholders such as <#id#>
+ * are not escaped.
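+   *
+   * e.g. renderTemplate('<p id="c<%id%>"><#text#></p>',
+   *                     {id: 5, text: '<b>hi</b>'})
+   * returns '<p id="c5"><b>hi</b></p>' (id escaped, text left as-is).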
+ */
+ function renderTemplate(template, context) {
+ var esc = $(document.createElement('div'));
+
+ function handle(ph, escape) {
+ var cur = context;
+ $.each(ph.split('.'), function() {
+ cur = cur[this];
+ });
+ return escape ? esc.text(cur || "").html() : cur;
+ }
+
+ return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
+ return handle(arguments[2], arguments[1] == '%' ? true : false);
+ });
+ }
+
+ /** Flash an error message briefly. */
+ function showError(message) {
+ $(document.createElement('div')).attr({'class': 'popup-error'})
+ .append($(document.createElement('div'))
+ .attr({'class': 'error-message'}).text(message))
+ .appendTo('body')
+ .fadeIn("slow")
+ .delay(2000)
+ .fadeOut("slow");
+ }
+
+ /** Add a link the user uses to open the comments popup. */
+ $.fn.comment = function() {
+ return this.each(function() {
+ var id = $(this).attr('id').substring(1);
+ var count = COMMENT_METADATA[id];
+ var title = count + ' comment' + (count == 1 ? '' : 's');
+ var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
+ var addcls = count == 0 ? ' nocomment' : '';
+ $(this)
+ .append(
+ $(document.createElement('a')).attr({
+ href: '#',
+ 'class': 'sphinx-comment-open' + addcls,
+ id: 'ao' + id
+ })
+ .append($(document.createElement('img')).attr({
+ src: image,
+ alt: 'comment',
+ title: title
+ }))
+ .click(function(event) {
+ event.preventDefault();
+ show($(this).attr('id').substring(2));
+ })
+ )
+ .append(
+ $(document.createElement('a')).attr({
+ href: '#',
+ 'class': 'sphinx-comment-close hidden',
+ id: 'ah' + id
+ })
+ .append($(document.createElement('img')).attr({
+ src: opts.closeCommentImage,
+ alt: 'close',
+ title: 'close'
+ }))
+ .click(function(event) {
+ event.preventDefault();
+ hide($(this).attr('id').substring(2));
+ })
+ );
+ });
+ };
+
+ var opts = {
+ processVoteURL: '/_process_vote',
+ addCommentURL: '/_add_comment',
+ getCommentsURL: '/_get_comments',
+ acceptCommentURL: '/_accept_comment',
+ deleteCommentURL: '/_delete_comment',
+ commentImage: '/static/_static/comment.png',
+ closeCommentImage: '/static/_static/comment-close.png',
+ loadingImage: '/static/_static/ajax-loader.gif',
+ commentBrightImage: '/static/_static/comment-bright.png',
+ upArrow: '/static/_static/up.png',
+ downArrow: '/static/_static/down.png',
+ upArrowPressed: '/static/_static/up-pressed.png',
+ downArrowPressed: '/static/_static/down-pressed.png',
+ voting: false,
+ moderator: false
+ };
+
+ if (typeof COMMENT_OPTIONS != "undefined") {
+ opts = jQuery.extend(opts, COMMENT_OPTIONS);
+ }
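+  // e.g. a page can enable voting by defining, before this script runs:
+  //   var COMMENT_OPTIONS = {voting: true};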
+
+ var popupTemplate = '\
+ <div class="sphinx-comments" id="sc<%id%>">\
+ <p class="sort-options">\
+ Sort by:\
+ <a href="#" class="sort-option byrating">best rated</a>\
+ <a href="#" class="sort-option byascage">newest</a>\
+ <a href="#" class="sort-option byage">oldest</a>\
+ </p>\
+ <div class="comment-header">Comments</div>\
+ <div class="comment-loading" id="cn<%id%>">\
+ loading comments... <img src="<%loadingImage%>" alt="" /></div>\
+ <ul id="cl<%id%>" class="comment-ul"></ul>\
+ <div id="ca<%id%>">\
+ <p class="add-a-comment">Add a comment\
+ (<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
+ <div class="comment-markup-box" id="mb<%id%>">\
+          reStructuredText markup: <i>*emph*</i>, <b>**strong**</b>, \
+ <tt>``code``</tt>, \
+ code blocks: <tt>::</tt> and an indented block after blank line</div>\
+ <form method="post" id="cf<%id%>" class="comment-form" action="">\
+ <textarea name="comment" cols="80"></textarea>\
+ <p class="propose-button">\
+ <a href="#" id="pc<%id%>" class="show-propose-change">\
+ Propose a change &#9657;\
+ </a>\
+ <a href="#" id="hc<%id%>" class="hide-propose-change">\
+ Propose a change &#9663;\
+ </a>\
+ </p>\
+ <textarea name="proposal" id="pt<%id%>" cols="80"\
+ spellcheck="false"></textarea>\
+ <input type="submit" value="Add comment" />\
+ <input type="hidden" name="node" value="<%id%>" />\
+ <input type="hidden" name="parent" value="" />\
+ </form>\
+ </div>\
+ </div>';
+
+ var commentTemplate = '\
+ <div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
+ <div class="vote">\
+ <div class="arrow">\
+ <a href="#" id="uv<%id%>" class="vote" title="vote up">\
+ <img src="<%upArrow%>" />\
+ </a>\
+ <a href="#" id="uu<%id%>" class="un vote" title="vote up">\
+ <img src="<%upArrowPressed%>" />\
+ </a>\
+ </div>\
+ <div class="arrow">\
+ <a href="#" id="dv<%id%>" class="vote" title="vote down">\
+ <img src="<%downArrow%>" id="da<%id%>" />\
+ </a>\
+ <a href="#" id="du<%id%>" class="un vote" title="vote down">\
+ <img src="<%downArrowPressed%>" />\
+ </a>\
+ </div>\
+ </div>\
+ <div class="comment-content">\
+ <p class="tagline comment">\
+ <span class="user-id"><%username%></span>\
+ <span class="rating"><%pretty_rating%></span>\
+ <span class="delta"><%time.delta%></span>\
+ </p>\
+ <div class="comment-text comment"><#text#></div>\
+ <p class="comment-opts comment">\
+ <a href="#" class="reply hidden" id="rl<%id%>">reply &#9657;</a>\
+ <a href="#" class="close-reply" id="cr<%id%>">reply &#9663;</a>\
+ <a href="#" id="sp<%id%>" class="show-proposal">proposal &#9657;</a>\
+ <a href="#" id="hp<%id%>" class="hide-proposal">proposal &#9663;</a>\
+ <a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
+ <span id="cm<%id%>" class="moderation hidden">\
+ <a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
+ </span>\
+ </p>\
+ <pre class="proposal" id="pr<%id%>">\
+<#proposal_diff#>\
+ </pre>\
+ <ul class="comment-children" id="cl<%id%>"></ul>\
+ </div>\
+ <div class="clearleft"></div>\
+    </div>';
+
+ var replyTemplate = '\
+ <li>\
+ <div class="reply-div" id="rd<%id%>">\
+ <form id="rf<%id%>">\
+ <textarea name="comment" cols="80"></textarea>\
+ <input type="submit" value="Add reply" />\
+ <input type="button" value="Cancel" />\
+ <input type="hidden" name="parent" value="<%id%>" />\
+ <input type="hidden" name="node" value="" />\
+ </form>\
+ </div>\
+ </li>';
+
+ $(document).ready(function() {
+ init();
+ });
+})(jQuery);
+
+$(document).ready(function() {
+ // add comment anchors for all paragraphs that are commentable
+ $('.sphinx-has-comment').comment();
+
+ // highlight search words in search results
+ $("div.context").each(function() {
+ var params = $.getQueryParameters();
+ var terms = (params.q) ? params.q[0].split(/\s+/) : [];
+ var result = $(this);
+ $.each(terms, function() {
+ result.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ });
+
+ // directly open comment window if requested
+ var anchor = document.location.hash;
+ if (anchor.substring(0, 9) == '#comment-') {
+ $('#ao' + anchor.substring(9)).click();
+ document.location.hash = '#s' + anchor.substring(9);
+ }
+});
diff --git a/python/macholib/doc/_build/html/changelog.html b/python/macholib/doc/_build/html/changelog.html
new file mode 100644
index 000000000..6cf3dbcde
--- /dev/null
+++ b/python/macholib/doc/_build/html/changelog.html
@@ -0,0 +1,385 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Release history &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="License" href="license.html" />
+ <link rel="prev" title="Macholib - Analyze and edit Mach-O headers" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Macholib - Analyze and edit Mach-O headers"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="release-history">
+<h1>Release history<a class="headerlink" href="#release-history" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="macholib-1-7">
+<h2>macholib 1.7<a class="headerlink" href="#macholib-1-7" title="Permalink to this headline">¶</a></h2>
+<ul>
+<li><p class="first">Added support for ARM64, LC_ENCRYPTION_INFO_64 and LC_LINKER_OPTION</p>
+<p>Patch by Matthias Ringwald.</p>
+</li>
+<li><p class="first">Load commands now have a &#8220;describe&#8221; method that returns more information
+about the command.</p>
+<p>Patch by David Dorsey.</p>
+</li>
+<li><p class="first">The MAGIC value in the header was always represented in the native
+byte order, instead of as the value read from the binary.</p>
+<p>Patch by David Dorsey.</p>
+</li>
+<li><p class="first">Added various new constants to &#8220;macholib.mach_o&#8221;.</p>
+<p>Patch by David Dorsey.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-6-1">
+<h2>macholib 1.6.1<a class="headerlink" href="#macholib-1-6-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>?</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-6">
+<h2>macholib 1.6<a class="headerlink" href="#macholib-1-6" title="Permalink to this headline">¶</a></h2>
+<ul>
+<li><p class="first">Add support for <a class="reference external" href="mailto:'&#37;&#52;&#48;loader_path">'<span>&#64;</span>loader_path</a>&#8216; link command in
+macholib.dyld:</p>
+<ul class="simple">
+<li>Added function <tt class="docutils literal"><span class="pre">macholib.dyld.dyld_loader_search</span></tt></li>
+<li>This function is used by <tt class="docutils literal"><span class="pre">macholib.dyld.dyld_find</span></tt>,
+and that function now has a new (optional) argument
+with the path to the loader.</li>
+</ul>
+</li>
+<li><p class="first">Also add support for <a class="reference external" href="mailto:'&#37;&#52;&#48;loader_path">'<span>&#64;</span>loader_path</a>&#8216; to macholib.MachoGraph,
+using the newly added <a class="reference external" href="mailto:'&#37;&#52;&#48;loader_path">'<span>&#64;</span>loader_path</a>&#8216; support in the
+dyld module.</p>
+<p>Due to this suppport the <em>macho_standalone</em> tool can
+now rewrite binaries that contain an <a class="reference external" href="mailto:'&#37;&#52;&#48;loader_path">'<span>&#64;</span>loader_path</a>&#8216; load
+command.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-5-2">
+<h2>macholib 1.5.2<a class="headerlink" href="#macholib-1-5-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Issue #93: Show the name of the affected file in the exception message
+for Mach-O headers that are too large to relocate.</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-5-1">
+<h2>macholib 1.5.1<a class="headerlink" href="#macholib-1-5-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>There were no &#8216;classifiers&#8217; in the package metadata due to
+a bug in setup.py.</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-5">
+<h2>macholib 1.5<a class="headerlink" href="#macholib-1-5" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.5 is a minor feature release</p>
+<ul>
+<li><p class="first">No longer use 2to3 to provide Python 3 support</p>
+<p>As a side effect of this, macholib no longer supports
+Python 2.5 and earlier.</p>
+</li>
+<li><p class="first">Adds suppport for some new macho load commands</p>
+</li>
+<li><p class="first">Fix for py3k problem in macho_standalone.py</p>
+<p>Patch by Guanqun Lu.</p>
+</li>
+<li><p class="first">Fix for some issues in macho_dump.py</p>
+<p>Patch by Nam Nguyen</p>
+</li>
+<li><p class="first">Issue #10: Fix for LC_DATA_IN_CODE linker commands, without
+this fix py2app cannot build application bundles when
+the source binaries have been compiled with Xcode 4.5.</p>
+</li>
+<li><p class="first">Issue #6: Fix for LC_ENCRYPTION_INFO linker commands</p>
+</li>
+<li><p class="first">Use the mach header information to print the cpu type of a
+binary, instead of trying to deduce that from pointer width
+and endianness.</p>
+<p>Changed the code because of issue #6, in which a user tried to
+dump an iOS binary and got bogus output in previous
+releases.</p>
+</li>
+<li><p class="first">The mapping <tt class="docutils literal"><span class="pre">macholib.macho_dump.ARCH_MAP</span></tt> is undocumented
+and no longer used by macholib itself. It will be removed
+in the next release.</p>
+</li>
+<li><p class="first">The command-line tools <tt class="docutils literal"><span class="pre">macho_find</span></tt>, <tt class="docutils literal"><span class="pre">macho_dump</span></tt> and
+<tt class="docutils literal"><span class="pre">macho_standalone</span></tt> are deprecated. Use &#8220;python -mmacholib&#8221;
+instead. That is:</p>
+<div class="highlight-python"><div class="highlight"><pre>$ python -mmacholib dump /usr/bin/grep
+
+$ python -mmacholib find ~
+
+$ python -mmacholib standalone myapp.app
+</pre></div>
+</div>
+<p>This makes it clearer which version of the tools is used.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-4-3">
+<h2>macholib 1.4.3<a class="headerlink" href="#macholib-1-4-3" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.4.3 is a minor feature release</p>
+<ul>
+<li><p class="first">Added strings for &#8216;x86_64&#8217; and &#8216;ppc64&#8217; to
+macholib.mach_o.CPU_TYPE_NAMES.</p>
+</li>
+<li><p class="first">macho_find and macho_dump were broken in the 1.4.2 release</p>
+</li>
+<li><p class="first">added &#8216;macholib.util.NOT_SYSTEM_FILES&#8217;, a list of
+files that aren&#8217;t system path&#8217;s even though they are
+located in system locations.</p>
+<p>Needed to work around a bug in PySide (see issue #32 in the
+py2app tracker)</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-4-2">
+<h2>macholib 1.4.2<a class="headerlink" href="#macholib-1-4-2" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.4.2 is a minor bugfix release</p>
+<ul class="simple">
+<li>The support for new load commands that was added in 1.4.1
+contained a typo that caused problems on OSX 10.7 (Lion).</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-4-1">
+<h2>macholib 1.4.1<a class="headerlink" href="#macholib-1-4-1" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.4.1 is a minor feature release</p>
+<p>Features:</p>
+<ul class="simple">
+<li>Add support for a number of new MachO load commands that were added
+during the lifetime of OSX 10.6: <tt class="docutils literal"><span class="pre">LC_LOAD_UPWARD_DYLIB</span></tt>,
+<tt class="docutils literal"><span class="pre">LC_VERSION_MIN_MACOSX</span></tt>, <tt class="docutils literal"><span class="pre">LC_VERSION_MIN_IPHONEOS</span></tt> and
+<tt class="docutils literal"><span class="pre">LC_FUNCTION_STARTS</span></tt>.</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-4">
+<h2>macholib 1.4<a class="headerlink" href="#macholib-1-4" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.4 is a feature release</p>
+<p>Features:</p>
+<ul>
+<li><p class="first">Documentation is now generated using <a class="reference external" href="http://pypi.python.org/pypi/sphinx">sphinx</a>
+and can be viewed at &lt;<a class="reference external" href="http://packages.python.org/macholib">http://packages.python.org/macholib</a>&gt;.</p>
+</li>
+<li><p class="first">The repository has moved to bitbucket</p>
+</li>
+<li><p class="first">There now is a testsuite</p>
+</li>
+<li><p class="first">Private functionality inside modules was renamed to
+a name starting with an underscore.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">if this change affects your code you are relying on undefined
+implementation features, please stop using private functions.</p>
+</div>
+</li>
+<li><p class="first">The basic packable types in <tt class="docutils literal"><span class="pre">macholib.ptypes</span></tt> were renamed to better
+represent the corresponding C type. The table below lists the old
+an new names (the old names are still available, but are deprecated and
+will be removed in a future release).</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="50%" />
+<col width="50%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head"><strong>Old name</strong></th>
+<th class="head"><strong>New name</strong></th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>p_byte</td>
+<td>p_int8</td>
+</tr>
+<tr class="row-odd"><td>p_ubyte</td>
+<td>p_uint8</td>
+</tr>
+<tr class="row-even"><td>p_short</td>
+<td>p_int16</td>
+</tr>
+<tr class="row-odd"><td>p_ushort</td>
+<td>p_uint16</td>
+</tr>
+<tr class="row-even"><td>p_int</td>
+<td>p_int32</td>
+</tr>
+<tr class="row-odd"><td>p_uint</td>
+<td>p_uint32</td>
+</tr>
+<tr class="row-even"><td>p_long</td>
+<td>p_int32</td>
+</tr>
+<tr class="row-odd"><td>p_ulong</td>
+<td>p_uint32</td>
+</tr>
+<tr class="row-even"><td>p_longlong</td>
+<td>p_int64</td>
+</tr>
+<tr class="row-odd"><td>p_ulonglong</td>
+<td>p_uint64</td>
+</tr>
+</tbody>
+</table>
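+<p>A minimal migration sketch (the <tt class="docutils literal"><span class="pre">example_header</span></tt> structure and its
+field are illustrative, not part of macholib):</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.ptypes import Structure, p_uint32
+
+class example_header(Structure):
+    # formerly declared with the now-deprecated p_ulong
+    _fields_ = (('magic', p_uint32),)
+</pre></div>
+</div>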
+<p><tt class="docutils literal"><span class="pre">Macholib.ptypes.p_ptr</span></tt> is no longer present as it had an unclear
+definition and isn&#8217;t actually used in the codebase.</p>
+</li>
+</ul>
+<p>Bug fixes:</p>
+<ul>
+<li><p class="first">The semantics of <tt class="docutils literal"><span class="pre">dyld.dyld_default_search</span></tt> were changed a bit,
+it now first searches the framework path (if appropriate) and then
+the linker path, irrespective of the value of the <tt class="docutils literal"><span class="pre">DYLD_FALLBACK*</span></tt>
+environment variables.</p>
+<p>Previous versions would change the search order when those variables
+were set, which is odd and doesn&#8217;t correspond with the documented
+behaviour of the system dyld.</p>
+</li>
+<li><p class="first">It is once again possible to install using python2.5</p>
+</li>
+<li><p class="first">The source distribution includes all files, this was broken
+due to the switch to mercurial (which confused setuptools)</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-3">
+<h2>macholib 1.3<a class="headerlink" href="#macholib-1-3" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.3 is a feature release.</p>
+<p>Features:</p>
+<ul>
+<li><p class="first">Experimental Python 3.x support</p>
+<p>This version contains lightly tested support for Python 3.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="macholib-1-2-2">
+<h2>macholib 1.2.2<a class="headerlink" href="#macholib-1-2-2" title="Permalink to this headline">¶</a></h2>
+<p>macholib 1.2.2 is a bugfix release.</p>
+<p>Bug fixes:</p>
+<ul class="simple">
+<li>Macholib should work better with 64-bit code
+(patch by Marc-Antoine Parent)</li>
+</ul>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Release history</a><ul>
+<li><a class="reference internal" href="#macholib-1-7">macholib 1.7</a></li>
+<li><a class="reference internal" href="#macholib-1-6-1">macholib 1.6.1</a></li>
+<li><a class="reference internal" href="#macholib-1-6">macholib 1.6</a></li>
+<li><a class="reference internal" href="#macholib-1-5-2">macholib 1.5.2</a></li>
+<li><a class="reference internal" href="#macholib-1-5-1">macholib 1.5.1</a></li>
+<li><a class="reference internal" href="#macholib-1-5">macholib 1.5</a></li>
+<li><a class="reference internal" href="#macholib-1-4-3">macholib 1.4.3</a></li>
+<li><a class="reference internal" href="#macholib-1-4-2">macholib 1.4.2</a></li>
+<li><a class="reference internal" href="#macholib-1-4-1">macholib 1.4.1</a></li>
+<li><a class="reference internal" href="#macholib-1-4">macholib 1.4</a></li>
+<li><a class="reference internal" href="#macholib-1-3">macholib 1.3</a></li>
+<li><a class="reference internal" href="#macholib-1-2-2">macholib 1.2.2</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="index.html"
+ title="previous chapter">Macholib - Analyze and edit Mach-O headers</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="license.html"
+ title="next chapter">License</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/changelog.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Macholib - Analyze and edit Mach-O headers"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/dyld.html b/python/macholib/doc/_build/html/dyld.html
new file mode 100644
index 000000000..ad8555058
--- /dev/null
+++ b/python/macholib/doc/_build/html/dyld.html
@@ -0,0 +1,267 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.dyld — Dyld emulation &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.dylib — Generic dylib path manipulation" href="dylib.html" />
+ <link rel="prev" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header" href="SymbolTable.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dylib.html" title="macholib.dylib — Generic dylib path manipulation"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="SymbolTable.html" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.dyld">
+<span id="macholib-dyld-dyld-emulation"></span><h1><a class="reference internal" href="#module-macholib.dyld" title="macholib.dyld: Emulation of functonality of the dynamic linker"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.dyld</span></tt></a> &#8212; Dyld emulation<a class="headerlink" href="#module-macholib.dyld" title="Permalink to this headline">¶</a></h1>
+<p>This module defines a number of functions that can be used
+to emulate the functionality of the dynamic linker (<tt class="docutils literal"><span class="pre">dyld</span></tt>)
+w.r.t. looking for library files and frameworks.</p>
+<dl class="function">
+<dt id="macholib.dyld.dyld_image_suffix">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_image_suffix</tt><big>(</big><span class="optional">[</span><em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_image_suffix" title="Permalink to this definition">¶</a></dt>
+<dd><p>Looks up the suffix to append to shared library and
+framework names and returns this value when found.
+Returns <tt class="docutils literal"><span class="pre">None</span></tt> when no suffix should be appended.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>See the description of <tt class="docutils literal"><span class="pre">DYLD_IMAGE_SUFFIX</span></tt> in the
+manual page for dyld(1) for more information.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dydl_framework_path">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dydl_framework_path</tt><big>(</big><span class="optional">[</span><em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dydl_framework_path" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a user-specified framework search path,
+or an empty list when only the default search path
+should be used.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>See the description of <tt class="docutils literal"><span class="pre">DYLD_FRAMEWORK_PATH</span></tt> in the
+manual page for dyld(1) for more information.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_library_path">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_library_path</tt><big>(</big><span class="optional">[</span><em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_library_path" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a user-specified library search path,
+or an empty list when only the default search path
+should be used.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>See the description of <tt class="docutils literal"><span class="pre">DYLD_LIBRARY_PATH</span></tt> in the
+manual page for dyld(1) for more information.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_fallback_framework_path">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_fallback_framework_path</tt><big>(</big><span class="optional">[</span><em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_fallback_framework_path" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a user-specified list of directories in which
+to look for frameworks that aren&#8217;t in their install path,
+or an empty list when the default fallback path should
+be used.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>See the description of <tt class="docutils literal"><span class="pre">DYLD_FALLBACK_FRAMEWORK_PATH</span></tt> in the
+manual page for dyld(1) for more information.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_fallback_library_path">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_fallback_library_path</tt><big>(</big><span class="optional">[</span><em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_fallback_library_path" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a user-specified list of directories in which
+to look for libraries that aren&#8217;t in their install path,
+or an empty list when the default fallback path should
+be used.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>See the description of <tt class="docutils literal"><span class="pre">DYLD_FALLBACK_LIBRARY_PATH</span></tt> in the
+manual page for dyld(1) for more information.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_image_suffix_search">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_image_suffix_search</tt><big>(</big><em>iterator</em><span class="optional">[</span>, <em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_image_suffix_search" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yields all items in <em>iterator</em>, and prepends names
+with the image suffix to those items when the suffix
+is specified.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_override_search">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_override_search</tt><big>(</big><em>name</em><span class="optional">[</span>, <em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_override_search" title="Permalink to this definition">¶</a></dt>
+<dd><p>If <em>name</em> is a framework name yield filesystem
+paths relative to the entries in the framework
+search path.</p>
+<p>Always yield the filesystem paths relative to the
+entries in the library search path.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_executable_path_search">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_executable_path_search</tt><big>(</big><em>name</em>, <em>executable_path</em><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_executable_path_search" title="Permalink to this definition">¶</a></dt>
+<dd><p>If <em>name</em> is a path starting with <tt class="docutils literal"><span class="pre">&#64;executable_path/</span></tt> yield
+the path relative to the specified <em>executable_path</em>.</p>
+<p>If <em>executable_path</em> is None nothing is yielded.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_loader_search">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_loader_search</tt><big>(</big><em>name</em>, <em>loader_path</em><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_loader_search" title="Permalink to this definition">¶</a></dt>
+<dd><p>If <em>name</em> is a path starting with <tt class="docutils literal"><span class="pre">&#64;loader_path/</span></tt> yield
+the path relative to the specified <em>loader_path</em>.</p>
+<p>If <em>loader_path</em> is None nothing is yielded.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_default_search">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_default_search</tt><big>(</big><em>name</em><span class="optional">[</span>, <em>env</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_default_search" title="Permalink to this definition">¶</a></dt>
+<dd><p>Yield the filesystem locations to look for a dynamic
+library or framework using the default locations
+used by the system dynamic linker.</p>
+<p>This function will look in <tt class="docutils literal"><span class="pre">~/Library/Frameworks</span></tt>
+for frameworks, even though the system dynamic linker
+doesn&#8217;t.</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.dyld_find">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">dyld_find</tt><big>(</big><em>name</em><span class="optional">[</span>, <em>executable_path</em><span class="optional">[</span>, <em>env</em><span class="optional">[</span>, <em>loader</em><span class="optional">]</span><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.dyld_find" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns the path of the requested dynamic library,
+raises <tt class="xref py py-exc docutils literal"><span class="pre">ValueError</span></tt> when the library cannot be found.</p>
+<p>This function searches for the library in the same
+locations as the system dynamic linker.</p>
+<p>The <em>executable_path</em> should be the filesystem path
+of the executable to which the library is linked (either
+directly or indirectly).</p>
+<p>The <em>env</em> argument is a dictionary, which defaults
+to <a class="reference external" href="http://docs.python.org/library/os.html#os.environ" title="(in Python v2.7)"><tt class="xref py py-data docutils literal"><span class="pre">os.environ</span></tt></a>.</p>
+<p>The <em>loader_path</em> argument is an optional filesystem path for
+the object file (binary or shared library) that references
+<em>name</em>.</p>
+<div class="versionchanged">
+<p><span class="versionmodified">Changed in version 1.6: </span>Added the <em>loader_path</em> argument.</p>
+</div>
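+<p>A minimal usage sketch (the resolved path is illustrative and depends
+on the host system):</p>
+<div class="highlight-python"><div class="highlight"><pre>>>> from macholib.dyld import dyld_find
+>>> dyld_find('libSystem.dylib')
+'/usr/lib/libSystem.dylib'
+</pre></div>
+</div>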
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.dyld.framework_find">
+<tt class="descclassname">macholib.dyld.</tt><tt class="descname">framework_find</tt><big>(</big><em>fn</em><span class="optional">[</span>, <em>executable_path</em><span class="optional">[</span>, <em>env</em><span class="optional">]</span><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.dyld.framework_find" title="Permalink to this definition">¶</a></dt>
+<dd><p>Find a framework using the same semantics as the
+system dynamic linker, but accept looser names
+than the system linker does.</p>
+<p>This function will return a correct result for input
+values like:</p>
+<ul class="simple">
+<li>Python</li>
+<li>Python.framework</li>
+<li>Python.framework/Versions/Current</li>
+</ul>
+</dd></dl>
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="SymbolTable.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.SymbolTable</span></tt> &#8212; Class to read the symbol table from a Mach-O header</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="dylib.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.dylib</span></tt> &#8212; Generic dylib path manipulation</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/dyld.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="dylib.html" title="macholib.dylib — Generic dylib path manipulation"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="SymbolTable.html" title="macholib.SymbolTable — Class to read the symbol table from a Mach-O header"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/dylib.html b/python/macholib/doc/_build/html/dylib.html
new file mode 100644
index 000000000..dd293fef2
--- /dev/null
+++ b/python/macholib/doc/_build/html/dylib.html
@@ -0,0 +1,145 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.dylib — Generic dylib path manipulation &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.framework — Generic framework path manipulation" href="framework.html" />
+ <link rel="prev" title="macholib.dyld — Dyld emulation" href="dyld.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="framework.html" title="macholib.framework — Generic framework path manipulation"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="dyld.html" title="macholib.dyld — Dyld emulation"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.dylib">
+<span id="macholib-dylib-generic-dylib-path-manipulation"></span><h1><a class="reference internal" href="#module-macholib.dylib" title="macholib.dylib: Generic dylib path manipulation"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.dylib</span></tt></a> &#8212; Generic dylib path manipulation<a class="headerlink" href="#module-macholib.dylib" title="Permalink to this headline">¶</a></h1>
+<p>This module defines a function <a class="reference internal" href="#macholib.dylib.dylib_info" title="macholib.dylib.dylib_info"><tt class="xref py py-func docutils literal"><span class="pre">dylib_info()</span></tt></a> that can extract
+useful information from the name of a dynamic library.</p>
+<dl class="function">
+<dt id="macholib.dylib.dylib_info">
+<tt class="descclassname">macholib.dylib.</tt><tt class="descname">dylib_info</tt><big>(</big><em>filename</em><big>)</big><a class="headerlink" href="#macholib.dylib.dylib_info" title="Permalink to this definition">¶</a></dt>
+<dd><p>A dylib name can take one of the following four forms:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">Location/Name.SomeVersion_Suffix.dylib</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name.SomeVersion.dylib</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name_Suffix.dylib</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name.dylib</span></tt></li>
+</ul>
+<p>Returns <tt class="docutils literal"><span class="pre">None</span></tt> if not found, or a mapping equivalent to:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="nb">dict</span><span class="p">(</span>
+ <span class="n">location</span><span class="o">=</span><span class="s">&#39;Location&#39;</span><span class="p">,</span>
+ <span class="n">name</span><span class="o">=</span><span class="s">&#39;Name.SomeVersion_Suffix.dylib&#39;</span><span class="p">,</span>
+ <span class="n">shortname</span><span class="o">=</span><span class="s">&#39;Name&#39;</span><span class="p">,</span>
+ <span class="n">version</span><span class="o">=</span><span class="s">&#39;SomeVersion&#39;</span><span class="p">,</span>
+ <span class="n">suffix</span><span class="o">=</span><span class="s">&#39;Suffix&#39;</span><span class="p">,</span>
+<span class="p">)</span>
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><em>SomeVersion</em> and <em>Suffix</em> are optional and my be <tt class="docutils literal"><span class="pre">None</span></tt>
+if not present.</p>
+</div>
+</dd></dl>
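+<p>A small illustration of the parsing; the filename below is made up
+for the example:</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.dylib import dylib_info
+
+info = dylib_info('lib/libfoo.1_debug.dylib')
+# info['location']  == 'lib'
+# info['shortname'] == 'libfoo'
+# info['version']   == '1'
+# info['suffix']    == 'debug'
+
+dylib_info('not-a-dylib')   # returns None
+</pre></div>
+</div>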
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="dyld.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.dyld</span></tt> &#8212; Dyld emulation</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="framework.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.framework</span></tt> &#8212; Generic framework path manipulation</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/dylib.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="framework.html" title="macholib.framework — Generic framework path manipulation"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="dyld.html" title="macholib.dyld — Dyld emulation"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/framework.html b/python/macholib/doc/_build/html/framework.html
new file mode 100644
index 000000000..023e580ed
--- /dev/null
+++ b/python/macholib/doc/_build/html/framework.html
@@ -0,0 +1,147 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.framework — Generic framework path manipulation &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.mach_o — Low-level definitions" href="macho_o.html" />
+ <link rel="prev" title="macholib.dylib — Generic dylib path manipulation" href="dylib.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="macho_o.html" title="macholib.mach_o — Low-level definitions"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="dylib.html" title="macholib.dylib — Generic dylib path manipulation"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.framework">
+<span id="macholib-framework-generic-framework-path-manipulation"></span><h1><a class="reference internal" href="#module-macholib.framework" title="macholib.framework: Generic framework path manipulation"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.framework</span></tt></a> &#8212; Generic framework path manipulation<a class="headerlink" href="#module-macholib.framework" title="Permalink to this headline">¶</a></h1>
+<p>This module defines a function <a class="reference internal" href="#macholib.framework.framework_info" title="macholib.framework.framework_info"><tt class="xref py py-func docutils literal"><span class="pre">framework_info()</span></tt></a> that can extract
+useful information from the name of a dynamic library in a framework.</p>
+<dl class="function">
+<dt id="macholib.framework.framework_info">
+<tt class="descclassname">macholib.framework.</tt><tt class="descname">framework_info</tt><big>(</big><em>filename</em><big>)</big><a class="headerlink" href="#macholib.framework.framework_info" title="Permalink to this definition">¶</a></dt>
+<dd><blockquote>
+<div><p>A framework name can take one of the following four forms:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">Location/Name.framework/Versions/SomeVersion/Name_Suffix</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name.framework/Versions/SomeVersion/Name</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name.framework/Name_Suffix</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">Location/Name.framework/Name</span></tt></li>
+</ul>
+<p>Returns <tt class="docutils literal"><span class="pre">None</span></tt> if not found, or a mapping equivalent to:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="nb">dict</span><span class="p">(</span>
+ <span class="n">location</span><span class="o">=</span><span class="s">&#39;Location&#39;</span><span class="p">,</span>
+ <span class="n">name</span><span class="o">=</span><span class="s">&#39;Name.framework/Versions/SomeVersion/Name_Suffix&#39;</span><span class="p">,</span>
+ <span class="n">shortname</span><span class="o">=</span><span class="s">&#39;Name&#39;</span><span class="p">,</span>
+ <span class="n">version</span><span class="o">=</span><span class="s">&#39;SomeVersion&#39;</span><span class="p">,</span>
+ <span class="n">suffix</span><span class="o">=</span><span class="s">&#39;Suffix&#39;</span><span class="p">,</span>
+<span class="p">)</span>
+</pre></div>
+</div>
+</div></blockquote>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><em>SomeVersion</em> and <em>Suffix</em> are optional and may be None
+if not present.</p>
+</div>
+</dd></dl>
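+<p>A small illustration; the framework path below is made up for the
+example:</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.framework import framework_info
+
+info = framework_info('Python.framework/Versions/2.7/Python')
+# info['shortname'] == 'Python'
+# info['version']   == '2.7'
+# info['suffix']    is None (no suffix in this name)
+
+framework_info('libfoo.dylib')   # not a framework name, returns None
+</pre></div>
+</div>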
+
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="dylib.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.dylib</span></tt> &#8212; Generic dylib path manipulation</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="macho_o.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.mach_o</span></tt> &#8212; Low-level definitions</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/framework.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="macho_o.html" title="macholib.mach_o — Low-level definitions"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="dylib.html" title="macholib.dylib — Generic dylib path manipulation"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/genindex.html b/python/macholib/doc/_build/html/genindex.html
new file mode 100644
index 000000000..4c18044e2
--- /dev/null
+++ b/python/macholib/doc/_build/html/genindex.html
@@ -0,0 +1,365 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Index &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#B"><strong>B</strong></a>
+ | <a href="#D"><strong>D</strong></a>
+ | <a href="#F"><strong>F</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#P"><strong>P</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ | <a href="#T"><strong>T</strong></a>
+
+</div>
+<h2 id="B">B</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable">BasePackable (class in macholib.ptypes)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable._endian_">BasePackable._endian_ (in module macholib.ptypes)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="D">D</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="dyld.html#macholib.dyld.dydl_framework_path">dydl_framework_path() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_default_search">dyld_default_search() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_executable_path_search">dyld_executable_path_search() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_fallback_framework_path">dyld_fallback_framework_path() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_fallback_library_path">dyld_fallback_library_path() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_find">dyld_find() (in module macholib.dyld)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_image_suffix">dyld_image_suffix() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_image_suffix_search">dyld_image_suffix_search() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_library_path">dyld_library_path() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_loader_search">dyld_loader_search() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#macholib.dyld.dyld_override_search">dyld_override_search() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="dylib.html#macholib.dylib.dylib_info">dylib_info() (in module macholib.dylib)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="F">F</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="dyld.html#macholib.dyld.framework_find">framework_find() (in module macholib.dyld)</a>
+ </dt>
+
+
+ <dt><a href="framework.html#macholib.framework.framework_info">framework_info() (in module macholib.framework)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.from_fileobj">from_fileobj() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.from_mmap">from_mmap() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.from_str">from_str() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.from_tuple">from_tuple() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="MachO.html#macholib.MachO.MachO">MachO (class in macholib.MachO)</a>
+ </dt>
+
+
+ <dt><a href="MachoOGraph.html#macholib.MachOGraph.MachOGraph">MachOGraph (class in macholib.MachOGraph)</a>
+ </dt>
+
+
+ <dt><a href="dyld.html#module-macholib.dyld">macholib.dyld (module)</a>
+ </dt>
+
+
+ <dt><a href="dylib.html#module-macholib.dylib">macholib.dylib (module)</a>
+ </dt>
+
+
+ <dt><a href="framework.html#module-macholib.framework">macholib.framework (module)</a>
+ </dt>
+
+
+ <dt><a href="macho_o.html#module-macholib.mach_o">macholib.mach_o (module)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="MachO.html#module-macholib.MachO">macholib.MachO (module)</a>
+ </dt>
+
+
+ <dt><a href="MachoOGraph.html#module-macholib.MachOGraph">macholib.MachOGraph (module)</a>
+ </dt>
+
+
+ <dt><a href="MachoOStandalone.html#module-macholib.MachOStandalone">macholib.MachOStandalone (module)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#module-macholib.ptypes">macholib.ptypes (module)</a>
+ </dt>
+
+
+ <dt><a href="SymbolTable.html#module-macholib.SymbolTable">macholib.SymbolTable (module)</a>
+ </dt>
+
+
+ <dt><a href="MachoOStandalone.html#macholib.MachOStandalone.MachOStandalone">MachOStandalone (class in macholib.MachOStandalone)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="P">P</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_char">p_char (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_double">p_double (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_float">p_float (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_int16">p_int16 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_int32">p_int32 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_int64">p_int64 (class in macholib.ptypes)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_int8">p_int8 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_uint16">p_uint16 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_uint32">p_uint32 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_uint64">p_uint64 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.p_uint8">p_uint8 (class in macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.pypackable">pypackable() (in module macholib.ptypes)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.sizeof">sizeof() (in module macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.Structure">Structure (class in macholib.ptypes)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.Structure._fields_">Structure._fields_ (in module macholib.ptypes)</a>
+ </dt>
+
+
+ <dt><a href="SymbolTable.html#macholib.SymbolTable.SymbolTable">SymbolTable (class in macholib.SymbolTable)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="T">T</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.to_fileobj">to_fileobj() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.to_mmap">to_mmap() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="ptypes.html#macholib.ptypes.BasePackable.to_str">to_str() (macholib.ptypes.BasePackable method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+
+
+
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/index.html b/python/macholib/doc/_build/html/index.html
new file mode 100644
index 000000000..8b405f778
--- /dev/null
+++ b/python/macholib/doc/_build/html/index.html
@@ -0,0 +1,170 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Macholib - Analyze and edit Mach-O headers &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="#" />
+ <link rel="next" title="Release history" href="changelog.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ accesskey="N">next</a> |</li>
+ <li><a href="#">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="macholib-analyze-and-edit-mach-o-headers">
+<h1>Macholib - Analyze and edit Mach-O headers<a class="headerlink" href="#macholib-analyze-and-edit-mach-o-headers" title="Permalink to this headline">¶</a></h1>
+<p>macholib can be used to analyze and edit Mach-O headers, the executable
+format used by Mac OS X.</p>
+<p>It&#8217;s typically used as a dependency analysis tool, and also to rewrite dylib
+references in Mach-O headers to be <tt class="docutils literal"><span class="pre">&#64;executable_path</span></tt> relative.</p>
+<p>Though this tool targets a platform-specific file format, it is pure Python
+code that is platform- and endian-independent.</p>
+<div class="section" id="general-documentation">
+<h2>General documentation<a class="headerlink" href="#general-documentation" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html">Release history</a></li>
+<li class="toctree-l1"><a class="reference internal" href="license.html">License</a></li>
+<li class="toctree-l1"><a class="reference internal" href="scripts.html">Command-line tools</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="reference-guide">
+<h2>Reference Guide<a class="headerlink" href="#reference-guide" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="MachO.html"><tt class="docutils literal"><span class="pre">macholib.MachO</span></tt> &#8212; Utilities for reading and writing Mach-O headers</a></li>
+<li class="toctree-l1"><a class="reference internal" href="MachoOGraph.html"><tt class="docutils literal"><span class="pre">macholib.MachoGraph</span></tt> &#8212; Graph data structure of Mach-O dependencies</a></li>
+<li class="toctree-l1"><a class="reference internal" href="MachoOStandalone.html"><tt class="docutils literal"><span class="pre">macholib.MachOStandalone</span></tt> &#8212; Create standalone application bundles</a></li>
+<li class="toctree-l1"><a class="reference internal" href="SymbolTable.html"><tt class="docutils literal"><span class="pre">macholib.SymbolTable</span></tt> &#8212; Class to read the symbol table from a Mach-O header</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dyld.html"><tt class="docutils literal"><span class="pre">macholib.dyld</span></tt> &#8212; Dyld emulation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dylib.html"><tt class="docutils literal"><span class="pre">macholib.dylib</span></tt> &#8212; Generic dylib path manipulation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="framework.html"><tt class="docutils literal"><span class="pre">macholib.framework</span></tt> &#8212; Generic framework path manipulation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="macho_o.html"><tt class="docutils literal"><span class="pre">macholib.mach_o</span></tt> &#8212; Low-level definitions</a></li>
+<li class="toctree-l1"><a class="reference internal" href="ptypes.html"><tt class="docutils literal"><span class="pre">macholib.ptypes</span></tt> &#8212; Packable types</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="online-resources">
+<h2>Online Resources<a class="headerlink" href="#online-resources" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference external" href="http://bitbucket.org/ronaldoussoren/macholib/">Sourcecode repository on bitbucket</a></li>
+<li><a class="reference external" href="http://bitbucket.org/ronaldoussoren/macholib/issues">The issue tracker</a></li>
+<li><a class="reference external" href="http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html">Mac OS X ABI Mach-O File Format Reference at Apple</a></li>
+</ul>
+</div>
+<div class="section" id="contributors">
+<h2>Contributors<a class="headerlink" href="#contributors" title="Permalink to this headline">¶</a></h2>
+<p>Macholib was written by Bob Ippolito and is currently maintained by Ronald Oussoren &lt;<a class="reference external" href="mailto:ronaldoussoren&#37;&#52;&#48;mac&#46;com">ronaldoussoren<span>&#64;</span>mac<span>&#46;</span>com</a>&gt;.</p>
+</div>
+<div class="section" id="indices-and-tables">
+<h2>Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference internal" href="genindex.html"><em>Index</em></a></li>
+<li><a class="reference internal" href="py-modindex.html"><em>Module Index</em></a></li>
+<li><a class="reference internal" href="search.html"><em>Search Page</em></a></li>
+</ul>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="#">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Macholib - Analyze and edit Mach-O headers</a><ul>
+<li><a class="reference internal" href="#general-documentation">General documentation</a></li>
+<li><a class="reference internal" href="#reference-guide">Reference Guide</a></li>
+<li><a class="reference internal" href="#online-resources">Online Resources</a></li>
+<li><a class="reference internal" href="#contributors">Contributors</a></li>
+<li><a class="reference internal" href="#indices-and-tables">Indices and tables</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Next topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="next chapter">Release history</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/index.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ >next</a> |</li>
+ <li><a href="#">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/license.html b/python/macholib/doc/_build/html/license.html
new file mode 100644
index 000000000..88765bfce
--- /dev/null
+++ b/python/macholib/doc/_build/html/license.html
@@ -0,0 +1,140 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>License &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="Command-line tools" href="scripts.html" />
+ <link rel="prev" title="Release history" href="changelog.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="scripts.html" title="Command-line tools"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="license">
+<h1>License<a class="headerlink" href="#license" title="Permalink to this headline">¶</a></h1>
+<p>Copyright (c) Bob Ippolito</p>
+<p>Parts are copyright (c) 2010-2014 Ronald Oussoren</p>
+<div class="section" id="mit-license">
+<h2>MIT License<a class="headerlink" href="#mit-license" title="Permalink to this headline">¶</a></h2>
+<p>Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the &#8220;Software&#8221;), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.</p>
+<p>THE SOFTWARE IS PROVIDED &#8220;AS IS&#8221;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">License</a><ul>
+<li><a class="reference internal" href="#mit-license">MIT License</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="previous chapter">Release history</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="scripts.html"
+ title="next chapter">Command-line tools</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/license.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="scripts.html" title="Command-line tools"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="changelog.html" title="Release history"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/macho_o.html b/python/macholib/doc/_build/html/macho_o.html
new file mode 100644
index 000000000..5e41f7a96
--- /dev/null
+++ b/python/macholib/doc/_build/html/macho_o.html
@@ -0,0 +1,122 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.mach_o — Low-level definitions &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.ptypes — Packable types" href="ptypes.html" />
+ <link rel="prev" title="macholib.framework — Generic framework path manipulation" href="framework.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="ptypes.html" title="macholib.ptypes — Packable types"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="framework.html" title="macholib.framework — Generic framework path manipulation"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.mach_o">
+<span id="macholib-mach-o-low-level-definitions"></span><h1><a class="reference internal" href="#module-macholib.mach_o" title="macholib.mach_o: Low-level definitions of elements in a Mach-O file"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.mach_o</span></tt></a> &#8212; Low-level definitions<a class="headerlink" href="#module-macholib.mach_o" title="Permalink to this headline">¶</a></h1>
+<p>This module defines constants and packable structure types
+that correspond to elements of a Mach-O file.</p>
+<p>The names of classes and constants are the same as those in
+the Mach-O header files and
+<a class="reference external" href="http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html">Apple&#8217;s documentation</a>. This document therefore
+doesn&#8217;t explicitly document the names in this module.</p>
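+<p>As a taste of what the module provides, the magic numbers that
+identify 32- and 64-bit Mach-O files are exported under their usual
+names:</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.mach_o import MH_MAGIC, MH_MAGIC_64
+
+print(hex(MH_MAGIC))      # 0xfeedface, 32-bit Mach-O
+print(hex(MH_MAGIC_64))   # 0xfeedfacf, 64-bit Mach-O
+</pre></div>
+</div>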
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="framework.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.framework</span></tt> &#8212; Generic framework path manipulation</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="ptypes.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.ptypes</span></tt> &#8212; Packable types</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/macho_o.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="ptypes.html" title="macholib.ptypes — Packable types"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="framework.html" title="macholib.framework — Generic framework path manipulation"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/objects.inv b/python/macholib/doc/_build/html/objects.inv
new file mode 100644
index 000000000..4edc7d27d
--- /dev/null
+++ b/python/macholib/doc/_build/html/objects.inv
Binary files differ
diff --git a/python/macholib/doc/_build/html/ptypes.html b/python/macholib/doc/_build/html/ptypes.html
new file mode 100644
index 000000000..a74627186
--- /dev/null
+++ b/python/macholib/doc/_build/html/ptypes.html
@@ -0,0 +1,317 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>macholib.ptypes — Packable types &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="prev" title="macholib.mach_o — Low-level definitions" href="macho_o.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="macho_o.html" title="macholib.mach_o — Low-level definitions"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="module-macholib.ptypes">
+<span id="macholib-ptypes-packable-types"></span><h1><a class="reference internal" href="#module-macholib.ptypes" title="macholib.ptypes: Serializable types"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.ptypes</span></tt></a> &#8212; Packable types<a class="headerlink" href="#module-macholib.ptypes" title="Permalink to this headline">¶</a></h1>
+<p>The module <a class="reference internal" href="#module-macholib.ptypes" title="macholib.ptypes: Serializable types"><tt class="xref py py-mod docutils literal"><span class="pre">macholib.ptypes</span></tt></a> defines types that can be serialized into
+byte arrays, both for basic types and structured types (C <tt class="docutils literal"><span class="pre">struct</span></tt> values).</p>
+<div class="section" id="utility-functions">
+<h2>Utility functions<a class="headerlink" href="#utility-functions" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="macholib.ptypes.sizeof">
+<tt class="descclassname">macholib.ptypes.</tt><tt class="descname">sizeof</tt><big>(</big><em>value</em><big>)</big><a class="headerlink" href="#macholib.ptypes.sizeof" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns the size in bytes of an object when packed, and raises <tt class="xref py py-exc docutils literal"><span class="pre">ValueError</span></tt>
+for inappropriate values.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="macholib.ptypes.pypackable">
+<tt class="descclassname">macholib.ptypes.</tt><tt class="descname">pypackable</tt><big>(</big><em>name</em>, <em>pytype</em>, <em>format</em><big>)</big><a class="headerlink" href="#macholib.ptypes.pypackable" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a packable type that is a subclass of the Python type
+<em>pytype</em>. The value is converted to and from the packed format using
+the struct <em>format</em>.</p>
+</dd></dl>
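+<p>A minimal sketch; the type name and the struct format below are our
+own choices for the example:</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.ptypes import pypackable, sizeof
+
+# A 32-bit unsigned integer packed with the struct format 'L'.
+my_uint32 = pypackable('my_uint32', int, 'L')
+
+v = my_uint32(42)
+print(sizeof(v))          # 4 -- the packed size in bytes
+print(len(v.to_str()))    # 4 as well; to_str() yields the packed bytes
+</pre></div>
+</div>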
+
+</div>
+<div class="section" id="packable-types">
+<h2>Packable types<a class="headerlink" href="#packable-types" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="macholib.ptypes.BasePackable">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">BasePackable</tt><a class="headerlink" href="#macholib.ptypes.BasePackable" title="Permalink to this definition">¶</a></dt>
+<dd><p>All packable types are a subclass of <a class="reference internal" href="#macholib.ptypes.BasePackable" title="macholib.ptypes.BasePackable"><tt class="xref py py-class docutils literal"><span class="pre">BasePackable</span></tt></a>, which defines
+the basic interface but is itself an abstract base class.</p>
+<dl class="data">
+<dt id="macholib.ptypes.BasePackable._endian_">
+<tt class="descname">_endian_</tt><a class="headerlink" href="#macholib.ptypes.BasePackable._endian_" title="Permalink to this definition">¶</a></dt>
+<dd><p>The byteorder of a packed value. This will be <tt class="docutils literal"><span class="pre">&quot;&lt;&quot;</span></tt> for
+little-endian values and <tt class="docutils literal"><span class="pre">&quot;&gt;&quot;</span></tt> for big-endian ones.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>The endianness attribute is a public value so that both
+big- and little-endian file formats can be supported.</p>
+<p class="last">The name suggests that this attribute is private; this
+is partially for historical reasons and partially to
+avoid conflicts with field names in C structs.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.from_mmap">
+<tt class="descname">from_mmap</tt><big>(</big><em>mmap</em>, <em>ptr</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.from_mmap" title="Permalink to this definition">¶</a></dt>
+<dd><p>This class method constructs the value from a subview of a
+<a class="reference external" href="http://docs.python.org/library/mmap.html#mmap.mmap" title="(in Python v2.7)"><tt class="xref py py-class docutils literal"><span class="pre">mmap.mmap</span></tt></a> object. It uses bytes starting at offset <em>ptr</em> and
+reads just enough bytes to read the entire object.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.from_fileobj">
+<tt class="descname">from_fileobj</tt><big>(</big><em>fp</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.from_fileobj" title="Permalink to this definition">¶</a></dt>
+<dd><p>This class method constructs the value by reading just enough bytes
+from a file-like object.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">The file must be opened in binary mode, that is read calls
+should return byte-strings and not unicode-strings.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.from_str">
+<tt class="descname">from_str</tt><big>(</big><em>value</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.from_str" title="Permalink to this definition">¶</a></dt>
+<dd><p>This class method constructs the value by using the struct module
+to parse the given bytes.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">contrary to what the name suggests the argument to this
+method is a byte-string, not a unicode-string.</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.from_tuple">
+<tt class="descname">from_tuple</tt><big>(</big><em>fp</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.from_tuple" title="Permalink to this definition">¶</a></dt>
+<dd><p>This class method constructs the object from a tuple with all fields.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.to_str">
+<tt class="descname">to_str</tt><big>(</big><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.to_str" title="Permalink to this definition">¶</a></dt>
+<dd><p>Returns a byte representation of the value.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">there is no default implementation for this method</p>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.to_fileobj">
+<tt class="descname">to_fileobj</tt><big>(</big><em>fp</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.to_fileobj" title="Permalink to this definition">¶</a></dt>
+<dd><p>Write a byte representation of the value to the given file-like
+object. The file should be opened in binary mode.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="macholib.ptypes.BasePackable.to_mmap">
+<tt class="descname">to_mmap</tt><big>(</big><em>mmap</em>, <em>ptr</em><big>)</big><a class="headerlink" href="#macholib.ptypes.BasePackable.to_mmap" title="Permalink to this definition">¶</a></dt>
+<dd><p>Write the byte representation of the value to a <a class="reference external" href="http://docs.python.org/library/mmap.html#mmap.mmap" title="(in Python v2.7)"><tt class="xref py py-class docutils literal"><span class="pre">mmap.mmap</span></tt></a>
+object, starting at offset <em>ptr</em>.</p>
+</dd></dl>
+
+</dd></dl>
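+<p>A round-trip sketch using one of the predefined packable types
+(see the next section); the value is an arbitrary example:</p>
+<div class="highlight-python"><div class="highlight"><pre>import io
+from macholib.ptypes import p_uint32
+
+v = p_uint32(0xfeedface)
+data = v.to_str()                             # packed byte representation
+w = p_uint32.from_fileobj(io.BytesIO(data))   # read the value back
+assert int(w) == 0xfeedface
+</pre></div>
+</div>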
+
+<dl class="class">
+<dt id="macholib.ptypes.Structure">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">Structure</tt><big>(</big><em>...</em><big>)</big><a class="headerlink" href="#macholib.ptypes.Structure" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="data">
+<dt id="macholib.ptypes.Structure._fields_">
+<tt class="descname">_fields_</tt><a class="headerlink" href="#macholib.ptypes.Structure._fields_" title="Permalink to this definition">¶</a></dt>
+<dd><p>This class attribute is a list that contains the fields of the
+structure in the right order. Every item of this list is a tuple
+with two elements: the first is the name of the field, and
+the second is the packable type for the field.</p>
+<p>Every subclass of <a class="reference internal" href="#macholib.ptypes.Structure" title="macholib.ptypes.Structure"><tt class="xref py py-class docutils literal"><span class="pre">Structure</span></tt></a> must define <em>_fields_</em> to be
+useful, and the value of <em>_fields_</em> should not be changed after
+class construction.</p>
+</dd></dl>
+
+</dd></dl>
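+<p>A hypothetical structure definition; the class and field names below
+are illustrative, not part of macholib:</p>
+<div class="highlight-python"><div class="highlight"><pre>from macholib.ptypes import Structure, p_uint32
+
+class example_header(Structure):
+    _fields_ = (
+        ('magic', p_uint32),
+        ('ncmds', p_uint32),
+    )
+
+h = example_header(magic=0xfeedface, ncmds=0)
+data = h.to_str()   # fields are packed in _fields_ order
+</pre></div>
+</div>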
+
+</div>
+<div class="section" id="basic-packables">
+<h2>Basic packables<a class="headerlink" href="#basic-packables" title="Permalink to this headline">¶</a></h2>
+<p>In addition to the core functionality, this module defines a number of
+<a class="reference internal" href="#macholib.ptypes.pypackable" title="macholib.ptypes.pypackable"><tt class="xref py py-func docutils literal"><span class="pre">pypackable()</span></tt></a> types that correspond to useful basic C types.</p>
+<dl class="class">
+<dt id="macholib.ptypes.p_char">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_char</tt><big>(</big><span class="optional">[</span><em>value</em><span class="optional">]</span><big>)</big><a class="headerlink" href="#macholib.ptypes.p_char" title="Permalink to this definition">¶</a></dt>
+<dd><p>A byte string of length 1</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_int8">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_int8</tt><a class="headerlink" href="#macholib.ptypes.p_int8" title="Permalink to this definition">¶</a></dt>
+<dd><p>An 8-bit signed integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_uint8">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_uint8</tt><a class="headerlink" href="#macholib.ptypes.p_uint8" title="Permalink to this definition">¶</a></dt>
+<dd><p>An 8-bit unsigned integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_int16">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_int16</tt><a class="headerlink" href="#macholib.ptypes.p_int16" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 16-bit signed integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_uint16">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_uint16</tt><a class="headerlink" href="#macholib.ptypes.p_uint16" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 16-bit unsigned integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_int32">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_int32</tt><a class="headerlink" href="#macholib.ptypes.p_int32" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 32-bit signed integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_uint32">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_uint32</tt><a class="headerlink" href="#macholib.ptypes.p_uint32" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 32-bit unsigned integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_int64">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_int64</tt><a class="headerlink" href="#macholib.ptypes.p_int64" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 64-bit signed integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_uint64">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_uint64</tt><a class="headerlink" href="#macholib.ptypes.p_uint64" title="Permalink to this definition">¶</a></dt>
+<dd><p>A 64-bit unsigned integer</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_float">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_float</tt><a class="headerlink" href="#macholib.ptypes.p_float" title="Permalink to this definition">¶</a></dt>
+<dd><p>A floating-point value of type <tt class="docutils literal"><span class="pre">float</span></tt></p>
+</dd></dl>
+
+<dl class="class">
+<dt id="macholib.ptypes.p_double">
+<em class="property">class </em><tt class="descclassname">macholib.ptypes.</tt><tt class="descname">p_double</tt><a class="headerlink" href="#macholib.ptypes.p_double" title="Permalink to this definition">¶</a></dt>
+<dd><p>A floating-point value of type <tt class="docutils literal"><span class="pre">double</span></tt></p>
+</dd></dl>
+
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">the module exports a number of other types with
+names starting with <tt class="docutils literal"><span class="pre">p_</span></tt>, such as <tt class="docutils literal"><span class="pre">p_int</span></tt>. Those types
+are deprecated and should not be used.</p>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#"><tt class="docutils literal"><span class="pre">macholib.ptypes</span></tt> &#8212; Packable types</a><ul>
+<li><a class="reference internal" href="#utility-functions">Utility functions</a></li>
+<li><a class="reference internal" href="#packable-types">Packable types</a></li>
+<li><a class="reference internal" href="#basic-packables">Basic packables</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="macho_o.html"
+ title="previous chapter"><tt class="docutils literal"><span class="pre">macholib.mach_o</span></tt> &#8212; Low-level definitions</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/ptypes.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="macho_o.html" title="macholib.mach_o — Low-level definitions"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/py-modindex.html b/python/macholib/doc/_build/html/py-modindex.html
new file mode 100644
index 000000000..30a195a20
--- /dev/null
+++ b/python/macholib/doc/_build/html/py-modindex.html
@@ -0,0 +1,154 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Python Module Index &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="#" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+ <h1>Python Module Index</h1>
+
+ <div class="modindex-jumpbox">
+ <a href="#cap-m"><strong>m</strong></a>
+ </div>
+
+ <table class="indextable modindextable" cellspacing="0" cellpadding="2">
+ <tr class="pcap"><td></td><td>&nbsp;</td><td></td></tr>
+ <tr class="cap" id="cap-m"><td></td><td>
+ <strong>m</strong></td><td></td></tr>
+ <tr>
+ <td><img src="_static/minus.png" class="toggler"
+ id="toggle-1" style="display: none" alt="-" /></td>
+ <td>
+ <tt class="xref">macholib</tt></td><td>
+ <em></em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="dyld.html#module-macholib.dyld"><tt class="xref">macholib.dyld</tt></a></td><td>
+      <em>Emulation of the functionality of the dynamic linker</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="dylib.html#module-macholib.dylib"><tt class="xref">macholib.dylib</tt></a></td><td>
+ <em>Generic dylib path manipulation</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="framework.html#module-macholib.framework"><tt class="xref">macholib.framework</tt></a></td><td>
+ <em>Generic framework path manipulation</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="macho_o.html#module-macholib.mach_o"><tt class="xref">macholib.mach_o</tt></a></td><td>
+ <em>Low-level definitions of elements in a Mach-O file</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="MachO.html#module-macholib.MachO"><tt class="xref">macholib.MachO</tt></a></td><td>
+ <em>Utilities for reading and writing Mach-O headers</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="MachoOGraph.html#module-macholib.MachOGraph"><tt class="xref">macholib.MachOGraph</tt></a></td><td>
+ <em>Graph data structure of Mach-O dependencies</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="MachoOStandalone.html#module-macholib.MachOStandalone"><tt class="xref">macholib.MachOStandalone</tt></a></td><td>
+ <em>Create standalone application bundles</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="ptypes.html#module-macholib.ptypes"><tt class="xref">macholib.ptypes</tt></a></td><td>
+ <em>Serializable types</em></td></tr>
+ <tr class="cg-1">
+ <td></td>
+ <td>&nbsp;&nbsp;&nbsp;
+ <a href="SymbolTable.html#module-macholib.SymbolTable"><tt class="xref">macholib.SymbolTable</tt></a></td><td>
+ <em>Class to read the symbol table from a Mach-O header</em></td></tr>
+ </table>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="#" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/scripts.html b/python/macholib/doc/_build/html/scripts.html
new file mode 100644
index 000000000..38e540300
--- /dev/null
+++ b/python/macholib/doc/_build/html/scripts.html
@@ -0,0 +1,156 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Command-line tools &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <link rel="next" title="macholib.MachO — Utilities for reading and writing Mach-O headers" href="MachO.html" />
+ <link rel="prev" title="License" href="license.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachO.html" title="macholib.MachO — Utilities for reading and writing Mach-O headers"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="command-line-tools">
+<h1>Command-line tools<a class="headerlink" href="#command-line-tools" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="python-m-macholib-find">
+<h2>python -m macholib find<a class="headerlink" href="#python-m-macholib-find" title="Permalink to this headline">¶</a></h2>
+<p>Usage:</p>
+<div class="highlight-python"><div class="highlight"><pre>$ python -mmacholib find dir...
+</pre></div>
+</div>
+<p>Print the paths of all Mach-O binaries
+in the specified directories.</p>
+</div>
+<div class="section" id="python-m-macholib-standalone">
+<h2>python -m macholib standalone<a class="headerlink" href="#python-m-macholib-standalone" title="Permalink to this headline">¶</a></h2>
+<p>Usage:</p>
+<div class="highlight-python"><div class="highlight"><pre>$ python -m macholib standalone appbundle...
+</pre></div>
+</div>
+<p>Convert one or more application bundles into
+standalone bundles. That is, copy all non-system
+shared libraries and frameworks used by the bundle
+into the bundle and rewrite load commands.</p>
+</div>
+<div class="section" id="python-mmacholib-dump">
+<h2>python -mmacholib dump<a class="headerlink" href="#python-mmacholib-dump" title="Permalink to this headline">¶</a></h2>
+<p>Usage:</p>
+<div class="highlight-python"><div class="highlight"><pre>$ python -mmacholib dump dir...
+</pre></div>
+</div>
+<p>Print information about all architectures in a
+Mach-O file as well as all libraries it links
+to.</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Command-line tools</a><ul>
+<li><a class="reference internal" href="#python-m-macholib-find">python -m macholib find</a></li>
+<li><a class="reference internal" href="#python-m-macholib-standalone">python -m macholib standalone</a></li>
+<li><a class="reference internal" href="#python-mmacholib-dump">python -mmacholib dump</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="license.html"
+ title="previous chapter">License</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="MachO.html"
+ title="next chapter"><tt class="docutils literal"><span class="pre">macholib.MachO</span></tt> &#8212; Utilities for reading and writing Mach-O headers</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/scripts.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li class="right" >
+ <a href="MachO.html" title="macholib.MachO — Utilities for reading and writing Mach-O headers"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="license.html" title="License"
+ >previous</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/search.html b/python/macholib/doc/_build/html/search.html
new file mode 100644
index 000000000..1aa1b53df
--- /dev/null
+++ b/python/macholib/doc/_build/html/search.html
@@ -0,0 +1,105 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Search &mdash; macholib 1.7 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: './',
+ VERSION: '1.7',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <script type="text/javascript" src="_static/searchtools.js"></script>
+ <link rel="top" title="macholib 1.7 documentation" href="index.html" />
+ <script type="text/javascript">
+ jQuery(function() { Search.loadIndex("searchindex.js"); });
+ </script>
+
+ <script type="text/javascript" id="searchindexloader"></script>
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <h1 id="search-documentation">Search</h1>
+ <div id="fallback" class="admonition warning">
+ <script type="text/javascript">$('#fallback').hide();</script>
+ <p>
+ Please activate JavaScript to enable the search
+ functionality.
+ </p>
+ </div>
+ <p>
+ From here you can search these documents. Enter your search
+ words into the box below and click "search". Note that the search
+ function will automatically search for all of the words. Pages
+ containing fewer words won't appear in the result list.
+ </p>
+ <form action="" method="get">
+ <input type="text" name="q" value="" />
+ <input type="submit" value="search" />
+ <span id="search-progress" style="padding-left: 10px"></span>
+ </form>
+
+ <div id="search-results">
+
+ </div>
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="py-modindex.html" title="Python Module Index"
+ >modules</a> |</li>
+ <li><a href="index.html">macholib 1.7 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2010-2011, Ronald Oussoren.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.1.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/macholib/doc/_build/html/searchindex.js b/python/macholib/doc/_build/html/searchindex.js
new file mode 100644
index 000000000..1ed235d8f
--- /dev/null
+++ b/python/macholib/doc/_build/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({envversion:42,terms:{osx:4,all:[0,4,5,8,10,11],code:[1,8,4],partial:5,represent:5,four:[7,6],follow:[7,6],ptr:5,privat:[5,4],from_mmap:5,ronaldoussoren:1,dyld_library_path:11,those:[11,3,5,4],aris:2,merchant:2,p_char:5,sourc:4,everi:5,string:[5,4],p_int8:[5,4],than:[11,5],p_longlong:4,fat:12,affect:4,tri:4,magic:4,level:1,py2app:4,iter:11,"try":4,item:[11,5],dir:10,pleas:4,x86_64:4,impli:2,cpu_type_nam:4,direct:9,sign:5,second:5,odd:4,append:11,even:[11,4],index:1,what:5,p_int16:[5,4],dorsei:4,abi:1,abl:5,current:[1,11],"public":5,experiment:4,"new":4,oussoren:[1,2],method:[5,4],deduc:4,metadata:4,sizeof:5,gener:4,lc_load_upward_dylib:4,onli:11,behaviour:4,path:[11,4],modifi:2,"_endian_":5,valu:[11,5,4],search:[1,11,4],subview:5,p_int64:[5,4],permit:2,macho_standalon:4,implement:[8,5,4],from_fileobj:5,semant:[11,4],useful:5,app:4,filenam:[7,12,6],inappropri:5,instal:[11,4],lc_function_start:4,from:12,describ:4,would:4,univers:12,dure:4,next:4,call:5,usr:4,typo:4,type:[1,3,4],minor:4,more:[11,12,10,9,4],share:[11,10],nguyen:4,p_uint:4,particular:2,sourcecod:1,herebi:2,must:5,none:[7,11,6],setup:4,work:[8,4],can:[1,4,5,7,6,11,12],endian:[1,5,4],purpos:2,root:0,claim:2,bogu:4,ppc64:4,accept:11,serial:5,unsign:5,looser:11,alwai:[11,4],end:8,classifi:4,far:8,pure:1,instead:4,updat:8,map:[7,6,4],after:[8,5],ronald:[1,2],mac:1,mai:7,underscor:4,associ:2,p_long:4,author:[8,2],correspond:[3,5,4],element:[5,3],issu:[1,4],inform:[4,10,7,6,11,12],"switch":4,maintain:1,environ:[11,4],reloc:4,fallback:11,order:[5,4],furnish:2,to_mmap:5,move:4,becaus:[8,4],still:4,pointer:4,dynam:[7,11,12,6],p_uint32:[5,4],fit:2,fix:4,better:4,tort:2,contrari:5,therefor:3,non:[0,10],"return":[7,11,6,5,4],env:[0,11],thei:4,python:[1,11,5,4],initi:8,framework:[11,4],now:4,discuss:9,from_str:5,name:[3,4,5,7,6,11],name_suffix:[7,6],mode:5,found:[7,11,6],unicod:5,side:4,compil:4,list:[11,5,4],connect:2,extract:[7,6],event:2,out:2,variabl:4,antoin:4,newli:4,publish:2,dylib_info:6,rewrit:[1,10,4],rel:[1,11],print:[10,4],correct:11,earlier:4,insid:4,macho_dump:4,free:2,reason:5,base:[0,5],dictionari:11,p_float:5,org:4,"byte":[5,4],modulegraph:8,dyld_find:[11,4],dyld_image_suffix:11,david:4,length:5,isn:4,lc_encryption_info:4,lifetim:4,confus:4,suppport:4,first:[5,4],softwar:2,suffix:[7,11,6],directli:11,onc:4,arrai:5,independ:1,number:[11,5,4],restrict:2,construct:5,open:[8,5],size:5,given:[8,5],width:4,licens:1,system:[0,11,10,4],messag:4,mercuri:4,too:4,lc_version_min_iphoneo:4,includ:[2,4],option:[4,5,7,8,6,11],tool:[1,4],copi:[0,2,10],setuptool:4,specifi:[11,8,10],part:2,pars:5,holder:2,packabl:[1,3,4],kind:2,grep:4,target:1,provid:[2,4],remov:4,see:[11,4],were:4,"function":[6,7,11,8,4],loader_path:[11,4],linker:[11,4],appbundl:10,argument:[11,8,5,4],packag:[8,4],properli:8,lightli:4,need:4,lc_data_in_cod:4,sell:2,caus:4,equival:[7,6],also:[1,8,4],builtin:8,arch_map:4,take:[7,6],which:[0,4,5,9,11,12],noth:11,distribut:[2,4],though:[1,11,4],dydl_framework_path:11,previou:4,clearer:4,sublicens:2,maco:12,"class":[0,12,9],charg:2,renam:4,framework_info:7,later:12,request:11,doe:8,shortnam:[7,6],show:4,xcode:4,permiss:2,find:[11,4],dyld_executable_path_search:11,locat:[0,7,11,6,4],macho_find:4,copyright:2,enough:5,should:[11,12,5,4],version:[7,11,6,4],dict:[7,6],to_fileobj:5,p_int32:[5,4],variou:4,express:2,stop:4,nativ:4,cannot:[11,4],liabl:2,partic:5,byteord:5,enabl:12,yield:11,patch:4,irrespect:4,"default":[11,8,5],contain:[8,5,4],where:11,view:4,set:4,dump:4,pytyp:5,from_tupl:5,lion:4,result:[11,8,4],dyld_ove
rride_search:11,basepack:5,written:1,p_byte:4,attribut:5,pypack:5,parent:4,p_ptr:4,entir:5,both:5,plugin:0,dyld_default_search:[11,4],contract:2,matthia:4,py3k:4,com:1,load:[10,4],unclear:4,undocu:4,somevers:[7,6],point:5,loader:[11,4],platform:1,assum:0,damag:2,liabil:2,marc:4,due:4,empti:11,whom:2,basic:4,valueerror:[11,5],dyld_fallback:4,imag:11,convert:[5,10],ani:[8,2],p_uint64:[5,4],p_int:[5,4],present:[7,6,4],look:11,histor:5,defin:[0,3,5,7,9,6,11,12],mmacholib:4,pack:5,have:4,itself:[5,4],myapp:4,dyld_fallback_library_path:11,p_uint16:[5,4],grant:2,suggest:5,make:4,same:[11,3],binari:[4,5,8,10,11,12],pysid:4,p_short:4,document:4,conflict:5,http:4,effect:4,action:2,rais:[11,5],user:[11,8,4],typic:1,p_doubl:5,noninfring:2,appropri:4,p_ushort:4,entri:11,well:[8,10],person:2,without:[2,4],command:[1,4],thi:[0,1,2,3,4,5,7,8,9,6,11,12],filesystem:11,undefin:4,just:5,when:[11,5,4],obtain:2,guanqun:4,struct:5,mmap:5,had:4,except:4,littl:5,add:4,lc_linker_opt:4,input:11,modul:[0,1,3,4,5,7,8,9,6,11,12],build:4,bin:4,around:4,format:[1,12,5],dest:0,big:5,p_ulonglong:4,know:8,bit:[8,5,4],python2:4,like:[11,8,5],specif:1,deprec:[8,5,4],manual:11,integ:5,either:11,output:4,prepent:11,page:[1,11],openfil:8,right:[5,2],old:4,p_uint8:[5,4],dyld_image_suffix_search:11,some:4,ptype:[1,4],someversion_suffix:6,"export":5,indirect:9,librari:[10,7,9,6,11,12],p_ubyt:4,avoid:5,shall:2,definit:[1,4],framwork:11,subclass:5,not_system_fil:4,tracker:[1,4],larg:4,refer:[11,12],core:5,ippolito:[1,2],object:[11,8,5,12],usag:10,broken:4,repositori:[1,4],offset:5,executable_path:[0,1,11],about:[10,4],actual:4,testsuit:4,explictli:3,doubl:5,"float":5,warranti:2,been:[8,4],chang:[11,5,4],reli:4,your:4,merg:2,deal:2,aren:[11,4],execut:[1,12,11],support:[8,5,4],avail:4,start:[11,5,4],lc_version_min_macosx:4,appl:[1,3],interfac:5,low:1,ringwald:4,analysi:1,"_fields_":5,form:[7,6],tupl:5,link:[11,10,4],p_ulong:4,line:[1,4],dyld_fallback_framework_path:11,bug:[8,4],framework_find:11,possibl:4,whether:2,bugfix:4,below:4,limit:2,arm64:4,otherwis:2,problem:4,featur:4,constant:[3,4],creat:12,"abstract":5,doesn:[11,3,8,4],repres:[9,4],exist:12,file:[0,1,2,3,4,5,8,9,10,11,12],bob:[1,2],codebas:4,again:4,lc_encryption_info_64:4,"2to3":4,nam:4,field:5,other:[9,5,2],futur:4,test:4,you:4,architectur:10,mach_o:[1,4],dyld:4,bitbucket:[1,4],sphinx:4,longer:4,directori:[0,11,10],dyld_framework_path:11,dyld_loader_search:[11,4],descript:11,indirectli:11,to_str:5,cpu:4},objtypes:{"0":"py:module","1":"py:method","2":"py:function","3":"py:class","4":"py:data"},objnames:{"0":["py","module","Python module"],"1":["py","method","Python method"],"2":["py","function","Python function"],"3":["py","class","Python class"],"4":["py","data","Python data"]},filenames:["MachoOStandalone","index","license","macho_o","changelog","ptypes","dylib","framework","SymbolTable","MachoOGraph","scripts","dyld","MachO"],titles:["<tt class=\"docutils literal\"><span class=\"pre\">macholib.MachOStandalone</span></tt> &#8212; Create standalone application bundles","Macholib - Analyze and edit Mach-O headers","License","<tt class=\"docutils literal\"><span class=\"pre\">macholib.mach_o</span></tt> &#8212; Low-level definitions","Release history","<tt class=\"docutils literal\"><span class=\"pre\">macholib.ptypes</span></tt> &#8212; Packable types","<tt class=\"docutils literal\"><span class=\"pre\">macholib.dylib</span></tt> &#8212; Generic dylib path manipulation","<tt class=\"docutils literal\"><span class=\"pre\">macholib.framework</span></tt> &#8212; Generic 
framework path manipulation","<tt class=\"docutils literal\"><span class=\"pre\">macholib.SymbolTable</span></tt> &#8212; Class to read the symbol table from a Mach-O header","<tt class=\"docutils literal\"><span class=\"pre\">macholib.MachoGraph</span></tt> &#8212; Graph data structure of Mach-O dependencies","Command-line tools","<tt class=\"docutils literal\"><span class=\"pre\">macholib.dyld</span></tt> &#8212; Dyld emulation","<tt class=\"docutils literal\"><span class=\"pre\">macholib.MachO</span></tt> &#8212; Utilities for reading and writing Mach-O headers"],objects:{"macholib.MachO":{MachO:[12,3,1,""]},"macholib.MachOStandalone":{MachOStandalone:[0,3,1,""]},"macholib.ptypes":{p_char:[5,3,1,""],BasePackable:[5,3,1,""],p_uint16:[5,3,1,""],p_uint8:[5,3,1,""],p_uint64:[5,3,1,""],p_int8:[5,3,1,""],p_uint32:[5,3,1,""],p_float:[5,3,1,""],p_int64:[5,3,1,""],pypackable:[5,2,1,""],p_double:[5,3,1,""],p_int16:[5,3,1,""],p_int32:[5,3,1,""],sizeof:[5,2,1,""],Structure:[5,3,1,""]},"macholib.MachOGraph":{MachOGraph:[9,3,1,""]},macholib:{mach_o:[3,0,0,"-"],MachOGraph:[9,0,0,"-"],ptypes:[5,0,0,"-"],framework:[7,0,0,"-"],SymbolTable:[8,0,0,"-"],MachOStandalone:[0,0,0,"-"],dylib:[6,0,0,"-"],dyld:[11,0,0,"-"],MachO:[12,0,0,"-"]},"macholib.ptypes.Structure":{"_fields_":[5,4,1,""]},"macholib.SymbolTable":{SymbolTable:[8,3,1,""]},"macholib.dylib":{dylib_info:[6,2,1,""]},"macholib.ptypes.BasePackable":{from_str:[5,1,1,""],to_mmap:[5,1,1,""],from_tuple:[5,1,1,""],to_str:[5,1,1,""],to_fileobj:[5,1,1,""],from_mmap:[5,1,1,""],from_fileobj:[5,1,1,""],"_endian_":[5,4,1,""]},"macholib.framework":{framework_info:[7,2,1,""]},"macholib.dyld":{framework_find:[11,2,1,""],dyld_executable_path_search:[11,2,1,""],dyld_override_search:[11,2,1,""],dyld_find:[11,2,1,""],dydl_framework_path:[11,2,1,""],dyld_image_suffix_search:[11,2,1,""],dyld_library_path:[11,2,1,""],dyld_image_suffix:[11,2,1,""],dyld_default_search:[11,2,1,""],dyld_fallback_framework_path:[11,2,1,""],dyld_loader_search:[11,2,1,""],dyld_fallback_library_path:[11,2,1,""]}},titleterms:{bundl:0,machostandalon:0,dump:10,definit:3,symbolt:8,ptype:5,indic:1,header:[1,8,12],framework:7,tabl:[1,8],contributor:1,onlin:1,guid:1,find:10,from:8,creat:0,graph:9,licens:2,machograph:9,"class":8,write:12,read:[12,8],analyz:1,basic:5,macho:12,type:5,mit:2,refer:1,"function":5,applic:0,mach_o:3,resourc:1,python:10,symbol:8,manipul:[7,6],tool:10,util:[12,5],standalon:[0,10],low:3,releas:4,depend:9,line:10,data:9,packabl:5,mach:[1,8,9,12],document:1,dylib:6,level:3,edit:1,macholib:[0,1,3,4,5,6,7,8,9,10,11,12],structur:9,histori:4,emul:11,dyld:11,command:10,gener:[1,6,7],path:[7,6],mmacholib:10}}) \ No newline at end of file
diff --git a/python/macholib/doc/changelog.rst b/python/macholib/doc/changelog.rst
new file mode 100644
index 000000000..d73299f91
--- /dev/null
+++ b/python/macholib/doc/changelog.rst
@@ -0,0 +1,242 @@
+Release history
+===============
+
+macholib 1.7
+------------
+
+* Added support for ARM64, LC_ENCRYPTION_INFO_64 and LC_LINKER_OPTION
+
+ Patch by Matthias Ringwald.
+
+* Load commands now have a "describe" method that returns more information
+ about the command.
+
+ Patch by David Dorsey.
+
+* The MAGIC value in the header was always represented in the native
+ byte order, instead of as the value read from the binary.
+
+ Patch by David Dorsey.
+
+* Added various new constants to "macholib.mach_o".
+
+ Patch by David Dorsey.
+
+macholib 1.6.1
+--------------
+
+* ?
+
+macholib 1.6
+------------
+
+* Add support for the '@loader_path' link command in
+ macholib.dyld:
+
+ - Added function ``macholib.dyld.dyld_loader_search``
+
+ - This function is used by ``macholib.dyld.dyld_find``,
+  and that function now has a new (optional) argument
+ with the path to the loader.
+
+* Also add support for '@loader_path' to macholib.MachoGraph,
+ using the newly added '@loader_path' support in the
+ dyld module.
+
+  Due to this support the *macho_standalone* tool can
+ now rewrite binaries that contain an '@loader_path' load
+ command.
+
+
+macholib 1.5.2
+--------------
+
+* Issue #93: Show the name of the affected file in the exception message
+ for Mach-O headers that are too large to relocate.
+
+
+macholib 1.5.1
+--------------
+
+* There were no 'classifiers' in the package metadata due to
+ a bug in setup.py.
+
+macholib 1.5
+--------------
+
+macholib 1.5 is a minor feature release
+
+* No longer use 2to3 to provide Python 3 support
+
+ As a side-effect of this macholib no longer supports
+ Python 2.5 and earlier.
+
+* Adds support for some new Mach-O load commands
+
+* Fix for py3k problem in macho_standalone.py
+
+ Patch by Guanqun Lu.
+
+* Fix for some issues in macho_dump.py
+
+  Patch by Nam Nguyen.
+
+* Issue #10: Fix for LC_DATA_IN_CODE linker commands, without
+ this fix py2app cannot build application bundles when
+ the source binaries have been compiled with Xcode 4.5.
+
+* Issue #6: Fix for LC_ENCRYPTION_INFO linker commands
+
+* Use the mach header information to print the cpu type of a
+ binary, instead of trying to deduce that from pointer width
+ and endianness.
+
+  Changed the code because of issue #6, in which a user tried to
+  dump an iOS binary, which resulted in bogus output in the previous
+  releases.
+
+* The mapping ``macholib.macho_dump.ARCH_MAP`` is undocumented
+ and no longer used by macholib itself. It will be removed
+ in the next release.
+
+
+* The command-line tools ``macho_find``, ``macho_dump`` and
+ ``macho_standalone`` are deprecated. Use "python -mmacholib"
+ instead. That is::
+
+ $ python -mmacholib dump /usr/bin/grep
+
+ $ python -mmacholib find ~
+
+ $ python -mmacholib standalone myapp.app
+
+  This makes it clearer which version of the tools is used.
+
+macholib 1.4.3
+--------------
+
+macholib 1.4.3 is a minor feature release
+
+* Added strings for 'x86_64' and 'ppc64' to
+ macholib.mach_o.CPU_TYPE_NAMES.
+
+* macho_find and macho_dump were broken in the 1.4.2 release
+
+* added 'macholib.util.NOT_SYSTEM_FILES', a list of
+  files that aren't system paths even though they are
+ located in system locations.
+
+ Needed to work around a bug in PySide (see issue #32 in the
+ py2app tracker)
+
+
+
+macholib 1.4.2
+--------------
+
+macholib 1.4.2 is a minor bugfix release
+
+* The support for new load commands that was added in 1.4.1
+ contained a typo that caused problems on OSX 10.7 (Lion).
+
+macholib 1.4.1
+--------------
+
+macholib 1.4.1 is a minor feature release
+
+Features:
+
+- Add support for a number of new Mach-O load commands that were added
+ during the lifetime of OSX 10.6: ``LC_LOAD_UPWARD_DYLIB``,
+ ``LC_VERSION_MIN_MACOSX``, ``LC_VERSION_MIN_IPHONEOS`` and
+ ``LC_FUNCTION_STARTS``.
+
+macholib 1.4
+-------------
+
+macholib 1.4 is a feature release
+
+Features:
+
+- Documentation is now generated using `sphinx <http://pypi.python.org/pypi/sphinx>`_
+ and can be viewed at <http://packages.python.org/macholib>.
+
+- The repository has moved to bitbucket
+
+- There now is a testsuite
+
+- Private functionality inside modules was renamed to
+ a name starting with an underscore.
+
+ .. note:: if this change affects your code you are relying on undefined
+ implementation features, please stop using private functions.
+
+- The basic packable types in ``macholib.ptypes`` were renamed to better
+ represent the corresponding C type. The table below lists the old
+  and new names (the old names are still available, but are deprecated and
+ will be removed in a future release).
+
+ +--------------+--------------+
+ | **Old name** | **New name** |
+ +==============+==============+
+ | p_byte | p_int8 |
+ +--------------+--------------+
+ | p_ubyte | p_uint8 |
+ +--------------+--------------+
+ | p_short | p_int16 |
+ +--------------+--------------+
+ | p_ushort | p_uint16 |
+ +--------------+--------------+
+ | p_int | p_int32 |
+ +--------------+--------------+
+ | p_uint | p_uint32 |
+ +--------------+--------------+
+ | p_long | p_int32 |
+ +--------------+--------------+
+ | p_ulong | p_uint32 |
+ +--------------+--------------+
+ | p_longlong | p_int64 |
+ +--------------+--------------+
+ | p_ulonglong | p_uint64 |
+ +--------------+--------------+
+
+  ``macholib.ptypes.p_ptr`` is no longer present as it had an unclear
+ definition and isn't actually used in the codebase.
+
+
+Bug fixes:
+
+- The semantics of ``dyld.dyld_default_search`` were changed a bit;
+  it now first searches the framework path (if appropriate) and then
+ the linker path, irrespective of the value of the ``DYLD_FALLBACK*``
+ environment variables.
+
+ Previous versions would change the search order when those variables
+  were set, which is odd and doesn't correspond with the documented
+ behaviour of the system dyld.
+
+- It is once again possible to install using python2.5
+
+- The source distribution includes all files, this was broken
+ due to the switch to mercurial (which confused setuptools)
+
+macholib 1.3
+------------
+
+macholib 1.3 is a feature release.
+
+Features:
+
+- Experimental Python 3.x support
+
+ This version contains lightly tested support for Python 3.
+
+macholib 1.2.2
+--------------
+
+macholib 1.2.2 is a bugfix release.
+
+Bug fixes:
+
+- Macholib should work better with 64-bit code
+ (patch by Marc-Antoine Parent)
diff --git a/python/macholib/doc/conf.py b/python/macholib/doc/conf.py
new file mode 100644
index 000000000..08a48f3ed
--- /dev/null
+++ b/python/macholib/doc/conf.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+#
+# macholib documentation build configuration file, created by
+# sphinx-quickstart on Tue Sep 28 22:23:35 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
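+# Read the package version from setup.cfg (one directory above the
+# doc directory) so the documented version always matches the
+# package metadata.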
+def get_version():
+ fn = os.path.join(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+ 'setup.cfg')
+ for ln in open(fn):
+ if ln.startswith('version'):
+ version = ln.split('=')[-1].strip()
+ return version
+
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0,
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'macholib'
+copyright = u'2010-2011, Ronald Oussoren'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = get_version()
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'macholibdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'macholib.tex', u'macholib Documentation',
+ u'Ronald Oussoren', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'macholib', u'macholib Documentation',
+ [u'Ronald Oussoren'], 1)
+]
+
+
+# -- Options for Epub output ---------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'macholib'
+epub_author = u'Ronald Oussoren'
+epub_publisher = u'Ronald Oussoren'
+epub_copyright = u'2010, Ronald Oussoren'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+#epub_exclude_files = []
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python': ('http://docs.python.org/', None),
+ 'altgraph': ('http://packages.python.org/altgraph', None),
+}
diff --git a/python/macholib/doc/dyld.rst b/python/macholib/doc/dyld.rst
new file mode 100644
index 000000000..14895b6b0
--- /dev/null
+++ b/python/macholib/doc/dyld.rst
@@ -0,0 +1,159 @@
+:mod:`macholib.dyld` --- Dyld emulation
+=======================================
+
+.. module:: macholib.dyld
+   :synopsis: Emulation of the functionality of the dynamic linker
+
+This module defines a number of functions that can be used
+to emulate the functionality of the dynamic linker (``dyld``)
+w.r.t. looking for library files and frameworks.
+
+.. function:: dyld_image_suffix([env])
+
+ Looks up the suffix to append to shared library and
+ framework names and returns this value when found.
+ Returns ``None`` when no suffix should be appended.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_IMAGE_SUFFIX`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_framework_path([env])
+
+ Returns a user-specified framework search path,
+ or an empty list when only the default search path
+ should be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FRAMEWORK_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_library_path([env])
+
+ Returns a user-specified library search path,
+ or an empty list when only the default search path
+ should be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_LIBRARY_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_fallback_framework_path([env])
+
+   Return a user-specified list of directories in which
+   to look for frameworks that aren't in their install path,
+ or an empty list when the default fallback path should
+ be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FALLBACK_FRAMEWORK_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_fallback_library_path([env])
+
+   Return a user-specified list of directories in which
+   to look for libraries that aren't in their install path,
+ or an empty list when the default fallback path should
+ be used.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ See the description of ``DYLD_FALLBACK_LIBRARY_PATH`` in the
+ manual page for dyld(1) for more information.
+
+.. function:: dyld_image_suffix_search(iterator[, env])
+
+   Yields all items in *iterator*; when an image suffix is
+   specified, each item is preceded by a variant of the name
+   with the suffix inserted.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
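+   A minimal sketch (``_debug`` is just an example suffix, and the
+   suffixed variant is assumed to be yielded before the plain name)::
+
+      >>> from macholib.dyld import dyld_image_suffix_search
+      >>> env = {'DYLD_IMAGE_SUFFIX': '_debug'}
+      >>> list(dyld_image_suffix_search(iter(['libz.dylib']), env))
+      ['libz_debug.dylib', 'libz.dylib']
+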
+.. function:: dyld_override_search(name[, env])
+
+   If *name* is a framework name, yield filesystem
+ paths relative to the entries in the framework
+ search path.
+
+ Always yield the filesystem paths relative to the
+ entries in the library search path.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+.. function:: dyld_executable_path_search(name, executable_path)
+
+   If *name* is a path starting with ``@executable_path/``, yield
+ the path relative to the specified *executable_path*.
+
+   If *executable_path* is ``None``, nothing is yielded.
+
+.. function:: dyld_loader_search(name, loader_path)
+
+   If *name* is a path starting with ``@loader_path/``, yield
+ the path relative to the specified *loader_path*.
+
+   If *loader_path* is ``None``, nothing is yielded.
+
+   .. versionadded:: 1.6
+
+.. function:: dyld_default_search(name[, env])
+
+ Yield the filesystem locations to look for a dynamic
+ library or framework using the default locations
+ used by the system dynamic linker.
+
+ This function will look in ``~/Library/Frameworks``
+ for frameworks, even though the system dynamic linker
+ doesn't.
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+.. function:: dyld_find(name[, executable_path[, env[, loader_path]]])
+
+ Returns the path of the requested dynamic library,
+ raises :exc:`ValueError` when the library cannot be found.
+
+ This function searches for the library in the same
+   locations as the system dynamic linker.
+
+ The *executable_path* should be the filesystem path
+ of the executable to which the library is linked (either
+ directly or indirectly).
+
+ The *env* argument is a dictionary, which defaults
+ to :data:`os.environ`.
+
+ The *loader_path* argument is an optional filesystem path for
+   the object file (binary or shared library) that references
+ *name*.
+
+ .. versionchanged:: 1.6
+
+ Added the *loader_path* argument.
+
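+   For example, on a typical OSX system (the exact path depends on
+   the system)::
+
+      >>> from macholib.dyld import dyld_find
+      >>> dyld_find('libz.dylib')
+      '/usr/lib/libz.dylib'
+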
+.. function:: framework_find(fn[, executable_path[, env]])
+
+ Find a framework using the same semantics as the
+   system dynamic linker, but accepts looser names
+ than the system linker.
+
+ This function will return a correct result for input
+ values like:
+
+ * Python
+
+ * Python.framework
+
+ * Python.framework/Versions/Current
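+
+   For example, a sketch (the resulting path depends on the system
+   and the installed frameworks)::
+
+      >>> from macholib.dyld import framework_find
+      >>> framework_find('Python')
+      '/System/Library/Frameworks/Python.framework/Python'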
diff --git a/python/macholib/doc/dylib.rst b/python/macholib/doc/dylib.rst
new file mode 100644
index 000000000..969680212
--- /dev/null
+++ b/python/macholib/doc/dylib.rst
@@ -0,0 +1,33 @@
+:mod:`macholib.dylib` --- Generic dylib path manipulation
+=========================================================
+
+.. module:: macholib.dylib
+ :synopsis: Generic dylib path manipulation
+
+This module defines a function :func:`dylib_info` that can extract
+useful information from the name of a dynamic library.
+
+.. function:: dylib_info(filename)
+
+ A dylib name can take one of the following four forms:
+
+ * ``Location/Name.SomeVersion_Suffix.dylib``
+
+ * ``Location/Name.SomeVersion.dylib``
+
+ * ``Location/Name_Suffix.dylib``
+
+ * ``Location/Name.dylib``
+
+   Returns ``None`` if not found, or a mapping equivalent to::
+
+ dict(
+ location='Location',
+ name='Name.SomeVersion_Suffix.dylib',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
+   .. note:: *SomeVersion* and *Suffix* are optional and may be ``None``
+ if not present.
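+
+   For example (``libfoo`` is a made-up name following the forms
+   above)::
+
+      >>> from macholib.dylib import dylib_info
+      >>> info = dylib_info('lib/libfoo.A_debug.dylib')
+      >>> info['shortname'], info['version'], info['suffix']
+      ('libfoo', 'A', 'debug')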
diff --git a/python/macholib/doc/framework.rst b/python/macholib/doc/framework.rst
new file mode 100644
index 000000000..b58b5751c
--- /dev/null
+++ b/python/macholib/doc/framework.rst
@@ -0,0 +1,34 @@
+:mod:`macholib.framework` --- Generic framework path manipulation
+==========================================================================
+
+.. module:: macholib.framework
+ :synopsis: Generic framework path manipulation
+
+
+This module defines a function :func:`framework_info` that can extract
+useful information from the name of a dynamic library in a framework.
+
+.. function:: framework_info(filename)
+
+ A framework name can take one of the following four forms:
+
+ * ``Location/Name.framework/Versions/SomeVersion/Name_Suffix``
+
+ * ``Location/Name.framework/Versions/SomeVersion/Name``
+
+ * ``Location/Name.framework/Name_Suffix``
+
+ * ``Location/Name.framework/Name``
+
+ Returns ``None`` if not found, or a mapping equivalent to::
+
+ dict(
+ location='Location',
+ name='Name.framework/Versions/SomeVersion/Name_Suffix',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
+   .. note:: *SomeVersion* and *Suffix* are optional and may be ``None``
+ if not present.
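+
+   For example (the location is made up for illustration)::
+
+      >>> from macholib.framework import framework_info
+      >>> info = framework_info(
+      ...     'Library/Frameworks/Python.framework/Versions/2.7/Python')
+      >>> info['shortname'], info['version'], info['suffix']
+      ('Python', '2.7', None)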
diff --git a/python/macholib/doc/index.rst b/python/macholib/doc/index.rst
new file mode 100644
index 000000000..3067926a8
--- /dev/null
+++ b/python/macholib/doc/index.rst
@@ -0,0 +1,59 @@
+Macholib - Analyze and edit Mach-O headers
+==========================================
+
+macholib can be used to analyze and edit Mach-O headers, the executable
+format used by Mac OS X.
+
+It's typically used as a dependency analysis tool, and also to rewrite dylib
+references in Mach-O headers to be ``@executable_path`` relative.
+
+Though this tool targets a platform specific file format, it is pure python
+code that is platform and endian independent.
+
+General documentation
+---------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ changelog
+ license
+ scripts
+
+Reference Guide
+---------------
+
+.. toctree::
+ :maxdepth: 1
+
+ MachO
+ MachoOGraph
+ MachoOStandalone
+ SymbolTable
+ dyld
+ dylib
+ framework
+ macho_o
+ ptypes
+
+Online Resources
+----------------
+
+* `Sourcecode repository on bitbucket <http://bitbucket.org/ronaldoussoren/macholib/>`_
+
+* `The issue tracker <http://bitbucket.org/ronaldoussoren/macholib/issues>`_
+
+* `Mac OS X ABI Mach-O File Format Reference at Apple <http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html>`_
+
+Contributors
+------------
+
+Macholib was written by Bob Ippolito and is currently maintained by Ronald Oussoren <ronaldoussoren@mac.com>.
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/python/macholib/doc/license.rst b/python/macholib/doc/license.rst
new file mode 100644
index 000000000..f9c8cc50e
--- /dev/null
+++ b/python/macholib/doc/license.rst
@@ -0,0 +1,23 @@
+License
+=======
+
+Copyright (c) Bob Ippolito
+
+Parts are copyright (c) 2010-2014 Ronald Oussoren
+
+MIT License
+...........
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
+so.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/python/macholib/doc/macho_o.rst b/python/macholib/doc/macho_o.rst
new file mode 100644
index 000000000..dfa347e8f
--- /dev/null
+++ b/python/macholib/doc/macho_o.rst
@@ -0,0 +1,13 @@
+:mod:`macholib.mach_o` --- Low-level definitions
+================================================
+
+.. module:: macholib.mach_o
+ :synopsis: Low-level definitions of elements in a Mach-O file
+
+This module defines constants and packable structure types
+that correspond to elements of a Mach-O file.
+
+The names of the classes and constants are the same as those in
+the Mach-O header files and
+`Apple's documentation <http://developer.apple.com/library/mac/#documentation/DeveloperTools/Conceptual/MachORuntime/Reference/reference.html>`_. This document therefore
+doesn't explicitly document the names in this module.
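+
+For example, the magic-number constants from the Mach-O header files
+are available under their usual names::
+
+   >>> from macholib.mach_o import MH_MAGIC_64
+   >>> MH_MAGIC_64 == 0xfeedfacf
+   True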
diff --git a/python/macholib/doc/ptypes.rst b/python/macholib/doc/ptypes.rst
new file mode 100644
index 000000000..ade2ac847
--- /dev/null
+++ b/python/macholib/doc/ptypes.rst
@@ -0,0 +1,157 @@
+:mod:`macholib.ptypes` --- Packable types
+=========================================
+
+.. module:: macholib.ptypes
+ :synopsis: Serializable types
+
+The module :mod:`macholib.ptypes` defines types that can be serialized into
+byte arrays, both for basic types and structured types (C ``struct`` values).
+
+Utility functions
+-----------------
+
+.. function:: sizeof(value)
+
+   Returns the size in bytes of an object when packed, and raises
+   :exc:`ValueError` for inappropriate values.
+
+.. function:: pypackable(name, pytype, format)
+
+ Returns a packable type that is a subclass of the Python type
+ *pytype*. The value is converted to and from the packed format using
+ the struct *format*.
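+
+   For example, a 16-bit integer type could be defined as follows (a
+   sketch; ``p_myint16`` is a hypothetical name, and the default byte
+   order is assumed to be big-endian)::
+
+      from macholib.ptypes import pypackable
+
+      # hypothetical packable: a Python int packed as a signed 16-bit value
+      p_myint16 = pypackable('p_myint16', int, 'h')
+
+      assert p_myint16.from_str(b'\x00\x2a') == 42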
+
+
+
+Packable types
+--------------
+
+
+.. class:: BasePackable
+
+ All packable types are a subclass of :class:`BasePackable`, which defines
+ the basic interface but is itself an abstract base class.
+
+ .. data:: _endian_
+
+      The byte order of a packed value. This will be ``"<"`` for
+      little-endian values and ``">"`` for big-endian ones.
+
+      .. note:: the endianness attribute is a public value in order to
+         support both big- and little-endian file formats.
+
+         The name suggests that this attribute is private; this is
+         partially for historical reasons and partially to avoid
+         conflicts with field names in C structs.
+
+ .. method:: from_mmap(mmap, ptr, \**kw)
+
+      This class method constructs the value from a subview of a
+      :class:`mmap.mmap` object. It uses bytes starting at offset *ptr*,
+      reading just enough to reconstruct the entire object.
+
+ .. method:: from_fileobj(fp, \**kw)
+
+ This class method constructs the value by reading just enough bytes
+ from a file-like object.
+
+      .. note:: The file must be opened in binary mode; that is, read
+         calls should return byte strings, not unicode strings.
+
+ .. method:: from_str(value, \**kw)
+
+      This class method constructs the value by using the struct module
+      to parse the given bytes.
+
+      .. note:: Contrary to what the name suggests, the argument to
+         this method is a byte string, not a unicode string.
+
+   .. method:: from_tuple(tpl, \**kw)
+
+      This class method constructs the object from a tuple containing
+      all of the fields.
+
+
+ .. method:: to_str()
+
+ Returns a byte representation of the value.
+
+      .. note:: there is no default implementation for this method.
+
+ .. method:: to_fileobj(fp)
+
+ Write a byte representation of the value to the given file-like
+ object. The file should be opened in binary mode.
+
+ .. method:: to_mmap(mmap, ptr)
+
+ Write the byte representation of the value to a :class:`mmap.mmap`
+ object, starting at offset *ptr*.
+
+
+.. class:: Structure(...)
+
+   .. data:: _fields_
+
+      This class attribute is a list that contains the fields of the
+      structure in the right order. Every item of this list is a tuple
+      with two elements: the first is the name of the field, and the
+      second is the packable type for the field.
+
+      Every subclass of :class:`Structure` must define *_fields_* to be
+      useful, and the value of *_fields_* should not be changed after
+      class construction.
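+
+   A minimal sketch of a custom structure (``point`` is a hypothetical
+   example, not part of macholib)::
+
+      from macholib.ptypes import Structure, p_uint32
+
+      class point(Structure):
+          # two unsigned 32-bit fields, packed in declaration order
+          _fields_ = (
+              ('x', p_uint32),
+              ('y', p_uint32),
+          )
+
+      p = point(x=1, y=2)
+      data = p.to_str()          # 8 packed bytes
+      q = point.from_str(data)   # round-trip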
+
+
+Basic packables
+---------------
+
+Besides the core functionality, this module defines a number of
+:func:`pypackable` types that correspond to useful basic C types.
+
+.. class:: p_char([value])
+
+ A byte string of length 1
+
+.. class:: p_int8
+
+ An 8-bit signed integer
+
+.. class:: p_uint8
+
+ An 8-bit unsigned integer
+
+.. class:: p_int16
+
+   A 16-bit signed integer
+
+.. class:: p_uint16
+
+   A 16-bit unsigned integer
+
+.. class:: p_int32
+
+   A 32-bit signed integer
+
+.. class:: p_uint32
+
+   A 32-bit unsigned integer
+
+.. class:: p_int64
+
+   A 64-bit signed integer
+
+.. class:: p_uint64
+
+   A 64-bit unsigned integer
+
+.. class:: p_float
+
+   A floating-point value of type ``float``
+
+.. class:: p_double
+
+   A floating-point value of type ``double``
+
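+Basic packables can be converted to and from byte strings; a small
+sketch (assuming the default big-endian byte order)::
+
+    >>> from macholib.ptypes import p_uint32
+    >>> p_uint32.from_str(b'\x00\x00\x00\x2a')
+    42
+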
+.. note:: the module exports a number of other types with
+ names starting with ``p_``, such as ``p_int``. Those types
+ are deprecated and should not be used.
diff --git a/python/macholib/doc/scripts.rst b/python/macholib/doc/scripts.rst
new file mode 100644
index 000000000..50cf38028
--- /dev/null
+++ b/python/macholib/doc/scripts.rst
@@ -0,0 +1,35 @@
+Command-line tools
+==================
+
+python -m macholib find
+-----------------------
+
+Usage::
+
+  $ python -m macholib find dir...
+
+Print the paths of all Mach-O binaries
+in the specified directories.
+
+python -m macholib standalone
+-----------------------------
+
+Usage::
+
+ $ python -m macholib standalone appbundle...
+
+Convert one or more application bundles into
+standalone bundles. That is, copy all non-system
+shared libraries and frameworks used by the bundle
+into the bundle and rewrite load commands.
+
+python -m macholib dump
+-----------------------
+
+Usage::
+
+  $ python -m macholib dump dir...
+
+Print information about all architectures in a
+Mach-O file, as well as all libraries it links
+to.
diff --git a/python/macholib/macholib/MachO.py b/python/macholib/macholib/MachO.py
new file mode 100644
index 000000000..f83ddb711
--- /dev/null
+++ b/python/macholib/macholib/MachO.py
@@ -0,0 +1,398 @@
+"""
+Utilities for reading and writing Mach-O headers
+"""
+from __future__ import print_function
+
+import sys
+import struct
+
+from macholib.mach_o import *
+from macholib.dyld import dyld_find, framework_info
+from macholib.util import fileview
+try:
+ from macholib.compat import bytes
+except ImportError:
+ pass
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+__all__ = ['MachO']
+
+_RELOCATABLE = set((
+ # relocatable commands that should be used for dependency walking
+ LC_LOAD_DYLIB,
+ LC_LOAD_WEAK_DYLIB,
+ LC_PREBOUND_DYLIB,
+ LC_REEXPORT_DYLIB,
+))
+
+_RELOCATABLE_NAMES = {
+ LC_LOAD_DYLIB: 'load_dylib',
+ LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
+ LC_PREBOUND_DYLIB: 'prebound_dylib',
+ LC_REEXPORT_DYLIB: 'reexport_dylib',
+}
+
+def _shouldRelocateCommand(cmd):
+ """
+ Should this command id be investigated for relocation?
+ """
+ return cmd in _RELOCATABLE
+
+class MachO(object):
+ """
+ Provides reading/writing the Mach-O header of a specific existing file
+ """
+    # filename - the original filename of this mach-o
+    # fat - the fat header, if the file is a fat binary, else None
+    # headers - a list of MachOHeader instances, one per architecture
+
+
+ def __init__(self, filename):
+
+ # supports the ObjectGraph protocol
+ self.graphident = filename
+ self.filename = filename
+
+ # initialized by load
+ self.fat = None
+ self.headers = []
+ with open(filename, 'rb') as fp:
+ self.load(fp)
+
+ def __repr__(self):
+ return "<MachO filename=%r>" % (self.filename,)
+
+ def load(self, fh):
+ assert fh.tell() == 0
+ header = struct.unpack('>I', fh.read(4))[0]
+ fh.seek(0)
+ if header == FAT_MAGIC:
+ self.load_fat(fh)
+ else:
+ fh.seek(0, 2)
+ size = fh.tell()
+ fh.seek(0)
+ self.load_header(fh, 0, size)
+
+ def load_fat(self, fh):
+ self.fat = fat_header.from_fileobj(fh)
+ archs = [fat_arch.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
+ for arch in archs:
+ self.load_header(fh, arch.offset, arch.size)
+
+ def rewriteLoadCommands(self, *args, **kw):
+ changed = False
+ for header in self.headers:
+ if header.rewriteLoadCommands(*args, **kw):
+ changed = True
+ return changed
+
+ def load_header(self, fh, offset, size):
+ fh.seek(offset)
+ header = struct.unpack('>I', fh.read(4))[0]
+ fh.seek(offset)
+ if header == MH_MAGIC:
+ magic, hdr, endian = MH_MAGIC, mach_header, '>'
+ elif header == MH_CIGAM:
+ magic, hdr, endian = MH_CIGAM, mach_header, '<'
+ elif header == MH_MAGIC_64:
+ magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
+ elif header == MH_CIGAM_64:
+ magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
+ else:
+ raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
+ header, fh))
+ hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
+ self.headers.append(hdr)
+
+ def write(self, f):
+ for header in self.headers:
+ header.write(f)
+
+class MachOHeader(object):
+ """
+ Provides reading/writing the Mach-O header of a specific existing file
+ """
+ # filename - the original filename of this mach-o
+ # sizediff - the current deviation from the initial mach-o size
+ # header - the mach-o header
+ # commands - a list of (load_command, somecommand, data)
+ # data is either a str, or a list of segment structures
+ # total_size - the current mach-o header size (including header)
+ # low_offset - essentially, the maximum mach-o header size
+ # id_cmd - the index of my id command, or None
+
+
+ def __init__(self, parent, fh, offset, size, magic, hdr, endian):
+ self.MH_MAGIC = magic
+ self.mach_header = hdr
+
+ # These are all initialized by self.load()
+ self.parent = parent
+ self.offset = offset
+ self.size = size
+
+ self.endian = endian
+ self.header = None
+ self.commands = None
+ self.id_cmd = None
+ self.sizediff = None
+ self.total_size = None
+ self.low_offset = None
+ self.filetype = None
+ self.headers = []
+
+ self.load(fh)
+
+ def __repr__(self):
+ return "<%s filename=%r offset=%d size=%d endian=%r>" % (
+ type(self).__name__, self.parent.filename, self.offset, self.size,
+ self.endian)
+
+ def load(self, fh):
+ fh = fileview(fh, self.offset, self.size)
+ fh.seek(0)
+
+ self.sizediff = 0
+ kw = {'_endian_': self.endian}
+ header = self.mach_header.from_fileobj(fh, **kw)
+ self.header = header
+ #if header.magic != self.MH_MAGIC:
+ # raise ValueError("header has magic %08x, expecting %08x" % (
+ # header.magic, self.MH_MAGIC))
+
+ cmd = self.commands = []
+
+ self.filetype = self.get_filetype_shortname(header.filetype)
+
+ read_bytes = 0
+ low_offset = sys.maxsize
+ for i in range(header.ncmds):
+ # read the load command
+ cmd_load = load_command.from_fileobj(fh, **kw)
+
+ # read the specific command
+ klass = LC_REGISTRY.get(cmd_load.cmd, None)
+ if klass is None:
+ raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
+ cmd_cmd = klass.from_fileobj(fh, **kw)
+
+ if cmd_load.cmd == LC_ID_DYLIB:
+ # remember where this command was
+ if self.id_cmd is not None:
+ raise ValueError("This dylib already has an id")
+ self.id_cmd = i
+
+ if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
+ # for segment commands, read the list of segments
+ segs = []
+ # assert that the size makes sense
+ if cmd_load.cmd == LC_SEGMENT:
+ section_cls = section
+ else: # LC_SEGMENT_64
+ section_cls = section_64
+
+ expected_size = (
+ sizeof(klass) + sizeof(load_command) +
+ (sizeof(section_cls) * cmd_cmd.nsects)
+ )
+ if cmd_load.cmdsize != expected_size:
+ raise ValueError("Segment size mismatch")
+                # a segment without sections may still occupy file space;
+                # if so, its beginning is the fileoff of this command
+ if cmd_cmd.nsects == 0:
+ if cmd_cmd.filesize != 0:
+ low_offset = min(low_offset, cmd_cmd.fileoff)
+ else:
+ # this one has multiple segments
+ for j in range(cmd_cmd.nsects):
+ # read the segment
+ seg = section_cls.from_fileobj(fh, **kw)
+ # if the segment has a size and is not zero filled
+ # then its beginning is the offset of this segment
+ not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
+ if seg.offset > 0 and seg.size > 0 and not_zerofill:
+ low_offset = min(low_offset, seg.offset)
+ if not_zerofill:
+ c = fh.tell()
+ fh.seek(seg.offset)
+ sd = fh.read(seg.size)
+ seg.add_section_data(sd)
+ fh.seek(c)
+ segs.append(seg)
+ # data is a list of segments
+ cmd_data = segs
+
+ # XXX: Disabled for now because writing back doesn't work
+ #elif cmd_load.cmd == LC_CODE_SIGNATURE:
+ # c = fh.tell()
+ # fh.seek(cmd_cmd.dataoff)
+ # cmd_data = fh.read(cmd_cmd.datasize)
+ # fh.seek(c)
+ #elif cmd_load.cmd == LC_SYMTAB:
+ # c = fh.tell()
+ # fh.seek(cmd_cmd.stroff)
+ # cmd_data = fh.read(cmd_cmd.strsize)
+ # fh.seek(c)
+
+ else:
+ # data is a raw str
+ data_size = (
+ cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
+ )
+ cmd_data = fh.read(data_size)
+ cmd.append((cmd_load, cmd_cmd, cmd_data))
+ read_bytes += cmd_load.cmdsize
+
+ # make sure the header made sense
+ if read_bytes != header.sizeofcmds:
+ raise ValueError("Read %d bytes, header reports %d bytes" % (
+ read_bytes, header.sizeofcmds))
+ self.total_size = sizeof(self.mach_header) + read_bytes
+ self.low_offset = low_offset
+
+ # this header overwrites a segment, what the heck?
+ if self.total_size > low_offset:
+ raise ValueError("total_size > low_offset (%d > %d)" % (
+ self.total_size, low_offset))
+
+ def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
+ """
+        For all relocatable commands, yield
+        (command_index, command_name, filename).
+ """
+ for (idx, (lc, cmd, data)) in enumerate(self.commands):
+ if shouldRelocateCommand(lc.cmd):
+ name = _RELOCATABLE_NAMES[lc.cmd]
+ ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
+ yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
+ sys.getfilesystemencoding())
+
+ def rewriteInstallNameCommand(self, loadcmd):
+ """Rewrite the load command of this dylib"""
+ if self.id_cmd is not None:
+ self.rewriteDataForCommand(self.id_cmd, loadcmd)
+ return True
+ return False
+
+    def changedHeaderSizeBy(self, nbytes):
+        self.sizediff += nbytes
+        if (self.total_size + self.sizediff) > self.low_offset:
+            print("WARNING: Mach-O header in %r may be too large to relocate" % (
+                self.parent.filename,))
+
+ def rewriteLoadCommands(self, changefunc):
+ """
+        Rewrite the load commands based upon a change function
+ """
+ data = changefunc(self.parent.filename)
+ changed = False
+ if data is not None:
+ if self.rewriteInstallNameCommand(
+ data.encode(sys.getfilesystemencoding())):
+ changed = True
+ for idx, name, filename in self.walkRelocatables():
+ data = changefunc(filename)
+ if data is not None:
+ if self.rewriteDataForCommand(idx, data.encode(
+ sys.getfilesystemencoding())):
+ changed = True
+ return changed
+
+ def rewriteDataForCommand(self, idx, data):
+ lc, cmd, old_data = self.commands[idx]
+ hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
+ align = struct.calcsize('L')
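+        # pad the new name with NUL bytes out to the next sizeof(long)
+        # boundary; this always appends at least one NUL, which also
+        # terminates the string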
+ data = data + (b'\x00' * (align - (len(data) % align)))
+ newsize = hdrsize + len(data)
+ self.commands[idx] = (lc, cmd, data)
+ self.changedHeaderSizeBy(newsize - lc.cmdsize)
+ lc.cmdsize, cmd.name = newsize, hdrsize
+ return True
+
+ def synchronize_size(self):
+        if (self.total_size + self.sizediff) > self.low_offset:
+            raise ValueError("New Mach-O header is too large to relocate in %r" % (
+                self.parent.filename,))
+ self.header.sizeofcmds += self.sizediff
+ self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
+ self.sizediff = 0
+
+ def write(self, fileobj):
+ fileobj = fileview(fileobj, self.offset, self.size)
+ fileobj.seek(0)
+
+ # serialize all the mach-o commands
+ self.synchronize_size()
+
+ self.header.to_fileobj(fileobj)
+ for lc, cmd, data in self.commands:
+ lc.to_fileobj(fileobj)
+ cmd.to_fileobj(fileobj)
+
+ if sys.version_info[0] == 2:
+ if isinstance(data, unicode):
+ fileobj.write(data.encode(sys.getfilesystemencoding()))
+
+ elif isinstance(data, (bytes, str)):
+ fileobj.write(data)
+ else:
+ # segments..
+ for obj in data:
+ obj.to_fileobj(fileobj)
+ else:
+ if isinstance(data, str):
+ fileobj.write(data.encode(sys.getfilesystemencoding()))
+
+ elif isinstance(data, bytes):
+ fileobj.write(data)
+
+ else:
+ # segments..
+ for obj in data:
+ obj.to_fileobj(fileobj)
+
+ # zero out the unused space, doubt this is strictly necessary
+ # and is generally probably already the case
+ fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
+
+ def getSymbolTableCommand(self):
+ for lc, cmd, data in self.commands:
+ if lc.cmd == LC_SYMTAB:
+ return cmd
+ return None
+
+ def getDynamicSymbolTableCommand(self):
+ for lc, cmd, data in self.commands:
+ if lc.cmd == LC_DYSYMTAB:
+ return cmd
+ return None
+
+ def get_filetype_shortname(self, filetype):
+ if filetype in MH_FILETYPE_SHORTNAMES:
+ return MH_FILETYPE_SHORTNAMES[filetype]
+ else:
+ return 'unknown'
+
+def main(fn):
+ m = MachO(fn)
+ seen = set()
+ for header in m.headers:
+ for idx, name, other in header.walkRelocatables():
+ if other not in seen:
+ seen.add(other)
+ print('\t' + name + ": " + other)
+
+if __name__ == '__main__':
+ import sys
+ files = sys.argv[1:] or ['/bin/ls']
+ for fn in files:
+ print(fn)
+ main(fn)
diff --git a/python/macholib/macholib/MachOGraph.py b/python/macholib/macholib/MachOGraph.py
new file mode 100644
index 000000000..8875c56eb
--- /dev/null
+++ b/python/macholib/macholib/MachOGraph.py
@@ -0,0 +1,131 @@
+"""
+Utilities for reading and writing Mach-O headers
+"""
+
+import os
+import sys
+
+from altgraph.Graph import Graph
+from altgraph.ObjectGraph import ObjectGraph
+
+from macholib.mach_o import *
+from macholib.dyld import dyld_find
+from macholib.MachO import MachO
+from macholib.itergraphreport import itergraphreport
+
+__all__ = ['MachOGraph']
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+class MissingMachO(object):
+ def __init__(self, filename):
+ self.graphident = filename
+ self.headers = ()
+
+ def __repr__(self):
+ return '<%s graphident=%r>' % (type(self).__name__, self.graphident)
+
+class MachOGraph(ObjectGraph):
+ """
+ Graph data structure of Mach-O dependencies
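+
+    A minimal sketch of typical use::
+
+        g = MachOGraph()
+        g.run_file('/bin/ls')
+        g.graphreport()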
+ """
+ def __init__(self, debug=0, graph=None, env=None, executable_path=None):
+ super(MachOGraph, self).__init__(debug=debug, graph=graph)
+ self.env = env
+ self.trans_table = {}
+ self.executable_path = executable_path
+
+ def locate(self, filename, loader=None):
+ assert isinstance(filename, (str, unicode))
+ if filename.startswith('@loader_path/') and loader is not None:
+ fn = self.trans_table.get((loader.filename, filename))
+ if fn is None:
+ try:
+ fn = dyld_find(filename, env=self.env,
+ executable_path=self.executable_path,
+ loader=loader.filename)
+ self.trans_table[(loader.filename, filename)] = fn
+ except ValueError:
+ return None
+
+ else:
+ fn = self.trans_table.get(filename)
+ if fn is None:
+ try:
+ fn = dyld_find(filename, env=self.env,
+ executable_path=self.executable_path)
+ self.trans_table[filename] = fn
+ except ValueError:
+ return None
+ return fn
+
+ def findNode(self, name, loader=None):
+ assert isinstance(name, (str, unicode))
+ data = super(MachOGraph, self).findNode(name)
+ if data is not None:
+ return data
+ newname = self.locate(name, loader=loader)
+ if newname is not None and newname != name:
+ return self.findNode(newname)
+ return None
+
+ def run_file(self, pathname, caller=None):
+ assert isinstance(pathname, (str, unicode))
+ self.msgin(2, "run_file", pathname)
+ m = self.findNode(pathname, loader=caller)
+ if m is None:
+ if not os.path.exists(pathname):
+ raise ValueError('%r does not exist' % (pathname,))
+ m = self.createNode(MachO, pathname)
+ self.createReference(caller, m, edge_data='run_file')
+ self.scan_node(m)
+ self.msgout(2, '')
+ return m
+
+ def load_file(self, name, caller=None):
+ assert isinstance(name, (str, unicode))
+ self.msgin(2, "load_file", name)
+ m = self.findNode(name)
+ if m is None:
+ newname = self.locate(name, loader=caller)
+ if newname is not None and newname != name:
+ return self.load_file(newname, caller=caller)
+ if os.path.exists(name):
+ m = self.createNode(MachO, name)
+ self.scan_node(m)
+ else:
+ m = self.createNode(MissingMachO, name)
+ self.msgout(2, '')
+ return m
+
+ def scan_node(self, node):
+ self.msgin(2, 'scan_node', node)
+ for header in node.headers:
+ for idx, name, filename in header.walkRelocatables():
+ assert isinstance(name, (str, unicode))
+ assert isinstance(filename, (str, unicode))
+ m = self.load_file(filename, caller=node)
+ self.createReference(node, m, edge_data=name)
+ self.msgout(2, '', node)
+
+ def itergraphreport(self, name='G'):
+ nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
+ describe_edge = self.graph.describe_edge
+ return itergraphreport(nodes, describe_edge, name=name)
+
+ def graphreport(self, fileobj=None):
+ if fileobj is None:
+ fileobj = sys.stdout
+ fileobj.writelines(self.itergraphreport())
+
+def main(args):
+ g = MachOGraph()
+ for arg in args:
+ g.run_file(arg)
+ g.graphreport()
+
+if __name__ == '__main__':
+ main(sys.argv[1:] or ['/bin/ls'])
diff --git a/python/macholib/macholib/MachOStandalone.py b/python/macholib/macholib/MachOStandalone.py
new file mode 100644
index 000000000..f8627336c
--- /dev/null
+++ b/python/macholib/macholib/MachOStandalone.py
@@ -0,0 +1,147 @@
+import os
+
+from macholib.MachOGraph import MachOGraph, MissingMachO
+from macholib.util import iter_platform_files, in_system_path, mergecopy, \
+ mergetree, flipwritable, has_filename_filter
+from macholib.dyld import framework_info
+from collections import deque
+
+class ExcludedMachO(MissingMachO):
+ pass
+
+class FilteredMachOGraph(MachOGraph):
+ def __init__(self, delegate, *args, **kwargs):
+ super(FilteredMachOGraph, self).__init__(*args, **kwargs)
+ self.delegate = delegate
+
+ def createNode(self, cls, name):
+ cls = self.delegate.getClass(name, cls)
+ res = super(FilteredMachOGraph, self).createNode(cls, name)
+ return res
+
+ def locate(self, filename, loader=None):
+ newname = super(FilteredMachOGraph, self).locate(filename, loader)
+ if newname is None:
+ return None
+ return self.delegate.locate(newname)
+
+class MachOStandalone(object):
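+    # Typical use (a sketch): MachOStandalone('/path/To.app').run() copies
+    # the non-system dylibs and frameworks used by the bundle into
+    # Contents/Frameworks and rewrites the load commands accordingly.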
+ def __init__(self, base, dest=None, graph=None, env=None,
+ executable_path=None):
+ self.base = os.path.join(os.path.abspath(base), '')
+ if dest is None:
+ dest = os.path.join(self.base, 'Contents', 'Frameworks')
+ self.dest = dest
+ self.mm = FilteredMachOGraph(self, graph=graph, env=env,
+ executable_path=executable_path)
+ self.changemap = {}
+ self.excludes = []
+ self.pending = deque()
+
+ def getClass(self, name, cls):
+ if in_system_path(name):
+ return ExcludedMachO
+ for base in self.excludes:
+ if name.startswith(base):
+ return ExcludedMachO
+ return cls
+
+ def locate(self, filename):
+ if in_system_path(filename):
+ return filename
+ if filename.startswith(self.base):
+ return filename
+ for base in self.excludes:
+ if filename.startswith(base):
+ return filename
+ if filename in self.changemap:
+ return self.changemap[filename]
+ info = framework_info(filename)
+ if info is None:
+ res = self.copy_dylib(filename)
+ self.changemap[filename] = res
+ return res
+ else:
+ res = self.copy_framework(info)
+ self.changemap[filename] = res
+ return res
+
+ def copy_dylib(self, filename):
+ # When the filename is a symlink use the basename of the target of the link
+ # as the name in standalone bundle. This avoids problems when two libraries
+ # link to the same dylib but using different symlinks.
+ if os.path.islink(filename):
+ dest = os.path.join(self.dest, os.path.basename(os.path.realpath(filename)))
+ else:
+ dest = os.path.join(self.dest, os.path.basename(filename))
+
+ if not os.path.exists(dest):
+ self.mergecopy(filename, dest)
+ return dest
+
+ def mergecopy(self, src, dest):
+ return mergecopy(src, dest)
+
+ def mergetree(self, src, dest):
+ return mergetree(src, dest)
+
+ def copy_framework(self, info):
+ dest = os.path.join(self.dest, info['shortname'] + '.framework')
+ destfn = os.path.join(self.dest, info['name'])
+ src = os.path.join(info['location'], info['shortname'] + '.framework')
+ if not os.path.exists(dest):
+ self.mergetree(src, dest)
+ self.pending.append((destfn, iter_platform_files(dest)))
+ return destfn
+
+ def run(self, platfiles=None, contents=None):
+ mm = self.mm
+ if contents is None:
+ contents = '@executable_path/..'
+ if platfiles is None:
+ platfiles = iter_platform_files(self.base)
+
+ for fn in platfiles:
+ mm.run_file(fn)
+
+ while self.pending:
+ fmwk, files = self.pending.popleft()
+ ref = mm.findNode(fmwk)
+ for fn in files:
+ mm.run_file(fn, caller=ref)
+
+ changemap = {}
+ skipcontents = os.path.join(os.path.dirname(self.dest), '')
+ machfiles = []
+
+ for node in mm.flatten(has_filename_filter):
+ machfiles.append(node)
+ dest = os.path.join(contents, node.filename[len(skipcontents):])
+ changemap[node.filename] = dest
+
+ def changefunc(path):
+ res = mm.locate(path)
+ return changemap.get(res)
+
+ for node in machfiles:
+ fn = mm.locate(node.filename)
+ if fn is None:
+ continue
+            # rewriteLoadCommands already visits every header of the file
+            rewroteAny = node.rewriteLoadCommands(changefunc)
+ if rewroteAny:
+ old_mode = flipwritable(fn)
+ try:
+                with open(fn, 'rb+') as f:
+                    # node.write() writes every header at its own offset
+                    f.seek(0)
+                    node.write(f)
+                    f.seek(0, 2)
+                    f.flush()
+ finally:
+ flipwritable(fn, old_mode)
+
+ allfiles = [mm.locate(node.filename) for node in machfiles]
+ return set(filter(None, allfiles))
diff --git a/python/macholib/macholib/SymbolTable.py b/python/macholib/macholib/SymbolTable.py
new file mode 100644
index 000000000..62fb172fd
--- /dev/null
+++ b/python/macholib/macholib/SymbolTable.py
@@ -0,0 +1,95 @@
+"""
+Class to read the symbol table from a Mach-O header
+"""
+
+from macholib.mach_o import *
+
+__all__ = ['SymbolTable']
+
+try:
+    xrange
+except NameError:
+    # Python 3 compatibility
+    xrange = range
+
+# XXX: Does not support 64-bit, probably broken anyway
+
+class SymbolTable(object):
+ def __init__(self, macho, openfile=None):
+ if openfile is None:
+ openfile = open
+        self.macho = macho.headers[0]
+        self.symtab = self.macho.getSymbolTableCommand()
+        self.dysymtab = self.macho.getDynamicSymbolTableCommand()
+        fh = openfile(macho.filename, 'rb')
+ try:
+ if self.symtab is not None:
+ self.readSymbolTable(fh)
+ if self.dysymtab is not None:
+ self.readDynamicSymbolTable(fh)
+ finally:
+ fh.close()
+
+ def readSymbolTable(self, fh):
+ cmd = self.symtab
+ fh.seek(cmd.stroff)
+ strtab = fh.read(cmd.strsize)
+ fh.seek(cmd.symoff)
+ nlists = []
+ for i in xrange(cmd.nsyms):
+ cmd = nlist.from_fileobj(fh)
+ if cmd.n_un == 0:
+ nlists.append((cmd, ''))
+ else:
+ nlists.append((cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
+ self.nlists = nlists
+
+ def readDynamicSymbolTable(self, fh):
+ cmd = self.dysymtab
+ nlists = self.nlists
+ self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
+ self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
+ self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
+ #if cmd.tocoff == 0:
+ # self.toc = None
+ #else:
+ # self.toc = self.readtoc(fh, cmd.tocoff, cmd.ntoc)
+ #if cmd.modtaboff == 0:
+ # self.modtab = None
+ #else:
+ # self.modtab = self.readmodtab(fh, cmd.modtaboff, cmd.nmodtab)
+ if cmd.extrefsymoff == 0:
+ self.extrefsym = None
+ else:
+ self.extrefsym = self.readsym(fh, cmd.extrefsymoff, cmd.nextrefsyms)
+ #if cmd.indirectsymoff == 0:
+ # self.indirectsym = None
+ #else:
+ # self.indirectsym = self.readsym(fh, cmd.indirectsymoff, cmd.nindirectsyms)
+ #if cmd.extreloff == 0:
+ # self.extrel = None
+ #else:
+ # self.extrel = self.readrel(fh, cmd.extreloff, cmd.nextrel)
+ #if cmd.locreloff == 0:
+ # self.locrel = None
+ #else:
+ # self.locrel = self.readrel(fh, cmd.locreloff, cmd.nlocrel)
+
+ def readtoc(self, fh, off, n):
+ #print 'toc', off, n
+ fh.seek(off)
+ return [dylib_table_of_contents.from_fileobj(fh) for i in xrange(n)]
+
+ def readmodtab(self, fh, off, n):
+ #print 'modtab', off, n
+ fh.seek(off)
+ return [dylib_module.from_fileobj(fh) for i in xrange(n)]
+
+ def readsym(self, fh, off, n):
+ #print 'sym', off, n
+ fh.seek(off)
+ refs = []
+ for i in xrange(n):
+ ref = dylib_reference.from_fileobj(fh)
+ isym, flags = divmod(ref.isym_flags, 256)
+ refs.append((self.nlists[isym], flags))
+ return refs
+
+ def readrel(self, fh, off, n):
+ #print 'rel', off, n
+ fh.seek(off)
+ return [relocation_info.from_fileobj(fh) for i in xrange(n)]
diff --git a/python/macholib/macholib/__init__.py b/python/macholib/macholib/__init__.py
new file mode 100644
index 000000000..df68ce802
--- /dev/null
+++ b/python/macholib/macholib/__init__.py
@@ -0,0 +1,10 @@
+"""
+Enough Mach-O to make your head spin.
+
+See the relevant header files in /usr/include/mach-o
+
+And also Apple's documentation.
+"""
+from __future__ import print_function
+import pkg_resources
+__version__ = pkg_resources.require('macholib')[0].version
diff --git a/python/macholib/macholib/__main__.py b/python/macholib/macholib/__main__.py
new file mode 100644
index 000000000..5309872ca
--- /dev/null
+++ b/python/macholib/macholib/__main__.py
@@ -0,0 +1,73 @@
+from __future__ import print_function, absolute_import
+import os, sys
+
+from macholib.util import is_platform_file
+from macholib import macho_dump
+from macholib import macho_standalone
+
+gCommand = None
+
+def check_file(fp, path, callback):
+ if not os.path.exists(path):
+ print('%s: %s: No such file or directory' % (gCommand, path),
+ file=sys.stderr)
+ return 1
+
+ try:
+ is_plat = is_platform_file(path)
+
+ except IOError as msg:
+ print('%s: %s: %s' % (gCommand, path, msg), file=sys.stderr)
+ return 1
+
+ else:
+ if is_plat:
+ callback(fp, path)
+ return 0
+
+def walk_tree(callback, paths):
+    err = 0
+
+ for base in paths:
+ if os.path.isdir(base):
+ for root, dirs, files in os.walk(base):
+ for fn in files:
+ err |= check_file(
+ sys.stdout, os.path.join(root, fn), callback)
+ else:
+ err |= check_file(sys.stdout, base, callback)
+
+ return err
+
+def print_usage(fp):
+ print("Usage:", file=sys.stderr)
+ print(" python -mmacholib dump FILE ...", file=fp)
+ print(" python -mmacholib find DIR ...", file=fp)
+ print(" python -mmacholib standalone DIR ...", file=fp)
+
+def main():
+ global gCommand
+ if len(sys.argv) < 3:
+ print_usage(sys.stderr)
+ sys.exit(1)
+
+ gCommand = sys.argv[1]
+
+ if gCommand == 'dump':
+ walk_tree(macho_dump.print_file, sys.argv[2:])
+
+ elif gCommand == 'find':
+ walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])
+
+ elif gCommand == 'standalone':
+ for dn in sys.argv[2:]:
+ macho_standalone.standaloneApp(dn)
+
+ else:
+ print_usage(sys.stderr)
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/macholib/macholib/_cmdline.py b/python/macholib/macholib/_cmdline.py
new file mode 100644
index 000000000..eb44824f9
--- /dev/null
+++ b/python/macholib/macholib/_cmdline.py
@@ -0,0 +1,44 @@
+"""
+Internal helpers for basic commandline tools
+"""
+from __future__ import print_function, absolute_import
+import os
+import sys
+
+from macholib.util import is_platform_file
+
+def check_file(fp, path, callback):
+ if not os.path.exists(path):
+ print('%s: %s: No such file or directory' % (sys.argv[0], path), file=sys.stderr)
+ return 1
+
+ try:
+ is_plat = is_platform_file(path)
+
+ except IOError as msg:
+ print('%s: %s: %s' % (sys.argv[0], path, msg), file=sys.stderr)
+ return 1
+
+ else:
+ if is_plat:
+ callback(fp, path)
+ return 0
+
+def main(callback):
+ args = sys.argv[1:]
+ name = os.path.basename(sys.argv[0])
+ err = 0
+
+ if not args:
+ print("Usage: %s filename..."%(name,), file=sys.stderr)
+ return 1
+
+ for base in args:
+ if os.path.isdir(base):
+ for root, dirs, files in os.walk(base):
+ for fn in files:
+ err |= check_file(sys.stdout, os.path.join(root, fn), callback)
+ else:
+ err |= check_file(sys.stdout, base, callback)
+
+ return err
diff --git a/python/macholib/macholib/dyld.py b/python/macholib/macholib/dyld.py
new file mode 100644
index 000000000..2e81db7f8
--- /dev/null
+++ b/python/macholib/macholib/dyld.py
@@ -0,0 +1,176 @@
+"""
+dyld emulation
+"""
+
+from itertools import chain
+
+import os, sys
+
+from macholib.framework import framework_info
+from macholib.dylib import dylib_info
+
+__all__ = [
+ 'dyld_find', 'framework_find',
+ 'framework_info', 'dylib_info',
+]
+
+# These are the defaults as per man dyld(1)
+#
+_DEFAULT_FRAMEWORK_FALLBACK = [
+ os.path.expanduser("~/Library/Frameworks"),
+ "/Library/Frameworks",
+ "/Network/Library/Frameworks",
+ "/System/Library/Frameworks",
+]
+
+_DEFAULT_LIBRARY_FALLBACK = [
+ os.path.expanduser("~/lib"),
+ "/usr/local/lib",
+ "/lib",
+ "/usr/lib",
+]
+
+# XXX: Is this function still needed?
+if sys.version_info[0] == 2:
+ def _ensure_utf8(s):
+ """Not all of PyObjC and Python understand unicode paths very well yet"""
+ if isinstance(s, unicode):
+ return s.encode('utf8')
+ return s
+else:
+ def _ensure_utf8(s):
+ if s is not None and not isinstance(s, str):
+ raise ValueError(s)
+ return s
+
+
+def _dyld_env(env, var):
+ if env is None:
+ env = os.environ
+ rval = env.get(var)
+ if rval is None or rval == '':
+ return []
+ return rval.split(':')
+
+def dyld_image_suffix(env=None):
+ if env is None:
+ env = os.environ
+ return env.get('DYLD_IMAGE_SUFFIX')
+
+def dyld_framework_path(env=None):
+ return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')
+
+def dyld_library_path(env=None):
+ return _dyld_env(env, 'DYLD_LIBRARY_PATH')
+
+def dyld_fallback_framework_path(env=None):
+ return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
+
+def dyld_fallback_library_path(env=None):
+ return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
+
+def dyld_image_suffix_search(iterator, env=None):
+ """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
+ suffix = dyld_image_suffix(env)
+ if suffix is None:
+ return iterator
+ def _inject(iterator=iterator, suffix=suffix):
+ for path in iterator:
+ if path.endswith('.dylib'):
+ yield path[:-len('.dylib')] + suffix + '.dylib'
+ else:
+ yield path + suffix
+ yield path
+ return _inject()
+
+def dyld_override_search(name, env=None):
+ # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
+ # framework name, use the first file that exists in the framework
+ # path if any. If there is none go on to search the DYLD_LIBRARY_PATH
+ # if any.
+
+ framework = framework_info(name)
+
+ if framework is not None:
+ for path in dyld_framework_path(env):
+ yield os.path.join(path, framework['name'])
+
+ # If DYLD_LIBRARY_PATH is set then use the first file that exists
+ # in the path. If none use the original name.
+ for path in dyld_library_path(env):
+ yield os.path.join(path, os.path.basename(name))
+
+def dyld_executable_path_search(name, executable_path=None):
+ # If we haven't done any searching and found a library and the
+ # dylib_name starts with "@executable_path/" then construct the
+ # library name.
+ if name.startswith('@executable_path/') and executable_path is not None:
+ yield os.path.join(executable_path, name[len('@executable_path/'):])
+
+def dyld_loader_search(name, loader_path=None):
+ # If we haven't done any searching and found a library and the
+ # dylib_name starts with "@loader_path/" then construct the
+ # library name.
+ if name.startswith('@loader_path/') and loader_path is not None:
+ yield os.path.join(loader_path, name[len('@loader_path/'):])
+
+def dyld_default_search(name, env=None):
+ yield name
+
+ framework = framework_info(name)
+
+ if framework is not None:
+ fallback_framework_path = dyld_fallback_framework_path(env)
+
+ if fallback_framework_path:
+ for path in fallback_framework_path:
+ yield os.path.join(path, framework['name'])
+
+ else:
+ for path in _DEFAULT_FRAMEWORK_FALLBACK:
+ yield os.path.join(path, framework['name'])
+
+ fallback_library_path = dyld_fallback_library_path(env)
+ if fallback_library_path:
+ for path in fallback_library_path:
+ yield os.path.join(path, os.path.basename(name))
+
+ else:
+ for path in _DEFAULT_LIBRARY_FALLBACK:
+ yield os.path.join(path, os.path.basename(name))
+
+def dyld_find(name, executable_path=None, env=None, loader_path=None):
+ """
+ Find a library or framework using dyld semantics
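+
+    A sketch of typical use (the result depends on the local system)::
+
+        dyld_find('libSystem.B.dylib')
+        # -> '/usr/lib/libSystem.B.dylib' on a typical Mac OS X install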
+ """
+ name = _ensure_utf8(name)
+ executable_path = _ensure_utf8(executable_path)
+ for path in dyld_image_suffix_search(chain(
+ dyld_override_search(name, env),
+ dyld_executable_path_search(name, executable_path),
+ dyld_loader_search(name, loader_path),
+ dyld_default_search(name, env),
+ ), env):
+ if os.path.isfile(path):
+ return path
+ raise ValueError("dylib %s could not be found" % (name,))
+
+def framework_find(fn, executable_path=None, env=None):
+ """
+ Find a framework using dyld semantics in a very loose manner.
+
+ Will take input such as:
+ Python
+ Python.framework
+ Python.framework/Versions/Current
+ """
+ try:
+ return dyld_find(fn, executable_path=executable_path, env=env)
+ except ValueError:
+ pass
+ fmwk_index = fn.rfind('.framework')
+ if fmwk_index == -1:
+ fmwk_index = len(fn)
+ fn += '.framework'
+ fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
+ return dyld_find(fn, executable_path=executable_path, env=env)
diff --git a/python/macholib/macholib/dylib.py b/python/macholib/macholib/dylib.py
new file mode 100644
index 000000000..cea6a95c3
--- /dev/null
+++ b/python/macholib/macholib/dylib.py
@@ -0,0 +1,42 @@
+"""
+Generic dylib path manipulation
+"""
+
+import re
+
+__all__ = ['dylib_info']
+
+_DYLIB_RE = re.compile(r"""(?x)
+(?P<location>^.*)(?:^|/)
+(?P<name>
+ (?P<shortname>\w+?)
+ (?:\.(?P<version>[^._]+))?
+ (?:_(?P<suffix>[^._]+))?
+ \.dylib$
+)
+""")
+
+def dylib_info(filename):
+ """
+ A dylib name can take one of the following four forms:
+ Location/Name.SomeVersion_Suffix.dylib
+ Location/Name.SomeVersion.dylib
+ Location/Name_Suffix.dylib
+ Location/Name.dylib
+
+ returns None if not found or a mapping equivalent to:
+ dict(
+ location='Location',
+ name='Name.SomeVersion_Suffix.dylib',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
+ Note that SomeVersion and Suffix are optional and may be None
+ if not present.
+ """
+ is_dylib = _DYLIB_RE.match(filename)
+ if not is_dylib:
+ return None
+ return is_dylib.groupdict()
diff --git a/python/macholib/macholib/framework.py b/python/macholib/macholib/framework.py
new file mode 100644
index 000000000..da099573c
--- /dev/null
+++ b/python/macholib/macholib/framework.py
@@ -0,0 +1,42 @@
+"""
+Generic framework path manipulation
+"""
+
+import re
+
+__all__ = ['framework_info']
+
+_STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
+(?P<location>^.*)(?:^|/)
+(?P<name>
+ (?P<shortname>[-_A-Za-z0-9]+).framework/
+ (?:Versions/(?P<version>[^/]+)/)?
+ (?P=shortname)
+ (?:_(?P<suffix>[^_]+))?
+)$
+""")
+
+def framework_info(filename):
+ """
+ A framework name can take one of the following four forms:
+ Location/Name.framework/Versions/SomeVersion/Name_Suffix
+ Location/Name.framework/Versions/SomeVersion/Name
+ Location/Name.framework/Name_Suffix
+ Location/Name.framework/Name
+
+ returns None if not found, or a mapping equivalent to:
+ dict(
+ location='Location',
+ name='Name.framework/Versions/SomeVersion/Name_Suffix',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ )
+
+ Note that SomeVersion and Suffix are optional and may be None
+ if not present
+ """
+ is_framework = _STRICT_FRAMEWORK_RE.match(filename)
+ if not is_framework:
+ return None
+ return is_framework.groupdict()
diff --git a/python/macholib/macholib/itergraphreport.py b/python/macholib/macholib/itergraphreport.py
new file mode 100644
index 000000000..453c96e2a
--- /dev/null
+++ b/python/macholib/macholib/itergraphreport.py
@@ -0,0 +1,73 @@
+"""
+Utilities for creating dot output from a MachOGraph
+
+XXX: need to rewrite this based on altgraph.Dot
+"""
+
+from collections import deque
+
+try:
+ from itertools import imap
+except ImportError:
+ imap = map
+
+__all__ = ['itergraphreport']
+
+def itergraphreport(nodes, describe_edge, name='G'):
+    # nodes is iterated twice below, so materialize it first
+    nodes = list(nodes)
+    edges = deque()
+    nodetoident = {}
+
+ def nodevisitor(node, data, outgoing, incoming):
+ return {'label': str(node)}
+
+ def edgevisitor(edge, data, head, tail):
+ return {}
+
+ yield 'digraph %s {\n' % (name,)
+ attr = dict(rankdir='LR', concentrate='true')
+ cpatt = '%s="%s"'
+    for item in attr.items():
+ yield '\t%s;\n' % (cpatt % item,)
+
+ # find all packages (subgraphs)
+ for (node, data, outgoing, incoming) in nodes:
+ nodetoident[node] = getattr(data, 'identifier', node)
+
+ # create sets for subgraph, write out descriptions
+ for (node, data, outgoing, incoming) in nodes:
+ # update edges
+ for edge in imap(describe_edge, outgoing):
+ edges.append(edge)
+
+ # describe node
+ yield '\t"%s" [%s];\n' % (
+ node,
+            ','.join([
+                (cpatt % item) for item in
+                nodevisitor(node, data, outgoing, incoming).items()
+            ]),
+ )
+
+ graph = []
+
+ while edges:
+ edge, data, head, tail = edges.popleft()
+ if data in ('run_file', 'load_dylib'):
+ graph.append((edge, data, head, tail))
+
+ def do_graph(edges, tabs):
+ edgestr = tabs + '"%s" -> "%s" [%s];\n'
+ # describe edge
+ for (edge, data, head, tail) in edges:
+ attribs = edgevisitor(edge, data, head, tail)
+ yield edgestr % (
+ head,
+ tail,
+                ','.join([(cpatt % item) for item in attribs.items()]),
+ )
+
+ for s in do_graph(graph, '\t'):
+ yield s
+
+ yield '}\n'
diff --git a/python/macholib/macholib/mach_o.py b/python/macholib/macholib/mach_o.py
new file mode 100644
index 000000000..1a85c75cf
--- /dev/null
+++ b/python/macholib/macholib/mach_o.py
@@ -0,0 +1,1311 @@
+"""
+Other than changing the load commands in such a way that they do not
+contain the load command itself, this is largely a by-hand conversion
+of the C headers. Hopefully everything in here should be at least as
+obvious as the C headers, and you should be using the C headers as a real
+reference because the documentation didn't come along for the ride.
+
+Doing much of anything with the symbol tables or segments is really
+not covered at this point.
+
+See /usr/include/mach-o and friends.
+"""
+import time
+
+from macholib.ptypes import *
+
+
+_CPU_ARCH_ABI64 = 0x01000000
+
+CPU_TYPE_NAMES = {
+ -1: 'ANY',
+ 1: 'VAX',
+ 6: 'MC680x0',
+ 7: 'i386',
+ _CPU_ARCH_ABI64 | 7: 'x86_64',
+ 8: 'MIPS',
+ 10: 'MC98000',
+ 11: 'HPPA',
+ 12: 'ARM',
+ _CPU_ARCH_ABI64 | 12: 'ARM64',
+ 13: 'MC88000',
+ 14: 'SPARC',
+ 15: 'i860',
+ 16: 'Alpha',
+ 18: 'PowerPC',
+ _CPU_ARCH_ABI64 | 18: 'PowerPC64',
+}
+
+INTEL64_SUBTYPE = {
+ 3 : "CPU_SUBTYPE_X86_64_ALL",
+ 4 : "CPU_SUBTYPE_X86_ARCH1"
+}
+
+#define CPU_SUBTYPE_INTEL(f, m) ((cpu_subtype_t) (f) + ((m) << 4))
+INTEL_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_INTEL_MODEL_ALL",
+ 1 : "CPU_THREADTYPE_INTEL_HTT",
+ 3 : "CPU_SUBTYPE_I386_ALL",
+ 4 : "CPU_SUBTYPE_486",
+ 5 : "CPU_SUBTYPE_586",
+ 8 : "CPU_SUBTYPE_PENTIUM_3",
+ 9 : "CPU_SUBTYPE_PENTIUM_M",
+ 10 : "CPU_SUBTYPE_PENTIUM_4",
+ 11 : "CPU_SUBTYPE_ITANIUM",
+ 12 : "CPU_SUBTYPE_XEON",
+ 34 : "CPU_SUBTYPE_XEON_MP",
+ 42 : "CPU_SUBTYPE_PENTIUM_4_M",
+ 43 : "CPU_SUBTYPE_ITANIUM_2",
+ 38 : "CPU_SUBTYPE_PENTPRO",
+ 40 : "CPU_SUBTYPE_PENTIUM_3_M",
+ 52 : "CPU_SUBTYPE_PENTIUM_3_XEON",
+ 102 : "CPU_SUBTYPE_PENTII_M3",
+ 132 : "CPU_SUBTYPE_486SX",
+ 166 : "CPU_SUBTYPE_PENTII_M5",
+ 199 : "CPU_SUBTYPE_CELERON",
+ 231 : "CPU_SUBTYPE_CELERON_MOBILE"
+}
+
+MC680_SUBTYPE = {
+ 1 : "CPU_SUBTYPE_MC680x0_ALL",
+ 2 : "CPU_SUBTYPE_MC68040",
+ 3 : "CPU_SUBTYPE_MC68030_ONLY"
+}
+
+MIPS_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_MIPS_ALL",
+ 1 : "CPU_SUBTYPE_MIPS_R2300",
+ 2 : "CPU_SUBTYPE_MIPS_R2600",
+ 3 : "CPU_SUBTYPE_MIPS_R2800",
+ 4 : "CPU_SUBTYPE_MIPS_R2000a",
+ 5 : "CPU_SUBTYPE_MIPS_R2000",
+ 6 : "CPU_SUBTYPE_MIPS_R3000a",
+ 7 : "CPU_SUBTYPE_MIPS_R3000"
+}
+
+MC98000_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_MC98000_ALL",
+ 1 : "CPU_SUBTYPE_MC98601"
+}
+
+HPPA_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_HPPA_7100",
+ 1 : "CPU_SUBTYPE_HPPA_7100LC"
+}
+
+MC88_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_MC88000_ALL",
+ 1 : "CPU_SUBTYPE_MC88100",
+ 2 : "CPU_SUBTYPE_MC88110"
+}
+
+SPARC_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_SPARC_ALL"
+}
+
+I860_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_I860_ALL",
+ 1 : "CPU_SUBTYPE_I860_860"
+}
+
+POWERPC_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_POWERPC_ALL",
+ 1 : "CPU_SUBTYPE_POWERPC_601",
+ 2 : "CPU_SUBTYPE_POWERPC_602",
+ 3 : "CPU_SUBTYPE_POWERPC_603",
+ 4 : "CPU_SUBTYPE_POWERPC_603e",
+ 5 : "CPU_SUBTYPE_POWERPC_603ev",
+ 6 : "CPU_SUBTYPE_POWERPC_604",
+ 7 : "CPU_SUBTYPE_POWERPC_604e",
+ 8 : "CPU_SUBTYPE_POWERPC_620",
+ 9 : "CPU_SUBTYPE_POWERPC_750",
+ 10 : "CPU_SUBTYPE_POWERPC_7400",
+ 11 : "CPU_SUBTYPE_POWERPC_7450",
+ 100 : "CPU_SUBTYPE_POWERPC_970"
+}
+
+ARM_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_ARM_ALL12",
+ 5 : "CPU_SUBTYPE_ARM_V4T",
+ 6 : "CPU_SUBTYPE_ARM_V6",
+ 7 : "CPU_SUBTYPE_ARM_V5TEJ",
+ 8 : "CPU_SUBTYPE_ARM_XSCALE",
+ 9 : "CPU_SUBTYPE_ARM_V7",
+ 10 : "CPU_SUBTYPE_ARM_V7F",
+ 12 : "CPU_SUBTYPE_ARM_V7K"
+}
+
+VAX_SUBTYPE = {
+ 0 : "CPU_SUBTYPE_VAX_ALL",
+ 1 : "CPU_SUBTYPE_VAX780",
+ 2 : "CPU_SUBTYPE_VAX785",
+ 3 : "CPU_SUBTYPE_VAX750",
+ 4 : "CPU_SUBTYPE_VAX730",
+ 5 : "CPU_SUBTYPE_UVAXI",
+ 6 : "CPU_SUBTYPE_UVAXII",
+ 7 : "CPU_SUBTYPE_VAX8200",
+ 8 : "CPU_SUBTYPE_VAX8500",
+ 9 : "CPU_SUBTYPE_VAX8600",
+ 10 : "CPU_SUBTYPE_VAX8650",
+ 11 : "CPU_SUBTYPE_VAX8800",
+ 12 : "CPU_SUBTYPE_UVAXIII",
+}
+
+
+def get_cpu_subtype(cpu_type, cpu_subtype):
+ st = cpu_subtype & 0x0fffffff
+
+ if cpu_type == 1:
+ subtype = VAX_SUBTYPE.get(st, st)
+ elif cpu_type == 6:
+ subtype = MC680_SUBTYPE.get(st, st)
+ elif cpu_type == 7:
+ subtype = INTEL_SUBTYPE.get(st, st)
+ elif cpu_type == 7 | _CPU_ARCH_ABI64:
+ subtype = INTEL64_SUBTYPE.get(st, st)
+ elif cpu_type == 8:
+ subtype = MIPS_SUBTYPE.get(st, st)
+ elif cpu_type == 10:
+ subtype = MC98000_SUBTYPE.get(st, st)
+ elif cpu_type == 11:
+ subtype = HPPA_SUBTYPE.get(st, st)
+ elif cpu_type == 12:
+ subtype = ARM_SUBTYPE.get(st, st)
+ elif cpu_type == 13:
+ subtype = MC88_SUBTYPE.get(st, st)
+ elif cpu_type == 14:
+ subtype = SPARC_SUBTYPE.get(st, st)
+ elif cpu_type == 15:
+ subtype = I860_SUBTYPE.get(st, st)
+ elif cpu_type == 18:
+ subtype = POWERPC_SUBTYPE.get(st, st)
+ elif cpu_type == 18 | _CPU_ARCH_ABI64:
+ subtype = POWERPC_SUBTYPE.get(st, st)
+ else:
+ subtype = str(st)
+
+ return subtype
+
+
+_MH_EXECUTE_SYM = "__mh_execute_header"
+MH_EXECUTE_SYM = "_mh_execute_header"
+_MH_BUNDLE_SYM = "__mh_bundle_header"
+MH_BUNDLE_SYM = "_mh_bundle_header"
+_MH_DYLIB_SYM = "__mh_dylib_header"
+MH_DYLIB_SYM = "_mh_dylib_header"
+_MH_DYLINKER_SYM = "__mh_dylinker_header"
+MH_DYLINKER_SYM = "_mh_dylinker_header"
+
+(
+ MH_OBJECT, MH_EXECUTE, MH_FVMLIB, MH_CORE, MH_PRELOAD, MH_DYLIB,
+ MH_DYLINKER, MH_BUNDLE, MH_DYLIB_STUB, MH_DSYM
+) = range(0x1, 0xb)
+
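+# header flags; each MH_* constant below is a single bit (1 << n)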
+(
+ MH_NOUNDEFS, MH_INCRLINK, MH_DYLDLINK, MH_BINDATLOAD, MH_PREBOUND,
+ MH_SPLIT_SEGS, MH_LAZY_INIT, MH_TWOLEVEL, MH_FORCE_FLAT, MH_NOMULTIDEFS,
+ MH_NOFIXPREBINDING, MH_PREBINDABLE, MH_ALLMODSBOUND, MH_SUBSECTIONS_VIA_SYMBOLS,
+ MH_CANONICAL, MH_WEAK_DEFINES, MH_BINDS_TO_WEAK, MH_ALLOW_STACK_EXECUTION,
+ MH_ROOT_SAFE, MH_SETUID_SAFE, MH_NO_REEXPORTED_DYLIBS, MH_PIE,
+ MH_DEAD_STRIPPABLE_DYLIB, MH_HAS_TLV_DESCRIPTORS, MH_NO_HEAP_EXECUTION
+) = map((1).__lshift__, range(25))
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+
+integer_t = p_int32
+cpu_type_t = integer_t
+cpu_subtype_t = p_uint32
+
+MH_FILETYPE_NAMES = {
+ MH_OBJECT: 'relocatable object',
+ MH_EXECUTE: 'demand paged executable',
+ MH_FVMLIB: 'fixed vm shared library',
+ MH_CORE: 'core',
+ MH_PRELOAD: 'preloaded executable',
+ MH_DYLIB: 'dynamically bound shared library',
+ MH_DYLINKER: 'dynamic link editor',
+ MH_BUNDLE: 'dynamically bound bundle',
+ MH_DYLIB_STUB: 'shared library stub for static linking',
+ MH_DSYM: 'symbol information',
+}
+
+MH_FILETYPE_SHORTNAMES = {
+ MH_OBJECT: 'object',
+ MH_EXECUTE: 'execute',
+ MH_FVMLIB: 'fvmlib',
+ MH_CORE: 'core',
+ MH_PRELOAD: 'preload',
+ MH_DYLIB: 'dylib',
+ MH_DYLINKER: 'dylinker',
+ MH_BUNDLE: 'bundle',
+ MH_DYLIB_STUB: 'dylib_stub',
+ MH_DSYM: 'dsym',
+}
+
+MH_FLAGS_NAMES = {
+ MH_NOUNDEFS: 'MH_NOUNDEFS',
+ MH_INCRLINK: 'MH_INCRLINK',
+ MH_DYLDLINK: 'MH_DYLDLINK',
+ MH_BINDATLOAD: 'MH_BINDATLOAD',
+ MH_PREBOUND: 'MH_PREBOUND',
+ MH_SPLIT_SEGS: 'MH_SPLIT_SEGS',
+ MH_LAZY_INIT: 'MH_LAZY_INIT',
+ MH_TWOLEVEL: 'MH_TWOLEVEL',
+ MH_FORCE_FLAT: 'MH_FORCE_FLAT',
+ MH_NOMULTIDEFS: 'MH_NOMULTIDEFS',
+ MH_NOFIXPREBINDING: 'MH_NOFIXPREBINDING',
+ MH_PREBINDABLE: 'MH_PREBINDABLE',
+ MH_ALLMODSBOUND: 'MH_ALLMODSBOUND',
+ MH_SUBSECTIONS_VIA_SYMBOLS: 'MH_SUBSECTIONS_VIA_SYMBOLS',
+ MH_CANONICAL: 'MH_CANONICAL',
+ MH_WEAK_DEFINES: 'MH_WEAK_DEFINES',
+ MH_BINDS_TO_WEAK: 'MH_BINDS_TO_WEAK',
+ MH_ALLOW_STACK_EXECUTION: 'MH_ALLOW_STACK_EXECUTION',
+ MH_ROOT_SAFE: 'MH_ROOT_SAFE',
+ MH_SETUID_SAFE: 'MH_SETUID_SAFE',
+ MH_NO_REEXPORTED_DYLIBS: 'MH_NO_REEXPORTED_DYLIBS',
+ MH_PIE: 'MH_PIE',
+ MH_DEAD_STRIPPABLE_DYLIB: 'MH_DEAD_STRIPPABLE_DYLIB',
+ MH_HAS_TLV_DESCRIPTORS: 'MH_HAS_TLV_DESCRIPTORS',
+ MH_NO_HEAP_EXECUTION: 'MH_NO_HEAP_EXECUTION',
+}
+
+MH_FLAGS_DESCRIPTIONS = {
+ MH_NOUNDEFS: 'no undefined references',
+ MH_INCRLINK: 'output of an incremental link',
+ MH_DYLDLINK: 'input for the dynamic linker',
+ MH_BINDATLOAD: 'undefined references bound dynamically when loaded',
+ MH_PREBOUND: 'dynamic undefined references prebound',
+ MH_SPLIT_SEGS: 'split read-only and read-write segments',
+ MH_LAZY_INIT: '(obsolete)',
+ MH_TWOLEVEL: 'using two-level name space bindings',
+    MH_FORCE_FLAT: 'forcing all images to use flat name space bindings',
+ MH_NOMULTIDEFS: 'umbrella guarantees no multiple definitions',
+ MH_NOFIXPREBINDING: 'do not notify prebinding agent about this executable',
+ MH_PREBINDABLE: 'the binary is not prebound but can have its prebinding redone',
+ MH_ALLMODSBOUND: 'indicates that this binary binds to all two-level namespace modules of its dependent libraries',
+ MH_SUBSECTIONS_VIA_SYMBOLS: 'safe to divide up the sections into sub-sections via symbols for dead code stripping',
+ MH_CANONICAL: 'the binary has been canonicalized via the unprebind operation',
+ MH_WEAK_DEFINES: 'the final linked image contains external weak symbols',
+ MH_BINDS_TO_WEAK: 'the final linked image uses weak symbols',
+ MH_ALLOW_STACK_EXECUTION: 'all stacks in the task will be given stack execution privilege',
+ MH_ROOT_SAFE: 'the binary declares it is safe for use in processes with uid zero',
+ MH_SETUID_SAFE: 'the binary declares it is safe for use in processes when issetugid() is true',
+ MH_NO_REEXPORTED_DYLIBS: 'the static linker does not need to examine dependent dylibs to see if any are re-exported',
+ MH_PIE: 'the OS will load the main executable at a random address',
+ MH_DEAD_STRIPPABLE_DYLIB: 'the static linker will automatically not create a LC_LOAD_DYLIB load command to the dylib if no symbols are being referenced from the dylib',
+ MH_HAS_TLV_DESCRIPTORS: 'contains a section of type S_THREAD_LOCAL_VARIABLES',
+ MH_NO_HEAP_EXECUTION: 'the OS will run the main executable with a non-executable heap even on platforms that don\'t require it',
+}
+
+class mach_version_helper(Structure):
+ _fields_ = (
+        ('major', p_uint16),
+ ('minor', p_uint8),
+ ('rev', p_uint8),
+ )
+ def __str__(self):
+ return '%s.%s.%s' % (self.major, self.minor, self.rev)
+
+class mach_timestamp_helper(p_uint32):
+ def __str__(self):
+ return time.ctime(self)
+
+def read_struct(f, s, **kw):
+ return s.from_fileobj(f, **kw)
+
+class mach_header(Structure):
+ _fields_ = (
+ ('magic', p_uint32),
+ ('cputype', cpu_type_t),
+ ('cpusubtype', cpu_subtype_t),
+ ('filetype', p_uint32),
+ ('ncmds', p_uint32),
+ ('sizeofcmds', p_uint32),
+ ('flags', p_uint32),
+ )
+ def _describe(self):
+ bit = 1
+ flags = self.flags
+ dflags = []
+ while flags and bit < (1<<32):
+ if flags & bit:
+ dflags.append({'name': MH_FLAGS_NAMES.get(bit, str(bit)), 'description': MH_FLAGS_DESCRIPTIONS.get(bit, str(bit))})
+ flags = flags ^ bit
+ bit <<= 1
+ return (
+ ('magic', int(self.magic)),
+ ('cputype_string', CPU_TYPE_NAMES.get(self.cputype, self.cputype)),
+ ('cputype', int(self.cputype)),
+ ('cpusubtype_string', get_cpu_subtype(self.cputype, self.cpusubtype)),
+ ('cpusubtype', int(self.cpusubtype)),
+ ('filetype_string', MH_FILETYPE_NAMES.get(self.filetype, self.filetype)),
+ ('filetype', int(self.filetype)),
+ ('ncmds', self.ncmds),
+ ('sizeofcmds', self.sizeofcmds),
+ ('flags', dflags),
+ ('raw_flags', int(self.flags))
+ )
+
+class mach_header_64(mach_header):
+ _fields_ = mach_header._fields_ + (('reserved', p_uint32),)
+
+class load_command(Structure):
+ _fields_ = (
+ ('cmd', p_uint32),
+ ('cmdsize', p_uint32),
+ )
+
+ def get_cmd_name(self):
+ return LC_NAMES.get(self.cmd, self.cmd)
+
+LC_REQ_DYLD = 0x80000000
+
+(
+ LC_SEGMENT, LC_SYMTAB, LC_SYMSEG, LC_THREAD, LC_UNIXTHREAD, LC_LOADFVMLIB,
+ LC_IDFVMLIB, LC_IDENT, LC_FVMFILE, LC_PREPAGE, LC_DYSYMTAB, LC_LOAD_DYLIB,
+ LC_ID_DYLIB, LC_LOAD_DYLINKER, LC_ID_DYLINKER, LC_PREBOUND_DYLIB,
+ LC_ROUTINES, LC_SUB_FRAMEWORK, LC_SUB_UMBRELLA, LC_SUB_CLIENT,
+ LC_SUB_LIBRARY, LC_TWOLEVEL_HINTS, LC_PREBIND_CKSUM
+) = range(0x1, 0x18)
+
+LC_LOAD_WEAK_DYLIB = LC_REQ_DYLD | 0x18
+
+LC_SEGMENT_64 = 0x19
+LC_ROUTINES_64 = 0x1a
+LC_UUID = 0x1b
+LC_RPATH = (0x1c | LC_REQ_DYLD)
+LC_CODE_SIGNATURE = 0x1d
+LC_CODE_SEGMENT_SPLIT_INFO = 0x1e
+LC_REEXPORT_DYLIB = 0x1f | LC_REQ_DYLD
+LC_LAZY_LOAD_DYLIB = 0x20
+LC_ENCRYPTION_INFO = 0x21
+LC_DYLD_INFO = 0x22
+LC_DYLD_INFO_ONLY = 0x22 | LC_REQ_DYLD
+LC_LOAD_UPWARD_DYLIB = 0x23 | LC_REQ_DYLD
+LC_VERSION_MIN_MACOSX = 0x24
+LC_VERSION_MIN_IPHONEOS = 0x25
+LC_FUNCTION_STARTS = 0x26
+LC_DYLD_ENVIRONMENT = 0x27
+LC_MAIN = 0x28 | LC_REQ_DYLD
+LC_DATA_IN_CODE = 0x29
+LC_SOURCE_VERSION = 0x2a
+LC_DYLIB_CODE_SIGN_DRS = 0x2b
+LC_ENCRYPTION_INFO_64 = 0x2c
+LC_LINKER_OPTION = 0x2d
+
+
+# this is really a union.. but whatever
+class lc_str(p_uint32):
+ pass
+
+p_str16 = pypackable('p_str16', bytes, '16s')
+
+vm_prot_t = p_int32
+class segment_command(Structure):
+ _fields_ = (
+ ('segname', p_str16),
+ ('vmaddr', p_uint32),
+ ('vmsize', p_uint32),
+ ('fileoff', p_uint32),
+ ('filesize', p_uint32),
+ ('maxprot', vm_prot_t),
+ ('initprot', vm_prot_t),
+ ('nsects', p_uint32), # read the section structures ?
+ ('flags', p_uint32),
+ )
+
+    def describe(self):
+        s = {}
+        # segname is a packed byte string padded with NULs
+        s['segname'] = self.segname.rstrip(b'\x00')
+ s['vmaddr'] = int(self.vmaddr)
+ s['vmsize'] = int(self.vmsize)
+ s['fileoff'] = int(self.fileoff)
+ s['filesize'] = int(self.filesize)
+ s['initprot'] = self.get_initial_virtual_memory_protections()
+ s['initprot_raw'] = int(self.initprot)
+ s['maxprot'] = self.get_max_virtual_memory_protections()
+ s['maxprot_raw'] = int(self.maxprot)
+ s['nsects'] = int(self.nsects)
+ s['flags'] = self.flags
+ return s
+
+ def get_initial_virtual_memory_protections(self):
+ vm = []
+ if self.initprot == 0:
+ vm.append("VM_PROT_NONE")
+ if self.initprot & 1:
+ vm.append("VM_PROT_READ")
+ if self.initprot & 2:
+ vm.append("VM_PROT_WRITE")
+ if self.initprot & 4:
+ vm.append("VM_PROT_EXECUTE")
+ return vm
+
+ def get_max_virtual_memory_protections(self):
+ vm = []
+ if self.maxprot == 0:
+ vm.append("VM_PROT_NONE")
+ if self.maxprot & 1:
+ vm.append("VM_PROT_READ")
+ if self.maxprot & 2:
+ vm.append("VM_PROT_WRITE")
+ if self.maxprot & 4:
+ vm.append("VM_PROT_EXECUTE")
+ return vm
+
+class segment_command_64(Structure):
+ _fields_ = (
+ ('segname', p_str16),
+ ('vmaddr', p_uint64),
+ ('vmsize', p_uint64),
+ ('fileoff', p_uint64),
+ ('filesize', p_uint64),
+ ('maxprot', vm_prot_t),
+ ('initprot', vm_prot_t),
+ ('nsects', p_uint32), # read the section structures ?
+ ('flags', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+        s['segname'] = self.segname.rstrip(b'\x00')
+ s['vmaddr'] = int(self.vmaddr)
+ s['vmsize'] = int(self.vmsize)
+ s['fileoff'] = int(self.fileoff)
+ s['filesize'] = int(self.filesize)
+ s['initprot'] = self.get_initial_virtual_memory_protections()
+ s['initprot_raw'] = int(self.initprot)
+ s['maxprot'] = self.get_max_virtual_memory_protections()
+ s['maxprot_raw'] = int(self.maxprot)
+ s['nsects'] = int(self.nsects)
+ s['flags'] = self.flags
+ return s
+
+ def get_initial_virtual_memory_protections(self):
+ vm = []
+ if self.initprot == 0:
+ vm.append("VM_PROT_NONE")
+ if self.initprot & 1:
+ vm.append("VM_PROT_READ")
+ if self.initprot & 2:
+ vm.append("VM_PROT_WRITE")
+ if self.initprot & 4:
+ vm.append("VM_PROT_EXECUTE")
+ return vm
+
+ def get_max_virtual_memory_protections(self):
+ vm = []
+ if self.maxprot == 0:
+ vm.append("VM_PROT_NONE")
+ if self.maxprot & 1:
+ vm.append("VM_PROT_READ")
+ if self.maxprot & 2:
+ vm.append("VM_PROT_WRITE")
+ if self.maxprot & 4:
+ vm.append("VM_PROT_EXECUTE")
+ return vm
+
+
+SG_HIGHVM = 0x1
+SG_FVMLIB = 0x2
+SG_NORELOC = 0x4
+
+class section(Structure):
+ _fields_ = (
+ ('sectname', p_str16),
+ ('segname', p_str16),
+ ('addr', p_uint32),
+ ('size', p_uint32),
+ ('offset', p_uint32),
+ ('align', p_uint32),
+ ('reloff', p_uint32),
+ ('nreloc', p_uint32),
+ ('flags', p_uint32),
+ ('reserved1', p_uint32),
+ ('reserved2', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+        s['sectname'] = self.sectname.rstrip(b'\x00')
+        s['segname'] = self.segname.rstrip(b'\x00')
+ s['addr'] = int(self.addr)
+ s['size'] = int(self.size)
+ s['offset'] = int(self.offset)
+ s['align'] = int(self.align)
+ s['reloff'] = int(self.reloff)
+ s['nreloc'] = int(self.nreloc)
+ f = {}
+ f['type'] = FLAG_SECTION_TYPES[int(self.flags) & 0xff]
+ f['attributes'] = []
+ for k in FLAG_SECTION_ATTRIBUTES:
+ if k & self.flags:
+ f['attributes'].append(FLAG_SECTION_ATTRIBUTES[k])
+ if not f['attributes']:
+ del f['attributes']
+ s['flags'] = f
+ s['reserved1'] = int(self.reserved1)
+ s['reserved2'] = int(self.reserved2)
+ return s
+
+ def add_section_data(self, data):
+ self.section_data = data
+
+class section_64(Structure):
+ _fields_ = (
+ ('sectname', p_str16),
+ ('segname', p_str16),
+ ('addr', p_uint64),
+ ('size', p_uint64),
+ ('offset', p_uint32),
+ ('align', p_uint32),
+ ('reloff', p_uint32),
+ ('nreloc', p_uint32),
+ ('flags', p_uint32),
+ ('reserved1', p_uint32),
+ ('reserved2', p_uint32),
+ ('reserved3', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['sectname'] = self.sectname.rstrip('\x00')
+ s['segname'] = self.segname.rstrip('\x00')
+ s['addr'] = int(self.addr)
+ s['size'] = int(self.size)
+ s['offset'] = int(self.offset)
+ s['align'] = int(self.align)
+ s['reloff'] = int(self.reloff)
+ s['nreloc'] = int(self.nreloc)
+ f = {}
+ f['type'] = FLAG_SECTION_TYPES[int(self.flags) & 0xff]
+ f['attributes'] = []
+ for k in FLAG_SECTION_ATTRIBUTES:
+ if k & self.flags:
+ f['attributes'].append(FLAG_SECTION_ATTRIBUTES[k])
+ if not f['attributes']:
+ del f['attributes']
+ s['flags'] = f
+ s['reserved1'] = int(self.reserved1)
+ s['reserved2'] = int(self.reserved2)
+ s['reserved3'] = int(self.reserved3)
+ return s
+
+ def add_section_data(self, data):
+ self.section_data = data
+
+SECTION_TYPE = 0xff
+SECTION_ATTRIBUTES = 0xffffff00
+S_REGULAR = 0x0
+S_ZEROFILL = 0x1
+S_CSTRING_LITERALS = 0x2
+S_4BYTE_LITERALS = 0x3
+S_8BYTE_LITERALS = 0x4
+S_LITERAL_POINTERS = 0x5
+S_NON_LAZY_SYMBOL_POINTERS = 0x6
+S_LAZY_SYMBOL_POINTERS = 0x7
+S_SYMBOL_STUBS = 0x8
+S_MOD_INIT_FUNC_POINTERS = 0x9
+S_MOD_TERM_FUNC_POINTERS = 0xa
+S_COALESCED = 0xb
+
+FLAG_SECTION_TYPES = {
+ 0x0 : "S_REGULAR",
+ 0x1 : "S_ZEROFILL",
+ 0x2 : "S_CSTRING_LITERALS",
+ 0x3 : "S_4BYTE_LITERALS",
+ 0x4 : "S_8BYTE_LITERALS",
+ 0x5 : "S_LITERAL_POINTERS",
+ 0x6 : "S_NON_LAZY_SYMBOL_POINTERS",
+ 0x7 : "S_LAZY_SYMBOL_POINTERS",
+ 0x8 : "S_SYMBOL_STUBS",
+ 0x9 : "S_MOD_INIT_FUNC_POINTERS",
+ 0xa : "S_MOD_TERM_FUNC_POINTERS",
+ 0xb : "S_COALESCED",
+ 0xc : "S_GB_ZEROFILL",
+ 0xd : "S_INTERPOSING",
+ 0xe : "S_16BYTE_LITERALS",
+ 0xf : "S_DTRACE_DOF",
+ 0x10 : "S_LAZY_DYLIB_SYMBOL_POINTERS",
+ 0x11 : "S_THREAD_LOCAL_REGULAR",
+ 0x12 : "S_THREAD_LOCAL_ZEROFILL",
+ 0x13 : "S_THREAD_LOCAL_VARIABLES",
+ 0x14 : "S_THREAD_LOCAL_VARIABLE_POINTERS",
+ 0x15 : "S_THREAD_LOCAL_INIT_FUNCTION_POINTERS"
+}
+
+
+FLAG_SECTION_ATTRIBUTES = {
+ 0x80000000 : "S_ATTR_PURE_INSTRUCTIONS",
+ 0x40000000 : "S_ATTR_NO_TOC",
+ 0x20000000 : "S_ATTR_STRIP_STATIC_SYMS",
+ 0x10000000 : "S_ATTR_NO_DEAD_STRIP",
+ 0x08000000 : "S_ATTR_LIVE_SUPPORT",
+ 0x04000000 : "S_ATTR_SELF_MODIFYING_CODE",
+ 0x02000000 : "S_ATTR_DEBUG",
+ 0x00000400 : "S_ATTR_SOME_INSTRUCTIONS",
+ 0x00000200 : "S_ATTR_EXT_RELOC",
+ 0x00000100 : "S_ATTR_LOC_RELOC"
+}
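+
+# Hypothetical helper, not part of macholib: a raw section `flags` word
+# splits into a type (low byte) and attribute bits, mirroring
+# section.describe() above. 0x80000400 is typical for __TEXT,__text.
+def _example_decode_section_flags(flags=0x80000400):
+    sect_type = FLAG_SECTION_TYPES[flags & 0xff]   # 'S_REGULAR'
+    attrs = [name for bit, name in FLAG_SECTION_ATTRIBUTES.items()
+             if bit & flags]  # PURE_INSTRUCTIONS and SOME_INSTRUCTIONS
+    return sect_type, attrs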
+
+SECTION_ATTRIBUTES_USR = 0xff000000
+S_ATTR_PURE_INSTRUCTIONS = 0x80000000
+S_ATTR_NO_TOC = 0x40000000
+S_ATTR_STRIP_STATIC_SYMS = 0x20000000
+SECTION_ATTRIBUTES_SYS = 0x00ffff00
+S_ATTR_SOME_INSTRUCTIONS = 0x00000400
+S_ATTR_EXT_RELOC = 0x00000200
+S_ATTR_LOC_RELOC = 0x00000100
+
+
+SEG_PAGEZERO = "__PAGEZERO"
+SEG_TEXT = "__TEXT"
+SECT_TEXT = "__text"
+SECT_FVMLIB_INIT0 = "__fvmlib_init0"
+SECT_FVMLIB_INIT1 = "__fvmlib_init1"
+SEG_DATA = "__DATA"
+SECT_DATA = "__data"
+SECT_BSS = "__bss"
+SECT_COMMON = "__common"
+SEG_OBJC = "__OBJC"
+SECT_OBJC_SYMBOLS = "__symbol_table"
+SECT_OBJC_MODULES = "__module_info"
+SECT_OBJC_STRINGS = "__selector_strs"
+SECT_OBJC_REFS = "__selector_refs"
+SEG_ICON = "__ICON"
+SECT_ICON_HEADER = "__header"
+SECT_ICON_TIFF = "__tiff"
+SEG_LINKEDIT = "__LINKEDIT"
+SEG_UNIXSTACK = "__UNIXSTACK"
+
+#
+# These _command classes are mostly redundant: the load commands are
+# kept as separate classes even though pairs like fvmlib and
+# fvmlib_command have identical fields.
+#
+
+class fvmlib(Structure):
+ _fields_ = (
+ ('name', lc_str),
+ ('minor_version', mach_version_helper),
+ ('header_addr', p_uint32),
+ )
+
+class fvmlib_command(Structure):
+ _fields_ = fvmlib._fields_
+
+ def describe(self):
+ s = {}
+ s['header_addr'] = int(self.header_addr)
+ return s
+
+class dylib(Structure):
+ _fields_ = (
+ ('name', lc_str),
+ ('timestamp', mach_timestamp_helper),
+ ('current_version', mach_version_helper),
+ ('compatibility_version', mach_version_helper),
+ )
+
+# merged dylib structure
+class dylib_command(Structure):
+ _fields_ = dylib._fields_
+
+ def describe(self):
+ s = {}
+ s['timestamp'] = str(self.timestamp)
+ s['current_version'] = str(self.current_version)
+ s['compatibility_version'] = str(self.compatibility_version)
+ return s
+
+class sub_framework_command(Structure):
+ _fields_ = (
+ ('umbrella', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+class sub_client_command(Structure):
+ _fields_ = (
+ ('client', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+class sub_umbrella_command(Structure):
+ _fields_ = (
+ ('sub_umbrella', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+class sub_library_command(Structure):
+ _fields_ = (
+ ('sub_library', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+class prebound_dylib_command(Structure):
+ _fields_ = (
+ ('name', lc_str),
+ ('nmodules', p_uint32),
+ ('linked_modules', lc_str),
+ )
+
+ def describe(self):
+ return {'nmodules': int(self.nmodules)}
+
+class dylinker_command(Structure):
+ _fields_ = (
+ ('name', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+class thread_command(Structure):
+ _fields_ = (
+ )
+
+ def describe(self):
+ return {}
+
+class entry_point_command(Structure):
+ _fields_ = (
+ ('entryoff', p_uint64),
+ ('stacksize', p_uint64),
+ )
+
+ def describe(self):
+ s = {}
+ s['entryoff'] = int(self.entryoff)
+ s['stacksize'] = int(self.stacksize)
+ return s
+
+class routines_command(Structure):
+ _fields_ = (
+ ('init_address', p_uint32),
+ ('init_module', p_uint32),
+ ('reserved1', p_uint32),
+ ('reserved2', p_uint32),
+ ('reserved3', p_uint32),
+ ('reserved4', p_uint32),
+ ('reserved5', p_uint32),
+ ('reserved6', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['init_address'] = int(self.init_address)
+ s['init_module'] = int(self.init_module)
+ s['reserved1'] = int(self.reserved1)
+ s['reserved2'] = int(self.reserved2)
+ s['reserved3'] = int(self.reserved3)
+ s['reserved4'] = int(self.reserved4)
+ s['reserved5'] = int(self.reserved5)
+ s['reserved6'] = int(self.reserved6)
+ return s
+
+class routines_command_64(Structure):
+ _fields_ = (
+ ('init_address', p_uint64),
+ ('init_module', p_uint64),
+ ('reserved1', p_uint64),
+ ('reserved2', p_uint64),
+ ('reserved3', p_uint64),
+ ('reserved4', p_uint64),
+ ('reserved5', p_uint64),
+ ('reserved6', p_uint64),
+ )
+
+ def describe(self):
+ s = {}
+ s['init_address'] = int(self.init_address)
+ s['init_module'] = int(self.init_module)
+ s['reserved1'] = int(self.reserved1)
+ s['reserved2'] = int(self.reserved2)
+ s['reserved3'] = int(self.reserved3)
+ s['reserved4'] = int(self.reserved4)
+ s['reserved5'] = int(self.reserved5)
+ s['reserved6'] = int(self.reserved6)
+ return s
+
+class symtab_command(Structure):
+ _fields_ = (
+ ('symoff', p_uint32),
+ ('nsyms', p_uint32),
+ ('stroff', p_uint32),
+ ('strsize', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['symoff'] = int(self.symoff)
+ s['nsyms'] = int(self.nsyms)
+ s['stroff'] = int(self.stroff)
+ s['strsize'] = int(self.strsize)
+ return s
+
+
+class dysymtab_command(Structure):
+ _fields_ = (
+ ('ilocalsym', p_uint32),
+ ('nlocalsym', p_uint32),
+ ('iextdefsym', p_uint32),
+ ('nextdefsym', p_uint32),
+ ('iundefsym', p_uint32),
+ ('nundefsym', p_uint32),
+ ('tocoff', p_uint32),
+ ('ntoc', p_uint32),
+ ('modtaboff', p_uint32),
+ ('nmodtab', p_uint32),
+ ('extrefsymoff', p_uint32),
+ ('nextrefsyms', p_uint32),
+ ('indirectsymoff', p_uint32),
+ ('nindirectsyms', p_uint32),
+ ('extreloff', p_uint32),
+ ('nextrel', p_uint32),
+ ('locreloff', p_uint32),
+ ('nlocrel', p_uint32),
+ )
+
+ def describe(self):
+ dys = {}
+ dys['ilocalsym'] = int(self.ilocalsym)
+ dys['nlocalsym'] = int(self.nlocalsym)
+ dys['iextdefsym'] = int(self.iextdefsym)
+ dys['nextdefsym'] = int(self.nextdefsym)
+ dys['iundefsym'] = int(self.iundefsym)
+ dys['nundefsym'] = int(self.nundefsym)
+ dys['tocoff'] = int(self.tocoff)
+ dys['ntoc'] = int(self.ntoc)
+ dys['modtaboff'] = int(self.modtaboff)
+ dys['nmodtab'] = int(self.nmodtab)
+ dys['extrefsymoff'] = int(self.extrefsymoff)
+ dys['nextrefsyms'] = int(self.nextrefsyms)
+ dys['indirectsymoff'] = int(self.indirectsymoff)
+ dys['nindirectsyms'] = int(self.nindirectsyms)
+ dys['extreloff'] = int(self.extreloff)
+ dys['nextrel'] = int(self.nextrel)
+ dys['locreloff'] = int(self.locreloff)
+ dys['nlocrel'] = int(self.nlocrel)
+ return dys
+
+INDIRECT_SYMBOL_LOCAL = 0x80000000
+INDIRECT_SYMBOL_ABS = 0x40000000
+
+class dylib_table_of_contents(Structure):
+ _fields_ = (
+ ('symbol_index', p_uint32),
+ ('module_index', p_uint32),
+ )
+
+class dylib_module(Structure):
+ _fields_ = (
+ ('module_name', p_uint32),
+ ('iextdefsym', p_uint32),
+ ('nextdefsym', p_uint32),
+ ('irefsym', p_uint32),
+ ('nrefsym', p_uint32),
+ ('ilocalsym', p_uint32),
+ ('nlocalsym', p_uint32),
+ ('iextrel', p_uint32),
+ ('nextrel', p_uint32),
+ ('iinit_iterm', p_uint32),
+ ('ninit_nterm', p_uint32),
+ ('objc_module_info_addr', p_uint32),
+ ('objc_module_info_size', p_uint32),
+ )
+
+class dylib_module_64(Structure):
+ _fields_ = (
+ ('module_name', p_uint32),
+ ('iextdefsym', p_uint32),
+ ('nextdefsym', p_uint32),
+ ('irefsym', p_uint32),
+ ('nrefsym', p_uint32),
+ ('ilocalsym', p_uint32),
+ ('nlocalsym', p_uint32),
+ ('iextrel', p_uint32),
+ ('nextrel', p_uint32),
+ ('iinit_iterm', p_uint32),
+ ('ninit_nterm', p_uint32),
+ ('objc_module_info_size', p_uint32),
+ ('objc_module_info_addr', p_uint64),
+ )
+
+class dylib_reference(Structure):
+ _fields_ = (
+ # XXX - ick, fix
+ ('isym_flags', p_uint32),
+ #('isym', p_uint8 * 3),
+ #('flags', p_uint8),
+ )
+
+class twolevel_hints_command(Structure):
+ _fields_ = (
+ ('offset', p_uint32),
+ ('nhints', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['offset'] = int(self.offset)
+ s['nhints'] = int(self.nhints)
+ return s
+
+class twolevel_hint(Structure):
+ _fields_ = (
+ # XXX - ick, fix
+ ('isub_image_itoc', p_uint32),
+ #('isub_image', p_uint8),
+ #('itoc', p_uint8 * 3),
+ )
+
+class prebind_cksum_command(Structure):
+ _fields_ = (
+ ('cksum', p_uint32),
+ )
+
+ def describe(self):
+ return {'cksum': int(self.cksum)}
+
+class symseg_command(Structure):
+ _fields_ = (
+ ('offset', p_uint32),
+ ('size', p_uint32),
+ )
+
+ def describe(self):
+        s = {}
+        s['offset'] = int(self.offset)
+        s['size'] = int(self.size)
+        return s
+
+class ident_command(Structure):
+ _fields_ = (
+ )
+
+ def describe(self):
+ return {}
+
+class fvmfile_command(Structure):
+ _fields_ = (
+ ('name', lc_str),
+ ('header_addr', p_uint32),
+ )
+
+ def describe(self):
+ return {'header_addr': int(self.header_addr)}
+
+class uuid_command (Structure):
+ _fields_ = (
+ ('uuid', p_str16),
+ )
+
+ def describe(self):
+ return {'uuid': self.uuid.rstrip('\x00')}
+
+class rpath_command (Structure):
+ _fields_ = (
+ ('path', lc_str),
+ )
+
+ def describe(self):
+ return {}
+
+
+class linkedit_data_command (Structure):
+ _fields_ = (
+ ('dataoff', p_uint32),
+ ('datasize', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['dataoff'] = int(self.dataoff)
+ s['datasize'] = int(self.datasize)
+ return s
+
+
+class version_min_command (Structure):
+ _fields_ = (
+ ('version', p_uint32), # X.Y.Z is encoded in nibbles xxxx.yy.zz
+ ('reserved', p_uint32),
+ )
+
+ def describe(self):
+ v = int(self.version)
+ v3 = v & 0xFF
+ v = v >> 8
+ v2 = v & 0xFF
+ v = v >> 8
+ v1 = v & 0xFFFF
+ return {'version': str(int(v1)) + "." + str(int(v2)) + "." + str(int(v3))}
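+
+# Worked example (hypothetical helper, not part of macholib): 0x000a0900
+# packs 10 in the high 16 bits and 9/0 in the two low bytes, so
+# describe() yields '10.9.0'.
+def _example_version_min():
+    cmd = version_min_command(version=0x000a0900)
+    assert cmd.describe() == {'version': '10.9.0'}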
+
+class source_version_command (Structure):
+ _fields_ = (
+ ('version', p_uint64),
+ )
+
+ def describe(self):
+ v = int(self.version)
+ a = v >> 40
+ b = (v >> 30) & 0x3ff
+ c = (v >> 20) & 0x3ff
+ d = (v >> 10) & 0x3ff
+ e = v & 0x3ff
+ r = str(a)+'.'+str(b)+'.'+str(c)+'.'+str(d)+'.'+str(e)
+ return {'version': r}
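+
+# Worked example (hypothetical helper, not part of macholib): the 64-bit
+# source version packs a.b.c.d.e into 24+10+10+10+10 bits, so
+# (5 << 40) | (2 << 30) decodes to '5.2.0.0.0'.
+def _example_source_version():
+    cmd = source_version_command(version=(5 << 40) | (2 << 30))
+    assert cmd.describe() == {'version': '5.2.0.0.0'}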
+
+class encryption_info_command (Structure):
+ _fields_ = (
+ ('cryptoff', p_uint32),
+ ('cryptsize', p_uint32),
+ ('cryptid', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['cryptoff'] = int(self.cryptoff)
+ s['cryptsize'] = int(self.cryptsize)
+ s['cryptid'] = int(self.cryptid)
+ return s
+
+class encryption_info_command_64 (Structure):
+ _fields_ = (
+ ('cryptoff', p_uint32),
+ ('cryptsize', p_uint32),
+ ('cryptid', p_uint32),
+ ('pad', p_uint32),
+ )
+
+ def describe(self):
+ s = {}
+ s['cryptoff'] = int(self.cryptoff)
+ s['cryptsize'] = int(self.cryptsize)
+ s['cryptid'] = int(self.cryptid)
+ s['pad'] = int(self.pad)
+ return s
+
+
+class dyld_info_command (Structure):
+ _fields_ = (
+ ('rebase_off', p_uint32),
+ ('rebase_size', p_uint32),
+ ('bind_off', p_uint32),
+ ('bind_size', p_uint32),
+ ('weak_bind_off', p_uint32),
+ ('weak_bind_size', p_uint32),
+ ('lazy_bind_off', p_uint32),
+ ('lazy_bind_size', p_uint32),
+ ('export_off', p_uint32),
+ ('export_size', p_uint32),
+ )
+
+ def describe(self):
+ dyld = {}
+ dyld['rebase_off'] = int(self.rebase_off)
+ dyld['rebase_size'] = int(self.rebase_size)
+ dyld['bind_off'] = int(self.bind_off)
+ dyld['bind_size'] = int(self.bind_size)
+ dyld['weak_bind_off'] = int(self.weak_bind_off)
+ dyld['weak_bind_size'] = int(self.weak_bind_size)
+ dyld['lazy_bind_off'] = int(self.lazy_bind_off)
+ dyld['lazy_bind_size'] = int(self.lazy_bind_size)
+ dyld['export_off'] = int(self.export_off)
+ dyld['export_size'] = int(self.export_size)
+ return dyld
+
+class linker_option_command (Structure):
+ _fields_ = (
+ ('count', p_uint32),
+ )
+
+ def describe(self):
+ return {'count': int(self.count)}
+
+
+LC_REGISTRY = {
+ LC_SEGMENT: segment_command,
+ LC_IDFVMLIB: fvmlib_command,
+ LC_LOADFVMLIB: fvmlib_command,
+ LC_ID_DYLIB: dylib_command,
+ LC_LOAD_DYLIB: dylib_command,
+ LC_LOAD_WEAK_DYLIB: dylib_command,
+ LC_SUB_FRAMEWORK: sub_framework_command,
+ LC_SUB_CLIENT: sub_client_command,
+ LC_SUB_UMBRELLA: sub_umbrella_command,
+ LC_SUB_LIBRARY: sub_library_command,
+ LC_PREBOUND_DYLIB: prebound_dylib_command,
+ LC_ID_DYLINKER: dylinker_command,
+ LC_LOAD_DYLINKER: dylinker_command,
+ LC_THREAD: thread_command,
+ LC_UNIXTHREAD: thread_command,
+ LC_ROUTINES: routines_command,
+ LC_SYMTAB: symtab_command,
+ LC_DYSYMTAB: dysymtab_command,
+ LC_TWOLEVEL_HINTS: twolevel_hints_command,
+ LC_PREBIND_CKSUM: prebind_cksum_command,
+ LC_SYMSEG: symseg_command,
+ LC_IDENT: ident_command,
+ LC_FVMFILE: fvmfile_command,
+ LC_SEGMENT_64: segment_command_64,
+ LC_ROUTINES_64: routines_command_64,
+ LC_UUID: uuid_command,
+ LC_RPATH: rpath_command,
+ LC_CODE_SIGNATURE: linkedit_data_command,
+ LC_CODE_SEGMENT_SPLIT_INFO: linkedit_data_command,
+ LC_REEXPORT_DYLIB: dylib_command,
+ LC_LAZY_LOAD_DYLIB: dylib_command,
+ LC_ENCRYPTION_INFO: encryption_info_command,
+ LC_DYLD_INFO: dyld_info_command,
+ LC_DYLD_INFO_ONLY: dyld_info_command,
+ LC_LOAD_UPWARD_DYLIB: dylib_command,
+ LC_VERSION_MIN_MACOSX: version_min_command,
+ LC_VERSION_MIN_IPHONEOS: version_min_command,
+ LC_FUNCTION_STARTS: linkedit_data_command,
+ LC_DYLD_ENVIRONMENT: dylinker_command,
+ LC_MAIN: entry_point_command,
+ LC_DATA_IN_CODE: linkedit_data_command,
+ LC_SOURCE_VERSION: source_version_command,
+ LC_DYLIB_CODE_SIGN_DRS: linkedit_data_command,
+ LC_ENCRYPTION_INFO_64: encryption_info_command_64,
+ LC_LINKER_OPTION: linker_option_command,
+}
+
+LC_NAMES = {
+ LC_SEGMENT: 'LC_SEGMENT',
+ LC_IDFVMLIB: 'LC_IDFVMLIB',
+ LC_LOADFVMLIB: 'LC_LOADFVMLIB',
+ LC_ID_DYLIB: 'LC_ID_DYLIB',
+ LC_LOAD_DYLIB: 'LC_LOAD_DYLIB',
+ LC_LOAD_WEAK_DYLIB: 'LC_LOAD_WEAK_DYLIB',
+ LC_SUB_FRAMEWORK: 'LC_SUB_FRAMEWORK',
+ LC_SUB_CLIENT: 'LC_SUB_CLIENT',
+ LC_SUB_UMBRELLA: 'LC_SUB_UMBRELLA',
+ LC_SUB_LIBRARY: 'LC_SUB_LIBRARY',
+ LC_PREBOUND_DYLIB: 'LC_PREBOUND_DYLIB',
+ LC_ID_DYLINKER: 'LC_ID_DYLINKER',
+ LC_LOAD_DYLINKER: 'LC_LOAD_DYLINKER',
+ LC_THREAD: 'LC_THREAD',
+ LC_UNIXTHREAD: 'LC_UNIXTHREAD',
+ LC_ROUTINES: 'LC_ROUTINES',
+ LC_SYMTAB: 'LC_SYMTAB',
+ LC_DYSYMTAB: 'LC_DYSYMTAB',
+ LC_TWOLEVEL_HINTS: 'LC_TWOLEVEL_HINTS',
+ LC_PREBIND_CKSUM: 'LC_PREBIND_CKSUM',
+ LC_SYMSEG: 'LC_SYMSEG',
+ LC_IDENT: 'LC_IDENT',
+ LC_FVMFILE: 'LC_FVMFILE',
+ LC_SEGMENT_64: 'LC_SEGMENT_64',
+ LC_ROUTINES_64: 'LC_ROUTINES_64',
+ LC_UUID: 'LC_UUID',
+ LC_RPATH: 'LC_RPATH',
+ LC_CODE_SIGNATURE: 'LC_CODE_SIGNATURE',
+ LC_CODE_SEGMENT_SPLIT_INFO: 'LC_CODE_SEGMENT_SPLIT_INFO',
+ LC_REEXPORT_DYLIB: 'LC_REEXPORT_DYLIB',
+ LC_LAZY_LOAD_DYLIB: 'LC_LAZY_LOAD_DYLIB',
+ LC_ENCRYPTION_INFO: 'LC_ENCRYPTION_INFO',
+ LC_DYLD_INFO: 'LC_DYLD_INFO',
+ LC_DYLD_INFO_ONLY: 'LC_DYLD_INFO_ONLY',
+ LC_LOAD_UPWARD_DYLIB: 'LC_LOAD_UPWARD_DYLIB',
+ LC_VERSION_MIN_MACOSX: 'LC_VERSION_MIN_MACOSX',
+ LC_VERSION_MIN_IPHONEOS: 'LC_VERSION_MIN_IPHONEOS',
+ LC_FUNCTION_STARTS: 'LC_FUNCTION_STARTS',
+ LC_DYLD_ENVIRONMENT: 'LC_DYLD_ENVIRONMENT',
+ LC_MAIN: 'LC_MAIN',
+ LC_DATA_IN_CODE: 'LC_DATA_IN_CODE',
+ LC_SOURCE_VERSION: 'LC_SOURCE_VERSION',
+ LC_DYLIB_CODE_SIGN_DRS: 'LC_DYLIB_CODE_SIGN_DRS',
+}
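+
+# Hypothetical sketch, not part of macholib: given a raw load command
+# code, LC_REGISTRY selects the Structure subclass that parses its
+# payload, and LC_NAMES maps it back to a printable name.
+def _example_lookup_load_command(cmd=LC_SYMTAB):
+    klass = LC_REGISTRY.get(cmd)        # symtab_command for LC_SYMTAB
+    name = LC_NAMES.get(cmd, hex(cmd))  # 'LC_SYMTAB'
+    return name, klass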
+
+
+# This is another union.
+class n_un(p_int32):
+ pass
+
+class nlist(Structure):
+ _fields_ = (
+ ('n_un', n_un),
+ ('n_type', p_uint8),
+ ('n_sect', p_uint8),
+ ('n_desc', p_short),
+ ('n_value', p_uint32),
+ )
+
+class nlist_64(Structure):
+ _fields_ = [
+ ('n_un', n_un),
+ ('n_type', p_uint8),
+ ('n_sect', p_uint8),
+ ('n_desc', p_short),
+ ('n_value', p_int64),
+ ]
+
+N_STAB = 0xe0
+N_PEXT = 0x10
+N_TYPE = 0x0e
+N_EXT = 0x01
+
+N_UNDF = 0x0
+N_ABS = 0x2
+N_SECT = 0xe
+N_PBUD = 0xc
+N_INDR = 0xa
+
+NO_SECT = 0
+MAX_SECT = 255
+
+REFERENCE_TYPE = 0xf
+REFERENCE_FLAG_UNDEFINED_NON_LAZY = 0
+REFERENCE_FLAG_UNDEFINED_LAZY = 1
+REFERENCE_FLAG_DEFINED = 2
+REFERENCE_FLAG_PRIVATE_DEFINED = 3
+REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY = 4
+REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY = 5
+
+REFERENCED_DYNAMICALLY = 0x0010
+
+def GET_LIBRARY_ORDINAL(n_desc):
+ return (((n_desc) >> 8) & 0xff)
+
+def SET_LIBRARY_ORDINAL(n_desc, ordinal):
+ return (((n_desc) & 0x00ff) | (((ordinal & 0xff) << 8)))
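+
+# Hypothetical round-trip sketch, not part of macholib: the library
+# ordinal lives in the high byte of n_desc, so SET_LIBRARY_ORDINAL(0, 2)
+# yields 0x0200 and GET_LIBRARY_ORDINAL recovers 2 (the second library
+# load command).
+def _example_library_ordinal():
+    n_desc = SET_LIBRARY_ORDINAL(0, 2)
+    assert n_desc == 0x0200
+    assert GET_LIBRARY_ORDINAL(n_desc) == 2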
+
+SELF_LIBRARY_ORDINAL = 0x0
+MAX_LIBRARY_ORDINAL = 0xfd
+DYNAMIC_LOOKUP_ORDINAL = 0xfe
+EXECUTABLE_ORDINAL = 0xff
+
+N_DESC_DISCARDED = 0x0020
+N_WEAK_REF = 0x0040
+N_WEAK_DEF = 0x0080
+
+# /usr/include/mach-o/fat.h
+FAT_MAGIC = 0xcafebabe
+class fat_header(Structure):
+ _fields_ = (
+ ('magic', p_uint32),
+ ('nfat_arch', p_uint32),
+ )
+
+class fat_arch(Structure):
+ _fields_ = (
+ ('cputype', cpu_type_t),
+ ('cpusubtype', cpu_subtype_t),
+ ('offset', p_uint32),
+ ('size', p_uint32),
+ ('align', p_uint32),
+ )
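+
+# Hypothetical sketch, not part of macholib: a universal binary begins
+# with FAT_MAGIC and nfat_arch big-endian fat_arch records; the header
+# itself can be parsed straight from raw bytes.
+def _example_parse_fat_header():
+    import struct
+    raw = struct.pack('>2I', FAT_MAGIC, 2)
+    hdr = fat_header.from_str(raw)
+    assert int(hdr.nfat_arch) == 2
+    return hdr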
diff --git a/python/macholib/macholib/macho_dump.py b/python/macholib/macholib/macho_dump.py
new file mode 100644
index 000000000..737ce8ebe
--- /dev/null
+++ b/python/macholib/macholib/macho_dump.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import os
+import sys
+
+from macholib._cmdline import main as _main
+from macholib.MachO import MachO
+from macholib.mach_o import *
+
+ARCH_MAP={
+ ('<', '64-bit'): 'x86_64',
+ ('<', '32-bit'): 'i386',
+ ('>', '64-bit'): 'ppc64',
+ ('>', '32-bit'): 'ppc',
+}
+
+def print_file(fp, path):
+ print(path, file=fp)
+ m = MachO(path)
+ for header in m.headers:
+ seen = set()
+ if header.MH_MAGIC == MH_MAGIC_64:
+ sz = '64-bit'
+ else:
+ sz = '32-bit'
+
+ arch = CPU_TYPE_NAMES.get(header.header.cputype,
+ header.header.cputype)
+
+ print(' [%s endian=%r size=%r arch=%r]' % (header.__class__.__name__,
+ header.endian, sz, arch), file=fp)
+ for idx, name, other in header.walkRelocatables():
+ if other not in seen:
+ seen.add(other)
+ print('\t' + other, file=fp)
+ print('', file=fp)
+
+def main():
+ print("WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' instead")
+ _main(print_file)
+
+if __name__ == '__main__':
+ try:
+ sys.exit(main())
+ except KeyboardInterrupt:
+ pass
diff --git a/python/macholib/macholib/macho_find.py b/python/macholib/macholib/macho_find.py
new file mode 100644
index 000000000..0a0cef270
--- /dev/null
+++ b/python/macholib/macholib/macho_find.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+from __future__ import print_function
+from macholib._cmdline import main as _main
+
+
+def print_file(fp, path):
+ print(path, file=fp)
+
+def main():
+ print("WARNING: 'macho_find' is deprecated, use 'python -mmacholib dump' instead")
+ _main(print_file)
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
diff --git a/python/macholib/macholib/macho_standalone.py b/python/macholib/macholib/macho_standalone.py
new file mode 100644
index 000000000..ceec82bc9
--- /dev/null
+++ b/python/macholib/macholib/macho_standalone.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+import os
+import sys
+
+from macholib.MachOStandalone import MachOStandalone
+from macholib.util import strip_files
+
+def standaloneApp(path):
+ if not (os.path.isdir(path) and os.path.exists(
+ os.path.join(path, 'Contents'))):
+ print('%s: %s does not look like an app bundle'
+ % (sys.argv[0], path))
+ sys.exit(1)
+ files = MachOStandalone(path).run()
+ strip_files(files)
+
+def main():
+ print("WARNING: 'macho_standalone' is deprecated, use 'python -mmacholib standalone' instead")
+ if not sys.argv[1:]:
+ raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
+ for fn in sys.argv[1:]:
+ standaloneApp(fn)
+
+if __name__ == '__main__':
+ main()
diff --git a/python/macholib/macholib/ptypes.py b/python/macholib/macholib/ptypes.py
new file mode 100644
index 000000000..f1457c91b
--- /dev/null
+++ b/python/macholib/macholib/ptypes.py
@@ -0,0 +1,290 @@
+"""
+This module defines packable types, that is, types that can be easily
+converted to the binary format used in Mach-O headers.
+"""
+import struct
+import sys
+
+try:
+ from itertools import izip, imap
+except ImportError:
+ izip, imap = zip, map
+from itertools import chain, starmap
+import warnings
+
+__all__ = """
+sizeof
+BasePackable
+Structure
+pypackable
+p_char
+p_byte
+p_ubyte
+p_short
+p_ushort
+p_int
+p_uint
+p_long
+p_ulong
+p_longlong
+p_ulonglong
+p_int8
+p_uint8
+p_int16
+p_uint16
+p_int32
+p_uint32
+p_int64
+p_uint64
+p_float
+p_double
+""".split()
+
+def sizeof(s):
+ """
+ Return the size of an object when packed
+ """
+ if hasattr(s, '_size_'):
+ return s._size_
+
+ elif isinstance(s, bytes):
+ return len(s)
+
+ raise ValueError(s)
+
+class MetaPackable(type):
+ """
+ Fixed size struct.unpack-able types use from_tuple as their designated initializer
+ """
+ def from_mmap(cls, mm, ptr, **kw):
+ return cls.from_str(mm[ptr:ptr+cls._size_], **kw)
+
+ def from_fileobj(cls, f, **kw):
+ return cls.from_str(f.read(cls._size_), **kw)
+
+ def from_str(cls, s, **kw):
+ endian = kw.get('_endian_', cls._endian_)
+ return cls.from_tuple(struct.unpack(endian + cls._format_, s), **kw)
+
+ def from_tuple(cls, tpl, **kw):
+ return cls(tpl[0], **kw)
+
+class BasePackable(object):
+ _endian_ = '>'
+
+ def to_str(self):
+ raise NotImplementedError
+
+ def to_fileobj(self, f):
+ f.write(self.to_str())
+
+ def to_mmap(self, mm, ptr):
+ mm[ptr:ptr+self._size_] = self.to_str()
+
+
+# This defines a class with a custom metaclass; we'd normally write
+# "class Packable(BasePackable, metaclass=MetaPackable)", but that
+# syntax is not valid in Python 2 (and likewise the Python 2 syntax
+# is not valid in Python 3).
+def _make():
+ def to_str(self):
+ cls = type(self)
+ endian = getattr(self, '_endian_', cls._endian_)
+ return struct.pack(endian + cls._format_, self)
+ return MetaPackable("Packable", (BasePackable,), {'to_str': to_str})
+Packable = _make()
+del _make
+
+def pypackable(name, pytype, format):
+ """
+ Create a "mix-in" class with a python type and a
+ Packable with the given struct format
+ """
+ size, items = _formatinfo(format)
+ return type(Packable)(name, (pytype, Packable), {
+ '_format_': format,
+ '_size_': size,
+ '_items_': items,
+ })
+
+def _formatinfo(format):
+ """
+ Calculate the size and number of items in a struct format.
+ """
+ size = struct.calcsize(format)
+ return size, len(struct.unpack(format, b'\x00' * size))
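+
+# Hypothetical usage sketch, not part of macholib: a type built with
+# pypackable packs with the default big-endian format and round-trips
+# through to_str/from_str.
+def _example_pypackable():
+    p_be32 = pypackable('p_be32', int, 'I')
+    value = p_be32(0x12345678)
+    packed = value.to_str()              # b'\x12\x34\x56\x78'
+    assert p_be32.from_str(packed) == value
+    return packed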
+
+class MetaStructure(MetaPackable):
+ """
+ The metaclass of Structure objects that does all the magic.
+
+ Since we can assume that all Structures have a fixed size,
+ we can do a bunch of calculations up front and pack or
+ unpack the whole thing in one struct call.
+ """
+ def __new__(cls, clsname, bases, dct):
+ fields = dct['_fields_']
+ names = []
+ types = []
+ structmarks = []
+ format = ''
+ items = 0
+ size = 0
+
+ def struct_property(name, typ):
+ def _get(self):
+ return self._objects_[name]
+ def _set(self, obj):
+ if type(obj) is not typ:
+ obj = typ(obj)
+ self._objects_[name] = obj
+ return property(_get, _set, typ.__name__)
+
+ for name, typ in fields:
+ dct[name] = struct_property(name, typ)
+ names.append(name)
+ types.append(typ)
+ format += typ._format_
+ size += typ._size_
+ if (typ._items_ > 1):
+ structmarks.append((items, typ._items_, typ))
+ items += typ._items_
+
+ dct['_structmarks_'] = structmarks
+ dct['_names_'] = names
+ dct['_types_'] = types
+ dct['_size_'] = size
+ dct['_items_'] = items
+ dct['_format_'] = format
+ return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)
+
+ def from_tuple(cls, tpl, **kw):
+ values = []
+ current = 0
+ for begin, length, typ in cls._structmarks_:
+ if begin > current:
+ values.extend(tpl[current:begin])
+ current = begin + length
+ values.append(typ.from_tuple(tpl[begin:current], **kw))
+ values.extend(tpl[current:])
+ return cls(*values, **kw)
+
+# See metaclass discussion earlier in this file
+def _make():
+ class_dict={}
+ class_dict['_fields_'] = ()
+
+ def as_method(function):
+ class_dict[function.__name__] = function
+
+ @as_method
+ def __init__(self, *args, **kwargs):
+ if len(args) == 1 and not kwargs and type(args[0]) is type(self):
+ kwargs = args[0]._objects_
+ args = ()
+ self._objects_ = {}
+ iargs = chain(izip(self._names_, args), kwargs.items())
+ for key, value in iargs:
+ if key not in self._names_ and key != "_endian_":
+ raise TypeError
+ setattr(self, key, value)
+ for key, typ in izip(self._names_, self._types_):
+ if key not in self._objects_:
+ self._objects_[key] = typ()
+
+ @as_method
+ def _get_packables(self):
+ for obj in imap(self._objects_.__getitem__, self._names_):
+ if obj._items_ == 1:
+ yield obj
+ else:
+ for obj in obj._get_packables():
+ yield obj
+
+ @as_method
+ def to_str(self):
+ return struct.pack(self._endian_ + self._format_, *self._get_packables())
+
+ @as_method
+ def __cmp__(self, other):
+ if type(other) is not type(self):
+ raise TypeError('Cannot compare objects of type %r to objects of type %r' % (type(other), type(self)))
+ if sys.version_info[0] == 2:
+ _cmp = cmp
+ else:
+ def _cmp(a, b):
+ if a < b:
+ return -1
+ elif a > b:
+ return 1
+ elif a == b:
+ return 0
+ else:
+ raise TypeError()
+
+ for cmpval in starmap(_cmp, izip(self._get_packables(), other._get_packables())):
+ if cmpval != 0:
+ return cmpval
+ return 0
+
+ @as_method
+ def __eq__(self, other):
+ r = self.__cmp__(other)
+ return r == 0
+
+ @as_method
+ def __ne__(self, other):
+ r = self.__cmp__(other)
+ return r != 0
+
+ @as_method
+ def __lt__(self, other):
+ r = self.__cmp__(other)
+ return r < 0
+
+ @as_method
+ def __le__(self, other):
+ r = self.__cmp__(other)
+ return r <= 0
+
+ @as_method
+ def __gt__(self, other):
+ r = self.__cmp__(other)
+ return r > 0
+
+ @as_method
+ def __ge__(self, other):
+ r = self.__cmp__(other)
+ return r >= 0
+
+ return MetaStructure("Structure", (BasePackable,), class_dict)
+Structure = _make()
+del _make
+
+try:
+ long
+except NameError:
+ long = int
+
+# export common packables with predictable names
+p_char = pypackable('p_char', bytes, 'c')
+p_int8 = pypackable('p_int8', int, 'b')
+p_uint8 = pypackable('p_uint8', int, 'B')
+p_int16 = pypackable('p_int16', int, 'h')
+p_uint16 = pypackable('p_uint16', int, 'H')
+p_int32 = pypackable('p_int32', int, 'i')
+p_uint32 = pypackable('p_uint32', long, 'I')
+p_int64 = pypackable('p_int64', long, 'q')
+p_uint64 = pypackable('p_uint64', long, 'Q')
+p_float = pypackable('p_float', float, 'f')
+p_double = pypackable('p_double', float, 'd')
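+
+# Hypothetical sketch, not part of macholib: a Structure subclass gets
+# its packing behavior from MetaStructure; two p_uint16 fields pack into
+# four big-endian bytes and unpack to an equal instance.
+def _example_structure_roundtrip():
+    class point(Structure):
+        _fields_ = (('x', p_uint16), ('y', p_uint16))
+    p = point(x=1, y=2)
+    packed = p.to_str()                  # b'\x00\x01\x00\x02'
+    assert point.from_str(packed) == p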
+
+# Deprecated names; emitting a real deprecation warning for these would
+# require extra trickery.
+p_byte = p_int8
+p_ubyte = p_uint8
+p_short = p_int16
+p_ushort = p_uint16
+p_int = p_long = p_int32
+p_uint = p_ulong = p_uint32
+p_longlong = p_int64
+p_ulonglong = p_uint64
diff --git a/python/macholib/macholib/util.py b/python/macholib/macholib/util.py
new file mode 100644
index 000000000..7f954b6f7
--- /dev/null
+++ b/python/macholib/macholib/util.py
@@ -0,0 +1,245 @@
+import os
+import sys
+import stat
+import operator
+import struct
+import shutil
+
+#from modulegraph.util import *
+
+from macholib import mach_o
+
+MAGIC = [
+ struct.pack('!L', getattr(mach_o, 'MH_' + _))
+ for _ in ['MAGIC', 'CIGAM', 'MAGIC_64', 'CIGAM_64']
+]
+FAT_MAGIC_BYTES = struct.pack('!L', mach_o.FAT_MAGIC)
+MAGIC_LEN = 4
+STRIPCMD = ['/usr/bin/strip', '-x', '-S', '-']
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+
+def fsencoding(s, encoding=sys.getfilesystemencoding()):
+ """
+ Ensure the given argument is in filesystem encoding (not unicode)
+ """
+ if isinstance(s, unicode):
+ s = s.encode(encoding)
+ return s
+
+def move(src, dst):
+ """
+ move that ensures filesystem encoding of paths
+ """
+ shutil.move(fsencoding(src), fsencoding(dst))
+
+def copy2(src, dst):
+ """
+ copy2 that ensures filesystem encoding of paths
+ """
+ shutil.copy2(fsencoding(src), fsencoding(dst))
+
+def flipwritable(fn, mode=None):
+ """
+ Flip the writability of a file and return the old mode. Returns None
+ if the file is already writable.
+ """
+ if os.access(fn, os.W_OK):
+ return None
+ old_mode = os.stat(fn).st_mode
+ os.chmod(fn, stat.S_IWRITE | old_mode)
+ return old_mode
+
+class fileview(object):
+ """
+ A proxy for file-like objects that exposes a given view of a file
+ """
+
+ def __init__(self, fileobj, start, size):
+ self._fileobj = fileobj
+ self._start = start
+ self._end = start + size
+
+ def __repr__(self):
+ return '<fileview [%d, %d] %r>' % (
+ self._start, self._end, self._fileobj)
+
+ def tell(self):
+ return self._fileobj.tell() - self._start
+
+ def _checkwindow(self, seekto, op):
+ if not (self._start <= seekto <= self._end):
+ raise IOError("%s to offset %d is outside window [%d, %d]" % (
+ op, seekto, self._start, self._end))
+
+ def seek(self, offset, whence=0):
+ seekto = offset
+ if whence == 0:
+ seekto += self._start
+ elif whence == 1:
+ seekto += self._fileobj.tell()
+ elif whence == 2:
+ seekto += self._end
+ else:
+ raise IOError("Invalid whence argument to seek: %r" % (whence,))
+ self._checkwindow(seekto, 'seek')
+ self._fileobj.seek(seekto)
+
+ def write(self, bytes):
+ here = self._fileobj.tell()
+ self._checkwindow(here, 'write')
+ self._checkwindow(here + len(bytes), 'write')
+ self._fileobj.write(bytes)
+
+ def read(self, size=sys.maxsize):
+ if size < 0:
+ raise ValueError("Invalid size %s while reading from %s", size, self._fileobj)
+ here = self._fileobj.tell()
+ self._checkwindow(here, 'read')
+ bytes = min(size, self._end - here)
+ return self._fileobj.read(bytes)
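+
+# Hypothetical usage sketch, not part of macholib: a fileview over the
+# middle of a buffer only sees bytes inside its window.
+def _example_fileview():
+    import io
+    view = fileview(io.BytesIO(b'0123456789'), 4, 3)
+    view.seek(0)                 # offset 0 of the view is offset 4 of the file
+    assert view.read() == b'456'
+    assert view.tell() == 3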
+
+
+def mergecopy(src, dest):
+ """
+ copy2, but only if the destination isn't up to date
+ """
+ if os.path.exists(dest) and os.stat(dest).st_mtime >= os.stat(src).st_mtime:
+ return
+
+ copy2(src, dest)
+
+def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
+ """
+ Recursively merge a directory tree using mergecopy().
+ """
+ src = fsencoding(src)
+ dst = fsencoding(dst)
+ if srcbase is None:
+ srcbase = src
+ names = map(fsencoding, os.listdir(src))
+ try:
+ os.makedirs(dst)
+ except OSError:
+ pass
+ errors = []
+ for name in names:
+ srcname = os.path.join(src, name)
+ dstname = os.path.join(dst, name)
+ if condition is not None and not condition(srcname):
+ continue
+ try:
+ if os.path.islink(srcname):
+ # XXX: This is naive at best, should check srcbase(?)
+ realsrc = os.readlink(srcname)
+ os.symlink(realsrc, dstname)
+ elif os.path.isdir(srcname):
+ mergetree(srcname, dstname,
+ condition=condition, copyfn=copyfn, srcbase=srcbase)
+ else:
+ copyfn(srcname, dstname)
+ except (IOError, os.error) as why:
+ errors.append((srcname, dstname, why))
+ if errors:
+ raise IOError(errors)
+
+def sdk_normalize(filename):
+ """
+ Normalize a path to strip out the SDK portion, normally so that it
+ can be decided whether it is in a system path or not.
+ """
+ if filename.startswith('/Developer/SDKs/'):
+ pathcomp = filename.split('/')
+ del pathcomp[1:4]
+ filename = '/'.join(pathcomp)
+ return filename
+
+NOT_SYSTEM_FILES=[]
+
+def in_system_path(filename):
+ """
+ Return True if the file is in a system path
+ """
+ fn = sdk_normalize(os.path.realpath(filename))
+ if fn.startswith('/usr/local/'):
+ return False
+ elif fn.startswith('/System/') or fn.startswith('/usr/'):
+ if fn in NOT_SYSTEM_FILES:
+ return False
+ return True
+ else:
+ return False
+
+def has_filename_filter(module):
+ """
+ Return False if the module does not have a filename attribute
+ """
+ return getattr(module, 'filename', None) is not None
+
+def get_magic():
+ """
+ Get a list of valid Mach-O header signatures, not including the fat header
+ """
+ return MAGIC
+
+def is_platform_file(path):
+ """
+ Return True if the file is Mach-O
+ """
+ if not os.path.exists(path) or os.path.islink(path):
+ return False
+ # If the header is fat, we need to read into the first arch
+ with open(path, 'rb') as fileobj:
+ bytes = fileobj.read(MAGIC_LEN)
+ if bytes == FAT_MAGIC_BYTES:
+ # Read in the fat header
+ fileobj.seek(0)
+ header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>')
+ if header.nfat_arch < 1:
+ return False
+ # Read in the first fat arch header
+ arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>')
+ fileobj.seek(arch.offset)
+ # Read magic off the first header
+ bytes = fileobj.read(MAGIC_LEN)
+ for magic in MAGIC:
+ if bytes == magic:
+ return True
+ return False
+
+def iter_platform_files(dst):
+ """
+ Walk a directory and yield each full path that is a Mach-O file
+ """
+ for root, dirs, files in os.walk(dst):
+ for fn in files:
+ fn = os.path.join(root, fn)
+ if is_platform_file(fn):
+ yield fn
+
+def strip_files(files, argv_max=(256 * 1024)):
+ """
+ Strip a list of files
+ """
+ tostrip = [(fn, flipwritable(fn)) for fn in files]
+ while tostrip:
+ cmd = list(STRIPCMD)
+ flips = []
+ pathlen = sum([len(s) + 1 for s in cmd])
+ while pathlen < argv_max:
+ if not tostrip:
+ break
+ added, flip = tostrip.pop()
+ pathlen += len(added) + 1
+ cmd.append(added)
+ flips.append((added, flip))
+ else:
+ cmd.pop()
+ tostrip.append(flips.pop())
+ os.spawnv(os.P_WAIT, cmd[0], cmd)
+ for args in flips:
+ flipwritable(*args)
diff --git a/python/macholib/macholib_tests/__init__.py b/python/macholib/macholib_tests/__init__.py
new file mode 100644
index 000000000..b5fe939b4
--- /dev/null
+++ b/python/macholib/macholib_tests/__init__.py
@@ -0,0 +1 @@
+""" macholib_tests package """
diff --git a/python/macholib/macholib_tests/binaries/src/build.py b/python/macholib/macholib_tests/binaries/src/build.py
new file mode 100644
index 000000000..e2a179c78
--- /dev/null
+++ b/python/macholib/macholib_tests/binaries/src/build.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+import os, sys
+
+
+class Builder (object):
+ def __init__(self, args):
+ self.output_dir = args[1]
+
+
+ def run(self):
+ for nm in dir(type(self)):
+ if nm.startswith('build_'):
+ getattr(self, nm)()
+
+ def build_executable(self):
+ print "Building plain executable"
+ pass
+
+
+builder = Builder(sys.argv)
+builder.run()
diff --git a/python/macholib/macholib_tests/test_MachO.py b/python/macholib/macholib_tests/test_MachO.py
new file mode 100644
index 000000000..730007e12
--- /dev/null
+++ b/python/macholib/macholib_tests/test_MachO.py
@@ -0,0 +1,15 @@
+from macholib import MachO
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class TestMachO (unittest.TestCase):
+ @unittest.expectedFailure
+ def test_missing(self):
+ self.fail("tests are missing")
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_MachOGraph.py b/python/macholib/macholib_tests/test_MachOGraph.py
new file mode 100644
index 000000000..3f8e4ea19
--- /dev/null
+++ b/python/macholib/macholib_tests/test_MachOGraph.py
@@ -0,0 +1,15 @@
+from macholib import MachOGraph
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class TestMachOGraph (unittest.TestCase):
+ @unittest.expectedFailure
+ def test_missing(self):
+ self.fail("tests are missing")
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_MachOStandalone.py b/python/macholib/macholib_tests/test_MachOStandalone.py
new file mode 100644
index 000000000..aa431e517
--- /dev/null
+++ b/python/macholib/macholib_tests/test_MachOStandalone.py
@@ -0,0 +1,15 @@
+from macholib import MachOStandalone
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class TestMachOStandalone (unittest.TestCase):
+ @unittest.expectedFailure
+ def test_missing(self):
+ self.fail("tests are missing")
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_SymbolTable.py b/python/macholib/macholib_tests/test_SymbolTable.py
new file mode 100644
index 000000000..8970ca0a9
--- /dev/null
+++ b/python/macholib/macholib_tests/test_SymbolTable.py
@@ -0,0 +1,15 @@
+from macholib import SymbolTable
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class TestSymbolTable (unittest.TestCase):
+ @unittest.expectedFailure
+ def test_missing(self):
+ self.fail("tests are missing")
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_command_line.py b/python/macholib/macholib_tests/test_command_line.py
new file mode 100644
index 000000000..a9cfd6f10
--- /dev/null
+++ b/python/macholib/macholib_tests/test_command_line.py
@@ -0,0 +1,147 @@
+from macholib import macho_dump
+from macholib import macho_find
+from macholib import _cmdline
+from macholib import util
+
+import sys
+import shutil
+import os
+
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+class TestCmdLine (unittest.TestCase):
+
+ # This test is no longer valid:
+ def no_test_main_is_shared(self):
+ self.assertTrue(macho_dump.main is _cmdline.main)
+ self.assertTrue(macho_find.main is _cmdline.main)
+
+ def test_check_file(self):
+ record = []
+ def record_cb(fp, path):
+ record.append((fp, path))
+
+ self.assertEqual(_cmdline.check_file(sys.stdout, '/bin/sh', record_cb), 0)
+ self.assertEqual(record, [(sys.stdout, '/bin/sh')])
+
+ saved_stderr = sys.stderr
+ saved_argv = sys.argv
+ try:
+ sys.stderr = StringIO()
+ sys.argv = ['macho_test']
+
+ record[:] = []
+ self.assertEqual(_cmdline.check_file(sys.stdout, '/bin/no-shell', record_cb), 1)
+ self.assertEqual(record, [])
+ self.assertEqual(sys.stderr.getvalue(), "macho_test: /bin/no-shell: No such file or directory\n")
+ self.assertEqual(record, [])
+
+ shutil.copy('/bin/sh', 'test.exec')
+ os.chmod('test.exec', 0)
+
+ sys.stderr = StringIO()
+ self.assertEqual(_cmdline.check_file(sys.stdout, 'test.exec', record_cb), 1)
+ self.assertEqual(record, [])
+ self.assertEqual(sys.stderr.getvalue(), "macho_test: test.exec: [Errno 13] Permission denied: 'test.exec'\n")
+ self.assertEqual(record, [])
+
+
+ finally:
+            sys.stderr = saved_stderr
+ sys.argv = saved_argv
+ if os.path.exists('test.exec'):
+ os.unlink('test.exec')
+
+ def test_shared_main(self):
+
+ saved_stderr = sys.stderr
+ saved_argv = sys.argv
+ try:
+ sys.stderr = StringIO()
+
+ sys.argv = ['macho_tool']
+
+ self.assertEqual(_cmdline.main(lambda *args: None), 1)
+ self.assertEqual(sys.stderr.getvalue(), 'Usage: macho_tool filename...\n')
+
+ names = []
+ def record_names(fp, name):
+ self.assertEqual(fp, sys.stdout)
+ names.append(name)
+
+
+ sys.stderr = StringIO()
+ sys.argv = ['macho_tool', '/bin/sh']
+ self.assertEqual(_cmdline.main(record_names), 0)
+ self.assertEqual(sys.stderr.getvalue(), '')
+ self.assertEqual(names, ['/bin/sh'])
+
+ names = []
+ sys.stderr = StringIO()
+ sys.argv = ['macho_tool', '/bin/sh', '/bin/ls']
+ self.assertEqual(_cmdline.main(record_names), 0)
+ self.assertEqual(sys.stderr.getvalue(), '')
+ self.assertEqual(names, ['/bin/sh', '/bin/ls'])
+
+ names = []
+ sys.stderr = StringIO()
+ sys.argv = ['macho_tool', '/bin']
+ self.assertEqual(_cmdline.main(record_names), 0)
+ self.assertEqual(sys.stderr.getvalue(), '')
+ names.sort()
+ dn = '/bin'
+ real_names = [
+ os.path.join(dn, fn) for fn in os.listdir(dn)
+ if util.is_platform_file(os.path.join(dn, fn)) ]
+ real_names.sort()
+
+ self.assertEqual(names, real_names)
+
+ finally:
+ sys.stderr = saved_stderr
+ sys.argv = saved_argv
+
+ def test_macho_find(self):
+ fp = StringIO()
+ macho_find.print_file(fp, "file1")
+ macho_find.print_file(fp, "file2")
+ self.assertEqual(fp.getvalue(), "file1\nfile2\n")
+
+ def test_macho_dump(self):
+ fp = StringIO()
+ macho_dump.print_file(fp, "/bin/sh")
+ lines = fp.getvalue().splitlines()
+
+ self.assertEqual(lines[0], "/bin/sh")
+ self.assertTrue(len(lines) > 3)
+
+ self.assertEqual(lines[-1], '')
+ del lines[-1]
+
+ idx = 1
+ while idx < len(lines):
+ self.assertTrue(lines[idx].startswith(' [MachOHeader endian'))
+ idx+=1
+
+ lc = 0
+ while idx < len(lines):
+ if not lines[idx].startswith('\t'):
+ break
+
+ lc +=1
+ self.assertTrue(os.path.exists(lines[idx].lstrip()))
+ idx += 1
+
+ self.assertTrue(lc > 1)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_dyld.py b/python/macholib/macholib_tests/test_dyld.py
new file mode 100644
index 000000000..7b41758d9
--- /dev/null
+++ b/python/macholib/macholib_tests/test_dyld.py
@@ -0,0 +1,450 @@
+from macholib import dyld
+
+import sys
+import os
+import functools
+
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class DyldPatcher (object):
+ def __init__(self):
+ self.calls = []
+ self.patched = {}
+
+ def clear_calls(self):
+ self.calls = []
+
+ def cleanup(self):
+ for name in self.patched:
+ setattr(dyld, name, self.patched[name])
+
+ def log_calls(self, name):
+ if name in self.patched:
+ return
+
+ self.patched[name] = getattr(dyld, name)
+
+
+ @functools.wraps(self.patched[name])
+ def wrapper(*args, **kwds):
+ self.calls.append((name, args, kwds))
+ return self.patched[name](*args, **kwds)
+
+ setattr(dyld, name, wrapper)
+
+
+class TestDyld (unittest.TestCase):
+ if not hasattr(unittest.TestCase, 'assertIsInstance'):
+ def assertIsInstance(self, value, types, message=None):
+ self.assertTrue(isinstance(value, types),
+ message or "%r is not an instance of %r"%(value, types))
+
+ def setUp(self):
+ self._environ = os.environ
+ os.environ = dict([(k, os.environ[k]) for k in os.environ if 'DYLD' not in k])
+ self._dyld_env = dyld._dyld_env
+ self._dyld_image_suffix = dyld.dyld_image_suffix
+
+ def tearDown(self):
+ dyld._dyld_env = self._dyld_env
+ dyld.dyld_image_suffix = self._dyld_image_suffix
+ os.environ = self._environ
+
+ if sys.version_info[0] == 2:
+ def test_ensure_utf8(self):
+ self.assertEqual(dyld._ensure_utf8("hello"), "hello")
+ self.assertEqual(dyld._ensure_utf8("hello".decode('utf-8')), "hello")
+ self.assertEqual(dyld._ensure_utf8(None), None)
+
+ else:
+ def test_ensure_utf8(self):
+ self.assertEqual(dyld._ensure_utf8("hello"), "hello")
+ self.assertEqual(dyld._ensure_utf8(None), None)
+ self.assertRaises(ValueError, dyld._ensure_utf8, b"hello")
+
+ def test__dyld_env(self):
+ new = os.environ
+
+ self.assertEqual(dyld._dyld_env(None, 'DYLD_FOO'), [])
+ self.assertEqual(dyld._dyld_env({'DYLD_FOO':'bar'}, 'DYLD_FOO'), ['bar'])
+ self.assertEqual(dyld._dyld_env({'DYLD_FOO':'bar:baz'}, 'DYLD_FOO'), ['bar', 'baz'])
+ self.assertEqual(dyld._dyld_env({}, 'DYLD_FOO'), [])
+ self.assertEqual(dyld._dyld_env({'DYLD_FOO':''}, 'DYLD_FOO'), [])
+ os.environ['DYLD_FOO'] = 'foobar'
+ self.assertEqual(dyld._dyld_env(None, 'DYLD_FOO'), ['foobar'])
+ os.environ['DYLD_FOO'] = 'foobar:nowhere'
+ self.assertEqual(dyld._dyld_env(None, 'DYLD_FOO'), ['foobar', 'nowhere'])
+ self.assertEqual(dyld._dyld_env({'DYLD_FOO':'bar'}, 'DYLD_FOO'), ['bar'])
+ self.assertEqual(dyld._dyld_env({}, 'DYLD_FOO'), [])
+
+
+ self.assertEqual(dyld.dyld_image_suffix(), None)
+ self.assertEqual(dyld.dyld_image_suffix(None), None)
+ self.assertEqual(dyld.dyld_image_suffix({'DYLD_IMAGE_SUFFIX':'bar'}), 'bar')
+ os.environ['DYLD_IMAGE_SUFFIX'] = 'foobar'
+ self.assertEqual(dyld.dyld_image_suffix(), 'foobar')
+ self.assertEqual(dyld.dyld_image_suffix(None), 'foobar')
+
+ def test_dyld_helpers(self):
+ record = []
+ def fake__dyld_env(env, key):
+ record.append((env, key))
+ return ['hello']
+
+ dyld._dyld_env = fake__dyld_env
+ self.assertEqual(dyld.dyld_framework_path(), ['hello'])
+ self.assertEqual(dyld.dyld_framework_path({}), ['hello'])
+
+ self.assertEqual(dyld.dyld_library_path(), ['hello'])
+ self.assertEqual(dyld.dyld_library_path({}), ['hello'])
+
+ self.assertEqual(dyld.dyld_fallback_framework_path(), ['hello'])
+ self.assertEqual(dyld.dyld_fallback_framework_path({}), ['hello'])
+
+ self.assertEqual(dyld.dyld_fallback_library_path(), ['hello'])
+ self.assertEqual(dyld.dyld_fallback_library_path({}), ['hello'])
+
+ self.assertEqual(record, [
+ (None, 'DYLD_FRAMEWORK_PATH'),
+ ({}, 'DYLD_FRAMEWORK_PATH'),
+ (None, 'DYLD_LIBRARY_PATH'),
+ ({}, 'DYLD_LIBRARY_PATH'),
+ (None, 'DYLD_FALLBACK_FRAMEWORK_PATH'),
+ ({}, 'DYLD_FALLBACK_FRAMEWORK_PATH'),
+ (None, 'DYLD_FALLBACK_LIBRARY_PATH'),
+ ({}, 'DYLD_FALLBACK_LIBRARY_PATH'),
+ ])
+
+ def test_dyld_suffix_search(self):
+ envs = [object()]
+ def fake_suffix(env):
+ envs[0] = env
+ return None
+ dyld.dyld_image_suffix = fake_suffix
+
+ iterator = [
+ '/usr/lib/foo',
+ '/usr/lib/foo.dylib',
+ ]
+ result = dyld.dyld_image_suffix_search(iter(iterator))
+ self.assertEqual(list(result), iterator)
+ self.assertEqual(envs[0], None)
+
+ result = dyld.dyld_image_suffix_search(iter(iterator), {})
+ self.assertEqual(list(result), iterator)
+ self.assertEqual(envs[0], {})
+
+ envs = [object()]
+ def fake_suffix(env):
+ envs[0] = env
+ return '_profile'
+ dyld.dyld_image_suffix = fake_suffix
+
+ iterator = [
+ '/usr/lib/foo',
+ '/usr/lib/foo.dylib',
+ ]
+ result = dyld.dyld_image_suffix_search(iter(iterator))
+ self.assertEqual(list(result), [
+ '/usr/lib/foo_profile',
+ '/usr/lib/foo',
+ '/usr/lib/foo_profile.dylib',
+ '/usr/lib/foo.dylib',
+ ])
+ self.assertEqual(envs[0], None)
+
+ result = dyld.dyld_image_suffix_search(iter(iterator), {})
+ self.assertEqual(list(result), [
+ '/usr/lib/foo_profile',
+ '/usr/lib/foo',
+ '/usr/lib/foo_profile.dylib',
+ '/usr/lib/foo.dylib',
+ ])
+ self.assertEqual(envs[0], {})
+
+ def test_override_search(self):
+ os.environ['DYLD_FRAMEWORK_PATH'] = ''
+ os.environ['DYLD_LIBRARY_PATH'] = ''
+
+ self.assertEqual(
+ list(dyld.dyld_override_search("foo.dyld", None)), [])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/usr/lib/libfoo.dyld", None)), [])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/Library/Frameworks/Python.framework/Versions/Current/Python", None)), [])
+
+
+ os.environ['DYLD_FRAMEWORK_PATH'] = '/Foo/Frameworks:/Bar/Frameworks'
+ os.environ['DYLD_LIBRARY_PATH'] = ''
+ self.assertEqual(
+ list(dyld.dyld_override_search("foo.dyld", None)), [])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/usr/lib/libfoo.dyld", None)), [])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/Library/Frameworks/Python.framework/Versions/Current/Python", None)), [
+ '/Foo/Frameworks/Python.framework/Versions/Current/Python',
+ '/Bar/Frameworks/Python.framework/Versions/Current/Python',
+ ])
+
+ os.environ['DYLD_FRAMEWORK_PATH'] = ''
+ os.environ['DYLD_LIBRARY_PATH'] = '/local/lib:/remote/lib'
+ self.assertEqual(
+ list(dyld.dyld_override_search("foo.dyld", None)), [
+ '/local/lib/foo.dyld',
+ '/remote/lib/foo.dyld',
+ ])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/usr/lib/libfoo.dyld", None)), [
+ '/local/lib/libfoo.dyld',
+ '/remote/lib/libfoo.dyld',
+ ])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/Library/Frameworks/Python.framework/Versions/Current/Python", None)), [
+ '/local/lib/Python',
+ '/remote/lib/Python',
+ ])
+
+ os.environ['DYLD_FRAMEWORK_PATH'] = '/Foo/Frameworks:/Bar/Frameworks'
+ os.environ['DYLD_LIBRARY_PATH'] = '/local/lib:/remote/lib'
+ self.assertEqual(
+ list(dyld.dyld_override_search("foo.dyld", None)), [
+ '/local/lib/foo.dyld',
+ '/remote/lib/foo.dyld',
+ ])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/usr/lib/libfoo.dyld", None)), [
+ '/local/lib/libfoo.dyld',
+ '/remote/lib/libfoo.dyld',
+ ])
+ self.assertEqual(
+ list(dyld.dyld_override_search("/Library/Frameworks/Python.framework/Versions/Current/Python", None)), [
+ '/Foo/Frameworks/Python.framework/Versions/Current/Python',
+ '/Bar/Frameworks/Python.framework/Versions/Current/Python',
+ '/local/lib/Python',
+ '/remote/lib/Python',
+ ])
+
+ def test_executable_path_search(self):
+ self.assertEqual(list(dyld.dyld_executable_path_search("/usr/lib/foo.dyld", "/usr/bin")), [])
+ self.assertEqual(
+ list(dyld.dyld_executable_path_search("@executable_path/foo.dyld", "/usr/bin")),
+ ['/usr/bin/foo.dyld'])
+ self.assertEqual(
+ list(dyld.dyld_executable_path_search("@executable_path/../../lib/foo.dyld", "/usr/bin")),
+ ['/usr/bin/../../lib/foo.dyld'])
+
+ def test_default_search(self):
+ self.assertEqual(
+ list(dyld.dyld_default_search('/usr/lib/mylib.dylib', None)), [
+ '/usr/lib/mylib.dylib',
+ os.path.join(os.path.expanduser('~/lib'), 'mylib.dylib'),
+ '/usr/local/lib/mylib.dylib',
+ '/lib/mylib.dylib',
+ '/usr/lib/mylib.dylib',
+
+ ])
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/Library/Frameworks/Python.framework/Versions/2.7/Python', None)), [
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ os.path.join(os.path.expanduser('~/Library/Frameworks'), 'Python.framework/Versions/2.7/Python'),
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/Network/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/System/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ os.path.join(os.path.expanduser('~/lib'), 'Python'),
+ '/usr/local/lib/Python',
+ '/lib/Python',
+ '/usr/lib/Python',
+ ])
+
+
+
+
+ os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '/local/lib:/network/lib'
+ os.environ['DYLD_FALLBACK_FRAMEWORK_PATH'] = ''
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/usr/lib/mylib.dylib', None)), [
+ '/usr/lib/mylib.dylib',
+ '/local/lib/mylib.dylib',
+ '/network/lib/mylib.dylib',
+ ])
+
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/Library/Frameworks/Python.framework/Versions/2.7/Python', None)), [
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ os.path.join(os.path.expanduser('~/Library/Frameworks'), 'Python.framework/Versions/2.7/Python'),
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/Network/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/System/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/local/lib/Python',
+ '/network/lib/Python',
+ ])
+
+
+ os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = ''
+ os.environ['DYLD_FALLBACK_FRAMEWORK_PATH'] = '/MyFrameworks:/OtherFrameworks'
+
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/usr/lib/mylib.dylib', None)), [
+ '/usr/lib/mylib.dylib',
+ os.path.join(os.path.expanduser('~/lib'), 'mylib.dylib'),
+ '/usr/local/lib/mylib.dylib',
+ '/lib/mylib.dylib',
+ '/usr/lib/mylib.dylib',
+
+ ])
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/Library/Frameworks/Python.framework/Versions/2.7/Python', None)), [
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/MyFrameworks/Python.framework/Versions/2.7/Python',
+ '/OtherFrameworks/Python.framework/Versions/2.7/Python',
+ os.path.join(os.path.expanduser('~/lib'), 'Python'),
+ '/usr/local/lib/Python',
+ '/lib/Python',
+ '/usr/lib/Python',
+ ])
+
+ os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '/local/lib:/network/lib'
+ os.environ['DYLD_FALLBACK_FRAMEWORK_PATH'] = '/MyFrameworks:/OtherFrameworks'
+
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/usr/lib/mylib.dylib', None)), [
+ '/usr/lib/mylib.dylib',
+ '/local/lib/mylib.dylib',
+ '/network/lib/mylib.dylib',
+
+ ])
+
+ self.assertEqual(
+ list(dyld.dyld_default_search('/Library/Frameworks/Python.framework/Versions/2.7/Python', None)), [
+ '/Library/Frameworks/Python.framework/Versions/2.7/Python',
+ '/MyFrameworks/Python.framework/Versions/2.7/Python',
+ '/OtherFrameworks/Python.framework/Versions/2.7/Python',
+ '/local/lib/Python',
+ '/network/lib/Python',
+ ])
+
+ def test_dyld_find(self):
+ result = dyld.dyld_find('/usr/lib/libSystem.dylib')
+ self.assertEqual(result, '/usr/lib/libSystem.dylib')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ result = dyld.dyld_find(b'/usr/lib/libSystem.dylib'.decode('ascii'))
+ self.assertEqual(result, '/usr/lib/libSystem.dylib')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ patcher = DyldPatcher()
+ try:
+ patcher.log_calls('dyld_image_suffix_search')
+ patcher.log_calls('dyld_override_search')
+ patcher.log_calls('dyld_executable_path_search')
+ patcher.log_calls('dyld_default_search')
+
+ result = dyld.dyld_find('/usr/lib/libSystem.dylib')
+ self.assertEqual(patcher.calls[:-1], [
+ ('dyld_override_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ('dyld_executable_path_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ('dyld_default_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ])
+ self.assertEqual(patcher.calls[-1][0], 'dyld_image_suffix_search')
+ patcher.clear_calls()
+
+ result = dyld.dyld_find('/usr/lib/libSystem.dylib', env=None)
+ self.assertEqual(patcher.calls[:-1], [
+ ('dyld_override_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ('dyld_executable_path_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ('dyld_default_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ])
+ self.assertEqual(patcher.calls[-1][0], 'dyld_image_suffix_search')
+ patcher.clear_calls()
+
+ result = dyld.dyld_find('/usr/lib/libSystem.dylib', env={})
+ self.assertEqual(patcher.calls[:-1], [
+ ('dyld_override_search', ('/usr/lib/libSystem.dylib', {}), {}),
+ ('dyld_executable_path_search', ('/usr/lib/libSystem.dylib', None), {}),
+ ('dyld_default_search', ('/usr/lib/libSystem.dylib', {}), {}),
+ ])
+ self.assertEqual(patcher.calls[-1][0], 'dyld_image_suffix_search')
+ patcher.clear_calls()
+
+ result = dyld.dyld_find('/usr/lib/libSystem.dylib', executable_path="/opt/py2app/bin", env={})
+ self.assertEqual(patcher.calls[:-1], [
+ ('dyld_override_search', ('/usr/lib/libSystem.dylib', {}), {}),
+ ('dyld_executable_path_search', ('/usr/lib/libSystem.dylib', "/opt/py2app/bin"), {}),
+ ('dyld_default_search', ('/usr/lib/libSystem.dylib', {}), {}),
+ ])
+ self.assertEqual(patcher.calls[-1][0], 'dyld_image_suffix_search')
+ patcher.clear_calls()
+
+ finally:
+ patcher.cleanup()
+
+ def test_framework_find(self):
+ result = dyld.framework_find('/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa')
+ self.assertEqual(result, '/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ result = dyld.framework_find(b'/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa'.decode('latin1'))
+ self.assertEqual(result, '/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ result = dyld.framework_find('Cocoa.framework')
+ self.assertEqual(result, '/System/Library/Frameworks/Cocoa.framework/Cocoa')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ result = dyld.framework_find('Cocoa')
+ self.assertEqual(result, '/System/Library/Frameworks/Cocoa.framework/Cocoa')
+ self.assertIsInstance(result, str) # bytes on 2.x, unicode on 3.x
+
+ patcher = DyldPatcher()
+ try:
+ patcher.log_calls('dyld_find')
+
+ result = dyld.framework_find('/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa')
+ self.assertEqual(patcher.calls, [
+ ('dyld_find', ('/System/Library/Frameworks/Cocoa.framework/Versions/Current/Cocoa',),
+ {'env':None, 'executable_path': None}),
+ ])
+ patcher.clear_calls()
+
+ result = dyld.framework_find('Cocoa')
+ self.assertEqual(patcher.calls, [
+ ('dyld_find', ('Cocoa',),
+ {'env':None, 'executable_path': None}),
+ ('dyld_find', ('Cocoa.framework/Cocoa',),
+ {'env':None, 'executable_path': None}),
+ ])
+ patcher.clear_calls()
+
+ result = dyld.framework_find('Cocoa', '/my/sw/bin', {})
+ self.assertEqual(patcher.calls, [
+ ('dyld_find', ('Cocoa',),
+ {'env':{}, 'executable_path': '/my/sw/bin'}),
+ ('dyld_find', ('Cocoa.framework/Cocoa',),
+ {'env':{}, 'executable_path': '/my/sw/bin'}),
+ ])
+ patcher.clear_calls()
+
+
+ finally:
+ patcher.cleanup()
+
+
+
+
+class TestTrivialDyld (unittest.TestCase):
+ # Tests ported from the implementation file
+ def testBasic(self):
+ self.assertEqual(dyld.dyld_find('libSystem.dylib'), '/usr/lib/libSystem.dylib')
+ self.assertEqual(dyld.dyld_find('System.framework/System'), '/System/Library/Frameworks/System.framework/System')
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_dylib.py b/python/macholib/macholib_tests/test_dylib.py
new file mode 100644
index 000000000..dcbc45358
--- /dev/null
+++ b/python/macholib/macholib_tests/test_dylib.py
@@ -0,0 +1,38 @@
+from macholib import dylib
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
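+# Shorthand for the dict that dylib.dylib_info() is expected to return.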
+def d(location=None, name=None, shortname=None, version=None, suffix=None):
+ return dict(
+ location=location,
+ name=name,
+ shortname=shortname,
+ version=version,
+ suffix=suffix
+ )
+
+class TestDylib (unittest.TestCase):
+ def testInvalid(self):
+ self.assertTrue(dylib.dylib_info('completely/invalid') is None)
+ self.assertTrue(dylib.dylib_info('completely/invalid_debug') is None)
+
+ def testUnversioned(self):
+ self.assertEqual(dylib.dylib_info('P/Foo.dylib'),
+ d('P', 'Foo.dylib', 'Foo'))
+ self.assertEqual(dylib.dylib_info('P/Foo_debug.dylib'),
+ d('P', 'Foo_debug.dylib', 'Foo', suffix='debug'))
+
+ def testVersioned(self):
+ self.assertEqual(dylib.dylib_info('P/Foo.A.dylib'),
+ d('P', 'Foo.A.dylib', 'Foo', 'A'))
+ self.assertEqual(dylib.dylib_info('P/Foo_debug.A.dylib'),
+ d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A'))
+ self.assertEqual(dylib.dylib_info('P/Foo.A_debug.dylib'),
+ d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug'))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_framework.py b/python/macholib/macholib_tests/test_framework.py
new file mode 100644
index 000000000..8c123afd0
--- /dev/null
+++ b/python/macholib/macholib_tests/test_framework.py
@@ -0,0 +1,88 @@
+from macholib import framework
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+
+class TestFramework (unittest.TestCase):
+ def test_framework(self):
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Versions/SomeVersion/Name_Suffix'),
+ dict(
+ location='Location',
+ name='Name.framework/Versions/SomeVersion/Name_Suffix',
+ shortname='Name',
+ version='SomeVersion',
+ suffix='Suffix',
+ ))
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Versions/SomeVersion/Name'),
+ dict(
+ location='Location',
+ name='Name.framework/Versions/SomeVersion/Name',
+ shortname='Name',
+ version='SomeVersion',
+ suffix=None,
+ ))
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Name_Suffix'),
+ dict(
+ location='Location',
+ name='Name.framework/Name_Suffix',
+ shortname='Name',
+ version=None,
+ suffix='Suffix',
+ ))
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Name'),
+ dict(
+ location='Location',
+ name='Name.framework/Name',
+ shortname='Name',
+ version=None,
+ suffix=None
+ ))
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework.disabled/Name'),
+ None
+ )
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Versions/A/B/Name'),
+ None
+ )
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Versions/A'),
+ None
+ )
+ self.assertEqual(
+ framework.framework_info('Location/Name.framework/Versions/A/Name/_debug'),
+ None
+ )
+
+ def test_internal_tests(self):
+ # Ported over from the source file
+ def d(location=None, name=None, shortname=None, version=None, suffix=None):
+ return dict(
+ location=location,
+ name=name,
+ shortname=shortname,
+ version=version,
+ suffix=suffix
+ )
+ self.assertEqual(framework.framework_info('completely/invalid'), None)
+ self.assertEqual(framework.framework_info('completely/invalid/_debug'), None)
+ self.assertEqual(framework.framework_info('P/F.framework'), None)
+ self.assertEqual(framework.framework_info('P/F.framework/_debug'), None)
+ self.assertEqual(framework.framework_info('P/F.framework/F'), d('P', 'F.framework/F', 'F'))
+ self.assertEqual(framework.framework_info('P/F.framework/F_debug'), d('P', 'F.framework/F_debug', 'F', suffix='debug'))
+ self.assertEqual(framework.framework_info('P/F.framework/Versions'), None)
+ self.assertEqual(framework.framework_info('P/F.framework/Versions/A'), None)
+ self.assertEqual(framework.framework_info('P/F.framework/Versions/A/F'), d('P', 'F.framework/Versions/A/F', 'F', 'A'))
+ self.assertEqual(framework.framework_info('P/F.framework/Versions/A/F_debug'), d('P', 'F.framework/Versions/A/F_debug', 'F', 'A', 'debug'))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_itergraphreport.py b/python/macholib/macholib_tests/test_itergraphreport.py
new file mode 100644
index 000000000..f3e7c28a3
--- /dev/null
+++ b/python/macholib/macholib_tests/test_itergraphreport.py
@@ -0,0 +1,15 @@
+from macholib import itergraphreport
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+class TestIterGraphReport (unittest.TestCase):
+ @unittest.expectedFailure
+ def test_missing(self):
+ self.fail("tests are missing")
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_mach_o.py b/python/macholib/macholib_tests/test_mach_o.py
new file mode 100644
index 000000000..0fbcf04b2
--- /dev/null
+++ b/python/macholib/macholib_tests/test_mach_o.py
@@ -0,0 +1,21 @@
+from macholib import mach_o
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+
+class TestMachO (unittest.TestCase):
+ # This module is just a set of struct definitions; there is
+ # no obvious way to test those without replicating the code.
+ #
+ # The definitions are exercised by the other tests, so
+ # dedicated tests are skipped for now.
+ pass
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/macholib_tests/test_ptypes.py b/python/macholib/macholib_tests/test_ptypes.py
new file mode 100644
index 000000000..f221d37d3
--- /dev/null
+++ b/python/macholib/macholib_tests/test_ptypes.py
@@ -0,0 +1,191 @@
+from macholib import ptypes
+
+import sys
+if sys.version_info[:2] <= (2,6):
+ import unittest2 as unittest
+else:
+ import unittest
+
+try:
+ from io import BytesIO
+except ImportError:
+ from cStringIO import StringIO as BytesIO
+import mmap
+
+try:
+ long
+except NameError:
+ long = int
+
+
+class TestPTypes (unittest.TestCase):
+ if not hasattr(unittest.TestCase, 'assertIsSubclass'):
+ def assertIsSubclass(self, class1, class2, message=None):
+ self.assertTrue(issubclass(class1, class2),
+ message or "%r is not a subclass of %r"%(class1, class2))
+
+ if not hasattr(unittest.TestCase, 'assertIsInstance'):
+ def assertIsInstance(self, value, types, message=None):
+ self.assertTrue(isinstance(value, types),
+ message or "%r is not an instance of %r"%(value, types))
+
+ def test_sizeof(self):
+ self.assertEqual(ptypes.sizeof(b"foobar"), 6)
+
+ self.assertRaises(ValueError, ptypes.sizeof, [])
+ self.assertRaises(ValueError, ptypes.sizeof, {})
+ self.assertRaises(ValueError, ptypes.sizeof, b"foo".decode('ascii'))
+
+ class M (object):
+ pass
+
+ m = M()
+ m._size_ = 42
+ self.assertEqual(ptypes.sizeof(m), 42)
+
+
+ def verifyType(self, ptype, size, pytype, values):
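+ # Round-trip each sample value through every serialization
+ # surface ptypes offers (to_str/from_str, file objects, mmap,
+ # from_tuple) and check the declared size and Python base type.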
+ self.assertEqual(ptypes.sizeof(ptype), size)
+ self.assertIsSubclass(ptype, pytype)
+
+ for v in values:
+ pv = ptype(v)
+ packed = pv.to_str()
+ self.assertIsInstance(packed, bytes)
+ self.assertEqual(len(packed), size)
+
+ unp = ptype.from_str(packed)
+ self.assertIsInstance(unp, ptype)
+ self.assertEqual(unp, pv)
+
+ fp = BytesIO(packed)
+ unp = ptype.from_fileobj(fp)
+ fp.close()
+ self.assertIsInstance(unp, ptype)
+ self.assertEqual(unp, pv)
+
+ fp = BytesIO()
+ pv.to_fileobj(fp)
+ data = fp.getvalue()
+ fp.close()
+ self.assertEqual(data, packed)
+
+ mm = mmap.mmap(-1, size+20)
+ mm[:] = b'\x00' * (size+20)
+ pv.to_mmap(mm, 10)
+
+ self.assertEqual(ptype.from_mmap(mm, 10), pv)
+ self.assertEqual(mm[:], (b'\x00'*10) + packed + (b'\x00'*10))
+
+ self.assertEqual(ptype.from_tuple((v,)), pv)
+
+ def test_basic_types(self):
+ self.verifyType(ptypes.p_char, 1, bytes, [b'a', b'b'])
+ self.verifyType(ptypes.p_int8, 1, int, [1, 42, -4])
+ self.verifyType(ptypes.p_uint8, 1, int, [1, 42, 253])
+
+ self.verifyType(ptypes.p_int16, 2, int, [1, 400, -10, -5000])
+ self.verifyType(ptypes.p_uint16, 2, int, [1, 400, 65000])
+
+ self.verifyType(ptypes.p_int32, 4, int, [1, 400, 2**24, -10, -5000, -2**24])
+ self.verifyType(ptypes.p_uint32, 4, long, [1, 400, 2**31+5, 65000])
+
+ self.verifyType(ptypes.p_int64, 8, long, [1, 400, 2**43, -10, -5000, -2**43])
+ self.verifyType(ptypes.p_uint64, 8, long, [1, 400, 2**63+5, 65000])
+
+ self.verifyType(ptypes.p_float, 4, float, [1.0, 42.5])
+ self.verifyType(ptypes.p_double, 8, float, [1.0, 42.5])
+
+ def test_basic_types_deprecated(self):
+ self.verifyType(ptypes.p_byte, 1, int, [1, 42, -4])
+ self.verifyType(ptypes.p_ubyte, 1, int, [1, 42, 253])
+
+ self.verifyType(ptypes.p_short, 2, int, [1, 400, -10, -5000])
+ self.verifyType(ptypes.p_ushort, 2, int, [1, 400, 65000])
+
+ self.verifyType(ptypes.p_int, 4, int, [1, 400, 2**24, -10, -5000, -2**24])
+ self.verifyType(ptypes.p_uint, 4, long, [1, 400, 2**31+5, 65000])
+
+ self.verifyType(ptypes.p_long, 4, int, [1, 400, 2**24, -10, -5000, -2**24])
+ self.verifyType(ptypes.p_ulong, 4, long, [1, 400, 2**31+5, 65000])
+
+ self.verifyType(ptypes.p_longlong, 8, long, [1, 400, 2**43, -10, -5000, -2**43])
+ self.verifyType(ptypes.p_ulonglong, 8, long, [1, 400, 2**63+5, 65000])
+
+class TestPTypesPrivate (unittest.TestCase):
+ # These are tests for functions that aren't part of the public
+ # API.
+
+ def test_formatinfo(self):
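+ # Inferred from these checks: _formatinfo(fmt) returns
+ # (total size in bytes, number of fields) for a struct-style
+ # format string.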
+ self.assertEqual(ptypes._formatinfo(">b"), (1, 1))
+ self.assertEqual(ptypes._formatinfo(">h"), (2, 1))
+ self.assertEqual(ptypes._formatinfo(">HhL"), (8, 3))
+ self.assertEqual(ptypes._formatinfo("<b"), (1, 1))
+ self.assertEqual(ptypes._formatinfo("<h"), (2, 1))
+ self.assertEqual(ptypes._formatinfo("<HhL"), (8, 3))
+
+
+class MyStructure (ptypes.Structure):
+ _fields_ = (
+ ('foo', ptypes.p_int32),
+ ('bar', ptypes.p_uint8),
+ )
+
+class MyFunStructure (ptypes.Structure):
+ _fields_ = (
+ ('fun', ptypes.p_char),
+ ('mystruct', MyStructure),
+ )
+
+class TestPTypesSimple (unittest.TestCase):
+ # Quick port of tests that used to be part of
+ # the macholib.ptypes source code.
+ #
+ # Migrating these into TestPTypes in a structured
+ # manner would be nice, but is not essential.
+
+ def testBasic(self):
+ for endian in '><':
+ kw = dict(_endian_=endian)
+ MYSTRUCTURE = b'\x00\x11\x22\x33\xFF'
+ for fn, args in [
+ ('from_str', (MYSTRUCTURE,)),
+ ('from_mmap', (MYSTRUCTURE, 0)),
+ ('from_fileobj', (BytesIO(MYSTRUCTURE),)),
+ ]:
+ myStructure = getattr(MyStructure, fn)(*args, **kw)
+ if endian == '>':
+ self.assertEqual(myStructure.foo, 0x00112233)
+ else:
+ self.assertEqual(myStructure.foo, 0x33221100)
+ self.assertEqual(myStructure.bar, 0xFF)
+ self.assertEqual(myStructure.to_str(), MYSTRUCTURE)
+
+ MYFUNSTRUCTURE = b'!' + MYSTRUCTURE
+ for fn, args in [
+ ('from_str', (MYFUNSTRUCTURE,)),
+ ('from_mmap', (MYFUNSTRUCTURE, 0)),
+ ('from_fileobj', (BytesIO(MYFUNSTRUCTURE),)),
+ ]:
+ myFunStructure = getattr(MyFunStructure, fn)(*args, **kw)
+ self.assertEqual(myFunStructure.mystruct, myStructure)
+ self.assertEqual(myFunStructure.fun, b'!', (myFunStructure.fun, b'!'))
+ self.assertEqual(myFunStructure.to_str(), MYFUNSTRUCTURE)
+
+ sio = BytesIO()
+ myFunStructure.to_fileobj(sio)
+ self.assertEqual(sio.getvalue(), MYFUNSTRUCTURE)
+
+ mm = mmap.mmap(-1, ptypes.sizeof(MyFunStructure) * 2)
+ mm[:] = b'\x00' * (ptypes.sizeof(MyFunStructure) * 2)
+ myFunStructure.to_mmap(mm, 0)
+ self.assertEqual(MyFunStructure.from_mmap(mm, 0, **kw), myFunStructure)
+ self.assertEqual(mm[:ptypes.sizeof(MyFunStructure)], MYFUNSTRUCTURE)
+ self.assertEqual(mm[ptypes.sizeof(MyFunStructure):], b'\x00' * ptypes.sizeof(MyFunStructure))
+ myFunStructure.to_mmap(mm, ptypes.sizeof(MyFunStructure))
+ self.assertEqual(mm[:], MYFUNSTRUCTURE + MYFUNSTRUCTURE)
+ self.assertEqual(MyFunStructure.from_mmap(mm, ptypes.sizeof(MyFunStructure), **kw), myFunStructure)
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/python/macholib/setup.cfg b/python/macholib/setup.cfg
new file mode 100644
index 000000000..1713110a8
--- /dev/null
+++ b/python/macholib/setup.cfg
@@ -0,0 +1,42 @@
+[metadata]
+name = macholib
+version = 1.7
+description = Mach-O header analysis and editing
+long_description_file =
+ README.txt
+ doc/changelog.rst
+classifiers =
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.3
+ Programming Language :: Python :: 3.4
+ Operating System :: MacOS :: MacOS X
+ Topic :: Software Development :: Libraries :: Python Modules
+ Topic :: Software Development :: Build Tools
+author = Ronald Oussoren
+author_email = ronaldoussoren@mac.com
+maintainer = Ronald Oussoren
+maintainer_email = ronaldoussoren@mac.com
+url = http://bitbucket.org/ronaldoussoren/macholib
+download_url = http://pypi.python.org/pypi/macholib
+packages = macholib
+license = MIT
+platforms = any
+requires-dist =
+ altgraph (>=0.12)
+zip-safe = yes
+console_scripts =
+ macho_find = macholib.macho_find:main
+ macho_standalone = macholib.macho_standalone:main
+ macho_dump = macholib.macho_dump:main
+keywords = Mach-O, dyld
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/macholib/setup.py b/python/macholib/setup.py
new file mode 100644
index 000000000..a1a4cb6eb
--- /dev/null
+++ b/python/macholib/setup.py
@@ -0,0 +1,867 @@
+"""
+Shared setup file for simple Python packages. Uses a setup.cfg whose
+format matches that of the distutils2 project, unless noted otherwise.
+
+It exists for two reasons:
+1) It makes it easier to reuse setup.py code between my own
+ projects.
+
+2) It eases migration to distutils2, should that catch on.
+
+Additional functionality:
+
+* In the [metadata] section:
+ requires-test: same as the 'tests_require' option of setuptools.
+
+"""
+
+import sys
+import os
+import re
+import platform
+from fnmatch import fnmatch
+import time
+import tempfile
+import tarfile
+try:
+ import urllib.request as urllib
+except ImportError:
+ import urllib
+from distutils import log
+try:
+ from hashlib import md5
+
+except ImportError:
+ from md5 import md5
+
+if sys.version_info[0] == 2:
+ from ConfigParser import RawConfigParser, NoOptionError, NoSectionError
+else:
+ from configparser import RawConfigParser, NoOptionError, NoSectionError
+
+ROOTDIR = os.path.dirname(os.path.abspath(__file__))
+
+
+#
+#
+#
+# Parsing the setup.cfg and converting it to something that can be
+# used by setuptools.setup()
+#
+#
+#
+
+def eval_marker(value):
+ """
+ Evaluate a distutils2 environment marker.
+
+ This code is unsafe when used with hostile setup.cfg files,
+ but that's not a problem for our own files.
+ """
+ value = value.strip()
+
+ class M:
+ def __init__(self, **kwds):
+ for k, v in kwds.items():
+ setattr(self, k, v)
+
+ variables = {
+ 'python_version': '%d.%d'%(sys.version_info[0], sys.version_info[1]),
+ 'python_full_version': sys.version.split()[0],
+ 'os': M(
+ name=os.name,
+ ),
+ 'sys': M(
+ platform=sys.platform,
+ ),
+ 'platform': M(
+ version=platform.version(),
+ machine=platform.machine(),
+ ),
+ }
+
+ return bool(eval(value, variables, variables))
+
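+# Examples for eval_marker (hypothetical markers):
+#   eval_marker("python_version == '2.7'")   -> True only under Python 2.7
+#   eval_marker("sys.platform == 'darwin'")  -> True only on Mac OS X
+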
+def _opt_value(cfg, into, section, key, transform = None):
+ try:
+ v = cfg.get(section, key)
+ if transform != _as_lines and ';' in v:
+ v, marker = v.rsplit(';', 1)
+ if not eval_marker(marker):
+ return
+
+ v = v.strip()
+
+ if v:
+ if transform:
+ into[key] = transform(v.strip())
+ else:
+ into[key] = v.strip()
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+def _as_bool(value):
+ if value.lower() in ('y', 'yes', 'on'):
+ return True
+ elif value.lower() in ('n', 'no', 'off'):
+ return False
+ elif value.isdigit():
+ return bool(int(value))
+ else:
+ raise ValueError(value)
+
+def _as_list(value):
+ return value.split()
+
+def _as_lines(value):
+ result = []
+ for v in value.splitlines():
+ if ';' in v:
+ v, marker = v.rsplit(';', 1)
+ if not eval_marker(marker):
+ continue
+
+ v = v.strip()
+ if v:
+ result.append(v)
+ else:
+ result.append(v)
+ return result
+
+def _map_requirement(value):
+ m = re.search(r'(\S+)\s*(?:\((.*)\))?', value)
+ name = m.group(1)
+ version = m.group(2)
+
+ if version is None:
+ return name
+
+ else:
+ mapped = []
+ for v in version.split(','):
+ v = v.strip()
+ if v[0].isdigit():
+ # Checks for a specific version prefix
+ m = v.rsplit('.', 1)
+ mapped.append('>=%s,<%s.%s'%(
+ v, m[0], int(m[1])+1))
+
+ else:
+ mapped.append(v)
+ return '%s %s'%(name, ','.join(mapped),)
+
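+# Examples of what _map_requirement produces (hypothetical inputs):
+#   "altgraph"          -> "altgraph"
+#   "altgraph (>=0.12)" -> "altgraph >=0.12"
+#   "altgraph (0.12)"   -> "altgraph >=0.12,<0.13"   (prefix pinning)
+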
+def _as_requires(value):
+ requires = []
+ for req in value.splitlines():
+ if ';' in req:
+ req, marker = req.rsplit(';', 1)
+ if not eval_marker(marker):
+ continue
+ req = req.strip()
+
+ if not req:
+ continue
+ requires.append(_map_requirement(req))
+ return requires
+
+def parse_setup_cfg():
+ cfg = RawConfigParser()
+ r = cfg.read([os.path.join(ROOTDIR, 'setup.cfg')])
+ if len(r) != 1:
+ print("Cannot read 'setup.cfg'")
+ sys.exit(1)
+
+ metadata = dict(
+ name = cfg.get('metadata', 'name'),
+ version = cfg.get('metadata', 'version'),
+ description = cfg.get('metadata', 'description'),
+ )
+
+ _opt_value(cfg, metadata, 'metadata', 'license')
+ _opt_value(cfg, metadata, 'metadata', 'maintainer')
+ _opt_value(cfg, metadata, 'metadata', 'maintainer_email')
+ _opt_value(cfg, metadata, 'metadata', 'author')
+ _opt_value(cfg, metadata, 'metadata', 'author_email')
+ _opt_value(cfg, metadata, 'metadata', 'url')
+ _opt_value(cfg, metadata, 'metadata', 'download_url')
+ _opt_value(cfg, metadata, 'metadata', 'classifiers', _as_lines)
+ _opt_value(cfg, metadata, 'metadata', 'platforms', _as_list)
+ _opt_value(cfg, metadata, 'metadata', 'packages', _as_list)
+ _opt_value(cfg, metadata, 'metadata', 'keywords', _as_list)
+
+ try:
+ v = cfg.get('metadata', 'requires-dist')
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ requires = _as_requires(v)
+ if requires:
+ metadata['install_requires'] = requires
+
+ try:
+ v = cfg.get('metadata', 'requires-test')
+
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ requires = _as_requires(v)
+ if requires:
+ metadata['tests_require'] = requires
+
+
+ try:
+ v = cfg.get('metadata', 'long_description_file')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ parts = []
+ for nm in v.split():
+ fp = open(nm, 'rU')
+ parts.append(fp.read())
+ fp.close()
+
+ metadata['long_description'] = '\n\n'.join(parts)
+
+
+ try:
+ v = cfg.get('metadata', 'zip-safe')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ metadata['zip_safe'] = _as_bool(v)
+
+ try:
+ v = cfg.get('metadata', 'console_scripts')
+ except (NoOptionError, NoSectionError):
+ pass
+
+ else:
+ if 'entry_points' not in metadata:
+ metadata['entry_points'] = {}
+
+ metadata['entry_points']['console_scripts'] = v.splitlines()
+
+ if sys.version_info[:2] <= (2,6):
+ # tests_require is a list when set (see _as_requires), so append
+ # rather than concatenating a string onto it.
+ if 'tests_require' in metadata:
+ metadata['tests_require'].append('unittest2')
+ else:
+ metadata['tests_require'] = ['unittest2']
+
+ return metadata
+
+
+#
+#
+#
+# Bootstrapping setuptools/distribute, based on
+# a heavily modified version of distribute_setup.py
+#
+#
+#
+
+
+SETUPTOOLS_PACKAGE='setuptools'
+
+
+try:
+ import subprocess
+
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+except ImportError:
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ new_args = []
+ for a in args:
+ # Single-quote each argument for the shell.
+ new_args.append("'" + a.replace("'", "'\"'\"'") + "'")
+ return os.system(' '.join(new_args)) == 0
+
+
+try:
+ import json
+
+ def get_pypi_src_download(package):
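+ # The (legacy) PyPI JSON API returns a document shaped roughly like
+ # {"info": {...}, "urls": [{"packagetype": "sdist",
+ #  "url": ".../pkg-1.0.tar.gz", "md5_digest": "..."}, ...]};
+ # we pick the first sdist tarball and return (md5_digest, url).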
+ url = 'https://pypi.python.org/pypi/%s/json'%(package,)
+ fp = urllib.urlopen(url)
+ try:
+ try:
+ data = fp.read()
+
+ finally:
+ fp.close()
+ except urllib.error:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ pkgdata = json.loads(data.decode('utf-8'))
+ if 'urls' not in pkgdata:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ for info in pkgdata['urls']:
+ if info['packagetype'] == 'sdist' and info['url'].endswith('tar.gz'):
+ return (info.get('md5_digest'), info['url'])
+
+ raise RuntimeError("Cannot determine downlink link for %s"%(package,))
+
+except ImportError:
+ # Python 2.5 compatibility: no JSON in the stdlib, but luckily JSON syntax
+ # is similar enough to Python's syntax that we can abuse the Python compiler
+
+ import _ast as ast
+
+ def get_pypi_src_download(package):
+ url = 'https://pypi.python.org/pypi/%s/json'%(package,)
+ fp = urllib.urlopen(url)
+ try:
+ try:
+ data = fp.read()
+
+ finally:
+ fp.close()
+ except urllib.error:
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+
+ a = compile(data, '-', 'eval', ast.PyCF_ONLY_AST)
+ if not isinstance(a, ast.Expression):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ a = a.body
+ if not isinstance(a, ast.Dict):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ for k, v in zip(a.keys, a.values):
+ if not isinstance(k, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ k = k.s
+ if k == 'urls':
+ a = v
+ break
+ else:
+ raise RuntimeError("PyPI JSON for %s doesn't contain URLs section"%(package,))
+
+ if not isinstance(a, ast.List):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ for info in a.elts:
+ if not isinstance(info, ast.Dict):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ url = None
+ packagetype = None
+ chksum = None
+
+ for k, v in zip(info.keys, info.values):
+ if not isinstance(k, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+ if k.s == 'url':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ url = v.s
+
+ elif k.s == 'packagetype':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ packagetype = v.s
+
+ elif k.s == 'md5_digest':
+ if not isinstance(v, ast.Str):
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+ chksum = v.s
+
+ if url is not None and packagetype == 'sdist' and url.endswith('.tar.gz'):
+ return (chksum, url)
+
+ raise RuntimeError("Cannot determine download link for %s"%(package,))
+
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a %s egg in %s', egg, to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(to_dir, packagename=SETUPTOOLS_PACKAGE):
+ tarball = download_setuptools(packagename, to_dir)
+ version = tarball.split('-')[-1][:-7]
+ egg = os.path.join(to_dir, '%s-%s-py%d.%d.egg'
+ % (packagename, version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools():
+ # making sure we use the absolute path
+ return _do_download(os.path.abspath(os.curdir))
+
+def download_setuptools(packagename, to_dir):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+
+ chksum, url = get_pypi_src_download(packagename)
+ tgz_name = os.path.basename(url)
+ saveto = os.path.join(to_dir, tgz_name)
+
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ log.warn("Downloading %s", url)
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+
+ if chksum is not None:
+ data_sum = md5(data).hexdigest()
+ if data_sum != chksum:
+ raise RuntimeError("Downloading %s failed: corrupt checksum"%(url,))
+
+
+ dst = open(saveto, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+ return os.path.realpath(saveto)
+
+
+
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
+ else:
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
+#
+#
+#
+# Definitions of custom commands
+#
+#
+#
+
+try:
+ import setuptools
+
+except ImportError:
+ use_setuptools()
+
+from setuptools import setup
+
+try:
+ from distutils.core import PyPIRCCommand
+except ImportError:
+ PyPIRCCommand = None # Ancient python version
+
+from distutils.core import Command
+from distutils.errors import DistutilsError
+from distutils import log
+
+if PyPIRCCommand is None:
+ class upload_docs (Command):
+ description = "upload sphinx documentation"
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ raise DistutilsError("not supported on this version of python")
+
+else:
+ class upload_docs (PyPIRCCommand):
+ description = "upload sphinx documentation"
+ user_options = PyPIRCCommand.user_options
+
+ def initialize_options(self):
+ PyPIRCCommand.initialize_options(self)
+ self.username = ''
+ self.password = ''
+
+
+ def finalize_options(self):
+ PyPIRCCommand.finalize_options(self)
+ config = self._read_pypirc()
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+
+
+ def run(self):
+ import subprocess
+ import shutil
+ import zipfile
+ import os
+ import urllib
+ import socket
+ import StringIO
+ from base64 import standard_b64encode
+ import httplib
+ import urlparse
+
+ # Extract the package name from distutils metadata
+ meta = self.distribution.metadata
+ name = meta.get_name()
+
+ # Run sphinx
+ if os.path.exists('doc/_build'):
+ shutil.rmtree('doc/_build')
+ os.mkdir('doc/_build')
+
+ p = subprocess.Popen(['make', 'html'],
+ cwd='doc')
+ exit = p.wait()
+ if exit != 0:
+ raise DistutilsError("sphinx-build failed")
+
+ # Collect sphinx output
+ if not os.path.exists('dist'):
+ os.mkdir('dist')
+ zf = zipfile.ZipFile('dist/%s-docs.zip'%(name,), 'w',
+ compression=zipfile.ZIP_DEFLATED)
+
+ for toplevel, dirs, files in os.walk('doc/_build/html'):
+ for fn in files:
+ fullname = os.path.join(toplevel, fn)
+ relname = os.path.relpath(fullname, 'doc/_build/html')
+
+ print ("%s -> %s"%(fullname, relname))
+
+ zf.write(fullname, relname)
+
+ zf.close()
+
+ # Upload the results, this code is based on the distutils
+ # 'upload' command.
+ content = open('dist/%s-docs.zip'%(name,), 'rb').read()
+
+ data = {
+ ':action': 'doc_upload',
+ 'name': name,
+ 'content': ('%s-docs.zip'%(name,), content),
+ }
+ auth = "Basic " + standard_b64encode(self.username + ":" +
+ self.password)
+
+
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = '\n--' + boundary
+ end_boundary = sep_boundary + '--'
+ body = StringIO.StringIO()
+ for key, value in data.items():
+ if not isinstance(value, list):
+ value = [value]
+
+ for value in value:
+ if isinstance(value, tuple):
+ fn = ';filename="%s"'%(value[0])
+ value = value[1]
+ else:
+ fn = ''
+
+ body.write(sep_boundary)
+ body.write('\nContent-Disposition: form-data; name="%s"'%key)
+ body.write(fn)
+ body.write("\n\n")
+ body.write(value)
+
+ body.write(end_boundary)
+ body.write('\n')
+ body = body.getvalue()
+
+ self.announce("Uploading documentation to %s"%(self.repository,), log.INFO)
+
+ schema, netloc, url, params, query, fragments = \
+ urlparse.urlparse(self.repository)
+
+
+ if schema == 'http':
+ http = httplib.HTTPConnection(netloc)
+ elif schema == 'https':
+ http = httplib.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema "+schema)
+
+ data = ''
+ loglevel = log.INFO
+ try:
+ http.connect()
+ http.putrequest("POST", url)
+ http.putheader('Content-type',
+ 'multipart/form-data; boundary=%s'%boundary)
+ http.putheader('Content-length', str(len(body)))
+ http.putheader('Authorization', auth)
+ http.endheaders()
+ http.send(body)
+ except socket.error:
+ e = sys.exc_info()[1]
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = http.getresponse()
+ if r.status in (200, 301):
+ self.announce('Upload succeeded (%s): %s' % (r.status, r.reason),
+ log.INFO)
+ else:
+ self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+ log.ERROR)
+
+ print ('-'*75)
+ print (r.read())
+ print ('-'*75)
+
+
+def recursiveGlob(root, pathPattern):
+ """
+ Recursively look for files matching 'pathPattern'. Return a list
+ of matching files/directories.
+ """
+ result = []
+
+ for rootpath, dirnames, filenames in os.walk(root):
+ for fn in filenames:
+ if fnmatch(fn, pathPattern):
+ result.append(os.path.join(rootpath, fn))
+ return result
+
+
+def importExternalTestCases(unittest,
+ pathPattern="test_*.py", root=".", package=None):
+ """
+ Import all unittests in the package tree starting at 'root'.
+ """
+
+ testFiles = recursiveGlob(root, pathPattern)
+ testModules = map(lambda x:x[len(root)+1:-3].replace('/', '.'), testFiles)
+ if package is not None:
+ testModules = [(package + '.' + m) for m in testModules]
+
+ suites = []
+
+ for modName in testModules:
+ try:
+ module = __import__(modName)
+ except ImportError:
+ print("SKIP %s: %s"%(modName, sys.exc_info()[1]))
+ continue
+
+ if '.' in modName:
+ for elem in modName.split('.')[1:]:
+ module = getattr(module, elem)
+
+ s = unittest.defaultTestLoader.loadTestsFromModule(module)
+ suites.append(s)
+
+ return unittest.TestSuite(suites)
+
+
+
+class test (Command):
+ description = "run test suite"
+ user_options = [
+ ('verbosity=', None, "print what tests are run"),
+ ]
+
+ def initialize_options(self):
+ self.verbosity='1'
+
+ def finalize_options(self):
+ if isinstance(self.verbosity, str):
+ self.verbosity = int(self.verbosity)
+
+
+ def cleanup_environment(self):
+ ei_cmd = self.get_finalized_command('egg_info')
+ egg_name = ei_cmd.egg_name.replace('-', '_')
+
+ to_remove = []
+ for dirname in sys.path:
+ bn = os.path.basename(dirname)
+ if bn.startswith(egg_name + "-"):
+ to_remove.append(dirname)
+
+ for dirname in to_remove:
+ log.info("removing installed %r from sys.path before testing"%(
+ dirname,))
+ sys.path.remove(dirname)
+
+ def add_project_to_sys_path(self):
+ from pkg_resources import normalize_path, add_activation_listener
+ from pkg_resources import working_set, require
+
+ self.reinitialize_command('egg_info')
+ self.run_command('egg_info')
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+
+ # Check if this distribution is already on sys.path
+ # and remove that version, this ensures that the right
+ # copy of the package gets tested.
+
+ self.__old_path = sys.path[:]
+ self.__old_modules = sys.modules.copy()
+
+
+ ei_cmd = self.get_finalized_command('egg_info')
+ sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+ sys.path.insert(1, os.path.dirname(__file__))
+
+ # Strip the namespace packages defined in this distribution
+ # from sys.modules, needed to reset the search path for
+ # those modules.
+
+ nspkgs = getattr(self.distribution, 'namespace_packages', None)
+ if nspkgs is not None:
+ for nm in nspkgs:
+ sys.modules.pop(nm, None)
+
+ # Reset pkg_resources state:
+ add_activation_listener(lambda dist: dist.activate())
+ working_set.__init__()
+ require('%s==%s'%(ei_cmd.egg_name, ei_cmd.egg_version))
+
+ def remove_from_sys_path(self):
+ from pkg_resources import working_set
+ sys.path[:] = self.__old_path
+ sys.modules.clear()
+ sys.modules.update(self.__old_modules)
+ working_set.__init__()
+
+
+ def run(self):
+ import unittest
+
+ # Ensure that build directory is on sys.path (py3k)
+
+ self.cleanup_environment()
+ self.add_project_to_sys_path()
+
+ try:
+ meta = self.distribution.metadata
+ name = meta.get_name()
+ test_pkg = name + "_tests"
+ suite = importExternalTestCases(unittest,
+ "test_*.py", test_pkg, test_pkg)
+
+ runner = unittest.TextTestRunner(verbosity=self.verbosity)
+ result = runner.run(suite)
+
+ # Print out summary. This is a structured format that
+ # should make it easy to use this information in scripts.
+ summary = dict(
+ count=result.testsRun,
+ fails=len(result.failures),
+ errors=len(result.errors),
+ xfails=len(getattr(result, 'expectedFailures', [])),
+ xpass=len(getattr(result, 'unexpectedSuccesses', [])),
+ skip=len(getattr(result, 'skipped', [])),
+ )
+ print("SUMMARY: %s"%(summary,))
+
+ finally:
+ self.remove_from_sys_path()
+
+#
+#
+#
+# And finally run the setuptools main entry point.
+#
+#
+#
+
+metadata = parse_setup_cfg()
+
+setup(
+ cmdclass=dict(
+ upload_docs=upload_docs,
+ test=test,
+ ),
+ **metadata
+)
diff --git a/python/mock-1.0.0/LICENSE.txt b/python/mock-1.0.0/LICENSE.txt
new file mode 100644
index 000000000..7891703b1
--- /dev/null
+++ b/python/mock-1.0.0/LICENSE.txt
@@ -0,0 +1,26 @@
+Copyright (c) 2003-2012, Michael Foord
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/python/mock-1.0.0/MANIFEST.in b/python/mock-1.0.0/MANIFEST.in
new file mode 100644
index 000000000..d52b301de
--- /dev/null
+++ b/python/mock-1.0.0/MANIFEST.in
@@ -0,0 +1,2 @@
+include LICENSE.txt tox.ini tests/*.py
+recursive-include docs *.txt *.py *.png *.css *.html *.js
diff --git a/python/mock-1.0.0/PKG-INFO b/python/mock-1.0.0/PKG-INFO
new file mode 100644
index 000000000..4c7309c71
--- /dev/null
+++ b/python/mock-1.0.0/PKG-INFO
@@ -0,0 +1,208 @@
+Metadata-Version: 1.0
+Name: mock
+Version: 1.0.0
+Summary: A Python Mocking and Patching Library for Testing
+Home-page: http://www.voidspace.org.uk/python/mock/
+Author: Michael Foord
+Author-email: michael@voidspace.org.uk
+License: UNKNOWN
+Description: mock is a library for testing in Python. It allows you to replace parts of
+ your system under test with mock objects and make assertions about how they
+ have been used.
+
+ mock is now part of the Python standard library, available as `unittest.mock <
+ http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+ in Python 3.3 onwards.
+
+ mock provides a core `MagicMock` class removing the need to create a host of
+ stubs throughout your test suite. After performing an action, you can make
+ assertions about which methods / attributes were used and arguments they were
+ called with. You can also specify return values and set needed attributes in
+ the normal way.
+
+ mock is tested on Python versions 2.4-2.7 and Python 3. mock is also tested
+ with the latest versions of Jython and pypy.
+
+ The mock module also provides utility functions / objects to assist with
+ testing, particularly monkey patching.
+
+ * `PDF documentation for 1.0 beta 1
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+ * `mock on google code (repository and issue tracker)
+ <http://code.google.com/p/mock/>`_
+ * `mock documentation
+ <http://www.voidspace.org.uk/python/mock/>`_
+ * `mock on PyPI <http://pypi.python.org/pypi/mock/>`_
+ * `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+
+ Mock is very easy to use and is designed for use with
+ `unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+ the 'action -> assertion' pattern instead of 'record -> replay' used by many
+ mocking frameworks. See the `mock documentation`_ for full details.
+
+ Mock objects create all attributes and methods as you access them and store
+ details of how they have been used. You can configure them, to specify return
+ values or limit what attributes are available, and then make assertions about
+ how they have been used::
+
+ >>> from mock import Mock
+ >>> real = ProductionClass()
+ >>> real.method = Mock(return_value=3)
+ >>> real.method(3, 4, 5, key='value')
+ 3
+ >>> real.method.assert_called_with(3, 4, 5, key='value')
+
+ `side_effect` allows you to perform side effects, return different values or
+ raise an exception when a mock is called::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+ Mock has many other ways you can configure it and control its behaviour. For
+ example the `spec` argument configures the mock to take its specification from
+ another object. Attempting to access attributes or methods on the mock that
+ don't exist on the spec will fail with an `AttributeError`.
+
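+ A minimal, illustrative sketch of `spec` (using a plain dict as the
+ specification)::
+
+ >>> from mock import Mock
+ >>> mock = Mock(spec=dict)
+ >>> mock.nonexistent()
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'nonexistent'
+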
+ The `patch` decorator / context manager makes it easy to mock classes or
+ objects in a module under test. The object you specify will be replaced with a
+ mock (or other object) during the test and restored when the test ends::
+
+ >>> from mock import patch
+ >>> @patch('test_module.ClassName1')
+ ... @patch('test_module.ClassName2')
+ ... def test(MockClass2, MockClass1):
+ ... test_module.ClassName1()
+ ... test_module.ClassName2()
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+ .. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `test_module.ClassName2` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read `where to patch
+ <http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch>`_.
+
+ As well as a decorator `patch` can be used as a context manager in a with
+ statement::
+
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+ There is also `patch.dict` for setting values in a dictionary just during the
+ scope of a test and restoring the dictionary to its original state when the
+ test ends::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+ Mock supports the mocking of Python magic methods. The easiest way of
+ using magic methods is with the `MagicMock` class. It allows you to do
+ things like::
+
+ >>> from mock import MagicMock
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_once_with()
+
+ Mock allows you to assign functions (or other Mock instances) to magic methods
+ and they will be called appropriately. The MagicMock class is just a Mock
+ variant that has all of the magic methods pre-created for you (well - all the
+ useful ones anyway).
+
+ The following is an example of using magic methods with the ordinary Mock
+ class::
+
+ >>> from mock import Mock
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value = 'wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+ For ensuring that the mock objects your tests use have the same api as the
+ objects they are replacing, you can use "auto-speccing". Auto-speccing can
+ be done through the `autospec` argument to patch, or the `create_autospec`
+ function. Auto-speccing creates mock objects that have the same attributes
+ and methods as the objects they are replacing, and any functions and methods
+ (including constructors) have the same call signature as the real object.
+
+ This ensures that your mocks will fail in the same way as your production
+ code if they are used incorrectly::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+ `create_autospec` can also be used on classes, where it copies the signature of
+ the `__init__` method, and on callable objects where it copies the signature of
+ the `__call__` method.
+
+ The distribution contains tests and documentation. The tests require
+ `unittest2 <http://pypi.python.org/pypi/unittest2>`_ to run.
+
+ Docs from the in-development version of `mock` can be found at
+ `mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+Keywords: testing,test,mock,mocking,unittest,patching,stubs,fakes,doubles
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Testing
diff --git a/python/mock-1.0.0/README.txt b/python/mock-1.0.0/README.txt
new file mode 100644
index 000000000..385db3cae
--- /dev/null
+++ b/python/mock-1.0.0/README.txt
@@ -0,0 +1,177 @@
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock <
+http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core `MagicMock` class removing the need to create a host of
+stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.
+
+mock is tested on Python versions 2.4-2.7 and Python 3. mock is also tested
+with the latest versions of Jython and pypy.
+
+The mock module also provides utility functions / objects to assist with
+testing, particularly monkey patching.
+
+* `PDF documentation for 1.0 beta 1
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `mock on google code (repository and issue tracker)
+ <http://code.google.com/p/mock/>`_
+* `mock documentation
+ <http://www.voidspace.org.uk/python/mock/>`_
+* `mock on PyPI <http://pypi.python.org/pypi/mock/>`_
+* `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of 'record -> replay' used by many
+mocking frameworks. See the `mock documentation`_ for full details.
+
+Mock objects create all attributes and methods as you access them and store
+details of how they have been used. You can configure them, to specify return
+values or limit what attributes are available, and then make assertions about
+how they have been used::
+
+ >>> from mock import Mock
+ >>> real = ProductionClass()
+ >>> real.method = Mock(return_value=3)
+ >>> real.method(3, 4, 5, key='value')
+ 3
+ >>> real.method.assert_called_with(3, 4, 5, key='value')
+
+`side_effect` allows you to perform side effects, return different values or
+raise an exception when a mock is called::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification from
+another object. Attempting to access attributes or methods on the mock that
+don't exist on the spec will fail with an `AttributeError`.
+
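+A minimal, illustrative sketch of `spec` (using a plain dict as the
+specification)::
+
+ >>> from mock import Mock
+ >>> mock = Mock(spec=dict)
+ >>> mock.nonexistent()
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'nonexistent'
+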
+The `patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends::
+
+ >>> from mock import patch
+ >>> @patch('test_module.ClassName1')
+ ... @patch('test_module.ClassName2')
+ ... def test(MockClass2, MockClass1):
+ ... test_module.ClassName1()
+ ... test_module.ClassName2()
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they applied (the normal *python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `test_module.ClassName2` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read `where to patch
+ <http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch>`_.
+
+As well as a decorator `patch` can be used as a context manager in a with
+statement::
+
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+There is also `patch.dict` for setting values in a dictionary just during the
+scope of a test and restoring the dictionary to its original state when the
+test ends::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python magic methods. The easiest way of
+using magic methods is with the `MagicMock` class. It allows you to do
+things like::
+
+ >>> from mock import MagicMock
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_once_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The MagicMock class is just a Mock
+variant that has all of the magic methods pre-created for you (well - all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class::
+
+ >>> from mock import Mock
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value = 'wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects your tests use have the same api as the
+objects they are replacing, you can use "auto-speccing". Auto-speccing can
+be done through the `autospec` argument to patch, or the `create_autospec`
+function. Auto-speccing creates mock objects that have the same attributes
+and methods as the objects they are replacing, and any functions and methods
+(including constructors) have the same call signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
+
+The distribution contains tests and documentation. The tests require
+`unittest2 <http://pypi.python.org/pypi/unittest2>`_ to run.
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
diff --git a/python/mock-1.0.0/docs/changelog.txt b/python/mock-1.0.0/docs/changelog.txt
new file mode 100644
index 000000000..a605be3d9
--- /dev/null
+++ b/python/mock-1.0.0/docs/changelog.txt
@@ -0,0 +1,725 @@
+.. currentmodule:: mock
+
+
+CHANGELOG
+=========
+
+2012/10/07 Version 1.0.0
+------------------------
+
+No changes since 1.0.0 beta 1. This version has feature parity with
+`unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3.
+
+Full list of changes since 0.8:
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results; now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/07/13 Version 1.0.0 beta 1
+--------------------------------
+
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+
+
+2012/05/04 Version 1.0.0 alpha 2
+--------------------------------
+
+* `PropertyMock` attributes are now standard `MagicMocks`
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+
+
+2012/03/25 Version 1.0.0 alpha 1
+--------------------------------
+
+The standard library version!
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results; now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
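+A minimal sketch of the `mock_open` helper listed above (using the Python 2
+spelling of the builtin module)::
+
+    from mock import mock_open, patch
+
+    m = mock_open(read_data='some data')
+    with patch('__builtin__.open', m, create=True):
+        with open('foo') as h:
+            assert h.read() == 'some data'
+    m.assert_called_once_with('foo')
+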
+
+2012/02/13 Version 0.8.0
+------------------------
+
+The only changes since 0.8rc2 are:
+
+* Improved repr of :data:`sentinel` objects
+* :data:`ANY` can be used for comparisons against :data:`call` objects
+* The return value of the `MagicMock.__iter__` method can be set to
+  any iterable and isn't required to be an iterator
+
+Full list of changes since 0.7:
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* Addition of :attr:`~Mock.mock_calls` list for *all* calls (including magic
+ methods and chained calls)
+* :func:`patch` and :func:`patch.object` now create a :class:`MagicMock`
+ instead of a :class:`Mock` by default
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method :meth:`~Mock.configure_mock` for setting attributes and
+ return values / side effects on the mock and its attributes
+* New mock assert methods :meth:`~Mock.assert_any_call` and
+ :meth:`~Mock.assert_has_calls`
+* Implemented :ref:`auto-speccing` (recursive, lazy speccing of mocks with
+ mocked signatures for functions/methods), as the `autospec` argument to
+ `patch`
+* Added the :func:`create_autospec` function for manually creating
+ 'auto-specced' mocks
+* :func:`patch.multiple` for doing multiple patches in a single call, using
+ keyword arguments
+* Setting :attr:`~Mock.side_effect` to an iterable will cause calls to the mock
+ to return the next value from the iterable
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of :class:`NonCallableMock` and :class:`NonCallableMagicMock`, mocks
+ without a `__call__` method
+* Addition of :meth:`~Mock.mock_add_spec` method for adding (or changing) a
+ spec on an existing mock
+* Protocol methods on :class:`MagicMock` are magic mocks, and are created
+ lazily on first lookup. This means the result of calling a protocol method is
+ a `MagicMock` instead of a `Mock` as it was previously
+* Addition of :meth:`~Mock.attach_mock` method
+* Added :data:`ANY` for ignoring arguments in :meth:`~Mock.assert_called_with`
+ calls
+* Addition of :data:`call` helper object
+* Improved repr for mocks
+* Improved repr for :attr:`Mock.call_args` and entries in
+ :attr:`Mock.call_args_list`, :attr:`Mock.method_calls` and
+ :attr:`Mock.mock_calls`
+* Improved repr for :data:`sentinel` objects
+* `patch` lookup is done at use time not at decoration time
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level :data:`FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default.
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* Support for using Java exceptions as a :attr:`~Mock.side_effect` on Jython
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ a nicer representation if you `str` or `print` them
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* The return value of the :class:`MagicMock` `__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ :func:`mocksignature`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+* Removal of deprecated `patch_object`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+  (etc) renamed to reduce likelihood of clash with user attributes.
+* Added license file to the distribution
+
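+A minimal sketch of the `mock_calls` and `call` additions listed above::
+
+    from mock import MagicMock, call
+
+    m = MagicMock()
+    m.method(1)
+    m.attribute.other(2)
+    # calls on child mocks are recorded on the parent, in order
+    assert m.mock_calls == [call.method(1), call.attribute.other(2)]
+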
+
+2012/01/10 Version 0.8.0 release candidate 2
+--------------------------------------------
+
+* Removed the `configure` keyword argument to `create_autospec` and allow
+ arbitrary keyword arguments (for the `Mock` constructor) instead
+* Fixed `ANY` equality with some types in `assert_called_with` calls
+* Switched to a standard Sphinx theme (compatible with
+ `readthedocs.org <http://mock.readthedocs.org>`_)
+
+
+2011/12/29 Version 0.8.0 release candidate 1
+--------------------------------------------
+
+* `create_autospec` on the return value of a mocked class will use `__call__`
+ for the signature rather than `__init__`
+* Performance improvement instantiating `Mock` and `MagicMock`
+* Mocks used as magic methods have the same type as their parent instead of
+ being hardcoded to `MagicMock`
+
+Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.
+
+
+2011/10/09 Version 0.8.0 beta 4
+-------------------------------
+
+* `patch` lookup is done at use time not at decoration time
+* When attaching a Mock to another Mock as a magic method, calls are recorded
+ in mock_calls
+* Addition of `attach_mock` method
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: various issues around circular references with mocks (setting a mock
+ return value to be itself etc)
+
+
+2011/08/15 Version 0.8.0 beta 3
+-------------------------------
+
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Addition of `mock_add_spec` method for adding (or changing) a spec on an
+ existing mock
+* Improved repr for `Mock.call_args` and entries in `Mock.call_args_list`,
+ `Mock.method_calls` and `Mock.mock_calls`
+* Improved repr for mocks
+* BUGFIX: minor fixes in the way `mock_calls` is worked out,
+ especially for "intermediate" mocks in a call chain
+
+
+2011/08/05 Version 0.8.0 beta 2
+-------------------------------
+
+* Setting `side_effect` to an iterable will cause calls to the mock to return
+ the next value from the iterable
+* Added `assert_any_call` method
+* Moved `assert_has_calls` from call lists onto mocks
+* BUGFIX: `call_args` and all members of `call_args_list` are two tuples of
+ `(args, kwargs)` again instead of three tuples of `(name, args, kwargs)`
+
+
+2011/07/25 Version 0.8.0 beta 1
+-------------------------------
+
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ an `assert_has_calls` method for unordered call checks
+* `callargs` changed to *always* be a three-tuple of `(name, args, kwargs)`
+* Addition of `mock_calls` list for *all* calls (including magic methods and
+ chained calls)
+* Extension of `call` object to support chained calls and `callargs` for better
+ comparisons with or without names. `call` object has a `call_list` method for
+ chained calls
+* Added the public `instance` argument to `create_autospec`
+* Support for using Java exceptions as a `side_effect` on Jython
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* BUGFIX: Fixed bug in `patch.multiple` for argument passing when creating
+ mocks
+* Added license file to the distribution
+
+
+2011/07/16 Version 0.8.0 alpha 2
+--------------------------------
+
+* `patch.multiple` for doing multiple patches in a single call, using keyword
+ arguments
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of `NonCallableMock` and `NonCallableMagicMock`, mocks without a
+ `__call__` method
+* Mocks created by `patch` have a `MagicMock` as the `return_value` where a
+ class is being patched
+* `create_autospec` can create non-callable mocks for non-callable objects.
+ `return_value` mocks of classes will be non-callable unless the class has
+ a `__call__` method
+* `autospec` creates a `MagicMock` without a spec for properties and slot
+ descriptors, because we don't know the type of object they return
+* Removed the "inherit" argument from `create_autospec`
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: autospec for functions / methods with an argument named self that
+  isn't the first argument is no longer broken
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+
+
+2011/06/14 Version 0.8.0 alpha 1
+--------------------------------
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method `configure_mock` for setting attributes and return values /
+ side effects on the mock and its attributes
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level `FILTER_DIR` added to control whether `dir(mock)` filters
+  private attributes. `True` by default. Note that `vars(Mock())` can still be
+  used to get all instance attributes and `dir(type(Mock()))` will still
+  return all the other attributes (irrespective of `FILTER_DIR`)
+* `patch` and `patch.object` now create a `MagicMock` instead of a `Mock` by
+ default
+* Added `ANY` for ignoring arguments in `assert_called_with` calls
+* Addition of `call` helper object
+* Protocol methods on `MagicMock` are magic mocks, and are created lazily on
+  first lookup. This means the result of calling a protocol method is a
+  `MagicMock` instead of a `Mock` as it was previously
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ `mocksignature`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+  (etc) renamed to reduce likelihood of clash with user attributes.
+* Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+ signatures for functions/methods)
+
+ Limitations:
+
+ - Doesn't mock magic methods or attributes (it creates MagicMocks, so the
+ magic methods are *there*, they just don't have the signature mocked nor
+ are attributes followed)
+ - Doesn't mock function / method attributes
+ - Uses object traversal on the objects being mocked to determine types - so
+ properties etc may be triggered
+  - The return value of mocked classes (the 'instance') has the same call
+    signature as the class `__init__` (as they share the same spec)
+
+ You create auto-specced mocks by passing `autospec=True` to `patch`.
+
+ Note that attributes that are None are special cased and mocked without a
+ spec (so any attribute / method can be used). This is because None is
+ typically used as a default value for attributes that may be of some other
+ type, and as we don't know what type that may be we allow all access.
+
+  Note that the `autospec` option to `patch` obsoletes the `mocksignature`
+  option; a minimal sketch of autospec in action is shown after this list.
+
+* Added the `create_autospec` function for manually creating 'auto-specced'
+ mocks
+* Removal of deprecated `patch_object`
+
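+A minimal sketch of autospec, as described in the notes above (`SomeClass`
+is a hypothetical class, purely for illustration)::
+
+    from mock import create_autospec
+
+    class SomeClass(object):
+        def method(self, a, b):
+            return a + b
+
+    mock_class = create_autospec(SomeClass)
+    instance = mock_class()       # the "instance" shares the class spec
+    instance.method(1, 2)         # matches the real signature: fine
+    try:
+        instance.method(1, 2, 3)  # too many arguments
+    except TypeError:
+        pass
+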
+
+2011/05/30 Version 0.7.2
+------------------------
+
+* BUGFIX: instances of list subclasses can now be used as mock specs
+* BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+ default equality / inequality. This is done through a `side_effect` on
+ the mocks used for `__eq__` / `__ne__`
+
+
+2011/05/06 Version 0.7.1
+------------------------
+
+Package fixes contributed by Michael Fladischer. No code changes.
+
+* Include template in package
+* Use isolated binaries for the tox tests
+* Unset executable bit on docs
+* Fix DOS line endings in getting-started.txt
+
+
+2011/03/05 Version 0.7.0
+------------------------
+
+No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new `Sphinx theme <https://github.com/coordt/ADCtheme/>`_.
+
+The full set of changes since 0.6.0 are:
+
+* Python 3 compatibility
+* Ability to mock magic methods with `Mock` and addition of `MagicMock`
+ with pre-created magic methods
+* Addition of `mocksignature` and `mocksignature` argument to `patch` and
+ `patch.object`
+* Addition of `patch.dict` for changing dictionaries during a test
+* Ability to use `patch`, `patch.object` and `patch.dict` as class decorators
+* Renamed ``patch_object`` to `patch.object` (``patch_object`` is
+ deprecated)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Added docstrings to all objects
+* Improved failure message for `Mock.assert_called_with` when the mock
+ has not been called at all
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4.
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+* BUGFIX: patching the same object twice now restores the patches correctly
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 (or unittest2-py3k) to run
+* Tested with `tox <http://pypi.python.org/pypi/tox>`_ on Python 2.4 - 3.2,
+ jython and pypy (excluding 3.0)
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* Switched from subversion to mercurial for source code control
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
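+A minimal sketch of `patch.dict` from the list above::
+
+    from mock import patch
+
+    config = {'key': 'value'}
+    with patch.dict(config, {'key': 'changed', 'extra': 1}):
+        assert config['key'] == 'changed'
+    # the original dictionary is restored on exit
+    assert config == {'key': 'value'}
+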
+
+2011/02/16 Version 0.7.0 RC 1
+-----------------------------
+
+Changes since beta 4:
+
+* Tested with jython, pypy and Python 3.2 and 3.1
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+
+
+2010/11/12 Version 0.7.0 beta 4
+-------------------------------
+
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Attributes and return value of a `MagicMock` are `MagicMock` objects
+* Attempting to set an unsupported magic method now raises an `AttributeError`
+* `patch.dict` works as a class decorator
+* Switched from subversion to mercurial for source code control
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: `mocksignature=True` can now patch instance methods via
+ `patch.object`
+
+
+2010/09/18 Version 0.7.0 beta 3
+-------------------------------
+
+* Using spec with :class:`MagicMock` only pre-creates magic methods in the spec
+* Setting a magic method on a mock with a ``spec`` can only be done if the
+ spec has that method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* `mocksignature` can now be used with classes (signature based on `__init__`)
+ and callable objects (signature based on `__call__`)
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Default numeric value for MagicMock is 1 rather than zero (because the
+ MagicMock bool defaults to True and 0 is False)
+* Improved failure message for :meth:`~Mock.assert_called_with` when the mock
+ has not been called at all
+* Adding the following to the set of supported magic methods:
+
+ - ``__getformat__`` and ``__setformat__``
+ - pickle methods
+ - ``__trunc__``, ``__ceil__`` and ``__floor__``
+ - ``__sizeof__``
+
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 to run on Python 2.7
+* Improved several docstrings and documentation
+
+
+2010/06/23 Version 0.7.0 beta 2
+-------------------------------
+
+* :func:`patch.dict` works as a context manager as well as a decorator
+* ``patch.dict`` takes a string to specify dictionary as well as a dictionary
+ object. If a string is supplied the name specified is imported
+* BUGFIX: ``patch.dict`` restores dictionary even when an exception is raised
+
+
+2010/06/22 Version 0.7.0 beta 1
+-------------------------------
+
+* Addition of :func:`mocksignature`
+* Ability to mock magic methods
+* Ability to use ``patch`` and ``patch.object`` as class decorators
+* Renamed ``patch_object`` to :func:`patch.object` (``patch_object`` is
+ deprecated)
+* Addition of :class:`MagicMock` class with all magic methods pre-created for you
+* Python 3 compatibility (tested with 3.2 but should work with 3.0 & 3.1 as
+ well)
+* Addition of :func:`patch.dict` for changing dictionaries during a test
+* Addition of ``mocksignature`` argument to ``patch`` and ``patch.object``
+* ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* Added docstrings.
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: patching the same object twice now restores the patches correctly
+* The tests now require `unittest2 <http://pypi.python.org/pypi/unittest2>`_
+ to run
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2009/08/22 Version 0.6.0
+------------------------
+
+* New test layout compatible with test discovery
+* Descriptors (static methods / class methods etc) can now be patched and
+ restored correctly
+* Mocks can raise exceptions when called by setting ``side_effect`` to an
+ exception class or instance
+* Mocks that wrap objects will not pass on calls to the underlying object if
+ an explicit return_value is set
+
+
+2009/04/17 Version 0.5.0
+------------------------
+
+* Made ``DEFAULT`` part of the public API.
+* Documentation built with Sphinx.
+* ``side_effect`` is now called with the same arguments as the mock is called
+  with; if it returns a non-``DEFAULT`` value, that value is automatically set
+  as the ``mock.return_value``.
+* ``wraps`` keyword argument used for wrapping objects (and passing calls
+  through to the wrapped object).
+* ``Mock.reset`` renamed to ``Mock.reset_mock``, as reset is a common API name.
+* ``patch`` / ``patch_object`` are now context managers and can be used with ``with``.
+* A new 'create' keyword argument to patch and patch_object that allows them to patch
+ (and unpatch) attributes that don't exist. (Potentially unsafe to use - it can allow
+ you to have tests that pass when they are testing an API that doesn't exist - use at
+ your own risk!)
+* The methods keyword argument to Mock has been removed and merged with spec. The spec
+ argument can now be a list of methods or an object to take the spec from.
+* Nested patches may now be applied in a different order (created mocks passed
+ in the opposite order). This is actually a bugfix.
+* patch and patch_object now take a spec keyword argument. If spec is
+ passed in as 'True' then the Mock created will take the object it is replacing
+ as its spec object. If the object being replaced is a class, then the return
+ value for the mock will also use the class as a spec.
+* A Mock created without a spec will not attempt to mock any magic methods / attributes
+ (they will raise an ``AttributeError`` instead).
+
+
+2008/10/12 Version 0.4.0
+------------------------
+
+* Default return value is now a new mock rather than None
+* return_value added as a keyword argument to the constructor
+* New method 'assert_called_with'
+* Added 'side_effect' attribute / keyword argument called when mock is called
+* patch decorator split into two decorators:
+
+  - ``patch_object`` which takes an object and an attribute name to patch
+    (plus optionally a value to patch with, which defaults to a mock object)
+  - ``patch`` which takes a string specifying a target to patch, in the form
+    'package.module.Class.attribute' (plus optionally a value to patch with,
+    which defaults to a mock object)
+
+* Can now patch objects with ``None``
+* Change to patch for nose compatibility with error reporting in wrapped functions
+* Reset no longer clears children / return value etc - it just resets
+ call count and call args. It also calls reset on all children (and
+ the return value if it is a mock).
+
+Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.
+
+
+2007/12/03 Version 0.3.1
+-------------------------
+
+``patch`` maintains the name of decorated functions for compatibility with nose
+test autodiscovery.
+
+Tests decorated with ``patch`` that use the two-argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.
+
+Thanks to Kevin Dangoor for these changes.
+
+
+2007/11/30 Version 0.3.0
+-------------------------
+
+Removed ``patch_module``. ``patch`` can now take a string as the first
+argument for patching modules.
+
+The third argument to ``patch`` is optional - a mock will be created by
+default if it is not passed in.
+
+
+2007/11/21 Version 0.2.1
+-------------------------
+
+Bug fix, allows reuse of functions decorated with ``patch`` and ``patch_module``.
+
+
+2007/11/20 Version 0.2.0
+-------------------------
+
+Added ``spec`` keyword argument for creating ``Mock`` objects from a
+specification object.
+
+Added ``patch`` and ``patch_module`` monkey patching decorators.
+
+Added ``sentinel`` for convenient access to unique objects.
+
+Distribution includes unit tests.
+
+
+2007/11/19 Version 0.1.0
+-------------------------
+
+Initial release.
+
+
+TODO and Limitations
+====================
+
+Contributions, bug reports and comments welcomed!
+
+Feature requests and bug reports are handled on the issue tracker:
+
+ * `mock issue tracker <http://code.google.com/p/mock/issues/list>`_
+
+`wraps` is not integrated with magic methods.
+
+`patch` could auto-do the patching in the constructor and unpatch in the
+destructor. This would be useful in itself, but violates TOOWTDI and would be
+unsafe for IronPython & PyPy (non-deterministic calling of destructors).
+Destructors aren't called in CPython where there are cycles, but a weak
+reference with a callback can be used to get round this.
+
+`Mock` has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that *hide* these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a `_mock` prefix, making it less likely that
+they will clash. Any outstanding attributes that haven't been modified with
+the prefix should be changed.
+
+If a patch is started using `patch.start` and then not stopped correctly then
+the unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.
+
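+`patch.stopall` (added in 1.0) at least gives a manual safety net; a minimal
+sketch::
+
+    from mock import patch
+
+    patcher = patch('os.getcwd')
+    mocked_getcwd = patcher.start()
+    # if the test errors out before patcher.stop() runs, the patch leaks;
+    # stopall stops every patch that was started with start()
+    patch.stopall()
+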
+When a `Mock` is created by `patch`, arbitrary keywords can be used to set
+attributes. If `patch` is created with a `spec`, and is replacing a class, then
+a `return_value` mock is created. The keyword arguments are not applied to the
+child mock, but could be.
+
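+A sketch of the behaviour described above (`SomeClass` is hypothetical; the
+attribute access on the "instance" is the part that does not work)::
+
+    from mock import patch
+
+    class SomeClass(object):
+        pass
+
+    with patch('__main__.SomeClass', spec=True, foo='bar') as MockClass:
+        assert MockClass.foo == 'bar'       # applied to the mock class
+        instance = MockClass.return_value   # but not to the child mock:
+        # instance.foo raises AttributeError because of the inherited spec
+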
+When mocking a class with `patch`, passing in `spec=True` or `autospec=True`,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (`inherit`) to the Mock constructor? `create_autospec` does this, so
+an additional keyword argument to Mock is probably unnecessary.
+
+The `mocksignature` argument to `patch` with a non-`Mock` passed into
+`new_callable` will *probably* cause an error. Should it just be invalid?
+
+Note that `NonCallableMock` and `NonCallableMagicMock` still have the unused
+(and unusable) attributes: `return_value`, `side_effect`, `call_count`,
+`call_args` and `call_args_list`. These could be removed or raise errors on
+getting / setting. They also have the `assert_called_with` and
+`assert_called_once_with` methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.
+
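+For example (illustrating the unused attributes described above)::
+
+    from mock import NonCallableMock
+
+    m = NonCallableMock()
+    m.return_value    # the attribute exists but can never be populated
+    try:
+        m()
+    except TypeError:
+        pass          # NonCallableMock instances can't be called
+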
+There is some outstanding technical debt. The way autospeccing mocks function
+signatures was copied and modified from `mocksignature`. This could all be
+refactored into one set of functions instead of two. The ways we tell whether
+patchers are started, and whether a patcher is being used for a
+`patch.multiple` call, are both horrible. There are now a host of helper
+functions that should be rationalised. (Probably time to split mock into a
+package instead of a module.)
+
+Passing arbitrary keyword arguments to `create_autospec`, or `patch` with
+`autospec`, when mocking a *function* works fine. The arbitrary attributes
+are set on the created mock, but `create_autospec` returns a real function
+(which doesn't have those attributes). That said, it isn't clear what the use
+case would be for using autospec to create functions with attributes that
+don't exist on the original.
+
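+A sketch of the quirk described above::
+
+    from mock import create_autospec
+
+    def func(a, b):
+        return a + b
+
+    proxy = create_autospec(func, foo='bar')
+    proxy(1, 2)  # the signature is enforced and the call is recorded
+    # `foo` was set on the underlying mock, but `proxy` is a real function,
+    # so `proxy.foo` raises AttributeError (as noted above)
+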
+`mocksignature`, plus the `call_args_list` and `method_calls` attributes of
+`Mock` could all be deprecated.
diff --git a/python/mock-1.0.0/docs/compare.txt b/python/mock-1.0.0/docs/compare.txt
new file mode 100644
index 000000000..41555308e
--- /dev/null
+++ b/python/mock-1.0.0/docs/compare.txt
@@ -0,0 +1,628 @@
+=========================
+ Mock Library Comparison
+=========================
+
+
+.. testsetup::
+
+ def assertEqual(a, b):
+ assert a == b, ("%r != %r" % (a, b))
+
+ def assertRaises(Exc, func):
+ try:
+ func()
+ except Exc:
+ return
+ assert False, ("%s not raised" % Exc)
+
+ sys.modules['somemodule'] = somemodule = mock.Mock(name='somemodule')
+ class SomeException(Exception):
+ some_method = method1 = method2 = None
+ some_other_object = SomeObject = SomeException
+
+
+A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.
+
+These are:
+
+* `flexmock <http://pypi.python.org/pypi/flexmock>`_
+* `mox <http://pypi.python.org/pypi/mox>`_
+* `Mocker <http://niemeyer.net/mocker>`_
+* `dingus <http://pypi.python.org/pypi/dingus>`_
+* `fudge <http://pypi.python.org/pypi/fudge>`_
+
+Popular Python mocking frameworks not yet represented here include
+`MiniMock <http://pypi.python.org/pypi/MiniMock>`_.
+
+`pMock <http://pmock.sourceforge.net/>`_ (last release 2004 and doesn't import
+in recent versions of Python) and
+`python-mock <http://python-mock.sourceforge.net/>`_ (last release 2005) are
+intentionally omitted.
+
+.. note::
+
+    A more up to date version of this comparison, tested against all of the
+    mock libraries (only the mock examples on this page can be executed as
+    doctests), is maintained by Gary Bernhardt:
+
+    * `Python Mock Library Comparison
+      <http://garybernhardt.github.com/python-mock-comparison/>`_
+
+This comparison is by no means complete, and also may not be fully idiomatic
+for all the libraries represented. *Please* contribute corrections, missing
+comparisons, or comparisons for additional libraries to the `mock issue
+tracker <https://code.google.com/p/mock/issues/list>`_.
+
+This comparison page was originally created by the `Mox project
+<https://code.google.com/p/pymox/wiki/MoxComparison>`_ and then extended for
+`flexmock and mock <http://has207.github.com/flexmock/compare.html>`_ by
+Herman Sheremetyev. Dingus examples written by `Gary Bernhardt
+<http://garybernhardt.github.com/python-mock-comparison/>`_. fudge examples
+provided by `Kumar McMillan <http://farmdev.com/>`_.
+
+.. note::
+
+    The example tasks here were originally created by Mox, which is a mocking
+    *framework* rather than a library like mock. The tasks shown naturally
+    exemplify the things that frameworks are good at, not the ones they make
+    harder. In particular you can take a `Mock` or `MagicMock` object and use
+    it in any way you want with no up-front configuration. The same is also
+    true for Dingus.
+
+    The examples for mock here assume version 0.7.0.
+
+
+Simple fake object
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "calculated value"
+ >>> my_mock.some_attribute = "value"
+ >>> assertEqual("calculated value", my_mock.some_method())
+ >>> assertEqual("value", my_mock.some_attribute)
+
+::
+
+ # Flexmock
+ mock = flexmock(some_method=lambda: "calculated value", some_attribute="value")
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("calculated value")
+ mock.some_attribute = "value"
+ mox.Replay(mock)
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("calculated value")
+ mocker.replay()
+ mock.some_attribute = "value"
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_attribute="value",
+ ... some_method__returns="calculated value")
+ >>> assertEqual("calculated value", my_dingus.some_method())
+ >>> assertEqual("value", my_dingus.some_attribute)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .provides('some_method')
+ ... .returns("calculated value")
+ ... .has_attr(some_attribute="value"))
+ ...
+ >>> assertEqual("calculated value", my_fake.some_method())
+ >>> assertEqual("value", my_fake.some_attribute)
+
+
+Simple mock
+~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "value"
+ >>> assertEqual("value", my_mock.some_method())
+ >>> my_mock.some_method.assert_called_once_with()
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_return("value").once
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_method__returns="value")
+ >>> assertEqual("value", my_dingus.some_method())
+ >>> assert my_dingus.some_method.calls().once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .expects('some_method')
+ ... .returns("value")
+ ... .times_called(1))
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was not called
+
+
+Creating partial mocks
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> SomeObject.some_method = mock.Mock(return_value='value')
+ >>> assertEqual("value", SomeObject.some_method())
+
+::
+
+ # Flexmock
+ flexmock(SomeObject).should_receive("some_method").and_return('value')
+    assertEqual("value", SomeObject.some_method())
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock(SomeObject)
+    mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> object = SomeObject
+ >>> object.some_method = dingus.Dingus(return_value="value")
+ >>> assertEqual("value", object.some_method())
+
+::
+
+ >>> # fudge
+ >>> fake = fudge.Fake().is_callable().returns("<fudge-value>")
+ >>> with fudge.patched_context(SomeObject, 'some_method', fake):
+ ... s = SomeObject()
+ ... assertEqual("<fudge-value>", s.some_method())
+ ...
+
+
+Ensure calls are made in specific order
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock(spec=SomeObject)
+ >>> my_mock.method1()
+ <Mock name='mock.method1()' id='...'>
+ >>> my_mock.method2()
+ <Mock name='mock.method2()' id='...'>
+ >>> assertEqual(my_mock.mock_calls, [call.method1(), call.method2()])
+
+::
+
+ # Flexmock
+ mock = flexmock(SomeObject)
+ mock.should_receive('method1').once.ordered.and_return('first thing')
+ mock.should_receive('method2').once.ordered.and_return('second thing')
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.method1().AndReturn('first thing')
+ mock.method2().AndReturn('second thing')
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ with mocker.order():
+ mock.method1()
+ mocker.result('first thing')
+ mock.method2()
+ mocker.result('second thing')
+ mocker.replay()
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.method1()
+ <Dingus ...>
+ >>> my_dingus.method2()
+ <Dingus ...>
+ >>> assertEqual(['method1', 'method2'], [call.name for call in my_dingus.calls])
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .remember_order()
+ ... .expects('method1')
+ ... .expects('method2'))
+ ... my_fake.method2()
+ ... my_fake.method1()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: Call #1 was fake:my_fake.method2(); Expected: #1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end
+
+
+Raising exceptions
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.side_effect = SomeException("message")
+ >>> assertRaises(SomeException, my_mock.some_method)
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_raise(SomeException("message"))
+ assertRaises(SomeException, mock.some_method)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndRaise(SomeException("message"))
+ mox.Replay(mock)
+ assertRaises(SomeException, mock.some_method)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.throw(SomeException("message"))
+ mocker.replay()
+ assertRaises(SomeException, mock.some_method)
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method = dingus.exception_raiser(SomeException)
+ >>> assertRaises(SomeException, my_dingus.some_method)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .is_callable()
+ ... .raises(SomeException("message")))
+ ...
+ >>> my_fake()
+ Traceback (most recent call last):
+ ...
+ SomeException: message
+
+
+Override new instances of a class
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('somemodule.Someclass') as MockClass:
+ ... MockClass.return_value = some_other_object
+ ... assertEqual(some_other_object, somemodule.Someclass())
+ ...
+
+
+::
+
+ # Flexmock
+ flexmock(some_module.SomeClass, new_instances=some_other_object)
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mox
+ # (you will probably have mox.Mox() available as self.mox in a real test)
+ mox.Mox().StubOutWithMock(some_module, 'SomeClass', use_mock_anything=True)
+ some_module.SomeClass().AndReturn(some_other_object)
+ mox.ReplayAll()
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mocker
+ instance = mocker.mock()
+ klass = mocker.replace(SomeClass, spec=None)
+ klass('expected', 'args')
+ mocker.result(instance)
+
+::
+
+ >>> # Dingus
+ >>> MockClass = dingus.Dingus(return_value=some_other_object)
+ >>> with dingus.patch('somemodule.SomeClass', MockClass):
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+
+::
+
+ >>> # fudge
+ >>> @fudge.patch('somemodule.SomeClass')
+ ... def test(FakeClass):
+ ... FakeClass.is_callable().returns(some_other_object)
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+ >>> test()
+
+
+Call the same method multiple times
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. note::
+
+ You don't need to do *any* configuration to call `mock.Mock()` methods
+ multiple times. Attributes like `call_count`, `call_args_list` and
+ `method_calls` provide various different ways of making assertions about
+ how the mock was used.
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> assert my_mock.some_method.call_count >= 2
+
+::
+
+    # Flexmock
+    # (verifies that the method gets called at least twice)
+ flexmock(some_object).should_receive('some_method').at_least.twice
+
+ # Mox
+    # (does not support a variable number of calls, so you need to create a
+    # new entry for each explicit call)
+ mock = mox.MockObject(some_object)
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> assert len(my_dingus.calls('some_method')) == 2
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake().expects('some_method').times_called(2)
+ ... my_fake.some_method()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was called 1 time(s). Expected 2.
+
+
+Mock chained methods
+~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> method3 = my_mock.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_mock.method1().method2().method3(1, 2))
+ >>> method3.assert_called_once_with(1, 2)
+
+::
+
+ # Flexmock
+ # (intermediate method calls are automatically assigned to temporary fake objects
+ # and can be called with any arguments)
+ flexmock(some_object).should_receive(
+ 'method1.method2.method3'
+ ).with_args(arg1, arg2).and_return('some value')
+    assertEqual('some value', some_object.method1().method2().method3(arg1, arg2))
+
+::
+
+ # Mox
+ mock = mox.MockObject(some_object)
+ mock2 = mox.MockAnything()
+ mock3 = mox.MockAnything()
+    mock.method1().AndReturn(mock2)
+    mock2.method2().AndReturn(mock3)
+ mock3.method3(arg1, arg2).AndReturn('some_value')
+ self.mox.ReplayAll()
+ assertEqual("some_value", some_object.method1().method2().method3(arg1, arg2))
+ self.mox.VerifyAll()
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> method3 = my_dingus.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_dingus.method1().method2().method3(1, 2))
+ >>> assert method3.calls('()', 1, 2).once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake()
+ ... (my_fake
+ ... .expects('method1')
+ ... .returns_fake()
+ ... .expects('method2')
+ ... .returns_fake()
+ ... .expects('method3')
+ ... .with_args(1, 2)
+ ... .returns('some value'))
+ ... assertEqual('some value', my_fake.method1().method2().method3(1, 2))
+ ...
+ >>> test()
+
+
+Mocking a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Examples for mock, Dingus and fudge only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with my_mock:
+ ... pass
+ ...
+ >>> my_mock.__enter__.assert_called_with()
+ >>> my_mock.__exit__.assert_called_with(None, None, None)
+
+::
+
+
+ >>> # Dingus (nothing special here; all dinguses are "magic mocks")
+ >>> my_dingus = dingus.Dingus()
+ >>> with my_dingus:
+ ... pass
+ ...
+ >>> assert my_dingus.__enter__.calls()
+ >>> assert my_dingus.__exit__.calls('()', None, None, None)
+
+::
+
+ >>> # fudge
+ >>> my_fake = fudge.Fake().provides('__enter__').provides('__exit__')
+ >>> with my_fake:
+ ... pass
+ ...
+
+
+Mocking the builtin open used as a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Example for mock only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with mock.patch('__builtin__.open', my_mock):
+ ... manager = my_mock.return_value.__enter__.return_value
+ ... manager.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+*or*:
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('__builtin__.open') as my_mock:
+ ... my_mock.return_value.__enter__ = lambda s: s
+ ... my_mock.return_value.__exit__ = mock.Mock()
+ ... my_mock.return_value.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> with dingus.patch('__builtin__.open', my_dingus):
+ ... file_ = open.return_value.__enter__.return_value
+ ... file_.read.return_value = 'some data'
+ ... with open('foo') as h:
+    ...         data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> assert my_dingus.calls('()', 'foo').once()
+
+::
+
+ >>> # fudge
+ >>> from contextlib import contextmanager
+ >>> from StringIO import StringIO
+ >>> @contextmanager
+ ... def fake_file(filename):
+ ... yield StringIO('sekrets')
+ ...
+ >>> with fudge.patch('__builtin__.open') as fake_open:
+ ... fake_open.is_callable().calls(fake_file)
+ ... with open('/etc/password') as f:
+ ... data = f.read()
+ ...
+ fake:__builtin__.open
+ >>> data
+ 'sekrets' \ No newline at end of file
diff --git a/python/mock-1.0.0/docs/conf.py b/python/mock-1.0.0/docs/conf.py
new file mode 100644
index 000000000..62f0491cc
--- /dev/null
+++ b/python/mock-1.0.0/docs/conf.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+#
+# Mock documentation build configuration file, created by
+# sphinx-quickstart on Mon Nov 17 18:12:00 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import sys, os
+sys.path.insert(0, os.path.abspath('..'))
+from mock import __version__
+
+# If your extensions are in another directory, add it here. If the directory
+# is relative to the documentation root, use os.path.abspath to make it
+# absolute, like shown here.
+#sys.path.append(os.path.abspath('some/directory'))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.doctest']
+
+doctest_global_setup = """
+import os
+import sys
+import mock
+from mock import * # yeah, I know :-/
+import unittest2
+import __main__
+
+if os.getcwd() not in sys.path:
+ sys.path.append(os.getcwd())
+
+# keep a reference to __main__
+sys.modules['__main'] = __main__
+
+class ProxyModule(object):
+ def __init__(self):
+ self.__dict__ = globals()
+
+sys.modules['__main__'] = ProxyModule()
+"""
+
+doctest_global_cleanup = """
+sys.modules['__main__'] = sys.modules['__main']
+"""
+
+html_theme = 'nature'
+html_theme_options = {}
+
+# Add any paths that contain templates here, relative to this directory.
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = u'Mock'
+copyright = u'2007-2012, Michael Foord & the mock team'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+#
+# The short X.Y version.
+version = __version__[:3]
+# The full version, including alpha/beta/rc tags.
+release = __version__
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directories, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = False
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'friendly'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'adctheme.css'
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+html_use_modindex = False
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Mockdoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+latex_font_size = '12pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+latex_documents = [
+ ('index', 'Mock.tex', u'Mock Documentation',
+ u'Michael Foord', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+latex_use_modindex = False \ No newline at end of file
diff --git a/python/mock-1.0.0/docs/examples.txt b/python/mock-1.0.0/docs/examples.txt
new file mode 100644
index 000000000..ecb994b15
--- /dev/null
+++ b/python/mock-1.0.0/docs/examples.txt
@@ -0,0 +1,1063 @@
+.. _further-examples:
+
+==================
+ Further Examples
+==================
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ from datetime import date
+
+ BackendProvider = Mock()
+ sys.modules['mymodule'] = mymodule = Mock(name='mymodule')
+
+ def grob(val):
+ "First frob and then clear val"
+ mymodule.frob(val)
+ val.clear()
+
+ mymodule.frob = lambda val: val
+ mymodule.grob = grob
+ mymodule.date = date
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ out = unittest2.TestCase.run(self, result)
+ assert result.wasSuccessful()
+
+ from mock import inPy3k
+
+
+
+For comprehensive examples, see the unit tests included in the full source
+distribution.
+
+Here are some more examples for some slightly more advanced scenarios than in
+the :ref:`getting started <getting-started>` guide.
+
+
+Mocking chained calls
+=====================
+
+Mocking chained calls is actually straightforward with mock once you
+understand the :attr:`~Mock.return_value` attribute. When a mock is called for
+the first time, or you fetch its `return_value` before it has been called, a
+new `Mock` is created.
+
+This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the `return_value` mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock().foo(a=2, b=3)
+ <Mock name='mock().foo()' id='...'>
+ >>> mock.return_value.foo.assert_called_with(a=2, b=3)
+
+From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...
+
+So, suppose we have some code that looks a little bit like this:
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.backend = BackendProvider()
+ ... def method(self):
+ ... response = self.backend.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ ... # more code
+
+Assuming that `BackendProvider` is already well tested, how do we test
+`method()`? Specifically, we want to test that the code section `# more
+code` uses the response object in the correct way.
+
+As this chain of calls is made from an instance attribute we can monkey patch
+the `backend` attribute on a `Something` instance. In this particular case
+we are only interested in the return value from the final call to
+`start_call` so we don't have much configuration to do. Let's assume the
+object it returns is 'file-like', so we'll ensure that our response object
+uses the builtin `file` as its `spec`.
+
+To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+`start_call` we could do this:
+
+ `mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response`.
+
+We can do that in a slightly nicer way using the :meth:`~Mock.configure_mock`
+method to directly set the return value for us:
+
+.. doctest::
+
+ >>> something = Something()
+ >>> mock_response = Mock(spec=file)
+ >>> mock_backend = Mock()
+ >>> config = {'get_endpoint.return_value.create_call.return_value.start_call.return_value': mock_response}
+ >>> mock_backend.configure_mock(**config)
+
+With these we monkey patch the "mock backend" in place and can make the real
+call:
+
+.. doctest::
+
+ >>> something.backend = mock_backend
+ >>> something.method()
+
+Using :attr:`~Mock.mock_calls` we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in `mock_calls`. We can use :meth:`call.call_list` to create
+this list of calls for us:
+
+.. doctest::
+
+ >>> chained = call.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ >>> call_list = chained.call_list()
+ >>> assert mock_backend.mock_calls == call_list
+
+
+Partial mocking
+===============
+
+In some tests I wanted to mock out a call to `datetime.date.today()
+<http://docs.python.org/library/datetime.html#datetime.date.today>`_ to return
+a known date, but I didn't want to prevent the code under test from
+creating new date objects. Unfortunately `datetime.date` is written in C, and
+so I couldn't just monkey-patch out the static `date.today` method.
+
+I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).
+
+The :func:`patch decorator <patch>` is used here to
+mock out the `date` class in the module under test. The :attr:`side_effect`
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by `side_effect`.
+
+.. doctest::
+
+ >>> from datetime import date
+ >>> with patch('mymodule.date') as mock_date:
+ ... mock_date.today.return_value = date(2010, 10, 8)
+ ... mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+ ...
+ ... assert mymodule.date.today() == date(2010, 10, 8)
+ ... assert mymodule.date(2009, 6, 8) == date(2009, 6, 8)
+ ...
+
+Note that we don't patch `datetime.date` globally, we patch `date` in the
+module that *uses* it. See :ref:`where to patch <where-to-patch>`.
+
+When `date.today()` is called a known date is returned, but calls to the
+`date(...)` constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.
+
+Calls to the date constructor are recorded in the `mock_date` attributes
+(`call_count` and friends) which may also be useful for your tests.
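+
+For example, a minimal sketch (reusing the `mymodule` setup from above) that
+inspects how the constructor was used:
+
+.. doctest::
+
+ >>> with patch('mymodule.date') as mock_date:
+ ... mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+ ... mymodule.date(2009, 6, 8)
+ ...
+ datetime.date(2009, 6, 8)
+ >>> mock_date.call_count
+ 1
+ >>> mock_date.call_args
+ call(2009, 6, 8)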
+
+An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in `this blog entry
+<http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_.
+
+
+Mocking a Generator Method
+==========================
+
+A Python generator is a function or method that uses the `yield statement
+<http://docs.python.org/reference/simple_stmts.html#the-yield-statement>`_ to
+return a series of values when iterated over [#]_.
+
+A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is `__iter__
+<http://docs.python.org/library/stdtypes.html#container.__iter__>`_, so we can
+mock this using a `MagicMock`.
+
+Here's an example class with an "iter" method implemented as a generator:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def iter(self):
+ ... for i in [1, 2, 3]:
+ ... yield i
+ ...
+ >>> foo = Foo()
+ >>> list(foo.iter())
+ [1, 2, 3]
+
+
+How would we mock this class, and in particular its "iter" method?
+
+To configure the values returned from the iteration (implicit in the call to
+`list`), we need to configure the object returned by the call to `foo.iter()`.
+
+.. doctest::
+
+ >>> mock_foo = MagicMock()
+ >>> mock_foo.iter.return_value = iter([1, 2, 3])
+ >>> list(mock_foo.iter())
+ [1, 2, 3]
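+
+Alternatively, since the protocol method for iteration is `__iter__`, you can
+configure the magic method on the returned mock directly (a sketch; the
+`return_value` approach above is usually simpler):
+
+.. doctest::
+
+ >>> mock_foo = MagicMock()
+ >>> mock_foo.iter.return_value.__iter__.return_value = iter([1, 2, 3])
+ >>> list(mock_foo.iter())
+ [1, 2, 3]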
+
+.. [#] There are also generator expressions and more `advanced uses
+ <http://www.dabeaz.com/coroutines/index.html>`_ of generators, but we aren't
+ concerned about them here. A very good introduction to generators and how
+ powerful they are is: `Generator Tricks for Systems Programmers
+ <http://www.dabeaz.com/generators/>`_.
+
+
+Applying the same patch to every test method
+============================================
+
+If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. For Python 2.6 or more recent you can use `patch` (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. Test methods are identified as those whose names start
+with `test`:
+
+.. doctest::
+
+ >>> @patch('mymodule.SomeClass')
+ ... class MyTest(TestCase):
+ ...
+ ... def test_one(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def test_two(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def not_a_test(self):
+ ... return 'something'
+ ...
+ >>> MyTest('test_one').test_one()
+ >>> MyTest('test_two').test_two()
+ >>> MyTest('test_two').not_a_test()
+ 'something'
+
+An alternative way of managing patches is to use the :ref:`start-and-stop`.
+These allow you to move the patching into your `setUp` and `tearDown` methods.
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher = patch('mymodule.foo')
+ ... self.mock_foo = self.patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ ... def tearDown(self):
+ ... self.patcher.stop()
+ ...
+ >>> MyTest('test_foo').run()
+
+If you use this technique you must ensure that the patching is "undone" by
+calling `stop`. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. `unittest2
+<http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this simpler:
+
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('mymodule.foo')
+ ... self.addCleanup(patcher.stop)
+ ... self.mock_foo = patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ >>> MyTest('test_foo').run()
+
+
+Mocking Unbound Methods
+=======================
+
+Whilst writing tests today I needed to patch an *unbound method* (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I wanted to make asserts about which objects
+were calling this particular method. The issue is that you can't patch with a
+mock for this, because if you replace an unbound method with a mock it doesn't
+become a bound method when fetched from the instance, and so it doesn't get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The :func:`patch` decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.
+
+If you pass `autospec=True` to patch then it does the patching with a
+*real* function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means, though, is
+that if you use it to patch out an unbound method on a class, the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have `self` passed in as the first argument, which is exactly what I
+wanted:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def foo(self):
+ ... pass
+ ...
+ >>> with patch.object(Foo, 'foo', autospec=True) as mock_foo:
+ ... mock_foo.return_value = 'foo'
+ ... foo = Foo()
+ ... foo.foo()
+ ...
+ 'foo'
+ >>> mock_foo.assert_called_once_with(foo)
+
+If we don't use `autospec=True` then the unbound method is patched out
+with a Mock instance instead, and isn't called with `self`.
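+
+Here's the same example without `autospec=True`: the mock is still called, but
+without `self`, so the assert that succeeds is the one with no arguments:
+
+.. doctest::
+
+ >>> with patch.object(Foo, 'foo') as mock_foo:
+ ... mock_foo.return_value = 'foo'
+ ... foo = Foo()
+ ... foo.foo()
+ ...
+ 'foo'
+ >>> mock_foo.assert_called_once_with()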
+
+
+Checking multiple calls with mock
+=================================
+
+mock has a nice API for making assertions about how your mock objects are used.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.foo_bar.return_value = None
+ >>> mock.foo_bar('baz', spam='eggs')
+ >>> mock.foo_bar.assert_called_with('baz', spam='eggs')
+
+If your mock is only being called once you can use the
+:meth:`assert_called_once_with` method that also asserts that the
+:attr:`call_count` is one.
+
+.. doctest::
+
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ >>> mock.foo_bar()
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Both `assert_called_with` and `assert_called_once_with` make assertions about
+the *most recent* call. If your mock is going to be called several times, and
+you want to make assertions about *all* those calls you can use
+:attr:`~Mock.call_args_list`:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock(4, 5, 6)
+ >>> mock()
+ >>> mock.call_args_list
+ [call(1, 2, 3), call(4, 5, 6), call()]
+
+The :data:`call` helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to `call_args_list`. This
+looks remarkably similar to the repr of the `call_args_list`:
+
+.. doctest::
+
+ >>> expected = [call(1, 2, 3), call(4, 5, 6), call()]
+ >>> mock.call_args_list == expected
+ True
+
+
+Coping with mutable arguments
+=============================
+
+Another situation that is rare, but can bite you, is when your mock is called
+with mutable arguments. `call_args` and `call_args_list` store *references* to the
+mutable arguments. `call_args` and `call_args_list` store *references* to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.
+
+Here's some example code that shows the problem. Imagine the following functions
+defined in 'mymodule'::
+
+ def frob(val):
+ pass
+
+ def grob(val):
+ "First frob and then clear val"
+ frob(val)
+ val.clear()
+
+When we try to test that `grob` calls `frob` with the correct argument, look
+what happens:
+
+.. doctest::
+
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> val
+ set([])
+ >>> mock_frob.assert_called_with(set([6]))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((set([6]),), {})
+ Called with: ((set([]),), {})
+
+One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.
+
+Here's one solution that uses the :attr:`side_effect`
+functionality. If you provide a `side_effect` function for a mock then
+`side_effect` will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I'm using *another* mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> from mock import Mock, patch, DEFAULT
+ >>> def copy_call_args(mock):
+ ... new_mock = Mock()
+ ... def side_effect(*args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... new_mock(*args, **kwargs)
+ ... return DEFAULT
+ ... mock.side_effect = side_effect
+ ... return new_mock
+ ...
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... new_mock = copy_call_args(mock_frob)
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> new_mock.assert_called_with(set([6]))
+ >>> new_mock.call_args
+ call(set([6]))
+
+`copy_call_args` is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The `side_effect` function makes a copy of
+the args and calls our `new_mock` with the copy.
+
+.. note::
+
+ If your mock is only going to be used once there is an easier way of
+ checking arguments at the point they are called. You can simply do the
+ checking inside a `side_effect` function.
+
+ .. doctest::
+
+ >>> def side_effect(arg):
+ ... assert arg == set([6])
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(set([6]))
+ >>> mock(set())
+ Traceback (most recent call last):
+ ...
+ AssertionError
+
+An alternative approach is to create a subclass of `Mock` or `MagicMock` that
+copies (using `copy.deepcopy
+<http://docs.python.org/library/copy.html#copy.deepcopy>`_) the arguments.
+Here's an example implementation:
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> class CopyingMock(MagicMock):
+ ... def __call__(self, *args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... return super(CopyingMock, self).__call__(*args, **kwargs)
+ ...
+ >>> c = CopyingMock(return_value=None)
+ >>> arg = set()
+ >>> c(arg)
+ >>> arg.add(1)
+ >>> c.assert_called_with(set())
+ >>> c.assert_called_with(arg)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(set([1]))
+ Actual call: mock(set([]))
+ >>> c.foo
+ <CopyingMock name='mock.foo' id='...'>
+
+When you subclass `Mock` or `MagicMock`, all dynamically created attributes
+and the `return_value` will use your subclass automatically. That means all
+children of a `CopyingMock` will also have the type `CopyingMock`.
+
+
+Raising exceptions on attribute access
+======================================
+
+You can use :class:`PropertyMock` to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.
+
+Here's an example raising a `ValueError` when the 'foo' attribute is accessed:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(side_effect=ValueError)
+ >>> type(m).foo = p
+ >>> m.foo
+ Traceback (most recent call last):
+ ...
+ ValueError
+
+Because every mock object has its own type, a new subclass of whichever mock
+class you're using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors or whatever you want in fact)
+to `type(mock)` without affecting other mock objects.
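+
+For example, the property attached above doesn't leak onto a freshly created
+mock:
+
+.. doctest::
+
+ >>> other = MagicMock()
+ >>> other.foo
+ <MagicMock name='mock.foo' id='...'>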
+
+
+Multiple calls with different effects
+=====================================
+
+.. note::
+
+ In mock 1.0 the handling of iterable `side_effect` was changed. Any
+ exceptions in the iterable will be raised instead of returned.
+
+Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but returns a response on the second
+call (testing retry behaviour).
+
+One approach is to use a :attr:`side_effect` function that replaces itself. The
+first time it is called the `side_effect` sets a new `side_effect` that will
+be used for the second call. It then raises an exception:
+
+.. doctest::
+
+ >>> def side_effect(*args):
+ ... def second_call(*args):
+ ... return 'response'
+ ... mock.side_effect = second_call
+ ... raise Exception('boom')
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:
+
+.. doctest::
+
+ >>> returns = [Exception('boom'), 'response']
+ >>> def side_effect(*args):
+ ... result = returns.pop(0)
+ ... if isinstance(result, Exception):
+ ... raise result
+ ... return result
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Which approach you prefer is a matter of taste. The first approach is actually
+a line shorter but maybe the second approach is more readable.
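+
+As the note at the start of this section says, mock 1.0 raises any exceptions
+found in an iterable `side_effect`, so a third option is to pass the list
+straight in:
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=[Exception('boom'), 'response'])
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'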
+
+
+Nesting Patches
+===============
+
+Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def test_foo(self):
+ ... with patch('mymodule.Foo') as mock_foo:
+ ... with patch('mymodule.Bar') as mock_bar:
+ ... with patch('mymodule.Spam') as mock_spam:
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').test_foo()
+ >>> assert mymodule.Foo is original
+
+With unittest2_ `cleanup` functions and the :ref:`start-and-stop` we can
+achieve the same effect without the nested indentation. A simple helper
+method, `create_patch`, puts the patch in place and returns the created mock
+for us:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def create_patch(self, name):
+ ... patcher = patch(name)
+ ... thing = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ... return thing
+ ...
+ ... def test_foo(self):
+ ... mock_foo = self.create_patch('mymodule.Foo')
+ ... mock_bar = self.create_patch('mymodule.Bar')
+ ... mock_spam = self.create_patch('mymodule.Spam')
+ ...
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').run()
+ >>> assert mymodule.Foo is original
+
+
+Mocking a dictionary with MagicMock
+===================================
+
+You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.
+
+We can do this with :class:`MagicMock`, which will behave like a dictionary,
+and using :data:`~Mock.side_effect` to delegate dictionary access to a real
+underlying dictionary that is under our control.
+
+When the `__getitem__` and `__setitem__` methods of our `MagicMock` are called
+(normal dictionary access) then `side_effect` is called with the key (and in
+the case of `__setitem__` the value too). We can also control what is returned.
+
+After the `MagicMock` has been used we can use attributes like
+:data:`~Mock.call_args_list` to assert about how the dictionary was used:
+
+.. doctest::
+
+ >>> my_dict = {'a': 1, 'b': 2, 'c': 3}
+ >>> def getitem(name):
+ ... return my_dict[name]
+ ...
+ >>> def setitem(name, val):
+ ... my_dict[name] = val
+ ...
+ >>> mock = MagicMock()
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+.. note::
+
+ An alternative to using `MagicMock` is to use `Mock` and *only* provide
+ the magic methods you specifically want:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__setitem__ = Mock(side_effect=setitem)
+ >>> mock.__getitem__ = Mock(side_effect=getitem)
+
+ A *third* option is to use `MagicMock` but passing in `dict` as the `spec`
+ (or `spec_set`) argument so that the `MagicMock` created only has
+ dictionary magic methods available:
+
+ .. doctest::
+
+ >>> mock = MagicMock(spec_set=dict)
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+With these side effect functions in place, the `mock` will behave like a normal
+dictionary but recording the access. It even raises a `KeyError` if you try
+to access a key that doesn't exist.
+
+.. doctest::
+
+ >>> mock['a']
+ 1
+ >>> mock['c']
+ 3
+ >>> mock['d']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'd'
+ >>> mock['b'] = 'fish'
+ >>> mock['d'] = 'eggs'
+ >>> mock['b']
+ 'fish'
+ >>> mock['d']
+ 'eggs'
+
+After it has been used you can make assertions about the access using the normal
+mock methods and attributes:
+
+.. doctest::
+
+ >>> mock.__getitem__.call_args_list
+ [call('a'), call('c'), call('d'), call('b'), call('d')]
+ >>> mock.__setitem__.call_args_list
+ [call('b', 'fish'), call('d', 'eggs')]
+ >>> my_dict
+ {'a': 1, 'c': 3, 'b': 'fish', 'd': 'eggs'}
+
+
+Mock subclasses and their attributes
+====================================
+
+There are various reasons why you might want to subclass `Mock`. One reason
+might be to add helper methods. Here's a silly example:
+
+.. doctest::
+
+ >>> class MyMock(MagicMock):
+ ... def has_been_called(self):
+ ... return self.called
+ ...
+ >>> mymock = MyMock(return_value=None)
+ >>> mymock
+ <MyMock id='...'>
+ >>> mymock.has_been_called()
+ False
+ >>> mymock()
+ >>> mymock.has_been_called()
+ True
+
+The standard behaviour for `Mock` instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that `Mock` attributes are `Mocks` and `MagicMock` attributes are `MagicMocks`
+[#]_. So if you're subclassing to add helper methods then they'll also be
+available on the attributes and return value mock of instances of your
+subclass.
+
+.. doctest::
+
+ >>> mymock.foo
+ <MyMock name='mock.foo' id='...'>
+ >>> mymock.foo.has_been_called()
+ False
+ >>> mymock.foo()
+ <MyMock name='mock.foo()' id='...'>
+ >>> mymock.foo.has_been_called()
+ True
+
+Sometimes this is inconvenient. For example, `one user
+<https://code.google.com/p/mock/issues/detail?id=105>`_ is subclassing mock to
+create a `Twisted adaptor
+<http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html>`_.
+Having this applied to attributes too actually causes errors.
+
+`Mock` (in all its flavours) uses a method called `_get_child_mock` to create
+these "sub-mocks" for attributes and return values. You can prevent your
+subclass being used for attributes by overriding this method. The signature is
+that it takes arbitrary keyword arguments (`**kwargs`) which are then passed
+onto the mock constructor:
+
+.. doctest::
+
+ >>> class Subclass(MagicMock):
+ ... def _get_child_mock(self, **kwargs):
+ ... return MagicMock(**kwargs)
+ ...
+ >>> mymock = Subclass()
+ >>> mymock.foo
+ <MagicMock name='mock.foo' id='...'>
+ >>> assert isinstance(mymock, Subclass)
+ >>> assert not isinstance(mymock.foo, Subclass)
+ >>> assert not isinstance(mymock(), Subclass)
+
+.. [#] An exception to this rule are the non-callable mocks. Attributes use the
+ callable variant because otherwise non-callable mocks couldn't have callable
+ methods.
+
+
+Mocking imports with patch.dict
+===============================
+
+One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren't using an object from
+the module namespace that we can patch out.
+
+Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies (for which there is *usually* a much better solution:
+refactor the code) or to avoid "up front costs" by delaying the import. The
+latter can also be solved in better ways than an unconditional local import:
+store the module as a class or module attribute and only do the import on
+first use.
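+
+For example, a sketch of the class attribute approach (the names here are
+purely illustrative)::
+
+ class Thing(object):
+ _json = None
+
+ def method(self):
+ if Thing._json is None:
+ # do the import on first use and cache the module
+ import json
+ Thing._json = json
+ return Thing._json.dumps({'key': 'value'})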
+
+That aside there is a way to use `mock` to affect the results of an import.
+Importing fetches an *object* from the `sys.modules` dictionary. Note that it
+fetches an *object*, which need not be a module. Importing a module for the
+first time results in a module object being put in `sys.modules`, so usually
+when you import something you get a module back. This need not be the case
+however.
+
+This means you can use :func:`patch.dict` to *temporarily* put a mock in place
+in `sys.modules`. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or `patcher.stop()` is called) then whatever was there
+previously will be restored safely.
+
+Here's an example that mocks out the 'fooble' module.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... import fooble
+ ... fooble.blob()
+ ...
+ <Mock name='mock.blob()' id='...'>
+ >>> assert 'fooble' not in sys.modules
+ >>> mock.blob.assert_called_once_with()
+
+As you can see the `import fooble` succeeds, but on exit there is no 'fooble'
+left in `sys.modules`.
+
+This also works for the `from module import name` form:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... from fooble import blob
+ ... blob.blip()
+ ...
+ <Mock name='mock.blob.blip()' id='...'>
+ >>> mock.blob.blip.assert_called_once_with()
+
+With slightly more work you can also mock package imports:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> modules = {'package': mock, 'package.module': mock.module}
+ >>> with patch.dict('sys.modules', modules):
+ ... from package.module import fooble
+ ... fooble()
+ ...
+ <Mock name='mock.module.fooble()' id='...'>
+ >>> mock.module.fooble.assert_called_once_with()
+
+
+Tracking order of calls and less verbose call assertions
+========================================================
+
+The :class:`Mock` class allows you to track the *order* of method calls on
+your mock objects through the :attr:`~Mock.method_calls` attribute. This
+doesn't allow you to track the order of calls between separate mock objects,
+however we can use :attr:`~Mock.mock_calls` to achieve the same effect.
+
+Because mocks track calls to child mocks in `mock_calls`, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the `mock_calls` of the parent:
+
+.. doctest::
+
+ >>> manager = Mock()
+ >>> mock_foo = manager.foo
+ >>> mock_bar = manager.bar
+
+ >>> mock_foo.something()
+ <Mock name='mock.foo.something()' id='...'>
+ >>> mock_bar.other.thing()
+ <Mock name='mock.bar.other.thing()' id='...'>
+
+ >>> manager.mock_calls
+ [call.foo.something(), call.bar.other.thing()]
+
+We can then assert about the calls, including the order, by comparing with
+the `mock_calls` attribute on the manager mock:
+
+.. doctest::
+
+ >>> expected_calls = [call.foo.something(), call.bar.other.thing()]
+ >>> manager.mock_calls == expected_calls
+ True
+
+If `patch` is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the :meth:`~Mock.attach_mock` method. After
+attaching, calls will be recorded in the `mock_calls` of the manager.
+
+.. doctest::
+
+ >>> manager = MagicMock()
+ >>> with patch('mymodule.Class1') as MockClass1:
+ ... with patch('mymodule.Class2') as MockClass2:
+ ... manager.attach_mock(MockClass1, 'MockClass1')
+ ... manager.attach_mock(MockClass2, 'MockClass2')
+ ... MockClass1().foo()
+ ... MockClass2().bar()
+ ...
+ <MagicMock name='mock.MockClass1().foo()' id='...'>
+ <MagicMock name='mock.MockClass2().bar()' id='...'>
+ >>> manager.mock_calls
+ [call.MockClass1(),
+ call.MockClass1().foo(),
+ call.MockClass2(),
+ call.MockClass2().bar()]
+
+If many calls have been made, but you're only interested in a particular
+sequence of them then an alternative is to use the
+:meth:`~Mock.assert_has_calls` method. This takes a list of calls (constructed
+with the :data:`call` object). If that sequence of calls is in
+:attr:`~Mock.mock_calls` then the assert succeeds.
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m().foo().bar().baz()
+ <MagicMock name='mock().foo().bar().baz()' id='...'>
+ >>> m.one().two().three()
+ <MagicMock name='mock.one().two().three()' id='...'>
+ >>> calls = call.one().two().three().call_list()
+ >>> m.assert_has_calls(calls)
+
+Even though the calls from the chained call `m.one().two().three()` aren't the
+only calls that have been made to the mock, the assert still succeeds.
+
+Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about *some* of those calls. You may not even care about the
+order. In this case you can pass `any_order=True` to `assert_has_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1), m.two(2, 3), m.seven(7), m.fifty('50')
+ (...)
+ >>> calls = [call.fifty('50'), call(1), call.seven(7)]
+ >>> m.assert_has_calls(calls, any_order=True)
+
+
+More complex argument matching
+==============================
+
+Using the same basic concept as `ANY` we can implement matchers to do more
+complex assertions on objects used as arguments to mocks.
+
+Suppose we expect some object to be passed to a mock that by default
+compares equal based on object identity (which is the Python default for user
+defined classes). To use :meth:`~Mock.assert_called_with` we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.
+
+You can see in this example how a 'standard' call to `assert_called_with` isn't
+sufficient:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def __init__(self, a, b):
+ ... self.a, self.b = a, b
+ ...
+ >>> mock = Mock(return_value=None)
+ >>> mock(Foo(1, 2))
+ >>> mock.assert_called_with(Foo(1, 2))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: call(<__main__.Foo object at 0x...>)
+ Actual call: call(<__main__.Foo object at 0x...>)
+
+A comparison function for our `Foo` class might look something like this:
+
+.. doctest::
+
+ >>> def compare(self, other):
+ ... if not type(self) == type(other):
+ ... return False
+ ... if self.a != other.a:
+ ... return False
+ ... if self.b != other.b:
+ ... return False
+ ... return True
+ ...
+
+And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:
+
+.. doctest::
+
+ >>> class Matcher(object):
+ ... def __init__(self, compare, some_obj):
+ ... self.compare = compare
+ ... self.some_obj = some_obj
+ ... def __eq__(self, other):
+ ... return self.compare(self.some_obj, other)
+ ...
+
+Putting all this together:
+
+.. doctest::
+
+ >>> match_foo = Matcher(compare, Foo(1, 2))
+ >>> mock.assert_called_with(match_foo)
+
+The `Matcher` is instantiated with our compare function and the `Foo` object
+we want to compare against. In `assert_called_with` the `Matcher` equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+`assert_called_with` passes, and if they don't an `AssertionError` is raised:
+
+.. doctest::
+
+ >>> match_wrong = Matcher(compare, Foo(3, 4))
+ >>> mock.assert_called_with(match_wrong)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((<Matcher object at 0x...>,), {})
+ Called with: ((<Foo object at 0x...>,), {})
+
+With a bit of tweaking you could have the comparison function raise the
+`AssertionError` directly and provide a more useful failure message.
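+
+For example, one possible variation of the comparison function::
+
+ def compare(self, other):
+ if not type(self) == type(other):
+ raise AssertionError('%r is not a Foo' % (other,))
+ if self.a != other.a:
+ raise AssertionError('a: %r != %r' % (self.a, other.a))
+ if self.b != other.b:
+ raise AssertionError('b: %r != %r' % (self.b, other.b))
+ return True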
+
+As of version 1.5, the Python testing library `PyHamcrest
+<http://pypi.python.org/pypi/PyHamcrest>`_ provides similar functionality,
+that may be useful here, in the form of its equality matcher
+(`hamcrest.library.integration.match_equality
+<http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality>`_).
+
+
+Less verbose configuration of mock objects
+==========================================
+
+This recipe, for easier configuration of mock objects, is now part of `Mock`.
+See the :meth:`~Mock.configure_mock` method.
+
+
+Matching any argument in assertions
+===================================
+
+This example is now built in to mock. See :data:`ANY`.
+
+
+Mocking Properties
+==================
+
+This example is now built in to mock. See :class:`PropertyMock`.
+
+
+Mocking open
+============
+
+This example is now built in to mock. See :func:`mock_open`.
+
+
+Mocks without some attributes
+=============================
+
+This example is now built in to mock. See :ref:`deleting-attributes`.
diff --git a/python/mock-1.0.0/docs/getting-started.txt b/python/mock-1.0.0/docs/getting-started.txt
new file mode 100644
index 000000000..1b5d289eb
--- /dev/null
+++ b/python/mock-1.0.0/docs/getting-started.txt
@@ -0,0 +1,479 @@
+===========================
+ Getting Started with Mock
+===========================
+
+.. _getting-started:
+
+.. index:: Getting Started
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = module = package.module
+ sys.modules['module'] = package.module
+
+
+Using Mock
+==========
+
+Mock Patching Methods
+---------------------
+
+Common uses for :class:`Mock` objects include:
+
+* Patching methods
+* Recording method calls on objects
+
+You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:
+
+.. doctest::
+
+ >>> real = SomeClass()
+ >>> real.method = MagicMock(name='method')
+ >>> real.method(3, 4, 5, key='value')
+ <MagicMock name='method()' id='...'>
+
+Once our mock has been used (`real.method` in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.
+
+.. note::
+
+ In most of these examples the :class:`Mock` and :class:`MagicMock` classes
+ are interchangeable. As the `MagicMock` is the more capable class it makes
+ a sensible one to use by default.
+
+Once the mock has been called its :attr:`~Mock.called` attribute is set to
+`True`. More importantly we can use the :meth:`~Mock.assert_called_with` or
+:meth:`~Mock.assert_called_once_with` method to check that it was called with
+the correct arguments.
+
+This example tests that calling `ProductionClass().method` results in a call to
+the `something` method:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... self.something(1, 2, 3)
+ ... def something(self, a, b, c):
+ ... pass
+ ...
+ >>> real = ProductionClass()
+ >>> real.something = MagicMock()
+ >>> real.method()
+ >>> real.something.assert_called_once_with(1, 2, 3)
+
+
+
+Mock for Method Calls on an Object
+----------------------------------
+
+In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.
+
+The simple `ProductionClass` below has a `closer` method. If it is called with
+an object then it calls `close` on it.
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def closer(self, something):
+ ... something.close()
+ ...
+
+So to test it we need to pass in an object with a `close` method and check
+that it was called correctly.
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> mock = Mock()
+ >>> real.closer(mock)
+ >>> mock.close.assert_called_with()
+
+We don't have to do any work to provide the 'close' method on our mock.
+Accessing close creates it. So, if 'close' hasn't already been called then
+accessing it in the test will create it, but :meth:`~Mock.assert_called_with`
+will raise a failure exception.
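+
+For example (the exact wording of the failure message may vary between
+versions):
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.close.assert_called_with()
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: close()
+ Not called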
+
+
+Mocking Classes
+---------------
+
+A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, that class is replaced with a mock. Instances
+are created by *calling the class*. This means you access the "mock instance"
+by looking at the return value of the mocked class.
+
+In the example below we have a function `some_function` that instantiates `Foo`
+and calls a method on it. The call to `patch` replaces the class `Foo` with a
+mock. The `Foo` instance is the result of calling the mock, so it is configured
+by modifying the mock :attr:`~Mock.return_value`.
+
+.. doctest::
+
+ >>> def some_function():
+ ... instance = module.Foo()
+ ... return instance.method()
+ ...
+ >>> with patch('module.Foo') as mock:
+ ... instance = mock.return_value
+ ... instance.method.return_value = 'the result'
+ ... result = some_function()
+ ... assert result == 'the result'
+
+
+Naming your mocks
+-----------------
+
+It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:
+
+.. doctest::
+
+ >>> mock = MagicMock(name='foo')
+ >>> mock
+ <MagicMock name='foo' id='...'>
+ >>> mock.method
+ <MagicMock name='foo.method' id='...'>
+
+
+Tracking all Calls
+------------------
+
+Often you want to track more than a single call to a method. The
+:attr:`~Mock.mock_calls` attribute records all calls
+to child attributes of the mock - and also to their children.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.method()
+ <MagicMock name='mock.method()' id='...'>
+ >>> mock.attribute.method(10, x=53)
+ <MagicMock name='mock.attribute.method()' id='...'>
+ >>> mock.mock_calls
+ [call.method(), call.attribute.method(10, x=53)]
+
+If you make an assertion about `mock_calls` and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.
+
+You use the :data:`call` object to construct lists for comparing with
+`mock_calls`:
+
+.. doctest::
+
+ >>> expected = [call.method(), call.attribute.method(10, x=53)]
+ >>> mock.mock_calls == expected
+ True
+
+
+Setting Return Values and Attributes
+------------------------------------
+
+Setting the return values on a mock object is trivially easy:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 3
+ >>> mock()
+ 3
+
+Of course you can do the same for methods on the mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method.return_value = 3
+ >>> mock.method()
+ 3
+
+The return value can also be set in the constructor:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock()
+ 3
+
+If you need an attribute setting on your mock, just do it:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.x = 3
+ >>> mock.x
+ 3
+
+Sometimes you want to mock up a more complex situation, like for example
+`mock.connection.cursor().execute("SELECT 1")`. If we wanted this call to
+return a list, then we have to configure the result of the nested call.
+
+We can use :data:`call` to construct the set of calls in a "chained call" like
+this for easy assertion afterwards:
+
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> cursor = mock.connection.cursor.return_value
+ >>> cursor.execute.return_value = ['foo']
+ >>> mock.connection.cursor().execute("SELECT 1")
+ ['foo']
+ >>> expected = call.connection.cursor().execute("SELECT 1").call_list()
+ >>> mock.mock_calls
+ [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]
+ >>> mock.mock_calls == expected
+ True
+
+It is the call to `.call_list()` that turns our call object into a list of
+calls representing the chained calls.
+
+
+
+Raising exceptions with mocks
+-----------------------------
+
+A useful attribute is :attr:`~Mock.side_effect`. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=Exception('Boom!'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+
+Side effect functions and iterables
+-----------------------------------
+
+`side_effect` can also be set to a function or an iterable. The use case for
+`side_effect` as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+`side_effect` to an iterable every call to the mock returns the next value
+from the iterable:
+
+.. doctest::
+
+ >>> mock = MagicMock(side_effect=[4, 5, 6])
+ >>> mock()
+ 4
+ >>> mock()
+ 5
+ >>> mock()
+ 6
+
+
+For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, `side_effect` can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:
+
+.. doctest::
+
+ >>> vals = {(1, 2): 1, (2, 3): 2}
+ >>> def side_effect(*args):
+ ... return vals[args]
+ ...
+ >>> mock = MagicMock(side_effect=side_effect)
+ >>> mock(1, 2)
+ 1
+ >>> mock(2, 3)
+ 2
+
+
+Creating a Mock from an Existing Object
+---------------------------------------
+
+One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than your real code. Suppose you have a
+class that implements `some_method`. In a test for another class, you
+provide a mock of this object that *also* provides `some_method`. If later
+you refactor the first class, so that it no longer has `some_method` - then
+your tests will continue to pass even though your code is now broken!
+
+`Mock` allows you to provide an object as a specification for the mock,
+using the `spec` keyword argument. Accessing methods / attributes on the
+mock that don't exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> mock.old_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: object has no attribute 'old_method'
+
+If you want a stronger form of specification that prevents the setting
+of arbitrary attributes as well as the getting of them then you can use
+`spec_set` instead of `spec`.
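+
+For example (a minimal sketch; the exact error message may differ):
+
+.. doctest::
+
+ >>> mock = Mock(spec_set=SomeClass)
+ >>> mock.old_attribute = 3
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'old_attribute'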
+
+
+
+Patch Decorators
+================
+
+.. note::
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+
+A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test or the patch will persist into other
+tests and cause hard to diagnose problems.
+
+mock provides three convenient decorators for this: `patch`, `patch.object` and
+`patch.dict`. `patch` takes a single string, of the form
+`package.module.Class.attribute` to specify the attribute you are patching. It
+also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. `patch.object` takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.
+
+`patch.object`:
+
+.. doctest::
+
+ >>> original = SomeClass.attribute
+ >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test():
+ ... assert SomeClass.attribute == sentinel.attribute
+ ...
+ >>> test()
+ >>> assert SomeClass.attribute == original
+
+ >>> @patch('package.module.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import attribute
+ ... assert attribute is sentinel.attribute
+ ...
+ >>> test()
+
+If you are patching a module (including `__builtin__`) then use `patch`
+instead of `patch.object`:
+
+.. doctest::
+
+ >>> mock = MagicMock(return_value=sentinel.file_handle)
+ >>> with patch('__builtin__.open', mock):
+ ... handle = open('filename', 'r')
+ ...
+ >>> mock.assert_called_with('filename', 'r')
+ >>> assert handle == sentinel.file_handle, "incorrect file handle returned"
+
+The module name can be 'dotted', in the form `package.module` if needed:
+
+.. doctest::
+
+ >>> @patch('package.module.ClassName.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import ClassName
+ ... assert ClassName.attribute == sentinel.attribute
+ ...
+ >>> test()
+
+A nice pattern is to actually decorate test methods themselves:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test_something(self):
+ ... self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...
+ >>> original = SomeClass.attribute
+ >>> MyTest('test_something').test_something()
+ >>> assert SomeClass.attribute == original
+
+If you want to patch with a Mock, you can use `patch` with only one argument
+(or `patch.object` with two arguments). The mock will be created for you and
+passed into the test function / method:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test_something(self, mock_method):
+ ... SomeClass.static_method()
+ ... mock_method.assert_called_with()
+ ...
+ >>> MyTest('test_something').test_something()
+
+You can stack up multiple patch decorators using this pattern:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch('package.module.ClassName1')
+ ... @patch('package.module.ClassName2')
+ ... def test_something(self, MockClass2, MockClass1):
+ ... self.assertTrue(package.module.ClassName1 is MockClass1)
+ ... self.assertTrue(package.module.ClassName2 is MockClass2)
+ ...
+ >>> MyTest('test_something').test_something()
+
+When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal *Python* order in
+which decorators are applied). This means from the bottom up, so in the
+example above the mock for `package.module.ClassName2` is passed in first.
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+`patch`, `patch.object` and `patch.dict` can all be used as context managers.
+
+Where you use `patch` to create a mock for you, you can get a reference to the
+mock using the "as" form of the with statement:
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_with(1, 2, 3)
+
+
+As an alternative `patch`, `patch.object` and `patch.dict` can be used as
+class decorators. When used in this way it is the same as applying the
+decorator individually to every method whose name starts with "test".
+
+For some more advanced examples, see the :ref:`further-examples` page.
diff --git a/python/mock-1.0.0/docs/helpers.txt b/python/mock-1.0.0/docs/helpers.txt
new file mode 100644
index 000000000..571b71d5e
--- /dev/null
+++ b/python/mock-1.0.0/docs/helpers.txt
@@ -0,0 +1,583 @@
+=========
+ Helpers
+=========
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ mock.FILTER_DIR = True
+ from pprint import pprint as pp
+ original_dir = dir
+ def dir(obj):
+ pp(original_dir(obj))
+
+ import urllib2
+ __main__.urllib2 = urllib2
+
+.. testcleanup::
+
+ dir = original_dir
+ mock.FILTER_DIR = True
+
+
+
+call
+====
+
+.. function:: call(*args, **kwargs)
+
+ `call` is a helper object for making simpler assertions, for comparing
+ with :attr:`~Mock.call_args`, :attr:`~Mock.call_args_list`,
+ :attr:`~Mock.mock_calls` and :attr:`~Mock.method_calls`. `call` can also be
+ used with :meth:`~Mock.assert_has_calls`.
+
+ .. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1, 2, a='foo', b='bar')
+ >>> m()
+ >>> m.call_args_list == [call(1, 2, a='foo', b='bar'), call()]
+ True
+
+.. method:: call.call_list()
+
+ For a call object that represents multiple calls, `call_list`
+ returns a list of all the intermediate calls as well as the
+ final call.
+
+`call_list` is particularly useful for making assertions on "chained calls". A
+chained call is multiple calls on a single line of code. This results in
+multiple entries in :attr:`~Mock.mock_calls` on a mock. Manually constructing
+the sequence of calls can be tedious.
+
+:meth:`~call.call_list` can construct the sequence of calls from the same
+chained call:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1).method(arg='foo').other('bar')(2.0)
+ <MagicMock name='mock().method().other()()' id='...'>
+ >>> kall = call(1).method(arg='foo').other('bar')(2.0)
+ >>> kall.call_list()
+ [call(1),
+ call().method(arg='foo'),
+ call().method().other('bar'),
+ call().method().other()(2.0)]
+ >>> m.mock_calls == kall.call_list()
+ True
+
+.. _calls-as-tuples:
+
+A `call` object is either a tuple of (positional args, keyword args) or
+(name, positional args, keyword args) depending on how it was constructed. When
+you construct them yourself this isn't particularly interesting, but the `call`
+objects that are in the :attr:`Mock.call_args`, :attr:`Mock.call_args_list` and
+:attr:`Mock.mock_calls` attributes can be introspected to get at the individual
+arguments they contain.
+
+The `call` objects in :attr:`Mock.call_args` and :attr:`Mock.call_args_list`
+are two-tuples of (positional args, keyword args) whereas the `call` objects
+in :attr:`Mock.mock_calls`, along with ones you construct yourself, are
+three-tuples of (name, positional args, keyword args).
+
+You can use their "tupleness" to pull out the individual arguments for more
+complex introspection and assertions. The positional arguments are a tuple
+(an empty tuple if there are no positional arguments) and the keyword
+arguments are a dictionary:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1, 2, 3, arg='one', arg2='two')
+ >>> kall = m.call_args
+ >>> args, kwargs = kall
+ >>> args
+ (1, 2, 3)
+ >>> kwargs
+ {'arg2': 'two', 'arg': 'one'}
+ >>> args is kall[0]
+ True
+ >>> kwargs is kall[1]
+ True
+
+ >>> m = MagicMock()
+ >>> m.foo(4, 5, 6, arg='two', arg2='three')
+ <MagicMock name='mock.foo()' id='...'>
+ >>> kall = m.mock_calls[0]
+ >>> name, args, kwargs = kall
+ >>> name
+ 'foo'
+ >>> args
+ (4, 5, 6)
+ >>> kwargs
+ {'arg2': 'three', 'arg': 'two'}
+ >>> name is m.mock_calls[0][0]
+ True
+
+
+create_autospec
+===============
+
+.. function:: create_autospec(spec, spec_set=False, instance=False, **kwargs)
+
+ Create a mock object using another object as a spec. Attributes on the
+ mock will use the corresponding attribute on the `spec` object as their
+ spec.
+
+ Functions or methods being mocked will have their arguments checked to
+ ensure that they are called with the correct signature.
+
+ If `spec_set` is `True` then attempting to set attributes that don't exist
+ on the spec object will raise an `AttributeError`.
+
+ If a class is used as a spec then the return value of the mock (the
+ instance of the class) will have the same spec. You can use a class as the
+ spec for an instance object by passing `instance=True`. The returned mock
+ will only be callable if instances of the mock are callable.
+
+ `create_autospec` also takes arbitrary keyword arguments that are passed to
+ the constructor of the created mock.
+
+See :ref:`auto-speccing` for examples of how to use auto-speccing with
+`create_autospec` and the `autospec` argument to :func:`patch`.
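+
+For example, a small sketch with a plain function (`add` and `mock_add` are
+illustrative names, and the exact wording of the `TypeError` may vary):
+
+.. doctest::
+
+ >>> def add(a, b):
+ ... return a + b
+ ...
+ >>> mock_add = create_autospec(add, return_value=3)
+ >>> mock_add(1, 2)
+ 3
+ >>> mock_add(1)
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 2 arguments (1 given)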
+
+
+ANY
+===
+
+.. data:: ANY
+
+Sometimes you may need to make assertions about *some* of the arguments in a
+call to mock, but either not care about some of the arguments or want to pull
+them individually out of :attr:`~Mock.call_args` and make more complex
+assertions on them.
+
+To ignore certain arguments you can pass in objects that compare equal to
+*everything*. Calls to :meth:`~Mock.assert_called_with` and
+:meth:`~Mock.assert_called_once_with` will then succeed no matter what was
+passed in.
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar=object())
+ >>> mock.assert_called_once_with('foo', bar=ANY)
+
+`ANY` can also be used in comparisons with call lists like
+:attr:`~Mock.mock_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=None)
+ >>> m(1)
+ >>> m(1, 2)
+ >>> m(object())
+ >>> m.mock_calls == [call(1), call(1, 2), ANY]
+ True
+
+
+
+FILTER_DIR
+==========
+
+.. data:: FILTER_DIR
+
+`FILTER_DIR` is a module level variable that controls the way mock objects
+respond to `dir` (only for Python 2.6 or more recent). The default is `True`,
+which uses the filtering described below, to only show useful members. If you
+dislike this filtering, or need to switch it off for diagnostic purposes, then
+set `mock.FILTER_DIR = False`.
+
+With filtering on, `dir(some_mock)` shows only useful attributes and will
+include any dynamically created attributes that wouldn't normally be shown.
+If the mock was created with a `spec` (or `autospec` of course) then all the
+attributes from the original are shown, even if they haven't been accessed
+yet:
+
+.. doctest::
+
+ >>> dir(Mock())
+ ['assert_any_call',
+ 'assert_called_once_with',
+ 'assert_called_with',
+ 'assert_has_calls',
+ 'attach_mock',
+ ...
+ >>> import urllib2
+ >>> dir(Mock(spec=urllib2))
+ ['AbstractBasicAuthHandler',
+ 'AbstractDigestAuthHandler',
+ 'AbstractHTTPHandler',
+ 'BaseHandler',
+ ...
+
+Many of the not-very-useful (private to `Mock` rather than the thing being
+mocked) underscore and double underscore prefixed attributes have been
+filtered from the result of calling `dir` on a `Mock`. If you dislike this
+behaviour you can switch it off by setting the module level switch
+`FILTER_DIR`:
+
+.. doctest::
+
+ >>> import mock
+ >>> mock.FILTER_DIR = False
+ >>> dir(mock.Mock())
+ ['_NonCallableMock__get_return_value',
+ '_NonCallableMock__get_side_effect',
+ '_NonCallableMock__return_value_doc',
+ '_NonCallableMock__set_return_value',
+ '_NonCallableMock__set_side_effect',
+ '__call__',
+ '__class__',
+ ...
+
+Alternatively you can just use `vars(my_mock)` (instance members) and
+`dir(type(my_mock))` (type members) to bypass the filtering irrespective of
+`mock.FILTER_DIR`.
+
+
+mock_open
+=========
+
+.. function:: mock_open(mock=None, read_data=None)
+
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+ `read_data` is a string for the `read` method of the file handle to return.
+ This is an empty string by default.
+
+Using `open` as a context manager is a great way to ensure your file handles
+are closed properly and is becoming common::
+
+ with open('/some/path', 'w') as f:
+ f.write('something')
+
+The issue is that even if you mock out the call to `open` it is the
+*returned object* that is used as a context manager (and has `__enter__` and
+`__exit__` called).
+
+Mocking context managers with a :class:`MagicMock` is common enough and fiddly
+enough that a helper function is useful.
+
+.. doctest::
+
+ >>> from mock import mock_open
+ >>> m = mock_open()
+ >>> with patch('__main__.open', m, create=True):
+ ... with open('foo', 'w') as h:
+ ... h.write('some stuff')
+ ...
+ >>> m.mock_calls
+ [call('foo', 'w'),
+ call().__enter__(),
+ call().write('some stuff'),
+ call().__exit__(None, None, None)]
+ >>> m.assert_called_once_with('foo', 'w')
+ >>> handle = m()
+ >>> handle.write.assert_called_once_with('some stuff')
+
+And for reading files:
+
+.. doctest::
+
+ >>> with patch('__main__.open', mock_open(read_data='bibble'), create=True) as m:
+ ... with open('foo') as h:
+ ... result = h.read()
+ ...
+ >>> m.assert_called_once_with('foo')
+ >>> assert result == 'bibble'
+
+
+.. _auto-speccing:
+
+Autospeccing
+============
+
+Autospeccing is based on the existing `spec` feature of mock. It limits the
+api of mocks to the api of an original object (the spec), but it is recursive
+(implemented lazily) so that attributes of mocks only have the same api as
+the attributes of the spec. In addition mocked functions / methods have the
+same call signature as the original so they raise a `TypeError` if they are
+called incorrectly.
+
+Before I explain how auto-speccing works, here's why it is needed.
+
+`Mock` is a very powerful and flexible object, but it suffers from two flaws
+when used to mock out objects from a system under test. One of these flaws is
+specific to the `Mock` api and the other is a more general problem with using
+mock objects.
+
+First the problem specific to `Mock`. `Mock` has two assert methods that are
+extremely handy: :meth:`~Mock.assert_called_with` and
+:meth:`~Mock.assert_called_once_with`.
+
+.. doctest::
+
+ >>> mock = Mock(name='Thing', return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock.assert_called_once_with(1, 2, 3)
+ >>> mock(1, 2, 3)
+ >>> mock.assert_called_once_with(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Because mocks auto-create attributes on demand, and allow you to call them
+with arbitrary arguments, if you misspell one of these assert methods then
+your assertion is gone:
+
+.. code-block:: pycon
+
+ >>> mock = Mock(name='Thing', return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock.assret_called_once_with(4, 5, 6)
+
+Your tests can pass silently and incorrectly because of the typo.
+
+The second issue is more general to mocking. If you refactor some of your
+code, rename members and so on, any tests for code that is still using the
+*old api* but uses mocks instead of the real objects will still pass. This
+means your tests can all pass even though your code is broken.
+
+Note that this is another reason why you need integration tests as well as
+unit tests. Testing everything in isolation is all fine and dandy, but if you
+don't test how your units are "wired together" there is still lots of room
+for bugs that integration tests might have caught.
+
+`mock` already provides a feature to help with this, called speccing. If you
+use a class or instance as the `spec` for a mock then you can only access
+attributes on the mock that exist on the real class:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> mock = Mock(spec=urllib2.Request)
+ >>> mock.assret_called_with
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'assret_called_with'
+
+The spec only applies to the mock itself, so we still have the same issue
+with any methods on the mock:
+
+.. code-block:: pycon
+
+ >>> mock.has_data()
+ <mock.Mock object at 0x...>
+ >>> mock.has_data.assret_called_with()
+
+Auto-speccing solves this problem. You can either pass `autospec=True` to
+`patch` / `patch.object` or use the `create_autospec` function to create a
+mock with a spec. If you use the `autospec=True` argument to `patch` then the
+object that is being replaced will be used as the spec object. Because the
+speccing is done "lazily" (the spec is created as attributes on the mock are
+accessed) you can use it with very complex or deeply nested objects (like
+modules that import modules that import modules) without a big performance
+hit.
+
+Here's an example of it in use:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> patcher = patch('__main__.urllib2', autospec=True)
+ >>> mock_urllib2 = patcher.start()
+ >>> urllib2 is mock_urllib2
+ True
+ >>> urllib2.Request
+ <MagicMock name='urllib2.Request' spec='Request' id='...'>
+
+You can see that `urllib2.Request` has a spec. `urllib2.Request` takes two
+arguments in the constructor (one of which is `self`). Here's what happens if
+we try to call it incorrectly:
+
+.. doctest::
+
+ >>> req = urllib2.Request()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (1 given)
+
+The spec also applies to instantiated classes (i.e. the return value of
+specced mocks):
+
+.. doctest::
+
+ >>> req = urllib2.Request('foo')
+ >>> req
+ <NonCallableMagicMock name='urllib2.Request()' spec='Request' id='...'>
+
+`Request` objects are not callable, so the return value of instantiating our
+mocked out `urllib2.Request` is a non-callable mock. With the spec in place
+any typos in our asserts will raise the correct error:
+
+.. doctest::
+
+ >>> req.add_header('spam', 'eggs')
+ <MagicMock name='urllib2.Request().add_header()' id='...'>
+ >>> req.add_header.assret_called_with
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'assret_called_with'
+ >>> req.add_header.assert_called_with('spam', 'eggs')
+
+In many cases you will just be able to add `autospec=True` to your existing
+`patch` calls and then be protected against bugs due to typos and api
+changes.
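+
+For example, here's a sketch of what that one-argument change looks like
+(`mymodule` and `frobnicate` are hypothetical names, used only for
+illustration):
+
+.. code-block:: python
+
+ import mymodule  # hypothetical module under test
+
+ @patch('mymodule.frobnicate', autospec=True)
+ def test_frobnicate(mock_frob):
+     mymodule.frobnicate('data')
+     # a typo such as "assret_called_once_with" here would now raise an
+     # AttributeError instead of passing silently
+     mock_frob.assert_called_once_with('data')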
+
+As well as using `autospec` through `patch` there is a
+:func:`create_autospec` for creating autospecced mocks directly:
+
+.. doctest::
+
+ >>> import urllib2
+ >>> mock_urllib2 = create_autospec(urllib2)
+ >>> mock_urllib2.Request('foo', 'bar')
+ <NonCallableMagicMock name='mock.Request()' spec='Request' id='...'>
+
+This isn't without caveats and limitations however, which is why it is not
+the default behaviour. In order to know what attributes are available on the
+spec object, autospec has to introspect (access attributes) the spec. As you
+traverse attributes on the mock a corresponding traversal of the original
+object is happening under the hood. If any of your specced objects have
+properties or descriptors that can trigger code execution then you may not be
+able to use autospec. On the other hand it is much better to design your
+objects so that introspection is safe [#]_.
+
+A more serious problem is that it is common for instance attributes to be
+created in the `__init__` method and not to exist on the class at all.
+`autospec` can't know about any dynamically created attributes and restricts
+the api to visible attributes.
+
+.. doctest::
+
+ >>> class Something(object):
+ ...     def __init__(self):
+ ...         self.a = 33
+ ...
+ >>> with patch('__main__.Something', autospec=True):
+ ...     thing = Something()
+ ...     thing.a
+ ...
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'a'
+
+There are a few different ways of resolving this problem. The easiest, but
+not necessarily the least annoying, way is to simply set the required
+attributes on the mock after creation. Although `autospec` doesn't allow you
+to fetch attributes that don't exist on the spec, it doesn't prevent you from
+setting them:
+
+.. doctest::
+
+ >>> with patch('__main__.Something', autospec=True):
+ ...     thing = Something()
+ ...     thing.a = 33
+ ...
+
+There is a more aggressive version of both `spec` and `autospec` that *does*
+prevent you setting non-existent attributes. This is useful if you want to
+ensure your code only *sets* valid attributes too, but obviously it prevents
+this particular scenario:
+
+.. doctest::
+
+ >>> with patch('__main__.Something', autospec=True, spec_set=True):
+ ...     thing = Something()
+ ...     thing.a = 33
+ ...
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'a'
+
+Probably the best way of solving the problem is to add class attributes as
+default values for instance members initialised in `__init__`. Note that if
+you are only setting default attributes in `__init__` then providing them via
+class attributes (shared between instances of course) is faster too. e.g.
+
+.. code-block:: python
+
+ class Something(object):
+     a = 33
+
+This brings up another issue. It is relatively common to provide a default
+value of `None` for members that will later be an object of a different type.
+`None` would be useless as a spec because it wouldn't let you access *any*
+attributes or methods on it. As `None` is *never* going to be useful as a
+spec, and probably indicates a member that will normally be of some other type,
+`autospec` doesn't use a spec for members that are set to `None`. These will
+just be ordinary mocks (well - `MagicMocks`):
+
+.. doctest::
+
+ >>> class Something(object):
+ ...     member = None
+ ...
+ >>> mock = create_autospec(Something)
+ >>> mock.member.foo.bar.baz()
+ <MagicMock name='mock.member.foo.bar.baz()' id='...'>
+
+If modifying your production classes to add defaults isn't to your liking
+then there are more options. One of these is simply to use an instance as the
+spec rather than the class. The other is to create a subclass of the
+production class and add the defaults to the subclass without affecting the
+production class. Both of these require you to use an alternative object as
+the spec. Thankfully `patch` supports this - you can simply pass the
+alternative object as the `autospec` argument:
+
+.. doctest::
+
+ >>> class Something(object):
+ ...     def __init__(self):
+ ...         self.a = 33
+ ...
+ >>> class SomethingForTest(Something):
+ ...     a = 33
+ ...
+ >>> p = patch('__main__.Something', autospec=SomethingForTest)
+ >>> mock = p.start()
+ >>> mock.a
+ <NonCallableMagicMock name='Something.a' spec='int' id='...'>
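+
+And here's a sketch of the other alternative, passing an *instance* as the
+`autospec` argument (shown as a plain example rather than a verified
+doctest; the exact repr may differ):
+
+.. code-block:: pycon
+
+ >>> p = patch('__main__.Something', autospec=Something())
+ >>> mock = p.start()
+ >>> mock.a
+ <NonCallableMagicMock name='Something.a' spec='int' id='...'>
+ >>> p.stop()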
+
+.. note::
+
+ An additional limitation (currently) with `autospec` is that unbound
+ methods on mocked classes *don't* take an "explicit self" as the first
+ argument - so this usage will fail with `autospec`.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ...     def foo(self):
+ ...         pass
+ ...
+ >>> Foo.foo(Foo())
+ >>> MockFoo = create_autospec(Foo)
+ >>> MockFoo.foo(MockFoo())
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 1 argument (2 given)
+
+ The reason is that it's very hard to tell the difference between functions,
+ unbound methods and staticmethods across Python 2 & 3 and the alternative
+ implementations. This restriction may be fixed in future versions.
+
+
+------
+
+.. [#] This only applies to classes or already instantiated objects. Calling
+ a mocked class to create a mock instance *does not* create a real instance.
+ It is only attribute lookups - along with calls to `dir` - that are done. A
+ way round this problem would have been to use `getattr_static
+ <http://docs.python.org/dev/library/inspect.html#inspect.getattr_static>`_,
+ which can fetch attributes without triggering code execution. Descriptors
+ like `classmethod` and `staticmethod` *need* to be fetched correctly though,
+ so that their signatures can be mocked correctly.
diff --git a/python/mock-1.0.0/docs/index.txt b/python/mock-1.0.0/docs/index.txt
new file mode 100644
index 000000000..7e4a8daca
--- /dev/null
+++ b/python/mock-1.0.0/docs/index.txt
@@ -0,0 +1,411 @@
+====================================
+ Mock - Mocking and Testing Library
+====================================
+
+.. currentmodule:: mock
+
+:Author: `Michael Foord
+ <http://www.voidspace.org.uk/python/weblog/index.shtml>`_
+:Version: |release|
+:Date: 2012/10/07
+:Homepage: `Mock Homepage`_
+:Download: `Mock on PyPI`_
+:Documentation: `PDF Documentation
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+:License: `BSD License`_
+:Support: `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+:Issue tracker: `Google code project
+ <http://code.google.com/p/mock/issues/list>`_
+
+.. _Mock Homepage: http://www.voidspace.org.uk/python/mock/
+.. _BSD License: http://www.voidspace.org.uk/python/license.shtml
+
+
+.. currentmodule:: mock
+
+.. module:: mock
+ :synopsis: Mock object and testing library.
+
+.. index:: introduction
+
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core :class:`Mock` class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and the arguments they
+were called with. You can also specify return values and set needed attributes
+in the normal way.
+
+Additionally, mock provides a :func:`patch` decorator that handles patching
+module and class level attributes within the scope of a test, along with
+:const:`sentinel` for creating unique objects. See the `quick guide`_ for
+some examples of how to use :class:`Mock`, :class:`MagicMock` and
+:func:`patch`.
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of `'record -> replay'` used by many
+mocking frameworks.
+
+mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.
+
+
+.. testsetup::
+
+ class ProductionClass(object):
+     def method(self, *args):
+         pass
+
+ module = sys.modules['module'] = ProductionClass
+ ProductionClass.ClassName1 = ProductionClass
+ ProductionClass.ClassName2 = ProductionClass
+
+
+
+API Documentation
+=================
+
+.. toctree::
+ :maxdepth: 2
+
+ mock
+ patch
+ helpers
+ sentinel
+ magicmock
+
+
+User Guide
+==========
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+ examples
+ compare
+ changelog
+
+
+.. index:: installing
+
+Installing
+==========
+
+The current version is |release|. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions
+then please contact us.
+
+* `mock on PyPI <http://pypi.python.org/pypi/mock>`_
+* `mock documentation as PDF
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `Google Code Home & Mercurial Repository <http://code.google.com/p/mock/>`_
+
+.. index:: repository
+.. index:: hg
+
+You can check out the latest development version from the Google Code Mercurial
+repository with the following command:
+
+ ``hg clone https://mock.googlecode.com/hg/ mock``
+
+
+.. index:: pip
+.. index:: easy_install
+.. index:: setuptools
+
+If you have pip, setuptools or distribute you can install mock with:
+
+ | ``easy_install -U mock``
+ | ``pip install -U mock``
+
+Alternatively you can download the mock distribution from PyPI and after
+unpacking run:
+
+ ``python setup.py install``
+
+
+Quick Guide
+===========
+
+:class:`Mock` and :class:`MagicMock` objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them, to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> thing = ProductionClass()
+ >>> thing.method = MagicMock(return_value=3)
+ >>> thing.method(3, 4, 5, key='value')
+ 3
+ >>> thing.method.assert_called_with(3, 4, 5, key='value')
+
+:attr:`side_effect` allows you to perform side effects, including raising an
+exception when a mock is called:
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ...     return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don't exist on the spec will fail with an `AttributeError`.
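+
+A short sketch of `spec` in action, using the `ProductionClass` from the
+examples above:
+
+.. code-block:: pycon
+
+ >>> mock = Mock(spec=ProductionClass)
+ >>> mock.method()
+ <Mock name='mock.method()' id='...'>
+ >>> mock.no_such_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: Mock object has no attribute 'no_such_method'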
+
+The :func:`patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> @patch('module.ClassName2')
+ ... @patch('module.ClassName1')
+ ... def test(MockClass1, MockClass2):
+ ...     module.ClassName1()
+ ...     module.ClassName2()
+ ...     assert MockClass1 is module.ClassName1
+ ...     assert MockClass2 is module.ClassName2
+ ...     assert MockClass1.called
+ ...     assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+ When you nest patch decorators the mocks are passed in to the decorated
+ function in the same order they are applied (the normal *Python* order that
+ decorators are applied). This means from the bottom up, so in the example
+ above the mock for `module.ClassName1` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+As well as a decorator, `patch` can be used as a context manager in a with
+statement:
+
+.. doctest::
+
+ >>> with patch.object(ProductionClass, 'method', return_value=None) as mock_method:
+ ...     thing = ProductionClass()
+ ...     thing.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ...     assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python :ref:`magic methods <magic-methods>`. The
+easiest way of using magic methods is with the :class:`MagicMock` class. It
+allows you to do things like:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The `MagicMock` class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value='wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects in your tests have the same api as the
+objects they are replacing, you can use :ref:`auto-speccing <auto-speccing>`.
+Auto-speccing can be done through the `autospec` argument to patch, or the
+:func:`create_autospec` function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:
+
+.. doctest::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ...     pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
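+
+A quick sketch of the class case (`Frob` is a made-up example class; the
+output shown is approximate):
+
+.. code-block:: pycon
+
+ >>> class Frob(object):
+ ...     def __init__(self, a, b):
+ ...         pass
+ ...
+ >>> MockFrob = create_autospec(Frob)
+ >>> frob = MockFrob(1, 2)
+ >>> MockFrob(1)
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (2 given)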
+
+
+.. index:: references
+.. index:: articles
+
+References
+==========
+
+Articles, blog entries and other stuff related to testing with Mock:
+
+* `Imposing a No DB Discipline on Django unit tests
+ <https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md>`_
+* `mock-django: tools for mocking the Django ORM and models
+ <https://github.com/dcramer/mock-django>`_
+* `PyCon 2011 Video: Testing with mock <https://blip.tv/file/4881513>`_
+* `Mock objects in Python
+ <http://noopenblockers.com/2012/01/06/mock-objects-in-python/>`_
+* `Python: Injecting Mock Objects for Powerful Testing
+ <http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/>`_
+* `Python Mock: How to assert a substring of logger output
+ <http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/>`_
+* `Mocking Django <http://www.mattjmorrison.com/2011/09/mocking-django.html>`_
+* `Mocking dates and other classes that can't be modified
+ <http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_
+* `Mock recipes <http://konryd.blogspot.com/2010/06/mock-recipies.html>`_
+* `Mockity mock mock - some love for the mock module
+ <http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html>`_
+* `Coverage and Mock (with django)
+ <http://mattsnider.com/python/mock-and-coverage/>`_
+* `Python Unit Testing with Mock <http://www.insomnihack.com/?p=194>`_
+* `Getting started with Python Mock
+ <http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/>`_
+* `Smart Parameter Checks with mock
+ <http://tobyho.com/2011/03/24/smart-parameter-checks-in/>`_
+* `Python mock testing techniques and tools
+ <http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html>`_
+* `How To Test Django Template Tags
+ <http://techblog.ironfroggy.com/2008/10/how-to-test.html>`_
+* `A presentation on Unit Testing with Mock
+ <http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html>`_
+* `Mocking with Django and Google AppEngine
+ <http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html>`_
+
+
+.. index:: tests
+.. index:: unittest2
+
+Tests
+=====
+
+Mock uses `unittest2 <http://pypi.python.org/pypi/unittest2>`_ for its own
+test suite. In order to run it, use the `unit2` script that comes with
+`unittest2` module on a checkout of the source repository:
+
+ ``unit2 discover``
+
+If you have `setuptools <http://pypi.python.org/pypi/distribute>`_ as well as
+unittest2 you can run:
+
+ ``python setup.py test``
+
+On Python 3.2 you can use the ``unittest`` module from the standard library:
+
+ ``python3.2 -m unittest discover``
+
+.. index:: Python 3
+
+On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4 tests that use the with statements are skipped as the with statement
+is invalid syntax on Python 2.4.
+
+
+.. index:: older versions
+
+Older Versions
+==============
+
+Documentation for older versions of mock:
+
+* `mock 0.8 <http://www.voidspace.org.uk/python/mock/0.8/>`_
+* `mock 0.7 <http://www.voidspace.org.uk/python/mock/0.7/>`_
+* `mock 0.6 <http://www.voidspace.org.uk/python/mock/0.6.0/>`_
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+
+Terminology
+===========
+
+Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.
+
+In `classic mock terminology
+<http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html>`_
+:class:`mock.Mock` is a `spy <http://xunitpatterns.com/Test%20Spy.html>`_ that
+allows for *post-mortem* examination. This is what I call the "action ->
+assertion" [#]_ pattern of testing.
+
+I'm not however a fan of this "statically typed mocking terminology"
+promulgated by `Martin Fowler
+<http://martinfowler.com/articles/mocksArentStubs.html>`_. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.
+
+I much prefer duck typing: if an object used in your test suite looks like a
+mock object and quacks like a mock object then it's fine to call it a mock, no
+matter what the implementation looks like.
+
+This terminology is perhaps more useful in less capable languages where
+different usage patterns will *require* different implementations.
+`mock.Mock()` is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!
+
+How about a simpler definition: a "mock object" is an object used to replace a
+real one in a system under test.
+
+.. [#] This pattern is called "AAA" by some members of the testing community;
+ "Arrange - Act - Assert".
diff --git a/python/mock-1.0.0/docs/magicmock.txt b/python/mock-1.0.0/docs/magicmock.txt
new file mode 100644
index 000000000..42b2ed9db
--- /dev/null
+++ b/python/mock-1.0.0/docs/magicmock.txt
@@ -0,0 +1,258 @@
+
+.. currentmodule:: mock
+
+
+.. _magic-methods:
+
+Mocking Magic Methods
+=====================
+
+.. currentmodule:: mock
+
+:class:`Mock` supports mocking `magic methods
+<http://www.ironpythoninaction.com/magic-methods.html>`_. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.
+
+Because magic methods are looked up differently from normal methods [#]_, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes *almost* all of them. If
+there are any missing that you need please let us know!
+
+You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it *must* take ``self`` as
+the first argument [#]_.
+
+.. doctest::
+
+ >>> def __str__(self):
+ ...     return 'fooble'
+ ...
+ >>> mock = Mock()
+ >>> mock.__str__ = __str__
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock()
+ >>> mock.__str__.return_value = 'fooble'
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__iter__ = Mock(return_value=iter([]))
+ >>> list(mock)
+ []
+
+One use case for this is for mocking objects used as context managers in a
+`with` statement:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__enter__ = Mock(return_value='foo')
+ >>> mock.__exit__ = Mock(return_value=False)
+ >>> with mock as m:
+ ...     assert m == 'foo'
+ ...
+ >>> mock.__enter__.assert_called_with()
+ >>> mock.__exit__.assert_called_with(None, None, None)
+
+Calls to magic methods do not appear in :attr:`~Mock.method_calls`, but they
+are recorded in :attr:`~Mock.mock_calls`.
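+
+A quick illustration:
+
+.. code-block:: pycon
+
+ >>> mock = MagicMock()
+ >>> int(mock)
+ 1
+ >>> mock.method_calls
+ []
+ >>> mock.mock_calls
+ [call.__int__()]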
+
+.. note::
+
+ If you use the `spec` keyword argument to create a mock then attempting to
+ set a magic method that isn't in the spec will raise an `AttributeError`.
+
+The full list of supported magic methods is:
+
+* ``__hash__``, ``__sizeof__``, ``__repr__`` and ``__str__``
+* ``__dir__``, ``__format__`` and ``__subclasses__``
+* ``__floor__``, ``__trunc__`` and ``__ceil__``
+* Comparisons: ``__cmp__``, ``__lt__``, ``__gt__``, ``__le__``, ``__ge__``,
+ ``__eq__`` and ``__ne__``
+* Container methods: ``__getitem__``, ``__setitem__``, ``__delitem__``,
+ ``__contains__``, ``__len__``, ``__iter__``, ``__getslice__``,
+ ``__setslice__``, ``__reversed__`` and ``__missing__``
+* Context manager: ``__enter__`` and ``__exit__``
+* Unary numeric methods: ``__neg__``, ``__pos__`` and ``__invert__``
+* The numeric methods (including right hand and in-place variants):
+ ``__add__``, ``__sub__``, ``__mul__``, ``__div__``,
+ ``__floordiv__``, ``__mod__``, ``__divmod__``, ``__lshift__``,
+ ``__rshift__``, ``__and__``, ``__xor__``, ``__or__``, and ``__pow__``
+* Numeric conversion methods: ``__complex__``, ``__int__``, ``__float__``,
+ ``__index__`` and ``__coerce__``
+* Descriptor methods: ``__get__``, ``__set__`` and ``__delete__``
+* Pickling: ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``,
+ ``__getnewargs__``, ``__getstate__`` and ``__setstate__``
+
+
+The following methods are supported in Python 2 but don't exist in Python 3:
+
+* ``__unicode__``, ``__long__``, ``__oct__``, ``__hex__`` and ``__nonzero__``
+* ``__truediv__`` and ``__rtruediv__``
+
+The following methods are supported in Python 3 but don't exist in Python 2:
+
+* ``__bool__`` and ``__next__``
+
+The following methods exist but are *not* supported as they are either in use by
+mock, can't be set dynamically, or can cause problems:
+
+* ``__getattr__``, ``__setattr__``, ``__init__`` and ``__new__``
+* ``__prepare__``, ``__instancecheck__``, ``__subclasscheck__``, ``__del__``
+
+
+
+Magic Mock
+==========
+
+There are two `MagicMock` variants: `MagicMock` and `NonCallableMagicMock`.
+
+
+.. class:: MagicMock(*args, **kw)
+
+ ``MagicMock`` is a subclass of :class:`Mock` with default implementations
+ of most of the magic methods. You can use ``MagicMock`` without having to
+ configure the magic methods yourself.
+
+ The constructor parameters have the same meaning as for :class:`Mock`.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic methods
+ that exist in the spec will be created.
+
+
+.. class:: NonCallableMagicMock(*args, **kw)
+
+ A non-callable version of `MagicMock`.
+
+ The constructor parameters have the same meaning as for
+ :class:`MagicMock`, with the exception of `return_value` and
+ `side_effect` which have no meaning on a non-callable mock.
+
+The magic methods are set up with `MagicMock` objects, so you can configure them
+and use them in the usual way:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock[3] = 'fish'
+ >>> mock.__setitem__.assert_called_with(3, 'fish')
+ >>> mock.__getitem__.return_value = 'result'
+ >>> mock[2]
+ 'result'
+
+By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren't interested
+in the return value. You can still *set* the return value manually if you want
+to change the default.
+
+Methods and their defaults:
+
+* ``__lt__``: NotImplemented
+* ``__gt__``: NotImplemented
+* ``__le__``: NotImplemented
+* ``__ge__``: NotImplemented
+* ``__int__``: 1
+* ``__contains__``: False
+* ``__len__``: 0
+* ``__iter__``: iter([])
+* ``__exit__``: False
+* ``__complex__``: 1j
+* ``__float__``: 1.0
+* ``__bool__``: True
+* ``__nonzero__``: True
+* ``__oct__``: '1'
+* ``__hex__``: '0x1'
+* ``__long__``: long(1)
+* ``__index__``: 1
+* ``__hash__``: default hash for the mock
+* ``__str__``: default str for the mock
+* ``__unicode__``: default unicode for the mock
+* ``__sizeof__``: default sizeof for the mock
+
+For example:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> int(mock)
+ 1
+ >>> len(mock)
+ 0
+ >>> hex(mock)
+ '0x1'
+ >>> list(mock)
+ []
+ >>> object() in mock
+ False
+
+The two equality methods, `__eq__` and `__ne__`, are special (changed in
+0.7.2). They do the default equality comparison on identity, using a side
+effect, unless you change their return value to return something else:
+
+.. doctest::
+
+ >>> MagicMock() == 3
+ False
+ >>> MagicMock() != 3
+ True
+ >>> mock = MagicMock()
+ >>> mock.__eq__.return_value = True
+ >>> mock == 3
+ True
+
+In `0.8` `__iter__` also gained special handling, implemented with a
+side effect. The return value of `MagicMock.__iter__` can be any iterable
+object and isn't required to be an iterator:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__iter__.return_value = ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+
+If the return value *is* an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:
+
+.. doctest::
+
+ >>> mock.__iter__.return_value = iter(['a', 'b', 'c'])
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ []
+
+``MagicMock`` has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up if you want.
+
+Magic methods that are supported but not set up by default in ``MagicMock`` are:
+
+* ``__cmp__``
+* ``__getslice__`` and ``__setslice__``
+* ``__coerce__``
+* ``__subclasses__``
+* ``__dir__``
+* ``__format__``
+* ``__get__``, ``__set__`` and ``__delete__``
+* ``__reversed__`` and ``__missing__``
+* ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``, ``__getnewargs__``,
+ ``__getstate__`` and ``__setstate__``
+* ``__getformat__`` and ``__setformat__``
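+
+These can be configured in exactly the same way as the pre-created ones.
+A short sketch using ``__reversed__``:
+
+.. code-block:: pycon
+
+ >>> mock = MagicMock()
+ >>> mock.__reversed__ = Mock(return_value=iter([3, 2, 1]))
+ >>> list(reversed(mock))
+ [3, 2, 1]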
+
+
+
+------------
+
+.. [#] Magic methods *should* be looked up on the class rather than the
+ instance. Different versions of Python are inconsistent about applying this
+ rule. The supported protocol methods should work with all supported versions
+ of Python.
+.. [#] The function is basically hooked up to the class, but each ``Mock``
+ instance is kept isolated from the others.
diff --git a/python/mock-1.0.0/docs/mock.txt b/python/mock-1.0.0/docs/mock.txt
new file mode 100644
index 000000000..58712b21a
--- /dev/null
+++ b/python/mock-1.0.0/docs/mock.txt
@@ -0,0 +1,842 @@
+The Mock Class
+==============
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass:
+     pass
+
+
+`Mock` is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them [#]_. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.
+
+:class:`MagicMock` is a subclass of `Mock` with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren't callable:
+:class:`NonCallableMock` and :class:`NonCallableMagicMock`.
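+
+Calling one of the non-callable variants raises a `TypeError` (a quick
+sketch; the exact message may vary between Python versions):
+
+.. code-block:: pycon
+
+ >>> from mock import NonCallableMock
+ >>> mock = NonCallableMock()
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ TypeError: 'NonCallableMock' object is not callable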
+
+The :func:`patch` decorator makes it easy to temporarily replace classes
+in a particular module with a `Mock` object. By default `patch` will create
+a `MagicMock` for you. You can specify an alternative class of `Mock` using
+the `new_callable` argument to `patch`.
+
+
+.. index:: side_effect
+.. index:: return_value
+.. index:: wraps
+.. index:: name
+.. index:: spec
+
+.. class:: Mock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, **kwargs)
+
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods).
+ Accessing any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ :attr:`__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the :attr:`~Mock.side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns :data:`DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ A `side_effect` can be cleared by setting it to `None`.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ :attr:`return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result and ignoring `return_value`). Attribute access
+ on the mock will return a Mock object that wraps the corresponding
+ attribute of the wrapped object (so attempting to access an attribute
+ that doesn't exist will raise an `AttributeError`).
+
+ If the mock has an explicit `return_value` set then calls are not passed
+ to the wrapped object and the `return_value` is returned instead (there
+ is a short sketch of `wraps` after this list).
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
+ The `Mock` constructor also accepts arbitrary keyword arguments. These will
+ be used to set attributes on the mock after it is created. See the
+ :meth:`configure_mock` method for details.
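+
+ Here's a short sketch of the `wraps` behaviour described above - calls
+ pass through to the wrapped object while still being recorded:
+
+ .. code-block:: pycon
+
+  >>> def add(a, b):
+  ...     return a + b
+  ...
+  >>> mock = Mock(wraps=add)
+  >>> mock(2, 3)
+  5
+  >>> mock.assert_called_once_with(2, 3)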
+
+
+ .. method:: assert_called_with(*args, **kwargs)
+
+ This method is a convenient way of asserting that calls are made in a
+ particular way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method(1, 2, 3, test='wow')
+ <Mock name='mock.method()' id='...'>
+ >>> mock.method.assert_called_with(1, 2, 3, test='wow')
+
+
+ .. method:: assert_called_once_with(*args, **kwargs)
+
+ Assert that the mock was called exactly once and with the specified
+ arguments.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+
+ .. method:: assert_any_call(*args, **kwargs)
+
+ Assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ :meth:`assert_called_with` and :meth:`assert_called_once_with` that
+ only pass if the call is the most recent one.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, arg='thing')
+ >>> mock('some', 'thing', 'else')
+ >>> mock.assert_any_call(1, 2, arg='thing')
+
+
+ .. method:: assert_has_calls(calls, any_order=False)
+
+ Assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in :attr:`mock_calls`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1)
+ >>> mock(2)
+ >>> mock(3)
+ >>> mock(4)
+ >>> calls = [call(2), call(3)]
+ >>> mock.assert_has_calls(calls)
+ >>> calls = [call(4), call(2), call(3)]
+ >>> mock.assert_has_calls(calls, any_order=True)
+
+
+ .. method:: reset_mock()
+
+ The reset_mock method resets all the call attributes on a mock object:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('hello')
+ >>> mock.called
+ True
+ >>> mock.reset_mock()
+ >>> mock.called
+ False
+
+ This can be useful where you want to make a series of assertions that
+ reuse the same object. Note that `reset_mock` *doesn't* clear the
+ return value, :attr:`side_effect` or any child attributes you have
+ set using normal assignment. Child mocks and the return value mock
+ (if any) are reset as well.
+
+
+ .. method:: mock_add_spec(spec, spec_set=False)
+
+ Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is `True` then only attributes on the spec can be set.
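+
+ A quick sketch (the exact repr may differ):
+
+ .. code-block:: pycon
+
+  >>> mock = Mock()
+  >>> mock.mock_add_spec(['foo'])
+  >>> mock.foo
+  <Mock name='mock.foo' id='...'>
+  >>> mock.bar
+  Traceback (most recent call last):
+  ...
+  AttributeError: Mock object has no attribute 'bar'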
+
+
+ .. method:: attach_mock(mock, attribute)
+
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ :attr:`method_calls` and :attr:`mock_calls` attributes of this one.
+
+
+ .. method:: configure_mock(**kwargs)
+
+ Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ The same thing can be achieved in the constructor call to mocks:
+
+ .. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ `configure_mock` exists to make it easier to do configuration
+ after the mock has been created.
+
+
+ .. method:: __dir__()
+
+ `Mock` objects limit the results of `dir(some_mock)` to a useful subset.
+ For mocks with a `spec` this includes all the permitted attributes
+ for the mock.
+
+ See :data:`FILTER_DIR` for what this filtering does, and how to
+ switch it off.
+
+
+ .. method:: _get_child_mock(**kw)
+
+ Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass).
+
+
+ .. attribute:: called
+
+ A boolean representing whether or not the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.called
+ False
+ >>> mock()
+ >>> mock.called
+ True
+
+ .. attribute:: call_count
+
+ An integer telling you how many times the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.call_count
+ 0
+ >>> mock()
+ >>> mock()
+ >>> mock.call_count
+ 2
+
+
+ .. attribute:: return_value
+
+ Set this to configure the value returned by calling the mock:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 'fish'
+ >>> mock()
+ 'fish'
+
+ The default return value is a mock object and you can configure it in
+ the normal way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value.attribute = sentinel.Attribute
+ >>> mock.return_value()
+ <Mock name='mock()()' id='...'>
+ >>> mock.return_value.assert_called_with()
+
+ `return_value` can also be set in the constructor:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock.return_value
+ 3
+ >>> mock()
+ 3
+
+
+ .. attribute:: side_effect
+
+ This can either be a function to be called when the mock is called,
+ or an exception (class or instance) to be raised.
+
+ If you pass in a function it will be called with the same arguments as the
+ mock and, unless the function returns the :data:`DEFAULT` singleton, the
+ call to the mock will then return whatever the function returns. If the
+ function returns :data:`DEFAULT` then the mock will return its normal
+ value (from the :attr:`return_value`).
+
+ An example of a mock that raises an exception (to test exception
+ handling of an API):
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = Exception('Boom!')
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+ Using `side_effect` to return a sequence of values:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = [3, 2, 1]
+ >>> mock(), mock(), mock()
+ (3, 2, 1)
+
+ The `side_effect` function is called with the same arguments as the
+ mock (so it is wise for it to take arbitrary args and keyword
+ arguments) and whatever it returns is used as the return value for
+ the call. The exception is if `side_effect` returns :data:`DEFAULT`,
+ in which case the normal :attr:`return_value` is used.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> def side_effect(*args, **kwargs):
+ ...     return DEFAULT
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock()
+ 3
+
+ `side_effect` can be set in the constructor. Here's an example that
+ adds one to the value the mock is called with and returns it:
+
+ .. doctest::
+
+ >>> side_effect = lambda value: value + 1
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(3)
+ 4
+ >>> mock(-8)
+ -7
+
+ Setting `side_effect` to `None` clears it:
+
+ .. doctest::
+
+ >>> from mock import Mock
+ >>> m = Mock(side_effect=KeyError, return_value=3)
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ KeyError
+ >>> m.side_effect = None
+ >>> m()
+ 3
+
+
+ .. attribute:: call_args
+
+ This is either `None` (if the mock hasn't been called), or the
+ arguments that the mock was last called with. This will be in the
+ form of a tuple: the first member is the positional arguments the mock
+ was called with (or an empty tuple) and the second member is any
+ keyword arguments (or an empty dictionary).
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> print mock.call_args
+ None
+ >>> mock()
+ >>> mock.call_args
+ call()
+ >>> mock.call_args == ()
+ True
+ >>> mock(3, 4)
+ >>> mock.call_args
+ call(3, 4)
+ >>> mock.call_args == ((3, 4),)
+ True
+ >>> mock(3, 4, 5, key='fish', next='w00t!')
+ >>> mock.call_args
+ call(3, 4, 5, key='fish', next='w00t!')
+
+ `call_args`, along with members of the lists :attr:`call_args_list`,
+ :attr:`method_calls` and :attr:`mock_calls` are :data:`call` objects.
+ These are tuples, so they can be unpacked to get at the individual
+ arguments and make more complex assertions. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: call_args_list
+
+ This is a list of all the calls made to the mock object in sequence
+ (so the length of the list is the number of times it has been
+ called). Before any calls have been made it is an empty list. The
+ :data:`call` object can be used for conveniently constructing lists of
+ calls to compare with `call_args_list`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock()
+ >>> mock(3, 4)
+ >>> mock(key='fish', next='w00t!')
+ >>> mock.call_args_list
+ [call(), call(3, 4), call(key='fish', next='w00t!')]
+ >>> expected = [(), ((3, 4),), ({'key': 'fish', 'next': 'w00t!'},)]
+ >>> mock.call_args_list == expected
+ True
+
+ Members of `call_args_list` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: method_calls
+
+ As well as tracking calls to themselves, mocks also track calls to
+ methods and attributes, and *their* methods and attributes:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method()
+ <Mock name='mock.method()' id='...'>
+ >>> mock.property.method.attribute()
+ <Mock name='mock.property.method.attribute()' id='...'>
+ >>> mock.method_calls
+ [call.method(), call.property.method.attribute()]
+
+ Members of `method_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: mock_calls
+
+ `mock_calls` records *all* calls to the mock object, its methods, magic
+ methods *and* return value mocks.
+
+ .. doctest::
+
+ >>> mock = MagicMock()
+ >>> result = mock(1, 2, 3)
+ >>> mock.first(a=3)
+ <MagicMock name='mock.first()' id='...'>
+ >>> mock.second()
+ <MagicMock name='mock.second()' id='...'>
+ >>> int(mock)
+ 1
+ >>> result(1)
+ <MagicMock name='mock()()' id='...'>
+ >>> expected = [call(1, 2, 3), call.first(a=3), call.second(),
+ ... call.__int__(), call()(1)]
+ >>> mock.mock_calls == expected
+ True
+
+ Members of `mock_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: __class__
+
+ Normally the `__class__` attribute of an object will return its type.
+ For a mock object with a `spec` `__class__` returns the spec class
+ instead. This allows mock objects to pass `isinstance` tests for the
+ object they are replacing / masquerading as:
+
+ .. doctest::
+
+ >>> mock = Mock(spec=3)
+ >>> isinstance(mock, int)
+ True
+
+ `__class__` is assignable to; this allows a mock to pass an
+ `isinstance` check without forcing you to use a spec:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__class__ = dict
+ >>> isinstance(mock, dict)
+ True
+
+.. class:: NonCallableMock(spec=None, wraps=None, name=None, spec_set=None, **kwargs)
+
+ A non-callable version of `Mock`. The constructor parameters have the same
+ meaning as for `Mock`, with the exception of `return_value` and `side_effect`
+ which have no meaning on a non-callable mock.
+
+Mock objects that use a class or an instance as a `spec` or `spec_set` are able
+to pass `isinstance` tests:
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> isinstance(mock, SomeClass)
+ True
+ >>> mock = Mock(spec_set=SomeClass())
+ >>> isinstance(mock, SomeClass)
+ True
+
+The `Mock` classes have support for mocking magic methods. See :ref:`magic
+methods <magic-methods>` for the full details.
+
+The mock classes and the :func:`patch` decorators all take arbitrary keyword
+arguments for configuration. For the `patch` decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:
+
+.. doctest::
+
+ >>> m = MagicMock(attribute=3, other='fish')
+ >>> m.attribute
+ 3
+ >>> m.other
+ 'fish'
+
+The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can't use dotted names directly in a call you
+have to create a dictionary and unpack it using `**`:
+
+.. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+.. class:: PropertyMock(*args, **kwargs)
+
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock with
+ no arguments. Setting it calls the mock with the value being set.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ...     @property
+ ...     def foo(self):
+ ...         return 'something'
+ ...     @foo.setter
+ ...     def foo(self, value):
+ ...         pass
+ ...
+ >>> with patch('__main__.Foo.foo', new_callable=PropertyMock) as mock_foo:
+ ...     mock_foo.return_value = 'mockity-mock'
+ ...     this_foo = Foo()
+ ...     print this_foo.foo
+ ...     this_foo.foo = 6
+ ...
+ mockity-mock
+ >>> mock_foo.mock_calls
+ [call(), call(6)]
+
+Because of the way mock attributes are stored you can't directly attach a
+`PropertyMock` to a mock object. Instead you can attach it to the mock type
+object:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(return_value=3)
+ >>> type(m).foo = p
+ >>> m.foo
+ 3
+ >>> p.assert_called_once_with()
+
+
+.. index:: __call__
+.. index:: calling
+
+Calling
+=======
+
+Mock objects are callable. The call will return the value set as the
+:attr:`~Mock.return_value` attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock) - but it is stored and the same one
+returned each time.
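+
+A quick illustration of the return value being cached:
+
+.. code-block:: pycon
+
+ >>> mock = Mock()
+ >>> mock() is mock.return_value
+ True
+ >>> mock() is mock()
+ True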
+
+Calls made to the object will be recorded in the attributes
+like :attr:`~Mock.call_args` and :attr:`~Mock.call_args_list`.
+
+If :attr:`~Mock.side_effect` is set then it will be called after the call has
+been recorded, so if `side_effect` raises an exception the call is still
+recorded.
+
+The simplest way to make a mock raise an exception when called is to make
+:attr:`~Mock.side_effect` an exception class or instance:
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=IndexError)
+ >>> m(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ IndexError
+ >>> m.mock_calls
+ [call(1, 2, 3)]
+ >>> m.side_effect = KeyError('Bang!')
+ >>> m('two', 'three', 'four')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Bang!'
+ >>> m.mock_calls
+ [call(1, 2, 3), call('two', 'three', 'four')]
+
+If `side_effect` is a function then whatever that function returns is what
+calls to the mock return. The `side_effect` function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:
+
+.. doctest::
+
+ >>> def side_effect(value):
+ ...     return value + 1
+ ...
+ >>> m = MagicMock(side_effect=side_effect)
+ >>> m(1)
+ 2
+ >>> m(2)
+ 3
+ >>> m.mock_calls
+ [call(1), call(2)]
+
+If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+`mock.return_value` from inside `side_effect`, or return :data:`DEFAULT`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> def side_effect(*args, **kwargs):
+ ...     return m.return_value
+ ...
+ >>> m.side_effect = side_effect
+ >>> m.return_value = 3
+ >>> m()
+ 3
+ >>> def side_effect(*args, **kwargs):
+ ...     return DEFAULT
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+
+To remove a `side_effect`, and return to the default behaviour, set the
+`side_effect` to `None`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=6)
+ >>> def side_effect(*args, **kwargs):
+ ...     return 3
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+ >>> m.side_effect = None
+ >>> m()
+ 6
+
+The `side_effect` can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a `StopIteration` is raised):
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=[1, 2, 3])
+ >>> m()
+ 1
+ >>> m()
+ 2
+ >>> m()
+ 3
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+If any members of the iterable are exceptions they will be raised instead of
+returned:
+
+.. doctest::
+
+ >>> iterable = (33, ValueError, 66)
+ >>> m = MagicMock(side_effect=iterable)
+ >>> m()
+ 33
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ ValueError
+ >>> m()
+ 66
+
+
+.. _deleting-attributes:
+
+Deleting Attributes
+===================
+
+Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.
+
+You may want a mock object to return `False` to a `hasattr` call, or raise an
+`AttributeError` when an attribute is fetched. You can do this by providing
+an object as a `spec` for a mock, but that isn't always convenient.
+
+You "block" attributes by deleting them. Once deleted, accessing an attribute
+will raise an `AttributeError`.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> hasattr(mock, 'm')
+ True
+ >>> del mock.m
+ >>> hasattr(mock, 'm')
+ False
+ >>> del mock.f
+ >>> mock.f
+ Traceback (most recent call last):
+ ...
+ AttributeError: f
+
+
+Attaching Mocks as Attributes
+=============================
+
+When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a "child" of that mock. Calls to the child are recorded in
+the :attr:`~Mock.method_calls` and :attr:`~Mock.mock_calls` attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:
+
+.. doctest::
+
+ >>> parent = MagicMock()
+ >>> child1 = MagicMock(return_value=None)
+ >>> child2 = MagicMock(return_value=None)
+ >>> parent.child1 = child1
+ >>> parent.child2 = child2
+ >>> child1(1)
+ >>> child2(2)
+ >>> parent.mock_calls
+ [call.child1(1), call.child2(2)]
+
+The exception to this is if the mock has a name. This allows you to prevent
+the "parenting" if for some reason you don't want it to happen.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> not_a_child = MagicMock(name='not-a-child')
+ >>> mock.attribute = not_a_child
+ >>> mock.attribute()
+ <MagicMock name='not-a-child()' id='...'>
+ >>> mock.mock_calls
+ []
+
+Mocks created for you by :func:`patch` are automatically given names. To
+attach mocks that have names to a parent you use the :meth:`~Mock.attach_mock`
+method:
+
+.. doctest::
+
+ >>> thing1 = object()
+ >>> thing2 = object()
+ >>> parent = MagicMock()
+ >>> with patch('__main__.thing1', return_value=None) as child1:
+ ...     with patch('__main__.thing2', return_value=None) as child2:
+ ...         parent.attach_mock(child1, 'child1')
+ ...         parent.attach_mock(child2, 'child2')
+ ...         child1('one')
+ ...         child2('two')
+ ...
+ >>> parent.mock_calls
+ [call.child1('one'), call.child2('two')]
+
+
+-----
+
+.. [#] The only exceptions are magic methods and attributes (those that have
+ leading and trailing double underscores). Mock doesn't create these but
+ instead raises an ``AttributeError``. This is because the interpreter
+ will often implicitly request these methods, and gets *very* confused when
+ it receives a new Mock object where it expected a magic method. If you need
+ magic method support see :ref:`magic methods <magic-methods>`.
diff --git a/python/mock-1.0.0/docs/patch.txt b/python/mock-1.0.0/docs/patch.txt
new file mode 100644
index 000000000..3d56264fb
--- /dev/null
+++ b/python/mock-1.0.0/docs/patch.txt
@@ -0,0 +1,636 @@
+==================
+ Patch Decorators
+==================
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass(object):
+     static_method = None
+     class_method = None
+     attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = package.module
+
+ class TestCase(unittest2.TestCase):
+     def run(self):
+         result = unittest2.TestResult()
+         super(unittest2.TestCase, self).run(result)
+         assert result.wasSuccessful()
+
+.. testcleanup::
+
+ patch.TEST_PREFIX = 'test'
+
+
+The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.
+
+
+patch
+=====
+
+.. note::
+
+ `patch` is straightforward to use. The key is to do the patching in the
+ right namespace. See the section `where to patch`_.
+
+.. function:: patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ :class:`MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+ then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being mocked
+ will have their arguments checked and will raise a `TypeError` if they are
+ called with the wrong signature. For mocks
+ replacing a class, their return value (the 'instance') will have the same
+ spec as the class. See the :func:`create_autospec` function and
+ :ref:`auto-speccing`.
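+
+ For example, a minimal sketch of the signature checking you get from
+ auto-speccing, using the related :func:`create_autospec` helper (the
+ `function` here is made up for illustration):
+
+ .. doctest::
+
+     >>> from mock import create_autospec
+     >>> def function(a, b, c):
+     ...     pass
+     ...
+     >>> mock_function = create_autospec(function, return_value='fishy')
+     >>> mock_function(1, 2, 3)
+     'fishy'
+     >>> mock_function.assert_called_once_with(1, 2, 3)
+     >>> try:
+     ...     # wrong number of arguments for the spec
+     ...     mock_function('single argument')
+     ... except TypeError:
+     ...     print 'signature enforced'
+     ...
+     signature enforced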
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+ attributes that your production code creates at runtime. It is off by
+ default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
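+
+ For example, a minimal sketch of `create=True`; the attribute name
+ `made_up_attribute` is invented purely for illustration:
+
+ .. doctest::
+
+     >>> @patch('__main__.SomeClass.made_up_attribute', create=True)
+     ... def test(mock_attr):
+     ...     # the attribute only exists while the patch is active
+     ...     assert SomeClass.made_up_attribute is mock_attr
+     ...
+     >>> test()
+     >>> assert not hasattr(SomeClass, 'made_up_attribute')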
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+ code when your test methods share a common set of patches. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+
+`patch` as function decorator, creating the mock for you and passing it into
+the decorated function:
+
+.. doctest::
+
+ >>> @patch('__main__.SomeClass')
+ ... def function(normal_argument, mock_class):
+ ... print mock_class is SomeClass
+ ...
+ >>> function(None)
+ True
+
+
+Patching a class replaces the class with a `MagicMock` *instance*. If the
+class is instantiated in the code under test then it will be the
+:attr:`~Mock.return_value` of the mock that will be used.
+
+If the class is instantiated multiple times you could use
+:attr:`~Mock.side_effect` to return a new mock each time. Alternatively you
+can set the `return_value` to be anything you want.
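+
+For example, a minimal sketch of returning a fresh mock for each
+instantiation of the patched class:
+
+.. doctest::
+
+    >>> with patch('__main__.SomeClass') as MockClass:
+    ...     # each call to the patched class creates a new MagicMock
+    ...     MockClass.side_effect = lambda: MagicMock()
+    ...     assert SomeClass() is not SomeClass()
+    ...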
+
+To configure return values on methods of *instances* of the patched class
+you must do this on the `return_value`. For example:
+
+.. doctest::
+
+ >>> class Class(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch('__main__.Class') as MockClass:
+ ... instance = MockClass.return_value
+ ... instance.method.return_value = 'foo'
+ ... assert Class() is instance
+ ... assert Class().method() == 'foo'
+ ...
+
+If you use `spec` or `spec_set` and `patch` is replacing a *class*, then the
+return value of the created mock will have the same spec.
+
+.. doctest::
+
+ >>> Original = Class
+ >>> patcher = patch('__main__.Class', spec=True)
+ >>> MockClass = patcher.start()
+ >>> instance = MockClass()
+ >>> assert isinstance(instance, Original)
+ >>> patcher.stop()
+
+The `new_callable` argument is useful where you want to use an alternative
+class to the default :class:`MagicMock` for the created mock. For example, if
+you wanted a :class:`NonCallableMock` to be used:
+
+.. doctest::
+
+ >>> thing = object()
+ >>> with patch('__main__.thing', new_callable=NonCallableMock) as mock_thing:
+ ... assert thing is mock_thing
+ ... thing()
+ ...
+ Traceback (most recent call last):
+ ...
+ TypeError: 'NonCallableMock' object is not callable
+
+Another use case might be to replace an object with a `StringIO` instance:
+
+.. doctest::
+
+ >>> from StringIO import StringIO
+ >>> def foo():
+ ... print 'Something'
+ ...
+ >>> @patch('sys.stdout', new_callable=StringIO)
+ ... def test(mock_stdout):
+ ... foo()
+ ... assert mock_stdout.getvalue() == 'Something\n'
+ ...
+ >>> test()
+
+When `patch` is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:
+
+.. doctest::
+
+ >>> patcher = patch('__main__.thing', first='one', second='two')
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.first
+ 'one'
+ >>> mock_thing.second
+ 'two'
+
+As well as attributes on the created mock, attributes of child mocks, like
+the :attr:`~Mock.return_value` and :attr:`~Mock.side_effect`, can also be
+configured. These aren't syntactically valid to pass in directly as keyword
+arguments, but a dictionary with these as keys can still be expanded into a
+`patch` call using `**`:
+
+.. doctest::
+
+ >>> config = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> patcher = patch('__main__.thing', **config)
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.method()
+ 3
+ >>> mock_thing.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+patch.object
+============
+
+.. function:: patch.object(target, attribute, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. Like `patch`,
+ `patch.object` takes arbitrary keyword arguments for configuring the mock
+ object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+You can either call `patch.object` with three arguments or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.
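+
+For example, a minimal sketch of the three argument form, using a `sentinel`
+object as the replacement:
+
+.. doctest::
+
+    >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+    ... def test():
+    ...     # inside the test the attribute is the sentinel
+    ...     assert SomeClass.attribute == sentinel.attribute
+    ...
+    >>> test()
+    >>> # afterwards the original attribute is restored
+    >>> assert SomeClass.attribute != sentinel.attribute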
+
+When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... def test(mock_method):
+ ... SomeClass.class_method(3)
+ ... mock_method.assert_called_with(3)
+ ...
+ >>> test()
+
+`spec`, `create` and the other arguments to `patch.object` have the same
+meaning as they do for `patch`.
+
+
+patch.dict
+==========
+
+.. function:: patch.dict(in_dict, values=(), clear=False, **kwargs)
+
+ Patch a dictionary, or dictionary-like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping-like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary.
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+
+`patch.dict` can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> foo = {}
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == {}
+
+ >>> import os
+ >>> with patch.dict('os.environ', {'newkey': 'newvalue'}):
+ ... print os.environ['newkey']
+ ...
+ newvalue
+ >>> assert 'newkey' not in os.environ
+
+Keywords can be used in the `patch.dict` call to set values in the dictionary:
+
+.. doctest::
+
+ >>> mymodule = MagicMock()
+ >>> mymodule.function.return_value = 'fish'
+ >>> with patch.dict('sys.modules', mymodule=mymodule):
+ ... import mymodule
+ ... mymodule.function('some', 'args')
+ ...
+ 'fish'
+
+`patch.dict` can be used with dictionary-like objects that aren't actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership testing. This corresponds to the
+magic methods `__getitem__`, `__setitem__`, `__delitem__` and either
+`__iter__` or `__contains__`.
+
+.. doctest::
+
+ >>> class Container(object):
+ ... def __init__(self):
+ ... self.values = {}
+ ... def __getitem__(self, name):
+ ... return self.values[name]
+ ... def __setitem__(self, name, value):
+ ... self.values[name] = value
+ ... def __delitem__(self, name):
+ ... del self.values[name]
+ ... def __iter__(self):
+ ... return iter(self.values)
+ ...
+ >>> thing = Container()
+ >>> thing['one'] = 1
+ >>> with patch.dict(thing, one=2, two=3):
+ ... assert thing['one'] == 2
+ ... assert thing['two'] == 3
+ ...
+ >>> assert thing['one'] == 1
+ >>> assert list(thing) == ['one']
+
+
+patch.multiple
+==============
+
+.. function:: patch.multiple(target, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use :data:`DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. These arguments will
+ be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+If you want `patch.multiple` to create mocks for you, then you can use
+:data:`DEFAULT` as the value. If you use `patch.multiple` as a decorator
+then the created mocks are passed into the decorated function by keyword.
+
+.. doctest::
+
+ >>> thing = object()
+ >>> other = object()
+
+ >>> @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(thing, other):
+ ... assert isinstance(thing, MagicMock)
+ ... assert isinstance(other, MagicMock)
+ ...
+ >>> test_function()
+
+`patch.multiple` can be nested with other `patch` decorators, but put arguments
+passed by keyword *after* any of the standard arguments created by `patch`:
+
+.. doctest::
+
+ >>> @patch('sys.exit')
+ ... @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(mock_exit, other, thing):
+ ... assert 'other' in repr(other)
+ ... assert 'thing' in repr(thing)
+ ... assert 'exit' in repr(mock_exit)
+ ...
+ >>> test_function()
+
+If `patch.multiple` is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:
+
+.. doctest::
+
+ >>> with patch.multiple('__main__', thing=DEFAULT, other=DEFAULT) as values:
+ ... assert 'other' in repr(values['other'])
+ ... assert 'thing' in repr(values['thing'])
+ ... assert values['thing'] is thing
+ ... assert values['other'] is other
+ ...
+
+
+.. _start-and-stop:
+
+patch methods: start and stop
+=============================
+
+All the patchers have `start` and `stop` methods. These make it simpler to do
+patching in `setUp` methods or where you want to do multiple patches without
+nesting decorators or with statements.
+
+To use them call `patch`, `patch.object` or `patch.dict` as normal and keep a
+reference to the returned `patcher` object. You can then call `start` to put
+the patch in place and `stop` to undo it.
+
+If you are using `patch` to create a mock for you then it will be returned by
+the call to `patcher.start`.
+
+.. doctest::
+
+ >>> patcher = patch('package.module.ClassName')
+ >>> from package import module
+ >>> original = module.ClassName
+ >>> new_mock = patcher.start()
+ >>> assert module.ClassName is not original
+ >>> assert module.ClassName is new_mock
+ >>> patcher.stop()
+ >>> assert module.ClassName is original
+ >>> assert module.ClassName is not new_mock
+
+
+A typical use case for this is doing multiple patches in the `setUp`
+method of a `TestCase`:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher1 = patch('package.module.Class1')
+ ... self.patcher2 = patch('package.module.Class2')
+ ... self.MockClass1 = self.patcher1.start()
+ ... self.MockClass2 = self.patcher2.start()
+ ...
+ ... def tearDown(self):
+ ... self.patcher1.stop()
+ ... self.patcher2.stop()
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class1 is self.MockClass1
+ ... assert package.module.Class2 is self.MockClass2
+ ...
+ >>> MyTest('test_something').run()
+
+.. caution::
+
+ If you use this technique you must ensure that the patching is "undone" by
+ calling `stop`. This can be fiddlier than you might think, because if an
+ exception is raised in the setUp then tearDown is not called. `unittest2
+ <http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this
+ easier.
+
+ .. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('package.module.Class')
+ ... self.MockClass = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class is self.MockClass
+ ...
+ >>> MyTest('test_something').run()
+
+ As an added bonus you no longer need to keep a reference to the `patcher`
+ object.
+
+It is also possible to stop all patches which have been started by using
+`patch.stopall`.
+
+.. function:: patch.stopall
+
+ Stop all active patches. Only stops patches started with `start`.
+
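+A minimal sketch, reusing the `package.module` names from the examples above:
+
+.. doctest::
+
+    >>> patcher1 = patch('package.module.Class1')
+    >>> patcher2 = patch('package.module.Class2')
+    >>> MockClass1 = patcher1.start()
+    >>> MockClass2 = patcher2.start()
+    >>> assert package.module.Class1 is MockClass1
+    >>> patch.stopall()
+    >>> # both patches have been undone
+    >>> assert package.module.Class1 is not MockClass1
+    >>> assert package.module.Class2 is not MockClass2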
+
+TEST_PREFIX
+===========
+
+All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with `test` as being test methods. This is the same way that the
+`unittest.TestLoader` finds test methods by default.
+
+You may want to use a different prefix for your tests. You can
+inform the patchers of the different prefix by setting `patch.TEST_PREFIX`:
+
+.. doctest::
+
+ >>> patch.TEST_PREFIX = 'foo'
+ >>> value = 3
+ >>>
+ >>> @patch('__main__.value', 'not three')
+ ... class Thing(object):
+ ... def foo_one(self):
+ ... print value
+ ... def foo_two(self):
+ ... print value
+ ...
+ >>>
+ >>> Thing().foo_one()
+ not three
+ >>> Thing().foo_two()
+ not three
+ >>> value
+ 3
+
+
+Nesting Patch Decorators
+========================
+
+If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test(mock1, mock2):
+ ... assert SomeClass.static_method is mock1
+ ... assert SomeClass.class_method is mock2
+ ... SomeClass.static_method('foo')
+ ... SomeClass.class_method('bar')
+ ... return mock1, mock2
+ ...
+ >>> mock1, mock2 = test()
+ >>> mock1.assert_called_once_with('foo')
+ >>> mock2.assert_called_once_with('bar')
+
+
+Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.
+
+Like all context managers, patches can be nested using contextlib's `nested`
+function; *every* patching will appear in the tuple after "as":
+
+.. doctest::
+
+ >>> from contextlib import nested
+ >>> with nested(
+ ... patch('package.module.ClassName1'),
+ ... patch('package.module.ClassName2')
+ ... ) as (MockClass1, MockClass2):
+ ... assert package.module.ClassName1 is MockClass1
+ ... assert package.module.ClassName2 is MockClass2
+ ...
+
+
+.. _where-to-patch:
+
+Where to patch
+==============
+
+`patch` works by (temporarily) replacing the object that a *name* points to
+with another one. There can be many names pointing to any individual object, so
+for patching to work you must ensure that you patch the name used by the system
+under test.
+
+The basic principle is that you patch where an object is *looked up*, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.
+
+Imagine we have a project that we want to test with the following structure::
+
+ a.py
+ -> Defines SomeClass
+
+ b.py
+ -> from a import SomeClass
+ -> some_function instantiates SomeClass
+
+Now we want to test `some_function` but we want to mock out `SomeClass` using
+`patch`. The problem is that when we import module b, which we will have to
+do, it imports `SomeClass` from module a. If we use `patch` to mock out
+`a.SomeClass` then it will have no effect on our test; module b already has a
+reference to the *real* `SomeClass` and it looks like our patching had no
+effect.
+
+The key is to patch out `SomeClass` where it is used (or where it is looked
+up). In this case `some_function` will actually look up `SomeClass` in module
+b, where we have imported it. The patching should look like:
+
+ `@patch('b.SomeClass')`
+
+However, consider the alternative scenario where instead of `from a import
+SomeClass` module b does `import a` and `some_function` uses `a.SomeClass`. Both
+of these import forms are common. In this case the class we want to patch is
+being looked up in module a, and so we have to patch `a.SomeClass` instead:
+
+ `@patch('a.SomeClass')`
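+
+A minimal sketch of the two cases (the test names are invented for
+illustration)::
+
+    # b.py does `from a import SomeClass`; patch the name in module b
+    @patch('b.SomeClass')
+    def test_uses_b_reference(MockSomeClass):
+        ...
+
+    # b.py does `import a` and uses `a.SomeClass`; patch the name in module a
+    @patch('a.SomeClass')
+    def test_uses_a_reference(MockSomeClass):
+        ...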
+
+
+Patching Descriptors and Proxy Objects
+======================================
+
+Since version 0.6.0 both patch_ and patch.object_ have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the *class* rather than an instance.
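+
+For example, a minimal sketch of patching a property on the class (the
+`Sample` class is made up for illustration):
+
+.. doctest::
+
+    >>> class Sample(object):
+    ...     @property
+    ...     def value(self):
+    ...         return 'production'
+    ...
+    >>> sample = Sample()
+    >>> with patch.object(Sample, 'value', 'patched'):
+    ...     # the descriptor is replaced on the class, so instances
+    ...     # see the patched value too
+    ...     print sample.value
+    ...
+    patched
+    >>> sample.value
+    'production'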
+
+Since version 0.7.0 patch_ and patch.object_ work correctly with some objects
+that proxy attribute access, like the `django settings object
+<http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198>`_.
+
+.. note::
+
+ In django `import settings` and `from django.conf import settings`
+ return different objects. If you are using libraries / apps that do both you
+ may have to patch both. Grrr...
diff --git a/python/mock-1.0.0/docs/sentinel.txt b/python/mock-1.0.0/docs/sentinel.txt
new file mode 100644
index 000000000..1c5223da0
--- /dev/null
+++ b/python/mock-1.0.0/docs/sentinel.txt
@@ -0,0 +1,58 @@
+==========
+ Sentinel
+==========
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def something(self):
+ return self.method()
+
+ class Test(unittest2.TestCase):
+ def testSomething(self):
+ pass
+ self = Test('testSomething')
+
+
+.. data:: sentinel
+
+ The ``sentinel`` object provides a convenient way of creating unique
+ objects for your tests.
+
+ Attributes are created on demand when you access them by name. Accessing
+ the same attribute will always return the same object. The objects
+ returned have a sensible repr so that test failure messages are readable.
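+
+ For example, accessing the same name twice returns the identical object:
+
+ .. doctest::
+
+     >>> sentinel.thing is sentinel.thing
+     True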
+
+
+.. data:: DEFAULT
+
+ The `DEFAULT` object is a pre-created sentinel (actually
+ `sentinel.DEFAULT`). It can be used by :attr:`~Mock.side_effect`
+ functions to indicate that the normal return value should be used.
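+
+ For example, a minimal sketch of a `side_effect` function falling back to
+ the normal return value (the names here are invented for illustration):
+
+ .. doctest::
+
+     >>> mock = Mock(return_value=sentinel.normal_result)
+     >>> def side_effect(arg):
+     ...     if arg == 'special':
+     ...         return 'special result'
+     ...     # DEFAULT means: use mock.return_value as normal
+     ...     return DEFAULT
+     ...
+     >>> mock.side_effect = side_effect
+     >>> mock('special')
+     'special result'
+     >>> mock('ordinary')
+     sentinel.normal_result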
+
+
+Sentinel Example
+================
+
+Sometimes when testing you need to test that a specific object is passed as an
+argument to another method, or returned. It can be common to create named
+sentinel objects to test this. `sentinel` provides a convenient way of
+creating and testing the identity of objects like this.
+
+In this example we monkey patch `method` to return
+`sentinel.some_object`:
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> real.method = Mock(name="method")
+ >>> real.method.return_value = sentinel.some_object
+ >>> result = real.method()
+ >>> assert result is sentinel.some_object
+ >>> sentinel.some_object
+ sentinel.some_object
+
+
diff --git a/python/mock-1.0.0/html/.doctrees/changelog.doctree b/python/mock-1.0.0/html/.doctrees/changelog.doctree
new file mode 100644
index 000000000..635630302
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/changelog.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/compare.doctree b/python/mock-1.0.0/html/.doctrees/compare.doctree
new file mode 100644
index 000000000..2a961fce4
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/compare.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/examples.doctree b/python/mock-1.0.0/html/.doctrees/examples.doctree
new file mode 100644
index 000000000..40e4fded5
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/examples.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/getting-started.doctree b/python/mock-1.0.0/html/.doctrees/getting-started.doctree
new file mode 100644
index 000000000..ba82a8998
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/getting-started.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/index.doctree b/python/mock-1.0.0/html/.doctrees/index.doctree
new file mode 100644
index 000000000..4c53f11a5
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/index.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/magicmock.doctree b/python/mock-1.0.0/html/.doctrees/magicmock.doctree
new file mode 100644
index 000000000..d57214e98
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/magicmock.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/mock.doctree b/python/mock-1.0.0/html/.doctrees/mock.doctree
new file mode 100644
index 000000000..44dc3043c
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/mock.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/mocksignature.doctree b/python/mock-1.0.0/html/.doctrees/mocksignature.doctree
new file mode 100644
index 000000000..58313c2cf
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/mocksignature.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/patch.doctree b/python/mock-1.0.0/html/.doctrees/patch.doctree
new file mode 100644
index 000000000..100ef505d
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/patch.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/.doctrees/sentinel.doctree b/python/mock-1.0.0/html/.doctrees/sentinel.doctree
new file mode 100644
index 000000000..e0312ed60
--- /dev/null
+++ b/python/mock-1.0.0/html/.doctrees/sentinel.doctree
Binary files differ
diff --git a/python/mock-1.0.0/html/_sources/changelog.txt b/python/mock-1.0.0/html/_sources/changelog.txt
new file mode 100644
index 000000000..a605be3d9
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/changelog.txt
@@ -0,0 +1,725 @@
+.. currentmodule:: mock
+
+
+CHANGELOG
+=========
+
+2012/10/07 Version 1.0.0
+------------------------
+
+No changes since 1.0.0 beta 1. This version has feature parity with
+`unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3.
+
+Full list of changes since 0.8:
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/07/13 Version 1.0.0 beta 1
+--------------------------------
+
+* Added `patch.stopall` method to stop all active patches created by `start`
+* BUGFIX: calling `MagicMock.reset_mock` wouldn't reset magic method mocks
+* BUGFIX: calling `reset_mock` on a `MagicMock` created with autospec could
+ raise an exception
+
+
+2012/05/04 Version 1.0.0 alpha 2
+--------------------------------
+
+* `PropertyMock` attributes are now standard `MagicMocks`
+* `create_autospec` works with attributes present in results of `dir` that
+ can't be fetched from the object's class. Contributed by Konstantine Rybnikov
+* Any exceptions in an iterable `side_effect` will be raised instead of
+ returned
+* In Python 3, `create_autospec` now supports keyword only arguments
+
+
+2012/03/25 Version 1.0.0 alpha 1
+--------------------------------
+
+The standard library version!
+
+* `mocksignature`, along with the `mocksignature` argument to `patch`, removed
+* Support for deleting attributes (accessing deleted attributes will raise an
+ `AttributeError`)
+* Added the `mock_open` helper function for mocking the builtin `open`
+* `__class__` is assignable, so a mock can pass an `isinstance` check without
+ requiring a spec
+* Addition of `PropertyMock`, for mocking properties
+* `MagicMocks` made unorderable by default (in Python 3). The comparison
+ methods (other than equality and inequality) now return `NotImplemented`
+* Propagate traceback info to support subclassing of `_patch` by other
+ libraries
+* BUGFIX: passing multiple spec arguments to patchers (`spec`, `spec_set` and
+ `autospec`) had unpredictable results, now it is an error
+* BUGFIX: using `spec=True` *and* `create=True` as arguments to patchers could
+ result in using `DEFAULT` as the spec. Now it is an error instead
+* BUGFIX: using `spec` or `autospec` arguments to patchers, along with
+ `spec_set=True` did not work correctly
+* BUGFIX: using an object that evaluates to False as a spec could be ignored
+* BUGFIX: a list as the `spec` argument to a patcher would always result in a
+ non-callable mock. Now if `__call__` is in the spec the mock is callable
+
+
+2012/02/13 Version 0.8.0
+------------------------
+
+The only changes since 0.8rc2 are:
+
+* Improved repr of :data:`sentinel` objects
+* :data:`ANY` can be used for comparisons against :data:`call` objects
+* The return value of the `MagicMock.__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+
+Full list of changes since 0.7:
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* Addition of :attr:`~Mock.mock_calls` list for *all* calls (including magic
+ methods and chained calls)
+* :func:`patch` and :func:`patch.object` now create a :class:`MagicMock`
+ instead of a :class:`Mock` by default
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method :meth:`~Mock.configure_mock` for setting attributes and
+ return values / side effects on the mock and its attributes
+* New mock assert methods :meth:`~Mock.assert_any_call` and
+ :meth:`~Mock.assert_has_calls`
+* Implemented :ref:`auto-speccing` (recursive, lazy speccing of mocks with
+ mocked signatures for functions/methods), as the `autospec` argument to
+ `patch`
+* Added the :func:`create_autospec` function for manually creating
+ 'auto-specced' mocks
+* :func:`patch.multiple` for doing multiple patches in a single call, using
+ keyword arguments
+* Setting :attr:`~Mock.side_effect` to an iterable will cause calls to the mock
+ to return the next value from the iterable
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of :class:`NonCallableMock` and :class:`NonCallableMagicMock`, mocks
+ without a `__call__` method
+* Addition of :meth:`~Mock.mock_add_spec` method for adding (or changing) a
+ spec on an existing mock
+* Protocol methods on :class:`MagicMock` are magic mocks, and are created
+ lazily on first lookup. This means the result of calling a protocol method is
+ a `MagicMock` instead of a `Mock` as it was previously
+* Addition of :meth:`~Mock.attach_mock` method
+* Added :data:`ANY` for ignoring arguments in :meth:`~Mock.assert_called_with`
+ calls
+* Addition of :data:`call` helper object
+* Improved repr for mocks
+* Improved repr for :attr:`Mock.call_args` and entries in
+ :attr:`Mock.call_args_list`, :attr:`Mock.method_calls` and
+ :attr:`Mock.mock_calls`
+* Improved repr for :data:`sentinel` objects
+* `patch` lookup is done at use time not at decoration time
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level :data:`FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default.
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* Support for using Java exceptions as a :attr:`~Mock.side_effect` on Jython
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ a nicer representation if you `str` or `print` them
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* The return value of the :class:`MagicMock` `__iter__` method can be set to
+ any iterable and isn't required to be an iterator
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ :func:`mocksignature`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+* Removal of deprecated `patch_object`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Added license file to the distribution
+
+
+2012/01/10 Version 0.8.0 release candidate 2
+--------------------------------------------
+
+* Removed the `configure` keyword argument to `create_autospec` and allow
+ arbitrary keyword arguments (for the `Mock` constructor) instead
+* Fixed `ANY` equality with some types in `assert_called_with` calls
+* Switched to a standard Sphinx theme (compatible with
+ `readthedocs.org <http://mock.readthedocs.org>`_)
+
+
+2011/12/29 Version 0.8.0 release candidate 1
+--------------------------------------------
+
+* `create_autospec` on the return value of a mocked class will use `__call__`
+ for the signature rather than `__init__`
+* Performance improvement instantiating `Mock` and `MagicMock`
+* Mocks used as magic methods have the same type as their parent instead of
+ being hardcoded to `MagicMock`
+
+Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.
+
+
+2011/10/09 Version 0.8.0 beta 4
+-------------------------------
+
+* `patch` lookup is done at use time not at decoration time
+* When attaching a Mock to another Mock as a magic method, calls are recorded
+ in mock_calls
+* Addition of `attach_mock` method
+* Renamed the internal classes `Sentinel` and `SentinelObject` to prevent abuse
+* BUGFIX: various issues around circular references with mocks (setting a mock
+ return value to be itself etc)
+
+
+2011/08/15 Version 0.8.0 beta 3
+-------------------------------
+
+* Mocks attached as attributes or return values to other mocks have calls
+ recorded in `method_calls` and `mock_calls` of the parent (unless a name is
+ already set on the child)
+* Addition of `mock_add_spec` method for adding (or changing) a spec on an
+ existing mock
+* Improved repr for `Mock.call_args` and entries in `Mock.call_args_list`,
+ `Mock.method_calls` and `Mock.mock_calls`
+* Improved repr for mocks
+* BUGFIX: minor fixes in the way `mock_calls` is worked out,
+ especially for "intermediate" mocks in a call chain
+
+
+2011/08/05 Version 0.8.0 beta 2
+-------------------------------
+
+* Setting `side_effect` to an iterable will cause calls to the mock to return
+ the next value from the iterable
+* Added `assert_any_call` method
+* Moved `assert_has_calls` from call lists onto mocks
+* BUGFIX: `call_args` and all members of `call_args_list` are two tuples of
+ `(args, kwargs)` again instead of three tuples of `(name, args, kwargs)`
+
+
+2011/07/25 Version 0.8.0 beta 1
+-------------------------------
+
+* `patch.TEST_PREFIX` for controlling how patchers recognise test methods when
+ used to decorate a class
+* `Mock` call lists (`call_args_list`, `method_calls` & `mock_calls`) are now
+ custom list objects that allow membership tests for "sub lists" and have
+ an `assert_has_calls` method for unordered call checks
+* `callargs` changed to *always* be a three-tuple of `(name, args, kwargs)`
+* Addition of `mock_calls` list for *all* calls (including magic methods and
+ chained calls)
+* Extension of `call` object to support chained calls and `callargs` for better
+ comparisons with or without names. `call` object has a `call_list` method for
+ chained calls
+* Added the public `instance` argument to `create_autospec`
+* Support for using Java exceptions as a `side_effect` on Jython
+* Improved failure messages for `assert_called_with` and
+ `assert_called_once_with`
+* Tuples as well as lists can be used to specify allowed methods for `spec` &
+ `spec_set` arguments
+* BUGFIX: Fixed bug in `patch.multiple` for argument passing when creating
+ mocks
+* Added license file to the distribution
+
+
+2011/07/16 Version 0.8.0 alpha 2
+--------------------------------
+
+* `patch.multiple` for doing multiple patches in a single call, using keyword
+ arguments
+* New `new_callable` argument to `patch` and `patch.object` allowing you to
+ pass in a class or callable object (instead of `MagicMock`) that will be
+ called to replace the object being patched
+* Addition of `NonCallableMock` and `NonCallableMagicMock`, mocks without a
+ `__call__` method
+* Mocks created by `patch` have a `MagicMock` as the `return_value` where a
+ class is being patched
+* `create_autospec` can create non-callable mocks for non-callable objects.
+ `return_value` mocks of classes will be non-callable unless the class has
+ a `__call__` method
+* `autospec` creates a `MagicMock` without a spec for properties and slot
+ descriptors, because we don't know the type of object they return
+* Removed the "inherit" argument from `create_autospec`
+* Calling `stop` on an unstarted patcher fails with a more meaningful error
+ message
+* BUGFIX: an error creating a patch, with nested patch decorators, won't leave
+ patches in place
+* BUGFIX: `__truediv__` and `__rtruediv__` not available as magic methods on
+ mocks in Python 3
+* BUGFIX: `assert_called_with` / `assert_called_once_with` can be used with
+ `self` as a keyword argument
+* BUGFIX: autospec for functions / methods with an argument named self that
+ isn't the first argument no longer broken
+* BUGFIX: when patching a class with an explicit spec / spec_set (not a
+ boolean) it applies "spec inheritance" to the return value of the created
+ mock (the "instance")
+* BUGFIX: remove the `__unittest` marker causing traceback truncation
+
+
+2011/06/14 Version 0.8.0 alpha 1
+--------------------------------
+
+mock 0.8.0 is the last version that will support Python 2.4.
+
+* The patchers (`patch`, `patch.object` and `patch.dict`), plus `Mock` and
+ `MagicMock`, take arbitrary keyword arguments for configuration
+* New mock method `configure_mock` for setting attributes and return values /
+ side effects on the mock and its attributes
+* In Python 2.6 or more recent, `dir` on a mock will report all the dynamically
+ created attributes (or the full list of attributes if there is a spec) as
+ well as all the mock methods and attributes.
+* Module level `FILTER_DIR` added to control whether `dir(mock)` filters
+ private attributes. `True` by default. Note that `vars(Mock())` can still be
+ used to get all instance attributes and `dir(type(Mock()))` will still return
+ all the other attributes (irrespective of `FILTER_DIR`)
+* `patch` and `patch.object` now create a `MagicMock` instead of a `Mock` by
+ default
+* Added `ANY` for ignoring arguments in `assert_called_with` calls
+* Addition of `call` helper object
+* Protocol methods on `MagicMock` are magic mocks, and are created lazily on
+ first lookup. This means the result of calling a protocol method is a
+ MagicMock instead of a Mock as it was previously
+* Added the Mock API (`assert_called_with` etc) to functions created by
+ `mocksignature`
+* Private attributes `_name`, `_methods`, `_children`, `_wraps` and `_parent`
+ (etc) renamed to reduce likelihood of clash with user attributes.
+* Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+ signatures for functions/methods)
+
+ Limitations:
+
+ - Doesn't mock magic methods or attributes (it creates MagicMocks, so the
+ magic methods are *there*, they just don't have the signature mocked nor
+ are attributes followed)
+ - Doesn't mock function / method attributes
+ - Uses object traversal on the objects being mocked to determine types - so
+ properties etc may be triggered
+ - The return value of mocked classes (the 'instance') has the same call
+ signature as the class __init__ (as they share the same spec)
+
+ You create auto-specced mocks by passing `autospec=True` to `patch`.
+
+ Note that attributes that are None are special cased and mocked without a
+ spec (so any attribute / method can be used). This is because None is
+ typically used as a default value for attributes that may be of some other
+ type, and as we don't know what type that may be we allow all access.
+
+ Note that the `autospec` option to `patch` obsoletes the `mocksignature`
+ option.
+
+* Added the `create_autospec` function for manually creating 'auto-specced'
+ mocks
+* Removal of deprecated `patch_object`
+
+
+2011/05/30 Version 0.7.2
+------------------------
+
+* BUGFIX: instances of list subclasses can now be used as mock specs
+* BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+ default equality / inequality. This is done through a `side_effect` on
+ the mocks used for `__eq__` / `__ne__`
+
+
+2011/05/06 Version 0.7.1
+------------------------
+
+Package fixes contributed by Michael Fladischer. No code changes.
+
+* Include template in package
+* Use isolated binaries for the tox tests
+* Unset executable bit on docs
+* Fix DOS line endings in getting-started.txt
+
+
+2011/03/05 Version 0.7.0
+------------------------
+
+No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new `Sphinx theme <https://github.com/coordt/ADCtheme/>`_.
+
+The full set of changes since 0.6.0 are:
+
+* Python 3 compatibility
+* Ability to mock magic methods with `Mock` and addition of `MagicMock`
+ with pre-created magic methods
+* Addition of `mocksignature` and `mocksignature` argument to `patch` and
+ `patch.object`
+* Addition of `patch.dict` for changing dictionaries during a test
+* Ability to use `patch`, `patch.object` and `patch.dict` as class decorators
+* Renamed ``patch_object`` to `patch.object` (``patch_object`` is
+ deprecated)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Added docstrings to all objects
+* Improved failure message for `Mock.assert_called_with` when the mock
+ has not been called at all
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4.
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+* BUGFIX: patching the same object twice now restores the patches correctly
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 (or unittest2-py3k) to run
+* Tested with `tox <http://pypi.python.org/pypi/tox>`_ on Python 2.4 - 3.2,
+ jython and pypy (excluding 3.0)
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* Switched from subversion to mercurial for source code control
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2011/02/16 Version 0.7.0 RC 1
+-----------------------------
+
+Changes since beta 4:
+
+* Tested with jython, pypy and Python 3.2 and 3.1
+* Decorated functions / methods have their docstring and `__module__`
+ preserved on Python 2.4
+* BUGFIX: `mock.patch` now works correctly with certain types of objects that
+ proxy attribute access, like the django settings object
+* BUGFIX: `reset_mock` caused infinite recursion when a mock is set as its own
+ return value
+
+
+2010/11/12 Version 0.7.0 beta 4
+-------------------------------
+
+* patchers (`patch`, `patch.object` and `patch.dict`) have start and stop
+ methods
+* Addition of `assert_called_once_with` method
+* repr of a mock with a spec includes the class name of the spec
+* `assert_called_with` works with `python -OO`
+* New `spec_set` keyword argument to `Mock` and `patch`. If used,
+ attempting to *set* an attribute on a mock not on the spec will raise an
+ `AttributeError`
+* Attributes and return value of a `MagicMock` are `MagicMock` objects
+* Attempting to set an unsupported magic method now raises an `AttributeError`
+* `patch.dict` works as a class decorator
+* Switched from subversion to mercurial for source code control
+* BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+ diagnosing this)
+* BUGFIX: `spec=True` works with old style classes
+* BUGFIX: `mocksignature=True` can now patch instance methods via
+ `patch.object`
+
+
+2010/09/18 Version 0.7.0 beta 3
+-------------------------------
+
+* Using spec with :class:`MagicMock` only pre-creates magic methods in the spec
+* Setting a magic method on a mock with a ``spec`` can only be done if the
+ spec has that method
+* Mocks can now be named (`name` argument to constructor) and the name is used
+ in the repr
+* `mocksignature` can now be used with classes (signature based on `__init__`)
+ and callable objects (signature based on `__call__`)
+* Mocks created with a spec can now pass `isinstance` tests (`__class__`
+ returns the type of the spec)
+* Default numeric value for MagicMock is 1 rather than zero (because the
+ MagicMock bool defaults to True and 0 is False)
+* Improved failure message for :meth:`~Mock.assert_called_with` when the mock
+ has not been called at all
+* Adding the following to the set of supported magic methods:
+
+ - ``__getformat__`` and ``__setformat__``
+ - pickle methods
+ - ``__trunc__``, ``__ceil__`` and ``__floor__``
+ - ``__sizeof__``
+
+* Added 'build_sphinx' command to setup.py (requires setuptools or distribute)
+ Thanks to Florian Bauer
+* with statement tests now skipped on Python 2.4
+* Tests require unittest2 to run on Python 2.7
+* Improved several docstrings and documentation
+
+
+2010/06/23 Version 0.7.0 beta 2
+-------------------------------
+
+* :func:`patch.dict` works as a context manager as well as a decorator
+* ``patch.dict`` takes a string to specify a dictionary as well as a
+ dictionary object. If a string is supplied the name specified is imported
+* BUGFIX: ``patch.dict`` restores dictionary even when an exception is raised
+
+
+2010/06/22 Version 0.7.0 beta 1
+-------------------------------
+
+* Addition of :func:`mocksignature`
+* Ability to mock magic methods
+* Ability to use ``patch`` and ``patch.object`` as class decorators
+* Renamed ``patch_object`` to :func:`patch.object` (``patch_object`` is
+ deprecated)
+* Addition of :class:`MagicMock` class with all magic methods pre-created for you
+* Python 3 compatibility (tested with 3.2 but should work with 3.0 & 3.1 as
+ well)
+* Addition of :func:`patch.dict` for changing dictionaries during a test
+* Addition of ``mocksignature`` argument to ``patch`` and ``patch.object``
+* ``help(mock)`` works now (on the module). Can no longer use ``__bases__``
+ as a valid sentinel name (thanks to Stephen Emslie for reporting and
+ diagnosing this)
+* Addition of soft comparisons: `call_args`, `call_args_list` and `method_calls`
+ now return tuple-like objects which compare equal even when empty args
+ or kwargs are skipped
+* Added docstrings.
+* BUGFIX: ``side_effect`` now works with ``BaseException`` exceptions like
+ ``KeyboardInterrupt``
+* BUGFIX: patching the same object twice now restores the patches correctly
+* The tests now require `unittest2 <http://pypi.python.org/pypi/unittest2>`_
+ to run
+* `Konrad Delong <http://konryd.blogspot.com/>`_ added as co-maintainer
+
+
+2009/08/22 Version 0.6.0
+------------------------
+
+* New test layout compatible with test discovery
+* Descriptors (static methods / class methods etc) can now be patched and
+ restored correctly
+* Mocks can raise exceptions when called by setting ``side_effect`` to an
+ exception class or instance
+* Mocks that wrap objects will not pass on calls to the underlying object if
+ an explicit return_value is set
+
+
+2009/04/17 Version 0.5.0
+------------------------
+
+* Made DEFAULT part of the public api.
+* Documentation built with Sphinx.
+* ``side_effect`` is now called with the same arguments as the mock is called
+ with and, if it returns a non-DEFAULT value, that value is automatically set
+ as the ``mock.return_value``.
+* ``wraps`` keyword argument used for wrapping objects (and passing calls through to the wrapped object).
+* ``Mock.reset`` renamed to ``Mock.reset_mock``, as reset is a common API name.
+* ``patch`` / ``patch_object`` are now context managers and can be used with ``with``.
+* A new 'create' keyword argument to patch and patch_object that allows them to patch
+ (and unpatch) attributes that don't exist. (Potentially unsafe to use - it can allow
+ you to have tests that pass when they are testing an API that doesn't exist - use at
+ your own risk!)
+* The methods keyword argument to Mock has been removed and merged with spec. The spec
+ argument can now be a list of methods or an object to take the spec from.
+* Nested patches may now be applied in a different order (created mocks passed
+ in the opposite order). This is actually a bugfix.
+* patch and patch_object now take a spec keyword argument. If spec is
+ passed in as 'True' then the Mock created will take the object it is replacing
+ as its spec object. If the object being replaced is a class, then the return
+ value for the mock will also use the class as a spec.
+* A Mock created without a spec will not attempt to mock any magic methods / attributes
+ (they will raise an ``AttributeError`` instead).
+
+
+2008/10/12 Version 0.4.0
+------------------------
+
+* Default return value is now a new mock rather than None
+* return_value added as a keyword argument to the constructor
+* New method 'assert_called_with'
+* Added 'side_effect' attribute / keyword argument called when mock is called
+* patch decorator split into two decorators:
+
+ - ``patch_object`` which takes an object and an attribute name to patch
+ (plus optionally a value to patch with which defaults to a mock object)
+ - ``patch`` which takes a string specifying a target to patch; in the form
+ 'package.module.Class.attribute'. (plus optionally a value to
+ patch with which defaults to a mock object)
+
+* Can now patch objects with ``None``
+* Change to patch for nose compatibility with error reporting in wrapped functions
+* Reset no longer clears children / return value etc - it just resets
+ call count and call args. It also calls reset on all children (and
+ the return value if it is a mock).
+
+Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.
+
+
+2007/12/03 Version 0.3.1
+-------------------------
+
+``patch`` maintains the name of decorated functions for compatibility with nose
+test autodiscovery.
+
+Tests decorated with ``patch`` that use the two argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.
+
+Thanks to Kevin Dangoor for these changes.
+
+
+2007/11/30 Version 0.3.0
+-------------------------
+
+Removed ``patch_module``. ``patch`` can now take a string as the first
+argument for patching modules.
+
+The third argument to ``patch`` is optional - a mock will be created by
+default if it is not passed in.
+
+
+2007/11/21 Version 0.2.1
+-------------------------
+
+Bug fix, allows reuse of functions decorated with ``patch`` and ``patch_module``.
+
+
+2007/11/20 Version 0.2.0
+-------------------------
+
+Added ``spec`` keyword argument for creating ``Mock`` objects from a
+specification object.
+
+Added ``patch`` and ``patch_module`` monkey patching decorators.
+
+Added ``sentinel`` for convenient access to unique objects.
+
+Distribution includes unit tests.
+
+
+2007/11/19 Version 0.1.0
+-------------------------
+
+Initial release.
+
+
+TODO and Limitations
+====================
+
+Contributions, bug reports and comments welcomed!
+
+Feature requests and bug reports are handled on the issue tracker:
+
+ * `mock issue tracker <http://code.google.com/p/mock/issues/list>`_
+
+`wraps` is not integrated with magic methods.
+
+`patch` could auto-do the patching in the constructor and unpatch in the
+destructor. This would be useful in itself, but violates TOOWTDI and would be
+unsafe for IronPython & PyPy (non-deterministic calling of destructors).
+Destructors aren't called in CPython where there are cycles, but a weak
+reference with a callback can be used to get round this.
+
+`Mock` has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that *hide* these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a `_mock` prefix, making it less likely that
+they will clash. Any outstanding attributes that haven't been modified with
+the prefix should be changed.
+
+If a patch is started using `patch.start` and then not stopped correctly then
+the unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.
+
+When a `Mock` is created by `patch`, arbitrary keywords can be used to set
+attributes. If `patch` is created with a `spec`, and is replacing a class, then
+a `return_value` mock is created. The keyword arguments are not applied to the
+child mock, but could be.
+
+When mocking a class with `patch`, passing in `spec=True` or `autospec=True`,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (`inherit`) to the Mock constructor? `create_autospec` does this, so
+an additional keyword argument to Mock is probably unnecessary.
+
+The `mocksignature` argument to `patch` with a non-`Mock` passed into
+`new_callable` will *probably* cause an error. Should it just be invalid?
+
+Note that `NonCallableMock` and `NonCallableMagicMock` still have the unused
+(and unusable) attributes: `return_value`, `side_effect`, `call_count`,
+`call_args` and `call_args_list`. These could be removed or raise errors on
+getting / setting. They also have the `assert_called_with` and
+`assert_called_once_with` methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.
+
+Some outstanding technical debt. The way autospeccing mocks function
+signatures was copied and modified from `mocksignature`. This could all be
+refactored into one set of functions instead of two. The way we tell if
+patchers are started and if a patcher is being used for a `patch.multiple`
+call are both horrible. There are now a host of helper functions that should
+be rationalised. (Probably time to split mock into a package instead of a
+module.)
+
+Passing arbitrary keyword arguments to `create_autospec`, or `patch` with
+`autospec`, when mocking a *function* works fine. However, the arbitrary
+attributes are set on the created mock - but `create_autospec` returns a
+real function (which doesn't have those attributes). Then again, what is the
+use case for using autospec to create functions with attributes that don't
+exist on the original?
+
+`mocksignature`, plus the `call_args_list` and `method_calls` attributes of
+`Mock` could all be deprecated.
diff --git a/python/mock-1.0.0/html/_sources/compare.txt b/python/mock-1.0.0/html/_sources/compare.txt
new file mode 100644
index 000000000..41555308e
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/compare.txt
@@ -0,0 +1,628 @@
+=========================
+ Mock Library Comparison
+=========================
+
+
+.. testsetup::
+
+ def assertEqual(a, b):
+ assert a == b, ("%r != %r" % (a, b))
+
+ def assertRaises(Exc, func):
+ try:
+ func()
+ except Exc:
+ return
+ assert False, ("%s not raised" % Exc)
+
+ sys.modules['somemodule'] = somemodule = mock.Mock(name='somemodule')
+ class SomeException(Exception):
+ some_method = method1 = method2 = None
+ some_other_object = SomeObject = SomeException
+
+
+A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.
+
+These are:
+
+* `flexmock <http://pypi.python.org/pypi/flexmock>`_
+* `mox <http://pypi.python.org/pypi/mox>`_
+* `Mocker <http://niemeyer.net/mocker>`_
+* `dingus <http://pypi.python.org/pypi/dingus>`_
+* `fudge <http://pypi.python.org/pypi/fudge>`_
+
+Popular Python mocking frameworks not yet represented here include
+`MiniMock <http://pypi.python.org/pypi/MiniMock>`_.
+
+`pMock <http://pmock.sourceforge.net/>`_ (last release 2004 and doesn't import
+in recent versions of Python) and
+`python-mock <http://python-mock.sourceforge.net/>`_ (last release 2005) are
+intentionally omitted.
+
+.. note::
+
+    A more up-to-date version of this comparison, tested against all of the
+    mock libraries (only the mock examples on this page can be executed as
+    doctests), is maintained by Gary Bernhardt:
+
+ * `Python Mock Library Comparison
+ <http://garybernhardt.github.com/python-mock-comparison/>`_
+
+This comparison is by no means complete, and also may not be fully idiomatic
+for all the libraries represented. *Please* contribute corrections, missing
+comparisons, or comparisons for additional libraries to the `mock issue
+tracker <https://code.google.com/p/mock/issues/list>`_.
+
+This comparison page was originally created by the `Mox project
+<https://code.google.com/p/pymox/wiki/MoxComparison>`_ and then extended for
+`flexmock and mock <http://has207.github.com/flexmock/compare.html>`_ by
+Herman Sheremetyev. Dingus examples written by `Gary Bernhardt
+<http://garybernhardt.github.com/python-mock-comparison/>`_. fudge examples
+provided by `Kumar McMillan <http://farmdev.com/>`_.
+
+.. note::
+
+    The example tasks here were originally created by Mox, which is a mocking
+ *framework* rather than a library like mock. The tasks shown naturally
+ exemplify tasks that frameworks are good at and not the ones they make
+ harder. In particular you can take a `Mock` or `MagicMock` object and use
+ it in any way you want with no up-front configuration. The same is also
+ true for Dingus.
+
+ The examples for mock here assume version 0.7.0.
+
+
+Simple fake object
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "calculated value"
+ >>> my_mock.some_attribute = "value"
+ >>> assertEqual("calculated value", my_mock.some_method())
+ >>> assertEqual("value", my_mock.some_attribute)
+
+::
+
+ # Flexmock
+ mock = flexmock(some_method=lambda: "calculated value", some_attribute="value")
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("calculated value")
+ mock.some_attribute = "value"
+ mox.Replay(mock)
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("calculated value")
+ mocker.replay()
+ mock.some_attribute = "value"
+ assertEqual("calculated value", mock.some_method())
+ assertEqual("value", mock.some_attribute)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_attribute="value",
+ ... some_method__returns="calculated value")
+ >>> assertEqual("calculated value", my_dingus.some_method())
+ >>> assertEqual("value", my_dingus.some_attribute)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .provides('some_method')
+ ... .returns("calculated value")
+ ... .has_attr(some_attribute="value"))
+ ...
+ >>> assertEqual("calculated value", my_fake.some_method())
+ >>> assertEqual("value", my_fake.some_attribute)
+
+
+Simple mock
+~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.return_value = "value"
+ >>> assertEqual("value", my_mock.some_method())
+ >>> my_mock.some_method.assert_called_once_with()
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_return("value").once
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus(some_method__returns="value")
+ >>> assertEqual("value", my_dingus.some_method())
+ >>> assert my_dingus.some_method.calls().once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .expects('some_method')
+ ... .returns("value")
+ ... .times_called(1))
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was not called
+
+
+Creating partial mocks
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> SomeObject.some_method = mock.Mock(return_value='value')
+ >>> assertEqual("value", SomeObject.some_method())
+
+::
+
+ # Flexmock
+ flexmock(SomeObject).should_receive("some_method").and_return('value')
+ assertEqual("value", mock.some_method())
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.some_method().AndReturn("value")
+ mox.Replay(mock)
+ assertEqual("value", mock.some_method())
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock(SomeObject)
+    mock.some_method()
+ mocker.result("value")
+ mocker.replay()
+ assertEqual("value", mock.some_method())
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> object = SomeObject
+ >>> object.some_method = dingus.Dingus(return_value="value")
+ >>> assertEqual("value", object.some_method())
+
+::
+
+ >>> # fudge
+ >>> fake = fudge.Fake().is_callable().returns("<fudge-value>")
+ >>> with fudge.patched_context(SomeObject, 'some_method', fake):
+ ... s = SomeObject()
+ ... assertEqual("<fudge-value>", s.some_method())
+ ...
+
+
+Ensure calls are made in specific order
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock(spec=SomeObject)
+ >>> my_mock.method1()
+ <Mock name='mock.method1()' id='...'>
+ >>> my_mock.method2()
+ <Mock name='mock.method2()' id='...'>
+ >>> assertEqual(my_mock.mock_calls, [call.method1(), call.method2()])
+
+::
+
+ # Flexmock
+ mock = flexmock(SomeObject)
+ mock.should_receive('method1').once.ordered.and_return('first thing')
+ mock.should_receive('method2').once.ordered.and_return('second thing')
+
+ # Mox
+ mock = mox.MockObject(SomeObject)
+ mock.method1().AndReturn('first thing')
+ mock.method2().AndReturn('second thing')
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ with mocker.order():
+ mock.method1()
+ mocker.result('first thing')
+ mock.method2()
+ mocker.result('second thing')
+ mocker.replay()
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.method1()
+ <Dingus ...>
+ >>> my_dingus.method2()
+ <Dingus ...>
+ >>> assertEqual(['method1', 'method2'], [call.name for call in my_dingus.calls])
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = (fudge.Fake()
+ ... .remember_order()
+ ... .expects('method1')
+ ... .expects('method2'))
+ ... my_fake.method2()
+ ... my_fake.method1()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: Call #1 was fake:my_fake.method2(); Expected: #1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end
+
+
+Raising exceptions
+~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method.side_effect = SomeException("message")
+ >>> assertRaises(SomeException, my_mock.some_method)
+
+::
+
+ # Flexmock
+ mock = flexmock()
+ mock.should_receive("some_method").and_raise(SomeException("message"))
+ assertRaises(SomeException, mock.some_method)
+
+ # Mox
+ mock = mox.MockAnything()
+ mock.some_method().AndRaise(SomeException("message"))
+ mox.Replay(mock)
+ assertRaises(SomeException, mock.some_method)
+ mox.Verify(mock)
+
+ # Mocker
+ mock = mocker.mock()
+ mock.some_method()
+ mocker.throw(SomeException("message"))
+ mocker.replay()
+ assertRaises(SomeException, mock.some_method)
+ mocker.verify()
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method = dingus.exception_raiser(SomeException)
+ >>> assertRaises(SomeException, my_dingus.some_method)
+
+::
+
+ >>> # fudge
+ >>> my_fake = (fudge.Fake()
+ ... .is_callable()
+ ... .raises(SomeException("message")))
+ ...
+ >>> my_fake()
+ Traceback (most recent call last):
+ ...
+ SomeException: message
+
+
+Override new instances of a class
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('somemodule.Someclass') as MockClass:
+ ... MockClass.return_value = some_other_object
+ ... assertEqual(some_other_object, somemodule.Someclass())
+ ...
+
+
+::
+
+ # Flexmock
+ flexmock(some_module.SomeClass, new_instances=some_other_object)
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mox
+ # (you will probably have mox.Mox() available as self.mox in a real test)
+ mox.Mox().StubOutWithMock(some_module, 'SomeClass', use_mock_anything=True)
+ some_module.SomeClass().AndReturn(some_other_object)
+ mox.ReplayAll()
+ assertEqual(some_other_object, some_module.SomeClass())
+
+ # Mocker
+ instance = mocker.mock()
+ klass = mocker.replace(SomeClass, spec=None)
+ klass('expected', 'args')
+ mocker.result(instance)
+
+::
+
+ >>> # Dingus
+ >>> MockClass = dingus.Dingus(return_value=some_other_object)
+ >>> with dingus.patch('somemodule.SomeClass', MockClass):
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+
+::
+
+ >>> # fudge
+ >>> @fudge.patch('somemodule.SomeClass')
+ ... def test(FakeClass):
+ ... FakeClass.is_callable().returns(some_other_object)
+ ... assertEqual(some_other_object, somemodule.SomeClass())
+ ...
+ >>> test()
+
+
+Call the same method multiple times
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. note::
+
+ You don't need to do *any* configuration to call `mock.Mock()` methods
+ multiple times. Attributes like `call_count`, `call_args_list` and
+ `method_calls` provide various different ways of making assertions about
+ how the mock was used.
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> my_mock.some_method()
+ <Mock name='mock.some_method()' id='...'>
+ >>> assert my_mock.some_method.call_count >= 2
+
+::
+
+    # Flexmock
+    # (verifies that the method gets called at least twice)
+ flexmock(some_object).should_receive('some_method').at_least.twice
+
+ # Mox
+ # (does not support variable number of calls, so you need to create a new entry for each explicit call)
+ mock = mox.MockObject(some_object)
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mock.some_method(mox.IgnoreArg(), mox.IgnoreArg())
+ mox.Replay(mock)
+ mox.Verify(mock)
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> my_dingus.some_method()
+ <Dingus ...>
+ >>> assert len(my_dingus.calls('some_method')) == 2
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake().expects('some_method').times_called(2)
+ ... my_fake.some_method()
+ ...
+ >>> test()
+ Traceback (most recent call last):
+ ...
+ AssertionError: fake:my_fake.some_method() was called 1 time(s). Expected 2.
+
+
+Mock chained methods
+~~~~~~~~~~~~~~~~~~~~
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.Mock()
+ >>> method3 = my_mock.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_mock.method1().method2().method3(1, 2))
+ >>> method3.assert_called_once_with(1, 2)
+
+::
+
+ # Flexmock
+ # (intermediate method calls are automatically assigned to temporary fake objects
+ # and can be called with any arguments)
+ flexmock(some_object).should_receive(
+ 'method1.method2.method3'
+ ).with_args(arg1, arg2).and_return('some value')
+ assertEqual('some_value', some_object.method1().method2().method3(arg1, arg2))
+
+::
+
+ # Mox
+ mock = mox.MockObject(some_object)
+ mock2 = mox.MockAnything()
+ mock3 = mox.MockAnything()
+    mock.method1().AndReturn(mock2)
+    mock2.method2().AndReturn(mock3)
+ mock3.method3(arg1, arg2).AndReturn('some_value')
+ self.mox.ReplayAll()
+ assertEqual("some_value", some_object.method1().method2().method3(arg1, arg2))
+ self.mox.VerifyAll()
+
+ # Mocker
+ # (TODO)
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> method3 = my_dingus.method1.return_value.method2.return_value.method3
+ >>> method3.return_value = 'some value'
+ >>> assertEqual('some value', my_dingus.method1().method2().method3(1, 2))
+ >>> assert method3.calls('()', 1, 2).once()
+
+::
+
+ >>> # fudge
+ >>> @fudge.test
+ ... def test():
+ ... my_fake = fudge.Fake()
+ ... (my_fake
+ ... .expects('method1')
+ ... .returns_fake()
+ ... .expects('method2')
+ ... .returns_fake()
+ ... .expects('method3')
+ ... .with_args(1, 2)
+ ... .returns('some value'))
+ ... assertEqual('some value', my_fake.method1().method2().method3(1, 2))
+ ...
+ >>> test()
+
+
+Mocking a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Examples for mock, Dingus and fudge only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with my_mock:
+ ... pass
+ ...
+ >>> my_mock.__enter__.assert_called_with()
+ >>> my_mock.__exit__.assert_called_with(None, None, None)
+
+::
+
+
+ >>> # Dingus (nothing special here; all dinguses are "magic mocks")
+ >>> my_dingus = dingus.Dingus()
+ >>> with my_dingus:
+ ... pass
+ ...
+ >>> assert my_dingus.__enter__.calls()
+ >>> assert my_dingus.__exit__.calls('()', None, None, None)
+
+::
+
+ >>> # fudge
+ >>> my_fake = fudge.Fake().provides('__enter__').provides('__exit__')
+ >>> with my_fake:
+ ... pass
+ ...
+
+
+Mocking the builtin open used as a context manager
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Example for mock only (so far):
+
+.. doctest::
+
+ >>> # mock
+ >>> my_mock = mock.MagicMock()
+ >>> with mock.patch('__builtin__.open', my_mock):
+ ... manager = my_mock.return_value.__enter__.return_value
+ ... manager.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+*or*:
+
+.. doctest::
+
+ >>> # mock
+ >>> with mock.patch('__builtin__.open') as my_mock:
+ ... my_mock.return_value.__enter__ = lambda s: s
+ ... my_mock.return_value.__exit__ = mock.Mock()
+ ... my_mock.return_value.read.return_value = 'some data'
+ ... with open('foo') as h:
+ ... data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> my_mock.assert_called_once_with('foo')
+
+::
+
+ >>> # Dingus
+ >>> my_dingus = dingus.Dingus()
+ >>> with dingus.patch('__builtin__.open', my_dingus):
+ ... file_ = open.return_value.__enter__.return_value
+ ... file_.read.return_value = 'some data'
+ ... with open('foo') as h:
+    ...         data = h.read()
+ ...
+ >>> data
+ 'some data'
+ >>> assert my_dingus.calls('()', 'foo').once()
+
+::
+
+ >>> # fudge
+ >>> from contextlib import contextmanager
+ >>> from StringIO import StringIO
+ >>> @contextmanager
+ ... def fake_file(filename):
+ ... yield StringIO('sekrets')
+ ...
+ >>> with fudge.patch('__builtin__.open') as fake_open:
+ ... fake_open.is_callable().calls(fake_file)
+ ... with open('/etc/password') as f:
+ ... data = f.read()
+ ...
+ fake:__builtin__.open
+ >>> data
+ 'sekrets' \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_sources/examples.txt b/python/mock-1.0.0/html/_sources/examples.txt
new file mode 100644
index 000000000..ecb994b15
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/examples.txt
@@ -0,0 +1,1063 @@
+.. _further-examples:
+
+==================
+ Further Examples
+==================
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ from datetime import date
+
+ BackendProvider = Mock()
+ sys.modules['mymodule'] = mymodule = Mock(name='mymodule')
+
+ def grob(val):
+ "First frob and then clear val"
+ mymodule.frob(val)
+ val.clear()
+
+ mymodule.frob = lambda val: val
+ mymodule.grob = grob
+ mymodule.date = date
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ out = unittest2.TestCase.run(self, result)
+ assert result.wasSuccessful()
+
+ from mock import inPy3k
+
+
+
+For comprehensive examples, see the unit tests included in the full source
+distribution.
+
+Here are some more examples, covering slightly more advanced scenarios than
+the :ref:`getting started <getting-started>` guide.
+
+
+Mocking chained calls
+=====================
+
+Mocking chained calls is actually straightforward with mock once you
+understand the :attr:`~Mock.return_value` attribute. When a mock is called for
+the first time, or you fetch its `return_value` before it has been called, a
+new `Mock` is created.
+
+This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the `return_value` mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock().foo(a=2, b=3)
+ <Mock name='mock().foo()' id='...'>
+ >>> mock.return_value.foo.assert_called_with(a=2, b=3)
+
+From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...
+
+So, suppose we have some code that looks a little bit like this:
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self):
+ ... self.backend = BackendProvider()
+ ... def method(self):
+ ... response = self.backend.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ ... # more code
+
+Assuming that `BackendProvider` is already well tested, how do we test
+`method()`? Specifically, we want to test that the code section `# more
+code` uses the response object in the correct way.
+
+As this chain of calls is made from an instance attribute we can monkey patch
+the `backend` attribute on a `Something` instance. In this particular case
+we are only interested in the return value from the final call to
+`start_call` so we don't have much configuration to do. Let's assume the
+object it returns is 'file-like', so we'll ensure that our response object
+uses the builtin `file` as its `spec`.
+
+To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+`start_call` we could do this:
+
+ `mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response`.
+
+We can do that in a slightly nicer way using the :meth:`~Mock.configure_mock`
+method to directly set the return value for us:
+
+.. doctest::
+
+ >>> something = Something()
+ >>> mock_response = Mock(spec=file)
+ >>> mock_backend = Mock()
+ >>> config = {'get_endpoint.return_value.create_call.return_value.start_call.return_value': mock_response}
+ >>> mock_backend.configure_mock(**config)
+
+With these we monkey patch the "mock backend" in place and can make the real
+call:
+
+.. doctest::
+
+ >>> something.backend = mock_backend
+ >>> something.method()
+
+Using :attr:`~Mock.mock_calls` we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in `mock_calls`. We can use :meth:`call.call_list` to create
+this list of calls for us:
+
+.. doctest::
+
+ >>> chained = call.get_endpoint('foobar').create_call('spam', 'eggs').start_call()
+ >>> call_list = chained.call_list()
+ >>> assert mock_backend.mock_calls == call_list
+
+
+Partial mocking
+===============
+
+In some tests I wanted to mock out a call to `datetime.date.today()
+<http://docs.python.org/library/datetime.html#datetime.date.today>`_ to return
+a known date, but I didn't want to prevent the code under test from
+creating new date objects. Unfortunately `datetime.date` is written in C, and
+so I couldn't just monkey-patch out the static `date.today` method.
+
+I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).
+
+The :func:`patch decorator <patch>` is used here to
+mock out the `date` class in the module under test. The :attr:`side_effect`
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by `side_effect`.
+
+.. doctest::
+
+ >>> from datetime import date
+ >>> with patch('mymodule.date') as mock_date:
+ ... mock_date.today.return_value = date(2010, 10, 8)
+ ... mock_date.side_effect = lambda *args, **kw: date(*args, **kw)
+ ...
+ ... assert mymodule.date.today() == date(2010, 10, 8)
+ ... assert mymodule.date(2009, 6, 8) == date(2009, 6, 8)
+ ...
+
+Note that we don't patch `datetime.date` globally, we patch `date` in the
+module that *uses* it. See :ref:`where to patch <where-to-patch>`.
+
+When `date.today()` is called a known date is returned, but calls to the
+`date(...)` constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.
+
+Calls to the date constructor are recorded in the `mock_date` attributes
+(`call_count` and friends) which may also be useful for your tests.
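+
+For instance, continuing the example above (a small sketch):
+
+.. doctest::
+
+    >>> mock_date.call_count
+    1
+    >>> mock_date.call_args
+    call(2009, 6, 8)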
+
+An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in `this blog entry
+<http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_.
+
+
+Mocking a Generator Method
+==========================
+
+A Python generator is a function or method that uses the `yield statement
+<http://docs.python.org/reference/simple_stmts.html#the-yield-statement>`_ to
+return a series of values when iterated over [#]_.
+
+A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is `__iter__
+<http://docs.python.org/library/stdtypes.html#container.__iter__>`_, so we can
+mock this using a `MagicMock`.
+
+Here's an example class with an "iter" method implemented as a generator:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def iter(self):
+ ... for i in [1, 2, 3]:
+ ... yield i
+ ...
+ >>> foo = Foo()
+ >>> list(foo.iter())
+ [1, 2, 3]
+
+
+How would we mock this class, and in particular its "iter" method?
+
+To configure the values returned from the iteration (implicit in the call to
+`list`), we need to configure the object returned by the call to `foo.iter()`.
+
+.. doctest::
+
+ >>> mock_foo = MagicMock()
+ >>> mock_foo.iter.return_value = iter([1, 2, 3])
+ >>> list(mock_foo.iter())
+ [1, 2, 3]
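+
+Since `MagicMock` lets you configure magic methods, an alternative (a minimal
+sketch along the same lines) is to make the mock *itself* iterable by setting
+`__iter__` directly:
+
+.. doctest::
+
+    >>> mock_foo = MagicMock()
+    >>> mock_foo.__iter__.return_value = iter([1, 2, 3])
+    >>> list(mock_foo)
+    [1, 2, 3]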
+
+.. [#] There are also generator expressions and more `advanced uses
+ <http://www.dabeaz.com/coroutines/index.html>`_ of generators, but we aren't
+ concerned about them here. A very good introduction to generators and how
+ powerful they are is: `Generator Tricks for Systems Programmers
+ <http://www.dabeaz.com/generators/>`_.
+
+
+Applying the same patch to every test method
+============================================
+
+If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. For Python 2.6 or more recent you can use `patch` (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. Test methods are identified as methods whose names
+start with `test`:
+
+.. doctest::
+
+ >>> @patch('mymodule.SomeClass')
+ ... class MyTest(TestCase):
+ ...
+ ... def test_one(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def test_two(self, MockSomeClass):
+ ... self.assertTrue(mymodule.SomeClass is MockSomeClass)
+ ...
+ ... def not_a_test(self):
+ ... return 'something'
+ ...
+ >>> MyTest('test_one').test_one()
+ >>> MyTest('test_two').test_two()
+ >>> MyTest('test_two').not_a_test()
+ 'something'
+
+An alternative way of managing patches is to use the :ref:`start-and-stop`.
+These allow you to move the patching into your `setUp` and `tearDown` methods.
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher = patch('mymodule.foo')
+ ... self.mock_foo = self.patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ ... def tearDown(self):
+ ... self.patcher.stop()
+ ...
+ >>> MyTest('test_foo').run()
+
+If you use this technique you must ensure that the patching is "undone" by
+calling `stop`. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. `unittest2
+<http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this simpler:
+
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('mymodule.foo')
+ ... self.addCleanup(patcher.stop)
+ ... self.mock_foo = patcher.start()
+ ...
+ ... def test_foo(self):
+ ... self.assertTrue(mymodule.foo is self.mock_foo)
+ ...
+ >>> MyTest('test_foo').run()
+
+
+Mocking Unbound Methods
+=======================
+
+Whilst writing tests today I needed to patch an *unbound method* (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I wanted to make asserts about which objects
+were calling this particular method. The issue is that you can't patch with a
+mock for this, because if you replace an unbound method with a mock it doesn't
+become a bound method when fetched from the instance, and so it doesn't get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The :func:`patch` decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.
+
+If you pass `autospec=True` to patch then it does the patching with a
+*real* function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means though, is
+that if you use it to patch out an unbound method on a class the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have `self` passed in as the first argument, which is exactly what I
+wanted:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def foo(self):
+ ... pass
+ ...
+ >>> with patch.object(Foo, 'foo', autospec=True) as mock_foo:
+ ... mock_foo.return_value = 'foo'
+ ... foo = Foo()
+ ... foo.foo()
+ ...
+ 'foo'
+ >>> mock_foo.assert_called_once_with(foo)
+
+If we don't use `autospec=True` then the unbound method is patched out
+with a Mock instance instead, and isn't called with `self`.
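+
+A quick sketch of the difference, reusing the `Foo` class from above:
+
+.. doctest::
+
+    >>> with patch.object(Foo, 'foo') as mock_foo:
+    ...     mock_foo.return_value = 'foo'
+    ...     foo = Foo()
+    ...     foo.foo()
+    ...
+    'foo'
+    >>> mock_foo.assert_called_once_with()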
+
+
+Checking multiple calls with mock
+=================================
+
+mock has a nice API for making assertions about how your mock objects are used.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.foo_bar.return_value = None
+ >>> mock.foo_bar('baz', spam='eggs')
+ >>> mock.foo_bar.assert_called_with('baz', spam='eggs')
+
+If your mock is only being called once you can use the
+:meth:`assert_called_once_with` method that also asserts that the
+:attr:`call_count` is one.
+
+.. doctest::
+
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ >>> mock.foo_bar()
+ >>> mock.foo_bar.assert_called_once_with('baz', spam='eggs')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+Both `assert_called_with` and `assert_called_once_with` make assertions about
+the *most recent* call. If your mock is going to be called several times, and
+you want to make assertions about *all* those calls you can use
+:attr:`~Mock.call_args_list`:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, 3)
+ >>> mock(4, 5, 6)
+ >>> mock()
+ >>> mock.call_args_list
+ [call(1, 2, 3), call(4, 5, 6), call()]
+
+The :data:`call` helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to `call_args_list`. This
+looks remarkably similar to the repr of the `call_args_list`:
+
+.. doctest::
+
+ >>> expected = [call(1, 2, 3), call(4, 5, 6), call()]
+ >>> mock.call_args_list == expected
+ True
+
+
+Coping with mutable arguments
+=============================
+
+Another situation that is rare, but can bite you, is when your mock is called
+with
+mutable arguments. `call_args` and `call_args_list` store *references* to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.
+
+Here's some example code that shows the problem. Imagine the following functions
+defined in 'mymodule'::
+
+ def frob(val):
+ pass
+
+ def grob(val):
+ "First frob and then clear val"
+ frob(val)
+ val.clear()
+
+When we try to test that `grob` calls `frob` with the correct argument look
+what happens:
+
+.. doctest::
+
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> val
+ set([])
+ >>> mock_frob.assert_called_with(set([6]))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((set([6]),), {})
+ Called with: ((set([]),), {})
+
+One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.
+
+Here's one solution that uses the :attr:`side_effect`
+functionality. If you provide a `side_effect` function for a mock then
+`side_effect` will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I'm using *another* mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> from mock import Mock, patch, DEFAULT
+ >>> def copy_call_args(mock):
+ ... new_mock = Mock()
+ ... def side_effect(*args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... new_mock(*args, **kwargs)
+ ... return DEFAULT
+ ... mock.side_effect = side_effect
+ ... return new_mock
+ ...
+ >>> with patch('mymodule.frob') as mock_frob:
+ ... new_mock = copy_call_args(mock_frob)
+ ... val = set([6])
+ ... mymodule.grob(val)
+ ...
+ >>> new_mock.assert_called_with(set([6]))
+ >>> new_mock.call_args
+ call(set([6]))
+
+`copy_call_args` is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The `side_effect` function makes a copy of
+the args and calls our `new_mock` with the copy.
+
+.. note::
+
+ If your mock is only going to be used once there is an easier way of
+ checking arguments at the point they are called. You can simply do the
+ checking inside a `side_effect` function.
+
+ .. doctest::
+
+ >>> def side_effect(arg):
+ ... assert arg == set([6])
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(set([6]))
+ >>> mock(set())
+ Traceback (most recent call last):
+ ...
+ AssertionError
+
+An alternative approach is to create a subclass of `Mock` or `MagicMock` that
+copies (using `copy.deepcopy
+<http://docs.python.org/library/copy.html#copy.deepcopy>`_) the arguments.
+Here's an example implementation:
+
+.. doctest::
+
+ >>> from copy import deepcopy
+ >>> class CopyingMock(MagicMock):
+ ... def __call__(self, *args, **kwargs):
+ ... args = deepcopy(args)
+ ... kwargs = deepcopy(kwargs)
+ ... return super(CopyingMock, self).__call__(*args, **kwargs)
+ ...
+ >>> c = CopyingMock(return_value=None)
+ >>> arg = set()
+ >>> c(arg)
+ >>> arg.add(1)
+ >>> c.assert_called_with(set())
+ >>> c.assert_called_with(arg)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(set([1]))
+ Actual call: mock(set([]))
+ >>> c.foo
+ <CopyingMock name='mock.foo' id='...'>
+
+When you subclass `Mock` or `MagicMock` all dynamically created attributes,
+and the `return_value` will use your subclass automatically. That means all
+children of a `CopyingMock` will also have the type `CopyingMock`.
+
+
+Raising exceptions on attribute access
+======================================
+
+You can use :class:`PropertyMock` to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.
+
+Here's an example raising a `ValueError` when the 'foo' attribute is accessed:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(side_effect=ValueError)
+ >>> type(m).foo = p
+ >>> m.foo
+ Traceback (most recent call last):
+ ....
+ ValueError
+
+Because every mock object has its own type, a new subclass of whichever mock
+class you're using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors or whatever you want in fact)
+to `type(mock)` without affecting other mock objects.
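+
+For instance (a minimal sketch of that isolation):
+
+.. doctest::
+
+    >>> m1 = MagicMock()
+    >>> m2 = MagicMock()
+    >>> type(m1).foo = PropertyMock(return_value=3)
+    >>> m1.foo
+    3
+    >>> m2.foo
+    <MagicMock name='mock.foo' id='...'>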
+
+
+Multiple calls with different effects
+=====================================
+
+.. note::
+
+ In mock 1.0 the handling of iterable `side_effect` was changed. Any
+ exceptions in the iterable will be raised instead of returned.
+
+Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but returns a response on the second
+call (testing retry behaviour).
+
+One approach is to use a :attr:`side_effect` function that replaces itself. The
+first time it is called the `side_effect` sets a new `side_effect` that will
+be used for the second call. It then raises an exception:
+
+.. doctest::
+
+ >>> def side_effect(*args):
+ ... def second_call(*args):
+ ... return 'response'
+ ... mock.side_effect = second_call
+ ... raise Exception('boom')
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:
+
+.. doctest::
+
+ >>> returns = [Exception('boom'), 'response']
+ >>> def side_effect(*args):
+ ... result = returns.pop(0)
+ ... if isinstance(result, Exception):
+ ... raise result
+ ... return result
+ ...
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock('first')
+ Traceback (most recent call last):
+ ...
+ Exception: boom
+ >>> mock('second')
+ 'response'
+ >>> mock.assert_called_with('second')
+
+Which approach you prefer is a matter of taste. The first approach is actually
+a line shorter but maybe the second approach is more readable.
+
+
+Nesting Patches
+===============
+
+Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def test_foo(self):
+ ... with patch('mymodule.Foo') as mock_foo:
+ ... with patch('mymodule.Bar') as mock_bar:
+ ... with patch('mymodule.Spam') as mock_spam:
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').test_foo()
+ >>> assert mymodule.Foo is original
+
+With unittest2_ `cleanup` functions and the :ref:`start-and-stop` we can
+achieve the same effect without the nested indentation. A simple helper
+method, `create_patch`, puts the patch in place and returns the created mock
+for us:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ...
+ ... def create_patch(self, name):
+ ... patcher = patch(name)
+ ... thing = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ... return thing
+ ...
+ ... def test_foo(self):
+ ... mock_foo = self.create_patch('mymodule.Foo')
+ ... mock_bar = self.create_patch('mymodule.Bar')
+ ... mock_spam = self.create_patch('mymodule.Spam')
+ ...
+ ... assert mymodule.Foo is mock_foo
+ ... assert mymodule.Bar is mock_bar
+ ... assert mymodule.Spam is mock_spam
+ ...
+ >>> original = mymodule.Foo
+ >>> MyTest('test_foo').run()
+ >>> assert mymodule.Foo is original
+
+
+Mocking a dictionary with MagicMock
+===================================
+
+You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.
+
+We can do this with :class:`MagicMock`, which will behave like a dictionary,
+and using :data:`~Mock.side_effect` to delegate dictionary access to a real
+underlying dictionary that is under our control.
+
+When the `__getitem__` and `__setitem__` methods of our `MagicMock` are called
+(normal dictionary access) then `side_effect` is called with the key (and in
+the case of `__setitem__` the value too). We can also control what is returned.
+
+After the `MagicMock` has been used we can use attributes like
+:data:`~Mock.call_args_list` to assert about how the dictionary was used:
+
+.. doctest::
+
+ >>> my_dict = {'a': 1, 'b': 2, 'c': 3}
+ >>> def getitem(name):
+ ... return my_dict[name]
+ ...
+ >>> def setitem(name, val):
+ ... my_dict[name] = val
+ ...
+ >>> mock = MagicMock()
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+.. note::
+
+ An alternative to using `MagicMock` is to use `Mock` and *only* provide
+ the magic methods you specifically want:
+
+ .. doctest::
+
+ >>> mock = Mock()
+        >>> mock.__setitem__ = Mock(side_effect=setitem)
+        >>> mock.__getitem__ = Mock(side_effect=getitem)
+
+ A *third* option is to use `MagicMock` but passing in `dict` as the `spec`
+ (or `spec_set`) argument so that the `MagicMock` created only has
+ dictionary magic methods available:
+
+ .. doctest::
+
+ >>> mock = MagicMock(spec_set=dict)
+ >>> mock.__getitem__.side_effect = getitem
+ >>> mock.__setitem__.side_effect = setitem
+
+With these side effect functions in place, the `mock` will behave like a
+normal dictionary but record the access. It even raises a `KeyError` if you
+try to access a key that doesn't exist.
+
+.. doctest::
+
+ >>> mock['a']
+ 1
+ >>> mock['c']
+ 3
+ >>> mock['d']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'd'
+ >>> mock['b'] = 'fish'
+ >>> mock['d'] = 'eggs'
+ >>> mock['b']
+ 'fish'
+ >>> mock['d']
+ 'eggs'
+
+After it has been used you can make assertions about the access using the normal
+mock methods and attributes:
+
+.. doctest::
+
+ >>> mock.__getitem__.call_args_list
+ [call('a'), call('c'), call('d'), call('b'), call('d')]
+ >>> mock.__setitem__.call_args_list
+ [call('b', 'fish'), call('d', 'eggs')]
+ >>> my_dict
+ {'a': 1, 'c': 3, 'b': 'fish', 'd': 'eggs'}
+
+
+Mock subclasses and their attributes
+====================================
+
+There are various reasons why you might want to subclass `Mock`. One reason
+might be to add helper methods. Here's a silly example:
+
+.. doctest::
+
+ >>> class MyMock(MagicMock):
+ ... def has_been_called(self):
+ ... return self.called
+ ...
+ >>> mymock = MyMock(return_value=None)
+ >>> mymock
+ <MyMock id='...'>
+ >>> mymock.has_been_called()
+ False
+ >>> mymock()
+ >>> mymock.has_been_called()
+ True
+
+The standard behaviour for `Mock` instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that `Mock` attributes are `Mocks` and `MagicMock` attributes are `MagicMocks`
+[#]_. So if you're subclassing to add helper methods then they'll also be
+available on the attributes and return value mock of instances of your
+subclass.
+
+.. doctest::
+
+ >>> mymock.foo
+ <MyMock name='mock.foo' id='...'>
+ >>> mymock.foo.has_been_called()
+ False
+ >>> mymock.foo()
+ <MyMock name='mock.foo()' id='...'>
+ >>> mymock.foo.has_been_called()
+ True
+
+Sometimes this is inconvenient. For example, `one user
+<https://code.google.com/p/mock/issues/detail?id=105>`_ is subclassing mock to
+create a `Twisted adaptor
+<http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html>`_.
+Having this applied to attributes too actually causes errors.
+
+`Mock` (in all its flavours) uses a method called `_get_child_mock` to create
+these "sub-mocks" for attributes and return values. You can prevent your
+subclass being used for attributes by overriding this method. The signature is
+that it takes arbitrary keyword arguments (`**kwargs`) which are then passed
+onto the mock constructor:
+
+.. doctest::
+
+ >>> class Subclass(MagicMock):
+ ... def _get_child_mock(self, **kwargs):
+ ... return MagicMock(**kwargs)
+ ...
+ >>> mymock = Subclass()
+ >>> mymock.foo
+ <MagicMock name='mock.foo' id='...'>
+ >>> assert isinstance(mymock, Subclass)
+ >>> assert not isinstance(mymock.foo, Subclass)
+ >>> assert not isinstance(mymock(), Subclass)
+
+.. [#] An exception to this rule are the non-callable mocks. Attributes use the
+ callable variant because otherwise non-callable mocks couldn't have callable
+ methods.
+
+
+Mocking imports with patch.dict
+===============================
+
+One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren't using an object from
+the module namespace that we can patch out.
+
+Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies, for which there is *usually* a much better way to
+solve the problem (refactor the code), or to avoid "up front costs" by
+delaying the import. The latter can also be solved in better ways than an
+unconditional local import (store the module as a class or module attribute
+and only do the import on first use).
+
+That aside there is a way to use `mock` to affect the results of an import.
+Importing fetches an *object* from the `sys.modules` dictionary. Note that it
+fetches an *object*, which need not be a module. Importing a module for the
+first time results in a module object being put in `sys.modules`, so usually
+when you import something you get a module back. This need not be the case
+however.
+
+This means you can use :func:`patch.dict` to *temporarily* put a mock in place
+in `sys.modules`. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or `patcher.stop()` is called) then whatever was there
+previously will be restored safely.
+
+Here's an example that mocks out the 'fooble' module.
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... import fooble
+ ... fooble.blob()
+ ...
+ <Mock name='mock.blob()' id='...'>
+ >>> assert 'fooble' not in sys.modules
+ >>> mock.blob.assert_called_once_with()
+
+As you can see the `import fooble` succeeds, but on exit there is no 'fooble'
+left in `sys.modules`.
+
+This also works for the `from module import name` form:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> with patch.dict('sys.modules', {'fooble': mock}):
+ ... from fooble import blob
+ ... blob.blip()
+ ...
+ <Mock name='mock.blob.blip()' id='...'>
+ >>> mock.blob.blip.assert_called_once_with()
+
+With slightly more work you can also mock package imports:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> modules = {'package': mock, 'package.module': mock.module}
+ >>> with patch.dict('sys.modules', modules):
+ ... from package.module import fooble
+ ... fooble()
+ ...
+ <Mock name='mock.module.fooble()' id='...'>
+ >>> mock.module.fooble.assert_called_once_with()
+
+
+Tracking order of calls and less verbose call assertions
+========================================================
+
+The :class:`Mock` class allows you to track the *order* of method calls on
+your mock objects through the :attr:`~Mock.method_calls` attribute. This
+doesn't allow you to track the order of calls between separate mock objects,
+however we can use :attr:`~Mock.mock_calls` to achieve the same effect.
+
+Because mocks track calls to child mocks in `mock_calls`, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the `mock_calls` of the parent:
+
+.. doctest::
+
+ >>> manager = Mock()
+ >>> mock_foo = manager.foo
+ >>> mock_bar = manager.bar
+
+ >>> mock_foo.something()
+ <Mock name='mock.foo.something()' id='...'>
+ >>> mock_bar.other.thing()
+ <Mock name='mock.bar.other.thing()' id='...'>
+
+ >>> manager.mock_calls
+ [call.foo.something(), call.bar.other.thing()]
+
+We can then assert about the calls, including the order, by comparing with
+the `mock_calls` attribute on the manager mock:
+
+.. doctest::
+
+ >>> expected_calls = [call.foo.something(), call.bar.other.thing()]
+ >>> manager.mock_calls == expected_calls
+ True
+
+If `patch` is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the :meth:`~Mock.attach_mock` method. After
+attaching calls will be recorded in `mock_calls` of the manager.
+
+.. doctest::
+
+ >>> manager = MagicMock()
+ >>> with patch('mymodule.Class1') as MockClass1:
+ ... with patch('mymodule.Class2') as MockClass2:
+ ... manager.attach_mock(MockClass1, 'MockClass1')
+ ... manager.attach_mock(MockClass2, 'MockClass2')
+ ... MockClass1().foo()
+ ... MockClass2().bar()
+ ...
+ <MagicMock name='mock.MockClass1().foo()' id='...'>
+ <MagicMock name='mock.MockClass2().bar()' id='...'>
+ >>> manager.mock_calls
+ [call.MockClass1(),
+ call.MockClass1().foo(),
+ call.MockClass2(),
+ call.MockClass2().bar()]
+
+If many calls have been made, but you're only interested in a particular
+sequence of them then an alternative is to use the
+:meth:`~Mock.assert_has_calls` method. This takes a list of calls (constructed
+with the :data:`call` object). If that sequence of calls is in
+:attr:`~Mock.mock_calls` then the assert succeeds.
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m().foo().bar().baz()
+ <MagicMock name='mock().foo().bar().baz()' id='...'>
+ >>> m.one().two().three()
+ <MagicMock name='mock.one().two().three()' id='...'>
+ >>> calls = call.one().two().three().call_list()
+ >>> m.assert_has_calls(calls)
+
+Even though the chained call `m.one().two().three()` isn't the only call that
+has been made to the mock, the assert still succeeds.
+
+Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about *some* of those calls. You may not even care about the
+order. In this case you can pass `any_order=True` to `assert_has_calls`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> m(1), m.two(2, 3), m.seven(7), m.fifty('50')
+ (...)
+ >>> calls = [call.fifty('50'), call(1), call.seven(7)]
+ >>> m.assert_has_calls(calls, any_order=True)
+
+
+More complex argument matching
+==============================
+
+Using the same basic concept as `ANY` we can implement matchers to do more
+complex assertions on objects used as arguments to mocks.
+
+Suppose we expect some object to be passed to a mock that by default
+compares equal based on object identity (which is the Python default for user
+defined classes). To use :meth:`~Mock.assert_called_with` we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.
+
+You can see in this example how a 'standard' call to `assert_called_with` isn't
+sufficient:
+
+.. doctest::
+
+ >>> class Foo(object):
+ ... def __init__(self, a, b):
+ ... self.a, self.b = a, b
+ ...
+ >>> mock = Mock(return_value=None)
+ >>> mock(Foo(1, 2))
+ >>> mock.assert_called_with(Foo(1, 2))
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: call(<__main__.Foo object at 0x...>)
+ Actual call: call(<__main__.Foo object at 0x...>)
+
+A comparison function for our `Foo` class might look something like this:
+
+.. doctest::
+
+ >>> def compare(self, other):
+ ... if not type(self) == type(other):
+ ... return False
+ ... if self.a != other.a:
+ ... return False
+ ... if self.b != other.b:
+ ... return False
+ ... return True
+ ...
+
+And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:
+
+.. doctest::
+
+ >>> class Matcher(object):
+ ... def __init__(self, compare, some_obj):
+ ... self.compare = compare
+ ... self.some_obj = some_obj
+ ... def __eq__(self, other):
+ ... return self.compare(self.some_obj, other)
+ ...
+
+Putting all this together:
+
+.. doctest::
+
+ >>> match_foo = Matcher(compare, Foo(1, 2))
+ >>> mock.assert_called_with(match_foo)
+
+The `Matcher` is instantiated with our compare function and the `Foo` object
+we want to compare against. In `assert_called_with` the `Matcher` equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+`assert_called_with` passes, and if they don't an `AssertionError` is raised:
+
+.. doctest::
+
+ >>> match_wrong = Matcher(compare, Foo(3, 4))
+ >>> mock.assert_called_with(match_wrong)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected: ((<Matcher object at 0x...>,), {})
+ Called with: ((<Foo object at 0x...>,), {})
+
+With a bit of tweaking you could have the comparison function raise the
+`AssertionError` directly and provide a more useful failure message.
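+
+A sketch of such a tweak (the failure message text is illustrative):
+
+.. doctest::
+
+    >>> def compare(self, other):
+    ...     if type(self) != type(other) or (self.a, self.b) != (other.a, other.b):
+    ...         raise AssertionError('%r does not match %r' % (other, self))
+    ...     return True
+    ...
+    >>> mock.assert_called_with(Matcher(compare, Foo(1, 2)))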
+
+As of version 1.5, the Python testing library `PyHamcrest
+<http://pypi.python.org/pypi/PyHamcrest>`_ provides similar functionality,
+that may be useful here, in the form of its equality matcher
+(`hamcrest.library.integration.match_equality
+<http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality>`_).
+
+
+Less verbose configuration of mock objects
+==========================================
+
+This recipe, for easier configuration of mock objects, is now part of `Mock`.
+See the :meth:`~Mock.configure_mock` method.
+
+
+Matching any argument in assertions
+===================================
+
+This example is now built in to mock. See :data:`ANY`.
+
+
+Mocking Properties
+==================
+
+This example is now built in to mock. See :class:`PropertyMock`.
+
+
+Mocking open
+============
+
+This example is now built in to mock. See :func:`mock_open`.
+
+
+Mocks without some attributes
+=============================
+
+This example is now built in to mock. See :ref:`deleting-attributes`.
diff --git a/python/mock-1.0.0/html/_sources/getting-started.txt b/python/mock-1.0.0/html/_sources/getting-started.txt
new file mode 100644
index 000000000..1b5d289eb
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/getting-started.txt
@@ -0,0 +1,479 @@
+===========================
+ Getting Started with Mock
+===========================
+
+.. _getting-started:
+
+.. index:: Getting Started
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = module = package.module
+ sys.modules['module'] = package.module
+
+
+Using Mock
+==========
+
+Mock Patching Methods
+---------------------
+
+Common uses for :class:`Mock` objects include:
+
+* Patching methods
+* Recording method calls on objects
+
+You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:
+
+.. doctest::
+
+ >>> real = SomeClass()
+ >>> real.method = MagicMock(name='method')
+ >>> real.method(3, 4, 5, key='value')
+ <MagicMock name='method()' id='...'>
+
+Once our mock has been used (`real.method` in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.
+
+.. note::
+
+ In most of these examples the :class:`Mock` and :class:`MagicMock` classes
+ are interchangeable. As the `MagicMock` is the more capable class it makes
+ a sensible one to use by default.
+
+Once the mock has been called its :attr:`~Mock.called` attribute is set to
+`True`. More importantly we can use the :meth:`~Mock.assert_called_with` or
+:meth:`~Mock.assert_called_once_with` method to check that it was called with
+the correct arguments.
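+
+For instance (a minimal sketch):
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> mock.called
+    False
+    >>> mock()
+    <Mock name='mock()' id='...'>
+    >>> mock.called
+    True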
+
+This example tests that calling `ProductionClass().method` results in a call to
+the `something` method:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... self.something(1, 2, 3)
+ ... def something(self, a, b, c):
+ ... pass
+ ...
+ >>> real = ProductionClass()
+ >>> real.something = MagicMock()
+ >>> real.method()
+ >>> real.something.assert_called_once_with(1, 2, 3)
+
+
+
+Mock for Method Calls on an Object
+----------------------------------
+
+In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.
+
+The simple `ProductionClass` below has a `closer` method. If it is called with
+an object then it calls `close` on it.
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def closer(self, something):
+ ... something.close()
+ ...
+
+So to test it we need to pass in an object with a `close` method and check
+that it was called correctly.
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> mock = Mock()
+ >>> real.closer(mock)
+ >>> mock.close.assert_called_with()
+
+We don't have to do any work to provide the 'close' method on our mock.
+Accessing `close` creates it. So, even if 'close' has never been called,
+accessing it in the test will still create it, but
+:meth:`~Mock.assert_called_with` will raise a failure exception.
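+
+For instance (a minimal sketch, with a try/except just to make the failure
+visible):
+
+.. doctest::
+
+    >>> mock = Mock()
+    >>> try:
+    ...     mock.close.assert_called_with()
+    ... except AssertionError:
+    ...     print('close was never called')
+    ...
+    close was never called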
+
+
+Mocking Classes
+---------------
+
+A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, then that class is replaced with a mock. Instances
+are created by *calling the class*. This means you access the "mock instance"
+by looking at the return value of the mocked class.
+
+In the example below we have a function `some_function` that instantiates `Foo`
+and calls a method on it. The call to `patch` replaces the class `Foo` with a
+mock. The `Foo` instance is the result of calling the mock, so it is configured
+by modifying the mock :attr:`~Mock.return_value`.
+
+.. doctest::
+
+ >>> def some_function():
+ ... instance = module.Foo()
+ ... return instance.method()
+ ...
+ >>> with patch('module.Foo') as mock:
+ ... instance = mock.return_value
+ ... instance.method.return_value = 'the result'
+ ... result = some_function()
+ ... assert result == 'the result'
+
+
+Naming your mocks
+-----------------
+
+It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:
+
+.. doctest::
+
+ >>> mock = MagicMock(name='foo')
+ >>> mock
+ <MagicMock name='foo' id='...'>
+ >>> mock.method
+ <MagicMock name='foo.method' id='...'>
+
+
+Tracking all Calls
+------------------
+
+Often you want to track more than a single call to a method. The
+:attr:`~Mock.mock_calls` attribute records all calls
+to child attributes of the mock - and also to their children.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.method()
+ <MagicMock name='mock.method()' id='...'>
+ >>> mock.attribute.method(10, x=53)
+ <MagicMock name='mock.attribute.method()' id='...'>
+ >>> mock.mock_calls
+ [call.method(), call.attribute.method(10, x=53)]
+
+If you make an assertion about `mock_calls` and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.
+
+You use the :data:`call` object to construct lists for comparing with
+`mock_calls`:
+
+.. doctest::
+
+ >>> expected = [call.method(), call.attribute.method(10, x=53)]
+ >>> mock.mock_calls == expected
+ True
+
+
+Setting Return Values and Attributes
+------------------------------------
+
+Setting the return values on a mock object is trivially easy:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 3
+ >>> mock()
+ 3
+
+Of course you can do the same for methods on the mock:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method.return_value = 3
+ >>> mock.method()
+ 3
+
+The return value can also be set in the constructor:
+
+.. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock()
+ 3
+
+If you need to set an attribute on your mock, just do it:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.x = 3
+ >>> mock.x
+ 3
+
+Sometimes you want to mock a more complex situation, for example
+`mock.connection.cursor().execute("SELECT 1")`. If we want this call to
+return a list, we have to configure the result of the nested call.
+
+We can use :data:`call` to construct the set of calls in a "chained call" like
+this for easy assertion afterwards:
+
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> cursor = mock.connection.cursor.return_value
+ >>> cursor.execute.return_value = ['foo']
+ >>> mock.connection.cursor().execute("SELECT 1")
+ ['foo']
+ >>> expected = call.connection.cursor().execute("SELECT 1").call_list()
+ >>> mock.mock_calls
+ [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]
+ >>> mock.mock_calls == expected
+ True
+
+It is the call to `.call_list()` that turns our call object into a list of
+calls representing the chained calls.
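+
+You can see the expansion directly; calling `call_list()` on the chained
+call yields exactly the list that `mock_calls` recorded above:
+
+.. doctest::
+
+    >>> call.connection.cursor().execute("SELECT 1").call_list()
+    [call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]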
+
+
+
+Raising exceptions with mocks
+-----------------------------
+
+A useful attribute is :attr:`~Mock.side_effect`. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=Exception('Boom!'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+
+Side effect functions and iterables
+-----------------------------------
+
+`side_effect` can also be set to a function or an iterable. The use case for
+`side_effect` as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+`side_effect` to an iterable every call to the mock returns the next value
+from the iterable:
+
+.. doctest::
+
+ >>> mock = MagicMock(side_effect=[4, 5, 6])
+ >>> mock()
+ 4
+ >>> mock()
+ 5
+ >>> mock()
+ 6
+
+
+For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, `side_effect` can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:
+
+.. doctest::
+
+ >>> vals = {(1, 2): 1, (2, 3): 2}
+ >>> def side_effect(*args):
+ ... return vals[args]
+ ...
+ >>> mock = MagicMock(side_effect=side_effect)
+ >>> mock(1, 2)
+ 1
+ >>> mock(2, 3)
+ 2
+
+
+Creating a Mock from an Existing Object
+---------------------------------------
+
+One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than to your real code. Suppose you have
+a class that implements `some_method`. In a test for another class, you
+provide a mock of this object that *also* provides `some_method`. If you
+later refactor the first class so that it no longer has `some_method`, then
+your tests will continue to pass even though your code is now broken!
+
+`Mock` allows you to provide an object as a specification for the mock,
+using the `spec` keyword argument. Accessing methods / attributes on the
+mock that don't exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> mock.old_method()
+ Traceback (most recent call last):
+ ...
+ AttributeError: object has no attribute 'old_method'
+
+If you want a stronger form of specification that prevents the setting of
+arbitrary attributes, as well as the getting of them, then you can use
+`spec_set` instead of `spec`.
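+
+A minimal sketch of the difference (reusing `SomeClass` from above): the
+first assignment below succeeds silently with `spec`, while the same
+assignment fails with `spec_set`. The exact error message may vary between
+versions:
+
+.. doctest::
+
+    >>> mock = Mock(spec=SomeClass)
+    >>> mock.new_attribute = 'value'
+    >>> mock = Mock(spec_set=SomeClass)
+    >>> mock.new_attribute = 'value'
+    Traceback (most recent call last):
+    ...
+    AttributeError: Mock object has no attribute 'new_attribute'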
+
+
+
+Patch Decorators
+================
+
+.. note::
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+
+A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test or the patch will persist into other
+tests and cause hard to diagnose problems.
+
+mock provides three convenient decorators for this: `patch`, `patch.object` and
+`patch.dict`. `patch` takes a single string, of the form
+`package.module.Class.attribute`, to specify the attribute you are patching.
+It also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. `patch.object` takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.
+
+`patch.object`:
+
+.. doctest::
+
+ >>> original = SomeClass.attribute
+ >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test():
+ ... assert SomeClass.attribute == sentinel.attribute
+ ...
+ >>> test()
+ >>> assert SomeClass.attribute == original
+
+ >>> @patch('package.module.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import attribute
+ ... assert attribute is sentinel.attribute
+ ...
+ >>> test()
+
+If you are patching a module (including `__builtin__`) then use `patch`
+instead of `patch.object`:
+
+.. doctest::
+
+    >>> mock = MagicMock(return_value=sentinel.file_handle)
+ >>> with patch('__builtin__.open', mock):
+ ... handle = open('filename', 'r')
+ ...
+ >>> mock.assert_called_with('filename', 'r')
+ >>> assert handle == sentinel.file_handle, "incorrect file handle returned"
+
+The module name can be 'dotted', in the form `package.module` if needed:
+
+.. doctest::
+
+ >>> @patch('package.module.ClassName.attribute', sentinel.attribute)
+ ... def test():
+ ... from package.module import ClassName
+ ... assert ClassName.attribute == sentinel.attribute
+ ...
+ >>> test()
+
+A nice pattern is to actually decorate test methods themselves:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'attribute', sentinel.attribute)
+ ... def test_something(self):
+ ... self.assertEqual(SomeClass.attribute, sentinel.attribute)
+ ...
+ >>> original = SomeClass.attribute
+ >>> MyTest('test_something').test_something()
+ >>> assert SomeClass.attribute == original
+
+If you want to patch with a Mock, you can use `patch` with only one argument
+(or `patch.object` with two arguments). The mock will be created for you and
+passed into the test function / method:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test_something(self, mock_method):
+ ... SomeClass.static_method()
+ ... mock_method.assert_called_with()
+ ...
+ >>> MyTest('test_something').test_something()
+
+You can stack up multiple patch decorators using this pattern:
+
+.. doctest::
+
+ >>> class MyTest(unittest2.TestCase):
+ ... @patch('package.module.ClassName1')
+ ... @patch('package.module.ClassName2')
+ ... def test_something(self, MockClass2, MockClass1):
+ ... self.assertTrue(package.module.ClassName1 is MockClass1)
+ ... self.assertTrue(package.module.ClassName2 is MockClass2)
+ ...
+ >>> MyTest('test_something').test_something()
+
+When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal *Python* order in
+which decorators are applied). This means from the bottom up, so in the
+example above the mock for `package.module.ClassName2` is passed in first.
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+`patch`, `patch.object` and `patch.dict` can all be used as context managers.
+
+Where you use `patch` to create a mock for you, you can get a reference to the
+mock using the "as" form of the with statement:
+
+.. doctest::
+
+ >>> class ProductionClass(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch.object(ProductionClass, 'method') as mock_method:
+ ... mock_method.return_value = None
+ ... real = ProductionClass()
+ ... real.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_with(1, 2, 3)
+
+
+As an alternative, `patch`, `patch.object` and `patch.dict` can be used as
+class decorators. Used in this way it is the same as applying the decorator
+individually to every method whose name starts with "test".
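+
+For example (a sketch reusing `SomeClass`, `sentinel` and `unittest2` from
+the examples above):
+
+.. doctest::
+
+    >>> original = SomeClass.attribute
+    >>> @patch.object(SomeClass, 'attribute', sentinel.attribute)
+    ... class MyTest(unittest2.TestCase):
+    ...     def test_something(self):
+    ...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+    ...
+    >>> MyTest('test_something').test_something()
+    >>> assert SomeClass.attribute == original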
+
+For some more advanced examples, see the :ref:`further-examples` page.
diff --git a/python/mock-1.0.0/html/_sources/index.txt b/python/mock-1.0.0/html/_sources/index.txt
new file mode 100644
index 000000000..7e4a8daca
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/index.txt
@@ -0,0 +1,411 @@
+====================================
+ Mock - Mocking and Testing Library
+====================================
+
+.. currentmodule:: mock
+
+:Author: `Michael Foord
+ <http://www.voidspace.org.uk/python/weblog/index.shtml>`_
+:Version: |release|
+:Date: 2012/10/07
+:Homepage: `Mock Homepage`_
+:Download: `Mock on PyPI`_
+:Documentation: `PDF Documentation
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+:License: `BSD License`_
+:Support: `Mailing list (testing-in-python@lists.idyll.org)
+ <http://lists.idyll.org/listinfo/testing-in-python>`_
+:Issue tracker: `Google code project
+ <http://code.google.com/p/mock/issues/list>`_
+
+.. _Mock Homepage: http://www.voidspace.org.uk/python/mock/
+.. _BSD License: http://www.voidspace.org.uk/python/license.shtml
+
+
+.. currentmodule:: mock
+
+.. module:: mock
+ :synopsis: Mock object and testing library.
+
+.. index:: introduction
+
+mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.
+
+mock is now part of the Python standard library, available as `unittest.mock
+<http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock>`_
+in Python 3.3 onwards.
+
+mock provides a core :class:`Mock` class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.
+
+Additionally, mock provides a :func:`patch` decorator that handles patching
+module and class level attributes within the scope of a test, along with
+:const:`sentinel` for creating unique objects. See the `quick guide`_ for
+some examples of how to use :class:`Mock`, :class:`MagicMock` and
+:func:`patch`.
+
+Mock is very easy to use and is designed for use with
+`unittest <http://pypi.python.org/pypi/unittest2>`_. Mock is based on
+the 'action -> assertion' pattern instead of the 'record -> replay' pattern
+used by many mocking frameworks.
+
+mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.
+
+
+.. testsetup::
+
+    import sys
+
+    class ProductionClass(object):
+ def method(self, *args):
+ pass
+
+ module = sys.modules['module'] = ProductionClass
+ ProductionClass.ClassName1 = ProductionClass
+ ProductionClass.ClassName2 = ProductionClass
+
+
+
+API Documentation
+=================
+
+.. toctree::
+ :maxdepth: 2
+
+ mock
+ patch
+ helpers
+ sentinel
+ magicmock
+
+
+User Guide
+==========
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+ examples
+ compare
+ changelog
+
+
+.. index:: installing
+
+Installing
+==========
+
+The current version is |release|. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions
+then please contact us.
+
+* `mock on PyPI <http://pypi.python.org/pypi/mock>`_
+* `mock documentation as PDF
+ <http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf>`_
+* `Google Code Home & Mercurial Repository <http://code.google.com/p/mock/>`_
+
+.. index:: repository
+.. index:: hg
+
+You can check out the latest development version from the Google Code Mercurial
+repository with the following command:
+
+ ``hg clone https://mock.googlecode.com/hg/ mock``
+
+
+.. index:: pip
+.. index:: easy_install
+.. index:: setuptools
+
+If you have pip, setuptools or distribute you can install mock with:
+
+ | ``easy_install -U mock``
+ | ``pip install -U mock``
+
+Alternatively you can download the mock distribution from PyPI and after
+unpacking run:
+
+ ``python setup.py install``
+
+
+Quick Guide
+===========
+
+:class:`Mock` and :class:`MagicMock` objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:
+
+.. doctest::
+
+ >>> from mock import MagicMock
+ >>> thing = ProductionClass()
+ >>> thing.method = MagicMock(return_value=3)
+ >>> thing.method(3, 4, 5, key='value')
+ 3
+ >>> thing.method.assert_called_with(3, 4, 5, key='value')
+
+:attr:`side_effect` allows you to perform side effects, including raising an
+exception when a mock is called:
+
+.. doctest::
+
+ >>> mock = Mock(side_effect=KeyError('foo'))
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ KeyError: 'foo'
+
+ >>> values = {'a': 1, 'b': 2, 'c': 3}
+ >>> def side_effect(arg):
+ ... return values[arg]
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock('a'), mock('b'), mock('c')
+ (1, 2, 3)
+ >>> mock.side_effect = [5, 4, 3, 2, 1]
+ >>> mock(), mock(), mock()
+ (5, 4, 3)
+
+Mock has many other ways you can configure it and control its behaviour. For
+example the `spec` argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don't exist on the spec will fail with an `AttributeError`.
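+
+For instance, a small sketch using the `ProductionClass` from these examples
+(the exact error message may vary between versions):
+
+.. doctest::
+
+    >>> mock = Mock(spec=ProductionClass)
+    >>> mock.no_such_method()
+    Traceback (most recent call last):
+    ...
+    AttributeError: Mock object has no attribute 'no_such_method'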
+
+The :func:`patch` decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> @patch('module.ClassName2')
+ ... @patch('module.ClassName1')
+ ... def test(MockClass1, MockClass2):
+ ... module.ClassName1()
+ ... module.ClassName2()
+ ... assert MockClass1 is module.ClassName1
+ ... assert MockClass2 is module.ClassName2
+ ... assert MockClass1.called
+ ... assert MockClass2.called
+ ...
+ >>> test()
+
+.. note::
+
+    When you nest patch decorators the mocks are passed in to the decorated
+    function in the same order they are applied (the normal *Python* order in
+    which decorators are applied). This means from the bottom up, so in the
+    example above the mock for `module.ClassName1` is passed in first.
+
+ With `patch` it matters that you patch objects in the namespace where they
+ are looked up. This is normally straightforward, but for a quick guide
+ read :ref:`where to patch <where-to-patch>`.
+
+As well as a decorator, `patch` can be used as a context manager in a with
+statement:
+
+.. doctest::
+
+ >>> with patch.object(ProductionClass, 'method', return_value=None) as mock_method:
+ ... thing = ProductionClass()
+ ... thing.method(1, 2, 3)
+ ...
+ >>> mock_method.assert_called_once_with(1, 2, 3)
+
+
+There is also :func:`patch.dict` for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:
+
+.. doctest::
+
+ >>> foo = {'key': 'value'}
+ >>> original = foo.copy()
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == original
+
+Mock supports the mocking of Python :ref:`magic methods <magic-methods>`. The
+easiest way of using magic methods is with the :class:`MagicMock` class. It
+allows you to do things like:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__str__.return_value = 'foobarbaz'
+ >>> str(mock)
+ 'foobarbaz'
+ >>> mock.__str__.assert_called_with()
+
+Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The `MagicMock` class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).
+
+The following is an example of using magic methods with the ordinary Mock
+class:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock(return_value='wheeeeee')
+ >>> str(mock)
+ 'wheeeeee'
+
+For ensuring that the mock objects in your tests have the same api as the
+objects they are replacing, you can use :ref:`auto-speccing <auto-speccing>`.
+Auto-speccing can be done through the `autospec` argument to patch, or the
+:func:`create_autospec` function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.
+
+This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:
+
+.. doctest::
+
+ >>> from mock import create_autospec
+ >>> def function(a, b, c):
+ ... pass
+ ...
+ >>> mock_function = create_autospec(function, return_value='fishy')
+ >>> mock_function(1, 2, 3)
+ 'fishy'
+ >>> mock_function.assert_called_once_with(1, 2, 3)
+ >>> mock_function('wrong arguments')
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes exactly 3 arguments (1 given)
+
+`create_autospec` can also be used on classes, where it copies the signature of
+the `__init__` method, and on callable objects where it copies the signature of
+the `__call__` method.
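+
+A brief sketch with a hypothetical class, showing that the mocked class
+enforces the `__init__` signature (minus `self`):
+
+.. doctest::
+
+    >>> class Something(object):
+    ...     def __init__(self, a, b):
+    ...         pass
+    ...
+    >>> mock_class = create_autospec(Something)
+    >>> instance = mock_class(1, 2)
+    >>> mock_class.assert_called_once_with(1, 2)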
+
+
+.. index:: references
+.. index:: articles
+
+References
+==========
+
+Articles, blog entries and other stuff related to testing with Mock:
+
+* `Imposing a No DB Discipline on Django unit tests
+ <https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md>`_
+* `mock-django: tools for mocking the Django ORM and models
+ <https://github.com/dcramer/mock-django>`_
+* `PyCon 2011 Video: Testing with mock <https://blip.tv/file/4881513>`_
+* `Mock objects in Python
+ <http://noopenblockers.com/2012/01/06/mock-objects-in-python/>`_
+* `Python: Injecting Mock Objects for Powerful Testing
+ <http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/>`_
+* `Python Mock: How to assert a substring of logger output
+ <http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/>`_
+* `Mocking Django <http://www.mattjmorrison.com/2011/09/mocking-django.html>`_
+* `Mocking dates and other classes that can't be modified
+ <http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/>`_
+* `Mock recipes <http://konryd.blogspot.com/2010/06/mock-recipies.html>`_
+* `Mockity mock mock - some love for the mock module
+ <http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html>`_
+* `Coverage and Mock (with django)
+ <http://mattsnider.com/python/mock-and-coverage/>`_
+* `Python Unit Testing with Mock <http://www.insomnihack.com/?p=194>`_
+* `Getting started with Python Mock
+ <http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/>`_
+* `Smart Parameter Checks with mock
+ <http://tobyho.com/2011/03/24/smart-parameter-checks-in/>`_
+* `Python mock testing techniques and tools
+ <http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html>`_
+* `How To Test Django Template Tags
+ <http://techblog.ironfroggy.com/2008/10/how-to-test.html>`_
+* `A presentation on Unit Testing with Mock
+ <http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html>`_
+* `Mocking with Django and Google AppEngine
+ <http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html>`_
+
+
+.. index:: tests
+.. index:: unittest2
+
+Tests
+=====
+
+Mock uses `unittest2 <http://pypi.python.org/pypi/unittest2>`_ for its own
+test suite. In order to run it, use the `unit2` script that comes with the
+`unittest2` module on a checkout of the source repository:
+
+    ``unit2 discover``
+
+If you have `setuptools <http://pypi.python.org/pypi/distribute>`_ as well as
+unittest2 you can run:
+
+ ``python setup.py test``
+
+On Python 3.2 you can use the ``unittest`` module from the standard library:
+
+ ``python3.2 -m unittest discover``
+
+.. index:: Python 3
+
+On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4 tests that use the with statement are skipped, as the with
+statement is invalid syntax on Python 2.4.
+
+
+.. index:: older versions
+
+Older Versions
+==============
+
+Documentation for older versions of mock:
+
+* `mock 0.8 <http://www.voidspace.org.uk/python/mock/0.8/>`_
+* `mock 0.7 <http://www.voidspace.org.uk/python/mock/0.7/>`_
+* `mock 0.6 <http://www.voidspace.org.uk/python/mock/0.6.0/>`_
+
+Docs from the in-development version of `mock` can be found at
+`mock.readthedocs.org <http://mock.readthedocs.org>`_.
+
+
+Terminology
+===========
+
+Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.
+
+In `classic mock terminology
+<http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html>`_
+:class:`mock.Mock` is a `spy <http://xunitpatterns.com/Test%20Spy.html>`_ that
+allows for *post-mortem* examination. This is what I call the "action ->
+assertion" [#]_ pattern of testing.
+
+I'm not however a fan of this "statically typed mocking terminology"
+promulgated by `Martin Fowler
+<http://martinfowler.com/articles/mocksArentStubs.html>`_. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.
+
+I much prefer duck typing: if an object used in your test suite looks like a
+mock object and quacks like a mock object then it's fine to call it a mock, no
+matter what the implementation looks like.
+
+This terminology is perhaps more useful in less capable languages where
+different usage patterns will *require* different implementations.
+`mock.Mock()` is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!
+
+How about a simpler definition: a "mock object" is an object used to replace a
+real one in a system under test.
+
+.. [#] This pattern is called "AAA" by some members of the testing community;
+ "Arrange - Act - Assert".
diff --git a/python/mock-1.0.0/html/_sources/magicmock.txt b/python/mock-1.0.0/html/_sources/magicmock.txt
new file mode 100644
index 000000000..42b2ed9db
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/magicmock.txt
@@ -0,0 +1,258 @@
+
+.. currentmodule:: mock
+
+
+.. _magic-methods:
+
+Mocking Magic Methods
+=====================
+
+.. currentmodule:: mock
+
+:class:`Mock` supports mocking `magic methods
+<http://www.ironpythoninaction.com/magic-methods.html>`_. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.
+
+Because magic methods are looked up differently from normal methods [#]_, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes *almost* all of them. If
+there are any missing that you need please let us know!
+
+You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it *must* take ``self`` as
+the first argument [#]_.
+
+.. doctest::
+
+ >>> def __str__(self):
+ ... return 'fooble'
+ ...
+ >>> mock = Mock()
+ >>> mock.__str__ = __str__
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__str__ = Mock()
+ >>> mock.__str__.return_value = 'fooble'
+ >>> str(mock)
+ 'fooble'
+
+ >>> mock = Mock()
+ >>> mock.__iter__ = Mock(return_value=iter([]))
+ >>> list(mock)
+ []
+
+One use case for this is for mocking objects used as context managers in a
+`with` statement:
+
+.. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__enter__ = Mock(return_value='foo')
+ >>> mock.__exit__ = Mock(return_value=False)
+ >>> with mock as m:
+ ... assert m == 'foo'
+ ...
+ >>> mock.__enter__.assert_called_with()
+ >>> mock.__exit__.assert_called_with(None, None, None)
+
+Calls to magic methods do not appear in :attr:`~Mock.method_calls`, but they
+are recorded in :attr:`~Mock.mock_calls`.
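+
+For example:
+
+.. doctest::
+
+    >>> mock = MagicMock()
+    >>> int(mock)
+    1
+    >>> mock.method_calls
+    []
+    >>> mock.mock_calls
+    [call.__int__()]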
+
+.. note::
+
+ If you use the `spec` keyword argument to create a mock then attempting to
+ set a magic method that isn't in the spec will raise an `AttributeError`.
+
+The full list of supported magic methods is:
+
+* ``__hash__``, ``__sizeof__``, ``__repr__`` and ``__str__``
+* ``__dir__``, ``__format__`` and ``__subclasses__``
+* ``__floor__``, ``__trunc__`` and ``__ceil__``
+* Comparisons: ``__cmp__``, ``__lt__``, ``__gt__``, ``__le__``, ``__ge__``,
+ ``__eq__`` and ``__ne__``
+* Container methods: ``__getitem__``, ``__setitem__``, ``__delitem__``,
+ ``__contains__``, ``__len__``, ``__iter__``, ``__getslice__``,
+ ``__setslice__``, ``__reversed__`` and ``__missing__``
+* Context manager: ``__enter__`` and ``__exit__``
+* Unary numeric methods: ``__neg__``, ``__pos__`` and ``__invert__``
+* The numeric methods (including right hand and in-place variants):
+ ``__add__``, ``__sub__``, ``__mul__``, ``__div__``,
+ ``__floordiv__``, ``__mod__``, ``__divmod__``, ``__lshift__``,
+ ``__rshift__``, ``__and__``, ``__xor__``, ``__or__``, and ``__pow__``
+* Numeric conversion methods: ``__complex__``, ``__int__``, ``__float__``,
+ ``__index__`` and ``__coerce__``
+* Descriptor methods: ``__get__``, ``__set__`` and ``__delete__``
+* Pickling: ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``,
+ ``__getnewargs__``, ``__getstate__`` and ``__setstate__``
+
+
+The following methods are supported in Python 2 but don't exist in Python 3:
+
+* ``__unicode__``, ``__long__``, ``__oct__``, ``__hex__`` and ``__nonzero__``
+* ``__truediv__`` and ``__rtruediv__``
+
+The following methods are supported in Python 3 but don't exist in Python 2:
+
+* ``__bool__`` and ``__next__``
+
+The following methods exist but are *not* supported as they are either in use by
+mock, can't be set dynamically, or can cause problems:
+
+* ``__getattr__``, ``__setattr__``, ``__init__`` and ``__new__``
+* ``__prepare__``, ``__instancecheck__``, ``__subclasscheck__``, ``__del__``
+
+
+
+Magic Mock
+==========
+
+There are two `MagicMock` variants: `MagicMock` and `NonCallableMagicMock`.
+
+
+.. class:: MagicMock(*args, **kw)
+
+ ``MagicMock`` is a subclass of :class:`Mock` with default implementations
+ of most of the magic methods. You can use ``MagicMock`` without having to
+ configure the magic methods yourself.
+
+ The constructor parameters have the same meaning as for :class:`Mock`.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic methods
+ that exist in the spec will be created.
+
+
+.. class:: NonCallableMagicMock(*args, **kw)
+
+ A non-callable version of `MagicMock`.
+
+ The constructor parameters have the same meaning as for
+ :class:`MagicMock`, with the exception of `return_value` and
+ `side_effect` which have no meaning on a non-callable mock.
+
+The magic methods are set up with `MagicMock` objects, so you can configure
+them and use them in the usual way:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock[3] = 'fish'
+ >>> mock.__setitem__.assert_called_with(3, 'fish')
+ >>> mock.__getitem__.return_value = 'result'
+ >>> mock[2]
+ 'result'
+
+By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren't interested
+in the return value. You can still *set* the return value manually if you want
+to change the default.
+
+Methods and their defaults:
+
+* ``__lt__``: NotImplemented
+* ``__gt__``: NotImplemented
+* ``__le__``: NotImplemented
+* ``__ge__``: NotImplemented
+* ``__int__`` : 1
+* ``__contains__`` : False
+* ``__len__`` : 0
+* ``__iter__`` : iter([])
+* ``__exit__`` : False
+* ``__complex__`` : 1j
+* ``__float__`` : 1.0
+* ``__bool__`` : True
+* ``__nonzero__`` : True
+* ``__oct__`` : '1'
+* ``__hex__`` : '0x1'
+* ``__long__`` : long(1)
+* ``__index__`` : 1
+* ``__hash__`` : default hash for the mock
+* ``__str__`` : default str for the mock
+* ``__unicode__`` : default unicode for the mock
+* ``__sizeof__``: default sizeof for the mock
+
+For example:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> int(mock)
+ 1
+ >>> len(mock)
+ 0
+ >>> hex(mock)
+ '0x1'
+ >>> list(mock)
+ []
+ >>> object() in mock
+ False
+
+The two equality methods, `__eq__` and `__ne__`, are special (changed in
+0.7.2). By default they do an equality comparison on identity, using a side
+effect, unless you change their return value to return something else:
+
+.. doctest::
+
+ >>> MagicMock() == 3
+ False
+ >>> MagicMock() != 3
+ True
+ >>> mock = MagicMock()
+ >>> mock.__eq__.return_value = True
+ >>> mock == 3
+ True
+
+In `0.8` `__iter__` also gained special handling, implemented with a
+side effect. The return value of `MagicMock.__iter__` can be any iterable
+object and isn't required to be an iterator:
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> mock.__iter__.return_value = ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ ['a', 'b', 'c']
+
+If the return value *is* an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:
+
+.. doctest::
+
+ >>> mock.__iter__.return_value = iter(['a', 'b', 'c'])
+ >>> list(mock)
+ ['a', 'b', 'c']
+ >>> list(mock)
+ []
+
+``MagicMock`` has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up if you want.
+
+Magic methods that are supported but not setup by default in ``MagicMock`` are:
+
+* ``__cmp__``
+* ``__getslice__`` and ``__setslice__``
+* ``__coerce__``
+* ``__subclasses__``
+* ``__dir__``
+* ``__format__``
+* ``__get__``, ``__set__`` and ``__delete__``
+* ``__reversed__`` and ``__missing__``
+* ``__reduce__``, ``__reduce_ex__``, ``__getinitargs__``, ``__getnewargs__``,
+ ``__getstate__`` and ``__setstate__``
+* ``__getformat__`` and ``__setformat__``
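+
+If you do need one of these, setting it up follows the usual pattern for
+magic methods. A sketch using `__reversed__`, which is supported but has no
+default:
+
+.. doctest::
+
+    >>> mock = MagicMock()
+    >>> mock.__reversed__ = Mock(return_value=iter(['c', 'b', 'a']))
+    >>> list(reversed(mock))
+    ['c', 'b', 'a']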
+
+
+
+------------
+
+.. [#] Magic methods *should* be looked up on the class rather than the
+ instance. Different versions of Python are inconsistent about applying this
+ rule. The supported protocol methods should work with all supported versions
+ of Python.
+.. [#] The function is basically hooked up to the class, but each ``Mock``
+ instance is kept isolated from the others.
diff --git a/python/mock-1.0.0/html/_sources/mock.txt b/python/mock-1.0.0/html/_sources/mock.txt
new file mode 100644
index 000000000..58712b21a
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/mock.txt
@@ -0,0 +1,842 @@
+The Mock Class
+==============
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass:
+ pass
+
+
+`Mock` is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them [#]_. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.
+
+:class:`MagicMock` is a subclass of `Mock` with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren't callable:
+:class:`NonCallableMock` and :class:`NonCallableMagicMock`.
+
+The :func:`patch` decorator makes it easy to temporarily replace classes
+in a particular module with a `Mock` object. By default `patch` will create
+a `MagicMock` for you. You can specify an alternative class of `Mock` using
+the `new_callable` argument to `patch`.
+
+
+.. index:: side_effect
+.. index:: return_value
+.. index:: wraps
+.. index:: name
+.. index:: spec
+
+.. class:: Mock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, **kwargs)
+
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods).
+ Accessing any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ :attr:`__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the :attr:`~Mock.side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns :data:`DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ A `side_effect` can be cleared by setting it to `None`.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ :attr:`return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result and ignoring `return_value`). Attribute access
+ on the mock will return a Mock object that wraps the corresponding
+ attribute of the wrapped object (so attempting to access an attribute
+ that doesn't exist will raise an `AttributeError`).
+
+      If the mock has an explicit `return_value` set then calls are not passed
+      to the wrapped object and the `return_value` is returned instead. A
+      short sketch of `wraps` follows this list.
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
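+
+    A short sketch of `wraps`, using a hypothetical `Real` class for
+    illustration; setting an explicit `return_value` on the child mock stops
+    the pass-through:
+
+    .. doctest::
+
+        >>> class Real(object):
+        ...     def method(self):
+        ...         return 'real result'
+        ...
+        >>> mock = Mock(wraps=Real())
+        >>> mock.method()
+        'real result'
+        >>> mock.method.assert_called_with()
+        >>> mock.method.return_value = 'faked'
+        >>> mock.method()
+        'faked'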
+
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created. See the
+ :meth:`configure_mock` method for details.
+
+
+ .. method:: assert_called_with(*args, **kwargs)
+
+ This method is a convenient way of asserting that calls are made in a
+ particular way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method(1, 2, 3, test='wow')
+ <Mock name='mock.method()' id='...'>
+ >>> mock.method.assert_called_with(1, 2, 3, test='wow')
+
+
+ .. method:: assert_called_once_with(*args, **kwargs)
+
+ Assert that the mock was called exactly once and with the specified
+ arguments.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ >>> mock('foo', bar='baz')
+ >>> mock.assert_called_once_with('foo', bar='baz')
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected to be called once. Called 2 times.
+
+
+ .. method:: assert_any_call(*args, **kwargs)
+
+ assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ :meth:`assert_called_with` and :meth:`assert_called_once_with` that
+ only pass if the call is the most recent one.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1, 2, arg='thing')
+ >>> mock('some', 'thing', 'else')
+ >>> mock.assert_any_call(1, 2, arg='thing')
+
+
+ .. method:: assert_has_calls(calls, any_order=False)
+
+ assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in :attr:`mock_calls`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock(1)
+ >>> mock(2)
+ >>> mock(3)
+ >>> mock(4)
+ >>> calls = [call(2), call(3)]
+ >>> mock.assert_has_calls(calls)
+ >>> calls = [call(4), call(2), call(3)]
+ >>> mock.assert_has_calls(calls, any_order=True)
+
+
+ .. method:: reset_mock()
+
+ The reset_mock method resets all the call attributes on a mock object:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock('hello')
+ >>> mock.called
+ True
+ >>> mock.reset_mock()
+ >>> mock.called
+ False
+
+ This can be useful where you want to make a series of assertions that
+ reuse the same object. Note that `reset_mock` *doesn't* clear the
+ return value, :attr:`side_effect` or any child attributes you have
+ set using normal assignment. Child mocks and the return value mock
+ (if any) are reset as well.
+
+
+ .. method:: mock_add_spec(spec, spec_set=False)
+
+ Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is `True` then only attributes on the spec can be set.
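+
+        A short sketch (the exact error message may vary between versions):
+
+        .. doctest::
+
+            >>> mock = Mock()
+            >>> mock.mock_add_spec(['method'])
+            >>> mock.method
+            <Mock name='mock.method' id='...'>
+            >>> mock.other
+            Traceback (most recent call last):
+            ...
+            AttributeError: Mock object has no attribute 'other'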
+
+
+ .. method:: attach_mock(mock, attribute)
+
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ :attr:`method_calls` and :attr:`mock_calls` attributes of this one.
+
+
+ .. method:: configure_mock(**kwargs)
+
+ Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ The same thing can be achieved in the constructor call to mocks:
+
+ .. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ `configure_mock` exists to make it easier to do configuration
+ after the mock has been created.
+
+
+ .. method:: __dir__()
+
+ `Mock` objects limit the results of `dir(some_mock)` to useful results.
+ For mocks with a `spec` this includes all the permitted attributes
+ for the mock.
+
+ See :data:`FILTER_DIR` for what this filtering does, and how to
+ switch it off.
+
+
+ .. method:: _get_child_mock(**kw)
+
+ Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass).
+
+
+ .. attribute:: called
+
+ A boolean representing whether or not the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.called
+ False
+ >>> mock()
+ >>> mock.called
+ True
+
+ .. attribute:: call_count
+
+ An integer telling you how many times the mock object has been called:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock.call_count
+ 0
+ >>> mock()
+ >>> mock()
+ >>> mock.call_count
+ 2
+
+
+ .. attribute:: return_value
+
+ Set this to configure the value returned by calling the mock:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value = 'fish'
+ >>> mock()
+ 'fish'
+
+ The default return value is a mock object and you can configure it in
+ the normal way:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.return_value.attribute = sentinel.Attribute
+ >>> mock.return_value()
+ <Mock name='mock()()' id='...'>
+ >>> mock.return_value.assert_called_with()
+
+ `return_value` can also be set in the constructor:
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> mock.return_value
+ 3
+ >>> mock()
+ 3
+
+
+ .. attribute:: side_effect
+
+ This can either be a function to be called when the mock is called,
+ or an exception (class or instance) to be raised.
+
+ If you pass in a function it will be called with same arguments as the
+ mock and unless the function returns the :data:`DEFAULT` singleton the
+ call to the mock will then return whatever the function returns. If the
+ function returns :data:`DEFAULT` then the mock will return its normal
+        value (from the :attr:`return_value`).
+
+ An example of a mock that raises an exception (to test exception
+ handling of an API):
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = Exception('Boom!')
+ >>> mock()
+ Traceback (most recent call last):
+ ...
+ Exception: Boom!
+
+ Using `side_effect` to return a sequence of values:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.side_effect = [3, 2, 1]
+ >>> mock(), mock(), mock()
+ (3, 2, 1)
+
+ The `side_effect` function is called with the same arguments as the
+ mock (so it is wise for it to take arbitrary args and keyword
+ arguments) and whatever it returns is used as the return value for
+ the call. The exception is if `side_effect` returns :data:`DEFAULT`,
+ in which case the normal :attr:`return_value` is used.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=3)
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> mock.side_effect = side_effect
+ >>> mock()
+ 3
+
+ `side_effect` can be set in the constructor. Here's an example that
+ adds one to the value the mock is called with and returns it:
+
+ .. doctest::
+
+ >>> side_effect = lambda value: value + 1
+ >>> mock = Mock(side_effect=side_effect)
+ >>> mock(3)
+ 4
+ >>> mock(-8)
+ -7
+
+ Setting `side_effect` to `None` clears it:
+
+ .. doctest::
+
+ >>> from mock import Mock
+ >>> m = Mock(side_effect=KeyError, return_value=3)
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ KeyError
+ >>> m.side_effect = None
+ >>> m()
+ 3
+
+
+ .. attribute:: call_args
+
+ This is either `None` (if the mock hasn't been called), or the
+ arguments that the mock was last called with. This will be in the
+ form of a tuple: the first member is any ordered arguments the mock
+ was called with (or an empty tuple) and the second member is any
+ keyword arguments (or an empty dictionary).
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> print mock.call_args
+ None
+ >>> mock()
+ >>> mock.call_args
+ call()
+ >>> mock.call_args == ()
+ True
+ >>> mock(3, 4)
+ >>> mock.call_args
+ call(3, 4)
+ >>> mock.call_args == ((3, 4),)
+ True
+ >>> mock(3, 4, 5, key='fish', next='w00t!')
+ >>> mock.call_args
+ call(3, 4, 5, key='fish', next='w00t!')
+
+        `call_args`, along with members of the lists :attr:`call_args_list`,
+        :attr:`method_calls` and :attr:`mock_calls`, are all :data:`call`
+        objects. These are tuples, so they can be unpacked to get at the
+        individual arguments and make more complex assertions. See
+        :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: call_args_list
+
+ This is a list of all the calls made to the mock object in sequence
+ (so the length of the list is the number of times it has been
+ called). Before any calls have been made it is an empty list. The
+ :data:`call` object can be used for conveniently constructing lists of
+ calls to compare with `call_args_list`.
+
+ .. doctest::
+
+ >>> mock = Mock(return_value=None)
+ >>> mock()
+ >>> mock(3, 4)
+ >>> mock(key='fish', next='w00t!')
+ >>> mock.call_args_list
+ [call(), call(3, 4), call(key='fish', next='w00t!')]
+ >>> expected = [(), ((3, 4),), ({'key': 'fish', 'next': 'w00t!'},)]
+ >>> mock.call_args_list == expected
+ True
+
+ Members of `call_args_list` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: method_calls
+
+ As well as tracking calls to themselves, mocks also track calls to
+ methods and attributes, and *their* methods and attributes:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.method()
+ <Mock name='mock.method()' id='...'>
+ >>> mock.property.method.attribute()
+ <Mock name='mock.property.method.attribute()' id='...'>
+ >>> mock.method_calls
+ [call.method(), call.property.method.attribute()]
+
+ Members of `method_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: mock_calls
+
+ `mock_calls` records *all* calls to the mock object, its methods, magic
+ methods *and* return value mocks.
+
+ .. doctest::
+
+ >>> mock = MagicMock()
+ >>> result = mock(1, 2, 3)
+ >>> mock.first(a=3)
+ <MagicMock name='mock.first()' id='...'>
+ >>> mock.second()
+ <MagicMock name='mock.second()' id='...'>
+ >>> int(mock)
+ 1
+ >>> result(1)
+ <MagicMock name='mock()()' id='...'>
+ >>> expected = [call(1, 2, 3), call.first(a=3), call.second(),
+ ... call.__int__(), call()(1)]
+ >>> mock.mock_calls == expected
+ True
+
+ Members of `mock_calls` are :data:`call` objects. These can be
+ unpacked as tuples to get at the individual arguments. See
+ :ref:`calls as tuples <calls-as-tuples>`.
+
+
+ .. attribute:: __class__
+
+ Normally the `__class__` attribute of an object will return its type.
+        For a mock object with a `spec`, `__class__` returns the spec class
+ instead. This allows mock objects to pass `isinstance` tests for the
+ object they are replacing / masquerading as:
+
+ .. doctest::
+
+ >>> mock = Mock(spec=3)
+ >>> isinstance(mock, int)
+ True
+
+        `__class__` is assignable to; this allows a mock to pass an
+        `isinstance` check without forcing you to use a spec:
+
+ .. doctest::
+
+ >>> mock = Mock()
+ >>> mock.__class__ = dict
+ >>> isinstance(mock, dict)
+ True
+
+.. class:: NonCallableMock(spec=None, wraps=None, name=None, spec_set=None, **kwargs)
+
+    A non-callable version of `Mock`. The constructor parameters have the same
+    meaning as for `Mock`, with the exception of `return_value` and
+    `side_effect` which have no meaning on a non-callable mock.
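+
+    Calling a `NonCallableMock` raises a `TypeError`; a quick sketch (the
+    message below comes from Python itself):
+
+    .. doctest::
+
+        >>> mock = NonCallableMock()
+        >>> mock()
+        Traceback (most recent call last):
+        ...
+        TypeError: 'NonCallableMock' object is not callable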
+
+Mock objects that use a class or an instance as a `spec` or `spec_set` are able
+to pass `isinstance` tests:
+
+.. doctest::
+
+ >>> mock = Mock(spec=SomeClass)
+ >>> isinstance(mock, SomeClass)
+ True
+ >>> mock = Mock(spec_set=SomeClass())
+ >>> isinstance(mock, SomeClass)
+ True
+
+The `Mock` classes have support for mocking magic methods. See :ref:`magic
+methods <magic-methods>` for the full details.
+
+The mock classes and the :func:`patch` decorators all take arbitrary keyword
+arguments for configuration. For the `patch` decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:
+
+.. doctest::
+
+ >>> m = MagicMock(attribute=3, other='fish')
+ >>> m.attribute
+ 3
+ >>> m.other
+ 'fish'
+
+The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can't use dotted names directly in a call you
+have to create a dictionary and unpack it using `**`:
+
+.. doctest::
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock = Mock(some_attribute='eggs', **attrs)
+ >>> mock.some_attribute
+ 'eggs'
+ >>> mock.method()
+ 3
+ >>> mock.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+
+.. class:: PropertyMock(*args, **kwargs)
+
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock, with
+ no args. Setting it calls the mock with the value being set.
+
+ .. doctest::
+
+ >>> class Foo(object):
+ ... @property
+ ... def foo(self):
+ ... return 'something'
+ ... @foo.setter
+ ... def foo(self, value):
+ ... pass
+ ...
+ >>> with patch('__main__.Foo.foo', new_callable=PropertyMock) as mock_foo:
+ ... mock_foo.return_value = 'mockity-mock'
+ ... this_foo = Foo()
+ ... print this_foo.foo
+ ... this_foo.foo = 6
+ ...
+ mockity-mock
+ >>> mock_foo.mock_calls
+ [call(), call(6)]
+
+Because of the way mock attributes are stored you can't directly attach a
+`PropertyMock` to a mock object. Instead you can attach it to the mock type
+object:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> p = PropertyMock(return_value=3)
+ >>> type(m).foo = p
+ >>> m.foo
+ 3
+ >>> p.assert_called_once_with()
+
+
+.. index:: __call__
+.. index:: calling
+
+Calling
+=======
+
+Mock objects are callable. The call will return the value set as the
+:attr:`~Mock.return_value` attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock) - but it is stored and the same one
+returned each time.
+
+Calls made to the object will be recorded in the attributes
+like :attr:`~Mock.call_args` and :attr:`~Mock.call_args_list`.
+
+If :attr:`~Mock.side_effect` is set then it will be called after the call has
+been recorded, so if `side_effect` raises an exception the call is still
+recorded.
+
+The simplest way to make a mock raise an exception when called is to make
+:attr:`~Mock.side_effect` an exception class or instance:
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=IndexError)
+ >>> m(1, 2, 3)
+ Traceback (most recent call last):
+ ...
+ IndexError
+ >>> m.mock_calls
+ [call(1, 2, 3)]
+ >>> m.side_effect = KeyError('Bang!')
+ >>> m('two', 'three', 'four')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Bang!'
+ >>> m.mock_calls
+ [call(1, 2, 3), call('two', 'three', 'four')]
+
+If `side_effect` is a function then whatever that function returns is what
+calls to the mock return. The `side_effect` function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:
+
+.. doctest::
+
+ >>> def side_effect(value):
+ ... return value + 1
+ ...
+ >>> m = MagicMock(side_effect=side_effect)
+ >>> m(1)
+ 2
+ >>> m(2)
+ 3
+ >>> m.mock_calls
+ [call(1), call(2)]
+
+If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+`mock.return_value` from inside `side_effect`, or return :data:`DEFAULT`:
+
+.. doctest::
+
+ >>> m = MagicMock()
+ >>> def side_effect(*args, **kwargs):
+ ... return m.return_value
+ ...
+ >>> m.side_effect = side_effect
+ >>> m.return_value = 3
+ >>> m()
+ 3
+ >>> def side_effect(*args, **kwargs):
+ ... return DEFAULT
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+
+To remove a `side_effect`, and return to the default behaviour, set the
+`side_effect` to `None`:
+
+.. doctest::
+
+ >>> m = MagicMock(return_value=6)
+ >>> def side_effect(*args, **kwargs):
+ ... return 3
+ ...
+ >>> m.side_effect = side_effect
+ >>> m()
+ 3
+ >>> m.side_effect = None
+ >>> m()
+ 6
+
+The `side_effect` can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a `StopIteration` is raised):
+
+.. doctest::
+
+ >>> m = MagicMock(side_effect=[1, 2, 3])
+ >>> m()
+ 1
+ >>> m()
+ 2
+ >>> m()
+ 3
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+If any members of the iterable are exceptions they will be raised instead of
+returned:
+
+.. doctest::
+
+ >>> iterable = (33, ValueError, 66)
+ >>> m = MagicMock(side_effect=iterable)
+ >>> m()
+ 33
+ >>> m()
+ Traceback (most recent call last):
+ ...
+ ValueError
+ >>> m()
+ 66
+
+
+.. _deleting-attributes:
+
+Deleting Attributes
+===================
+
+Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.
+
+You may want a mock object to return `False` to a `hasattr` call, or raise an
+`AttributeError` when an attribute is fetched. You can do this by providing
+an object as a `spec` for a mock, but that isn't always convenient.
+
+You "block" attributes by deleting them. Once deleted, accessing an attribute
+will raise an `AttributeError`.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> hasattr(mock, 'm')
+ True
+ >>> del mock.m
+ >>> hasattr(mock, 'm')
+ False
+ >>> del mock.f
+ >>> mock.f
+ Traceback (most recent call last):
+ ...
+ AttributeError: f
+
+
+Attaching Mocks as Attributes
+=============================
+
+When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a "child" of that mock. Calls to the child are recorded in
+the :attr:`~Mock.method_calls` and :attr:`~Mock.mock_calls` attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:
+
+.. doctest::
+
+ >>> parent = MagicMock()
+ >>> child1 = MagicMock(return_value=None)
+ >>> child2 = MagicMock(return_value=None)
+ >>> parent.child1 = child1
+ >>> parent.child2 = child2
+ >>> child1(1)
+ >>> child2(2)
+ >>> parent.mock_calls
+ [call.child1(1), call.child2(2)]
+
+The exception to this is if the mock has a name. This allows you to prevent
+the "parenting" if for some reason you don't want it to happen.
+
+.. doctest::
+
+ >>> mock = MagicMock()
+ >>> not_a_child = MagicMock(name='not-a-child')
+ >>> mock.attribute = not_a_child
+ >>> mock.attribute()
+ <MagicMock name='not-a-child()' id='...'>
+ >>> mock.mock_calls
+ []
+
+Mocks created for you by :func:`patch` are automatically given names. To
+attach mocks that have names to a parent you use the :meth:`~Mock.attach_mock`
+method:
+
+.. doctest::
+
+ >>> thing1 = object()
+ >>> thing2 = object()
+ >>> parent = MagicMock()
+ >>> with patch('__main__.thing1', return_value=None) as child1:
+ ... with patch('__main__.thing2', return_value=None) as child2:
+ ... parent.attach_mock(child1, 'child1')
+ ... parent.attach_mock(child2, 'child2')
+ ... child1('one')
+ ... child2('two')
+ ...
+ >>> parent.mock_calls
+ [call.child1('one'), call.child2('two')]
+
+
+-----
+
+.. [#] The only exceptions are magic methods and attributes (those that have
+   leading and trailing double underscores). Mock doesn't create these but
+   instead raises an ``AttributeError``. This is because the interpreter
+   will often implicitly request these methods, and gets *very* confused if it
+   receives a new Mock object when it expects a magic method. If you need
+   magic method support see :ref:`magic methods <magic-methods>`.
diff --git a/python/mock-1.0.0/html/_sources/mocksignature.txt b/python/mock-1.0.0/html/_sources/mocksignature.txt
new file mode 100644
index 000000000..dbb5019fb
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/mocksignature.txt
@@ -0,0 +1,262 @@
+mocksignature
+=============
+
+.. currentmodule:: mock
+
+.. note::
+
+ :ref:`auto-speccing`, added in mock 0.8, is a more advanced version of
+ `mocksignature` and can be used for many of the same use cases.
+
+A problem with using mock objects to replace real objects in your tests is that
+:class:`Mock` can be *too* flexible. Your code can use the mock objects in any
+way it likes, and you have to check manually that they were called correctly.
+If your code calls functions or methods with the wrong number of arguments
+then mocks don't complain.
+
+The solution to this is `mocksignature`, which creates functions with the
+same signature as the original but that delegate to a mock. You can interrogate
+the mock in the usual way to check it has been called with the *right*
+arguments, but if it is called with the wrong number of arguments it will
+raise a `TypeError` in the same way your production code would.
+
+Another advantage is that your mocked objects are real functions, which can
+be useful when your code uses
+`inspect <http://docs.python.org/library/inspect.html>`_ or depends on
+functions being function objects.
+
+.. function:: mocksignature(func, mock=None, skipfirst=False)
+
+ Create a new function with the same signature as `func` that delegates
+ to `mock`. If `skipfirst` is True the first argument is skipped, useful
+ for methods where `self` needs to be omitted from the new function.
+
+ If you don't pass in a `mock` then one will be created for you.
+
+ Functions returned by `mocksignature` have many of the same attributes
+ and assert methods as a mock object.
+
+ The mock is set as the `mock` attribute of the returned function for easy
+ access.
+
+ `mocksignature` can also be used with classes. It copies the signature of
+ the `__init__` method.
+
+ When used with callable objects (instances) it copies the signature of the
+ `__call__` method.
+
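+For example, `skipfirst` lets you wrap a plain function whose first argument
+is `self` (a minimal sketch; `method` is a hypothetical name):
+
+.. doctest::
+
+    >>> def method(self, a):
+    ...     pass
+    ...
+    >>> mock_method = mocksignature(method, skipfirst=True)
+    >>> mock_method.return_value = None
+    >>> mock_method(1)
+    >>> mock_method.assert_called_with(1)
+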
+`mocksignature` will work out if it is mocking the signature of a method on
+an instance or a method on a class and do the "right thing" with the `self`
+argument in both cases.
+
+Because of a limitation in the way that arguments are collected by functions
+created by `mocksignature`, they are *always* passed as positional arguments
+(including defaults) and not keyword arguments.
+
+
+mocksignature api
+-----------------
+
+Although the objects returned by `mocksignature` are real function objects,
+they have much of the same api as the :class:`Mock` class. This includes the
+assert methods:
+
+.. doctest::
+
+ >>> def func(a, b, c):
+ ... pass
+ ...
+ >>> func2 = mocksignature(func)
+ >>> func2.called
+ False
+ >>> func2.return_value = 3
+ >>> func2(1, 2, 3)
+ 3
+ >>> func2.called
+ True
+ >>> func2.assert_called_once_with(1, 2, 3)
+ >>> func2.assert_called_with(1, 2, 4)
+ Traceback (most recent call last):
+ ...
+ AssertionError: Expected call: mock(1, 2, 4)
+ Actual call: mock(1, 2, 3)
+ >>> func2.call_count
+ 1
+ >>> func2.side_effect = IndexError
+ >>> func2(4, 5, 6)
+ Traceback (most recent call last):
+ ...
+ IndexError
+
+The mock object that is being delegated to is available as the `mock` attribute
+of the function created by `mocksignature`.
+
+.. doctest::
+
+ >>> func2.mock.mock_calls
+ [call(1, 2, 3), call(4, 5, 6)]
+
+The methods and attributes available on functions returned by `mocksignature`
+are:
+
+ :meth:`~Mock.assert_any_call`, :meth:`~Mock.assert_called_once_with`,
+ :meth:`~Mock.assert_called_with`, :meth:`~Mock.assert_has_calls`,
+ :attr:`~Mock.call_args`, :attr:`~Mock.call_args_list`,
+ :attr:`~Mock.call_count`, :attr:`~Mock.called`,
+ :attr:`~Mock.method_calls`, `mock`, :attr:`~Mock.mock_calls`,
+ :meth:`~Mock.reset_mock`, :attr:`~Mock.return_value`, and
+ :attr:`~Mock.side_effect`.
+
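+For instance, `reset_mock` clears the recorded calls on the function, just as
+it does on a mock (a quick sketch continuing with `func2` from above):
+
+.. doctest::
+
+    >>> func2.reset_mock()
+    >>> func2.called
+    False
+    >>> func2.call_count
+    0
+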
+
+Example use
+-----------
+
+Basic use
+~~~~~~~~~
+
+.. doctest::
+
+ >>> def function(a, b, c=None):
+ ... pass
+ ...
+ >>> mock = Mock()
+ >>> function = mocksignature(function, mock)
+ >>> function()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+ >>> function.return_value = 'some value'
+ >>> function(1, 2, 'foo')
+ 'some value'
+ >>> function.assert_called_with(1, 2, 'foo')
+
+
+Keyword arguments
+~~~~~~~~~~~~~~~~~
+
+Note that arguments to functions created by `mocksignature` are always passed
+in to the underlying mock by position even when called with keywords:
+
+.. doctest::
+
+ >>> def function(a, b, c=None):
+ ... pass
+ ...
+ >>> function = mocksignature(function)
+ >>> function.return_value = None
+ >>> function(1, 2)
+ >>> function.assert_called_with(1, 2, None)
+
+
+Mocking methods and self
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you use `mocksignature` to replace a method on a class then `self`
+will be included in the method signature - and you will need to include
+the instance when you do your asserts.
+
+As a curious consequence of the way Python (2) wraps methods fetched from a
+class, we can *get* the `return_value` from a function set on a class, but we
+can't set it. We have to do this through the exposed `mock` attribute instead:
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> SomeClass.method = mocksignature(SomeClass.method)
+ >>> SomeClass.method.mock.return_value = None
+ >>> instance = SomeClass()
+ >>> instance.method()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 4 arguments (1 given)
+ >>> instance.method(1, 2, 3)
+ >>> instance.method.assert_called_with(instance, 1, 2, 3)
+
+When you use `mocksignature` on instance methods, `self` isn't included (and
+we can set the `return_value` etc directly):
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> instance = SomeClass()
+ >>> instance.method = mocksignature(instance.method)
+ >>> instance.method.return_value = None
+ >>> instance.method(1, 2, 3)
+ >>> instance.method.assert_called_with(1, 2, 3)
+
+
+mocksignature with classes
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When used with a class `mocksignature` copies the signature of the `__init__`
+method.
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __init__(self, foo, bar):
+ ... pass
+ ...
+ >>> MockSomething = mocksignature(Something)
+ >>> instance = MockSomething(10, 9)
+ >>> assert instance is MockSomething.return_value
+ >>> MockSomething.assert_called_with(10, 9)
+ >>> MockSomething()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+
+Because the object returned by `mocksignature` is a function rather than a
+`Mock` you lose the other capabilities of `Mock`, like dynamic attribute
+creation.
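+
+For instance, attribute access that a `Mock` would happily auto-create simply
+fails on the function returned by `mocksignature` (a quick sketch continuing
+from `MockSomething`):
+
+.. doctest::
+
+    >>> MockSomething.some_attribute
+    Traceback (most recent call last):
+    ...
+    AttributeError: 'function' object has no attribute 'some_attribute'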
+
+
+mocksignature with callable objects
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When used with a callable object `mocksignature` copies the signature of the
+`__call__` method.
+
+.. doctest::
+
+ >>> class Something(object):
+ ... def __call__(self, spam, eggs):
+ ... pass
+ ...
+ >>> something = Something()
+ >>> mock_something = mocksignature(something)
+ >>> result = mock_something(10, 9)
+ >>> mock_something.assert_called_with(10, 9)
+ >>> mock_something()
+ Traceback (most recent call last):
+ ...
+ TypeError: <lambda>() takes at least 2 arguments (0 given)
+
+
+mocksignature argument to patch
+-------------------------------
+
+`mocksignature` is available as a keyword argument to :func:`patch` or
+:func:`patch.object`. It can be used with functions / methods / classes and
+callable objects.
+
+.. doctest::
+
+ >>> class SomeClass(object):
+ ... def method(self, a, b, c=None):
+ ... pass
+ ...
+ >>> @patch.object(SomeClass, 'method', mocksignature=True)
+ ... def test(mock_method):
+ ... instance = SomeClass()
+ ... mock_method.return_value = None
+ ... instance.method(1, 2)
+ ... mock_method.assert_called_with(instance, 1, 2, None)
+ ...
+ >>> test()
diff --git a/python/mock-1.0.0/html/_sources/patch.txt b/python/mock-1.0.0/html/_sources/patch.txt
new file mode 100644
index 000000000..3d56264fb
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/patch.txt
@@ -0,0 +1,636 @@
+==================
+ Patch Decorators
+==================
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class SomeClass(object):
+ static_method = None
+ class_method = None
+ attribute = None
+
+ sys.modules['package'] = package = Mock(name='package')
+ sys.modules['package.module'] = package.module
+
+ class TestCase(unittest2.TestCase):
+ def run(self):
+ result = unittest2.TestResult()
+ super(unittest2.TestCase, self).run(result)
+ assert result.wasSuccessful()
+
+.. testcleanup::
+
+ patch.TEST_PREFIX = 'test'
+
+
+The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.
+
+
+patch
+=====
+
+.. note::
+
+ `patch` is straightforward to use. The key is to do the patching in the
+ right namespace. See the section `where to patch`_.
+
+.. function:: patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ :class:`MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+    then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being mocked
+ will have their arguments checked and will raise a `TypeError` if they are
+ called with the wrong signature. For mocks
+ replacing a class, their return value (the 'instance') will have the same
+ spec as the class. See the :func:`create_autospec` function and
+ :ref:`auto-speccing`.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+    attributes that your production code creates at runtime. It is off by
+    default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
+
+ Patch can be used as a `TestCase` class decorator. It works by
+    decorating each test method in the class. This reduces the boilerplate
+    code when your test methods share a common set of patchings. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+
+`patch` as function decorator, creating the mock for you and passing it into
+the decorated function:
+
+.. doctest::
+
+ >>> @patch('__main__.SomeClass')
+ ... def function(normal_argument, mock_class):
+ ... print mock_class is SomeClass
+ ...
+ >>> function(None)
+ True
+
+
+Patching a class replaces the class with a `MagicMock` *instance*. If the
+class is instantiated in the code under test then it is the
+:attr:`~Mock.return_value` of the mock that will be used.
+
+If the class is instantiated multiple times you could use
+:attr:`~Mock.side_effect` to return a new mock each time. Alternatively you
+can set the `return_value` to be anything you want.
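+
+For example, a minimal sketch using an iterable `side_effect` so that each
+instantiation returns a different mock:
+
+.. doctest::
+
+    >>> class Class(object):
+    ...     pass
+    ...
+    >>> with patch('__main__.Class', side_effect=[MagicMock(), MagicMock()]):
+    ...     first = Class()
+    ...     second = Class()
+    ...     assert first is not second
+    ...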
+
+To configure return values on methods of *instances* on the patched class
+you must do this on the `return_value`. For example:
+
+.. doctest::
+
+ >>> class Class(object):
+ ... def method(self):
+ ... pass
+ ...
+ >>> with patch('__main__.Class') as MockClass:
+ ... instance = MockClass.return_value
+ ... instance.method.return_value = 'foo'
+ ... assert Class() is instance
+ ... assert Class().method() == 'foo'
+ ...
+
+If you use `spec` or `spec_set` and `patch` is replacing a *class*, then the
+return value of the created mock will have the same spec.
+
+.. doctest::
+
+ >>> Original = Class
+ >>> patcher = patch('__main__.Class', spec=True)
+ >>> MockClass = patcher.start()
+ >>> instance = MockClass()
+ >>> assert isinstance(instance, Original)
+ >>> patcher.stop()
+
+The `new_callable` argument is useful where you want to use an alternative
+class to the default :class:`MagicMock` for the created mock. For example, if
+you wanted a :class:`NonCallableMock` to be used:
+
+.. doctest::
+
+ >>> thing = object()
+ >>> with patch('__main__.thing', new_callable=NonCallableMock) as mock_thing:
+ ... assert thing is mock_thing
+ ... thing()
+ ...
+ Traceback (most recent call last):
+ ...
+ TypeError: 'NonCallableMock' object is not callable
+
+Another use case might be to replace an object with a `StringIO` instance:
+
+.. doctest::
+
+ >>> from StringIO import StringIO
+ >>> def foo():
+ ... print 'Something'
+ ...
+ >>> @patch('sys.stdout', new_callable=StringIO)
+ ... def test(mock_stdout):
+ ... foo()
+ ... assert mock_stdout.getvalue() == 'Something\n'
+ ...
+ >>> test()
+
+When `patch` is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:
+
+.. doctest::
+
+ >>> patcher = patch('__main__.thing', first='one', second='two')
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.first
+ 'one'
+ >>> mock_thing.second
+ 'two'
+
+As well as attributes on the created mock itself, attributes of child mocks,
+like the :attr:`~Mock.return_value` and :attr:`~Mock.side_effect`, can also
+be configured. These aren't syntactically valid to pass in directly as
+keyword arguments, but a dictionary with these as keys can still be expanded
+into a `patch` call using `**`:
+
+.. doctest::
+
+ >>> config = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> patcher = patch('__main__.thing', **config)
+ >>> mock_thing = patcher.start()
+ >>> mock_thing.method()
+ 3
+ >>> mock_thing.other()
+ Traceback (most recent call last):
+ ...
+ KeyError
+
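+`create=True` is handy for attributes that only exist at runtime. A minimal
+sketch (`dynamic_attribute` is a hypothetical name):
+
+.. doctest::
+
+    >>> @patch('__main__.SomeClass.dynamic_attribute', 42, create=True)
+    ... def test():
+    ...     assert SomeClass.dynamic_attribute == 42
+    ...
+    >>> test()
+    >>> hasattr(SomeClass, 'dynamic_attribute')
+    False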
+
+patch.object
+============
+
+.. function:: patch.object(target, attribute, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. Like `patch`,
+ `patch.object` takes arbitrary keyword arguments for configuring the mock
+ object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+You can call `patch.object` with either three or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.
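+
+A minimal sketch of the three argument form, reusing the `SomeClass` defined
+for these examples:
+
+.. doctest::
+
+    >>> original = SomeClass.attribute
+    >>> with patch.object(SomeClass, 'attribute', sentinel.attribute):
+    ...     assert SomeClass.attribute is sentinel.attribute
+    ...
+    >>> assert SomeClass.attribute is original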
+
+When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... def test(mock_method):
+ ... SomeClass.class_method(3)
+ ... mock_method.assert_called_with(3)
+ ...
+ >>> test()
+
+`spec`, `create` and the other arguments to `patch.object` have the same
+meaning as they do for `patch`.
+
+
+patch.dict
+==========
+
+.. function:: patch.dict(in_dict, values=(), clear=False, **kwargs)
+
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary.
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
+
+`patch.dict` can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.
+
+.. doctest::
+
+ >>> from mock import patch
+ >>> foo = {}
+ >>> with patch.dict(foo, {'newkey': 'newvalue'}):
+ ... assert foo == {'newkey': 'newvalue'}
+ ...
+ >>> assert foo == {}
+
+ >>> import os
+ >>> with patch.dict('os.environ', {'newkey': 'newvalue'}):
+ ... print os.environ['newkey']
+ ...
+ newvalue
+ >>> assert 'newkey' not in os.environ
+
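+`values` can also be passed as an iterable of `(key, value)` pairs (a minimal
+sketch):
+
+.. doctest::
+
+    >>> foo = {}
+    >>> with patch.dict(foo, [('newkey', 'newvalue')]):
+    ...     assert foo == {'newkey': 'newvalue'}
+    ...
+    >>> assert foo == {}
+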
+Keywords can be used in the `patch.dict` call to set values in the dictionary:
+
+.. doctest::
+
+ >>> mymodule = MagicMock()
+ >>> mymodule.function.return_value = 'fish'
+ >>> with patch.dict('sys.modules', mymodule=mymodule):
+ ... import mymodule
+ ... mymodule.function('some', 'args')
+ ...
+ 'fish'
+
+`patch.dict` can be used with dictionary like objects that aren't actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership testing. This corresponds to the
+magic methods `__getitem__`, `__setitem__`, `__delitem__` and either
+`__iter__` or `__contains__`.
+
+.. doctest::
+
+ >>> class Container(object):
+ ... def __init__(self):
+ ... self.values = {}
+ ... def __getitem__(self, name):
+ ... return self.values[name]
+ ... def __setitem__(self, name, value):
+ ... self.values[name] = value
+ ... def __delitem__(self, name):
+ ... del self.values[name]
+ ... def __iter__(self):
+ ... return iter(self.values)
+ ...
+ >>> thing = Container()
+ >>> thing['one'] = 1
+ >>> with patch.dict(thing, one=2, two=3):
+ ... assert thing['one'] == 2
+ ... assert thing['two'] == 3
+ ...
+ >>> assert thing['one'] == 1
+ >>> assert list(thing) == ['one']
+
+
+patch.multiple
+==============
+
+.. function:: patch.multiple(target, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use :data:`DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`, `autospec` and
+ `new_callable` have the same meaning as for `patch`. These arguments will
+ be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+
+If you want `patch.multiple` to create mocks for you, then you can use
+:data:`DEFAULT` as the value. If you use `patch.multiple` as a decorator
+then the created mocks are passed into the decorated function by keyword.
+
+.. doctest::
+
+ >>> thing = object()
+ >>> other = object()
+
+ >>> @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(thing, other):
+ ... assert isinstance(thing, MagicMock)
+ ... assert isinstance(other, MagicMock)
+ ...
+ >>> test_function()
+
+`patch.multiple` can be nested with other `patch` decorators, but the mocks
+it passes by keyword must come *after* any of the standard (positional)
+arguments created by `patch`:
+
+.. doctest::
+
+ >>> @patch('sys.exit')
+ ... @patch.multiple('__main__', thing=DEFAULT, other=DEFAULT)
+ ... def test_function(mock_exit, other, thing):
+ ... assert 'other' in repr(other)
+ ... assert 'thing' in repr(thing)
+ ... assert 'exit' in repr(mock_exit)
+ ...
+ >>> test_function()
+
+If `patch.multiple` is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:
+
+.. doctest::
+
+ >>> with patch.multiple('__main__', thing=DEFAULT, other=DEFAULT) as values:
+ ... assert 'other' in repr(values['other'])
+ ... assert 'thing' in repr(values['thing'])
+ ... assert values['thing'] is thing
+ ... assert values['other'] is other
+ ...
+
+
+.. _start-and-stop:
+
+patch methods: start and stop
+=============================
+
+All the patchers have `start` and `stop` methods. These make it simpler to do
+patching in `setUp` methods or where you want to do multiple patches without
+nesting decorators or with statements.
+
+To use them call `patch`, `patch.object` or `patch.dict` as normal and keep a
+reference to the returned `patcher` object. You can then call `start` to put
+the patch in place and `stop` to undo it.
+
+If you are using `patch` to create a mock for you then it will be returned by
+the call to `patcher.start`.
+
+.. doctest::
+
+ >>> patcher = patch('package.module.ClassName')
+ >>> from package import module
+ >>> original = module.ClassName
+ >>> new_mock = patcher.start()
+ >>> assert module.ClassName is not original
+ >>> assert module.ClassName is new_mock
+ >>> patcher.stop()
+ >>> assert module.ClassName is original
+ >>> assert module.ClassName is not new_mock
+
+
+A typical use case for this might be for doing multiple patches in the `setUp`
+method of a `TestCase`:
+
+.. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... self.patcher1 = patch('package.module.Class1')
+ ... self.patcher2 = patch('package.module.Class2')
+ ... self.MockClass1 = self.patcher1.start()
+ ... self.MockClass2 = self.patcher2.start()
+ ...
+ ... def tearDown(self):
+ ... self.patcher1.stop()
+ ... self.patcher2.stop()
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class1 is self.MockClass1
+ ... assert package.module.Class2 is self.MockClass2
+ ...
+ >>> MyTest('test_something').run()
+
+.. caution::
+
+ If you use this technique you must ensure that the patching is "undone" by
+ calling `stop`. This can be fiddlier than you might think, because if an
+ exception is raised in the setUp then tearDown is not called. `unittest2
+ <http://pypi.python.org/pypi/unittest2>`_ cleanup functions make this
+ easier.
+
+ .. doctest::
+
+ >>> class MyTest(TestCase):
+ ... def setUp(self):
+ ... patcher = patch('package.module.Class')
+ ... self.MockClass = patcher.start()
+ ... self.addCleanup(patcher.stop)
+ ...
+ ... def test_something(self):
+ ... assert package.module.Class is self.MockClass
+ ...
+ >>> MyTest('test_something').run()
+
+ As an added bonus you no longer need to keep a reference to the `patcher`
+ object.
+
+It is also possible to stop all patches that have been started by using
+`patch.stopall`.
+
+.. function:: patch.stopall
+
+ Stop all active patches. Only stops patches started with `start`.
+
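+A short sketch, reusing the mocked `package.module` from the examples above:
+
+.. doctest::
+
+    >>> patcher1 = patch('package.module.Class1')
+    >>> patcher2 = patch('package.module.Class2')
+    >>> mock1 = patcher1.start()
+    >>> mock2 = patcher2.start()
+    >>> patch.stopall()
+    >>> assert package.module.Class1 is not mock1
+    >>> assert package.module.Class2 is not mock2
+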
+
+TEST_PREFIX
+===========
+
+All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with `test` as being test methods. This is the same way that the
+`unittest.TestLoader` finds test methods by default.
+
+If you want to use a different prefix for your tests, you can inform the
+patchers of the alternative prefix by setting `patch.TEST_PREFIX`:
+
+.. doctest::
+
+ >>> patch.TEST_PREFIX = 'foo'
+ >>> value = 3
+ >>>
+ >>> @patch('__main__.value', 'not three')
+ ... class Thing(object):
+ ... def foo_one(self):
+ ... print value
+ ... def foo_two(self):
+ ... print value
+ ...
+ >>>
+ >>> Thing().foo_one()
+ not three
+ >>> Thing().foo_two()
+ not three
+ >>> value
+ 3
+
+
+Nesting Patch Decorators
+========================
+
+If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:
+
+.. doctest::
+
+ >>> @patch.object(SomeClass, 'class_method')
+ ... @patch.object(SomeClass, 'static_method')
+ ... def test(mock1, mock2):
+ ... assert SomeClass.static_method is mock1
+ ... assert SomeClass.class_method is mock2
+ ... SomeClass.static_method('foo')
+ ... SomeClass.class_method('bar')
+ ... return mock1, mock2
+ ...
+ >>> mock1, mock2 = test()
+ >>> mock1.assert_called_once_with('foo')
+ >>> mock2.assert_called_once_with('bar')
+
+
+Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.
+
+Like all context-managers, patches can be nested using contextlib's `nested`
+function; *every* patching will appear in the tuple after "as":
+
+.. doctest::
+
+ >>> from contextlib import nested
+ >>> with nested(
+ ... patch('package.module.ClassName1'),
+ ... patch('package.module.ClassName2')
+ ... ) as (MockClass1, MockClass2):
+ ... assert package.module.ClassName1 is MockClass1
+ ... assert package.module.ClassName2 is MockClass2
+ ...
+
+
+.. _where-to-patch:
+
+Where to patch
+==============
+
+`patch` works by (temporarily) replacing the object that a *name* points to
+with another one. There can be many names pointing to any individual object,
+so for patching to work you must ensure that you patch the name used by the
+system under test.
+
+The basic principle is that you patch where an object is *looked up*, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.
+
+Imagine we have a project that we want to test with the following structure::
+
+ a.py
+ -> Defines SomeClass
+
+ b.py
+ -> from a import SomeClass
+ -> some_function instantiates SomeClass
+
+Now we want to test `some_function` but we want to mock out `SomeClass` using
+`patch`. The problem is that when we import module b, which we will have to
+do, it imports `SomeClass` from module a. If we use `patch` to mock out
+`a.SomeClass` then it will have no effect on our test; module b already has a
+reference to the *real* `SomeClass` and it looks like our patching had no
+effect.
+
+The key is to patch out `SomeClass` where it is used (or where it is looked
+up). In this case `some_function` will actually look up `SomeClass` in module
+b, where we have imported it. The patching should look like:
+
+ `@patch('b.SomeClass')`
+
+However, consider the alternative scenario where instead of `from a import
+SomeClass` module b does `import a` and `some_function` uses `a.SomeClass`.
+Both of these import forms are common. In this case the class we want to
+patch is being looked up in module a, so we have to patch `a.SomeClass`
+instead:
+
+ `@patch('a.SomeClass')`
+
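+Putting the first layout together, a hypothetical test might look like this
+(illustrative only; modules `a` and `b` are the ones sketched above)::
+
+    import b
+
+    @patch('b.SomeClass')
+    def test_some_function(MockSomeClass):
+        b.some_function()
+        assert MockSomeClass.called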
+
+Patching Descriptors and Proxy Objects
+======================================
+
+Since version 0.6.0 both patch_ and patch.object_ have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the *class* rather than an instance.
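+
+A brief sketch patching a static method on the class (names are illustrative):
+
+.. doctest::
+
+    >>> class Klass(object):
+    ...     @staticmethod
+    ...     def static_method():
+    ...         return 'original'
+    ...
+    >>> with patch.object(Klass, 'static_method') as mock_method:
+    ...     mock_method.return_value = 'mocked'
+    ...     assert Klass.static_method() == 'mocked'
+    ...
+    >>> Klass.static_method()
+    'original'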
+
+Since version 0.7.0 patch_ and patch.object_ work correctly with some objects
+that proxy attribute access, like the `django settings object
+<http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198>`_.
+
+.. note::
+
+ In django `import settings` and `from django.conf import settings`
+ return different objects. If you are using libraries / apps that do both you
+ may have to patch both. Grrr...
diff --git a/python/mock-1.0.0/html/_sources/sentinel.txt b/python/mock-1.0.0/html/_sources/sentinel.txt
new file mode 100644
index 000000000..1c5223da0
--- /dev/null
+++ b/python/mock-1.0.0/html/_sources/sentinel.txt
@@ -0,0 +1,58 @@
+==========
+ Sentinel
+==========
+
+
+.. currentmodule:: mock
+
+.. testsetup::
+
+ class ProductionClass(object):
+ def something(self):
+ return self.method()
+
+ class Test(unittest2.TestCase):
+ def testSomething(self):
+ pass
+ self = Test('testSomething')
+
+
+.. data:: sentinel
+
+ The ``sentinel`` object provides a convenient way of providing unique
+ objects for your tests.
+
+ Attributes are created on demand when you access them by name. Accessing
+ the same attribute will always return the same object. The objects
+ returned have a sensible repr so that test failure messages are readable.
+
+
+.. data:: DEFAULT
+
+ The `DEFAULT` object is a pre-created sentinel (actually
+ `sentinel.DEFAULT`). It can be used by :attr:`~Mock.side_effect`
+ functions to indicate that the normal return value should be used.
+
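+For example, a `side_effect` function can return `DEFAULT` to indicate that
+the configured `return_value` should be used (a minimal sketch):
+
+.. doctest::
+
+    >>> mock = Mock(return_value=sentinel.fallback)
+    >>> def side_effect(arg):
+    ...     if arg < 0:
+    ...         raise ValueError(arg)
+    ...     return DEFAULT
+    ...
+    >>> mock.side_effect = side_effect
+    >>> mock(1) is sentinel.fallback
+    True
+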
+
+Sentinel Example
+================
+
+Sometimes when testing you need to check that a specific object is passed as
+an argument to another method, or returned. It can be common to create named
+sentinel objects to test this. `sentinel` provides a convenient way of
+creating and testing the identity of objects like this.
+
+In this example we monkey patch `method` to return
+`sentinel.some_object`:
+
+.. doctest::
+
+ >>> real = ProductionClass()
+ >>> real.method = Mock(name="method")
+ >>> real.method.return_value = sentinel.some_object
+ >>> result = real.method()
+ >>> assert result is sentinel.some_object
+ >>> sentinel.some_object
+ sentinel.some_object
+
+
diff --git a/python/mock-1.0.0/html/_static/adctheme.css b/python/mock-1.0.0/html/_static/adctheme.css
new file mode 100644
index 000000000..60395bcef
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/adctheme.css
@@ -0,0 +1,757 @@
+/**
+ * Sphinx stylesheet -- basic theme
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ */
+ h3 {
+ color:#000000;
+ font-size: 17px;
+ margin-bottom:0.5em;
+ margin-top:2em;
+ }
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- header ---------------------------------------------------------------- */
+
+#header #title {
+ background:#29334F url(title_background.png) repeat-x scroll 0 0;
+ border-bottom:1px solid #B6B6B6;
+ height:25px;
+ overflow:hidden;
+}
+#headerButtons {
+ position: absolute;
+ list-style: none outside;
+ top: 26px;
+ left: 0px;
+ right: 0px;
+ margin: 0px;
+ padding: 0px;
+ border-top: 1px solid #2B334F;
+ border-bottom: 1px solid #EDEDED;
+ height: 20px;
+ font-size: 8pt;
+ overflow: hidden;
+ background-color: #D8D8D8;
+}
+
+#headerButtons li {
+ background-repeat:no-repeat;
+ display:inline;
+ margin-top:0;
+ padding:0;
+}
+
+.headerButton {
+ display: inline;
+ height:20px;
+}
+
+.headerButton a {
+ text-decoration: none;
+ float: right;
+ height: 20px;
+ padding: 4px 15px;
+ border-left: 1px solid #ACACAC;
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+ color: black;
+}
+.headerButton a:hover {
+ color: white;
+ background-color: #787878;
+
+}
+
+li#toc_button {
+ text-align:left;
+}
+
+li#toc_button .headerButton a {
+ width:198px;
+ padding-top: 4px;
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+ color: black;
+ float: left;
+ padding-left:15px;
+ border-right:1px solid #ACACAC;
+ background:transparent url(triangle_open.png) no-repeat scroll 4px 6px;
+}
+
+li#toc_button .headerButton a:hover {
+ background-color: #787878;
+ color: white;
+}
+
+li#page_buttons {
+position:absolute;
+right:0;
+}
+
+#breadcrumbs {
+ color: black;
+ background-image:url(breadcrumb_background.png);
+ border-top:1px solid #2B334F;
+ bottom:0;
+ font-size:10px;
+ height:15px;
+ left:0;
+ overflow:hidden;
+ padding:3px 10px 0;
+ position:absolute;
+ right:0;
+ white-space:nowrap;
+ z-index:901;
+}
+#breadcrumbs a {
+ color: black;
+ text-decoration: none;
+}
+#breadcrumbs a:hover {
+ text-decoration: underline;
+}
+#breadcrumbs img {
+ padding-left: 3px;
+}
+/* -- sidebar --------------------------------------------------------------- */
+#sphinxsidebar {
+ position: absolute;
+ top: 84px;
+ bottom: 19px;
+ left: 0px;
+ width: 229px;
+ background-color: #E4EBF7;
+ border-right: 1px solid #ACACAC;
+ border-top: 1px solid #2B334F;
+ overflow-x: hidden;
+ overflow-y: auto;
+ padding: 0px 0px 0px 0px;
+ font-size:11px;
+}
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+#sphinxsidebar li {
+ margin: 0px;
+ padding: 0px;
+ font-weight: normal;
+ margin: 0px 0px 7px 0px;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ font-size: 11px;
+}
+
+#sphinxsidebar ul {
+ list-style: none;
+ margin: 0px 0px 0px 0px;
+ padding: 0px 5px 0px 5px;
+}
+
+#sphinxsidebar ul ul,
+#sphinxsidebar ul.want-points {
+ list-style: square;
+}
+
+#sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+#sphinxsidebar form {
+ margin-top: 10px;
+}
+
+#sphinxsidebar input {
+ border: 1px solid #787878;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+img {
+ border: 0;
+}
+
+#sphinxsidebar li.toctree-l1 a {
+ font-weight: bold;
+ color: #000;
+ text-decoration: none;
+}
+
+#sphinxsidebar li.toctree-l2 a {
+ font-weight: bold;
+ color: #4f4f4f;
+ text-decoration: none;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+#sphinxsidebar input.prettysearch {border:none;}
+input.searchbutton {
+ float: right;
+}
+.search-wrapper {width: 100%; height: 25px;}
+.search-wrapper input.prettysearch { border: none; width:200px; height: 16px; background: url(searchfield_repeat.png) center top repeat-x; border: 0px; margin: 0; padding: 3px 0 0 0; font: 11px "Lucida Grande", "Lucida Sans Unicode", Arial, sans-serif; }
+.search-wrapper input.prettysearch { width: 184px; margin-left: 20px; *margin-top:-1px; *margin-right:-2px; *margin-left:10px; }
+.search-wrapper .search-left { display: block; position: absolute; width: 20px; height: 19px; background: url(searchfield_leftcap.png) left top no-repeat; }
+.search-wrapper .search-right { display: block; position: relative; left: 204px; top: -19px; width: 10px; height: 19px; background: url(searchfield_rightcap.png) right top no-repeat; }
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+/* -- general body styles --------------------------------------------------- */
+.document {
+ border-top:1px solid #2B334F;
+ overflow:auto;
+ padding-left:2em;
+ padding-right:2em;
+ position:absolute;
+ z-index:1;
+ top:84px;
+ bottom:19px;
+ right:0;
+ left:230px;
+}
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+/*div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+*/
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+.admonition {
+ border: 1px solid #a1a5a9;
+ background-color: #f7f7f7;
+ margin: 20px;
+ padding: 0px 8px 7px 9px;
+ text-align: left;
+}
+.warning {
+ background-color:#E8E8E8;
+ border:1px solid #111111;
+ margin:30px;
+}
+.admonition p {
+ font: 12px 'Lucida Grande', Geneva, Helvetica, Arial, sans-serif;
+ margin-top: 7px;
+ margin-bottom: 0px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+ padding-top: 3px;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border-collapse: collapse;
+ border-top: 1px solid #919699;
+ border-left: 1px solid #919699;
+ border-right: 1px solid #919699;
+ font-size:12px;
+ padding:8px;
+ text-align:left;
+ vertical-align:top;
+}
+
+table.docutils td, table.docutils th {
+ padding: 8px;
+ font-size: 12px;
+ text-align: left;
+ vertical-align: top;
+ border-bottom: 1px solid #919699;
+}
+
+table.docutils th {
+ font-weight: bold;
+}
+/* This alternates colors in up to seven table rows (light blue for odd, white for even) */
+.docutils tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr + tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr +tr + tr {
+ background: #F0F5F9;
+}
+
+.docutils tr + tr + tr + tr + tr + tr {
+ background: #FFFFFF;
+}
+
+.docutils tr + tr + tr + tr + tr + tr + tr {
+ background: #F0F5F9;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+ font-size: 12px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+ font-size: 12px;
+}
+
+dt:target, .highlight {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 0.8em;
+}
+
+dl.glossary dd {
+ font-size:12px;
+}
+.field-list ul {
+ vertical-align: top;
+ margin: 0;
+ padding-bottom: 0;
+ list-style: none inside;
+}
+
+.field-list ul li {
+ margin-top: 0;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.refcount {
+ color: #060;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ background-color:#F1F5F9;
+ border:1px solid #C9D1D7;
+ border-spacing:0;
+ font-family:"Bitstream Vera Sans Mono",Monaco,"Lucida Console",Courier,Consolas,monospace;
+ font-size:11px;
+ padding: 10px;
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt {
+ font-family:"Bitstream Vera Sans Mono",Monaco,"Lucida Console",Courier,Consolas,monospace;
+
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+}
+
+body {
+ font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+}
+
+dl.class dt {
+ padding: 3px;
+/* border-top: 2px solid #999;*/
+}
+
+em.property {
+ font-style: normal;
+}
+
+dl.class dd p {
+ margin-top: 6px;
+}
+
+dl.class dd dl.exception dt {
+ padding: 3px;
+ background-color: #FFD6D6;
+ border-top: none;
+}
+
+dl.class dd dl.method dt {
+ padding: 3px;
+ background-color: #e9e9e9;
+ border-top: none;
+
+}
+
+dl.function dt {
+ padding: 3px;
+ border-top: 2px solid #999;
+}
+
+ul {
+list-style-image:none;
+list-style-position:outside;
+list-style-type:square;
+margin:0 0 0 30px;
+padding:0 0 12px 6px;
+}
+#docstitle {
+ height: 36px;
+ background-image: url(header_sm_mid.png);
+ left: 0;
+ top: 0;
+ position: absolute;
+ width: 100%;
+}
+#docstitle p {
+ padding:7px 0 0 45px;
+ margin: 0;
+ color: white;
+ text-shadow:0 1px 0 #787878;
+ background: transparent url(documentation.png) no-repeat scroll 10px 3px;
+ height: 36px;
+ font-size: 15px;
+}
+#header {
+height:45px;
+left:0;
+position:absolute;
+right:0;
+top:36px;
+z-index:900;
+}
+
+#header h1 {
+font-size:10pt;
+margin:0;
+padding:5px 0 0 10px;
+text-shadow:0 1px 0 #D5D5D5;
+white-space:nowrap;
+}
+
+h1 {
+-x-system-font:none;
+color:#000000;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:30px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:bold;
+line-height:normal;
+margin-bottom:25px;
+margin-top:1em;
+}
+
+.footer {
+border-top:1px solid #DDDDDD;
+clear:both;
+padding-top:9px;
+width:100%;
+font-size:10px;
+}
+
+p {
+-x-system-font:none;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:12px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:normal;
+line-height:normal;
+margin-bottom:10px;
+margin-top:0;
+}
+
+h2 {
+border-bottom:1px solid #919699;
+color:#000000;
+font-size:24px;
+margin-top:2.5em;
+padding-bottom:2px;
+}
+
+a:link:hover {
+color:#093D92;
+text-decoration:underline;
+}
+
+a:link {
+color:#093D92;
+text-decoration:none;
+}
+
+
+ol {
+list-style-position:outside;
+list-style-type:decimal;
+margin:0 0 0 30px;
+padding:0 0 12px 6px;
+}
+li {
+margin-top:7px;
+font-family:'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
+font-size:12px;
+font-size-adjust:none;
+font-stretch:normal;
+font-style:normal;
+font-variant:normal;
+font-weight:normal;
+line-height:normal;
+}
+li p {
+margin-top:8px;
+} \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/basic.css b/python/mock-1.0.0/html/_static/basic.css
new file mode 100644
index 000000000..43e8bafaf
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/basic.css
@@ -0,0 +1,540 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+ width: 100%;
+ font-size: 90%;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+}
+
+div.sphinxsidebar ul {
+ list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+ width: 170px;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+ width: 30px;
+}
+
+img {
+ border: 0;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+ width: 100%;
+}
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+div.modindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+a.headerlink {
+ visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.field-list ul {
+ padding-left: 1em;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+ clear: left;
+ float: left;
+ margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+ clear: right;
+ float: right;
+ margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left;
+}
+
+.align-center {
+ text-align: center;
+}
+
+.align-right {
+ text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px 7px 0 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ border: 0;
+ border-collapse: collapse;
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 5px;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px;
+}
+
+table.citation td {
+ border-bottom: none;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+dt:target, .highlighted {
+ background-color: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.refcount {
+ color: #060;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa;
+}
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+ font-family: sans-serif;
+}
+
+.accelerator {
+ text-decoration: underline;
+}
+
+.classifier {
+ font-style: oblique;
+}
+
+abbr, acronym {
+ border-bottom: dotted 1px;
+ cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ overflow-y: hidden; /* fixes display issues on Chrome browsers */
+}
+
+td.linenos pre {
+ padding: 5px 0px;
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ margin-left: 0.5em;
+}
+
+table.highlighttable td {
+ padding: 0 0.5em 0 0.5em;
+}
+
+tt.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+tt.descclassname {
+ background-color: transparent;
+}
+
+tt.xref, a tt {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ background-color: transparent;
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0 !important;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+} \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/breadcrumb_background.png b/python/mock-1.0.0/html/_static/breadcrumb_background.png
new file mode 100644
index 000000000..9b45910e0
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/breadcrumb_background.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/default.css b/python/mock-1.0.0/html/_static/default.css
new file mode 100644
index 000000000..2a3ac1331
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/default.css
@@ -0,0 +1,256 @@
+/*
+ * default.css_t
+ * ~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- default theme.
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: sans-serif;
+ font-size: 100%;
+ background-color: #11303d;
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+div.document {
+ background-color: #1c4e63;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 230px;
+}
+
+div.body {
+ background-color: #ffffff;
+ color: #000000;
+ padding: 0 20px 30px 20px;
+}
+
+div.footer {
+ color: #ffffff;
+ width: 100%;
+ padding: 9px 0 9px 0;
+ text-align: center;
+ font-size: 75%;
+}
+
+div.footer a {
+ color: #ffffff;
+ text-decoration: underline;
+}
+
+div.related {
+ background-color: #133f52;
+ line-height: 30px;
+ color: #ffffff;
+}
+
+div.related a {
+ color: #ffffff;
+}
+
+div.sphinxsidebar {
+}
+
+div.sphinxsidebar h3 {
+ font-family: 'Trebuchet MS', sans-serif;
+ color: #ffffff;
+ font-size: 1.4em;
+ font-weight: normal;
+ margin: 0;
+ padding: 0;
+}
+
+div.sphinxsidebar h3 a {
+ color: #ffffff;
+}
+
+div.sphinxsidebar h4 {
+ font-family: 'Trebuchet MS', sans-serif;
+ color: #ffffff;
+ font-size: 1.3em;
+ font-weight: normal;
+ margin: 5px 0 0 0;
+ padding: 0;
+}
+
+div.sphinxsidebar p {
+ color: #ffffff;
+}
+
+div.sphinxsidebar p.topless {
+ margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px;
+ padding: 0;
+ color: #ffffff;
+}
+
+div.sphinxsidebar a {
+ color: #98dbcc;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+
+
+/* -- hyperlink styles ------------------------------------------------------ */
+
+a {
+ color: #355f7c;
+ text-decoration: none;
+}
+
+a:visited {
+ color: #355f7c;
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+
+
+/* -- body styles ----------------------------------------------------------- */
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: 'Trebuchet MS', sans-serif;
+ background-color: #f2f2f2;
+ font-weight: normal;
+ color: #20435c;
+ border-bottom: 1px solid #ccc;
+ margin: 20px -20px 10px -20px;
+ padding: 3px 0 3px 10px;
+}
+
+div.body h1 { margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 160%; }
+div.body h3 { font-size: 140%; }
+div.body h4 { font-size: 120%; }
+div.body h5 { font-size: 110%; }
+div.body h6 { font-size: 100%; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ text-align: justify;
+ line-height: 130%;
+}
+
+div.admonition p.admonition-title + p {
+ display: inline;
+}
+
+div.admonition p {
+ margin-bottom: 5px;
+}
+
+div.admonition pre {
+ margin-bottom: 5px;
+}
+
+div.admonition ul, div.admonition ol {
+ margin-bottom: 5px;
+}
+
+div.note {
+ background-color: #eee;
+ border: 1px solid #ccc;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.topic {
+ background-color: #eee;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+p.admonition-title {
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+pre {
+ padding: 5px;
+ background-color: #eeffcc;
+ color: #333333;
+ line-height: 120%;
+ border: 1px solid #ac9;
+ border-left: none;
+ border-right: none;
+}
+
+tt {
+ background-color: #ecf0f3;
+ padding: 0 1px 0 1px;
+ font-size: 0.95em;
+}
+
+th {
+ background-color: #ede;
+}
+
+.warning tt {
+ background: #efc2c2;
+}
+
+.note tt {
+ background: #d6d6d6;
+}
+
+.viewcode-back {
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+} \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/doctools.js b/python/mock-1.0.0/html/_static/doctools.js
new file mode 100644
index 000000000..d4619fdfb
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/doctools.js
@@ -0,0 +1,247 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug-like debugger
+if (!window.console || !console.firebug) {
+ var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+ "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+ "profile", "profileEnd"];
+ window.console = {};
+ for (var i = 0; i < names.length; ++i)
+ window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+  // replace '+' before decoding so an encoded plus sign (%2B) survives
+  return decodeURIComponent(x.replace(/\+/g, ' '));
+};
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+ if (typeof s == 'undefined')
+ s = document.location.search;
+ var parts = s.substr(s.indexOf('?') + 1).split('&');
+ var result = {};
+ for (var i = 0; i < parts.length; i++) {
+ var tmp = parts[i].split('=', 2);
+ var key = jQuery.urldecode(tmp[0]);
+ var value = jQuery.urldecode(tmp[1]);
+ if (key in result)
+ result[key].push(value);
+ else
+ result[key] = [value];
+ }
+ return result;
+};
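+
+// Usage sketch (editorial comment, not part of the original file): for a
+// location like  page.html?highlight=mock+patch&highlight=spec  the call
+//   jQuery.getQueryParameters('?highlight=mock+patch&highlight=spec')
+// returns  {highlight: ['mock patch', 'spec']} -- repeated keys accumulate
+// into an array, and '+' decodes to a space via jQuery.urldecode.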
+
+/**
+ * small function to check if an array contains
+ * a given item.
+ */
+jQuery.contains = function(arr, item) {
+ for (var i = 0; i < arr.length; i++) {
+ if (arr[i] == item)
+ return true;
+ }
+ return false;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+ function highlight(node) {
+ if (node.nodeType == 3) {
+ var val = node.nodeValue;
+ var pos = val.toLowerCase().indexOf(text);
+ if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
+ var span = document.createElement("span");
+ span.className = className;
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+ document.createTextNode(val.substr(pos + text.length)),
+ node.nextSibling));
+ node.nodeValue = val.substr(0, pos);
+ }
+ }
+ else if (!jQuery(node).is("button, select, textarea")) {
+ jQuery.each(node.childNodes, function() {
+ highlight(this);
+ });
+ }
+ }
+ return this.each(function() {
+ highlight(this);
+ });
+};
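+
+// Usage sketch (editorial comment, not part of the original file):
+//   $('div.body').highlightText('mock', 'highlighted');
+// wraps matches of "mock" found in text nodes under div.body in
+// <span class="highlighted">...</span>, skipping form controls. The node
+// text is lowercased before matching, so callers pass the term already
+// lowercased (as highlightSearchWords below does).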
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+ init : function() {
+ this.fixFirefoxAnchorBug();
+ this.highlightSearchWords();
+ this.initIndexTable();
+ },
+
+ /**
+ * i18n support
+ */
+ TRANSLATIONS : {},
+ PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+ LOCALE : 'unknown',
+
+  // gettext and ngettext don't access `this` so that the functions
+  // can safely be bound to a different name (_ = Documentation.gettext)
+ gettext : function(string) {
+ var translated = Documentation.TRANSLATIONS[string];
+ if (typeof translated == 'undefined')
+ return string;
+ return (typeof translated == 'string') ? translated : translated[0];
+ },
+
+ ngettext : function(singular, plural, n) {
+ var translated = Documentation.TRANSLATIONS[singular];
+ if (typeof translated == 'undefined')
+ return (n == 1) ? singular : plural;
+    return translated[Documentation.PLURAL_EXPR(n)];
+ },
+
+ addTranslations : function(catalog) {
+ for (var key in catalog.messages)
+ this.TRANSLATIONS[key] = catalog.messages[key];
+ this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+ this.LOCALE = catalog.locale;
+ },
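+
+  // Catalog sketch (editorial comment, not part of the original file): the
+  // argument mirrors the translation catalogs Sphinx serves, e.g. for German:
+  //   Documentation.addTranslations({
+  //     messages: {'Hide Search Matches': 'Suchtreffer ausblenden'},
+  //     plural_expr: 'n != 1',
+  //     locale: 'de'
+  //   });
+  // after which _('Hide Search Matches') returns the translation and
+  // PLURAL_EXPR is rebuilt from plural_expr for ngettext lookups.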
+
+ /**
+ * add context elements like header anchor links
+ */
+ addContextElements : function() {
+ $('div[id] > :header:first').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this headline')).
+ appendTo(this);
+ });
+ $('dt[id]').each(function() {
+ $('<a class="headerlink">\u00B6</a>').
+ attr('href', '#' + this.id).
+ attr('title', _('Permalink to this definition')).
+ appendTo(this);
+ });
+ },
+
+ /**
+ * workaround a firefox stupidity
+ */
+ fixFirefoxAnchorBug : function() {
+ if (document.location.hash && $.browser.mozilla)
+ window.setTimeout(function() {
+ document.location.href += '';
+ }, 10);
+ },
+
+ /**
+ * highlight the search words provided in the url in the text
+ */
+ highlightSearchWords : function() {
+ var params = $.getQueryParameters();
+ var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+ if (terms.length) {
+ var body = $('div.body');
+ window.setTimeout(function() {
+ $.each(terms, function() {
+ body.highlightText(this.toLowerCase(), 'highlighted');
+ });
+ }, 10);
+ $('<p class="highlight-link"><a href="javascript:Documentation.' +
+ 'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+ .appendTo($('#searchbox'));
+ }
+ },
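+
+  // Example (editorial comment, not part of the original file): opening
+  //   page.html?highlight=mock+patch
+  // yields params.highlight == ['mock patch'], which is split on whitespace
+  // into ['mock', 'patch']; each term is highlighted in div.body and a
+  // "Hide Search Matches" link is appended to #searchbox.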
+
+ /**
+ * init the domain index toggle buttons
+ */
+ initIndexTable : function() {
+ var togglers = $('img.toggler').click(function() {
+ var src = $(this).attr('src');
+ var idnum = $(this).attr('id').substr(7);
+ $('tr.cg-' + idnum).toggle();
+ if (src.substr(-9) == 'minus.png')
+ $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+ else
+ $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+ }).css('display', '');
+ if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+ togglers.click();
+ }
+ },
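+
+  // Markup sketch (editorial comment; the HTML shape is an assumption based
+  // on the selectors above): togglers are images with ids "toggle-<n>" --
+  // substr(7) strips the "toggle-" prefix -- controlling rows that carry the
+  // matching class, e.g.:
+  //   <img src="minus.png" class="toggler" id="toggle-1" alt="toggle">
+  //   <tr class="cg-1">...</tr>
+  // Clicking swaps minus.png/plus.png and toggles the cg-<n> rows.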
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords : function() {
+ $('#searchbox .highlight-link').fadeOut(300);
+ $('span.highlighted').removeClass('highlighted');
+ },
+
+ /**
+ * make the url absolute
+ */
+ makeURL : function(relativeURL) {
+ return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+ },
+
+ /**
+ * get the current relative url
+ */
+ getCurrentURL : function() {
+ var path = document.location.pathname;
+ var parts = path.split(/\//);
+ $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+ if (this == '..')
+ parts.pop();
+ });
+ var url = parts.join('/');
+ return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+ }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+ Documentation.init();
+});
diff --git a/python/mock-1.0.0/html/_static/documentation.png b/python/mock-1.0.0/html/_static/documentation.png
new file mode 100644
index 000000000..f0d334b57
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/documentation.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/file.png b/python/mock-1.0.0/html/_static/file.png
new file mode 100644
index 000000000..d18082e39
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/file.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/header_sm_mid.png b/python/mock-1.0.0/html/_static/header_sm_mid.png
new file mode 100644
index 000000000..dce5a40e9
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/header_sm_mid.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/jquery.js b/python/mock-1.0.0/html/_static/jquery.js
new file mode 100644
index 000000000..7c2430802
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/jquery.js
@@ -0,0 +1,154 @@
+/*!
+ * jQuery JavaScript Library v1.4.2
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
+ */
+(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
+e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
+j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
+"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
+true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
+Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
+(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
+a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
+"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
+function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
+c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
+L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
+"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
+a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
+d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
+a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
+!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
+true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
+parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
+s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
+applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
+else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
+a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
+w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
+cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
+i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
+" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
+this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
+e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
+function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
+k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
+C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
+null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
+e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
+f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
+if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
+"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
+a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
+isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
+{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
+if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
+e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
+"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
+d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
+!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
+toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
+u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
+function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
+if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
+e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
+t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
+g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
+for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
+1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
+relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
+l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
+h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
+CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
+g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
+setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
+h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
+m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
+"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
+h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
+!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
+h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
+q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
+if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
+(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
+function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
+gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
+c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
+{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
+"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
+d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
+a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
+1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
+a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
+c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
+wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
+prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
+this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
+return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
+""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
+this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
+u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
+1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
+return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
+c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
+function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
+Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
+"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
+a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
+a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
+"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
+serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
+function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
+global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
+e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
+false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
+false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
+d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
+g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
+1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
+"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
+if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
+this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
+"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
+animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
+j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
+this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
+c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
+this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
+this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
+e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/python/mock-1.0.0/html/_static/minus.png b/python/mock-1.0.0/html/_static/minus.png
new file mode 100644
index 000000000..da1c5620d
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/minus.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/mobile.css b/python/mock-1.0.0/html/_static/mobile.css
new file mode 100644
index 000000000..0cfe799b2
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/mobile.css
@@ -0,0 +1,17 @@
+/*
+ * CSS adjustments (overrides) for mobile browsers that cannot handle
+ * fixed-position divs very well.
+ * This makes long pages scrollable on mobile browsers.
+ */
+
+#breadcrumbs {
+ display: none !important;
+}
+
+.document {
+ bottom: inherit !important;
+}
+
+#sphinxsidebar {
+ bottom: inherit !important;
+}
diff --git a/python/mock-1.0.0/html/_static/plus.png b/python/mock-1.0.0/html/_static/plus.png
new file mode 100644
index 000000000..b3cb37425
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/plus.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/pygments.css b/python/mock-1.0.0/html/_static/pygments.css
new file mode 100644
index 000000000..f07b654ba
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/pygments.css
@@ -0,0 +1,62 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #f0f0f0; }
+.highlight .c { color: #60a0b0; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #808080 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0040D0 } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #40a070 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #40a070 } /* Literal.Number.Float */
+.highlight .mh { color: #40a070 } /* Literal.Number.Hex */
+.highlight .mi { color: #40a070 } /* Literal.Number.Integer */
+.highlight .mo { color: #40a070 } /* Literal.Number.Oct */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .il { color: #40a070 } /* Literal.Number.Integer.Long */ \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/scrn1.png b/python/mock-1.0.0/html/_static/scrn1.png
new file mode 100644
index 000000000..6499b3cf7
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/scrn1.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/scrn2.png b/python/mock-1.0.0/html/_static/scrn2.png
new file mode 100644
index 000000000..2a60215d0
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/scrn2.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/searchfield_leftcap.png b/python/mock-1.0.0/html/_static/searchfield_leftcap.png
new file mode 100644
index 000000000..cc00c22b0
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/searchfield_leftcap.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/searchfield_repeat.png b/python/mock-1.0.0/html/_static/searchfield_repeat.png
new file mode 100644
index 000000000..b429a16ba
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/searchfield_repeat.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/searchfield_rightcap.png b/python/mock-1.0.0/html/_static/searchfield_rightcap.png
new file mode 100644
index 000000000..8e13620ec
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/searchfield_rightcap.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/searchtools.js b/python/mock-1.0.0/html/_static/searchtools.js
new file mode 100644
index 000000000..663be4c90
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/searchtools.js
@@ -0,0 +1,560 @@
+/*
+ * searchtools.js_t
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words, hlwords is the list of normal, unstemmed
+ * words. the former is used to find the occurrence, the
+ * latter for highlighting it.
+ */
+
+jQuery.makeSearchSummary = function(text, keywords, hlwords) {
+ var textLower = text.toLowerCase();
+ var start = 0;
+ $.each(keywords, function() {
+ var i = textLower.indexOf(this.toLowerCase());
+ if (i > -1)
+ start = i;
+ });
+ start = Math.max(start - 120, 0);
+ var excerpt = ((start > 0) ? '...' : '') +
+ $.trim(text.substr(start, 240)) +
+ ((start + 240 < text.length) ? '...' : '');
+ var rv = $('<div class="context"></div>').text(excerpt);
+ $.each(hlwords, function() {
+ rv = rv.highlightText(this, 'highlighted');
+ });
+ return rv;
+}
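+
+// A usage sketch (the sample string is hypothetical):
+//   $.makeSearchSummary('Mock is a library for testing in Python.',
+//                       ['mock'], ['mock'])
+// returns a <div class="context"> holding the trimmed excerpt, with each
+// occurrence of "mock" wrapped via the highlightText helper.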
+
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
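+
+// Canonical Porter examples: new Stemmer().stemWord('caresses')
+// yields 'caress' (step 1a) and stemWord('relational') yields
+// 'relat' (steps 2 and 5).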
+
+
+/**
+ * Search Module
+ */
+var Search = {
+
+ _index : null,
+ _queued_query : null,
+ _pulse_status : -1,
+
+ init : function() {
+ var params = $.getQueryParameters();
+ if (params.q) {
+ var query = params.q[0];
+ $('input[name="q"]')[0].value = query;
+ this.performSearch(query);
+ }
+ },
+
+ loadIndex : function(url) {
+ $.ajax({type: "GET", url: url, data: null, success: null,
+ dataType: "script", cache: true});
+ },
+
+ setIndex : function(index) {
+ var q;
+ this._index = index;
+ if ((q = this._queued_query) !== null) {
+ this._queued_query = null;
+ Search.query(q);
+ }
+ },
+
+ hasIndex : function() {
+ return this._index !== null;
+ },
+
+ deferQuery : function(query) {
+ this._queued_query = query;
+ },
+
+ stopPulse : function() {
+ this._pulse_status = 0;
+ },
+
+ startPulse : function() {
+ if (this._pulse_status >= 0)
+ return;
+ function pulse() {
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ var dotString = '';
+ for (var i = 0; i < Search._pulse_status; i++)
+ dotString += '.';
+ Search.dots.text(dotString);
+ if (Search._pulse_status > -1)
+ window.setTimeout(pulse, 500);
+ };
+ pulse();
+ },
+
+ /**
+ * perform a search for something
+ */
+ performSearch : function(query) {
+ // create the required interface elements
+ this.out = $('#search-results');
+ this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+ this.dots = $('<span></span>').appendTo(this.title);
+ this.status = $('<p style="display: none"></p>').appendTo(this.out);
+ this.output = $('<ul class="search"/>').appendTo(this.out);
+
+ $('#search-progress').text(_('Preparing search...'));
+ this.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (this.hasIndex())
+ this.query(query);
+ else
+ this.deferQuery(query);
+ },
+
+ query : function(query) {
+ var stopwords = ["and","then","into","it","as","are","in","if","for","no","there","their","was","is","be","to","that","but","they","not","such","with","by","a","on","these","of","will","this","near","the","or","at"];
+
+ // Stem the searchterms and add them to the correct list
+ var stemmer = new Stemmer();
+ var searchterms = [];
+ var excluded = [];
+ var hlterms = [];
+ var tmp = query.split(/\s+/);
+ var objectterms = [];
+ for (var i = 0; i < tmp.length; i++) {
+ if (tmp[i] != "") {
+ objectterms.push(tmp[i].toLowerCase());
+ }
+
+ if ($u.indexOf(stopwords, tmp[i]) != -1 || tmp[i].match(/^\d+$/) ||
+ tmp[i] == "") {
+ // skip this "word"
+ continue;
+ }
+ // stem the word
+ var word = stemmer.stemWord(tmp[i]).toLowerCase();
+ // select the correct list
+ if (word[0] == '-') {
+ var toAppend = excluded;
+ word = word.substr(1);
+ }
+ else {
+ var toAppend = searchterms;
+ hlterms.push(tmp[i].toLowerCase());
+ }
+ // only add if not already in the list
+ if (!$.contains(toAppend, word))
+ toAppend.push(word);
+ };
+ var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+ // console.debug('SEARCH: searching for:');
+ // console.info('required: ', searchterms);
+ // console.info('excluded: ', excluded);
+
+ // prepare search
+ var filenames = this._index.filenames;
+ var titles = this._index.titles;
+ var terms = this._index.terms;
+ var fileMap = {};
+ var files = null;
+ // different result priorities
+ var importantResults = [];
+ var objectResults = [];
+ var regularResults = [];
+ var unimportantResults = [];
+ $('#search-progress').empty();
+
+ // lookup as object
+ for (var i = 0; i < objectterms.length; i++) {
+ var others = [].concat(objectterms.slice(0,i),
+ objectterms.slice(i+1, objectterms.length))
+ var results = this.performObjectSearch(objectterms[i], others);
+ // Assume first word is most likely to be the object,
+ // other words more likely to be in description.
+ // Therefore put matches for earlier words first.
+ // (Results are eventually used in reverse order).
+ objectResults = results[0].concat(objectResults);
+ importantResults = results[1].concat(importantResults);
+ unimportantResults = results[2].concat(unimportantResults);
+ }
+
+ // perform the search on the required terms
+ for (var i = 0; i < searchterms.length; i++) {
+ var word = searchterms[i];
+ // no match but word was a required one
+ if ((files = terms[word]) == null)
+ break;
+ if (files.length == undefined) {
+ files = [files];
+ }
+ // create the mapping
+ for (var j = 0; j < files.length; j++) {
+ var file = files[j];
+ if (file in fileMap)
+ fileMap[file].push(word);
+ else
+ fileMap[file] = [word];
+ }
+ }
+
+ // now check if the files don't contain excluded terms
+ for (var file in fileMap) {
+ var valid = true;
+
+ // check if all requirements are matched
+ if (fileMap[file].length != searchterms.length)
+ continue;
+
+ // ensure that none of the excluded terms is in the
+ // search result.
+ for (var i = 0; i < excluded.length; i++) {
+ if (terms[excluded[i]] == file ||
+ $.contains(terms[excluded[i]] || [], file)) {
+ valid = false;
+ break;
+ }
+ }
+
+ // if we have still a valid result we can add it
+ // to the result list
+ if (valid)
+ regularResults.push([filenames[file], titles[file], '', null]);
+ }
+
+ // drop references so the index data can be garbage-collected
+ // while the result list is being retrieved
+ filenames = titles = terms = null;
+
+ // now sort the regular results descending by title
+ regularResults.sort(function(a, b) {
+ var left = a[1].toLowerCase();
+ var right = b[1].toLowerCase();
+ return (left > right) ? -1 : ((left < right) ? 1 : 0);
+ });
+
+ // combine all results
+ var results = unimportantResults.concat(regularResults)
+ .concat(objectResults).concat(importantResults);
+
+ // print the results
+ var resultCount = results.length;
+ function displayNextItem() {
+ // results left, load the summary and display it
+ if (results.length) {
+ var item = results.pop();
+ var listItem = $('<li style="display:none"></li>');
+ if (DOCUMENTATION_OPTIONS.FILE_SUFFIX == '') {
+ // dirhtml builder
+ var dirname = item[0] + '/';
+ if (dirname.match(/\/index\/$/)) {
+ dirname = dirname.substring(0, dirname.length-6);
+ } else if (dirname == 'index/') {
+ dirname = '';
+ }
+ listItem.append($('<a/>').attr('href',
+ DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+ highlightstring + item[2]).html(item[1]));
+ } else {
+ // normal html builders
+ listItem.append($('<a/>').attr('href',
+ item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+ highlightstring + item[2]).html(item[1]));
+ }
+ if (item[3]) {
+ listItem.append($('<span> (' + item[3] + ')</span>'));
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+ $.get(DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' +
+ item[0] + '.txt', function(data) {
+ if (data != '') {
+ listItem.append($.makeSearchSummary(data, searchterms, hlterms));
+ Search.output.append(listItem);
+ }
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }, "text");
+ } else {
+ // no source available, just display title
+ Search.output.append(listItem);
+ listItem.slideDown(5, function() {
+ displayNextItem();
+ });
+ }
+ }
+ // search finished, update title and status message
+ else {
+ Search.stopPulse();
+ Search.title.text(_('Search Results'));
+ if (!resultCount)
+ Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+ else
+ Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+ Search.status.fadeIn(500);
+ }
+ }
+ displayNextItem();
+ },
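+
+ // Example (the query string is hypothetical): query('assert -called')
+ // puts the stem 'assert' in the required list and, after stripping
+ // the leading '-', the stem 'call' in the excluded list, so only
+ // pages containing 'assert' and no form of 'call' are shown.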
+
+ performObjectSearch : function(object, otherterms) {
+ var filenames = this._index.filenames;
+ var objects = this._index.objects;
+ var objnames = this._index.objnames;
+ var titles = this._index.titles;
+
+ var importantResults = [];
+ var objectResults = [];
+ var unimportantResults = [];
+
+ for (var prefix in objects) {
+ for (var name in objects[prefix]) {
+ var fullname = (prefix ? prefix + '.' : '') + name;
+ if (fullname.toLowerCase().indexOf(object) > -1) {
+ var match = objects[prefix][name];
+ var objname = objnames[match[1]][2];
+ var title = titles[match[0]];
+ // If more than one term is searched for, we require the other
+ // words to be found in the name/title/description as well
+ if (otherterms.length > 0) {
+ var haystack = (prefix + ' ' + name + ' ' +
+ objname + ' ' + title).toLowerCase();
+ var allfound = true;
+ for (var i = 0; i < otherterms.length; i++) {
+ if (haystack.indexOf(otherterms[i]) == -1) {
+ allfound = false;
+ break;
+ }
+ }
+ if (!allfound) {
+ continue;
+ }
+ }
+ var descr = objname + _(', in ') + title;
+ var anchor = match[3];
+ if (anchor == '')
+ anchor = fullname;
+ else if (anchor == '-')
+ anchor = objnames[match[1]][1] + '-' + fullname;
+ var result = [filenames[match[0]], fullname, '#'+anchor, descr];
+ switch (match[2]) {
+ case 1: objectResults.push(result); break;
+ case 0: importantResults.push(result); break;
+ case 2: unimportantResults.push(result); break;
+ }
+ }
+ }
+ }
+
+ // sort results descending
+ objectResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ importantResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ unimportantResults.sort(function(a, b) {
+ return (a[1] > b[1]) ? -1 : ((a[1] < b[1]) ? 1 : 0);
+ });
+
+ return [importantResults, objectResults, unimportantResults]
+ }
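+
+ // Note: match[2] is the priority recorded in the search index
+ // (0 = important, 1 = regular object match, 2 = unimportant),
+ // which is why the switch above fills three separate lists.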
+}
+
+$(document).ready(function() {
+ Search.init();
+}); \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/sidebar.js b/python/mock-1.0.0/html/_static/sidebar.js
new file mode 100644
index 000000000..731851711
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/sidebar.js
@@ -0,0 +1,148 @@
+/*
+ * sidebar.js
+ * ~~~~~~~~~~
+ *
+ * This script makes the Sphinx sidebar collapsible.
+ *
+ * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds
+ * in .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton
+ * used to collapse and expand the sidebar.
+ *
+ * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden
+ * and the width of the sidebar and the margin-left of the document
+ * are decreased. When the sidebar is expanded the opposite happens.
+ * This script saves a per-browser/per-session cookie used to
+ * remember the sidebar state (collapsed or expanded) across pages.
+ * Once the browser is closed the cookie is deleted and the state
+ * is reset to the default (expanded).
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+$(function() {
+ // global elements used by the functions.
+ // the 'sidebarbutton' element is defined as global after its
+ // creation, in the add_sidebar_button function
+ var bodywrapper = $('.bodywrapper');
+ var sidebar = $('.sphinxsidebar');
+ var sidebarwrapper = $('.sphinxsidebarwrapper');
+
+ // original margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar expanded
+ var bw_margin_expanded = bodywrapper.css('margin-left');
+ var ssb_width_expanded = sidebar.width();
+
+ // margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar collapsed
+ var bw_margin_collapsed = '.8em';
+ var ssb_width_collapsed = '.8em';
+
+ // colors used by the current theme
+ var dark_color = $('.related').css('background-color');
+ var light_color = $('.document').css('background-color');
+
+ function sidebar_is_collapsed() {
+ return sidebarwrapper.is(':not(:visible)');
+ }
+
+ function toggle_sidebar() {
+ if (sidebar_is_collapsed())
+ expand_sidebar();
+ else
+ collapse_sidebar();
+ }
+
+ function collapse_sidebar() {
+ sidebarwrapper.hide();
+ sidebar.css('width', ssb_width_collapsed);
+ bodywrapper.css('margin-left', bw_margin_collapsed);
+ sidebarbutton.css({
+ 'margin-left': '0',
+ 'height': bodywrapper.height()
+ });
+ sidebarbutton.find('span').text('»');
+ sidebarbutton.attr('title', _('Expand sidebar'));
+ document.cookie = 'sidebar=collapsed';
+ }
+
+ function expand_sidebar() {
+ bodywrapper.css('margin-left', bw_margin_expanded);
+ sidebar.css('width', ssb_width_expanded);
+ sidebarwrapper.show();
+ sidebarbutton.css({
+ 'margin-left': ssb_width_expanded-12,
+ 'height': bodywrapper.height()
+ });
+ sidebarbutton.find('span').text('«');
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ document.cookie = 'sidebar=expanded';
+ }
+
+ function add_sidebar_button() {
+ sidebarwrapper.css({
+ 'float': 'left',
+ 'margin-right': '0',
+ 'width': ssb_width_expanded - 28
+ });
+ // create the button
+ sidebar.append(
+ '<div id="sidebarbutton"><span>&laquo;</span></div>'
+ );
+ var sidebarbutton = $('#sidebarbutton');
+ light_color = sidebarbutton.css('background-color');
+ // find the height of the viewport to center the '<<' in the page
+ var viewport_height;
+ if (window.innerHeight)
+ viewport_height = window.innerHeight;
+ else
+ viewport_height = $(window).height();
+ sidebarbutton.find('span').css({
+ 'display': 'block',
+ 'margin-top': (viewport_height - sidebar.position().top - 20) / 2
+ });
+
+ sidebarbutton.click(toggle_sidebar);
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ sidebarbutton.css({
+ 'color': '#FFFFFF',
+ 'border-left': '1px solid ' + dark_color,
+ 'font-size': '1.2em',
+ 'cursor': 'pointer',
+ 'height': bodywrapper.height(),
+ 'padding-top': '1px',
+ 'margin-left': ssb_width_expanded - 12
+ });
+
+ sidebarbutton.hover(
+ function () {
+ $(this).css('background-color', dark_color);
+ },
+ function () {
+ $(this).css('background-color', light_color);
+ }
+ );
+ }
+
+ function set_position_from_cookie() {
+ if (!document.cookie)
+ return;
+ var items = document.cookie.split(';');
+ for(var k=0; k<items.length; k++) {
+ var key_val = items[k].split('=');
+ var key = key_val[0];
+ if (key == 'sidebar') {
+ var value = key_val[1];
+ if ((value == 'collapsed') && (!sidebar_is_collapsed()))
+ collapse_sidebar();
+ else if ((value == 'expanded') && (sidebar_is_collapsed()))
+ expand_sidebar();
+ }
+ }
+ }
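+
+ // Example: with document.cookie == "sidebar=collapsed; lang=en"
+ // (the second cookie is hypothetical), the loop above finds the
+ // 'sidebar' entry and collapses the sidebar on page load.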
+
+ add_sidebar_button();
+ var sidebarbutton = $('#sidebarbutton');
+ set_position_from_cookie();
+});
diff --git a/python/mock-1.0.0/html/_static/title_background.png b/python/mock-1.0.0/html/_static/title_background.png
new file mode 100644
index 000000000..6fcd1cda8
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/title_background.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/toc.js b/python/mock-1.0.0/html/_static/toc.js
new file mode 100644
index 000000000..7b709785d
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/toc.js
@@ -0,0 +1,20 @@
+var TOC = {
+ load: function () {
+ $('#toc_button').click(TOC.toggle);
+ },
+
+ toggle: function () {
+ if ($('#sphinxsidebar').toggle().is(':hidden')) {
+ $('div.document').css('left', "0px");
+ $('#toc_button').removeClass("open");
+ } else {
+ $('div.document').css('left', "230px");
+ $('#toc_button').addClass("open");
+ }
+ return $('#sphinxsidebar');
+ }
+};
+
+$(document).ready(function () {
+ TOC.load();
+}); \ No newline at end of file
diff --git a/python/mock-1.0.0/html/_static/triangle_closed.png b/python/mock-1.0.0/html/_static/triangle_closed.png
new file mode 100644
index 000000000..1e7f7bba2
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/triangle_closed.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/triangle_left.png b/python/mock-1.0.0/html/_static/triangle_left.png
new file mode 100644
index 000000000..2d86be7df
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/triangle_left.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/triangle_open.png b/python/mock-1.0.0/html/_static/triangle_open.png
new file mode 100644
index 000000000..e5d3bfdad
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/triangle_open.png
Binary files differ
diff --git a/python/mock-1.0.0/html/_static/underscore.js b/python/mock-1.0.0/html/_static/underscore.js
new file mode 100644
index 000000000..5d8991434
--- /dev/null
+++ b/python/mock-1.0.0/html/_static/underscore.js
@@ -0,0 +1,23 @@
+// Underscore.js 0.5.5
+// (c) 2009 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the terms of the MIT license.
+// Portions of Underscore are inspired by or borrowed from Prototype.js,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore/
+(function(){var j=this,n=j._,i=function(a){this._wrapped=a},m=typeof StopIteration!=="undefined"?StopIteration:"__break__",b=j._=function(a){return new i(a)};if(typeof exports!=="undefined")exports._=b;var k=Array.prototype.slice,o=Array.prototype.unshift,p=Object.prototype.toString,q=Object.prototype.hasOwnProperty,r=Object.prototype.propertyIsEnumerable;b.VERSION="0.5.5";b.each=function(a,c,d){try{if(a.forEach)a.forEach(c,d);else if(b.isArray(a)||b.isArguments(a))for(var e=0,f=a.length;e<f;e++)c.call(d,
+a[e],e,a);else{var g=b.keys(a);f=g.length;for(e=0;e<f;e++)c.call(d,a[g[e]],g[e],a)}}catch(h){if(h!=m)throw h;}return a};b.map=function(a,c,d){if(a&&b.isFunction(a.map))return a.map(c,d);var e=[];b.each(a,function(f,g,h){e.push(c.call(d,f,g,h))});return e};b.reduce=function(a,c,d,e){if(a&&b.isFunction(a.reduce))return a.reduce(b.bind(d,e),c);b.each(a,function(f,g,h){c=d.call(e,c,f,g,h)});return c};b.reduceRight=function(a,c,d,e){if(a&&b.isFunction(a.reduceRight))return a.reduceRight(b.bind(d,e),c);
+var f=b.clone(b.toArray(a)).reverse();b.each(f,function(g,h){c=d.call(e,c,g,h,a)});return c};b.detect=function(a,c,d){var e;b.each(a,function(f,g,h){if(c.call(d,f,g,h)){e=f;b.breakLoop()}});return e};b.select=function(a,c,d){if(a&&b.isFunction(a.filter))return a.filter(c,d);var e=[];b.each(a,function(f,g,h){c.call(d,f,g,h)&&e.push(f)});return e};b.reject=function(a,c,d){var e=[];b.each(a,function(f,g,h){!c.call(d,f,g,h)&&e.push(f)});return e};b.all=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.every))return a.every(c,
+d);var e=true;b.each(a,function(f,g,h){(e=e&&c.call(d,f,g,h))||b.breakLoop()});return e};b.any=function(a,c,d){c=c||b.identity;if(a&&b.isFunction(a.some))return a.some(c,d);var e=false;b.each(a,function(f,g,h){if(e=c.call(d,f,g,h))b.breakLoop()});return e};b.include=function(a,c){if(b.isArray(a))return b.indexOf(a,c)!=-1;var d=false;b.each(a,function(e){if(d=e===c)b.breakLoop()});return d};b.invoke=function(a,c){var d=b.rest(arguments,2);return b.map(a,function(e){return(c?e[c]:e).apply(e,d)})};b.pluck=
+function(a,c){return b.map(a,function(d){return d[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);var e={computed:-Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g>=e.computed&&(e={value:f,computed:g})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);var e={computed:Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;g<e.computed&&(e={value:f,computed:g})});return e.value};b.sortBy=function(a,c,d){return b.pluck(b.map(a,
+function(e,f,g){return{value:e,criteria:c.call(d,e,f,g)}}).sort(function(e,f){e=e.criteria;f=f.criteria;return e<f?-1:e>f?1:0}),"value")};b.sortedIndex=function(a,c,d){d=d||b.identity;for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?(e=g+1):(f=g)}return e};b.toArray=function(a){if(!a)return[];if(a.toArray)return a.toArray();if(b.isArray(a))return a;if(b.isArguments(a))return k.call(a);return b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=function(a,c,d){return c&&!d?k.call(a,
+0,c):a[0]};b.rest=function(a,c,d){return k.call(a,b.isUndefined(c)||d?1:c)};b.last=function(a){return a[a.length-1]};b.compact=function(a){return b.select(a,function(c){return!!c})};b.flatten=function(a){return b.reduce(a,[],function(c,d){if(b.isArray(d))return c.concat(b.flatten(d));c.push(d);return c})};b.without=function(a){var c=b.rest(arguments);return b.select(a,function(d){return!b.include(c,d)})};b.uniq=function(a,c){return b.reduce(a,[],function(d,e,f){if(0==f||(c===true?b.last(d)!=e:!b.include(d,
+e)))d.push(e);return d})};b.intersect=function(a){var c=b.rest(arguments);return b.select(b.uniq(a),function(d){return b.all(c,function(e){return b.indexOf(e,d)>=0})})};b.zip=function(){for(var a=b.toArray(arguments),c=b.max(b.pluck(a,"length")),d=new Array(c),e=0;e<c;e++)d[e]=b.pluck(a,String(e));return d};b.indexOf=function(a,c){if(a.indexOf)return a.indexOf(c);for(var d=0,e=a.length;d<e;d++)if(a[d]===c)return d;return-1};b.lastIndexOf=function(a,c){if(a.lastIndexOf)return a.lastIndexOf(c);for(var d=
+a.length;d--;)if(a[d]===c)return d;return-1};b.range=function(a,c,d){var e=b.toArray(arguments),f=e.length<=1;a=f?0:e[0];c=f?e[0]:e[1];d=e[2]||1;e=Math.ceil((c-a)/d);if(e<=0)return[];e=new Array(e);f=a;for(var g=0;1;f+=d){if((d>0?f-c:c-f)>=0)return e;e[g++]=f}};b.bind=function(a,c){var d=b.rest(arguments,2);return function(){return a.apply(c||j,d.concat(b.toArray(arguments)))}};b.bindAll=function(a){var c=b.rest(arguments);if(c.length==0)c=b.functions(a);b.each(c,function(d){a[d]=b.bind(a[d],a)});
+return a};b.delay=function(a,c){var d=b.rest(arguments,2);return setTimeout(function(){return a.apply(a,d)},c)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(b.rest(arguments)))};b.wrap=function(a,c){return function(){var d=[a].concat(b.toArray(arguments));return c.apply(c,d)}};b.compose=function(){var a=b.toArray(arguments);return function(){for(var c=b.toArray(arguments),d=a.length-1;d>=0;d--)c=[a[d].apply(this,c)];return c[0]}};b.keys=function(a){if(b.isArray(a))return b.range(0,a.length);
+var c=[];for(var d in a)q.call(a,d)&&c.push(d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=function(a){return b.select(b.keys(a),function(c){return b.isFunction(a[c])}).sort()};b.extend=function(a,c){for(var d in c)a[d]=c[d];return a};b.clone=function(a){if(b.isArray(a))return a.slice(0);return b.extend({},a)};b.tap=function(a,c){c(a);return a};b.isEqual=function(a,c){if(a===c)return true;var d=typeof a;if(d!=typeof c)return false;if(a==c)return true;if(!a&&c||a&&!c)return false;
+if(a.isEqual)return a.isEqual(c);if(b.isDate(a)&&b.isDate(c))return a.getTime()===c.getTime();if(b.isNaN(a)&&b.isNaN(c))return true;if(b.isRegExp(a)&&b.isRegExp(c))return a.source===c.source&&a.global===c.global&&a.ignoreCase===c.ignoreCase&&a.multiline===c.multiline;if(d!=="object")return false;if(a.length&&a.length!==c.length)return false;d=b.keys(a);var e=b.keys(c);if(d.length!=e.length)return false;for(var f in a)if(!b.isEqual(a[f],c[f]))return false;return true};b.isEmpty=function(a){return b.keys(a).length==
+0};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=function(a){return!!(a&&a.concat&&a.unshift)};b.isArguments=function(a){return a&&b.isNumber(a.length)&&!b.isArray(a)&&!r.call(a,"length")};b.isFunction=function(a){return!!(a&&a.constructor&&a.call&&a.apply)};b.isString=function(a){return!!(a===""||a&&a.charCodeAt&&a.substr)};b.isNumber=function(a){return p.call(a)==="[object Number]"};b.isDate=function(a){return!!(a&&a.getTimezoneOffset&&a.setUTCFullYear)};b.isRegExp=function(a){return!!(a&&
+a.test&&a.exec&&(a.ignoreCase||a.ignoreCase===false))};b.isNaN=function(a){return b.isNumber(a)&&isNaN(a)};b.isNull=function(a){return a===null};b.isUndefined=function(a){return typeof a=="undefined"};b.noConflict=function(){j._=n;return this};b.identity=function(a){return a};b.breakLoop=function(){throw m;};var s=0;b.uniqueId=function(a){var c=s++;return a?a+c:c};b.template=function(a,c){a=new Function("obj","var p=[],print=function(){p.push.apply(p,arguments);};with(obj){p.push('"+a.replace(/[\r\t\n]/g,
+" ").replace(/'(?=[^%]*%>)/g,"\t").split("'").join("\\'").split("\t").join("'").replace(/<%=(.+?)%>/g,"',$1,'").split("<%").join("');").split("%>").join("p.push('")+"');}return p.join('');");return c?a(c):a};b.forEach=b.each;b.foldl=b.inject=b.reduce;b.foldr=b.reduceRight;b.filter=b.select;b.every=b.all;b.some=b.any;b.head=b.first;b.tail=b.rest;b.methods=b.functions;var l=function(a,c){return c?b(a).chain():a};b.each(b.functions(b),function(a){var c=b[a];i.prototype[a]=function(){var d=b.toArray(arguments);
+o.call(d,this._wrapped);return l(c.apply(b,d),this._chain)}});b.each(["pop","push","reverse","shift","sort","splice","unshift"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){c.apply(this._wrapped,arguments);return l(this._wrapped,this._chain)}});b.each(["concat","join","slice"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){return l(c.apply(this._wrapped,arguments),this._chain)}});i.prototype.chain=function(){this._chain=true;return this};i.prototype.value=function(){return this._wrapped}})();
diff --git a/python/mock-1.0.0/html/changelog.html b/python/mock-1.0.0/html/changelog.html
new file mode 100644
index 000000000..c4b935d2c
--- /dev/null
+++ b/python/mock-1.0.0/html/changelog.html
@@ -0,0 +1,839 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>CHANGELOG &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="prev" title="Mock Library Comparison" href="compare.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="changelog">
+<h1>CHANGELOG<a class="headerlink" href="#changelog" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="version-1-0-0">
+<h2>2012/10/07 Version 1.0.0<a class="headerlink" href="#version-1-0-0" title="Permalink to this headline">¶</a></h2>
+<p>No changes since 1.0.0 beta 1. This version has feature parity with
+<a class="reference external" href="http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock">unittest.mock</a>
+in Python 3.3.</p>
+<p>Full list of changes since 0.8:</p>
+<ul class="simple">
+<li><cite>mocksignature</cite>, along with the <cite>mocksignature</cite> argument to <cite>patch</cite>, removed</li>
+<li>Support for deleting attributes (accessing deleted attributes will raise an
+<cite>AttributeError</cite>)</li>
+<li>Added the <cite>mock_open</cite> helper function for mocking the builtin <cite>open</cite> (see the sketch after this list)</li>
+<li><cite>__class__</cite> is assignable, so a mock can pass an <cite>isinstance</cite> check without
+requiring a spec</li>
+<li>Addition of <cite>PropertyMock</cite>, for mocking properties</li>
+<li><cite>MagicMocks</cite> made unorderable by default (in Python 3). The comparison
+methods (other than equality and inequality) now return <cite>NotImplemented</cite></li>
+<li>Propagate traceback info to support subclassing of <cite>_patch</cite> by other
+libraries</li>
+<li><cite>create_autospec</cite> works with attributes present in results of <cite>dir</cite> that
+can&#8217;t be fetched from the object&#8217;s class. Contributed by Konstantine Rybnikov</li>
+<li>Any exceptions in an iterable <cite>side_effect</cite> will be raised instead of
+returned</li>
+<li>In Python 3, <cite>create_autospec</cite> now supports keyword only arguments</li>
+<li>Added <cite>patch.stopall</cite> method to stop all active patches created by <cite>start</cite></li>
+<li>BUGFIX: calling <cite>MagicMock.reset_mock</cite> wouldn&#8217;t reset magic method mocks</li>
+<li>BUGFIX: calling <cite>reset_mock</cite> on a <cite>MagicMock</cite> created with autospec could
+raise an exception</li>
+<li>BUGFIX: passing multiple spec arguments to patchers (<cite>spec</cite>, <cite>spec_set</cite> and
+<cite>autospec</cite>) had unpredictable results; now it is an error</li>
+<li>BUGFIX: using <cite>spec=True</cite> <em>and</em> <cite>create=True</cite> as arguments to patchers could
+result in using <cite>DEFAULT</cite> as the spec. Now it is an error instead</li>
+<li>BUGFIX: using <cite>spec</cite> or <cite>autospec</cite> arguments to patchers, along with
+<cite>spec_set=True</cite> did not work correctly</li>
+<li>BUGFIX: using an object that evaluates to False as a spec could be ignored</li>
+<li>BUGFIX: a list as the <cite>spec</cite> argument to a patcher would always result in a
+non-callable mock. Now if <cite>__call__</cite> is in the spec the mock is callable</li>
+</ul>
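+<p>As a minimal sketch of the <cite>mock_open</cite> helper listed above (assuming the
+standalone <cite>mock</cite> package; with Python 3.3 the same names live in
+<cite>unittest.mock</cite>, and the module and file names here are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import mock_open, patch
+
+m = mock_open(read_data='some data')   # read() on the handle returns read_data
+with patch('__main__.open', m, create=True):
+    with open('notes.txt') as f:
+        result = f.read()
+m.assert_called_once_with('notes.txt')
+assert result == 'some data'
+</pre></div></div>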
+</div>
+<div class="section" id="version-1-0-0-beta-1">
+<h2>2012/07/13 Version 1.0.0 beta 1<a class="headerlink" href="#version-1-0-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Added <cite>patch.stopall</cite> method to stop all active patches created by <cite>start</cite> (see the sketch after this list)</li>
+<li>BUGFIX: calling <cite>MagicMock.reset_mock</cite> wouldn&#8217;t reset magic method mocks</li>
+<li>BUGFIX: calling <cite>reset_mock</cite> on a <cite>MagicMock</cite> created with autospec could
+raise an exception</li>
+</ul>
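+<p>A sketch of <cite>patch.stopall</cite> with <cite>start</cite> (the patch target and
+return value are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>import os
+from mock import patch
+
+patcher = patch('os.getcwd', return_value='/illustrative')
+patcher.start()              # activate without a with block or decorator
+assert os.getcwd() == '/illustrative'
+patch.stopall()              # stops every patcher begun with start()
+</pre></div></div>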
+</div>
+<div class="section" id="version-1-0-0-alpha-2">
+<h2>2012/05/04 Version 1.0.0 alpha 2<a class="headerlink" href="#version-1-0-0-alpha-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>PropertyMock</cite> attributes are now standard <cite>MagicMocks</cite></li>
+<li><cite>create_autospec</cite> works with attributes present in results of <cite>dir</cite> that
+can&#8217;t be fetched from the object&#8217;s class. Contributed by Konstantine Rybnikov</li>
+<li>Any exceptions in an iterable <cite>side_effect</cite> will be raised instead of
+returned (see the sketch after this list)</li>
+<li>In Python 3, <cite>create_autospec</cite> now supports keyword only arguments</li>
+</ul>
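+<p>A sketch of the iterable <cite>side_effect</cite> behaviour (the values are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+m = Mock(side_effect=[1, ValueError('boom'), 3])
+assert m() == 1
+try:
+    m()                      # the exception instance is raised, not returned
+except ValueError:
+    pass
+assert m() == 3
+</pre></div></div>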
+</div>
+<div class="section" id="version-1-0-0-alpha-1">
+<h2>2012/03/25 Version 1.0.0 alpha 1<a class="headerlink" href="#version-1-0-0-alpha-1" title="Permalink to this headline">¶</a></h2>
+<p>The standard library version!</p>
+<ul class="simple">
+<li><cite>mocksignature</cite>, along with the <cite>mocksignature</cite> argument to <cite>patch</cite>, removed</li>
+<li>Support for deleting attributes (accessing deleted attributes will raise an
+<cite>AttributeError</cite>)</li>
+<li>Added the <cite>mock_open</cite> helper function for mocking the builtin <cite>open</cite></li>
+<li><cite>__class__</cite> is assignable, so a mock can pass an <cite>isinstance</cite> check without
+requiring a spec</li>
+<li>Addition of <cite>PropertyMock</cite>, for mocking properties (see the sketch after this list)</li>
+<li><cite>MagicMocks</cite> made unorderable by default (in Python 3). The comparison
+methods (other than equality and inequality) now return <cite>NotImplemented</cite></li>
+<li>Propagate traceback info to support subclassing of <cite>_patch</cite> by other
+libraries</li>
+<li>BUGFIX: passing multiple spec arguments to patchers (<cite>spec</cite>, <cite>spec_set</cite> and
+<cite>autospec</cite>) had unpredictable results; now it is an error</li>
+<li>BUGFIX: using <cite>spec=True</cite> <em>and</em> <cite>create=True</cite> as arguments to patchers could
+result in using <cite>DEFAULT</cite> as the spec. Now it is an error instead</li>
+<li>BUGFIX: using <cite>spec</cite> or <cite>autospec</cite> arguments to patchers, along with
+<cite>spec_set=True</cite> did not work correctly</li>
+<li>BUGFIX: using an object that evaluates to False as a spec could be ignored</li>
+<li>BUGFIX: a list as the <cite>spec</cite> argument to a patcher would always result in a
+non-callable mock. Now if <cite>__call__</cite> is in the spec the mock is callable</li>
+</ul>
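+<p>A sketch of <cite>PropertyMock</cite> (the attribute name <cite>foo</cite> is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import MagicMock, PropertyMock
+
+m = MagicMock()
+prop = PropertyMock(return_value='fetched')
+type(m).foo = prop           # properties live on the type, not the instance
+assert m.foo == 'fetched'
+prop.assert_called_once_with()
+</pre></div></div>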
+</div>
+<div class="section" id="version-0-8-0">
+<h2>2012/02/13 Version 0.8.0<a class="headerlink" href="#version-0-8-0" title="Permalink to this headline">¶</a></h2>
+<p>The only changes since 0.8rc2 are:</p>
+<ul class="simple">
+<li>Improved repr of <a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-data docutils literal"><span class="pre">sentinel</span></tt></a> objects</li>
+<li><a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a> can be used for comparisons against <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects</li>
+<li>The return value of <cite>MagicMock.__iter__</cite> method can be set to
+any iterable and isn&#8217;t required to be an iterator</li>
+</ul>
+<p>mock 0.8.0 is the last version that will support Python 2.4.</p>
+<p>Full list of changes since 0.7:</p>
+<ul class="simple">
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> list for <em>all</em> calls (including magic
+methods and chained calls)</li>
+<li><a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> and <a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a> now create a <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>
+instead of a <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> by default</li>
+<li>The patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>), plus <cite>Mock</cite> and
+<cite>MagicMock</cite>, take arbitrary keyword arguments for configuration</li>
+<li>New mock method <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> for setting attributes and
+return values / side effects on the mock and its attributes</li>
+<li>New mock assert methods <a class="reference internal" href="mock.html#mock.Mock.assert_any_call" title="mock.Mock.assert_any_call"><tt class="xref py py-meth docutils literal"><span class="pre">assert_any_call()</span></tt></a> and
+<a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a></li>
+<li>Implemented <a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a> (recursive, lazy speccing of mocks with
+mocked signatures for functions/methods), as the <cite>autospec</cite> argument to
+<cite>patch</cite></li>
+<li>Added the <a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function for manually creating
+&#8216;auto-specced&#8217; mocks (see the sketch after this list)</li>
+<li><a class="reference internal" href="patch.html#mock.patch.multiple" title="mock.patch.multiple"><tt class="xref py py-func docutils literal"><span class="pre">patch.multiple()</span></tt></a> for doing multiple patches in a single call, using
+keyword arguments</li>
+<li>Setting <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> to an iterable will cause calls to the mock
+to return the next value from the iterable</li>
+<li>New <cite>new_callable</cite> argument to <cite>patch</cite> and <cite>patch.object</cite> allowing you to
+pass in a class or callable object (instead of <cite>MagicMock</cite>) that will be
+called to replace the object being patched</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.NonCallableMagicMock" title="mock.NonCallableMagicMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMagicMock</span></tt></a>, mocks
+without a <cite>__call__</cite> method</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.mock_add_spec" title="mock.Mock.mock_add_spec"><tt class="xref py py-meth docutils literal"><span class="pre">mock_add_spec()</span></tt></a> method for adding (or changing) a
+spec on an existing mock</li>
+<li>Protocol methods on <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> are magic mocks, and are created
+lazily on first lookup. This means the result of calling a protocol method is
+a <cite>MagicMock</cite> instead of a <cite>Mock</cite> as it was previously</li>
+<li>Addition of <a class="reference internal" href="mock.html#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a> method</li>
+<li>Added <a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a> for ignoring arguments in <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a>
+calls</li>
+<li>Addition of <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> helper object</li>
+<li>Improved repr for mocks</li>
+<li>Improved repr for <a class="reference internal" href="mock.html#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.call_args</span></tt></a> and entries in
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.call_args_list</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.method_calls</span></tt></a> and
+<a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">Mock.mock_calls</span></tt></a></li>
+<li>Improved repr for <a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-data docutils literal"><span class="pre">sentinel</span></tt></a> objects</li>
+<li><cite>patch</cite> lookup is done at use time, not at decoration time</li>
+<li>In Python 2.6 or more recent, <cite>dir</cite> on a mock will report all the dynamically
+created attributes (or the full list of attributes if there is a spec) as
+well as all the mock methods and attributes.</li>
+<li>Module level <a class="reference internal" href="helpers.html#mock.FILTER_DIR" title="mock.FILTER_DIR"><tt class="xref py py-data docutils literal"><span class="pre">FILTER_DIR</span></tt></a> added to control whether <cite>dir(mock)</cite> filters
+private attributes. <cite>True</cite> by default.</li>
+<li><cite>patch.TEST_PREFIX</cite> for controlling how patchers recognise test methods when
+used to decorate a class</li>
+<li>Support for using Java exceptions as a <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> on Jython</li>
+<li><cite>Mock</cite> call lists (<cite>call_args_list</cite>, <cite>method_calls</cite> &amp; <cite>mock_calls</cite>) are now
+custom list objects that allow membership tests for &#8220;sub lists&#8221; and have
+a nicer representation if you <cite>str</cite> or <cite>print</cite> them</li>
+<li>Mocks attached as attributes or return values to other mocks have calls
+recorded in <cite>method_calls</cite> and <cite>mock_calls</cite> of the parent (unless a name is
+already set on the child)</li>
+<li>Improved failure messages for <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite></li>
+<li>The return value of the <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> <cite>__iter__</cite> method can be set to
+any iterable and isn&#8217;t required to be an iterator</li>
+<li>Added the Mock API (<cite>assert_called_with</cite> etc) to functions created by
+<tt class="xref py py-func docutils literal"><span class="pre">mocksignature()</span></tt></li>
+<li>Tuples as well as lists can be used to specify allowed methods for <cite>spec</cite> &amp;
+<cite>spec_set</cite> arguments</li>
+<li>Calling <cite>stop</cite> on an unstarted patcher fails with a more meaningful error
+message</li>
+<li>Renamed the internal classes <cite>Sentinel</cite> and <cite>SentinelObject</cite> to prevent abuse</li>
+<li>BUGFIX: an error creating a patch, with nested patch decorators, won&#8217;t leave
+patches in place</li>
+<li>BUGFIX: <cite>__truediv__</cite> and <cite>__rtruediv__</cite> not available as magic methods on
+mocks in Python 3</li>
+<li>BUGFIX: <cite>assert_called_with</cite> / <cite>assert_called_once_with</cite> can be used with
+<cite>self</cite> as a keyword argument</li>
+<li>BUGFIX: when patching a class with an explicit spec / spec_set (not a
+boolean) it applies &#8220;spec inheritance&#8221; to the return value of the created
+mock (the &#8220;instance&#8221;)</li>
+<li>BUGFIX: remove the <cite>__unittest</cite> marker causing traceback truncation</li>
+<li>Removal of deprecated <cite>patch_object</cite></li>
+<li>Private attributes <cite>_name</cite>, <cite>_methods</cite>, <cite>_children</cite>, <cite>_wraps</cite> and <cite>_parent</cite>
+(etc) renamed to reduce likelihood of clash with user attributes.</li>
+<li>Added license file to the distribution</li>
+</ul>
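+<p>A sketch of <cite>create_autospec</cite> (the <cite>add</cite> function is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import create_autospec
+
+def add(a, b):
+    return a + b
+
+mock_add = create_autospec(add, return_value=5)
+assert mock_add(2, 3) == 5          # matching the real signature is fine
+mock_add.assert_called_once_with(2, 3)
+try:
+    mock_add(2, 3, 4)               # the real signature is enforced
+except TypeError:
+    pass
+</pre></div></div>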
+</div>
+<div class="section" id="version-0-8-0-release-candidate-2">
+<h2>2012/01/10 Version 0.8.0 release candidate 2<a class="headerlink" href="#version-0-8-0-release-candidate-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Removed the <cite>configure</cite> keyword argument to <cite>create_autospec</cite> and allow
+arbitrary keyword arguments (for the <cite>Mock</cite> constructor) instead</li>
+<li>Fixed <cite>ANY</cite> equality with some types in <cite>assert_called_with</cite> calls</li>
+<li>Switched to a standard Sphinx theme (compatible with
+<a class="reference external" href="http://mock.readthedocs.org">readthedocs.org</a>)</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-release-candidate-1">
+<h2>2011/12/29 Version 0.8.0 release candidate 1<a class="headerlink" href="#version-0-8-0-release-candidate-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>create_autospec</cite> on the return value of a mocked class will use <cite>__call__</cite>
+for the signature rather than <cite>__init__</cite></li>
+<li>Performance improvement instantiating <cite>Mock</cite> and <cite>MagicMock</cite></li>
+<li>Mocks used as magic methods have the same type as their parent instead of
+being hardcoded to <cite>MagicMock</cite></li>
+</ul>
+<p>Special thanks to Julian Berman for his help with diagnosing and improving
+performance in this release.</p>
+</div>
+<div class="section" id="version-0-8-0-beta-4">
+<h2>2011/10/09 Version 0.8.0 beta 4<a class="headerlink" href="#version-0-8-0-beta-4" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch</cite> lookup is done at use time, not at decoration time</li>
+<li>When attaching a Mock to another Mock as a magic method, calls are recorded
+in mock_calls</li>
+<li>Addition of <cite>attach_mock</cite> method (see the sketch after this list)</li>
+<li>Renamed the internal classes <cite>Sentinel</cite> and <cite>SentinelObject</cite> to prevent abuse</li>
+<li>BUGFIX: various issues around circular references with mocks (setting a mock
+return value to be itself etc)</li>
+</ul>
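+<p>A sketch of <cite>attach_mock</cite> and call recording on the parent (the child
+name is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import MagicMock, call
+
+parent = MagicMock()
+child = MagicMock(return_value=None)
+parent.attach_mock(child, 'child')  # child calls are now recorded on the parent
+parent.child(1)
+assert parent.mock_calls == [call.child(1)]
+</pre></div></div>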
+</div>
+<div class="section" id="version-0-8-0-beta-3">
+<h2>2011/08/15 Version 0.8.0 beta 3<a class="headerlink" href="#version-0-8-0-beta-3" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Mocks attached as attributes or return values to other mocks have calls
+recorded in <cite>method_calls</cite> and <cite>mock_calls</cite> of the parent (unless a name is
+already set on the child)</li>
+<li>Addition of <cite>mock_add_spec</cite> method for adding (or changing) a spec on an
+existing mock</li>
+<li>Improved repr for <cite>Mock.call_args</cite> and entries in <cite>Mock.call_args_list</cite>,
+<cite>Mock.method_calls</cite> and <cite>Mock.mock_calls</cite></li>
+<li>Improved repr for mocks</li>
+<li>BUGFIX: minor fixes in the way <cite>mock_calls</cite> is worked out,
+especially for &#8220;intermediate&#8221; mocks in a call chain</li>
+</ul>
+</div>
+<div class="section" id="version-0-8-0-beta-2">
+<h2>2011/08/05 Version 0.8.0 beta 2<a class="headerlink" href="#version-0-8-0-beta-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Setting <cite>side_effect</cite> to an iterable will cause calls to the mock to return
+the next value from the iterable</li>
+<li>Added <cite>assert_any_call</cite> method (see the sketch after this list)</li>
+<li>Moved <cite>assert_has_calls</cite> from call lists onto mocks</li>
+<li>BUGFIX: <cite>call_args</cite> and all members of <cite>call_args_list</cite> are two-tuples of
+<cite>(args, kwargs)</cite> again instead of three-tuples of <cite>(name, args, kwargs)</cite></li>
+</ul>
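+<p>A sketch of <cite>assert_any_call</cite> and the two-tuple <cite>call_args</cite>
+(the arguments are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+m = Mock()
+m(1, x=2)
+m(3)
+m.assert_any_call(1, x=2)       # passes if any recorded call matches
+args, kwargs = m.call_args      # two-tuple describing the most recent call
+assert args == (3,) and kwargs == {}
+</pre></div></div>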
+</div>
+<div class="section" id="version-0-8-0-beta-1">
+<h2>2011/07/25 Version 0.8.0 beta 1<a class="headerlink" href="#version-0-8-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch.TEST_PREFIX</cite> for controlling how patchers recognise test methods when
+used to decorate a class</li>
+<li><cite>Mock</cite> call lists (<cite>call_args_list</cite>, <cite>method_calls</cite> &amp; <cite>mock_calls</cite>) are now
+custom list objects that allow membership tests for &#8220;sub lists&#8221; and have
+an <cite>assert_has_calls</cite> method for unordered call checks</li>
+<li><cite>callargs</cite> changed to <em>always</em> be a three-tuple of <cite>(name, args, kwargs)</cite></li>
+<li>Addition of <cite>mock_calls</cite> list for <em>all</em> calls (including magic methods and
+chained calls)</li>
+<li>Extension of <cite>call</cite> object to support chained calls and <cite>callargs</cite> for better
+comparisons with or without names. <cite>call</cite> object has a <cite>call_list</cite> method for
+chained calls (see the sketch after this list)</li>
+<li>Added the public <cite>instance</cite> argument to <cite>create_autospec</cite></li>
+<li>Support for using Java exceptions as a <cite>side_effect</cite> on Jython</li>
+<li>Improved failure messages for <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite></li>
+<li>Tuples as well as lists can be used to specify allowed methods for <cite>spec</cite> &amp;
+<cite>spec_set</cite> arguments</li>
+<li>BUGFIX: Fixed bug in <cite>patch.multiple</cite> for argument passing when creating
+mocks</li>
+<li>Added license file to the distribution</li>
+</ul>
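+<p>A sketch of the extended <cite>call</cite> object and <cite>call_list</cite> (the method
+names are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import MagicMock, call
+
+m = MagicMock()
+m.factory(1).build(2)
+# call_list() expands a chained call into every intermediate call
+assert m.mock_calls == call.factory(1).build(2).call_list()
+</pre></div></div>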
+</div>
+<div class="section" id="version-0-8-0-alpha-2">
+<h2>2011/07/16 Version 0.8.0 alpha 2<a class="headerlink" href="#version-0-8-0-alpha-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><cite>patch.multiple</cite> for doing multiple patches in a single call, using keyword
+arguments (see the sketch after this list)</li>
+<li>New <cite>new_callable</cite> argument to <cite>patch</cite> and <cite>patch.object</cite> allowing you to
+pass in a class or callable object (instead of <cite>MagicMock</cite>) that will be
+called to replace the object being patched</li>
+<li>Addition of <cite>NonCallableMock</cite> and <cite>NonCallableMagicMock</cite>, mocks without a
+<cite>__call__</cite> method</li>
+<li>Mocks created by <cite>patch</cite> have a <cite>MagicMock</cite> as the <cite>return_value</cite> where a
+class is being patched</li>
+<li><cite>create_autospec</cite> can create non-callable mocks for non-callable objects.
+<cite>return_value</cite> mocks of classes will be non-callable unless the class has
+a <cite>__call__</cite> method</li>
+<li><cite>autospec</cite> creates a <cite>MagicMock</cite> without a spec for properties and slot
+descriptors, because we don&#8217;t know the type of object they return</li>
+<li>Removed the &#8220;inherit&#8221; argument from <cite>create_autospec</cite></li>
+<li>Calling <cite>stop</cite> on an unstarted patcher fails with a more meaningful error
+message</li>
+<li>BUGFIX: an error creating a patch, with nested patch decorators, won&#8217;t leave
+patches in place</li>
+<li>BUGFIX: <cite>__truediv__</cite> and <cite>__rtruediv__</cite> not available as magic methods on
+mocks in Python 3</li>
+<li>BUGFIX: <cite>assert_called_with</cite> / <cite>assert_called_once_with</cite> can be used with
+<cite>self</cite> as a keyword argument</li>
+<li>BUGFIX: autospec for functions / methods with an argument named self that
+isn&#8217;t the first argument no longer broken</li>
+<li>BUGFIX: when patching a class with an explicit spec / spec_set (not a
+boolean) it applies &#8220;spec inheritance&#8221; to the return value of the created
+mock (the &#8220;instance&#8221;)</li>
+<li>BUGFIX: remove the <cite>__unittest</cite> marker causing traceback truncation</li>
+</ul>
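+<p>A sketch of <cite>patch.multiple</cite>, using <cite>DEFAULT</cite> placeholders so the
+patcher creates the mocks (the targets are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>import os.path
+from mock import patch, DEFAULT
+
+with patch.multiple('os.path', exists=DEFAULT, isdir=DEFAULT) as mocks:
+    mocks['exists'].return_value = True
+    mocks['isdir'].return_value = False
+    assert os.path.exists('/illustrative')
+    assert not os.path.isdir('/illustrative')
+</pre></div></div>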
+</div>
+<div class="section" id="version-0-8-0-alpha-1">
+<h2>2011/06/14 Version 0.8.0 alpha 1<a class="headerlink" href="#version-0-8-0-alpha-1" title="Permalink to this headline">¶</a></h2>
+<p>mock 0.8.0 is the last version that will support Python 2.4.</p>
+<ul>
+<li><p class="first">The patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>), plus <cite>Mock</cite> and
+<cite>MagicMock</cite>, take arbitrary keyword arguments for configuration</p>
+</li>
+<li><p class="first">New mock method <cite>configure_mock</cite> for setting attributes and return values /
+side effects on the mock and its attributes</p>
+</li>
+<li><p class="first">In Python 2.6 or more recent, <cite>dir</cite> on a mock will report all the dynamically
+created attributes (or the full list of attributes if there is a spec) as
+well as all the mock methods and attributes.</p>
+</li>
+<li><p class="first">Module level <cite>FILTER_DIR</cite> added to control whether <cite>dir(mock)</cite> filters
+private attributes. <cite>True</cite> by default. Note that <cite>vars(Mock())</cite> can still be
+used to get all instance attributes and <cite>dir(type(Mock()))</cite> will still return
+all the other attributes (irrespective of <cite>FILTER_DIR</cite>)</p>
+</li>
+<li><p class="first"><cite>patch</cite> and <cite>patch.object</cite> now create a <cite>MagicMock</cite> instead of a <cite>Mock</cite> by
+default</p>
+</li>
+<li><p class="first">Added <cite>ANY</cite> for ignoring arguments in <cite>assert_called_with</cite> calls</p>
+</li>
+<li><p class="first">Addition of <cite>call</cite> helper object</p>
+</li>
+<li><p class="first">Protocol methods on <cite>MagicMock</cite> are magic mocks, and are created lazily on
+first lookup. This means the result of calling a protocol method is a
+MagicMock instead of a Mock as it was previously</p>
+</li>
+<li><p class="first">Added the Mock API (<cite>assert_called_with</cite> etc) to functions created by
+<cite>mocksignature</cite></p>
+</li>
+<li><p class="first">Private attributes <cite>_name</cite>, <cite>_methods</cite>, &#8216;_children&#8217;, <cite>_wraps</cite> and <cite>_parent</cite>
+(etc) renamed to reduce likelihood of clash with user attributes.</p>
+</li>
+<li><p class="first">Implemented auto-speccing (recursive, lazy speccing of mocks with mocked
+signatures for functions/methods)</p>
+<p>Limitations:</p>
+<ul class="simple">
+<li>Doesn&#8217;t mock magic methods or attributes (it creates MagicMocks, so the
+magic methods are <em>there</em>, they just don&#8217;t have the signature mocked nor
+are attributes followed)</li>
+<li>Doesn&#8217;t mock function / method attributes</li>
+<li>Uses object traversal on the objects being mocked to determine types - so
+properties etc may be triggered</li>
+<li>The return value of mocked classes (the &#8216;instance&#8217;) has the same call
+signature as the class __init__ (as they share the same spec)</li>
+</ul>
+<p>You create auto-specced mocks by passing <cite>autospec=True</cite> to <cite>patch</cite> (see the sketch after this list).</p>
+<p>Note that attributes that are None are special cased and mocked without a
+spec (so any attribute / method can be used). This is because None is
+typically used as a default value for attributes that may be of some other
+type, and as we don&#8217;t know what type that may be we allow all access.</p>
+<p>Note that the <cite>autospec</cite> option to <cite>patch</cite> obsoletes the <cite>mocksignature</cite>
+option.</p>
+</li>
+<li><p class="first">Added the <cite>create_autospec</cite> function for manually creating &#8216;auto-specced&#8217;
+mocks</p>
+</li>
+<li><p class="first">Removal of deprecated <cite>patch_object</cite></p>
+</li>
+</ul>
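+<p>A sketch of auto-speccing through <cite>patch</cite> (the target is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>import os.path
+from mock import patch
+
+with patch('os.path.join', autospec=True) as mock_join:
+    os.path.join('a', 'b')
+    mock_join.assert_called_once_with('a', 'b')
+    try:
+        os.path.join()              # the mocked signature is enforced
+    except TypeError:
+        pass
+</pre></div></div>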
+</div>
+<div class="section" id="version-0-7-2">
+<h2>2011/05/30 Version 0.7.2<a class="headerlink" href="#version-0-7-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>BUGFIX: instances of list subclasses can now be used as mock specs</li>
+<li>BUGFIX: MagicMock equality / inequality protocol methods changed to use the
+default equality / inequality. This is done through a <cite>side_effect</cite> on
+the mocks used for <cite>__eq__</cite> / <cite>__ne__</cite></li>
+</ul>
+</div>
+<div class="section" id="version-0-7-1">
+<h2>2011/05/06 Version 0.7.1<a class="headerlink" href="#version-0-7-1" title="Permalink to this headline">¶</a></h2>
+<p>Package fixes contributed by Michael Fladischer. No code changes.</p>
+<ul class="simple">
+<li>Include template in package</li>
+<li>Use isolated binaries for the tox tests</li>
+<li>Unset executable bit on docs</li>
+<li>Fix DOS line endings in getting-started.txt</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0">
+<h2>2011/03/05 Version 0.7.0<a class="headerlink" href="#version-0-7-0" title="Permalink to this headline">¶</a></h2>
+<p>No API changes since 0.7.0 rc1. Many documentation changes including a stylish
+new <a class="reference external" href="https://github.com/coordt/ADCtheme/">Sphinx theme</a>.</p>
+<p>The full set of changes since 0.6.0 are:</p>
+<ul class="simple">
+<li>Python 3 compatibility</li>
+<li>Ability to mock magic methods with <cite>Mock</cite> and addition of <cite>MagicMock</cite>
+with pre-created magic methods</li>
+<li>Addition of <cite>mocksignature</cite> and <cite>mocksignature</cite> argument to <cite>patch</cite> and
+<cite>patch.object</cite></li>
+<li>Addition of <cite>patch.dict</cite> for changing dictionaries during a test (see the sketch after this list)</li>
+<li>Ability to use <cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> as class decorators</li>
+<li>Renamed <tt class="docutils literal"><span class="pre">patch_object</span></tt> to <cite>patch.object</cite> (<tt class="docutils literal"><span class="pre">patch_object</span></tt> is
+deprecated)</li>
+<li>Addition of soft comparisons: <cite>call_args</cite>, <cite>call_args_list</cite> and <cite>method_calls</cite>
+now return tuple-like objects which compare equal even when empty args
+or kwargs are skipped</li>
+<li>patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>) have start and stop
+methods</li>
+<li>Addition of <cite>assert_called_once_with</cite> method</li>
+<li>Mocks can now be named (<cite>name</cite> argument to constructor) and the name is used
+in the repr</li>
+<li>repr of a mock with a spec includes the class name of the spec</li>
+<li><cite>assert_called_with</cite> works with <cite>python -OO</cite></li>
+<li>New <cite>spec_set</cite> keyword argument to <cite>Mock</cite> and <cite>patch</cite>. If used,
+attempting to <em>set</em> an attribute on a mock not on the spec will raise an
+<cite>AttributeError</cite></li>
+<li>Mocks created with a spec can now pass <cite>isinstance</cite> tests (<cite>__class__</cite>
+returns the type of the spec)</li>
+<li>Added docstrings to all objects</li>
+<li>Improved failure message for <cite>Mock.assert_called_with</cite> when the mock
+has not been called at all</li>
+<li>Decorated functions / methods have their docstring and <cite>__module__</cite>
+preserved on Python 2.4.</li>
+<li>BUGFIX: <cite>mock.patch</cite> now works correctly with certain types of objects that
+proxy attribute access, like the django settings object</li>
+<li>BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <cite>spec=True</cite> works with old style classes</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">help(mock)</span></tt> works now (on the module). Can no longer use <tt class="docutils literal"><span class="pre">__bases__</span></tt>
+as a valid sentinel name (thanks to Stephen Emslie for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">side_effect</span></tt> now works with <tt class="docutils literal"><span class="pre">BaseException</span></tt> exceptions like
+<tt class="docutils literal"><span class="pre">KeyboardInterrupt</span></tt></li>
+<li>BUGFIX: <cite>reset_mock</cite> caused infinite recursion when a mock is set as its own
+return value</li>
+<li>BUGFIX: patching the same object twice now restores the patches correctly</li>
+<li>with statement tests now skipped on Python 2.4</li>
+<li>Tests require unittest2 (or unittest2-py3k) to run</li>
+<li>Tested with <a class="reference external" href="http://pypi.python.org/pypi/tox">tox</a> on Python 2.4 - 3.2,
+jython and pypy (excluding 3.0)</li>
+<li>Added &#8216;build_sphinx&#8217; command to setup.py (requires setuptools or distribute)
+Thanks to Florian Bauer</li>
+<li>Switched from subversion to mercurial for source code control</li>
+<li><a class="reference external" href="http://konryd.blogspot.com/">Konrad Delong</a> added as co-maintainer</li>
+</ul>
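+<p>A sketch of <cite>patch.dict</cite> (the key is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>import os
+from mock import patch
+
+with patch.dict(os.environ, {'ILLUSTRATIVE_KEY': 'value'}):
+    assert os.environ['ILLUSTRATIVE_KEY'] == 'value'
+assert 'ILLUSTRATIVE_KEY' not in os.environ   # restored on exit
+</pre></div></div>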
+</div>
+<div class="section" id="version-0-7-0-rc-1">
+<h2>2011/02/16 Version 0.7.0 RC 1<a class="headerlink" href="#version-0-7-0-rc-1" title="Permalink to this headline">¶</a></h2>
+<p>Changes since beta 4:</p>
+<ul class="simple">
+<li>Tested with jython, pypy and Python 3.2 and 3.1</li>
+<li>Decorated functions / methods have their docstring and <cite>__module__</cite>
+preserved on Python 2.4</li>
+<li>BUGFIX: <cite>mock.patch</cite> now works correctly with certain types of objects that
+proxy attribute access, like the django settings object</li>
+<li>BUGFIX: <cite>reset_mock</cite> caused infinite recursion when a mock is set as its own
+return value</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0-beta-4">
+<h2>2010/11/12 Version 0.7.0 beta 4<a class="headerlink" href="#version-0-7-0-beta-4" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>patchers (<cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite>) have start and stop
+methods</li>
+<li>Addition of <cite>assert_called_once_with</cite> method</li>
+<li>repr of a mock with a spec includes the class name of the spec</li>
+<li><cite>assert_called_with</cite> works with <cite>python -OO</cite></li>
+<li>New <cite>spec_set</cite> keyword argument to <cite>Mock</cite> and <cite>patch</cite>. If used,
+attempting to <em>set</em> an attribute on a mock not on the spec will raise an
+<cite>AttributeError</cite> (see the sketch after this list)</li>
+<li>Attributes and return value of a <cite>MagicMock</cite> are <cite>MagicMock</cite> objects</li>
+<li>Attempting to set an unsupported magic method now raises an <cite>AttributeError</cite></li>
+<li><cite>patch.dict</cite> works as a class decorator</li>
+<li>Switched from subversion to mercurial for source code control</li>
+<li>BUGFIX: mocks are now copyable (thanks to Ned Batchelder for reporting and
+diagnosing this)</li>
+<li>BUGFIX: <cite>spec=True</cite> works with old style classes</li>
+<li>BUGFIX: <cite>mocksignature=True</cite> can now patch instance methods via
+<cite>patch.object</cite></li>
+</ul>
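+<p>A sketch of the <cite>spec_set</cite> keyword (the <cite>Thing</cite> class is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+class Thing(object):
+    attribute = 33
+
+m = Mock(spec_set=Thing)
+m.attribute = 44        # fine: the attribute is on the spec
+try:
+    m.other = 99        # not on the spec
+except AttributeError:
+    pass
+</pre></div></div>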
+</div>
+<div class="section" id="version-0-7-0-beta-3">
+<h2>2010/09/18 Version 0.7.0 beta 3<a class="headerlink" href="#version-0-7-0-beta-3" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Using spec with <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> only pre-creates magic methods in the spec</li>
+<li>Setting a magic method on a mock with a <tt class="docutils literal"><span class="pre">spec</span></tt> can only be done if the
+spec has that method</li>
+<li>Mocks can now be named (<cite>name</cite> argument to constructor) and the name is used
+in the repr</li>
+<li><cite>mocksignature</cite> can now be used with classes (signature based on <cite>__init__</cite>)
+and callable objects (signature based on <cite>__call__</cite>)</li>
+<li>Mocks created with a spec can now pass <cite>isinstance</cite> tests (<cite>__class__</cite>
+returns the type of the spec; see the sketch after this list)</li>
+<li>Default numeric value for MagicMock is 1 rather than zero (because the
+MagicMock bool defaults to True and 0 is False)</li>
+<li>Improved failure message for <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> when the mock
+has not been called at all</li>
+<li>Adding the following to the set of supported magic methods:<ul>
+<li><tt class="docutils literal"><span class="pre">__getformat__</span></tt> and <tt class="docutils literal"><span class="pre">__setformat__</span></tt></li>
+<li>pickle methods</li>
+<li><tt class="docutils literal"><span class="pre">__trunc__</span></tt>, <tt class="docutils literal"><span class="pre">__ceil__</span></tt> and <tt class="docutils literal"><span class="pre">__floor__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__sizeof__</span></tt></li>
+</ul>
+</li>
+<li>Added &#8216;build_sphinx&#8217; command to setup.py (requires setuptools or distribute)
+Thanks to Florian Bauer</li>
+<li>with statement tests now skipped on Python 2.4</li>
+<li>Tests require unittest2 to run on Python 2.7</li>
+<li>Improved several docstrings and documentation</li>
+</ul>
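+<p>A sketch of named mocks and the <cite>isinstance</cite> behaviour (the
+<cite>Thing</cite> class is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+class Thing(object):
+    pass
+
+m = Mock(spec=Thing, name='thing_mock')  # the name appears in repr(m)
+assert isinstance(m, Thing)              # __class__ reports the spec class
+</pre></div></div>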
+</div>
+<div class="section" id="version-0-7-0-beta-2">
+<h2>2010/06/23 Version 0.7.0 beta 2<a class="headerlink" href="#version-0-7-0-beta-2" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> works as a context manager as well as a decorator</li>
+<li><tt class="docutils literal"><span class="pre">patch.dict</span></tt> takes a string to specify dictionary as well as a dictionary
+object. If a string is supplied the name specified is imported</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">patch.dict</span></tt> restores dictionary even when an exception is raised</li>
+</ul>
+</div>
+<div class="section" id="version-0-7-0-beta-1">
+<h2>2010/06/22 Version 0.7.0 beta 1<a class="headerlink" href="#version-0-7-0-beta-1" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Addition of <tt class="xref py py-func docutils literal"><span class="pre">mocksignature()</span></tt></li>
+<li>Ability to mock magic methods (see the sketch after this list)</li>
+<li>Ability to use <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch.object</span></tt> as class decorators</li>
+<li>Renamed <tt class="docutils literal"><span class="pre">patch_object</span></tt> to <a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a> (<tt class="docutils literal"><span class="pre">patch_object</span></tt> is
+deprecated)</li>
+<li>Addition of <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> class with all magic methods pre-created for you</li>
+<li>Python 3 compatibility (tested with 3.2 but should work with 3.0 &amp; 3.1 as
+well)</li>
+<li>Addition of <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> for changing dictionaries during a test</li>
+<li>Addition of <tt class="docutils literal"><span class="pre">mocksignature</span></tt> argument to <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch.object</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">help(mock)</span></tt> works now (on the module). Can no longer use <tt class="docutils literal"><span class="pre">__bases__</span></tt>
+as a valid sentinel name (thanks to Stephen Emslie for reporting and
+diagnosing this)</li>
+<li>Addition of soft comparisons: <cite>call_args</cite>, <cite>call_args_list</cite> and <cite>method_calls</cite>
+now return tuple-like objects which compare equal even when empty args
+or kwargs are skipped</li>
+<li>Added docstrings.</li>
+<li>BUGFIX: <tt class="docutils literal"><span class="pre">side_effect</span></tt> now works with <tt class="docutils literal"><span class="pre">BaseException</span></tt> exceptions like
+<tt class="docutils literal"><span class="pre">KeyboardInterrupt</span></tt></li>
+<li>BUGFIX: patching the same object twice now restores the patches correctly</li>
+<li>The tests now require <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a>
+to run</li>
+<li><a class="reference external" href="http://konryd.blogspot.com/">Konrad Delong</a> added as co-maintainer</li>
+</ul>
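+<p>A sketch of mocking a magic method on a plain <cite>Mock</cite> (the return value
+is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+m = Mock()
+m.__str__ = Mock(return_value='a mocked str')  # magic methods are settable
+assert str(m) == 'a mocked str'
+</pre></div></div>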
+</div>
+<div class="section" id="version-0-6-0">
+<h2>2009/08/22 Version 0.6.0<a class="headerlink" href="#version-0-6-0" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>New test layout compatible with test discovery</li>
+<li>Descriptors (static methods / class methods etc) can now be patched and
+restored correctly</li>
+<li>Mocks can raise exceptions when called by setting <tt class="docutils literal"><span class="pre">side_effect</span></tt> to an
+exception class or instance</li>
+<li>Mocks that wrap objects will not pass on calls to the underlying object if
+an explicit return_value is set</li>
+</ul>
+</div>
+<div class="section" id="version-0-5-0">
+<h2>2009/04/17 Version 0.5.0<a class="headerlink" href="#version-0-5-0" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li>Made DEFAULT part of the public api.</li>
+<li>Documentation built with Sphinx.</li>
+<li><tt class="docutils literal"><span class="pre">side_effect</span></tt> is now called with the same arguments as the mock is called with and
+if returns a non-DEFAULT value that is automatically set as the <tt class="docutils literal"><span class="pre">mock.return_value</span></tt>.</li>
+<li><tt class="docutils literal"><span class="pre">wraps</span></tt> keyword argument used for wrapping objects (and passing calls through to the wrapped object).</li>
+<li><tt class="docutils literal"><span class="pre">Mock.reset</span></tt> renamed to <tt class="docutils literal"><span class="pre">Mock.reset_mock</span></tt>, as reset is a common API name.</li>
+<li><tt class="docutils literal"><span class="pre">patch</span></tt> / <tt class="docutils literal"><span class="pre">patch_object</span></tt> are now context managers and can be used with <tt class="docutils literal"><span class="pre">with</span></tt>.</li>
+<li>A new &#8216;create&#8217; keyword argument to patch and patch_object that allows them to patch
+(and unpatch) attributes that don&#8217;t exist. (Potentially unsafe to use - it can allow
+you to have tests that pass when they are testing an API that doesn&#8217;t exist - use at
+your own risk!)</li>
+<li>The methods keyword argument to Mock has been removed and merged with spec. The spec
+argument can now be a list of methods or an object to take the spec from.</li>
+<li>Nested patches may now be applied in a different order (created mocks passed
+in the opposite order). This is actually a bugfix.</li>
+<li>patch and patch_object now take a spec keyword argument. If spec is
+passed in as &#8216;True&#8217; then the Mock created will take the object it is replacing
+as its spec object. If the object being replaced is a class, then the return
+value for the mock will also use the class as a spec.</li>
+<li>A Mock created without a spec will not attempt to mock any magic methods / attributes
+(they will raise an <tt class="docutils literal"><span class="pre">AttributeError</span></tt> instead).</li>
+</ul>
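+<p>A sketch of the <cite>wraps</cite> keyword (the wrapped list is illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import Mock
+
+real = ['a', 'b', 'c']
+m = Mock(wraps=real)
+assert m.index('b') == 1      # the call is passed through to the real list
+m.index.assert_called_with('b')
+</pre></div></div>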
+</div>
+<div class="section" id="version-0-4-0">
+<h2>2008/10/12 Version 0.4.0<a class="headerlink" href="#version-0-4-0" title="Permalink to this headline">¶</a></h2>
+<ul>
+<li><p class="first">Default return value is now a new mock rather than None</p>
+</li>
+<li><p class="first">return_value added as a keyword argument to the constructor</p>
+</li>
+<li><p class="first">New method &#8216;assert_called_with&#8217;</p>
+</li>
+<li><p class="first">Added &#8216;side_effect&#8217; attribute / keyword argument called when mock is called</p>
+</li>
+<li><p class="first">patch decorator split into two decorators:</p>
+<blockquote>
+<div><ul class="simple">
+<li><tt class="docutils literal"><span class="pre">patch_object</span></tt> which takes an object and an attribute name to patch
+(plus optionally a value to patch with which defaults to a mock object)</li>
+<li><tt class="docutils literal"><span class="pre">patch</span></tt> which takes a string specifying a target to patch; in the form
+&#8216;package.module.Class.attribute&#8217;. (plus optionally a value to
+patch with which defaults to a mock object)</li>
+</ul>
+</div></blockquote>
+</li>
+<li><p class="first">Can now patch objects with <tt class="docutils literal"><span class="pre">None</span></tt></p>
+</li>
+<li><p class="first">Change to patch for nose compatibility with error reporting in wrapped functions</p>
+</li>
+<li><p class="first">Reset no longer clears children / return value etc - it just resets
+call count and call args. It also calls reset on all children (and
+the return value if it is a mock).</p>
+</li>
+</ul>
+<p>Thanks to Konrad Delong, Kevin Dangoor and others for patches and suggestions.</p>
+</div>
+<div class="section" id="version-0-3-1">
+<h2>2007/12/03 Version 0.3.1<a class="headerlink" href="#version-0-3-1" title="Permalink to this headline">¶</a></h2>
+<p><tt class="docutils literal"><span class="pre">patch</span></tt> maintains the name of decorated functions for compatibility with nose
+test autodiscovery.</p>
+<p>Tests decorated with <tt class="docutils literal"><span class="pre">patch</span></tt> that use the two argument form (implicit mock
+creation) will receive the mock(s) passed in as extra arguments.</p>
+<p>Thanks to Kevin Dangoor for these changes.</p>
+</div>
+<div class="section" id="version-0-3-0">
+<h2>2007/11/30 Version 0.3.0<a class="headerlink" href="#version-0-3-0" title="Permalink to this headline">¶</a></h2>
+<p>Removed <tt class="docutils literal"><span class="pre">patch_module</span></tt>. <tt class="docutils literal"><span class="pre">patch</span></tt> can now take a string as the first
+argument for patching modules.</p>
+<p>The third argument to <tt class="docutils literal"><span class="pre">patch</span></tt> is optional - a mock will be created by
+default if it is not passed in.</p>
+</div>
+<div class="section" id="version-0-2-1">
+<h2>2007/11/21 Version 0.2.1<a class="headerlink" href="#version-0-2-1" title="Permalink to this headline">¶</a></h2>
+<p>Bug fix, allows reuse of functions decorated with <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch_module</span></tt>.</p>
+</div>
+<div class="section" id="version-0-2-0">
+<h2>2007/11/20 Version 0.2.0<a class="headerlink" href="#version-0-2-0" title="Permalink to this headline">¶</a></h2>
+<p>Added <tt class="docutils literal"><span class="pre">spec</span></tt> keyword argument for creating <tt class="docutils literal"><span class="pre">Mock</span></tt> objects from a
+specification object.</p>
+<p>Added <tt class="docutils literal"><span class="pre">patch</span></tt> and <tt class="docutils literal"><span class="pre">patch_module</span></tt> monkey patching decorators.</p>
+<p>Added <tt class="docutils literal"><span class="pre">sentinel</span></tt> for convenient access to unique objects.</p>
+<p>Distribution includes unit tests.</p>
+</div>
+<div class="section" id="version-0-1-0">
+<h2>2007/11/19 Version 0.1.0<a class="headerlink" href="#version-0-1-0" title="Permalink to this headline">¶</a></h2>
+<p>Initial release.</p>
+</div>
+</div>
+<div class="section" id="todo-and-limitations">
+<h1>TODO and Limitations<a class="headerlink" href="#todo-and-limitations" title="Permalink to this headline">¶</a></h1>
+<p>Contributions, bug reports and comments welcomed!</p>
+<p>Feature requests and bug reports are handled on the issue tracker:</p>
+<blockquote>
+<div><ul class="simple">
+<li><a class="reference external" href="http://code.google.com/p/mock/issues/list">mock issue tracker</a></li>
+</ul>
+</div></blockquote>
+<p><cite>wraps</cite> is not integrated with magic methods.</p>
+<p><cite>patch</cite> could auto-do the patching in the constructor and unpatch in the
+destructor. This would be useful in itself, but violates TOOWTDI and would be
+unsafe for IronPython &amp; PyPy (non-deterministic calling of destructors).
+Destructors aren&#8217;t called in CPython where there are cycles, but a weak
+reference with a callback can be used to get round this.</p>
+<p><cite>Mock</cite> has several attributes. This makes it unsuitable for mocking objects
+that use these attribute names. A way round this would be to provide methods
+that <em>hide</em> these attributes when needed. In 0.8 many, but not all, of these
+attributes are renamed to gain a <cite>_mock</cite> prefix, making it less likely that
+they will clash. Any outstanding attributes that haven&#8217;t been modified with
+the prefix should be changed.</p>
+<p>If a patch is started using <cite>patch.start</cite> and then not stopped correctly then
+the unpatching is not done. Using weak references it would be possible to
+detect and fix this when the patch object itself is garbage collected. This
+would be tricky to get right though.</p>
+<p>When a <cite>Mock</cite> is created by <cite>patch</cite>, arbitrary keywords can be used to set
+attributes. If <cite>patch</cite> is created with a <cite>spec</cite>, and is replacing a class, then
+a <cite>return_value</cite> mock is created. The keyword arguments are not applied to the
+child mock, but could be.</p>
+<p>When mocking a class with <cite>patch</cite>, passing in <cite>spec=True</cite> or <cite>autospec=True</cite>,
+the mock class has an instance created from the same spec. Should this be the
+default behaviour for mocks anyway (mock return values inheriting the spec
+from their parent), or should it be controlled by an additional keyword
+argument (<cite>inherit</cite>) to the Mock constructor? <cite>create_autospec</cite> does this, so
+an additional keyword argument to Mock is probably unnecessary.</p>
+<p>The <cite>mocksignature</cite> argument to <cite>patch</cite> with a non-<cite>Mock</cite> passed into
+<cite>new_callable</cite> will <em>probably</em> cause an error. Should it just be invalid?</p>
+<p>Note that <cite>NonCallableMock</cite> and <cite>NonCallableMagicMock</cite> still have the unused
+(and unusable) attributes: <cite>return_value</cite>, <cite>side_effect</cite>, <cite>call_count</cite>,
+<cite>call_args</cite> and <cite>call_args_list</cite>. These could be removed or raise errors on
+getting / setting. They also have the <cite>assert_called_with</cite> and
+<cite>assert_called_once_with</cite> methods. Removing these would be pointless as
+fetching them would create a mock (attribute) that could be called without
+error.</p>
+<p>Some outstanding technical debt. The way autospeccing mocks function
+signatures was copied and modified from <cite>mocksignature</cite>. This could all be
+refactored into one set of functions instead of two. The ways we tell whether
+patchers are started, and whether a patcher is being used for a <cite>patch.multiple</cite>
+call, are both horrible. There are now a host of helper functions that should
+be rationalised. (Probably time to split mock into a package instead of a
+module.)</p>
+<p>Passing arbitrary keyword arguments to <cite>create_autospec</cite>, or to <cite>patch</cite>
+with <cite>autospec</cite>, works fine when mocking a <em>function</em>. The arbitrary
+attributes are set on the created mock, but <cite>create_autospec</cite> returns a real
+function (which doesn&#8217;t have those attributes). Then again, what is the use
+case for using autospec to create functions with attributes that don&#8217;t exist
+on the original?</p>
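+<p>A sketch of the behaviour just described (<cite>func</cite> is a throwaway example):</p>
+<div class="highlight-python"><div class="highlight"><pre>from mock import create_autospec
+
+def func(a, b):
+    pass
+
+mock_func = create_autospec(func, foo=&#39;bar&#39;)
+mock_func(1, 2)          # the signature of func is enforced
+# mock_func.foo          # raises AttributeError: &#39;foo&#39; was set on the
+                         # underlying mock, not on the returned function
+</pre></div>
+</div>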
+<p><cite>mocksignature</cite>, plus the <cite>call_args_list</cite> and <cite>method_calls</cite> attributes of
+<cite>Mock</cite> could all be deprecated.</p>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">CHANGELOG</a><ul>
+<li><a class="reference internal" href="#version-1-0-0">2012/10/07 Version 1.0.0</a></li>
+<li><a class="reference internal" href="#version-1-0-0-beta-1">2012/07/13 Version 1.0.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-1-0-0-alpha-2">2012/05/04 Version 1.0.0 alpha 2</a></li>
+<li><a class="reference internal" href="#version-1-0-0-alpha-1">2012/03/25 Version 1.0.0 alpha 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0">2012/02/13 Version 0.8.0</a></li>
+<li><a class="reference internal" href="#version-0-8-0-release-candidate-2">2012/01/10 Version 0.8.0 release candidate 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-release-candidate-1">2011/12/29 Version 0.8.0 release candidate 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-4">2011/10/09 Version 0.8.0 beta 4</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-3">2011/08/15 Version 0.8.0 beta 3</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-2">2011/08/05 Version 0.8.0 beta 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-beta-1">2011/07/25 Version 0.8.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-0-8-0-alpha-2">2011/07/16 Version 0.8.0 alpha 2</a></li>
+<li><a class="reference internal" href="#version-0-8-0-alpha-1">2011/06/14 Version 0.8.0 alpha 1</a></li>
+<li><a class="reference internal" href="#version-0-7-2">2011/05/30 Version 0.7.2</a></li>
+<li><a class="reference internal" href="#version-0-7-1">2011/05/06 Version 0.7.1</a></li>
+<li><a class="reference internal" href="#version-0-7-0">2011/03/05 Version 0.7.0</a></li>
+<li><a class="reference internal" href="#version-0-7-0-rc-1">2011/02/16 Version 0.7.0 RC 1</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-4">2010/11/12 Version 0.7.0 beta 4</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-3">2010/09/18 Version 0.7.0 beta 3</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-2">2010/06/23 Version 0.7.0 beta 2</a></li>
+<li><a class="reference internal" href="#version-0-7-0-beta-1">2010/06/22 Version 0.7.0 beta 1</a></li>
+<li><a class="reference internal" href="#version-0-6-0">2009/08/22 Version 0.6.0</a></li>
+<li><a class="reference internal" href="#version-0-5-0">2009/04/17 Version 0.5.0</a></li>
+<li><a class="reference internal" href="#version-0-4-0">2008/10/12 Version 0.4.0</a></li>
+<li><a class="reference internal" href="#version-0-3-1">2007/12/03 Version 0.3.1</a></li>
+<li><a class="reference internal" href="#version-0-3-0">2007/11/30 Version 0.3.0</a></li>
+<li><a class="reference internal" href="#version-0-2-1">2007/11/21 Version 0.2.1</a></li>
+<li><a class="reference internal" href="#version-0-2-0">2007/11/20 Version 0.2.0</a></li>
+<li><a class="reference internal" href="#version-0-1-0">2007/11/19 Version 0.1.0</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#todo-and-limitations">TODO and Limitations</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="compare.html"
+ title="previous chapter">Mock Library Comparison</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/changelog.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/compare.html b/python/mock-1.0.0/html/compare.html
new file mode 100644
index 000000000..bfc9d519a
--- /dev/null
+++ b/python/mock-1.0.0/html/compare.html
@@ -0,0 +1,672 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mock Library Comparison &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="CHANGELOG" href="changelog.html" />
+ <link rel="prev" title="Further Examples" href="examples.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="changelog.html" title="CHANGELOG"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mock-library-comparison">
+<h1>Mock Library Comparison<a class="headerlink" href="#mock-library-comparison" title="Permalink to this headline">¶</a></h1>
+<p>A side-by-side comparison of how to accomplish some basic tasks with mock and
+some other popular Python mocking libraries and frameworks.</p>
+<p>These are:</p>
+<ul class="simple">
+<li><a class="reference external" href="http://pypi.python.org/pypi/flexmock">flexmock</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/mox">mox</a></li>
+<li><a class="reference external" href="http://niemeyer.net/mocker">Mocker</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/dingus">dingus</a></li>
+<li><a class="reference external" href="http://pypi.python.org/pypi/fudge">fudge</a></li>
+</ul>
+<p>Popular Python mocking frameworks not yet represented here include
+<a class="reference external" href="http://pypi.python.org/pypi/MiniMock">MiniMock</a>.</p>
+<p><a class="reference external" href="http://pmock.sourceforge.net/">pMock</a> (last release 2004 and doesn&#8217;t import
+in recent versions of Python) and
+<a class="reference external" href="http://python-mock.sourceforge.net/">python-mock</a> (last release 2005) are
+intentionally omitted.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>A more up-to-date version of this comparison, tested against all of the
+mock libraries (only the mock examples on this page can be executed as
+doctests), is maintained by Gary Bernhardt:</p>
+<ul class="last simple">
+<li><a class="reference external" href="http://garybernhardt.github.com/python-mock-comparison/">Python Mock Library Comparison</a></li>
+</ul>
+</div>
+<p>This comparison is by no means complete, and may not be fully idiomatic
+for all the libraries represented. <em>Please</em> contribute corrections, missing
+comparisons, or comparisons for additional libraries to the <a class="reference external" href="https://code.google.com/p/mock/issues/list">mock issue
+tracker</a>.</p>
+<p>This comparison page was originally created by the <a class="reference external" href="https://code.google.com/p/pymox/wiki/MoxComparison">Mox project</a> and then extended for
+<a class="reference external" href="http://has207.github.com/flexmock/compare.html">flexmock and mock</a> by
+Herman Sheremetyev. Dingus examples written by <a class="reference external" href="http://garybernhardt.github.com/python-mock-comparison/">Gary Bernhardt</a>. fudge examples
+provided by <a class="reference external" href="http://farmdev.com/">Kumar McMillan</a>.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>The example tasks here were originally created by Mox, which is a mocking
+<em>framework</em> rather than a library like mock. The tasks shown naturally
+exemplify tasks that frameworks are good at and not the ones they make
+harder. In particular you can take a <cite>Mock</cite> or <cite>MagicMock</cite> object and use
+it in any way you want with no up-front configuration. The same is also
+true for Dingus.</p>
+<p class="last">The examples for mock here assume version 0.7.0.</p>
+</div>
+<div class="section" id="simple-fake-object">
+<h2>Simple fake object<a class="headerlink" href="#simple-fake-object" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&quot;calculated value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">(</span><span class="n">some_method</span><span class="o">=</span><span class="k">lambda</span><span class="p">:</span> <span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">,</span>
+<span class="gp">... </span> <span class="n">some_method__returns</span><span class="o">=</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">has_attr</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;calculated value&quot;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_attribute</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="simple-mock">
+<h2>Simple mock<a class="headerlink" href="#simple-mock" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&quot;value&quot;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">some_method__returns</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">calls</span><span class="p">()</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">times_called</span><span class="p">(</span><span class="mi">1</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">fake:my_fake.some_method() was not called</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="creating-partial-mocks">
+<h2>Creating partial mocks<a class="headerlink" href="#creating-partial-mocks" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeObject</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">SomeObject</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">Get</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span> <span class="o">=</span> <span class="n">SomeObject</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&quot;value&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;value&quot;</span><span class="p">,</span> <span class="nb">object</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&quot;&lt;fudge-value&gt;&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">fudge</span><span class="o">.</span><span class="n">patched_context</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">,</span> <span class="s">&#39;some_method&#39;</span><span class="p">,</span> <span class="n">fake</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">s</span> <span class="o">=</span> <span class="n">SomeObject</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;&lt;fudge-value&gt;&quot;</span><span class="p">,</span> <span class="n">s</span><span class="o">.</span><span class="n">some_method</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="ensure-calls-are-made-in-specific-order">
+<h2>Ensure calls are made in specific order<a class="headerlink" href="#ensure-calls-are-made-in-specific-order" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method1()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method2()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="n">my_mock</span><span class="o">.</span><span class="n">mock_calls</span><span class="p">,</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">method1</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">method2</span><span class="p">()])</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="o">.</span><span class="n">ordered</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="o">.</span><span class="n">ordered</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">SomeObject</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="k">with</span> <span class="n">mocker</span><span class="o">.</span><span class="n">order</span><span class="p">():</span>
+ <span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&#39;first thing&#39;</span><span class="p">)</span>
+ <span class="n">mock</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="s">&#39;second thing&#39;</span><span class="p">)</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+ <span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">([</span><span class="s">&#39;method1&#39;</span><span class="p">,</span> <span class="s">&#39;method2&#39;</span><span class="p">],</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">call</span> <span class="ow">in</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">])</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">remember_order</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">))</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError: Call #1 was fake:my_fake.method2(); Expected</span>: <span class="n">#1 fake:my_fake.method1(), #2 fake:my_fake.method2(), end</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="raising-exceptions">
+<h2>Raising exceptions<a class="headerlink" href="#raising-exceptions" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">flexmock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&quot;some_method&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">and_raise</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+
+<span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span><span class="o">.</span><span class="n">AndRaise</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">throw</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">))</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">replay</span><span class="p">()</span>
+<span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">verify</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">exception_raiser</span><span class="p">(</span><span class="n">SomeException</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertRaises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="p">(</span><span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">is_callable</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">raises</span><span class="p">(</span><span class="n">SomeException</span><span class="p">(</span><span class="s">&quot;message&quot;</span><span class="p">)))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">SomeException</span>: <span class="n">message</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="override-new-instances-of-a-class">
+<h2>Override new instances of a class<a class="headerlink" href="#override-new-instances-of-a-class" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;somemodule.Someclass&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">MockClass</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">some_other_object</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">Someclass</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">,</span> <span class="n">new_instances</span><span class="o">=</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+
+<span class="c"># Mox</span>
+<span class="c"># (you will probably have mox.Mox() available as self.mox in a real test)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Mox</span><span class="p">()</span><span class="o">.</span><span class="n">StubOutWithMock</span><span class="p">(</span><span class="n">some_module</span><span class="p">,</span> <span class="s">&#39;SomeClass&#39;</span><span class="p">,</span> <span class="n">use_mock_anything</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">ReplayAll</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">some_module</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+
+<span class="c"># Mocker</span>
+<span class="n">instance</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">mock</span><span class="p">()</span>
+<span class="n">klass</span> <span class="o">=</span> <span class="n">mocker</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="n">spec</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="n">klass</span><span class="p">(</span><span class="s">&#39;expected&#39;</span><span class="p">,</span> <span class="s">&#39;args&#39;</span><span class="p">)</span>
+<span class="n">mocker</span><span class="o">.</span><span class="n">result</span><span class="p">(</span><span class="n">instance</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">dingus</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;somemodule.SomeClass&#39;</span><span class="p">,</span> <span class="n">MockClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.patch</span><span class="p">(</span><span class="s">&#39;somemodule.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">FakeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">FakeClass</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="n">some_other_object</span><span class="p">,</span> <span class="n">somemodule</span><span class="o">.</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="call-the-same-method-multiple-times">
+<h2>Call the same method multiple times<a class="headerlink" href="#call-the-same-method-multiple-times" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">You don&#8217;t need to do <em>any</em> configuration to call <cite>mock.Mock()</cite> methods
+multiple times. Attributes like <cite>call_count</cite>, <cite>call_args_list</cite> and
+<cite>method_calls</cite> provide various different ways of making assertions about
+how the mock was used.</p>
+</div>
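+<p>For instance, using the recording attributes the note mentions:</p>
+<div class="highlight-python"><div class="highlight"><pre># mock
+my_mock = mock.Mock()
+my_mock.some_method(1)
+my_mock.some_method(2)
+assert my_mock.some_method.call_args_list == [mock.call(1), mock.call(2)]
+assert my_mock.method_calls == [mock.call.some_method(1),
+                                mock.call.some_method(2)]
+</pre></div>
+</div>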
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.some_method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.some_method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">some_method</span><span class="o">.</span><span class="n">call_count</span> <span class="o">&gt;=</span> <span class="mi">2</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock # (verifies that the method gets called at least twice)</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">at_least</span><span class="o">.</span><span class="n">twice</span>
+
+<span class="c"># Mox</span>
+<span class="c"># (does not support variable number of calls, so you need to create a new entry for each explicit call)</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">(</span><span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">(),</span> <span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">())</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">some_method</span><span class="p">(</span><span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">(),</span> <span class="n">mox</span><span class="o">.</span><span class="n">IgnoreArg</span><span class="p">())</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Replay</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="n">mox</span><span class="o">.</span><span class="n">Verify</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+
+<span class="c"># Mocker</span>
+<span class="c"># (TODO)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="go">&lt;Dingus ...&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">))</span> <span class="o">==</span> <span class="mi">2</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;some_method&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">times_called</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">my_fake</span><span class="o">.</span><span class="n">some_method</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+<span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">fake:my_fake.some_method() was called 1 time(s). Expected 2.</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-chained-methods">
+<h2>Mock chained methods<a class="headerlink" href="#mock-chained-methods" title="Permalink to this headline">¶</a></h2>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span> <span class="o">=</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method2</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Flexmock</span>
+<span class="c"># (intermediate method calls are automatically assigned to temporary fake objects</span>
+<span class="c"># and can be called with any arguments)</span>
+<span class="n">flexmock</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span><span class="o">.</span><span class="n">should_receive</span><span class="p">(</span>
+ <span class="s">&#39;method1.method2.method3&#39;</span>
+<span class="p">)</span><span class="o">.</span><span class="n">with_args</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">)</span><span class="o">.</span><span class="n">and_return</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">)</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some_value&#39;</span><span class="p">,</span> <span class="n">some_object</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">))</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="c"># Mox</span>
+<span class="n">mock</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockObject</span><span class="p">(</span><span class="n">some_object</span><span class="p">)</span>
+<span class="n">mock2</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock3</span> <span class="o">=</span> <span class="n">mox</span><span class="o">.</span><span class="n">MockAnything</span><span class="p">()</span>
+<span class="n">mock</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">mock1</span><span class="p">)</span>
+<span class="n">mock2</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="n">mock2</span><span class="p">)</span>
+<span class="n">mock3</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">)</span><span class="o">.</span><span class="n">AndReturn</span><span class="p">(</span><span class="s">&#39;some_value&#39;</span><span class="p">)</span>
+<span class="bp">self</span><span class="o">.</span><span class="n">mox</span><span class="o">.</span><span class="n">ReplayAll</span><span class="p">()</span>
+<span class="n">assertEqual</span><span class="p">(</span><span class="s">&quot;some_value&quot;</span><span class="p">,</span> <span class="n">some_object</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="n">arg1</span><span class="p">,</span> <span class="n">arg2</span><span class="p">))</span>
+<span class="bp">self</span><span class="o">.</span><span class="n">mox</span><span class="o">.</span><span class="n">VerifyAll</span><span class="p">()</span>
+
+<span class="c"># Mocker</span>
+<span class="c"># (TODO)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span> <span class="o">=</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method2</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">method3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">method3</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">method3</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@fudge.test</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="p">(</span><span class="n">my_fake</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns_fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns_fake</span><span class="p">()</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">expects</span><span class="p">(</span><span class="s">&#39;method3&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">with_args</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="o">.</span><span class="n">returns</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">))</span>
+<span class="gp">... </span> <span class="n">assertEqual</span><span class="p">(</span><span class="s">&#39;some value&#39;</span><span class="p">,</span> <span class="n">my_fake</span><span class="o">.</span><span class="n">method1</span><span class="p">()</span><span class="o">.</span><span class="n">method2</span><span class="p">()</span><span class="o">.</span><span class="n">method3</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-a-context-manager">
+<h2>Mocking a context manager<a class="headerlink" href="#mocking-a-context-manager" title="Permalink to this headline">¶</a></h2>
+<p>Examples for mock, Dingus and fudge only (so far):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus (nothing special here; all dinguses are &quot;magic mocks&quot;)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_dingus</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">calls</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_fake</span> <span class="o">=</span> <span class="n">fudge</span><span class="o">.</span><span class="n">Fake</span><span class="p">()</span><span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;__enter__&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">provides</span><span class="p">(</span><span class="s">&#39;__exit__&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">my_fake</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-the-builtin-open-used-as-a-context-manager">
+<h2>Mocking the builtin open used as a context manager<a class="headerlink" href="#mocking-the-builtin-open-used-as-a-context-manager" title="Permalink to this headline">¶</a></h2>
+<p>Example for mock only (so far):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">my_mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">manager</span> <span class="o">=</span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">h</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p><em>or</em>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">my_mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">s</span><span class="p">:</span> <span class="n">s</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__exit__</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">Mock</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">my_mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">h</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
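+<p><em>Note</em>: mock also ships a <cite>mock_open()</cite> helper that builds this
+configuration for you. A minimal sketch, mirroring the example above:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; # mock, using the mock_open() helper
+&gt;&gt;&gt; m = mock.mock_open(read_data=&#39;some data&#39;)
+&gt;&gt;&gt; with mock.patch(&#39;__builtin__.open&#39;, m):
+...     with open(&#39;foo&#39;) as h:
+...         data = h.read()
+...
+&gt;&gt;&gt; data
+&#39;some data&#39;
+&gt;&gt;&gt; m.assert_called_once_with(&#39;foo&#39;)
+</pre></div>
+</div>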
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># Dingus</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dingus</span> <span class="o">=</span> <span class="n">dingus</span><span class="o">.</span><span class="n">Dingus</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">dingus</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">my_dingus</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">file_</span> <span class="o">=</span> <span class="nb">open</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">file_</span><span class="o">.</span><span class="n">read</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some data&#39;</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">h</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">f</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;some data&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">my_dingus</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="s">&#39;()&#39;</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">once</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="c"># fudge</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">contextlib</span> <span class="kn">import</span> <span class="n">contextmanager</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">StringIO</span> <span class="kn">import</span> <span class="n">StringIO</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@contextmanager</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">fake_file</span><span class="p">(</span><span class="n">filename</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">yield</span> <span class="n">StringIO</span><span class="p">(</span><span class="s">&#39;sekrets&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">fudge</span><span class="o">.</span><span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">fake_open</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">fake_open</span><span class="o">.</span><span class="n">is_callable</span><span class="p">()</span><span class="o">.</span><span class="n">calls</span><span class="p">(</span><span class="n">fake_file</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;/etc/password&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">data</span> <span class="o">=</span> <span class="n">f</span><span class="o">.</span><span class="n">read</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">fake:__builtin__.open</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">data</span>
+<span class="go">&#39;sekrets&#39;</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mock Library Comparison</a><ul>
+<li><a class="reference internal" href="#simple-fake-object">Simple fake object</a></li>
+<li><a class="reference internal" href="#simple-mock">Simple mock</a></li>
+<li><a class="reference internal" href="#creating-partial-mocks">Creating partial mocks</a></li>
+<li><a class="reference internal" href="#ensure-calls-are-made-in-specific-order">Ensure calls are made in specific order</a></li>
+<li><a class="reference internal" href="#raising-exceptions">Raising exceptions</a></li>
+<li><a class="reference internal" href="#override-new-instances-of-a-class">Override new instances of a class</a></li>
+<li><a class="reference internal" href="#call-the-same-method-multiple-times">Call the same method multiple times</a></li>
+<li><a class="reference internal" href="#mock-chained-methods">Mock chained methods</a></li>
+<li><a class="reference internal" href="#mocking-a-context-manager">Mocking a context manager</a></li>
+<li><a class="reference internal" href="#mocking-the-builtin-open-used-as-a-context-manager">Mocking the builtin open used as a context manager</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="examples.html"
+ title="previous chapter">Further Examples</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="changelog.html"
+ title="next chapter">CHANGELOG</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/compare.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="changelog.html" title="CHANGELOG"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html>
\ No newline at end of file
diff --git a/python/mock-1.0.0/html/examples.html b/python/mock-1.0.0/html/examples.html
new file mode 100644
index 000000000..8d8113e58
--- /dev/null
+++ b/python/mock-1.0.0/html/examples.html
@@ -0,0 +1,1006 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Further Examples &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Mock Library Comparison" href="compare.html" />
+ <link rel="prev" title="Getting Started with Mock" href="getting-started.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="further-examples">
+<span id="id1"></span><h1>Further Examples<a class="headerlink" href="#further-examples" title="Permalink to this headline">¶</a></h1>
+<p>For comprehensive examples, see the unit tests included in the full source
+distribution.</p>
+<p>Here are some more examples for slightly more advanced scenarios than in
+the <a class="reference internal" href="getting-started.html#getting-started"><em>getting started</em></a> guide.</p>
+<div class="section" id="mocking-chained-calls">
+<h2>Mocking chained calls<a class="headerlink" href="#mocking-chained-calls" title="Permalink to this headline">¶</a></h2>
+<p>Mocking chained calls is actually straightforward with mock once you
+understand the <a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute. When a mock is called for
+the first time, or you fetch its <cite>return_value</cite> before it has been called, a
+new <cite>Mock</cite> is created.</p>
+<p>This means that you can see how the object returned from a call to a mocked
+object has been used by interrogating the <cite>return_value</cite> mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">b</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">&lt;Mock name=&#39;mock().foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">b</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>From here it is a simple step to configure and then make assertions about
+chained calls. Of course another alternative is writing your code in a more
+testable way in the first place...</p>
+<p>So, suppose we have some code that looks a little bit like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">backend</span> <span class="o">=</span> <span class="n">BackendProvider</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">response</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">backend</span><span class="o">.</span><span class="n">get_endpoint</span><span class="p">(</span><span class="s">&#39;foobar&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">create_call</span><span class="p">(</span><span class="s">&#39;spam&#39;</span><span class="p">,</span> <span class="s">&#39;eggs&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">start_call</span><span class="p">()</span>
+<span class="gp">... </span> <span class="c"># more code</span>
+</pre></div>
+</div>
+<p>Assuming that <cite>BackendProvider</cite> is already well tested, how do we test
+<cite>method()</cite>? Specifically, we want to test that the code section <cite># more
+code</cite> uses the response object in the correct way.</p>
+<p>As this chain of calls is made from an instance attribute we can monkey patch
+the <cite>backend</cite> attribute on a <cite>Something</cite> instance. In this particular case
+we are only interested in the return value from the final call to
+<cite>start_call</cite> so we don&#8217;t have much configuration to do. Let&#8217;s assume the
+object it returns is &#8216;file-like&#8217;, so we&#8217;ll ensure that our response object
+uses the builtin <cite>file</cite> as its <cite>spec</cite>.</p>
+<p>To do this we create a mock instance as our mock backend and create a mock
+response object for it. To set the response as the return value for that final
+<cite>start_call</cite> we could do this:</p>
+<blockquote>
+<div><cite>mock_backend.get_endpoint.return_value.create_call.return_value.start_call.return_value = mock_response</cite>.</div></blockquote>
+<p>We can do that in a slightly nicer way using the <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a>
+method to directly set the return value for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">something</span> <span class="o">=</span> <span class="n">Something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_response</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="nb">file</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_backend</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">config</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;get_endpoint.return_value.create_call.return_value.start_call.return_value&#39;</span><span class="p">:</span> <span class="n">mock_response</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_backend</span><span class="o">.</span><span class="n">configure_mock</span><span class="p">(</span><span class="o">**</span><span class="n">config</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>With these we monkey patch the &#8220;mock backend&#8221; in place and can make the real
+call:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">something</span><span class="o">.</span><span class="n">backend</span> <span class="o">=</span> <span class="n">mock_backend</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">something</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>Using <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> we can check the chained call with a single
+assert. A chained call is several calls in one line of code, so there will be
+several entries in <cite>mock_calls</cite>. We can use <a class="reference internal" href="helpers.html#mock.call.call_list" title="mock.call.call_list"><tt class="xref py py-meth docutils literal"><span class="pre">call.call_list()</span></tt></a> to create
+this list of calls for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">chained</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">get_endpoint</span><span class="p">(</span><span class="s">&#39;foobar&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">create_call</span><span class="p">(</span><span class="s">&#39;spam&#39;</span><span class="p">,</span> <span class="s">&#39;eggs&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">start_call</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">call_list</span> <span class="o">=</span> <span class="n">chained</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mock_backend</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">call_list</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="partial-mocking">
+<h2>Partial mocking<a class="headerlink" href="#partial-mocking" title="Permalink to this headline">¶</a></h2>
+<p>In some tests I wanted to mock out a call to <a class="reference external" href="http://docs.python.org/library/datetime.html#datetime.date.today">datetime.date.today()</a> to return
+a known date, but I didn&#8217;t want to prevent the code under test from
+creating new date objects. Unfortunately <cite>datetime.date</cite> is written in C, and
+so I couldn&#8217;t just monkey-patch out the static <cite>date.today</cite> method.</p>
+<p>I found a simple way of doing this that involved effectively wrapping the date
+class with a mock, but passing through calls to the constructor to the real
+class (and returning real instances).</p>
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch</span> <span class="pre">decorator</span></tt></a> is used here to
+mock out the <cite>date</cite> class in the module under test. The <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>
+attribute on the mock date class is then set to a lambda function that returns
+a real date. When the mock date class is called a real date will be
+constructed and returned by <cite>side_effect</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">datetime</span> <span class="kn">import</span> <span class="n">date</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.date&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_date</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_date</span><span class="o">.</span><span class="n">today</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">date</span><span class="p">(</span><span class="mi">2010</span><span class="p">,</span> <span class="mi">10</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_date</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="k">lambda</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kw</span><span class="p">:</span> <span class="n">date</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kw</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">date</span><span class="o">.</span><span class="n">today</span><span class="p">()</span> <span class="o">==</span> <span class="n">date</span><span class="p">(</span><span class="mi">2010</span><span class="p">,</span> <span class="mi">10</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">date</span><span class="p">(</span><span class="mi">2009</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span> <span class="o">==</span> <span class="n">date</span><span class="p">(</span><span class="mi">2009</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">8</span><span class="p">)</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>Note that we don&#8217;t patch <cite>datetime.date</cite> globally, we patch <cite>date</cite> in the
+module that <em>uses</em> it. See <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
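+<p>For illustration, here is what a hypothetical <cite>mymodule</cite> under test
+might look like (the module name and its contents are assumptions, not part of
+the example above): because it binds <cite>date</cite> at module level via an
+import, the name <cite>mymodule.date</cite> is exactly what <cite>patch</cite>
+replaces:</p>
+<div class="highlight-python"><div class="highlight"><pre># mymodule.py (hypothetical module under test)
+from datetime import date
+
+def days_old(d):
+    # looks up date.today() through the module-level name,
+    # which is what patch(&#39;mymodule.date&#39;) swaps out
+    return (date.today() - d).days
+</pre></div>
+</div>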
+<p>When <cite>date.today()</cite> is called a known date is returned, but calls to the
+<cite>date(...)</cite> constructor still return normal dates. Without this you can find
+yourself having to calculate an expected result using exactly the same
+algorithm as the code under test, which is a classic testing anti-pattern.</p>
+<p>Calls to the date constructor are recorded in the <cite>mock_date</cite> attributes
+(<cite>call_count</cite> and friends) which may also be useful for your tests.</p>
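+<p>Continuing the patched block above, a quick sketch of that recording for the
+single constructor call made there:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock_date.call_count
+1
+&gt;&gt;&gt; mock_date.call_args
+call(2009, 6, 8)
+</pre></div>
+</div>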
+<p>An alternative way of dealing with mocking dates, or other builtin classes,
+is discussed in <a class="reference external" href="http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/">this blog entry</a>.</p>
+</div>
+<div class="section" id="mocking-a-generator-method">
+<h2>Mocking a Generator Method<a class="headerlink" href="#mocking-a-generator-method" title="Permalink to this headline">¶</a></h2>
+<p>A Python generator is a function or method that uses the <a class="reference external" href="http://docs.python.org/reference/simple_stmts.html#the-yield-statement">yield statement</a> to
+return a series of values when iterated over <a class="footnote-reference" href="#id3" id="id2">[1]</a>.</p>
+<p>A generator method / function is called to return the generator object. It is
+the generator object that is then iterated over. The protocol method for
+iteration is <a class="reference external" href="http://docs.python.org/library/stdtypes.html#container.__iter__">__iter__</a>, so we can
+mock this using a <cite>MagicMock</cite>.</p>
+<p>Here&#8217;s an example class with an &#8220;iter&#8221; method implemented as a generator:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">iter</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">]:</span>
+<span class="gp">... </span> <span class="k">yield</span> <span class="n">i</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">foo</span><span class="o">.</span><span class="n">iter</span><span class="p">())</span>
+<span class="go">[1, 2, 3]</span>
+</pre></div>
+</div>
+<p>How would we mock this class, and in particular its &#8220;iter&#8221; method?</p>
+<p>To configure the values returned from the iteration (implicit in the call to
+<cite>list</cite>), we need to configure the object returned by the call to <cite>foo.iter()</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">iter</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock_foo</span><span class="o">.</span><span class="n">iter</span><span class="p">())</span>
+<span class="go">[1, 2, 3]</span>
+</pre></div>
+</div>
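+<p>Since <cite>MagicMock</cite> supports the magic methods, an equivalent sketch
+is to configure <cite>__iter__</cite> on the mock returned by <cite>iter()</cite>
+directly:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock_foo = MagicMock()
+&gt;&gt;&gt; mock_foo.iter.return_value.__iter__.return_value = iter([1, 2, 3])
+&gt;&gt;&gt; list(mock_foo.iter())
+[1, 2, 3]
+</pre></div>
+</div>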
+<table class="docutils footnote" frame="void" id="id3" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id2">[1]</a></td><td>There are also generator expressions and more <a class="reference external" href="http://www.dabeaz.com/coroutines/index.html">advanced uses</a> of generators, but we aren&#8217;t
+concerned about them here. A very good introduction to generators and how
+powerful they are is: <a class="reference external" href="http://www.dabeaz.com/generators/">Generator Tricks for Systems Programmers</a>.</td></tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="applying-the-same-patch-to-every-test-method">
+<h2>Applying the same patch to every test method<a class="headerlink" href="#applying-the-same-patch-to-every-test-method" title="Permalink to this headline">¶</a></h2>
+<p>If you want several patches in place for multiple test methods the obvious way
+is to apply the patch decorators to every method. This can feel like unnecessary
+repetition. For Python 2.6 or more recent you can use <cite>patch</cite> (in all its
+various forms) as a class decorator. This applies the patches to all test
+methods on the class. A test method is any method whose name starts with
+<cite>test</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;mymodule.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_one</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockSomeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">SomeClass</span> <span class="ow">is</span> <span class="n">MockSomeClass</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_two</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockSomeClass</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">SomeClass</span> <span class="ow">is</span> <span class="n">MockSomeClass</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">not_a_test</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;something&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_one&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_one</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_two&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_two</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_two&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">not_a_test</span><span class="p">()</span>
+<span class="go">&#39;something&#39;</span>
+</pre></div>
+</div>
+<p>An alternative way of managing patches is to use the <a class="reference internal" href="patch.html#start-and-stop"><em>patch methods: start and stop</em></a>.
+These allow you to move the patching into your <cite>setUp</cite> and <cite>tearDown</cite> methods.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">foo</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">tearDown</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If you use this technique you must ensure that the patching is &#8220;undone&#8221; by
+calling <cite>stop</cite>. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> cleanup functions make this simpler:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">mymodule</span><span class="o">.</span><span class="n">foo</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">mock_foo</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-unbound-methods">
+<h2>Mocking Unbound Methods<a class="headerlink" href="#mocking-unbound-methods" title="Permalink to this headline">¶</a></h2>
+<p>Whilst writing tests today I needed to patch an <em>unbound method</em> (patching the
+method on the class rather than on the instance). I needed self to be passed
+in as the first argument because I want to make asserts about which objects
+were calling this particular method. The issue is that you can&#8217;t patch with a
+mock for this, because if you replace an unbound method with a mock it doesn&#8217;t
+become a bound method when fetched from the instance, and so it doesn&#8217;t get
+self passed in. The workaround is to patch the unbound method with a real
+function instead. The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator makes it so simple to
+patch out methods with a mock that having to create a real function becomes a
+nuisance.</p>
+<p>If you pass <cite>autospec=True</cite> to patch then it does the patching with a
+<em>real</em> function object. This function object has the same signature as the one
+it is replacing, but delegates to a mock under the hood. You still get your
+mock auto-created in exactly the same way as before. What it means, though, is
+that if you use it to patch out an unbound method on a class the mocked
+function will be turned into a bound method if it is fetched from an instance.
+It will have <cite>self</cite> passed in as the first argument, which is exactly what I
+wanted:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">Foo</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">autospec</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_foo</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">... </span> <span class="n">foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">foo</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&#39;foo&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="n">foo</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>If we don&#8217;t use <cite>autospec=True</cite> then the unbound method is patched out
+with a Mock instance instead, and isn&#8217;t called with <cite>self</cite>.</p>
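+<p>A short sketch of that contrast, reusing the <cite>Foo</cite> class from
+above: the mock is called, but records no arguments at all:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; with patch.object(Foo, &#39;foo&#39;) as mock_foo:
+...     Foo().foo()
+...
+&lt;MagicMock name=&#39;foo()&#39; id=&#39;...&#39;&gt;
+&gt;&gt;&gt; mock_foo.assert_called_once_with()
+</pre></div>
+</div>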
+</div>
+<div class="section" id="checking-multiple-calls-with-mock">
+<h2>Checking multiple calls with mock<a class="headerlink" href="#checking-multiple-calls-with-mock" title="Permalink to this headline">¶</a></h2>
+<p>mock has a nice API for making assertions about how your mock objects are used.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>If your mock is only being called once, you can use the
+<tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt> method that also asserts that the
+<tt class="xref py py-attr docutils literal"><span class="pre">call_count</span></tt> is one.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">foo_bar</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;baz&#39;</span><span class="p">,</span> <span class="n">spam</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError</span>: <span class="n">Expected to be called once. Called 2 times.</span>
+</pre></div>
+</div>
+<p>Both <cite>assert_called_with</cite> and <cite>assert_called_once_with</cite> make assertions about
+the <em>most recent</em> call. If your mock is going to be called several times, and
+you want to make assertions about <em>all</em> those calls you can use
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(1, 2, 3), call(4, 5, 6), call()]</span>
+</pre></div>
+</div>
+<p>The <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> helper makes it easy to make assertions about these calls. You
+can build up a list of expected calls and compare it to <cite>call_args_list</cite>. This
+looks remarkably similar to the repr of the <cite>call_args_list</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">),</span> <span class="n">call</span><span class="p">()]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="coping-with-mutable-arguments">
+<h2>Coping with mutable arguments<a class="headerlink" href="#coping-with-mutable-arguments" title="Permalink to this headline">¶</a></h2>
+<p>Another situation that is rare, but can bite you, is when your mock is called with
+mutable arguments. <cite>call_args</cite> and <cite>call_args_list</cite> store <em>references</em> to the
+arguments. If the arguments are mutated by the code under test then you can no
+longer make assertions about what the values were when the mock was called.</p>
+<p>Here&#8217;s some example code that shows the problem. Imagine the following functions
+defined in &#8216;mymodule&#8217;:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="k">def</span> <span class="nf">frob</span><span class="p">(</span><span class="n">val</span><span class="p">):</span>
+ <span class="k">pass</span>
+
+<span class="k">def</span> <span class="nf">grob</span><span class="p">(</span><span class="n">val</span><span class="p">):</span>
+ <span class="s">&quot;First frob and then clear val&quot;</span>
+ <span class="n">frob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+ <span class="n">val</span><span class="o">.</span><span class="n">clear</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When we try to test that <cite>grob</cite> calls <cite>frob</cite> with the correct argument,
+look what happens:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.frob&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_frob</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">val</span> <span class="o">=</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">grob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">val</span>
+<span class="go">set([])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_frob</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">((set([6]),), {})</span>
+<span class="go">Called with: ((set([]),), {})</span>
+</pre></div>
+</div>
+<p>One possibility would be for mock to copy the arguments you pass in. This
+could then cause problems if you do assertions that rely on object identity
+for equality.</p>
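+<p>A quick illustrative sketch of why: because mock stores <em>references</em>,
+identity-based assertions like the one below pass; if mock deep-copied its
+arguments they would fail:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; obj = object()
+&gt;&gt;&gt; mock = Mock(return_value=None)
+&gt;&gt;&gt; mock(obj)
+&gt;&gt;&gt; # call_args holds the very same object, not a copy of it
+&gt;&gt;&gt; mock.call_args[0][0] is obj
+True</pre>
+</div>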
+<p>Here&#8217;s one solution that uses the <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>
+functionality. If you provide a <cite>side_effect</cite> function for a mock then
+<cite>side_effect</cite> will be called with the same args as the mock. This gives us an
+opportunity to copy the arguments and store them for later assertions. In this
+example I&#8217;m using <em>another</em> mock to store the arguments so that I can use the
+mock methods for doing the assertion. Again a helper function sets this up for
+me.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">copy</span> <span class="kn">import</span> <span class="n">deepcopy</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">Mock</span><span class="p">,</span> <span class="n">patch</span><span class="p">,</span> <span class="n">DEFAULT</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">copy_call_args</span><span class="p">(</span><span class="n">mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">new_mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">args</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">kwargs</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">new_mock</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">... </span> <span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">new_mock</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.frob&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_frob</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">new_mock</span> <span class="o">=</span> <span class="n">copy_call_args</span><span class="p">(</span><span class="n">mock_frob</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">val</span> <span class="o">=</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">grob</span><span class="p">(</span><span class="n">val</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(set([6]))</span>
+</pre></div>
+</div>
+<p><cite>copy_call_args</cite> is called with the mock that will be called. It returns a new
+mock that we do the assertion on. The <cite>side_effect</cite> function makes a copy of
+the args and calls our <cite>new_mock</cite> with the copy.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>If your mock is only going to be used once, there is an easier way of
+checking arguments at the point they are called. You can simply do the
+checking inside a <cite>side_effect</cite> function.</p>
+<div class="last highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">arg</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">arg</span> <span class="o">==</span> <span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">])</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="nb">set</span><span class="p">([</span><span class="mi">6</span><span class="p">]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="nb">set</span><span class="p">())</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError</span>
+</pre></div>
+</div>
+</div>
+<p>An alternative approach is to create a subclass of <cite>Mock</cite> or <cite>MagicMock</cite> that
+copies (using <a class="reference external" href="http://docs.python.org/library/copy.html#copy.deepcopy">copy.deepcopy</a>) the arguments.
+Here&#8217;s an example implementation:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">copy</span> <span class="kn">import</span> <span class="n">deepcopy</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">CopyingMock</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__call__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">args</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">kwargs</span> <span class="o">=</span> <span class="n">deepcopy</span><span class="p">(</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">CopyingMock</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">__call__</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span> <span class="o">=</span> <span class="n">CopyingMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">arg</span> <span class="o">=</span> <span class="nb">set</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="p">(</span><span class="n">arg</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">arg</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="nb">set</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">arg</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected call</span>: <span class="n">mock(set([1]))</span>
+<span class="go">Actual call: mock(set([]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">c</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;CopyingMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+<p>When you subclass <cite>Mock</cite> or <cite>MagicMock</cite>, all dynamically created attributes
+and the <cite>return_value</cite> will use your subclass automatically. That means all
+children of a <cite>CopyingMock</cite> will also have the type <cite>CopyingMock</cite>.</p>
+</div>
+<div class="section" id="raising-exceptions-on-attribute-access">
+<h2>Raising exceptions on attribute access<a class="headerlink" href="#raising-exceptions-on-attribute-access" title="Permalink to this headline">¶</a></h2>
+<p>You can use <a class="reference internal" href="mock.html#mock.PropertyMock" title="mock.PropertyMock"><tt class="xref py py-class docutils literal"><span class="pre">PropertyMock</span></tt></a> to mimic the behaviour of properties. This
+includes raising exceptions when an attribute is accessed.</p>
+<p>Here&#8217;s an example raising a <cite>ValueError</cite> when the &#8216;foo&#8217; attribute is accessed:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; m = MagicMock()
+&gt;&gt;&gt; p = PropertyMock(side_effect=ValueError)
+&gt;&gt;&gt; type(m).foo = p
+&gt;&gt;&gt; m.foo
+Traceback (most recent call last):
+  ...
+ValueError</pre>
+</div>
+<p>Because every mock object has its own type, a new subclass of whichever mock
+class you&#8217;re using, all mock objects are isolated from each other. You can
+safely attach properties (or other descriptors, or anything else, in fact)
+to <cite>type(mock)</cite> without affecting other mock objects.</p>
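+<p>A short sketch of that isolation (the attribute name is illustrative):</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; m = MagicMock()
+&gt;&gt;&gt; other = MagicMock()
+&gt;&gt;&gt; type(m).foo = PropertyMock(return_value='patched')
+&gt;&gt;&gt; m.foo
+'patched'
+&gt;&gt;&gt; # other has its own generated type, so it is unaffected
+&gt;&gt;&gt; other.foo
+&lt;MagicMock name='mock.foo' id='...'&gt;</pre>
+</div>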
+</div>
+<div class="section" id="multiple-calls-with-different-effects">
+<h2>Multiple calls with different effects<a class="headerlink" href="#multiple-calls-with-different-effects" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In mock 1.0 the handling of iterable <cite>side_effect</cite> was changed. Any
+exceptions in the iterable will be raised instead of returned.</p>
+</div>
+<p>Handling code that needs to behave differently on subsequent calls during the
+test can be tricky. For example you may have a function that needs to raise
+an exception the first time it is called but return a response on the second
+call (testing retry behaviour).</p>
+<p>One approach is to use a <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt> function that replaces itself. The
+first time it is called the <cite>side_effect</cite> sets a new <cite>side_effect</cite> that will
+be used for the second call. It then raises an exception:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">second_call</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;response&#39;</span>
+<span class="gp">... </span> <span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">second_call</span>
+<span class="gp">... </span> <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;boom&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;first&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">Exception</span>: <span class="n">boom</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+<span class="go">&#39;response&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Another perfectly valid way would be to pop return values from a list. If the
+return value is an exception, raise it instead of returning it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">returns</span> <span class="o">=</span> <span class="p">[</span><span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;boom&#39;</span><span class="p">),</span> <span class="s">&#39;response&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">result</span> <span class="o">=</span> <span class="n">returns</span><span class="o">.</span><span class="n">pop</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">result</span><span class="p">,</span> <span class="ne">Exception</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">raise</span> <span class="n">result</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">result</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;first&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">Exception</span>: <span class="n">boom</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+<span class="go">&#39;response&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;second&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Which approach you prefer is a matter of taste. The first approach is actually
+a line shorter, but the second is arguably more readable.</p>
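+<p>In mock 1.0 and later (see the note above) you can skip the helper function
+entirely and pass the iterable straight to <cite>side_effect</cite>; exceptions in
+it are raised and other values are returned, as a quick sketch shows:</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; mock = Mock(side_effect=[Exception('boom'), 'response'])
+&gt;&gt;&gt; mock('first')
+Traceback (most recent call last):
+  ...
+Exception: boom
+&gt;&gt;&gt; mock('second')
+'response'</pre>
+</div>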
+</div>
+<div class="section" id="nesting-patches">
+<h2>Nesting Patches<a class="headerlink" href="#nesting-patches" title="Permalink to this headline">¶</a></h2>
+<p>Using patch as a context manager is nice, but if you do multiple patches you
+can end up with nested with statements indenting further and further to the
+right:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Bar&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_bar</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Spam&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_spam</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">mock_foo</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Bar</span> <span class="ow">is</span> <span class="n">mock_bar</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Spam</span> <span class="ow">is</span> <span class="n">mock_spam</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_foo</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>With <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> <cite>cleanup</cite> functions and the <a class="reference internal" href="patch.html#start-and-stop"><em>patch methods: start and stop</em></a> we can
+achieve the same effect without the nested indentation. A simple helper
+method, <cite>create_patch</cite>, puts the patch in place and returns the created mock
+for us:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">create_patch</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">thing</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">mock_foo</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_bar</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Bar&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_spam</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">create_patch</span><span class="p">(</span><span class="s">&#39;mymodule.Spam&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">mock_foo</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Bar</span> <span class="ow">is</span> <span class="n">mock_bar</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Spam</span> <span class="ow">is</span> <span class="n">mock_spam</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_foo&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">mymodule</span><span class="o">.</span><span class="n">Foo</span> <span class="ow">is</span> <span class="n">original</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-a-dictionary-with-magicmock">
+<h2>Mocking a dictionary with MagicMock<a class="headerlink" href="#mocking-a-dictionary-with-magicmock" title="Permalink to this headline">¶</a></h2>
+<p>You may want to mock a dictionary, or other container object, recording all
+access to it whilst having it still behave like a dictionary.</p>
+<p>We can do this with <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>, which will behave like a dictionary,
+and using <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-data docutils literal"><span class="pre">side_effect</span></tt></a> to delegate dictionary access to a real
+underlying dictionary that is under our control.</p>
+<p>When the <cite>__getitem__</cite> and <cite>__setitem__</cite> methods of our <cite>MagicMock</cite> are called
+(normal dictionary access) then <cite>side_effect</cite> is called with the key (and in
+the case of <cite>__setitem__</cite> the value too). We can also control what is returned.</p>
+<p>After the <cite>MagicMock</cite> has been used we can use attributes like
+<a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-data docutils literal"><span class="pre">call_args_list</span></tt></a> to assert about how the dictionary was used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">my_dict</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;a&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">getitem</span><span class="p">(</span><span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">my_dict</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">setitem</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">val</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">my_dict</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">val</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">getitem</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">setitem</span>
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>An alternative to using <cite>MagicMock</cite> is to use <cite>Mock</cite> and <em>only</em> provide
+the magic methods you specifically want:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">getitem</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">setitem</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>A <em>third</em> option is to use <cite>MagicMock</cite> but pass in <cite>dict</cite> as the <cite>spec</cite>
+(or <cite>spec_set</cite>) argument so that the <cite>MagicMock</cite> created only has
+dictionary magic methods available:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">spec_set</span><span class="o">=</span><span class="nb">dict</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">getitem</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">setitem</span>
+</pre></div>
+</div>
+</div>
+<p>With these side effect functions in place, the <cite>mock</cite> will behave like a normal
+dictionary while recording the access. It even raises a <cite>KeyError</cite> if you try
+to access a key that doesn&#8217;t exist.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;a&#39;</span><span class="p">]</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;c&#39;</span><span class="p">]</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;d&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;b&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;b&#39;</span><span class="p">]</span>
+<span class="go">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="s">&#39;d&#39;</span><span class="p">]</span>
+<span class="go">&#39;eggs&#39;</span>
+</pre></div>
+</div>
+<p>After it has been used you can make assertions about the access using the normal
+mock methods and attributes:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(&#39;a&#39;), call(&#39;c&#39;), call(&#39;d&#39;), call(&#39;b&#39;), call(&#39;d&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(&#39;b&#39;, &#39;fish&#39;), call(&#39;d&#39;, &#39;eggs&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">my_dict</span>
+<span class="go">{&#39;a&#39;: 1, &#39;c&#39;: 3, &#39;b&#39;: &#39;fish&#39;, &#39;d&#39;: &#39;eggs&#39;}</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-subclasses-and-their-attributes">
+<h2>Mock subclasses and their attributes<a class="headerlink" href="#mock-subclasses-and-their-attributes" title="Permalink to this headline">¶</a></h2>
+<p>There are various reasons why you might want to subclass <cite>Mock</cite>. One reason
+might be to add helper methods. Here&#8217;s a silly example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyMock</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">has_been_called</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span> <span class="o">=</span> <span class="n">MyMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span>
+<span class="go">&lt;MyMock id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>The standard behaviour for <cite>Mock</cite> instances is that attributes and the return
+value mocks are of the same type as the mock they are accessed on. This ensures
+that <cite>Mock</cite> attributes are <cite>Mocks</cite> and <cite>MagicMock</cite> attributes are <cite>MagicMocks</cite>
+<a class="footnote-reference" href="#id5" id="id4">[2]</a>. So if you&#8217;re subclassing to add helper methods then they&#8217;ll also be
+available on the attributes and return value mock of instances of your
+subclass.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;MyMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="go">&lt;MyMock name=&#39;mock.foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">has_been_called</span><span class="p">()</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Sometimes this is inconvenient. For example, <a class="reference external" href="https://code.google.com/p/mock/issues/detail?id=105">one user</a> is subclassing mock to
+create a <a class="reference external" href="http://twistedmatrix.com/documents/11.0.0/api/twisted.python.components.html">Twisted adaptor</a>.
+Having this applied to attributes too actually causes errors.</p>
+<p><cite>Mock</cite> (in all its flavours) uses a method called <cite>_get_child_mock</cite> to create
+these &#8220;sub-mocks&#8221; for attributes and return values. You can prevent your
+subclass being used for attributes by overriding this method. The signature is
+that it takes arbitrary keyword arguments (<cite>**kwargs</cite>) which are then passed
+onto the mock constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Subclass</span><span class="p">(</span><span class="n">MagicMock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">_get_child_mock</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">MagicMock</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span> <span class="o">=</span> <span class="n">Subclass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">&lt;MagicMock name=&#39;mock.foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="p">,</span> <span class="n">Subclass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="o">.</span><span class="n">foo</span><span class="p">,</span> <span class="n">Subclass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mymock</span><span class="p">(),</span> <span class="n">Subclass</span><span class="p">)</span>
+</pre></div>
+</div>
+<table class="docutils footnote" frame="void" id="id5" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id4">[2]</a></td><td>An exception to this rule are the non-callable mocks. Attributes use the
+callable variant because otherwise non-callable mocks couldn&#8217;t have callable
+methods.</td></tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="mocking-imports-with-patch-dict">
+<h2>Mocking imports with patch.dict<a class="headerlink" href="#mocking-imports-with-patch-dict" title="Permalink to this headline">¶</a></h2>
+<p>One situation where mocking can be hard is where you have a local import inside
+a function. These are harder to mock because they aren&#8217;t using an object from
+the module namespace that we can patch out.</p>
+<p>Generally local imports are to be avoided. They are sometimes done to prevent
+circular dependencies, for which there is <em>usually</em> a much better way to solve
+the problem (refactor the code), or to prevent &#8220;up front costs&#8221; by delaying the
+import. This can also be solved in better ways than an unconditional local
+import (store the module as a class or module attribute and only do the import
+on first use, as sketched below).</p>
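+<p>A minimal sketch of that &#8220;import on first use&#8221; pattern, using the
+hypothetical <cite>fooble</cite> module from the examples below:</p>
+<div class="highlight-python"><pre>class Thing(object):
+    _fooble = None
+
+    def frob(self):
+        # do the import once, then cache the module as a class attribute
+        if Thing._fooble is None:
+            import fooble
+            Thing._fooble = fooble
+        return Thing._fooble.blob()</pre>
+</div>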
+<p>That aside, there is a way to use <cite>mock</cite> to affect the results of an import.
+Importing fetches an <em>object</em> from the <cite>sys.modules</cite> dictionary. Note that it
+fetches an <em>object</em>, which need not be a module. Importing a module for the
+first time results in a module object being put in <cite>sys.modules</cite>, so usually
+when you import something you get a module back. This need not be the case,
+however.</p>
+<p>This means you can use <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> to <em>temporarily</em> put a mock in place
+in <cite>sys.modules</cite>. Any imports whilst this patch is active will fetch the mock.
+When the patch is complete (the decorated function exits, the with statement
+body is complete or <cite>patcher.stop()</cite> is called) then whatever was there
+previously will be restored safely.</p>
+<p>Here&#8217;s an example that mocks out the &#8216;fooble&#8217; module.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;fooble&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="kn">import</span> <span class="nn">fooble</span>
+<span class="gp">... </span> <span class="n">fooble</span><span class="o">.</span><span class="n">blob</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.blob()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="s">&#39;fooble&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">sys</span><span class="o">.</span><span class="n">modules</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">blob</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>As you can see the <cite>import fooble</cite> succeeds, but on exit there is no &#8216;fooble&#8217;
+left in <cite>sys.modules</cite>.</p>
+<p>This also works for the <cite>from module import name</cite> form:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;fooble&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">fooble</span> <span class="kn">import</span> <span class="n">blob</span>
+<span class="gp">... </span> <span class="n">blob</span><span class="o">.</span><span class="n">blip</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.blob.blip()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">blob</span><span class="o">.</span><span class="n">blip</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>With slightly more work you can also mock package imports:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">modules</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;package&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="p">,</span> <span class="s">&#39;package.module&#39;</span><span class="p">:</span> <span class="n">mock</span><span class="o">.</span><span class="n">module</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="n">modules</span><span class="p">):</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">fooble</span>
+<span class="gp">... </span> <span class="n">fooble</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;Mock name=&#39;mock.module.fooble()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">fooble</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="tracking-order-of-calls-and-less-verbose-call-assertions">
+<h2>Tracking order of calls and less verbose call assertions<a class="headerlink" href="#tracking-order-of-calls-and-less-verbose-call-assertions" title="Permalink to this headline">¶</a></h2>
+<p>The <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class allows you to track the <em>order</em> of method calls on
+your mock objects through the <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> attribute. This
+doesn&#8217;t allow you to track the order of calls between separate mock objects;
+however, we can use <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> to achieve the same effect.</p>
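+<p>For a single mock object, <cite>method_calls</cite> on its own records that order
+(a brief sketch; the method names are illustrative):</p>
+<div class="highlight-python"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.first_method()
+&lt;Mock name='mock.first_method()' id='...'&gt;
+&gt;&gt;&gt; mock.second_method()
+&lt;Mock name='mock.second_method()' id='...'&gt;
+&gt;&gt;&gt; mock.method_calls
+[call.first_method(), call.second_method()]</pre>
+</div>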
+<p>Because mocks track calls to child mocks in <cite>mock_calls</cite>, and accessing an
+arbitrary attribute of a mock creates a child mock, we can create our separate
+mocks from a parent one. Calls to those child mocks will then all be recorded,
+in order, in the <cite>mock_calls</cite> of the parent:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span> <span class="o">=</span> <span class="n">manager</span><span class="o">.</span><span class="n">foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_bar</span> <span class="o">=</span> <span class="n">manager</span><span class="o">.</span><span class="n">bar</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">something</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.foo.something()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_bar</span><span class="o">.</span><span class="n">other</span><span class="o">.</span><span class="n">thing</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.bar.other.thing()&#39; id=&#39;...&#39;&gt;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.foo.something(), call.bar.other.thing()]</span>
+</pre></div>
+</div>
+<p>We can then assert about the calls, including the order, by comparing with
+the <cite>mock_calls</cite> attribute on the manager mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected_calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">foo</span><span class="o">.</span><span class="n">something</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">bar</span><span class="o">.</span><span class="n">other</span><span class="o">.</span><span class="n">thing</span><span class="p">()]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected_calls</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>If <cite>patch</cite> is creating, and putting in place, your mocks then you can attach
+them to a manager mock using the <a class="reference internal" href="mock.html#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a> method. After
+attaching, calls will be recorded in <cite>mock_calls</cite> of the manager.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Class1&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass1</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;mymodule.Class2&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass2</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="s">&#39;MockClass1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">manager</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">MockClass2</span><span class="p">,</span> <span class="s">&#39;MockClass2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">MockClass1</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">MockClass2</span><span class="p">()</span><span class="o">.</span><span class="n">bar</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="go">&lt;MagicMock name=&#39;mock.MockClass1().foo()&#39; id=&#39;...&#39;&gt;</span>
+<span class="go">&lt;MagicMock name=&#39;mock.MockClass2().bar()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">manager</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.MockClass1(),</span>
+<span class="go"> call.MockClass1().foo(),</span>
+<span class="go"> call.MockClass2(),</span>
+<span class="go"> call.MockClass2().bar()]</span>
+</pre></div>
+</div>
+<p>If many calls have been made, but you&#8217;re only interested in a particular
+sequence of them, then an alternative is to use the
+<a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a> method. This takes a list of calls (constructed
+with the <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> object). If that sequence of calls is in
+<a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> then the assert succeeds.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span><span class="o">.</span><span class="n">foo</span><span class="p">()</span><span class="o">.</span><span class="n">bar</span><span class="p">()</span><span class="o">.</span><span class="n">baz</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock().foo().bar().baz()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">one</span><span class="p">()</span><span class="o">.</span><span class="n">two</span><span class="p">()</span><span class="o">.</span><span class="n">three</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.one().two().three()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">one</span><span class="p">()</span><span class="o">.</span><span class="n">two</span><span class="p">()</span><span class="o">.</span><span class="n">three</span><span class="p">()</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Even though the chained call <cite>m.one().two().three()</cite> isn&#8217;t the only call
+that has been made to the mock, the assert still succeeds.</p>
+<p>Sometimes a mock may have several calls made to it, and you are only interested
+in asserting about <em>some</em> of those calls. You may not even care about the
+order. In this case you can pass <cite>any_order=True</cite> to <cite>assert_has_calls</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">two</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">seven</span><span class="p">(</span><span class="mi">7</span><span class="p">),</span> <span class="n">m</span><span class="o">.</span><span class="n">fifty</span><span class="p">(</span><span class="s">&#39;50&#39;</span><span class="p">)</span>
+<span class="go">(...)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">fifty</span><span class="p">(</span><span class="s">&#39;50&#39;</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">seven</span><span class="p">(</span><span class="mi">7</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">,</span> <span class="n">any_order</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="more-complex-argument-matching">
+<h2>More complex argument matching<a class="headerlink" href="#more-complex-argument-matching" title="Permalink to this headline">¶</a></h2>
+<p>Using the same basic concept as <cite>ANY</cite>, we can implement matchers to do
+more complex assertions on objects used as arguments to mocks.</p>
+<p>Suppose we expect some object to be passed to a mock, and that the object
+by default compares equal only on object identity (which is the Python default
+for user-defined classes). To use <a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> we would need to pass
+in the exact same object. If we are only interested in some of the attributes
+of this object then we can create a matcher that will check these attributes
+for us.</p>
+<p>You can see in this example how a &#8216;standard&#8217; call to <cite>assert_called_with</cite> isn&#8217;t
+sufficient:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">a</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">b</span> <span class="o">=</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">call(&lt;__main__.Foo object at 0x...&gt;)</span>
+<span class="go">Actual call: call(&lt;__main__.Foo object at 0x...&gt;)</span>
+</pre></div>
+</div>
+<p>A comparison function for our <cite>Foo</cite> class might look something like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">compare</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="ow">not</span> <span class="nb">type</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="o">==</span> <span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">a</span> <span class="o">!=</span> <span class="n">other</span><span class="o">.</span><span class="n">a</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">b</span> <span class="o">!=</span> <span class="n">other</span><span class="o">.</span><span class="n">b</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">False</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">True</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>And a matcher object that can use comparison functions like this for its
+equality operation would look something like this:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Matcher</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">compare</span><span class="p">,</span> <span class="n">some_obj</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">compare</span> <span class="o">=</span> <span class="n">compare</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">some_obj</span> <span class="o">=</span> <span class="n">some_obj</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__eq__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">compare</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">some_obj</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>Putting all this together:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">match_foo</span> <span class="o">=</span> <span class="n">Matcher</span><span class="p">(</span><span class="n">compare</span><span class="p">,</span> <span class="n">Foo</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">match_foo</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>The <cite>Matcher</cite> is instantiated with our compare function and the <cite>Foo</cite> object
+we want to compare against. In <cite>assert_called_with</cite> the <cite>Matcher</cite> equality
+method will be called, which compares the object the mock was called with
+against the one we created our matcher with. If they match then
+<cite>assert_called_with</cite> passes, and if they don&#8217;t an <cite>AssertionError</cite> is raised:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">match_wrong</span> <span class="o">=</span> <span class="n">Matcher</span><span class="p">(</span><span class="n">compare</span><span class="p">,</span> <span class="n">Foo</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">match_wrong</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AssertionError: Expected</span>: <span class="n">((&lt;Matcher object at 0x...&gt;,), {})</span>
+<span class="go">Called with: ((&lt;Foo object at 0x...&gt;,), {})</span>
+</pre></div>
+</div>
+<p>With a bit of tweaking you could have the comparison function raise the
+<cite>AssertionError</cite> directly and provide a more useful failure message.</p>
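+<p>A minimal sketch of that tweak, reusing the <cite>Foo</cite>, <cite>Matcher</cite> and <cite>mock</cite>
+objects from above (the message format here is just an illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; def compare(self, other):
+...     if type(self) != type(other):
+...         raise AssertionError('expected a %s' % type(self).__name__)
+...     if (self.a, self.b) != (other.a, other.b):
+...         raise AssertionError('expected (%r, %r), got (%r, %r)' %
+...                              (self.a, self.b, other.a, other.b))
+...     return True
+...
+&gt;&gt;&gt; mock.assert_called_with(Matcher(compare, Foo(3, 4)))
+Traceback (most recent call last):
+  ...
+AssertionError: expected (3, 4), got (1, 2)
+</pre></div>
+</div>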
+<p>As of version 1.5, the Python testing library <a class="reference external" href="http://pypi.python.org/pypi/PyHamcrest">PyHamcrest</a> provides similar
+functionality that may be useful here, in the form of its equality matcher
+(<a class="reference external" href="http://packages.python.org/PyHamcrest/integration.html#hamcrest.library.integration.match_equality">hamcrest.library.integration.match_equality</a>).</p>
+</div>
+<div class="section" id="less-verbose-configuration-of-mock-objects">
+<h2>Less verbose configuration of mock objects<a class="headerlink" href="#less-verbose-configuration-of-mock-objects" title="Permalink to this headline">¶</a></h2>
+<p>This recipe, for easier configuration of mock objects, is now part of <cite>Mock</cite>.
+See the <a class="reference internal" href="mock.html#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> method.</p>
+</div>
+<div class="section" id="matching-any-argument-in-assertions">
+<h2>Matching any argument in assertions<a class="headerlink" href="#matching-any-argument-in-assertions" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built into mock. See <a class="reference internal" href="helpers.html#mock.ANY" title="mock.ANY"><tt class="xref py py-data docutils literal"><span class="pre">ANY</span></tt></a>.</p>
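+<p>For quick reference, <cite>ANY</cite> compares equal to anything, so it can stand in
+for arguments you don&#8217;t care about:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import ANY
+&gt;&gt;&gt; mock = Mock(return_value=None)
+&gt;&gt;&gt; mock('foo', bar=object())
+&gt;&gt;&gt; mock.assert_called_with('foo', bar=ANY)
+</pre></div>
+</div>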
+</div>
+<div class="section" id="mocking-properties">
+<h2>Mocking Properties<a class="headerlink" href="#mocking-properties" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built into mock. See <a class="reference internal" href="mock.html#mock.PropertyMock" title="mock.PropertyMock"><tt class="xref py py-class docutils literal"><span class="pre">PropertyMock</span></tt></a>.</p>
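+<p>For quick reference, a minimal sketch of patching a property with
+<cite>PropertyMock</cite> (the <cite>Foo</cite> class here is just an illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Foo(object):
+...     @property
+...     def foo(self):
+...         return 'something'
+...
+&gt;&gt;&gt; with patch('__main__.Foo.foo', new_callable=PropertyMock) as mock_foo:
+...     mock_foo.return_value = 'mockity-mock'
+...     this_foo = Foo()
+...     print this_foo.foo
+...
+mockity-mock
+</pre></div>
+</div>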
+</div>
+<div class="section" id="mocking-open">
+<h2>Mocking open<a class="headerlink" href="#mocking-open" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built into mock. See <a class="reference internal" href="helpers.html#mock.mock_open" title="mock.mock_open"><tt class="xref py py-func docutils literal"><span class="pre">mock_open()</span></tt></a>.</p>
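+<p>For quick reference, a minimal sketch of patching the built-in <cite>open</cite> with
+<cite>mock_open()</cite> and asserting on the file handle:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import mock_open
+&gt;&gt;&gt; m = mock_open()
+&gt;&gt;&gt; with patch('__main__.open', m, create=True):
+...     with open('foo', 'w') as h:
+...         h.write('some stuff')
+...
+&gt;&gt;&gt; m.assert_called_once_with('foo', 'w')
+&gt;&gt;&gt; m().write.assert_called_once_with('some stuff')
+</pre></div>
+</div>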
+</div>
+<div class="section" id="mocks-without-some-attributes">
+<h2>Mocks without some attributes<a class="headerlink" href="#mocks-without-some-attributes" title="Permalink to this headline">¶</a></h2>
+<p>This example is now built into mock. See <a class="reference internal" href="mock.html#deleting-attributes"><em>Deleting Attributes</em></a>.</p>
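+<p>For quick reference, deleting an attribute makes subsequent access raise an
+<cite>AttributeError</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; hasattr(mock, 'm')
+True
+&gt;&gt;&gt; del mock.m
+&gt;&gt;&gt; hasattr(mock, 'm')
+False
+&gt;&gt;&gt; del mock.f
+&gt;&gt;&gt; mock.f
+Traceback (most recent call last):
+  ...
+AttributeError: f
+</pre></div>
+</div>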
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Further Examples</a><ul>
+<li><a class="reference internal" href="#mocking-chained-calls">Mocking chained calls</a></li>
+<li><a class="reference internal" href="#partial-mocking">Partial mocking</a></li>
+<li><a class="reference internal" href="#mocking-a-generator-method">Mocking a Generator Method</a></li>
+<li><a class="reference internal" href="#applying-the-same-patch-to-every-test-method">Applying the same patch to every test method</a></li>
+<li><a class="reference internal" href="#mocking-unbound-methods">Mocking Unbound Methods</a></li>
+<li><a class="reference internal" href="#checking-multiple-calls-with-mock">Checking multiple calls with mock</a></li>
+<li><a class="reference internal" href="#coping-with-mutable-arguments">Coping with mutable arguments</a></li>
+<li><a class="reference internal" href="#raising-exceptions-on-attribute-access">Raising exceptions on attribute access</a></li>
+<li><a class="reference internal" href="#multiple-calls-with-different-effects">Multiple calls with different effects</a></li>
+<li><a class="reference internal" href="#nesting-patches">Nesting Patches</a></li>
+<li><a class="reference internal" href="#mocking-a-dictionary-with-magicmock">Mocking a dictionary with MagicMock</a></li>
+<li><a class="reference internal" href="#mock-subclasses-and-their-attributes">Mock subclasses and their attributes</a></li>
+<li><a class="reference internal" href="#mocking-imports-with-patch-dict">Mocking imports with patch.dict</a></li>
+<li><a class="reference internal" href="#tracking-order-of-calls-and-less-verbose-call-assertions">Tracking order of calls and less verbose call assertions</a></li>
+<li><a class="reference internal" href="#more-complex-argument-matching">More complex argument matching</a></li>
+<li><a class="reference internal" href="#less-verbose-configuration-of-mock-objects">Less verbose configuration of mock objects</a></li>
+<li><a class="reference internal" href="#matching-any-argument-in-assertions">Matching any argument in assertions</a></li>
+<li><a class="reference internal" href="#mocking-properties">Mocking Properties</a></li>
+<li><a class="reference internal" href="#mocking-open">Mocking open</a></li>
+<li><a class="reference internal" href="#mocks-without-some-attributes">Mocks without some attributes</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="previous chapter">Getting Started with Mock</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="compare.html"
+ title="next chapter">Mock Library Comparison</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/examples.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="compare.html" title="Mock Library Comparison"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/genindex.html b/python/mock-1.0.0/html/genindex.html
new file mode 100644
index 000000000..2d1acf0b4
--- /dev/null
+++ b/python/mock-1.0.0/html/genindex.html
@@ -0,0 +1,479 @@
+
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Index &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ accesskey="I">index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#_"><strong>_</strong></a>
+ | <a href="#A"><strong>A</strong></a>
+ | <a href="#C"><strong>C</strong></a>
+ | <a href="#D"><strong>D</strong></a>
+ | <a href="#E"><strong>E</strong></a>
+ | <a href="#F"><strong>F</strong></a>
+ | <a href="#G"><strong>G</strong></a>
+ | <a href="#H"><strong>H</strong></a>
+ | <a href="#I"><strong>I</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#N"><strong>N</strong></a>
+ | <a href="#O"><strong>O</strong></a>
+ | <a href="#P"><strong>P</strong></a>
+ | <a href="#R"><strong>R</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ | <a href="#T"><strong>T</strong></a>
+ | <a href="#U"><strong>U</strong></a>
+ | <a href="#W"><strong>W</strong></a>
+
+</div>
+<h2 id="_">_</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-5">__call__</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.__class__">__class__ (Mock attribute)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.__dir__">__dir__() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock._get_child_mock">_get_child_mock() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="A">A</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.ANY">ANY (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-8">articles</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_any_call">assert_any_call() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_called_once_with">assert_called_once_with() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.assert_called_with">assert_called_with() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.assert_has_calls">assert_has_calls() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.attach_mock">attach_mock() (Mock method)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="C">C</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.call">call() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_args">call_args (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_args_list">call_args_list (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.call_count">call_count (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.call.call_list">call_list() (call method)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.called">called (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#index-6">calling</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.configure_mock">configure_mock() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.create_autospec">create_autospec() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="D">D</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="sentinel.html#mock.DEFAULT">DEFAULT (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="E">E</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-5">easy_install</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="F">F</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="helpers.html#mock.FILTER_DIR">FILTER_DIR (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="G">G</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="getting-started.html#index-0">Getting Started</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="H">H</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-3">hg</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="I">I</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-1">installing</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-0">introduction</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="magicmock.html#mock.MagicMock">MagicMock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.method_calls">method_calls (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock">Mock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#module-mock">mock (module)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.mock_add_spec">mock_add_spec() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.Mock.mock_calls">mock_calls (Mock attribute)</a>
+ </dt>
+
+
+ <dt><a href="helpers.html#mock.mock_open">mock_open() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="N">N</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-3">name</a>
+ </dt>
+
+
+ <dt><a href="magicmock.html#mock.NonCallableMagicMock">NonCallableMagicMock (class in mock)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.NonCallableMock">NonCallableMock (class in mock)</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="O">O</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-12">older versions</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="P">P</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="patch.html#mock.patch">patch() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.dict">patch.dict() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.multiple">patch.multiple() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="patch.html#mock.patch.object">patch.object() (in module mock)</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="patch.html#mock.patch.stopall">patch.stopall() (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-4">pip</a>
+ </dt>
+
+
+ <dt><a href="mock.html#mock.PropertyMock">PropertyMock (class in mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-11">Python 3</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="R">R</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-7">references</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-2">repository</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#mock.Mock.reset_mock">reset_mock() (Mock method)</a>
+ </dt>
+
+
+ <dt><a href="mock.html#index-1">return_value</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="mock.html#mock.Mock.return_value">(Mock attribute)</a>
+ </dt>
+
+ </dl></dd>
+ </dl></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="sentinel.html#mock.sentinel">sentinel (in module mock)</a>
+ </dt>
+
+
+ <dt><a href="index.html#index-6">setuptools</a>
+ </dt>
+
+ </dl></td>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-0">side_effect</a>
+ </dt>
+
+ <dd><dl>
+
+ <dt><a href="mock.html#mock.Mock.side_effect">(Mock attribute)</a>
+ </dt>
+
+ </dl></dd>
+
+ <dt><a href="mock.html#index-4">spec</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="T">T</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-9">tests</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="U">U</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="index.html#index-10">unittest2</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+<h2 id="W">W</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+ <td style="width: 33%" valign="top"><dl>
+
+ <dt><a href="mock.html#index-2">wraps</a>
+ </dt>
+
+ </dl></td>
+</tr></table>
+
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+
+
+
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="#" title="General Index"
+ >index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/getting-started.html b/python/mock-1.0.0/html/getting-started.html
new file mode 100644
index 000000000..73d708ac9
--- /dev/null
+++ b/python/mock-1.0.0/html/getting-started.html
@@ -0,0 +1,510 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Getting Started with Mock &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Further Examples" href="examples.html" />
+ <link rel="prev" title="Mocking Magic Methods" href="magicmock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="getting-started-with-mock">
+<h1>Getting Started with Mock<a class="headerlink" href="#getting-started-with-mock" title="Permalink to this headline">¶</a></h1>
+<span class="target" id="getting-started"></span><span class="target" id="index-0"></span><div class="section" id="using-mock">
+<h2>Using Mock<a class="headerlink" href="#using-mock" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="mock-patching-methods">
+<h3>Mock Patching Methods<a class="headerlink" href="#mock-patching-methods" title="Permalink to this headline">¶</a></h3>
+<p>Common uses for <tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt> objects include:</p>
+<ul class="simple">
+<li>Patching methods</li>
+<li>Recording method calls on objects</li>
+</ul>
+<p>You might want to replace a method on an object to check that
+it is called with the correct arguments by another part of the system:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;method&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;method()&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+<p>Once our mock has been used (<cite>real.method</cite> in this example) it has methods
+and attributes that allow you to make assertions about how it has been used.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In most of these examples the <tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt> and <tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt> classes
+are interchangeable. As <cite>MagicMock</cite> is the more capable class, it makes a
+sensible default.</p>
+</div>
+<p>Once the mock has been called, its <tt class="xref py py-attr docutils literal"><span class="pre">called</span></tt> attribute is set to
+<cite>True</cite>. More importantly we can use the <tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt> or
+<tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt> method to check that it was called with
+the correct arguments.</p>
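+<p>For instance, the mock patched onto <cite>real.method</cite> in the first example
+above recorded its call, so we can check it directly:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; real.method.assert_called_with(3, 4, 5, key='value')
+</pre></div>
+</div>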
+<p>This example tests that calling <cite>ProductionClass().method</cite> results in a call to
+the <cite>something</cite> method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">MagicMock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">something</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">something</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">something</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mock-for-method-calls-on-an-object">
+<h3>Mock for Method Calls on an Object<a class="headerlink" href="#mock-for-method-calls-on-an-object" title="Permalink to this headline">¶</a></h3>
+<p>In the last example we patched a method directly on an object to check that it
+was called correctly. Another common use case is to pass an object into a
+method (or some part of the system under test) and then check that it is used
+in the correct way.</p>
+<p>The simple <cite>ProductionClass</cite> below has a <cite>closer</cite> method. If it is called with
+an object then it calls <cite>close</cite> on it.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">closer</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">something</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">something</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>So to test it we need to pass in an object with a <cite>close</cite> method and check
+that it was called correctly.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">closer</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">close</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>We don&#8217;t have to do any work to provide the &#8216;close&#8217; method on our mock.
+Accessing close creates it. So, if &#8216;close&#8217; hasn&#8217;t already been called then
+accessing it in the test will create it, but <tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt>
+will raise a failure exception.</p>
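+<p>As a short illustration of that failure mode, asserting on a <cite>close</cite> that
+was never called fails immediately (the exact message may differ between
+versions):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.close.assert_called_with()
+Traceback (most recent call last):
+  ...
+AssertionError: Expected call: close()
+Not called
+</pre></div>
+</div>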
+</div>
+<div class="section" id="mocking-classes">
+<h3>Mocking Classes<a class="headerlink" href="#mocking-classes" title="Permalink to this headline">¶</a></h3>
+<p>A common use case is to mock out classes instantiated by your code under test.
+When you patch a class, that class is replaced with a mock. Instances
+are created by <em>calling the class</em>. This means you access the &#8220;mock instance&#8221;
+by looking at the return value of the mocked class.</p>
+<p>In the example below we have a function <cite>some_function</cite> that instantiates <cite>Foo</cite>
+and calls a method on it. The call to <cite>patch</cite> replaces the class <cite>Foo</cite> with a
+mock. The <cite>Foo</cite> instance is the result of calling the mock, so it is configured
+by modifying the mock <tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">some_function</span><span class="p">():</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;module.Foo&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;the result&#39;</span>
+<span class="gp">... </span> <span class="n">result</span> <span class="o">=</span> <span class="n">some_function</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">result</span> <span class="o">==</span> <span class="s">&#39;the result&#39;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="naming-your-mocks">
+<h3>Naming your mocks<a class="headerlink" href="#naming-your-mocks" title="Permalink to this headline">¶</a></h3>
+<p>It can be useful to give your mocks a name. The name is shown in the repr of
+the mock and can be helpful when the mock appears in test failure messages. The
+name is also propagated to attributes or methods of the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span>
+<span class="go">&lt;MagicMock name=&#39;foo&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span>
+<span class="go">&lt;MagicMock name=&#39;foo.method&#39; id=&#39;...&#39;&gt;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="tracking-all-calls">
+<h3>Tracking all Calls<a class="headerlink" href="#tracking-all-calls" title="Permalink to this headline">¶</a></h3>
+<p>Often you want to track more than a single call to a method. The
+<tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt> attribute records all calls
+to child attributes of the mock, and also to their children.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="n">x</span><span class="o">=</span><span class="mi">53</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock.attribute.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.method(), call.attribute.method(10, x=53)]</span>
+</pre></div>
+</div>
+<p>If you make an assertion about <cite>mock_calls</cite> and any unexpected methods
+have been called, then the assertion will fail. This is useful because as well
+as asserting that the calls you expected have been made, you are also checking
+that they were made in the right order and with no additional calls.</p>
+<p>You use the <tt class="xref py py-data docutils literal"><span class="pre">call</span></tt> object to construct lists for comparing with
+<cite>mock_calls</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="o">.</span><span class="n">method</span><span class="p">(),</span> <span class="n">call</span><span class="o">.</span><span class="n">attribute</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="n">x</span><span class="o">=</span><span class="mi">53</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="setting-return-values-and-attributes">
+<h3>Setting Return Values and Attributes<a class="headerlink" href="#setting-return-values-and-attributes" title="Permalink to this headline">¶</a></h3>
+<p>Setting the return values on a mock object is trivially easy:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>Of course you can do the same for methods on the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>The return value can also be set in the constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>If you need to set an attribute on your mock, just do it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">x</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">x</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>Sometimes you want to mock up a more complex situation, for example
+<cite>mock.connection.cursor().execute(&#8220;SELECT 1&#8221;)</cite>. If we want this call to
+return a list, then we have to configure the result of the nested call.</p>
+<p>We can use <tt class="xref py py-data docutils literal"><span class="pre">call</span></tt> to construct the set of calls in a &#8220;chained call&#8221; like
+this for easy assertion afterwards:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">cursor</span> <span class="o">=</span> <span class="n">mock</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">cursor</span><span class="o">.</span><span class="n">execute</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="p">[</span><span class="s">&#39;foo&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="s">&quot;SELECT 1&quot;</span><span class="p">)</span>
+<span class="go">[&#39;foo&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="n">call</span><span class="o">.</span><span class="n">connection</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="s">&quot;SELECT 1&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">call_list</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.connection.cursor(), call.connection.cursor().execute(&#39;SELECT 1&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>It is the call to <cite>.call_list()</cite> that turns our call object into a list of
+calls representing the chained calls.</p>
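+<p>For example, the <cite>expected</cite> object constructed above expands like this:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; call.connection.cursor().execute(&quot;SELECT 1&quot;).call_list()
+[call.connection.cursor(), call.connection.cursor().execute('SELECT 1')]
+</pre></div>
+</div>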
+</div>
+<div class="section" id="raising-exceptions-with-mocks">
+<h3>Raising exceptions with mocks<a class="headerlink" href="#raising-exceptions-with-mocks" title="Permalink to this headline">¶</a></h3>
+<p>A useful attribute is <tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt>. If you set this to an
+exception class or instance then the exception will be raised when the mock
+is called.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;Boom!&#39;</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">Exception</span>: <span class="n">Boom!</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="side-effect-functions-and-iterables">
+<h3>Side effect functions and iterables<a class="headerlink" href="#side-effect-functions-and-iterables" title="Permalink to this headline">¶</a></h3>
+<p><cite>side_effect</cite> can also be set to a function or an iterable. The use case for
+<cite>side_effect</cite> as an iterable is where your mock is going to be called several
+times, and you want each call to return a different value. When you set
+<cite>side_effect</cite> to an iterable every call to the mock returns the next value
+from the iterable:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="p">[</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">4</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">5</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">6</span>
+</pre></div>
+</div>
+<p>For more advanced use cases, like dynamically varying the return values
+depending on what the mock is called with, <cite>side_effect</cite> can be a function.
+The function will be called with the same arguments as the mock. Whatever the
+function returns is what the call returns:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">vals</span> <span class="o">=</span> <span class="p">{(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">):</span> <span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">):</span> <span class="mi">2</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">vals</span><span class="p">[</span><span class="n">args</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">2</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="creating-a-mock-from-an-existing-object">
+<h3>Creating a Mock from an Existing Object<a class="headerlink" href="#creating-a-mock-from-an-existing-object" title="Permalink to this headline">¶</a></h3>
+<p>One problem with overuse of mocking is that it couples your tests to the
+implementation of your mocks rather than to your real code. Suppose you have a
+class that implements <cite>some_method</cite>. In a test for another class, you
+provide a mock of this object that <em>also</em> provides <cite>some_method</cite>. If you
+later refactor the first class so that it no longer has <cite>some_method</cite>, then
+your tests will continue to pass even though your code is now broken!</p>
+<p><cite>Mock</cite> allows you to provide an object as a specification for the mock,
+using the <cite>spec</cite> keyword argument. Accessing methods / attributes on the
+mock that don&#8217;t exist on your specification object will immediately raise an
+attribute error. If you change the implementation of your specification, then
+tests that use that class will start failing immediately without you having to
+instantiate the class in those tests.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">old_method</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AttributeError</span>: <span class="n">object has no attribute &#39;old_method&#39;</span>
+</pre></div>
+</div>
+<p>If you want a stronger form of specification that prevents the setting
+of arbitrary attributes as well as the getting of them, then you can use
+<cite>spec_set</cite> instead of <cite>spec</cite>.</p>
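+<p>A minimal sketch, reusing <cite>SomeClass</cite> from above; with <cite>spec_set</cite> even
+setting an attribute that isn&#8217;t on the specification object fails:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(spec_set=SomeClass)
+&gt;&gt;&gt; mock.not_on_the_spec = 3
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute 'not_on_the_spec'
+</pre></div>
+</div>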
+</div>
+</div>
+<div class="section" id="patch-decorators">
+<h2>Patch Decorators<a class="headerlink" href="#patch-decorators" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">With <cite>patch</cite> it matters that you patch objects in the namespace where they
+are looked up. This is normally straightforward, but for a quick guide
+read <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
+</div>
+<p>A common need in tests is to patch a class attribute or a module attribute,
+for example patching a builtin or patching a class in a module to test that it
+is instantiated. Modules and classes are effectively global, so patching on
+them has to be undone after the test, or the patch will persist into other
+tests and cause hard-to-diagnose problems.</p>
+<p>mock provides three convenient decorators for this: <cite>patch</cite>, <cite>patch.object</cite> and
+<cite>patch.dict</cite>. <cite>patch</cite> takes a single string of the form
+<cite>package.module.Class.attribute</cite> to specify the attribute you are patching. It
+also optionally takes a value that you want the attribute (or class or
+whatever) to be replaced with. <cite>patch.object</cite> takes an object and the name of
+the attribute you would like patched, plus optionally the value to patch it
+with.</p>
+<p><cite>patch.object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">original</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">attribute</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">attribute</span> <span class="ow">is</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If you are patching a module (including <cite>__builtin__</cite>) then use <cite>patch</cite>
+instead of <cite>patch.object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">file_handle</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__builtin__.open&#39;</span><span class="p">,</span> <span class="n">mock</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">handle</span> <span class="o">=</span> <span class="nb">open</span><span class="p">(</span><span class="s">&#39;filename&#39;</span><span class="p">,</span> <span class="s">&#39;r&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="s">&#39;filename&#39;</span><span class="p">,</span> <span class="s">&#39;r&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">handle</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">file_handle</span><span class="p">,</span> <span class="s">&quot;incorrect file handle returned&quot;</span>
+</pre></div>
+</div>
+<p>The module name can be &#8216;dotted&#8217;, in the form <cite>package.module</cite> if needed:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName.attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">():</span>
+<span class="gp">... </span> <span class="kn">from</span> <span class="nn">package.module</span> <span class="kn">import</span> <span class="n">ClassName</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">ClassName</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>A nice pattern is to decorate test methods themselves:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;attribute&#39;</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertEqual</span><span class="p">(</span><span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span><span class="p">,</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">attribute</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">attribute</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>If you want to patch with a Mock, you can use <cite>patch</cite> with only one argument
+(or <cite>patch.object</cite> with two arguments). The mock will be created for you and
+passed into the test function / method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;static_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>You can stack up multiple patch decorators using this pattern:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">unittest2</span><span class="o">.</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">,</span> <span class="n">MockClass1</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span> <span class="ow">is</span> <span class="n">MockClass1</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">assertTrue</span><span class="p">(</span><span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span> <span class="ow">is</span> <span class="n">MockClass2</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">test_something</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal <em>python</em> order that
+decorators are applied). This means from the bottom up, so in the example
+above the mock for <cite>package.module.ClassName2</cite> is passed in first.</p>
+<p>There is also <tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt> for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;value&#39;</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">foo</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">},</span> <span class="n">clear</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p><cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> can all be used as context managers.</p>
+<p>Where you use <cite>patch</cite> to create a mock for you, you can get a reference to the
+mock using the &#8220;as&#8221; form of the with statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">ProductionClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">ProductionClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_method</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">... </span> <span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>As an alternative, <cite>patch</cite>, <cite>patch.object</cite> and <cite>patch.dict</cite> can be used as
+class decorators. When used in this way it is the same as applying the
+decorator individually to every method whose name starts with &#8220;test&#8221;.</p>
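+<p>For example, a minimal sketch of <cite>patch.object</cite> used as a class decorator
+(reusing <cite>SomeClass</cite> and <cite>sentinel</cite> from the earlier examples); the patch is
+applied to both test methods:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; @patch.object(SomeClass, 'attribute', sentinel.attribute)
+... class MyTest(unittest2.TestCase):
+...     def test_one(self):
+...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+...     def test_two(self):
+...         self.assertEqual(SomeClass.attribute, sentinel.attribute)
+...
+&gt;&gt;&gt; MyTest('test_one').test_one()
+&gt;&gt;&gt; MyTest('test_two').test_two()
+</pre></div>
+</div>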
+<p>For some more advanced examples, see the <a class="reference internal" href="examples.html#further-examples"><em>Further Examples</em></a> page.</p>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Getting Started with Mock</a><ul>
+<li><a class="reference internal" href="#using-mock">Using Mock</a><ul>
+<li><a class="reference internal" href="#mock-patching-methods">Mock Patching Methods</a></li>
+<li><a class="reference internal" href="#mock-for-method-calls-on-an-object">Mock for Method Calls on an Object</a></li>
+<li><a class="reference internal" href="#mocking-classes">Mocking Classes</a></li>
+<li><a class="reference internal" href="#naming-your-mocks">Naming your mocks</a></li>
+<li><a class="reference internal" href="#tracking-all-calls">Tracking all Calls</a></li>
+<li><a class="reference internal" href="#setting-return-values-and-attributes">Setting Return Values and Attributes</a></li>
+<li><a class="reference internal" href="#raising-exceptions-with-mocks">Raising exceptions with mocks</a></li>
+<li><a class="reference internal" href="#side-effect-functions-and-iterables">Side effect functions and iterables</a></li>
+<li><a class="reference internal" href="#creating-a-mock-from-an-existing-object">Creating a Mock from an Existing Object</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#patch-decorators">Patch Decorators</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="previous chapter">Mocking Magic Methods</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="examples.html"
+ title="next chapter">Further Examples</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/getting-started.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="examples.html" title="Further Examples"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/index.html b/python/mock-1.0.0/html/index.html
new file mode 100644
index 000000000..2cdf89d5c
--- /dev/null
+++ b/python/mock-1.0.0/html/index.html
@@ -0,0 +1,529 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mock - Mocking and Testing Library &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="#" />
+ <link rel="next" title="The Mock Class" href="mock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ accesskey="N">next</a> |</li>
+ <li><a href="#">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mock-mocking-and-testing-library">
+<h1>Mock - Mocking and Testing Library<a class="headerlink" href="#mock-mocking-and-testing-library" title="Permalink to this headline">¶</a></h1>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name" />
+<col class="field-body" />
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Author:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/weblog/index.shtml">Michael Foord</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Version:</th><td class="field-body">1.0.0</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Date:</th><td class="field-body">2012/10/07</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Homepage:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/mock/">Mock Homepage</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Download:</th><td class="field-body"><a class="reference external" href="http://pypi.python.org/pypi/mock">Mock on PyPI</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Documentation:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf">PDF Documentation</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">License:</th><td class="field-body"><a class="reference external" href="http://www.voidspace.org.uk/python/license.shtml">BSD License</a></td>
+</tr>
+<tr class="field-even field"><th class="field-name">Support:</th><td class="field-body"><a class="reference external" href="http://lists.idyll.org/listinfo/testing-in-python">Mailing list (testing-in-python&#64;lists.idyll.org)</a></td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Issue tracker:</th><td class="field-body"><a class="reference external" href="http://code.google.com/p/mock/issues/list">Google code project</a></td>
+</tr>
+</tbody>
+</table>
+<span class="target" id="module-mock"></span><p id="index-0">mock is a library for testing in Python. It allows you to replace parts of
+your system under test with mock objects and make assertions about how they
+have been used.</p>
+<p>mock is now part of the Python standard library, available as <a class="reference external" href="http://docs.python.org/py3k/library/unittest.mock.html#module-unittest.mock">unittest.mock</a>
+from Python 3.3 onwards.</p>
+<p>mock provides a core <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class removing the need to create a host
+of stubs throughout your test suite. After performing an action, you can make
+assertions about which methods / attributes were used and the arguments they were
+called with. You can also specify return values and set needed attributes in
+the normal way.</p>
+<p>Additionally, mock provides a <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator that handles patching
+module and class level attributes within the scope of a test, along with
+<a class="reference internal" href="sentinel.html#mock.sentinel" title="mock.sentinel"><tt class="xref py py-const docutils literal"><span class="pre">sentinel</span></tt></a> for creating unique objects. See the <a class="reference internal" href="#quick-guide">quick guide</a> for
+some examples of how to use <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a>, <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> and
+<a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a>.</p>
+<p>Mock is very easy to use and is designed for use with
+<a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest</a>. Mock is based on
+the &#8216;action -&gt; assertion&#8217; pattern instead of the &#8216;record -&gt; replay&#8217; pattern used by many
+mocking frameworks.</p>
+<p>mock is tested on Python versions 2.4-2.7 and Python 3, plus the latest
+versions of Jython and PyPy.</p>
+<div class="section" id="api-documentation">
+<h2>API Documentation<a class="headerlink" href="#api-documentation" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="mock.html">The Mock Class</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#calling">Calling</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#deleting-attributes">Deleting Attributes</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mock.html#attaching-mocks-as-attributes">Attaching Mocks as Attributes</a></li>
+<li class="toctree-l1"><a class="reference internal" href="patch.html">Patch Decorators</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch">patch</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-object">patch.object</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-dict">patch.dict</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-multiple">patch.multiple</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patch-methods-start-and-stop">patch methods: start and stop</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#test-prefix">TEST_PREFIX</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#nesting-patch-decorators">Nesting Patch Decorators</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#where-to-patch">Where to patch</a></li>
+<li class="toctree-l2"><a class="reference internal" href="patch.html#patching-descriptors-and-proxy-objects">Patching Descriptors and Proxy Objects</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="helpers.html">Helpers</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#call">call</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#create-autospec">create_autospec</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#any">ANY</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#filter-dir">FILTER_DIR</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#mock-open">mock_open</a></li>
+<li class="toctree-l2"><a class="reference internal" href="helpers.html#autospeccing">Autospeccing</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="sentinel.html">Sentinel</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="sentinel.html#sentinel-example">Sentinel Example</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="magicmock.html">Mocking Magic Methods</a></li>
+<li class="toctree-l1"><a class="reference internal" href="magicmock.html#magic-mock">Magic Mock</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="user-guide">
+<h2>User Guide<a class="headerlink" href="#user-guide" title="Permalink to this headline">¶</a></h2>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="getting-started.html">Getting Started with Mock</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="getting-started.html#using-mock">Using Mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="getting-started.html#patch-decorators">Patch Decorators</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="examples.html">Further Examples</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-chained-calls">Mocking chained calls</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#partial-mocking">Partial mocking</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-a-generator-method">Mocking a Generator Method</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#applying-the-same-patch-to-every-test-method">Applying the same patch to every test method</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-unbound-methods">Mocking Unbound Methods</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#checking-multiple-calls-with-mock">Checking multiple calls with mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#coping-with-mutable-arguments">Coping with mutable arguments</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#raising-exceptions-on-attribute-access">Raising exceptions on attribute access</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#multiple-calls-with-different-effects">Multiple calls with different effects</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#nesting-patches">Nesting Patches</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-a-dictionary-with-magicmock">Mocking a dictionary with MagicMock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mock-subclasses-and-their-attributes">Mock subclasses and their attributes</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-imports-with-patch-dict">Mocking imports with patch.dict</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#tracking-order-of-calls-and-less-verbose-call-assertions">Tracking order of calls and less verbose call assertions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#more-complex-argument-matching">More complex argument matching</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#less-verbose-configuration-of-mock-objects">Less verbose configuration of mock objects</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#matching-any-argument-in-assertions">Matching any argument in assertions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-properties">Mocking Properties</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocking-open">Mocking open</a></li>
+<li class="toctree-l2"><a class="reference internal" href="examples.html#mocks-without-some-attributes">Mocks without some attributes</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="compare.html">Mock Library Comparison</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#simple-fake-object">Simple fake object</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#simple-mock">Simple mock</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#creating-partial-mocks">Creating partial mocks</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#ensure-calls-are-made-in-specific-order">Ensure calls are made in specific order</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#raising-exceptions">Raising exceptions</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#override-new-instances-of-a-class">Override new instances of a class</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#call-the-same-method-multiple-times">Call the same method multiple times</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mock-chained-methods">Mock chained methods</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mocking-a-context-manager">Mocking a context manager</a></li>
+<li class="toctree-l2"><a class="reference internal" href="compare.html#mocking-the-builtin-open-used-as-a-context-manager">Mocking the builtin open used as a context manager</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html">CHANGELOG</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0">2012/10/07 Version 1.0.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-beta-1">2012/07/13 Version 1.0.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-alpha-2">2012/05/04 Version 1.0.0 alpha 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-1-0-0-alpha-1">2012/03/25 Version 1.0.0 alpha 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0">2012/02/13 Version 0.8.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-release-candidate-2">2012/01/10 Version 0.8.0 release candidate 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-release-candidate-1">2011/12/29 Version 0.8.0 release candidate 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-4">2011/10/09 Version 0.8.0 beta 4</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-3">2011/08/15 Version 0.8.0 beta 3</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-2">2011/08/05 Version 0.8.0 beta 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-beta-1">2011/07/25 Version 0.8.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-alpha-2">2011/07/16 Version 0.8.0 alpha 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-8-0-alpha-1">2011/06/14 Version 0.8.0 alpha 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-2">2011/05/30 Version 0.7.2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-1">2011/05/06 Version 0.7.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0">2011/03/05 Version 0.7.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-rc-1">2011/02/16 Version 0.7.0 RC 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-4">2010/11/12 Version 0.7.0 beta 4</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-3">2010/09/18 Version 0.7.0 beta 3</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-2">2010/06/23 Version 0.7.0 beta 2</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-7-0-beta-1">2010/06/22 Version 0.7.0 beta 1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-6-0">2009/08/22 Version 0.6.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-5-0">2009/04/17 Version 0.5.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-4-0">2008/10/12 Version 0.4.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-3-1">2007/12/03 Version 0.3.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-3-0">2007/11/30 Version 0.3.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-2-1">2007/11/21 Version 0.2.1</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-2-0">2007/11/20 Version 0.2.0</a></li>
+<li class="toctree-l2"><a class="reference internal" href="changelog.html#version-0-1-0">2007/11/19 Version 0.1.0</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html#todo-and-limitations">TODO and Limitations</a></li>
+</ul>
+</div>
+</div>
+<div class="section" id="installing">
+<span id="index-1"></span><h2>Installing<a class="headerlink" href="#installing" title="Permalink to this headline">¶</a></h2>
+<p>The current version is 1.0.0. Mock is stable and widely used. If you do
+find any bugs, or have suggestions for improvements / extensions,
+then please contact us.</p>
+<ul class="simple">
+<li><a class="reference external" href="http://pypi.python.org/pypi/mock">mock on PyPI</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/downloads/mock-1.0.0.pdf">mock documentation as PDF</a></li>
+<li><a class="reference external" href="http://code.google.com/p/mock/">Google Code Home &amp; Mercurial Repository</a></li>
+</ul>
+<span class="target" id="index-2"></span><p id="index-3">You can checkout the latest development version from the Google Code Mercurial
+repository with the following command:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">hg</span> <span class="pre">clone</span> <span class="pre">https://mock.googlecode.com/hg/</span> <span class="pre">mock</span></tt></div></blockquote>
+<span class="target" id="index-4"></span><span class="target" id="index-5"></span><p id="index-6">If you have pip, setuptools or distribute you can install mock with:</p>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">easy_install</span> <span class="pre">-U</span> <span class="pre">mock</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">pip</span> <span class="pre">install</span> <span class="pre">-U</span> <span class="pre">mock</span></tt></div>
+</div>
+</div></blockquote>
+<p>Alternatively, you can download the mock distribution from PyPI and, after
+unpacking, run:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python</span> <span class="pre">setup.py</span> <span class="pre">install</span></tt></div></blockquote>
+</div>
+<div class="section" id="quick-guide">
+<h2>Quick Guide<a class="headerlink" href="#quick-guide" title="Permalink to this headline">¶</a></h2>
+<p><a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> objects create all attributes and
+methods as you access them and store details of how they have been used. You
+can configure them to specify return values or limit what attributes are
+available, and then make assertions about how they have been used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">MagicMock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;value&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt> allows you to perform side effects, including raising an
+exception when a mock is called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">KeyError</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;foo&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">values</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;a&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">arg</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">values</span><span class="p">[</span><span class="n">arg</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;a&#39;</span><span class="p">),</span> <span class="n">mock</span><span class="p">(</span><span class="s">&#39;b&#39;</span><span class="p">),</span> <span class="n">mock</span><span class="p">(</span><span class="s">&#39;c&#39;</span><span class="p">)</span>
+<span class="go">(1, 2, 3)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="p">[</span><span class="mi">5</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">()</span>
+<span class="go">(5, 4, 3)</span>
+</pre></div>
+</div>
+<p>There are many other ways to configure Mock and control its behaviour. For
+example the <cite>spec</cite> argument configures the mock to take its specification
+from another object. Attempting to access attributes or methods on the mock
+that don&#8217;t exist on the spec will fail with an <cite>AttributeError</cite>.</p>
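+<p>A minimal sketch (assuming the <cite>ProductionClass</cite> from the quick guide
+example above):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import Mock
+&gt;&gt;&gt; mock = Mock(spec=ProductionClass)
+&gt;&gt;&gt; mock.no_such_method()
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute 'no_such_method'
+</pre></div>
+</div>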
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorator / context manager makes it easy to mock classes or
+objects in a module under test. The object you specify will be replaced with a
+mock (or other object) during the test and restored when the test ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">patch</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;module.ClassName1&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass1</span> <span class="ow">is</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass2</span> <span class="ow">is</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass1</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">MockClass2</span><span class="o">.</span><span class="n">called</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>When you nest patch decorators the mocks are passed in to the decorated
+function in the same order they are applied (the normal <em>python</em> order that
+decorators are applied). This means from the bottom up, so in the example
+above the mock for <cite>module.ClassName1</cite> is passed in first.</p>
+<p class="last">With <cite>patch</cite> it matters that you patch objects in the namespace where they
+are looked up. This is normally straightforward, but for a quick guide
+read <a class="reference internal" href="patch.html#where-to-patch"><em>where to patch</em></a>.</p>
+</div>
+<p>As well as a decorator, <cite>patch</cite> can be used as a context manager in a with
+statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">object</span><span class="p">(</span><span class="n">ProductionClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_method</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">thing</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">thing</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>There is also <a class="reference internal" href="patch.html#mock.patch.dict" title="mock.patch.dict"><tt class="xref py py-func docutils literal"><span class="pre">patch.dict()</span></tt></a> for setting values in a dictionary just
+during a scope and restoring the dictionary to its original state when the test
+ends:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;value&#39;</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">foo</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">},</span> <span class="n">clear</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="n">original</span>
+</pre></div>
+</div>
+<p>Mock supports the mocking of Python <a class="reference internal" href="magicmock.html#magic-methods"><em>magic methods</em></a>. The
+easiest way of using magic methods is with the <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> class. It
+allows you to do things like:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foobarbaz&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;foobarbaz&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>Mock allows you to assign functions (or other Mock instances) to magic methods
+and they will be called appropriately. The <cite>MagicMock</cite> class is just a Mock
+variant that has all of the magic methods pre-created for you (well, all the
+useful ones anyway).</p>
+<p>The following is an example of using magic methods with the ordinary Mock
+class:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;wheeeeee&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;wheeeeee&#39;</span>
+</pre></div>
+</div>
+<p>To ensure that the mock objects in your tests have the same API as the
+objects they are replacing, you can use <a class="reference internal" href="helpers.html#auto-speccing"><em>auto-speccing</em></a>.
+Auto-speccing can be done through the <cite>autospec</cite> argument to patch, or the
+<a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function. Auto-speccing creates mock objects that
+have the same attributes and methods as the objects they are replacing, and
+any functions and methods (including constructors) have the same call
+signature as the real object.</p>
+<p>This ensures that your mocks will fail in the same way as your production
+code if they are used incorrectly:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">create_autospec</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span> <span class="o">=</span> <span class="n">create_autospec</span><span class="p">(</span><span class="n">function</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="s">&#39;fishy&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">&#39;fishy&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_function</span><span class="p">(</span><span class="s">&#39;wrong arguments&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes exactly 3 arguments (1 given)</span>
+</pre></div>
+</div>
+<p><cite>create_autospec</cite> can also be used on classes, where it copies the signature of
+the <cite>__init__</cite> method, and on callable objects where it copies the signature of
+the <cite>__call__</cite> method.</p>
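+<p>As a rough sketch of the class case (the <cite>Thing</cite> class below is
+hypothetical, used only for illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Thing(object):
+...     def __init__(self, name, size):
+...         self.name = name
+...         self.size = size
+...
+&gt;&gt;&gt; mock_thing = create_autospec(Thing)
+&gt;&gt;&gt; thing = mock_thing(&#39;fish&#39;, 3)   # matches __init__(self, name, size)
+&gt;&gt;&gt; mock_thing.assert_called_once_with(&#39;fish&#39;, 3)
+&gt;&gt;&gt; mock_thing(&#39;too few&#39;)           # wrong arity fails, just like the real class
+Traceback (most recent call last):
+  ...
+TypeError: ...
+</pre></div>
+</div>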
+<span class="target" id="index-7"></span></div>
+<div class="section" id="references">
+<span id="index-8"></span><h2>References<a class="headerlink" href="#references" title="Permalink to this headline">¶</a></h2>
+<p>Articles, blog entries and other stuff related to testing with Mock:</p>
+<ul class="simple">
+<li><a class="reference external" href="https://github.com/carljm/django-testing-slides/blob/master/models/30_no_database.md">Imposing a No DB Discipline on Django unit tests</a></li>
+<li><a class="reference external" href="https://github.com/dcramer/mock-django">mock-django: tools for mocking the Django ORM and models</a></li>
+<li><a class="reference external" href="https://blip.tv/file/4881513">PyCon 2011 Video: Testing with mock</a></li>
+<li><a class="reference external" href="http://noopenblockers.com/2012/01/06/mock-objects-in-python/">Mock objects in Python</a></li>
+<li><a class="reference external" href="http://blueprintforge.com/blog/2012/01/08/python-injecting-mock-objects-for-powerful-testing/">Python: Injecting Mock Objects for Powerful Testing</a></li>
+<li><a class="reference external" href="http://www.michaelpollmeier.com/python-mock-how-to-assert-a-substring-of-logger-output/">Python Mock: How to assert a substring of logger output</a></li>
+<li><a class="reference external" href="http://www.mattjmorrison.com/2011/09/mocking-django.html">Mocking Django</a></li>
+<li><a class="reference external" href="http://williamjohnbert.com/2011/07/how-to-unit-testing-in-django-with-mocking-and-patching/">Mocking dates and other classes that can&#8217;t be modified</a></li>
+<li><a class="reference external" href="http://konryd.blogspot.com/2010/06/mock-recipies.html">Mock recipes</a></li>
+<li><a class="reference external" href="http://konryd.blogspot.com/2010/05/mockity-mock-mock-some-love-for-mock.html">Mockity mock mock - some love for the mock module</a></li>
+<li><a class="reference external" href="http://mattsnider.com/python/mock-and-coverage/">Coverage and Mock (with django)</a></li>
+<li><a class="reference external" href="http://www.insomnihack.com/?p=194">Python Unit Testing with Mock</a></li>
+<li><a class="reference external" href="http://myadventuresincoding.wordpress.com/2011/02/26/python-python-mock-cheat-sheet/">Getting started with Python Mock</a></li>
+<li><a class="reference external" href="http://tobyho.com/2011/03/24/smart-parameter-checks-in/">Smart Parameter Checks with mock</a></li>
+<li><a class="reference external" href="http://agiletesting.blogspot.com/2009/07/python-mock-testing-techniques-and.html">Python mock testing techniques and tools</a></li>
+<li><a class="reference external" href="http://techblog.ironfroggy.com/2008/10/how-to-test.html">How To Test Django Template Tags</a></li>
+<li><a class="reference external" href="http://pypap.blogspot.com/2008/10/newbie-nugget-unit-testing-with-mock.html">A presentation on Unit Testing with Mock</a></li>
+<li><a class="reference external" href="http://michael-a-nelson.blogspot.com/2008/09/mocking-with-django-and-google-app.html">Mocking with Django and Google AppEngine</a></li>
+</ul>
+<span class="target" id="index-9"></span></div>
+<div class="section" id="tests">
+<span id="index-10"></span><h2>Tests<a class="headerlink" href="#tests" title="Permalink to this headline">¶</a></h2>
+<p>Mock uses <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> for its own
+test suite. In order to run it, use the <cite>unit2</cite> script that comes with the
+<cite>unittest2</cite> module, on a checkout of the source repository:</p>
+<blockquote>
+<div><cite>unit2 discover</cite></div></blockquote>
+<p>If you have <a class="reference external" href="http://pypi.python.org/pypi/distribute">setuptools</a> as well as
+unittest2, you can run:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python</span> <span class="pre">setup.py</span> <span class="pre">test</span></tt></div></blockquote>
+<p>On Python 3.2 you can use the <tt class="docutils literal"><span class="pre">unittest</span></tt> module from the standard library:</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">python3.2</span> <span class="pre">-m</span> <span class="pre">unittest</span> <span class="pre">discover</span></tt></div></blockquote>
+<p id="index-11">On Python 3 the tests for unicode are skipped as they are not relevant. On
+Python 2.4 tests that use the with statements are skipped as the with statement
+is invalid syntax on Python 2.4.</p>
+</div>
+<div class="section" id="older-versions">
+<span id="index-12"></span><h2>Older Versions<a class="headerlink" href="#older-versions" title="Permalink to this headline">¶</a></h2>
+<p>Documentation for older versions of mock:</p>
+<ul class="simple">
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.8/">mock 0.8</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.7/">mock 0.7</a></li>
+<li><a class="reference external" href="http://www.voidspace.org.uk/python/mock/0.6.0/">mock 0.6</a></li>
+</ul>
+<p>Docs from the in-development version of <cite>mock</cite> can be found at
+<a class="reference external" href="http://mock.readthedocs.org">mock.readthedocs.org</a>.</p>
+</div>
+<div class="section" id="terminology">
+<h2>Terminology<a class="headerlink" href="#terminology" title="Permalink to this headline">¶</a></h2>
+<p>Terminology for objects used to replace other ones can be confusing. Terms
+like double, fake, mock, stub, and spy are all used with varying meanings.</p>
+<p>In <a class="reference external" href="http://xunitpatterns.com/Mocks,%20Fakes,%20Stubs%20and%20Dummies.html">classic mock terminology</a>
+<a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">mock.Mock</span></tt></a> is a <a class="reference external" href="http://xunitpatterns.com/Test%20Spy.html">spy</a> that
+allows for <em>post-mortem</em> examination. This is what I call the &#8220;action -&gt;
+assertion&#8221; <a class="footnote-reference" href="#id2" id="id1">[1]</a> pattern of testing.</p>
+<p>I&#8217;m not, however, a fan of this &#8220;statically typed mocking terminology&#8221;
+promulgated by <a class="reference external" href="http://martinfowler.com/articles/mocksArentStubs.html">Martin Fowler</a>. It confuses usage
+patterns with implementation and prevents you from using natural terminology
+when discussing mocking.</p>
+<p>I much prefer duck typing: if an object used in your test suite looks like a
+mock object and quacks like a mock object, then it&#8217;s fine to call it a mock, no
+matter what the implementation looks like.</p>
+<p>This terminology is perhaps more useful in less capable languages where
+different usage patterns will <em>require</em> different implementations.
+<cite>mock.Mock()</cite> is capable of being used in most of the different roles
+described by Fowler, except (annoyingly / frustratingly / ironically) a Mock
+itself!</p>
+<p>How about a simpler definition: a &#8220;mock object&#8221; is an object used to replace a
+real one in a system under test.</p>
+<table class="docutils footnote" frame="void" id="id2" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id1">[1]</a></td><td>This pattern is called &#8220;AAA&#8221; by some members of the testing community;
+&#8220;Arrange - Act - Assert&#8221;.</td></tr>
+</tbody>
+</table>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="#">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mock - Mocking and Testing Library</a><ul>
+<li><a class="reference internal" href="#api-documentation">API Documentation</a><ul>
+</ul>
+</li>
+<li><a class="reference internal" href="#user-guide">User Guide</a><ul>
+</ul>
+</li>
+<li><a class="reference internal" href="#installing">Installing</a></li>
+<li><a class="reference internal" href="#quick-guide">Quick Guide</a></li>
+<li><a class="reference internal" href="#references">References</a></li>
+<li><a class="reference internal" href="#tests">Tests</a></li>
+<li><a class="reference internal" href="#older-versions">Older Versions</a></li>
+<li><a class="reference internal" href="#terminology">Terminology</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Next topic</h4>
+ <p class="topless"><a href="mock.html"
+ title="next chapter">The Mock Class</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/index.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ >next</a> |</li>
+ <li><a href="#">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/magicmock.html b/python/mock-1.0.0/html/magicmock.html
new file mode 100644
index 000000000..f49fae763
--- /dev/null
+++ b/python/mock-1.0.0/html/magicmock.html
@@ -0,0 +1,347 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Mocking Magic Methods &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Getting Started with Mock" href="getting-started.html" />
+ <link rel="prev" title="Sentinel" href="sentinel.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="sentinel.html" title="Sentinel"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mocking-magic-methods">
+<span id="magic-methods"></span><h1>Mocking Magic Methods<a class="headerlink" href="#mocking-magic-methods" title="Permalink to this headline">¶</a></h1>
+<p><a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> supports mocking <a class="reference external" href="http://www.ironpythoninaction.com/magic-methods.html">magic methods</a>. This allows mock
+objects to replace containers or other objects that implement Python
+protocols.</p>
+<p>Because magic methods are looked up differently from normal methods <a class="footnote-reference" href="#id4" id="id2">[1]</a>, this
+support has been specially implemented. This means that only specific magic
+methods are supported. The supported list includes <em>almost</em> all of them. If
+there are any missing that you need, please let us know!</p>
+<p>You mock magic methods by setting the method you are interested in to a function
+or a mock instance. If you are using a function then it <em>must</em> take <tt class="docutils literal"><span class="pre">self</span></tt> as
+the first argument <a class="footnote-reference" href="#id5" id="id3">[2]</a>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">__str__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;fooble&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">__str__</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;fooble&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__str__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fooble&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">str</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;fooble&#39;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="nb">iter</span><span class="p">([]))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p>One use case for this is mocking objects used as context managers in a
+<cite>with</cite> statement:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__enter__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__exit__</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">False</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">mock</span> <span class="k">as</span> <span class="n">m</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">m</span> <span class="o">==</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__enter__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__exit__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Calls to magic methods do not appear in <a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a>, but they
+are recorded in <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>.</p>
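+<p>A minimal sketch of this distinction, using an unconfigured <cite>MagicMock</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; _ = str(mock)        # invokes the mocked __str__
+&gt;&gt;&gt; mock.method_calls    # magic methods are not recorded here
+[]
+&gt;&gt;&gt; mock.mock_calls      # ...but they do appear here
+[call.__str__()]
+</pre></div>
+</div>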
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">If you use the <cite>spec</cite> keyword argument to create a mock then attempting to
+set a magic method that isn&#8217;t in the spec will raise an <cite>AttributeError</cite>.</p>
+</div>
+<p>The full list of supported magic methods is:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__hash__</span></tt>, <tt class="docutils literal"><span class="pre">__sizeof__</span></tt>, <tt class="docutils literal"><span class="pre">__repr__</span></tt> and <tt class="docutils literal"><span class="pre">__str__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__dir__</span></tt>, <tt class="docutils literal"><span class="pre">__format__</span></tt> and <tt class="docutils literal"><span class="pre">__subclasses__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__floor__</span></tt>, <tt class="docutils literal"><span class="pre">__trunc__</span></tt> and <tt class="docutils literal"><span class="pre">__ceil__</span></tt></li>
+<li>Comparisons: <tt class="docutils literal"><span class="pre">__cmp__</span></tt>, <tt class="docutils literal"><span class="pre">__lt__</span></tt>, <tt class="docutils literal"><span class="pre">__gt__</span></tt>, <tt class="docutils literal"><span class="pre">__le__</span></tt>, <tt class="docutils literal"><span class="pre">__ge__</span></tt>,
+<tt class="docutils literal"><span class="pre">__eq__</span></tt> and <tt class="docutils literal"><span class="pre">__ne__</span></tt></li>
+<li>Container methods: <tt class="docutils literal"><span class="pre">__getitem__</span></tt>, <tt class="docutils literal"><span class="pre">__setitem__</span></tt>, <tt class="docutils literal"><span class="pre">__delitem__</span></tt>,
+<tt class="docutils literal"><span class="pre">__contains__</span></tt>, <tt class="docutils literal"><span class="pre">__len__</span></tt>, <tt class="docutils literal"><span class="pre">__iter__</span></tt>, <tt class="docutils literal"><span class="pre">__getslice__</span></tt>,
+<tt class="docutils literal"><span class="pre">__setslice__</span></tt>, <tt class="docutils literal"><span class="pre">__reversed__</span></tt> and <tt class="docutils literal"><span class="pre">__missing__</span></tt></li>
+<li>Context manager: <tt class="docutils literal"><span class="pre">__enter__</span></tt> and <tt class="docutils literal"><span class="pre">__exit__</span></tt></li>
+<li>Unary numeric methods: <tt class="docutils literal"><span class="pre">__neg__</span></tt>, <tt class="docutils literal"><span class="pre">__pos__</span></tt> and <tt class="docutils literal"><span class="pre">__invert__</span></tt></li>
+<li>The numeric methods (including right hand and in-place variants):
+<tt class="docutils literal"><span class="pre">__add__</span></tt>, <tt class="docutils literal"><span class="pre">__sub__</span></tt>, <tt class="docutils literal"><span class="pre">__mul__</span></tt>, <tt class="docutils literal"><span class="pre">__div__</span></tt>,
+<tt class="docutils literal"><span class="pre">__floordiv__</span></tt>, <tt class="docutils literal"><span class="pre">__mod__</span></tt>, <tt class="docutils literal"><span class="pre">__divmod__</span></tt>, <tt class="docutils literal"><span class="pre">__lshift__</span></tt>,
+<tt class="docutils literal"><span class="pre">__rshift__</span></tt>, <tt class="docutils literal"><span class="pre">__and__</span></tt>, <tt class="docutils literal"><span class="pre">__xor__</span></tt>, <tt class="docutils literal"><span class="pre">__or__</span></tt>, and <tt class="docutils literal"><span class="pre">__pow__</span></tt></li>
+<li>Numeric conversion methods: <tt class="docutils literal"><span class="pre">__complex__</span></tt>, <tt class="docutils literal"><span class="pre">__int__</span></tt>, <tt class="docutils literal"><span class="pre">__float__</span></tt>,
+<tt class="docutils literal"><span class="pre">__index__</span></tt> and <tt class="docutils literal"><span class="pre">__coerce__</span></tt></li>
+<li>Descriptor methods: <tt class="docutils literal"><span class="pre">__get__</span></tt>, <tt class="docutils literal"><span class="pre">__set__</span></tt> and <tt class="docutils literal"><span class="pre">__delete__</span></tt></li>
+<li>Pickling: <tt class="docutils literal"><span class="pre">__reduce__</span></tt>, <tt class="docutils literal"><span class="pre">__reduce_ex__</span></tt>, <tt class="docutils literal"><span class="pre">__getinitargs__</span></tt>,
+<tt class="docutils literal"><span class="pre">__getnewargs__</span></tt>, <tt class="docutils literal"><span class="pre">__getstate__</span></tt> and <tt class="docutils literal"><span class="pre">__setstate__</span></tt></li>
+</ul>
+<p>The following methods are supported in Python 2 but don&#8217;t exist in Python 3:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__unicode__</span></tt>, <tt class="docutils literal"><span class="pre">__long__</span></tt>, <tt class="docutils literal"><span class="pre">__oct__</span></tt>, <tt class="docutils literal"><span class="pre">__hex__</span></tt> and <tt class="docutils literal"><span class="pre">__nonzero__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__truediv__</span></tt> and <tt class="docutils literal"><span class="pre">__rtruediv__</span></tt></li>
+</ul>
+<p>The following methods are supported in Python 3 but don&#8217;t exist in Python 2:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__bool__</span></tt> and <tt class="docutils literal"><span class="pre">__next__</span></tt></li>
+</ul>
+<p>The following methods exist but are <em>not</em> supported as they are either in use by
+mock, can&#8217;t be set dynamically, or can cause problems:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__getattr__</span></tt>, <tt class="docutils literal"><span class="pre">__setattr__</span></tt>, <tt class="docutils literal"><span class="pre">__init__</span></tt> and <tt class="docutils literal"><span class="pre">__new__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__prepare__</span></tt>, <tt class="docutils literal"><span class="pre">__instancecheck__</span></tt>, <tt class="docutils literal"><span class="pre">__subclasscheck__</span></tt>, <tt class="docutils literal"><span class="pre">__del__</span></tt></li>
+</ul>
+</div>
+<div class="section" id="magic-mock">
+<h1>Magic Mock<a class="headerlink" href="#magic-mock" title="Permalink to this headline">¶</a></h1>
+<p>There are two <cite>MagicMock</cite> variants: <cite>MagicMock</cite> and <cite>NonCallableMagicMock</cite>.</p>
+<dl class="class">
+<dt id="mock.MagicMock">
+<em class="property">class </em><tt class="descname">MagicMock</tt><big>(</big><em>*args</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#mock.MagicMock" title="Permalink to this definition">¶</a></dt>
+<dd><p><tt class="docutils literal"><span class="pre">MagicMock</span></tt> is a subclass of <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> with default implementations
+of most of the magic methods. You can use <tt class="docutils literal"><span class="pre">MagicMock</span></tt> without having to
+configure the magic methods yourself.</p>
+<p>The constructor parameters have the same meaning as for <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a>.</p>
+<p>If you use the <cite>spec</cite> or <cite>spec_set</cite> arguments then <em>only</em> magic methods
+that exist in the spec will be created.</p>
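+<p>A small sketch of that behaviour, assuming a plain user-defined class with
+no magic methods beyond those inherited from <cite>object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Simple(object):
+...     pass
+...
+&gt;&gt;&gt; mock = MagicMock(spec=Simple)
+&gt;&gt;&gt; hasattr(mock, &#39;__str__&#39;)   # object defines __str__, so it is in the spec
+True
+&gt;&gt;&gt; hasattr(mock, &#39;__int__&#39;)   # not in the spec, so it is not created
+False
+</pre></div>
+</div>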
+</dd></dl>
+
+<dl class="class">
+<dt id="mock.NonCallableMagicMock">
+<em class="property">class </em><tt class="descname">NonCallableMagicMock</tt><big>(</big><em>*args</em>, <em>**kw</em><big>)</big><a class="headerlink" href="#mock.NonCallableMagicMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A non-callable version of <cite>MagicMock</cite>.</p>
+<p>The constructor parameters have the same meaning as for
+<a class="reference internal" href="#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>, with the exception of <cite>return_value</cite> and
+<cite>side_effect</cite> which have no meaning on a non-callable mock.</p>
+</dd></dl>
+
+<p>The magic methods are set up with <cite>MagicMock</cite> objects, so you can configure them
+and use them in the usual way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="mi">3</span><span class="p">]</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__setitem__</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="s">&#39;fish&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__getitem__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;result&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">[</span><span class="mi">2</span><span class="p">]</span>
+<span class="go">&#39;result&#39;</span>
+</pre></div>
+</div>
+<p>By default many of the protocol methods are required to return objects of a
+specific type. These methods are preconfigured with a default return value, so
+that they can be used without you having to do anything if you aren&#8217;t interested
+in the return value. You can still <em>set</em> the return value manually if you want
+to change the default.</p>
+<p>Methods and their defaults:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__lt__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__gt__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__le__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__ge__</span></tt>: NotImplemented</li>
+<li><tt class="docutils literal"><span class="pre">__int__</span></tt> : 1</li>
+<li><tt class="docutils literal"><span class="pre">__contains__</span></tt> : False</li>
+<li><tt class="docutils literal"><span class="pre">__len__</span></tt> : 1</li>
+<li><tt class="docutils literal"><span class="pre">__iter__</span></tt> : iter([])</li>
+<li><tt class="docutils literal"><span class="pre">__exit__</span></tt> : False</li>
+<li><tt class="docutils literal"><span class="pre">__complex__</span></tt> : 1j</li>
+<li><tt class="docutils literal"><span class="pre">__float__</span></tt> : 1.0</li>
+<li><tt class="docutils literal"><span class="pre">__bool__</span></tt> : True</li>
+<li><tt class="docutils literal"><span class="pre">__nonzero__</span></tt> : True</li>
+<li><tt class="docutils literal"><span class="pre">__oct__</span></tt> : &#8216;1&#8217;</li>
+<li><tt class="docutils literal"><span class="pre">__hex__</span></tt> : &#8216;0x1&#8217;</li>
+<li><tt class="docutils literal"><span class="pre">__long__</span></tt> : long(1)</li>
+<li><tt class="docutils literal"><span class="pre">__index__</span></tt> : 1</li>
+<li><tt class="docutils literal"><span class="pre">__hash__</span></tt> : default hash for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__str__</span></tt> : default str for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__unicode__</span></tt> : default unicode for the mock</li>
+<li><tt class="docutils literal"><span class="pre">__sizeof__</span></tt>: default sizeof for the mock</li>
+</ul>
+<p>For example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">int</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">len</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">0</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hex</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">&#39;0x1&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">object</span><span class="p">()</span> <span class="ow">in</span> <span class="n">mock</span>
+<span class="go">False</span>
+</pre></div>
+</div>
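+<p>A minimal sketch of overriding one of these preconfigured defaults:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = MagicMock()
+&gt;&gt;&gt; mock.__len__.return_value = 42   # replace the default of 0
+&gt;&gt;&gt; len(mock)
+42
+</pre></div>
+</div>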
+<p>The two equality methods, <cite>__eq__</cite> and <cite>__ne__</cite>, are special (changed in
+0.7.2). By default they do the equality comparison on identity, using a side
+effect, unless you change their return value to return something else:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">MagicMock</span><span class="p">()</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MagicMock</span><span class="p">()</span> <span class="o">!=</span> <span class="mi">3</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__eq__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>In <cite>0.8</cite>, <cite>__iter__</cite> also gained special handling, implemented with a
+side effect. The return value of <cite>MagicMock.__iter__</cite> can be any iterable
+object and isn&#8217;t required to be an iterator:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="p">[</span><span class="s">&#39;a&#39;</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+</pre></div>
+</div>
+<p>If the return value <em>is</em> an iterator, then iterating over it once will consume
+it and subsequent iterations will result in an empty list:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__iter__</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">([</span><span class="s">&#39;a&#39;</span><span class="p">,</span> <span class="s">&#39;b&#39;</span><span class="p">,</span> <span class="s">&#39;c&#39;</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p><tt class="docutils literal"><span class="pre">MagicMock</span></tt> has all of the supported magic methods configured except for some
+of the obscure and obsolete ones. You can still set these up if you want.</p>
+<p>Magic methods that are supported but not setup by default in <tt class="docutils literal"><span class="pre">MagicMock</span></tt> are:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">__cmp__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__getslice__</span></tt> and <tt class="docutils literal"><span class="pre">__setslice__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__coerce__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__subclasses__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__dir__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__format__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__get__</span></tt>, <tt class="docutils literal"><span class="pre">__set__</span></tt> and <tt class="docutils literal"><span class="pre">__delete__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__reversed__</span></tt> and <tt class="docutils literal"><span class="pre">__missing__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__reduce__</span></tt>, <tt class="docutils literal"><span class="pre">__reduce_ex__</span></tt>, <tt class="docutils literal"><span class="pre">__getinitargs__</span></tt>, <tt class="docutils literal"><span class="pre">__getnewargs__</span></tt>,
+<tt class="docutils literal"><span class="pre">__getstate__</span></tt> and <tt class="docutils literal"><span class="pre">__setstate__</span></tt></li>
+<li><tt class="docutils literal"><span class="pre">__getformat__</span></tt> and <tt class="docutils literal"><span class="pre">__setformat__</span></tt></li>
+</ul>
+<hr class="docutils" />
+<table class="docutils footnote" frame="void" id="id4" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id2">[1]</a></td><td>Magic methods <em>should</em> be looked up on the class rather than the
+instance. Different versions of Python are inconsistent about applying this
+rule. The supported protocol methods should work with all supported versions
+of Python.</td></tr>
+</tbody>
+</table>
+<table class="docutils footnote" frame="void" id="id5" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id3">[2]</a></td><td>The function is basically hooked up to the class, but each <tt class="docutils literal"><span class="pre">Mock</span></tt>
+instance is kept isolated from the others.</td></tr>
+</tbody>
+</table>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Mocking Magic Methods</a></li>
+<li><a class="reference internal" href="#magic-mock">Magic Mock</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="sentinel.html"
+ title="previous chapter">Sentinel</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="next chapter">Getting Started with Mock</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/magicmock.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="sentinel.html" title="Sentinel"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/mock.html b/python/mock-1.0.0/html/mock.html
new file mode 100644
index 000000000..84ba37b2b
--- /dev/null
+++ b/python/mock-1.0.0/html/mock.html
@@ -0,0 +1,875 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>The Mock Class &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Patch Decorators" href="patch.html" />
+ <link rel="prev" title="Mock - Mocking and Testing Library" href="index.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="patch.html" title="Patch Decorators"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Mock - Mocking and Testing Library"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="the-mock-class">
+<h1>The Mock Class<a class="headerlink" href="#the-mock-class" title="Permalink to this headline">¶</a></h1>
+<p><cite>Mock</cite> is a flexible mock object intended to replace the use of stubs and
+test doubles throughout your code. Mocks are callable and create attributes as
+new mocks when you access them <a class="footnote-reference" href="#id3" id="id1">[1]</a>. Accessing the same attribute will always
+return the same mock. Mocks record how you use them, allowing you to make
+assertions about what your code has done to them.</p>
+<p><a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> is a subclass of <cite>Mock</cite> with all the magic methods
+pre-created and ready to use. There are also non-callable variants, useful
+when you are mocking out objects that aren&#8217;t callable:
+<a class="reference internal" href="#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> and <a class="reference internal" href="magicmock.html#mock.NonCallableMagicMock" title="mock.NonCallableMagicMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMagicMock</span></tt></a></p>
+<p>The <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorators makes it easy to temporarily replace classes
+in a particular module with a <cite>Mock</cite> object. By default <cite>patch</cite> will create
+a <cite>MagicMock</cite> for you. You can specify an alternative class of <cite>Mock</cite> using
+the <cite>new_callable</cite> argument to <cite>patch</cite>.</p>
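+<p>A minimal sketch of the pattern (the <cite>package.module.ClassName</cite> target is
+hypothetical, standing in for a real class in your code):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import patch
+&gt;&gt;&gt; with patch(&#39;package.module.ClassName&#39;) as MockClass:
+...     instance = MockClass.return_value   # the replaced class is a MagicMock
+...     instance.method.return_value = &#39;result&#39;
+...     # code under test that instantiates ClassName now receives `instance`
+...
+</pre></div>
+</div>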
+<span class="target" id="index-0"></span><span class="target" id="index-1"></span><span class="target" id="index-2"></span><span class="target" id="index-3"></span><span class="target" id="index-4"></span><dl class="class">
+<dt id="mock.Mock">
+<em class="property">class </em><tt class="descname">Mock</tt><big>(</big><em>spec=None</em>, <em>side_effect=None</em>, <em>return_value=DEFAULT</em>, <em>wraps=None</em>, <em>name=None</em>, <em>spec_set=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create a new <cite>Mock</cite> object. <cite>Mock</cite> takes several optional arguments
+that specify the behaviour of the Mock object:</p>
+<ul>
+<li><p class="first"><cite>spec</cite>: This can be either a list of strings or an existing object (a
+class or instance) that acts as the specification for the mock object. If
+you pass in an object then a list of strings is formed by calling dir on
+the object (excluding unsupported magic attributes and methods).
+Accessing any attribute not in this list will raise an <cite>AttributeError</cite>.</p>
+<p>If <cite>spec</cite> is an object (rather than a list of strings) then
+<a class="reference internal" href="#mock.Mock.__class__" title="mock.Mock.__class__"><tt class="xref py py-attr docutils literal"><span class="pre">__class__</span></tt></a> returns the class of the spec object. This allows mocks
+to pass <cite>isinstance</cite> tests.</p>
+</li>
+<li><p class="first"><cite>spec_set</cite>: A stricter variant of <cite>spec</cite>. If used, attempting to <em>set</em>
+or get an attribute on the mock that isn&#8217;t on the object passed as
+<cite>spec_set</cite> will raise an <cite>AttributeError</cite>.</p>
+</li>
+<li><p class="first"><cite>side_effect</cite>: A function to be called whenever the Mock is called. See
+the <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> attribute. Useful for raising exceptions or
+dynamically changing return values. The function is called with the same
+arguments as the mock, and unless it returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>, the return
+value of this function is used as the return value.</p>
+<p>Alternatively <cite>side_effect</cite> can be an exception class or instance. In
+this case the exception will be raised when the mock is called.</p>
+<p>If <cite>side_effect</cite> is an iterable then each call to the mock will return
+the next value from the iterable. If any of the members of the iterable
+are exceptions they will be raised instead of returned.</p>
+<p>A <cite>side_effect</cite> can be cleared by setting it to <cite>None</cite>; the iterable form is sketched just after this list.</p>
+</li>
+<li><p class="first"><cite>return_value</cite>: The value returned when the mock is called. By default
+this is a new Mock (created on first access). See the
+<a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute.</p>
+</li>
+<li><p class="first"><cite>wraps</cite>: Item for the mock object to wrap. If <cite>wraps</cite> is not None then
+calling the Mock will pass the call through to the wrapped object
+(returning the real result and ignoring <cite>return_value</cite>). Attribute access
+on the mock will return a Mock object that wraps the corresponding
+attribute of the wrapped object (so attempting to access an attribute
+that doesn&#8217;t exist will raise an <cite>AttributeError</cite>).</p>
+<p>If the mock has an explicit <cite>return_value</cite> set then calls are not passed
+to the wrapped object and the <cite>return_value</cite> is returned instead.</p>
+</li>
+<li><p class="first"><cite>name</cite>: If the mock has a name then it will be used in the repr of the
+mock. This can be useful for debugging. The name is propagated to child
+mocks.</p>
+</li>
+</ul>
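+<p>A minimal sketch of the iterable form of <cite>side_effect</cite> described in the
+list above, with an exception instance as the final member:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(side_effect=[1, 2, ValueError(&#39;boom&#39;)])
+&gt;&gt;&gt; mock()
+1
+&gt;&gt;&gt; mock()
+2
+&gt;&gt;&gt; mock()   # exception members are raised rather than returned
+Traceback (most recent call last):
+  ...
+ValueError: boom
+</pre></div>
+</div>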
+<p><cite>Mock</cite> can also be instantiated with arbitrary keyword arguments. These will
+be used to set attributes on the mock after it is created. See the
+<a class="reference internal" href="#mock.Mock.configure_mock" title="mock.Mock.configure_mock"><tt class="xref py py-meth docutils literal"><span class="pre">configure_mock()</span></tt></a> method for details.</p>
+<dl class="method">
+<dt id="mock.Mock.assert_called_with">
+<tt class="descname">assert_called_with</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_called_with" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method is a convenient way of asserting that calls are made in a
+particular way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="n">test</span><span class="o">=</span><span class="s">&#39;wow&#39;</span><span class="p">)</span>
+<span class="go">&lt;Mock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="n">test</span><span class="o">=</span><span class="s">&#39;wow&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_called_once_with">
+<tt class="descname">assert_called_once_with</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_called_once_with" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert that the mock was called exactly once and with the specified
+arguments.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">,</span> <span class="n">bar</span><span class="o">=</span><span class="s">&#39;baz&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AssertionError</span>: <span class="n">Expected to be called once. Called 2 times.</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_any_call">
+<tt class="descname">assert_any_call</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_any_call" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert the mock has been called with the specified arguments.</p>
+<p>The assert passes if the mock has <em>ever</em> been called, unlike
+<a class="reference internal" href="#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a> and <a class="reference internal" href="#mock.Mock.assert_called_once_with" title="mock.Mock.assert_called_once_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt></a> that
+only pass if the call is the most recent one.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="n">arg</span><span class="o">=</span><span class="s">&#39;thing&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;some&#39;</span><span class="p">,</span> <span class="s">&#39;thing&#39;</span><span class="p">,</span> <span class="s">&#39;else&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_any_call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="n">arg</span><span class="o">=</span><span class="s">&#39;thing&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.assert_has_calls">
+<tt class="descname">assert_has_calls</tt><big>(</big><em>calls</em>, <em>any_order=False</em><big>)</big><a class="headerlink" href="#mock.Mock.assert_has_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p>Assert the mock has been called with the specified calls.
+The <cite>mock_calls</cite> list is checked for the calls.</p>
+<p>If <cite>any_order</cite> is False (the default) then the calls must be
+sequential. There can be extra calls before or after the
+specified calls.</p>
+<p>If <cite>any_order</cite> is True then the calls can be in any order, but
+they must all appear in <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">2</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">3</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">calls</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">4</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">2</span><span class="p">),</span> <span class="n">call</span><span class="p">(</span><span class="mi">3</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">assert_has_calls</span><span class="p">(</span><span class="n">calls</span><span class="p">,</span> <span class="n">any_order</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.reset_mock">
+<tt class="descname">reset_mock</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.Mock.reset_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>The reset_mock method resets all the call attributes on a mock object:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="s">&#39;hello&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">reset_mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+</pre></div>
+</div>
+<p>This can be useful where you want to make a series of assertions that
+reuse the same object. Note that <cite>reset_mock</cite> <em>doesn&#8217;t</em> clear the
+return value, <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> or any child attributes you have
+set using normal assignment. Child mocks and the return value mock
+(if any) are reset as well.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.mock_add_spec">
+<tt class="descname">mock_add_spec</tt><big>(</big><em>spec</em>, <em>spec_set=False</em><big>)</big><a class="headerlink" href="#mock.Mock.mock_add_spec" title="Permalink to this definition">¶</a></dt>
+<dd><p>Add a spec to a mock. <cite>spec</cite> can either be an object or a
+list of strings. Only attributes on the <cite>spec</cite> can be fetched as
+attributes from the mock.</p>
+<p>If <cite>spec_set</cite> is <cite>True</cite> then only attributes on the spec can be set.</p>
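+<p>A minimal sketch of restricting attributes with a string-list spec (the
+attribute names here are illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_add_spec</span><span class="p">([</span><span class="s">&#39;method&#39;</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span>
+<span class="go">&lt;Mock name=&#39;mock.method&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AttributeError</span>: <span class="n">Mock object has no attribute &#39;other&#39;</span>
+</pre></div>
+</div>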
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.attach_mock">
+<tt class="descname">attach_mock</tt><big>(</big><em>mock</em>, <em>attribute</em><big>)</big><a class="headerlink" href="#mock.Mock.attach_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Attach a mock as an attribute of this one, replacing its name and
+parent. Calls to the attached mock will be recorded in the
+<a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> attributes of this one.</p>
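+<p>A short sketch of attaching a pre-configured mock (the names are
+illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">child</span><span class="p">,</span> <span class="s">&#39;child&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child</span><span class="p">(</span><span class="s">&#39;one&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.child(&#39;one&#39;)]</span>
+</pre></div>
+</div>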
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.configure_mock">
+<tt class="descname">configure_mock</tt><big>(</big><em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.Mock.configure_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Set attributes on the mock through keyword arguments.</p>
+<p>Attributes plus return values and side effects can be set on child
+mocks using standard dot notation and unpacking a dictionary in the
+method call:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">configure_mock</span><span class="p">(</span><span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<p>The same thing can be achieved in the constructor call to mocks:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span>
+<span class="go">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<p><cite>configure_mock</cite> exists to make it easier to do configuration
+after the mock has been created.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock.__dir__">
+<tt class="descname">__dir__</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.Mock.__dir__" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>Mock</cite> objects limit the results of <cite>dir(some_mock)</cite> to useful entries.
+For mocks with a <cite>spec</cite> this includes all the permitted attributes
+for the mock.</p>
+<p>See <a class="reference internal" href="helpers.html#mock.FILTER_DIR" title="mock.FILTER_DIR"><tt class="xref py py-data docutils literal"><span class="pre">FILTER_DIR</span></tt></a> for what this filtering does, and how to
+switch it off.</p>
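+<p>A small sketch, assuming a string-list spec (the attribute names are
+illustrative):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="p">[</span><span class="s">&#39;connect&#39;</span><span class="p">,</span> <span class="s">&#39;close&#39;</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="s">&#39;connect&#39;</span> <span class="ow">in</span> <span class="nb">dir</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="s">&#39;anything_else&#39;</span> <span class="ow">in</span> <span class="nb">dir</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">False</span>
+</pre></div>
+</div>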
+</dd></dl>
+
+<dl class="method">
+<dt id="mock.Mock._get_child_mock">
+<tt class="descname">_get_child_mock</tt><big>(</big><em>**kw</em><big>)</big><a class="headerlink" href="#mock.Mock._get_child_mock" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create the child mocks for attributes and return value.
+By default child mocks will be the same type as the parent.
+Subclasses of Mock may want to override this to customize the way
+child mocks are made.</p>
+<p>For non-callable mocks the callable variant will be used (rather than
+any custom subclass).</p>
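+<p>A sketch of overriding this hook in a subclass; the subclass name and the
+choice of child class are illustrative:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MagicChildren</span><span class="p">(</span><span class="n">Mock</span><span class="p">):</span>
+<span class="gp">... </span>    <span class="k">def</span> <span class="nf">_get_child_mock</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">kw</span><span class="p">):</span>
+<span class="gp">... </span>        <span class="c"># give every child mock magic method support</span>
+<span class="gp">... </span>        <span class="k">return</span> <span class="n">MagicMock</span><span class="p">(</span><span class="o">**</span><span class="n">kw</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicChildren</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">type</span><span class="p">(</span><span class="n">mock</span><span class="o">.</span><span class="n">child</span><span class="p">)</span><span class="o">.</span><span class="n">__name__</span>
+<span class="go">&#39;MagicMock&#39;</span>
+</pre></div>
+</div>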
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.called">
+<tt class="descname">called</tt><a class="headerlink" href="#mock.Mock.called" title="Permalink to this definition">¶</a></dt>
+<dd><p>A boolean representing whether or not the mock object has been called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_count">
+<tt class="descname">call_count</tt><a class="headerlink" href="#mock.Mock.call_count" title="Permalink to this definition">¶</a></dt>
+<dd><p>An integer telling you how many times the mock object has been called:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">0</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">2</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.return_value">
+<tt class="descname">return_value</tt><a class="headerlink" href="#mock.Mock.return_value" title="Permalink to this definition">¶</a></dt>
+<dd><p>Set this to configure the value returned by calling the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p>The default return value is a mock object and you can configure it in
+the normal way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">attribute</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">Attribute</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock()()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>return_value</cite> can also be set in the constructor:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.side_effect">
+<tt class="descname">side_effect</tt><a class="headerlink" href="#mock.Mock.side_effect" title="Permalink to this definition">¶</a></dt>
+<dd><p>This can either be a function to be called when the mock is called,
+an iterable, or an exception (class or instance) to be raised.</p>
+<p>If you pass in a function it will be called with the same arguments as the
+mock and unless the function returns the <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> singleton the
+call to the mock will then return whatever the function returns. If the
+function returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> then the mock will return its normal
+value (from the <a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a>).</p>
+<p>An example of a mock that raises an exception (to test exception
+handling of an API):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&#39;Boom!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">Exception</span>: <span class="n">Boom!</span>
+</pre></div>
+</div>
+<p>Using <cite>side_effect</cite> to return a sequence of values:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="p">[</span><span class="mi">3</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">(),</span> <span class="n">mock</span><span class="p">()</span>
+<span class="go">(3, 2, 1)</span>
+</pre></div>
+</div>
+<p>The <cite>side_effect</cite> function is called with the same arguments as the
+mock (so it is wise for it to accept arbitrary positional and keyword
+arguments) and whatever it returns is used as the return value for
+the call. The exception is if <cite>side_effect</cite> returns <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>,
+in which case the normal <a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> is used.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p><cite>side_effect</cite> can be set in the constructor. Here&#8217;s an example that
+adds one to the value the mock is called with and returns it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">side_effect</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">value</span><span class="p">:</span> <span class="n">value</span> <span class="o">+</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">4</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="o">-</span><span class="mi">8</span><span class="p">)</span>
+<span class="go">-7</span>
+</pre></div>
+</div>
+<p>Setting <cite>side_effect</cite> to <cite>None</cite> clears it:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">Mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">KeyError</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_args">
+<tt class="descname">call_args</tt><a class="headerlink" href="#mock.Mock.call_args" title="Permalink to this definition">¶</a></dt>
+<dd><p>This is either <cite>None</cite> (if the mock hasn&#8217;t been called), or the
+arguments that the mock was last called with. This will be in the
+form of a tuple: the first member is the positional arguments the mock
+was called with (or an empty tuple) and the second member is any
+keyword arguments (or an empty dictionary).</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">print</span> <span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span> <span class="o">==</span> <span class="p">()</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(3, 4)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span> <span class="o">==</span> <span class="p">((</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">),)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">key</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="nb">next</span><span class="o">=</span><span class="s">&#39;w00t!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args</span>
+<span class="go">call(3, 4, 5, key=&#39;fish&#39;, next=&#39;w00t!&#39;)</span>
+</pre></div>
+</div>
+<p><cite>call_args</cite>, along with members of the lists <a class="reference internal" href="#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>,
+<a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects.
+These are tuples, so they can be unpacked to get at the individual
+arguments and make more complex assertions. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.call_args_list">
+<tt class="descname">call_args_list</tt><a class="headerlink" href="#mock.Mock.call_args_list" title="Permalink to this definition">¶</a></dt>
+<dd><p>This is a list of all the calls made to the mock object in sequence
+(so the length of the list is the number of times it has been
+called). Before any calls have been made it is an empty list. The
+<a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> object can be used for conveniently constructing lists of
+calls to compare with <cite>call_args_list</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">(</span><span class="n">key</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="nb">next</span><span class="o">=</span><span class="s">&#39;w00t!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span>
+<span class="go">[call(), call(3, 4), call(key=&#39;fish&#39;, next=&#39;w00t!&#39;)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[(),</span> <span class="p">((</span><span class="mi">3</span><span class="p">,</span> <span class="mi">4</span><span class="p">),),</span> <span class="p">({</span><span class="s">&#39;key&#39;</span><span class="p">:</span> <span class="s">&#39;fish&#39;</span><span class="p">,</span> <span class="s">&#39;next&#39;</span><span class="p">:</span> <span class="s">&#39;w00t!&#39;</span><span class="p">},)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">call_args_list</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Members of <cite>call_args_list</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.method_calls">
+<tt class="descname">method_calls</tt><a class="headerlink" href="#mock.Mock.method_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p>As well as tracking calls to themselves, mocks also track calls to
+methods and attributes, and <em>their</em> methods and attributes:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.method()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">property</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">attribute</span><span class="p">()</span>
+<span class="go">&lt;Mock name=&#39;mock.property.method.attribute()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method_calls</span>
+<span class="go">[call.method(), call.property.method.attribute()]</span>
+</pre></div>
+</div>
+<p>Members of <cite>method_calls</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.mock_calls">
+<tt class="descname">mock_calls</tt><a class="headerlink" href="#mock.Mock.mock_calls" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>mock_calls</cite> records <em>all</em> calls to the mock object, its methods, magic
+methods <em>and</em> return value mocks.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">mock</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">first</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock.first()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">second</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;mock.second()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">int</span><span class="p">(</span><span class="n">mock</span><span class="p">)</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="go">&lt;MagicMock name=&#39;mock()()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">expected</span> <span class="o">=</span> <span class="p">[</span><span class="n">call</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">first</span><span class="p">(</span><span class="n">a</span><span class="o">=</span><span class="mi">3</span><span class="p">),</span> <span class="n">call</span><span class="o">.</span><span class="n">second</span><span class="p">(),</span>
+<span class="gp">... </span><span class="n">call</span><span class="o">.</span><span class="n">__int__</span><span class="p">(),</span> <span class="n">call</span><span class="p">()(</span><span class="mi">1</span><span class="p">)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span> <span class="o">==</span> <span class="n">expected</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Members of <cite>mock_calls</cite> are <a class="reference internal" href="helpers.html#mock.call" title="mock.call"><tt class="xref py py-data docutils literal"><span class="pre">call</span></tt></a> objects. These can be
+unpacked as tuples to get at the individual arguments. See
+<a class="reference internal" href="helpers.html#calls-as-tuples"><em>calls as tuples</em></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="mock.Mock.__class__">
+<tt class="descname">__class__</tt><a class="headerlink" href="#mock.Mock.__class__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Normally the <cite>__class__</cite> attribute of an object will return its type.
+For a mock object with a <cite>spec</cite> <cite>__class__</cite> returns the spec class
+instead. This allows mock objects to pass <cite>isinstance</cite> tests for the
+object they are replacing / masquerading as:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="nb">int</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p><cite>__class__</cite> is assignable to; this allows a mock to pass an
+<cite>isinstance</cite> check without forcing you to use a spec:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">__class__</span> <span class="o">=</span> <span class="nb">dict</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="nb">dict</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="mock.NonCallableMock">
+<em class="property">class </em><tt class="descname">NonCallableMock</tt><big>(</big><em>spec=None</em>, <em>wraps=None</em>, <em>name=None</em>, <em>spec_set=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.NonCallableMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A non-callable version of <cite>Mock</cite>. The constructor parameters have the same
+meaning as for <cite>Mock</cite>, with the exception of <cite>return_value</cite> and <cite>side_effect</cite>,
+which have no meaning on a non-callable mock.</p>
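+<p>A quick sketch of the difference from <cite>Mock</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">NonCallableMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&#39;NonCallableMock&#39; object is not callable</span>
+</pre></div>
+</div>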
+</dd></dl>
+
+<p>Mock objects that use a class or an instance as a <cite>spec</cite> or <cite>spec_set</cite> are able
+to pass <cite>isinstance</cite> tests:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="n">SomeClass</span><span class="p">)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">spec_set</span><span class="o">=</span><span class="n">SomeClass</span><span class="p">())</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">isinstance</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="n">SomeClass</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>The <cite>Mock</cite> classes have support for mocking magic methods. See <a class="reference internal" href="magicmock.html#magic-methods"><em>magic
+methods</em></a> for the full details.</p>
+<p>The mock classes and the <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> decorators all take arbitrary keyword
+arguments for configuration. For the <cite>patch</cite> decorators the keywords are
+passed to the constructor of the mock being created. The keyword arguments
+are for configuring attributes of the mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">attribute</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="s">&#39;fish&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">attribute</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">other</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p>The return value and side effect of child mocks can be set in the same way,
+using dotted notation. As you can&#8217;t use dotted names directly in a call you
+have to create a dictionary and unpack it using <cite>**</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">attrs</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">some_attribute</span><span class="o">=</span><span class="s">&#39;eggs&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">attrs</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">some_attribute</span>
+<span class="go">&#39;eggs&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+<dl class="class">
+<dt id="mock.PropertyMock">
+<em class="property">class </em><tt class="descname">PropertyMock</tt><big>(</big><em>*args</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.PropertyMock" title="Permalink to this definition">¶</a></dt>
+<dd><p>A mock intended to be used as a property, or other descriptor, on a class.
+<cite>PropertyMock</cite> provides <cite>__get__</cite> and <cite>__set__</cite> methods so you can specify
+a return value when it is fetched.</p>
+<p>Fetching a <cite>PropertyMock</cite> instance from an object calls the mock, with
+no args. Setting it calls the mock with the value being set.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Foo</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="nd">@property</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="s">&#39;something&#39;</span>
+<span class="gp">... </span> <span class="nd">@foo.setter</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Foo.foo&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">PropertyMock</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_foo</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">mock_foo</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;mockity-mock&#39;</span>
+<span class="gp">... </span> <span class="n">this_foo</span> <span class="o">=</span> <span class="n">Foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">this_foo</span><span class="o">.</span><span class="n">foo</span>
+<span class="gp">... </span> <span class="n">this_foo</span><span class="o">.</span><span class="n">foo</span> <span class="o">=</span> <span class="mi">6</span>
+<span class="gp">...</span>
+<span class="go">mockity-mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_foo</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(), call(6)]</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<p>Because of the way mock attributes are stored you can&#8217;t directly attach a
+<cite>PropertyMock</cite> to a mock object. Instead you can attach it to the mock type
+object:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">p</span> <span class="o">=</span> <span class="n">PropertyMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">type</span><span class="p">(</span><span class="n">m</span><span class="p">)</span><span class="o">.</span><span class="n">foo</span> <span class="o">=</span> <span class="n">p</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">foo</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">p</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">()</span>
+</pre></div>
+</div>
+<span class="target" id="index-5"></span></div>
+<div class="section" id="calling">
+<span id="index-6"></span><h1>Calling<a class="headerlink" href="#calling" title="Permalink to this headline">¶</a></h1>
+<p>Mock objects are callable. The call will return the value set as the
+<a class="reference internal" href="#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> attribute. The default return value is a new Mock
+object; it is created the first time the return value is accessed (either
+explicitly or by calling the Mock), but it is stored and the same one is
+returned each time.</p>
+<p>Calls made to the object will be recorded in the attributes
+like <a class="reference internal" href="#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">call_args</span></tt></a> and <a class="reference internal" href="#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>.</p>
+<p>If <a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> is set then it will be called after the call has
+been recorded, so if <cite>side_effect</cite> raises an exception the call is still
+recorded.</p>
+<p>The simplest way to make a mock raise an exception when called is to make
+<a class="reference internal" href="#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> an exception class or instance:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="ne">IndexError</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">IndexError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3)]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">KeyError</span><span class="p">(</span><span class="s">&#39;Bang!&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="s">&#39;two&#39;</span><span class="p">,</span> <span class="s">&#39;three&#39;</span><span class="p">,</span> <span class="s">&#39;four&#39;</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>: <span class="n">&#39;Bang!&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3), call(&#39;two&#39;, &#39;three&#39;, &#39;four&#39;)]</span>
+</pre></div>
+</div>
+<p>If <cite>side_effect</cite> is a function then whatever that function returns is what
+calls to the mock return. The <cite>side_effect</cite> function is called with the
+same arguments as the mock. This allows you to vary the return value of the
+call dynamically, based on the input:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">value</span> <span class="o">+</span> <span class="mi">1</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">side_effect</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="go">2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1), call(2)]</span>
+</pre></div>
+</div>
+<p>If you want the mock to still return the default return value (a new mock), or
+any set return value, then there are two ways of doing this. Either return
+<cite>mock.return_value</cite> from inside <cite>side_effect</cite>, or return <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">m</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">DEFAULT</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<p>To remove a <cite>side_effect</cite>, and return to the default behaviour, set the
+<cite>side_effect</cite> to <cite>None</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="mi">6</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">side_effect</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="mi">3</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="n">side_effect</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">6</span>
+</pre></div>
+</div>
+<p>The <cite>side_effect</cite> can also be any iterable object. Repeated calls to the mock
+will return values from the iterable (until the iterable is exhausted and
+a <cite>StopIteration</cite> is raised):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">StopIteration</span>
+</pre></div>
+</div>
+<p>If any members of the iterable are exceptions they will be raised instead of
+returned:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">iterable</span> <span class="o">=</span> <span class="p">(</span><span class="mi">33</span><span class="p">,</span> <span class="ne">ValueError</span><span class="p">,</span> <span class="mi">66</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">side_effect</span><span class="o">=</span><span class="n">iterable</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">33</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">ValueError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">m</span><span class="p">()</span>
+<span class="go">66</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="deleting-attributes">
+<span id="id2"></span><h1>Deleting Attributes<a class="headerlink" href="#deleting-attributes" title="Permalink to this headline">¶</a></h1>
+<p>Mock objects create attributes on demand. This allows them to pretend to be
+objects of any type.</p>
+<p>You may want a mock object to return <cite>False</cite> to a <cite>hasattr</cite> call, or raise an
+<cite>AttributeError</cite> when an attribute is fetched. You can do this by providing
+an object as a <cite>spec</cite> for a mock, but that isn&#8217;t always convenient.</p>
+<p>You &#8220;block&#8221; attributes by deleting them. Once deleted, accessing an attribute
+will raise an <cite>AttributeError</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hasattr</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="s">&#39;m&#39;</span><span class="p">)</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">del</span> <span class="n">mock</span><span class="o">.</span><span class="n">m</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">hasattr</span><span class="p">(</span><span class="n">mock</span><span class="p">,</span> <span class="s">&#39;m&#39;</span><span class="p">)</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">del</span> <span class="n">mock</span><span class="o">.</span><span class="n">f</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">f</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="o">...</span>
+<span class="gr">AttributeError</span>: <span class="n">f</span>
+</pre></div>
+</div>
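+<p>For comparison, here is a minimal sketch of the <cite>spec</cite> approach mentioned
+above. It assumes an empty spec list, so the mock allows no attributes at all:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock(spec=[])
+&gt;&gt;&gt; hasattr(mock, &#39;m&#39;)
+False
+&gt;&gt;&gt; mock.m
+Traceback (most recent call last):
+  ...
+AttributeError: Mock object has no attribute &#39;m&#39;
+</pre></div>
+</div>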
+</div>
+<div class="section" id="attaching-mocks-as-attributes">
+<h1>Attaching Mocks as Attributes<a class="headerlink" href="#attaching-mocks-as-attributes" title="Permalink to this headline">¶</a></h1>
+<p>When you attach a mock as an attribute of another mock (or as the return
+value) it becomes a &#8220;child&#8221; of that mock. Calls to the child are recorded in
+the <a class="reference internal" href="#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a> and <a class="reference internal" href="#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a> attributes of the
+parent. This is useful for configuring child mocks and then attaching them to
+the parent, or for attaching mocks to a parent that records all calls to the
+children and allows you to make assertions about the order of calls between
+mocks:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child1</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child2</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">child1</span> <span class="o">=</span> <span class="n">child1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">child2</span> <span class="o">=</span> <span class="n">child2</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child1</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">child2</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.child1(1), call.child2(2)]</span>
+</pre></div>
+</div>
+<p>The exception to this is if the mock has a name. This allows you to prevent
+the &#8220;parenting&#8221; if for some reason you don&#8217;t want it to happen.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">not_a_child</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&#39;not-a-child&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span> <span class="o">=</span> <span class="n">not_a_child</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">attribute</span><span class="p">()</span>
+<span class="go">&lt;MagicMock name=&#39;not-a-child()&#39; id=&#39;...&#39;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[]</span>
+</pre></div>
+</div>
+<p>Mocks created for you by <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> are automatically given names. To
+attach mocks that have names to a parent you use the <a class="reference internal" href="#mock.Mock.attach_mock" title="mock.Mock.attach_mock"><tt class="xref py py-meth docutils literal"><span class="pre">attach_mock()</span></tt></a>
+method:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing1</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing2</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing1&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">child1</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing2&#39;</span><span class="p">,</span> <span class="n">return_value</span><span class="o">=</span><span class="bp">None</span><span class="p">)</span> <span class="k">as</span> <span class="n">child2</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">parent</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">child1</span><span class="p">,</span> <span class="s">&#39;child1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">parent</span><span class="o">.</span><span class="n">attach_mock</span><span class="p">(</span><span class="n">child2</span><span class="p">,</span> <span class="s">&#39;child2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">child1</span><span class="p">(</span><span class="s">&#39;one&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">child2</span><span class="p">(</span><span class="s">&#39;two&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">parent</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call.child1(&#39;one&#39;), call.child2(&#39;two&#39;)]</span>
+</pre></div>
+</div>
+<hr class="docutils" />
+<table class="docutils footnote" frame="void" id="id3" rules="none">
+<colgroup><col class="label" /><col /></colgroup>
+<tbody valign="top">
+<tr><td class="label"><a class="fn-backref" href="#id1">[1]</a></td><td>The only exceptions are magic methods and attributes (those that have
+leading and trailing double underscores). Mock doesn&#8217;t create these but
+instead raises an <tt class="docutils literal"><span class="pre">AttributeError</span></tt>. This is because the interpreter
+will often implicitly request these methods, and gets <em>very</em> confused if it
+receives a new Mock object when it expects a magic method. If you need magic
+method support see <a class="reference internal" href="magicmock.html#magic-methods"><em>magic methods</em></a>.</td></tr>
+</tbody>
+</table>
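+<p>A minimal sketch of the behaviour described in the footnote, using a plain
+<cite>Mock</cite> (so magic methods are not auto-created):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; mock = Mock()
+&gt;&gt;&gt; mock.__iter__
+Traceback (most recent call last):
+  ...
+AttributeError: __iter__
+</pre></div>
+</div>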
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">The Mock Class</a></li>
+<li><a class="reference internal" href="#calling">Calling</a></li>
+<li><a class="reference internal" href="#deleting-attributes">Deleting Attributes</a></li>
+<li><a class="reference internal" href="#attaching-mocks-as-attributes">Attaching Mocks as Attributes</a></li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="index.html"
+ title="previous chapter">Mock - Mocking and Testing Library</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="patch.html"
+ title="next chapter">Patch Decorators</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/mock.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="patch.html" title="Patch Decorators"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="index.html" title="Mock - Mocking and Testing Library"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/mocksignature.html b/python/mock-1.0.0/html/mocksignature.html
new file mode 100644
index 000000000..5b266f031
--- /dev/null
+++ b/python/mock-1.0.0/html/mocksignature.html
@@ -0,0 +1,352 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>mocksignature &mdash; Mock 0.8.1alpha1 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '0.8.1alpha1',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 0.8.1alpha1 documentation" href="index.html" />
+ <link rel="next" title="Getting Started with Mock" href="getting-started.html" />
+ <link rel="prev" title="Mocking Magic Methods" href="magicmock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 0.8.1alpha1 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="mocksignature">
+<h1>mocksignature<a class="headerlink" href="#mocksignature" title="Permalink to this headline">¶</a></h1>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a>, added in mock 0.8, is a more advanced version of
+<cite>mocksignature</cite> and can be used for many of the same use cases.</p>
+</div>
+<p>A problem with using mock objects to replace real objects in your tests is that
+<a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> can be <em>too</em> flexible. Your code can treat the mock objects in
+any way and you have to manually check that they were called correctly. If your
+code calls functions or methods with the wrong number of arguments then mocks
+don&#8217;t complain.</p>
+<p>The solution to this is <cite>mocksignature</cite>, which creates functions with the
+same signature as the original that delegate to a mock. You can interrogate
+the mock in the usual way to check it has been called with the <em>right</em>
+arguments, but if it is called with the wrong number of arguments it will
+raise a <cite>TypeError</cite> in the same way your production code would.</p>
+<p>Another advantage is that your mocked objects are real functions, which can
+be useful when your code uses
+<a class="reference external" href="http://docs.python.org/library/inspect.html">inspect</a> or depends on
+functions being function objects.</p>
+<dl class="function">
+<dt id="mock.mocksignature">
+<tt class="descname">mocksignature</tt><big>(</big><em>func</em>, <em>mock=None</em>, <em>skipfirst=False</em><big>)</big><a class="headerlink" href="#mock.mocksignature" title="Permalink to this definition">¶</a></dt>
+<dd><p>Create a new function with the same signature as <cite>func</cite> that delegates
+to <cite>mock</cite>. If <cite>skipfirst</cite> is True the first argument is skipped, useful
+for methods where <cite>self</cite> needs to be omitted from the new function.</p>
+<p>If you don&#8217;t pass in a <cite>mock</cite> then one will be created for you.</p>
+<p>Functions returned by <cite>mocksignature</cite> have many of the same attributes
+and assert methods as a mock object.</p>
+<p>The mock is set as the <cite>mock</cite> attribute of the returned function for easy
+access.</p>
+<p><cite>mocksignature</cite> can also be used with classes. It copies the signature of
+the <cite>__init__</cite> method.</p>
+<p>When used with callable objects (instances) it copies the signature of the
+<cite>__call__</cite> method.</p>
+</dd></dl>
+
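+<p>As a rough sketch of <cite>skipfirst</cite> (the function name here is made up for
+illustration), the first argument is dropped from the generated signature:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; def method(self, a, b):
+...     pass
+...
+&gt;&gt;&gt; m = mocksignature(method, skipfirst=True)
+&gt;&gt;&gt; m.return_value = None
+&gt;&gt;&gt; m(1, 2)
+&gt;&gt;&gt; m.assert_called_with(1, 2)
+</pre></div>
+</div>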
+<p><cite>mocksignature</cite> will work out if it is mocking the signature of a method on
+an instance or a method on a class and do the &#8220;right thing&#8221; with the <cite>self</cite>
+argument in both cases.</p>
+<p>Because of a limitation in the way that arguments are collected by functions
+created by <cite>mocksignature</cite>, they are <em>always</em> passed as positional arguments
+(including defaults) and never as keyword arguments.</p>
+<div class="section" id="mocksignature-api">
+<h2>mocksignature api<a class="headerlink" href="#mocksignature-api" title="Permalink to this headline">¶</a></h2>
+<p>Although the objects returned by <cite>mocksignature</cite> are real function objects,
+they have much of the same api as the <a class="reference internal" href="mock.html#mock.Mock" title="mock.Mock"><tt class="xref py py-class docutils literal"><span class="pre">Mock</span></tt></a> class. This includes the
+assert methods:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">func</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">func</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">called</span>
+<span class="go">False</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">called</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">4</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">AssertionError: Expected call</span>: <span class="n">mock(1, 2, 4)</span>
+<span class="go">Actual call: mock(1, 2, 3)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">call_count</span>
+<span class="go">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">side_effect</span> <span class="o">=</span> <span class="ne">IndexError</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">6</span><span class="p">)</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">IndexError</span>
+</pre></div>
+</div>
+<p>The mock object that is being delegated to is available as the <cite>mock</cite> attribute
+of the function created by <cite>mocksignature</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">func2</span><span class="o">.</span><span class="n">mock</span><span class="o">.</span><span class="n">mock_calls</span>
+<span class="go">[call(1, 2, 3), call(4, 5, 6)]</span>
+</pre></div>
+</div>
+<p>The methods and attributes available on functions returned by <cite>mocksignature</cite>
+are:</p>
+<blockquote>
+<div><a class="reference internal" href="mock.html#mock.Mock.assert_any_call" title="mock.Mock.assert_any_call"><tt class="xref py py-meth docutils literal"><span class="pre">assert_any_call()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.assert_called_once_with" title="mock.Mock.assert_called_once_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_once_with()</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.assert_called_with" title="mock.Mock.assert_called_with"><tt class="xref py py-meth docutils literal"><span class="pre">assert_called_with()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.assert_has_calls" title="mock.Mock.assert_has_calls"><tt class="xref py py-meth docutils literal"><span class="pre">assert_has_calls()</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.call_args" title="mock.Mock.call_args"><tt class="xref py py-attr docutils literal"><span class="pre">call_args</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.call_args_list" title="mock.Mock.call_args_list"><tt class="xref py py-attr docutils literal"><span class="pre">call_args_list</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.call_count" title="mock.Mock.call_count"><tt class="xref py py-attr docutils literal"><span class="pre">call_count</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.called" title="mock.Mock.called"><tt class="xref py py-attr docutils literal"><span class="pre">called</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.method_calls" title="mock.Mock.method_calls"><tt class="xref py py-attr docutils literal"><span class="pre">method_calls</span></tt></a>, <cite>mock</cite>, <a class="reference internal" href="mock.html#mock.Mock.mock_calls" title="mock.Mock.mock_calls"><tt class="xref py py-attr docutils literal"><span class="pre">mock_calls</span></tt></a>,
+<a class="reference internal" href="mock.html#mock.Mock.reset_mock" title="mock.Mock.reset_mock"><tt class="xref py py-meth docutils literal"><span class="pre">reset_mock()</span></tt></a>, <a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a>, and
+<a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>.</div></blockquote>
+</div>
+<div class="section" id="example-use">
+<h2>Example use<a class="headerlink" href="#example-use" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="basic-use">
+<h3>Basic use<a class="headerlink" href="#basic-use" title="Permalink to this headline">¶</a></h3>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">function</span><span class="p">,</span> <span class="n">mock</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="go">&#39;some value&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="s">&#39;foo&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="keyword-arguments">
+<h3>Keyword arguments<a class="headerlink" href="#keyword-arguments" title="Permalink to this headline">¶</a></h3>
+<p>Note that arguments to functions created by <cite>mocksignature</cite> are always passed
+in to the underlying mock by position even when called with keywords:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">function</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocking-methods-and-self">
+<h3>Mocking methods and self<a class="headerlink" href="#mocking-methods-and-self" title="Permalink to this headline">¶</a></h3>
+<p>When you use <cite>mocksignature</cite> to replace a method on a class, <cite>self</cite>
+will be included in the method signature, and you will need to include
+the instance when you do your asserts.</p>
+<p>As a quirk of the way Python 2 wraps methods fetched from a class,
+we can <em>get</em> the <cite>return_value</cite> from a function set on a class, but we can&#8217;t
+set it. We have to do this through the exposed <cite>mock</cite> attribute instead:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">SomeClass</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">mock</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 4 arguments (1 given)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>When you use <cite>mocksignature</cite> on instance methods <cite>self</cite> isn&#8217;t included (and we
+can set the <cite>return_value</cite> etc directly):</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="mocksignature-with-classes">
+<h3>mocksignature with classes<a class="headerlink" href="#mocksignature-with-classes" title="Permalink to this headline">¶</a></h3>
+<p>When used with a class <cite>mocksignature</cite> copies the signature of the <cite>__init__</cite>
+method.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">foo</span><span class="p">,</span> <span class="n">bar</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">Something</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">MockSomething</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">instance</span> <span class="ow">is</span> <span class="n">MockSomething</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockSomething</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+</pre></div>
+</div>
+<p>Because the object returned by <cite>mocksignature</cite> is a function rather than a
+<cite>Mock</cite>, you lose the other capabilities of <cite>Mock</cite>, like dynamic attribute
+creation.</p>
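+<p>A quick sketch of the difference, continuing from the example above:
+attribute access on the returned function fails instead of auto-creating a
+child mock:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; MockSomething.attribute
+Traceback (most recent call last):
+  ...
+AttributeError: &#39;function&#39; object has no attribute &#39;attribute&#39;
+</pre></div>
+</div>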
+</div>
+<div class="section" id="mocksignature-with-callable-objects">
+<h3>mocksignature with callable objects<a class="headerlink" href="#mocksignature-with-callable-objects" title="Permalink to this headline">¶</a></h3>
+<p>When used with a callable object <cite>mocksignature</cite> copies the signature of the
+<cite>__call__</cite> method.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Something</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__call__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">spam</span><span class="p">,</span> <span class="n">eggs</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">something</span> <span class="o">=</span> <span class="n">Something</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span> <span class="o">=</span> <span class="n">mocksignature</span><span class="p">(</span><span class="n">something</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">mock_something</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">10</span><span class="p">,</span> <span class="mi">9</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_something</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&lt;lambda&gt;() takes at least 2 arguments (0 given)</span>
+</pre></div>
+</div>
+</div>
+</div>
+<div class="section" id="mocksignature-argument-to-patch">
+<h2>mocksignature argument to patch<a class="headerlink" href="#mocksignature-argument-to-patch" title="Permalink to this headline">¶</a></h2>
+<p><cite>mocksignature</cite> is available as a keyword argument to <a class="reference internal" href="patch.html#mock.patch" title="mock.patch"><tt class="xref py py-func docutils literal"><span class="pre">patch()</span></tt></a> or
+<a class="reference internal" href="patch.html#mock.patch.object" title="mock.patch.object"><tt class="xref py py-func docutils literal"><span class="pre">patch.object()</span></tt></a>. It can be used with functions / methods / classes and
+callable objects.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">SomeClass</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">,</span> <span class="n">c</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;method&#39;</span><span class="p">,</span> <span class="n">mocksignature</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">SomeClass</span><span class="p">()</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="bp">None</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">mocksignature</a><ul>
+<li><a class="reference internal" href="#mocksignature-api">mocksignature api</a></li>
+<li><a class="reference internal" href="#example-use">Example use</a><ul>
+<li><a class="reference internal" href="#basic-use">Basic use</a></li>
+<li><a class="reference internal" href="#keyword-arguments">Keyword arguments</a></li>
+<li><a class="reference internal" href="#mocking-methods-and-self">Mocking methods and self</a></li>
+<li><a class="reference internal" href="#mocksignature-with-classes">mocksignature with classes</a></li>
+<li><a class="reference internal" href="#mocksignature-with-callable-objects">mocksignature with callable objects</a></li>
+</ul>
+</li>
+<li><a class="reference internal" href="#mocksignature-argument-to-patch">mocksignature argument to patch</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="previous chapter">Mocking Magic Methods</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="getting-started.html"
+ title="next chapter">Getting Started with Mock</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/mocksignature.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="getting-started.html" title="Getting Started with Mock"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 0.8.1alpha1 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Feb 16, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.2.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/objects.inv b/python/mock-1.0.0/html/objects.inv
new file mode 100644
index 000000000..363889257
--- /dev/null
+++ b/python/mock-1.0.0/html/objects.inv
Binary files differ
diff --git a/python/mock-1.0.0/html/output.txt b/python/mock-1.0.0/html/output.txt
new file mode 100644
index 000000000..56093e750
--- /dev/null
+++ b/python/mock-1.0.0/html/output.txt
@@ -0,0 +1,126 @@
+Results of doctest builder run on 2012-10-07 18:33:27
+=====================================================
+
+Document: index
+---------------
+1 items passed all tests:
+ 35 tests in default
+35 tests in 1 items.
+35 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: compare
+-----------------
+1 items passed all tests:
+ 39 tests in default
+39 tests in 1 items.
+39 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: getting-started
+-------------------------
+1 items passed all tests:
+ 83 tests in default
+83 tests in 1 items.
+83 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: magicmock
+-------------------
+1 items passed all tests:
+ 40 tests in default
+40 tests in 1 items.
+40 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: patch
+---------------
+1 items passed all tests:
+ 75 tests in default
+75 tests in 1 items.
+75 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 2 tests in default (cleanup code)
+2 tests in 1 items.
+2 passed and 0 failed.
+Test passed.
+
+Document: helpers
+-----------------
+1 items passed all tests:
+ 87 tests in default
+87 tests in 1 items.
+87 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 2 tests in default (cleanup code)
+2 tests in 1 items.
+2 passed and 0 failed.
+Test passed.
+
+Document: examples
+------------------
+1 items passed all tests:
+ 171 tests in default
+171 tests in 1 items.
+171 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: sentinel
+------------------
+1 items passed all tests:
+ 6 tests in default
+6 tests in 1 items.
+6 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Document: mock
+--------------
+1 items passed all tests:
+ 187 tests in default
+187 tests in 1 items.
+187 passed and 0 failed.
+Test passed.
+1 items passed all tests:
+ 1 tests in default (cleanup code)
+1 tests in 1 items.
+1 passed and 0 failed.
+Test passed.
+
+Doctest summary
+===============
+ 723 tests
+ 0 failures in tests
+ 0 failures in setup code
+ 0 failures in cleanup code
diff --git a/python/mock-1.0.0/html/patch.html b/python/mock-1.0.0/html/patch.html
new file mode 100644
index 000000000..e7164d147
--- /dev/null
+++ b/python/mock-1.0.0/html/patch.html
@@ -0,0 +1,648 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Patch Decorators &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Helpers" href="helpers.html" />
+ <link rel="prev" title="The Mock Class" href="mock.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="patch-decorators">
+<h1>Patch Decorators<a class="headerlink" href="#patch-decorators" title="Permalink to this headline">¶</a></h1>
+<p>The patch decorators are used for patching objects only within the scope of
+the function they decorate. They automatically handle the unpatching for you,
+even if exceptions are raised. All of these functions can also be used in with
+statements or as class decorators.</p>
+<div class="section" id="patch">
+<h2>patch<a class="headerlink" href="#patch" title="Permalink to this headline">¶</a></h2>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last"><cite>patch</cite> is straightforward to use. The key is to do the patching in the
+right namespace. See the section <a class="reference internal" href="#id1">where to patch</a>.</p>
+</div>
+<dl class="function">
+<dt id="mock.patch">
+<tt class="descname">patch</tt><big>(</big><em>target</em>, <em>new=DEFAULT</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch" title="Permalink to this definition">¶</a></dt>
+<dd><p><cite>patch</cite> acts as a function decorator, class decorator or a context
+manager. Inside the body of the function or with statement, the <cite>target</cite>
+is patched with a <cite>new</cite> object. When the function/with statement exits
+the patch is undone.</p>
+<p>If <cite>new</cite> is omitted, then the target is replaced with a
+<a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a>. If <cite>patch</cite> is used as a decorator and <cite>new</cite> is
+omitted, the created mock is passed in as an extra argument to the
+decorated function. If <cite>patch</cite> is used as a context manager the created
+mock is returned by the context manager.</p>
+<p><cite>target</cite> should be a string in the form <cite>&#8216;package.module.ClassName&#8217;</cite>. The
+<cite>target</cite> is imported and the specified object replaced with the <cite>new</cite>
+object, so the <cite>target</cite> must be importable from the environment you are
+calling <cite>patch</cite> from. The target is imported when the decorated function
+is executed, not at decoration time.</p>
+<p>The <cite>spec</cite> and <cite>spec_set</cite> keyword arguments are passed to the <cite>MagicMock</cite>
+if patch is creating one for you.</p>
+<p>In addition you can pass <cite>spec=True</cite> or <cite>spec_set=True</cite>, which causes
+patch to pass in the object being mocked as the spec/spec_set object.</p>
+<p><cite>new_callable</cite> allows you to specify a different class, or callable object,
+that will be called to create the <cite>new</cite> object. By default <cite>MagicMock</cite> is
+used.</p>
+<p>A more powerful form of <cite>spec</cite> is <cite>autospec</cite>. If you set <cite>autospec=True</cite>
+then the mock will be created with a spec from the object being replaced.
+All attributes of the mock will also have the spec of the corresponding
+attribute of the object being replaced. Methods and functions being mocked
+will have their arguments checked and will raise a <cite>TypeError</cite> if they are
+called with the wrong signature. For mocks
+replacing a class, their return value (the &#8216;instance&#8217;) will have the same
+spec as the class. See the <a class="reference internal" href="helpers.html#mock.create_autospec" title="mock.create_autospec"><tt class="xref py py-func docutils literal"><span class="pre">create_autospec()</span></tt></a> function and
+<a class="reference internal" href="helpers.html#auto-speccing"><em>Autospeccing</em></a>.</p>
+<p>Instead of <cite>autospec=True</cite> you can pass <cite>autospec=some_object</cite> to use an
+arbitrary object as the spec instead of the one being replaced.</p>
+<p>By default <cite>patch</cite> will fail to replace attributes that don&#8217;t exist. If
+you pass in <cite>create=True</cite>, and the attribute doesn&#8217;t exist, patch will
+create the attribute for you when the patched function is called, and
+delete it again afterwards. This is useful for writing tests against
+attributes that your production code creates at runtime. It is off by
+default because it can be dangerous. With it switched on you can write
+passing tests against APIs that don&#8217;t actually exist!</p>
+<p>Patch can be used as a <cite>TestCase</cite> class decorator. It works by
+decorating each test method in the class. This reduces the boilerplate
+code when your test methods share a common set of patches. <cite>patch</cite> finds
+tests by looking for method names that start with <cite>patch.TEST_PREFIX</cite>.
+By default this is <cite>test</cite>, which matches the way <cite>unittest</cite> finds tests.
+You can specify an alternative prefix by setting <cite>patch.TEST_PREFIX</cite>.</p>
+<p>Patch can be used as a context manager, with the with statement. Here the
+patching applies to the indented block after the with statement. If you
+use &#8220;as&#8221; then the patched object will be bound to the name after the
+&#8220;as&#8221;; very useful if <cite>patch</cite> is creating a mock object for you.</p>
+<p><cite>patch</cite> takes arbitrary keyword arguments. These will be passed to
+the <cite>Mock</cite> (or <cite>new_callable</cite>) on construction.</p>
+<p><cite>patch.dict(...)</cite>, <cite>patch.multiple(...)</cite> and <cite>patch.object(...)</cite> are
+available for alternate use-cases.</p>
+</dd></dl>
+
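+<p>A minimal sketch of the <cite>patch.TEST_PREFIX</cite> behaviour described above
+(the class and attribute names are made up for illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; value = 1
+&gt;&gt;&gt; patch.TEST_PREFIX = &#39;foo&#39;
+&gt;&gt;&gt; @patch(&#39;__main__.value&#39;, &#39;patched&#39;)
+... class Thing(object):
+...     def foo_one(self):
+...         print value
+...
+&gt;&gt;&gt; Thing().foo_one()
+patched
+&gt;&gt;&gt; patch.TEST_PREFIX = &#39;test&#39;
+</pre></div>
+</div>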
+<p><cite>patch</cite> as function decorator, creating the mock for you and passing it into
+the decorated function:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;__main__.SomeClass&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">function</span><span class="p">(</span><span class="n">normal_argument</span><span class="p">,</span> <span class="n">mock_class</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">mock_class</span> <span class="ow">is</span> <span class="n">SomeClass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">function</span><span class="p">(</span><span class="bp">None</span><span class="p">)</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Patching a class replaces the class with a <cite>MagicMock</cite> <em>instance</em>. If the
+class is instantiated in the code under test then it will be the
+<a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> of the mock that will be used.</p>
+<p>If the class is instantiated multiple times you could use
+<a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a> to return a new mock each time. Alternatively you
+can set the <cite>return_value</cite> to be anything you want.</p>
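+<p>A minimal sketch of using <cite>side_effect</cite> to get a fresh mock per
+instantiation (the class name is made up for illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; class Multi(object):
+...     pass
+...
+&gt;&gt;&gt; with patch(&#39;__main__.Multi&#39;, side_effect=lambda: MagicMock()) as MockMulti:
+...     assert Multi() is not Multi()
+...
+</pre></div>
+</div>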
+<p>To configure return values on methods of <em>instances</em> on the patched class
+you must do this on the <cite>return_value</cite>. For example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Class</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">method</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">pass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Class&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">MockClass</span><span class="p">:</span>
+<span class="gp">... </span> <span class="n">instance</span> <span class="o">=</span> <span class="n">MockClass</span><span class="o">.</span><span class="n">return_value</span>
+<span class="gp">... </span> <span class="n">instance</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">Class</span><span class="p">()</span> <span class="ow">is</span> <span class="n">instance</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">Class</span><span class="p">()</span><span class="o">.</span><span class="n">method</span><span class="p">()</span> <span class="o">==</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+<p>If you use <cite>spec</cite> or <cite>spec_set</cite> and <cite>patch</cite> is replacing a <em>class</em>, then the
+return value of the created mock will have the same spec.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">Original</span> <span class="o">=</span> <span class="n">Class</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.Class&#39;</span><span class="p">,</span> <span class="n">spec</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">instance</span> <span class="o">=</span> <span class="n">MockClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">instance</span><span class="p">,</span> <span class="n">Original</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>The <cite>new_callable</cite> argument is useful where you want to use an alternative
+class to the default <a class="reference internal" href="magicmock.html#mock.MagicMock" title="mock.MagicMock"><tt class="xref py py-class docutils literal"><span class="pre">MagicMock</span></tt></a> for the created mock. For example, if
+you wanted a <a class="reference internal" href="mock.html#mock.NonCallableMock" title="mock.NonCallableMock"><tt class="xref py py-class docutils literal"><span class="pre">NonCallableMock</span></tt></a> to be used:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">NonCallableMock</span><span class="p">)</span> <span class="k">as</span> <span class="n">mock_thing</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span> <span class="ow">is</span> <span class="n">mock_thing</span>
+<span class="gp">... </span> <span class="n">thing</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">TypeError</span>: <span class="n">&#39;NonCallableMock&#39; object is not callable</span>
+</pre></div>
+</div>
+<p>Another use case might be to replace an object with a <cite>StringIO</cite> instance:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">StringIO</span> <span class="kn">import</span> <span class="n">StringIO</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">def</span> <span class="nf">foo</span><span class="p">():</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="s">&#39;Something&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;sys.stdout&#39;</span><span class="p">,</span> <span class="n">new_callable</span><span class="o">=</span><span class="n">StringIO</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_stdout</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">foo</span><span class="p">()</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">mock_stdout</span><span class="o">.</span><span class="n">getvalue</span><span class="p">()</span> <span class="o">==</span> <span class="s">&#39;Something</span><span class="se">\n</span><span class="s">&#39;</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>When <cite>patch</cite> is creating a mock for you, it is common that the first thing
+you need to do is to configure the mock. Some of that configuration can be done
+in the call to patch. Any arbitrary keywords you pass into the call will be
+used to set attributes on the created mock:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="n">first</span><span class="o">=</span><span class="s">&#39;one&#39;</span><span class="p">,</span> <span class="n">second</span><span class="o">=</span><span class="s">&#39;two&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">first</span>
+<span class="go">&#39;one&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">second</span>
+<span class="go">&#39;two&#39;</span>
+</pre></div>
+</div>
+<p>As well as attributes on the created mock, attributes of child mocks, like their
+<a class="reference internal" href="mock.html#mock.Mock.return_value" title="mock.Mock.return_value"><tt class="xref py py-attr docutils literal"><span class="pre">return_value</span></tt></a> and <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>, can
+also be configured. These aren&#8217;t syntactically valid to pass in directly as
+keyword arguments, but a dictionary with these as keys can still be expanded
+into a <cite>patch</cite> call using <cite>**</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">config</span> <span class="o">=</span> <span class="p">{</span><span class="s">&#39;method.return_value&#39;</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s">&#39;other.side_effect&#39;</span><span class="p">:</span> <span class="ne">KeyError</span><span class="p">}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;__main__.thing&#39;</span><span class="p">,</span> <span class="o">**</span><span class="n">config</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="go">3</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock_thing</span><span class="o">.</span><span class="n">other</span><span class="p">()</span>
+<span class="gt">Traceback (most recent call last):</span>
+ <span class="c">...</span>
+<span class="gr">KeyError</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="patch-object">
+<h2>patch.object<a class="headerlink" href="#patch-object" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.object">
+<tt class="descclassname">patch.</tt><tt class="descname">object</tt><big>(</big><em>target</em>, <em>attribute</em>, <em>new=DEFAULT</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.object" title="Permalink to this definition">¶</a></dt>
+<dd><p>patch the named member (<cite>attribute</cite>) on an object (<cite>target</cite>) with a mock
+object.</p>
+<p><cite>patch.object</cite> can be used as a decorator, class decorator or a context
+manager. Arguments <cite>new</cite>, <cite>spec</cite>, <cite>create</cite>, <cite>spec_set</cite>, <cite>autospec</cite> and
+<cite>new_callable</cite> have the same meaning as for <cite>patch</cite>. Like <cite>patch</cite>,
+<cite>patch.object</cite> takes arbitrary keyword arguments for configuring the mock
+object it creates.</p>
+<p>When used as a class decorator <cite>patch.object</cite> honours <cite>patch.TEST_PREFIX</cite>
+for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p>You can call <cite>patch.object</cite> with either three or two arguments. The
+three argument form takes the object to be patched, the attribute name and the
+object to replace the attribute with.</p>
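+<p>For example, a minimal sketch of the three argument form used as a context
+manager (<cite>SomeClass</cite> and its attribute are invented for illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import patch
+&gt;&gt;&gt; class SomeClass(object):
+...     attribute = 'original'
+...
+&gt;&gt;&gt; # the third argument is the replacement object itself
+&gt;&gt;&gt; with patch.object(SomeClass, 'attribute', 'patched'):
+...     assert SomeClass.attribute == 'patched'
+...
+&gt;&gt;&gt; assert SomeClass.attribute == 'original'
+</pre></div>
+</div>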
+<p>When calling with the two argument form you omit the replacement object, and a
+mock is created for you and passed in as an extra argument to the decorated
+function:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;class_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock_method</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">mock_method</span><span class="o">.</span><span class="n">assert_called_with</span><span class="p">(</span><span class="mi">3</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>spec</cite>, <cite>create</cite> and the other arguments to <cite>patch.object</cite> have the same
+meaning as they do for <cite>patch</cite>.</p>
+</div>
+<div class="section" id="patch-dict">
+<h2>patch.dict<a class="headerlink" href="#patch-dict" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.dict">
+<tt class="descclassname">patch.</tt><tt class="descname">dict</tt><big>(</big><em>in_dict</em>, <em>values=()</em>, <em>clear=False</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.dict" title="Permalink to this definition">¶</a></dt>
+<dd><p>Patch a dictionary, or dictionary-like object, and restore the dictionary
+to its original state after the test.</p>
+<p><cite>in_dict</cite> can be a dictionary or a mapping-like container. If it is a
+mapping then it must at least support getting, setting and deleting items
+plus iterating over keys.</p>
+<p><cite>in_dict</cite> can also be a string specifying the name of the dictionary, which
+will then be fetched by importing it.</p>
+<p><cite>values</cite> can be a dictionary of values to set in the dictionary. <cite>values</cite>
+can also be an iterable of <cite>(key, value)</cite> pairs.</p>
+<p>If <cite>clear</cite> is True then the dictionary will be cleared before the new
+values are set.</p>
+<p><cite>patch.dict</cite> can also be called with arbitrary keyword arguments to set
+values in the dictionary.</p>
+<p><cite>patch.dict</cite> can be used as a context manager, decorator or class
+decorator. When used as a class decorator <cite>patch.dict</cite> honours
+<cite>patch.TEST_PREFIX</cite> for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p><cite>patch.dict</cite> can be used to add members to a dictionary, or simply let a test
+change a dictionary, and ensure the dictionary is restored when the test
+ends.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mock</span> <span class="kn">import</span> <span class="n">patch</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">foo</span> <span class="o">=</span> <span class="p">{}</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">foo</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">foo</span> <span class="o">==</span> <span class="p">{}</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">os</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;os.environ&#39;</span><span class="p">,</span> <span class="p">{</span><span class="s">&#39;newkey&#39;</span><span class="p">:</span> <span class="s">&#39;newvalue&#39;</span><span class="p">}):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="p">[</span><span class="s">&#39;newkey&#39;</span><span class="p">]</span>
+<span class="gp">...</span>
+<span class="go">newvalue</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="s">&#39;newkey&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span>
+</pre></div>
+</div>
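+<p><cite>clear</cite> can be combined with this. A small sketch (the dictionary contents
+are arbitrary) showing the dictionary emptied first and fully restored afterwards:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; foo = {'key': 'value'}
+&gt;&gt;&gt; # with clear=True the original contents are removed before values are set
+&gt;&gt;&gt; with patch.dict(foo, {'newkey': 'newvalue'}, clear=True):
+...     assert foo == {'newkey': 'newvalue'}
+...
+&gt;&gt;&gt; assert foo == {'key': 'value'}
+</pre></div>
+</div>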
+<p>Keywords can be used in the <cite>patch.dict</cite> call to set values in the dictionary:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">mymodule</span> <span class="o">=</span> <span class="n">MagicMock</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mymodule</span><span class="o">.</span><span class="n">function</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="s">&#39;fish&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="s">&#39;sys.modules&#39;</span><span class="p">,</span> <span class="n">mymodule</span><span class="o">=</span><span class="n">mymodule</span><span class="p">):</span>
+<span class="gp">... </span> <span class="kn">import</span> <span class="nn">mymodule</span>
+<span class="gp">... </span> <span class="n">mymodule</span><span class="o">.</span><span class="n">function</span><span class="p">(</span><span class="s">&#39;some&#39;</span><span class="p">,</span> <span class="s">&#39;args&#39;</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="go">&#39;fish&#39;</span>
+</pre></div>
+</div>
+<p><cite>patch.dict</cite> can be used with dictionary-like objects that aren&#8217;t actually
+dictionaries. At the very minimum they must support item getting, setting,
+deleting and either iteration or membership testing. This corresponds to the
+magic methods <cite>__getitem__</cite>, <cite>__setitem__</cite>, <cite>__delitem__</cite> and either
+<cite>__iter__</cite> or <cite>__contains__</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">Container</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span> <span class="o">=</span> <span class="p">{}</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__setitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__delitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">del</span> <span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">__iter__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="nb">iter</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="n">Container</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">dict</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">one</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">two</span><span class="o">=</span><span class="mi">3</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">2</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;two&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">3</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">thing</span><span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span> <span class="o">==</span> <span class="mi">1</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="nb">list</span><span class="p">(</span><span class="n">thing</span><span class="p">)</span> <span class="o">==</span> <span class="p">[</span><span class="s">&#39;one&#39;</span><span class="p">]</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="patch-multiple">
+<h2>patch.multiple<a class="headerlink" href="#patch-multiple" title="Permalink to this headline">¶</a></h2>
+<dl class="function">
+<dt id="mock.patch.multiple">
+<tt class="descclassname">patch.</tt><tt class="descname">multiple</tt><big>(</big><em>target</em>, <em>spec=None</em>, <em>create=False</em>, <em>spec_set=None</em>, <em>autospec=None</em>, <em>new_callable=None</em>, <em>**kwargs</em><big>)</big><a class="headerlink" href="#mock.patch.multiple" title="Permalink to this definition">¶</a></dt>
+<dd><p>Perform multiple patches in a single call. It takes the object to be
+patched (either as an object or a string to fetch the object by importing)
+and keyword arguments for the patches:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">multiple</span><span class="p">(</span><span class="n">settings</span><span class="p">,</span> <span class="n">FIRST_PATCH</span><span class="o">=</span><span class="s">&#39;one&#39;</span><span class="p">,</span> <span class="n">SECOND_PATCH</span><span class="o">=</span><span class="s">&#39;two&#39;</span><span class="p">):</span>
+ <span class="o">...</span>
+</pre></div>
+</div>
+<p>Use <a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> as the value if you want <cite>patch.multiple</cite> to create
+mocks for you. In this case the created mocks are passed into a decorated
+function by keyword, and a dictionary is returned when <cite>patch.multiple</cite> is
+used as a context manager.</p>
+<p><cite>patch.multiple</cite> can be used as a decorator, class decorator or a context
+manager. The arguments <cite>spec</cite>, <cite>spec_set</cite>, <cite>create</cite>, <cite>autospec</cite> and
+<cite>new_callable</cite> have the same meaning as for <cite>patch</cite>. These arguments will
+be applied to <em>all</em> patches done by <cite>patch.multiple</cite>.</p>
+<p>When used as a class decorator <cite>patch.multiple</cite> honours <cite>patch.TEST_PREFIX</cite>
+for choosing which methods to wrap.</p>
+</dd></dl>
+
+<p>If you want <cite>patch.multiple</cite> to create mocks for you, then you can use
+<a class="reference internal" href="sentinel.html#mock.DEFAULT" title="mock.DEFAULT"><tt class="xref py py-data docutils literal"><span class="pre">DEFAULT</span></tt></a> as the value. If you use <cite>patch.multiple</cite> as a decorator
+then the created mocks are passed into the decorated function by keyword.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">thing</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">other</span> <span class="o">=</span> <span class="nb">object</span><span class="p">()</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test_function</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">thing</span><span class="p">,</span> <span class="n">MagicMock</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">MagicMock</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test_function</span><span class="p">()</span>
+</pre></div>
+</div>
+<p><cite>patch.multiple</cite> can be nested with other <cite>patch</cite> decorators, but put arguments
+passed by keyword <em>after</em> any of the standard arguments created by <cite>patch</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;sys.exit&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch.multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test_function</span><span class="p">(</span><span class="n">mock_exit</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">thing</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;other&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;thing&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">thing</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;exit&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">mock_exit</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">test_function</span><span class="p">()</span>
+</pre></div>
+</div>
+<p>If <cite>patch.multiple</cite> is used as a context manager, the value returned by the
+context manager is a dictionary where created mocks are keyed by name:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">patch</span><span class="o">.</span><span class="n">multiple</span><span class="p">(</span><span class="s">&#39;__main__&#39;</span><span class="p">,</span> <span class="n">thing</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">,</span> <span class="n">other</span><span class="o">=</span><span class="n">DEFAULT</span><span class="p">)</span> <span class="k">as</span> <span class="n">values</span><span class="p">:</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;other&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">values</span><span class="p">[</span><span class="s">&#39;other&#39;</span><span class="p">])</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="s">&#39;thing&#39;</span> <span class="ow">in</span> <span class="nb">repr</span><span class="p">(</span><span class="n">values</span><span class="p">[</span><span class="s">&#39;thing&#39;</span><span class="p">])</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">values</span><span class="p">[</span><span class="s">&#39;thing&#39;</span><span class="p">]</span> <span class="ow">is</span> <span class="n">thing</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">values</span><span class="p">[</span><span class="s">&#39;other&#39;</span><span class="p">]</span> <span class="ow">is</span> <span class="n">other</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="patch-methods-start-and-stop">
+<span id="start-and-stop"></span><h2>patch methods: start and stop<a class="headerlink" href="#patch-methods-start-and-stop" title="Permalink to this headline">¶</a></h2>
+<p>All the patchers have <cite>start</cite> and <cite>stop</cite> methods. These make it simpler to do
+patching in <cite>setUp</cite> methods or where you want to do multiple patches without
+nesting decorators or with statements.</p>
+<p>To use them call <cite>patch</cite>, <cite>patch.object</cite> or <cite>patch.dict</cite> as normal and keep a
+reference to the returned <cite>patcher</cite> object. You can then call <cite>start</cite> to put
+the patch in place and <cite>stop</cite> to undo it.</p>
+<p>If you are using <cite>patch</cite> to create a mock for you then it will be returned by
+the call to <cite>patcher.start</cite>.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">package</span> <span class="kn">import</span> <span class="n">module</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">original</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">new_mock</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="ow">not</span> <span class="n">original</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="n">new_mock</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="n">original</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">module</span><span class="o">.</span><span class="n">ClassName</span> <span class="ow">is</span> <span class="ow">not</span> <span class="n">new_mock</span>
+</pre></div>
+</div>
+<p>A typical use case for this might be for doing multiple patches in the <cite>setUp</cite>
+method of a <cite>TestCase</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class1&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass1</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass2</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">tearDown</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher1</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">patcher2</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class1</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class2</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass2</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<div class="admonition caution">
+<p class="first admonition-title">Caution</p>
+<p>If you use this technique you must ensure that the patching is &#8220;undone&#8221; by
+calling <cite>stop</cite>. This can be fiddlier than you might think, because if an
+exception is raised in the setUp then tearDown is not called. <a class="reference external" href="http://pypi.python.org/pypi/unittest2">unittest2</a> cleanup functions make this
+easier.</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyTest</span><span class="p">(</span><span class="n">TestCase</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">setUp</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="n">patcher</span> <span class="o">=</span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.Class&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass</span> <span class="o">=</span> <span class="n">patcher</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
+<span class="gp">... </span> <span class="bp">self</span><span class="o">.</span><span class="n">addCleanup</span><span class="p">(</span><span class="n">patcher</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
+<span class="gp">...</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">test_something</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">Class</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">MockClass</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MyTest</span><span class="p">(</span><span class="s">&#39;test_something&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
+</pre></div>
+</div>
+<p class="last">As an added bonus you no longer need to keep a reference to the <cite>patcher</cite>
+object.</p>
+</div>
+<p>It is also possible to stop all patches which have been started by using
+<cite>patch.stopall</cite>.</p>
+<dl class="function">
+<dt id="mock.patch.stopall">
+<tt class="descclassname">patch.</tt><tt class="descname">stopall</tt><big>(</big><big>)</big><a class="headerlink" href="#mock.patch.stopall" title="Permalink to this definition">¶</a></dt>
+<dd><p>Stop all active patches. Only stops patches started with <cite>start</cite>.</p>
+</dd></dl>
+
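+<p>A minimal sketch, reusing the hypothetical <cite>package.module</cite> names from the
+examples above:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; patcher1 = patch('package.module.Class1')
+&gt;&gt;&gt; patcher2 = patch('package.module.Class2')
+&gt;&gt;&gt; mock1 = patcher1.start()
+&gt;&gt;&gt; mock2 = patcher2.start()
+&gt;&gt;&gt; # one call undoes every patch that was started with start()
+&gt;&gt;&gt; patch.stopall()
+</pre></div>
+</div>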
+</div>
+<div class="section" id="test-prefix">
+<h2>TEST_PREFIX<a class="headerlink" href="#test-prefix" title="Permalink to this headline">¶</a></h2>
+<p>All of the patchers can be used as class decorators. When used in this way
+they wrap every test method on the class. The patchers recognise methods that
+start with <cite>test</cite> as being test methods. This is the same way that the
+<cite>unittest.TestLoader</cite> finds test methods by default.</p>
+<p>If you want to use a different prefix for your tests, you can inform the
+patchers of it by setting <cite>patch.TEST_PREFIX</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">patch</span><span class="o">.</span><span class="n">TEST_PREFIX</span> <span class="o">=</span> <span class="s">&#39;foo&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">value</span> <span class="o">=</span> <span class="mi">3</span>
+<span class="go">&gt;&gt;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch</span><span class="p">(</span><span class="s">&#39;__main__.value&#39;</span><span class="p">,</span> <span class="s">&#39;not three&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">class</span> <span class="nc">Thing</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo_one</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">value</span>
+<span class="gp">... </span> <span class="k">def</span> <span class="nf">foo_two</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">print</span> <span class="n">value</span>
+<span class="gp">...</span>
+<span class="go">&gt;&gt;&gt;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Thing</span><span class="p">()</span><span class="o">.</span><span class="n">foo_one</span><span class="p">()</span>
+<span class="go">not three</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Thing</span><span class="p">()</span><span class="o">.</span><span class="n">foo_two</span><span class="p">()</span>
+<span class="go">not three</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">value</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="nesting-patch-decorators">
+<h2>Nesting Patch Decorators<a class="headerlink" href="#nesting-patch-decorators" title="Permalink to this headline">¶</a></h2>
+<p>If you want to perform multiple patches then you can simply stack up the
+decorators, using this pattern:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;class_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="nd">@patch.object</span><span class="p">(</span><span class="n">SomeClass</span><span class="p">,</span> <span class="s">&#39;static_method&#39;</span><span class="p">)</span>
+<span class="gp">... </span><span class="k">def</span> <span class="nf">test</span><span class="p">(</span><span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span> <span class="ow">is</span> <span class="n">mock1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span> <span class="ow">is</span> <span class="n">mock2</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">static_method</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="n">SomeClass</span><span class="o">.</span><span class="n">class_method</span><span class="p">(</span><span class="s">&#39;bar&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="k">return</span> <span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span>
+<span class="gp">...</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock1</span><span class="p">,</span> <span class="n">mock2</span> <span class="o">=</span> <span class="n">test</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock1</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;foo&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">mock2</span><span class="o">.</span><span class="n">assert_called_once_with</span><span class="p">(</span><span class="s">&#39;bar&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>Note that the decorators are applied from the bottom upwards. This is the
+standard way that Python applies decorators. The order of the created mocks
+passed into your test function matches this order.</p>
+<p>Like all context managers, patches can be nested using contextlib&#8217;s <cite>nested</cite>
+function; <em>every</em> patch will appear in the tuple after &#8220;as&#8221;:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">contextlib</span> <span class="kn">import</span> <span class="n">nested</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">with</span> <span class="n">nested</span><span class="p">(</span>
+<span class="gp">... </span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName1&#39;</span><span class="p">),</span>
+<span class="gp">... </span> <span class="n">patch</span><span class="p">(</span><span class="s">&#39;package.module.ClassName2&#39;</span><span class="p">)</span>
+<span class="gp">... </span> <span class="p">)</span> <span class="k">as</span> <span class="p">(</span><span class="n">MockClass1</span><span class="p">,</span> <span class="n">MockClass2</span><span class="p">):</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName1</span> <span class="ow">is</span> <span class="n">MockClass1</span>
+<span class="gp">... </span> <span class="k">assert</span> <span class="n">package</span><span class="o">.</span><span class="n">module</span><span class="o">.</span><span class="n">ClassName2</span> <span class="ow">is</span> <span class="n">MockClass2</span>
+<span class="gp">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="where-to-patch">
+<span id="id1"></span><h2>Where to patch<a class="headerlink" href="#where-to-patch" title="Permalink to this headline">¶</a></h2>
+<p><cite>patch</cite> works by (temporarily) replacing the object that a <em>name</em> points to with
+another one. There can be many names pointing to any individual object, so
+for patching to work you must ensure that you patch the name used by the system
+under test.</p>
+<p>The basic principle is that you patch where an object is <em>looked up</em>, which
+is not necessarily the same place as where it is defined. A couple of
+examples will help to clarify this.</p>
+<p>Imagine we have a project that we want to test with the following structure:</p>
+<div class="highlight-python"><pre>a.py
+ -&gt; Defines SomeClass
+
+b.py
+ -&gt; from a import SomeClass
+ -&gt; some_function instantiates SomeClass</pre>
+</div>
+<p>Now we want to test <cite>some_function</cite> but we want to mock out <cite>SomeClass</cite> using
+<cite>patch</cite>. The problem is that when we import module b, which we will have to
+do, it imports <cite>SomeClass</cite> from module a. If we use <cite>patch</cite> to mock out
+<cite>a.SomeClass</cite> then it has no effect on our test; module b already holds a
+reference to the <em>real</em> <cite>SomeClass</cite>, so it looks as though our patching had no
+effect.</p>
+<p>The key is to patch out <cite>SomeClass</cite> where it is used (or where it is looked
+up). In this case <cite>some_function</cite> will actually look up <cite>SomeClass</cite> in module b,
+where we have imported it. The patching should look like:</p>
+<blockquote>
+<div><cite>&#64;patch(&#8216;b.SomeClass&#8217;)</cite></div></blockquote>
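+<p>Putting this first case together as a sketch (modules <cite>a</cite> and <cite>b</cite> laid out
+as above, and assuming <cite>some_function</cite> returns the instance it creates):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import patch
+&gt;&gt;&gt; # patch the name that some_function actually looks up: b.SomeClass
+&gt;&gt;&gt; with patch('b.SomeClass') as MockSomeClass:
+...     import b
+...     assert b.some_function() is MockSomeClass.return_value
+...
+</pre></div>
+</div>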
+<p>However, consider the alternative scenario where instead of <cite>from a import
+SomeClass</cite> module b does <cite>import a</cite> and <cite>some_function</cite> uses <cite>a.SomeClass</cite>. Both
+of these import forms are common. In this case the class we want to patch is
+being looked up in module a, and so we have to patch <cite>a.SomeClass</cite> instead:</p>
+<blockquote>
+<div><cite>&#64;patch(&#8216;a.SomeClass&#8217;)</cite></div></blockquote>
+</div>
+<div class="section" id="patching-descriptors-and-proxy-objects">
+<h2>Patching Descriptors and Proxy Objects<a class="headerlink" href="#patching-descriptors-and-proxy-objects" title="Permalink to this headline">¶</a></h2>
+<p>Since version 0.6.0 both <a class="reference internal" href="#patch">patch</a> and <a class="reference internal" href="#patch-object">patch.object</a> have been able to correctly
+patch and restore descriptors: class methods, static methods and properties.
+You should patch these on the <em>class</em> rather than an instance.</p>
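+<p>For instance, a hedged sketch of patching a property on the class (<cite>Foo</cite> and
+its property are invented for illustration):</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; from mock import patch
+&gt;&gt;&gt; class Foo(object):
+...     @property
+...     def bar(self):
+...         return 'real'
+...
+&gt;&gt;&gt; # patching on the class replaces the property descriptor itself
+&gt;&gt;&gt; with patch.object(Foo, 'bar', 'fake'):
+...     assert Foo().bar == 'fake'
+...
+&gt;&gt;&gt; assert Foo().bar == 'real'
+</pre></div>
+</div>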
+<p>Since version 0.7.0 <a class="reference internal" href="#patch">patch</a> and <a class="reference internal" href="#patch-object">patch.object</a> work correctly with some objects
+that proxy attribute access, like the <a class="reference external" href="http://www.voidspace.org.uk/python/weblog/arch_d7_2010_12_04.shtml#e1198">django settings object</a>.</p>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p class="last">In django <cite>import settings</cite> and <cite>from django.conf import settings</cite>
+return different objects. If you are using libraries / apps that do both you
+may have to patch both. Grrr...</p>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Patch Decorators</a><ul>
+<li><a class="reference internal" href="#patch">patch</a></li>
+<li><a class="reference internal" href="#patch-object">patch.object</a></li>
+<li><a class="reference internal" href="#patch-dict">patch.dict</a></li>
+<li><a class="reference internal" href="#patch-multiple">patch.multiple</a></li>
+<li><a class="reference internal" href="#patch-methods-start-and-stop">patch methods: start and stop</a></li>
+<li><a class="reference internal" href="#test-prefix">TEST_PREFIX</a></li>
+<li><a class="reference internal" href="#nesting-patch-decorators">Nesting Patch Decorators</a></li>
+<li><a class="reference internal" href="#where-to-patch">Where to patch</a></li>
+<li><a class="reference internal" href="#patching-descriptors-and-proxy-objects">Patching Descriptors and Proxy Objects</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="mock.html"
+ title="previous chapter">The Mock Class</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="helpers.html"
+ title="next chapter">Helpers</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/patch.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="mock.html" title="The Mock Class"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/search.html b/python/mock-1.0.0/html/search.html
new file mode 100644
index 000000000..8e0a907ca
--- /dev/null
+++ b/python/mock-1.0.0/html/search.html
@@ -0,0 +1,99 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Search &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <script type="text/javascript" src="_static/searchtools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <script type="text/javascript">
+ jQuery(function() { Search.loadIndex("searchindex.js"); });
+ </script>
+
+
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <h1 id="search-documentation">Search</h1>
+ <div id="fallback" class="admonition warning">
+ <script type="text/javascript">$('#fallback').hide();</script>
+ <p>
+ Please activate JavaScript to enable the search
+ functionality.
+ </p>
+ </div>
+ <p>
+ From here you can search these documents. Enter your search
+ words into the box below and click "search". Note that the search
+ function will automatically search for all of the words. Pages
+ containing fewer words won't appear in the result list.
+ </p>
+ <form action="" method="get">
+ <input type="text" name="q" value="" />
+ <input type="submit" value="search" />
+ <span id="search-progress" style="padding-left: 10px"></span>
+ </form>
+
+ <div id="search-results">
+
+ </div>
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/html/searchindex.js b/python/mock-1.0.0/html/searchindex.js
new file mode 100644
index 000000000..a71918b93
--- /dev/null
+++ b/python/mock-1.0.0/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({/* generated Sphinx search index for the mock 1.0.0 HTML docs; minified machine-generated content */})
\ No newline at end of file
diff --git a/python/mock-1.0.0/html/sentinel.html b/python/mock-1.0.0/html/sentinel.html
new file mode 100644
index 000000000..5b28deb2c
--- /dev/null
+++ b/python/mock-1.0.0/html/sentinel.html
@@ -0,0 +1,156 @@
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+
+ <title>Sentinel &mdash; Mock 1.0.0 documentation</title>
+
+ <link rel="stylesheet" href="_static/nature.css" type="text/css" />
+ <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '',
+ VERSION: '1.0.0',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="_static/jquery.js"></script>
+ <script type="text/javascript" src="_static/underscore.js"></script>
+ <script type="text/javascript" src="_static/doctools.js"></script>
+ <link rel="top" title="Mock 1.0.0 documentation" href="index.html" />
+ <link rel="next" title="Mocking Magic Methods" href="magicmock.html" />
+ <link rel="prev" title="Helpers" href="helpers.html" />
+ </head>
+ <body>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ accesskey="I">index</a></li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ accesskey="N">next</a> |</li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ accesskey="P">previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+ <div class="body">
+
+ <div class="section" id="sentinel">
+<h1>Sentinel<a class="headerlink" href="#sentinel" title="Permalink to this headline">¶</a></h1>
+<dl class="data">
+<dt id="mock.sentinel">
+<tt class="descname">sentinel</tt><a class="headerlink" href="#mock.sentinel" title="Permalink to this definition">¶</a></dt>
+<dd><p>The <tt class="docutils literal"><span class="pre">sentinel</span></tt> object provides a convenient way of creating unique
+objects for your tests.</p>
+<p>Attributes are created on demand when you access them by name. Accessing
+the same attribute will always return the same object. The objects
+returned have a sensible repr so that test failure messages are readable.</p>
+</dd></dl>
+
+<dl class="data">
+<dt id="mock.DEFAULT">
+<tt class="descname">DEFAULT</tt><a class="headerlink" href="#mock.DEFAULT" title="Permalink to this definition">¶</a></dt>
+<dd><p>The <cite>DEFAULT</cite> object is a pre-created sentinel (actually
+<cite>sentinel.DEFAULT</cite>). It can be used by <a class="reference internal" href="mock.html#mock.Mock.side_effect" title="mock.Mock.side_effect"><tt class="xref py py-attr docutils literal"><span class="pre">side_effect</span></tt></a>
+functions to indicate that the normal return value should be used.</p>
+</dd></dl>
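+<p>As a minimal illustrative sketch (assuming <cite>Mock</cite> and <cite>DEFAULT</cite>
+have been imported from <cite>mock</cite>), a <cite>side_effect</cite> function that
+returns <cite>DEFAULT</cite> falls through to the configured return value:</p>
+<div class="highlight-python"><div class="highlight"><pre>&gt;&gt;&gt; def effect(*args, **kwargs):
+...     return DEFAULT
+&gt;&gt;&gt; mock = Mock(side_effect=effect, return_value=3)
+&gt;&gt;&gt; mock()
+3
+</pre></div>
+</div>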
+
+<div class="section" id="sentinel-example">
+<h2>Sentinel Example<a class="headerlink" href="#sentinel-example" title="Permalink to this headline">¶</a></h2>
+<p>Sometimes when testing you need to check that a specific object is passed as an
+argument to another method, or returned. It is common to create named
+sentinel objects for this. <cite>sentinel</cite> provides a convenient way of
+creating and testing the identity of objects like this.</p>
+<p>In this example we monkey patch <cite>method</cite> to return
+<cite>sentinel.some_object</cite>:</p>
+<div class="highlight-python"><div class="highlight"><pre><span class="gp">&gt;&gt;&gt; </span><span class="n">real</span> <span class="o">=</span> <span class="n">ProductionClass</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">Mock</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s">&quot;method&quot;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="o">.</span><span class="n">return_value</span> <span class="o">=</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">real</span><span class="o">.</span><span class="n">method</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="k">assert</span> <span class="n">result</span> <span class="ow">is</span> <span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">sentinel</span><span class="o">.</span><span class="n">some_object</span>
+<span class="go">sentinel.some_object</span>
+</pre></div>
+</div>
+</div>
+</div>
+
+
+ </div>
+ </div>
+ </div>
+ <div class="sphinxsidebar">
+ <div class="sphinxsidebarwrapper">
+ <h3><a href="index.html">Table Of Contents</a></h3>
+ <ul>
+<li><a class="reference internal" href="#">Sentinel</a><ul>
+<li><a class="reference internal" href="#sentinel-example">Sentinel Example</a></li>
+</ul>
+</li>
+</ul>
+
+ <h4>Previous topic</h4>
+ <p class="topless"><a href="helpers.html"
+ title="previous chapter">Helpers</a></p>
+ <h4>Next topic</h4>
+ <p class="topless"><a href="magicmock.html"
+ title="next chapter">Mocking Magic Methods</a></p>
+ <h3>This Page</h3>
+ <ul class="this-page-menu">
+ <li><a href="_sources/sentinel.txt"
+ rel="nofollow">Show Source</a></li>
+ </ul>
+<div id="searchbox" style="display: none">
+ <h3>Quick search</h3>
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" />
+ <input type="submit" value="Go" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+ <p class="searchtip" style="font-size: 90%">
+ Enter search terms or a module, class or function name.
+ </p>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="related">
+ <h3>Navigation</h3>
+ <ul>
+ <li class="right" style="margin-right: 10px">
+ <a href="genindex.html" title="General Index"
+ >index</a></li>
+ <li class="right" >
+ <a href="magicmock.html" title="Mocking Magic Methods"
+ >next</a> |</li>
+ <li class="right" >
+ <a href="helpers.html" title="Helpers"
+ >previous</a> |</li>
+ <li><a href="index.html">Mock 1.0.0 documentation</a> &raquo;</li>
+ </ul>
+ </div>
+ <div class="footer">
+ &copy; Copyright 2007-2012, Michael Foord &amp; the mock team.
+ Last updated on Oct 07, 2012.
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> 1.1.3.
+ </div>
+ </body>
+</html> \ No newline at end of file
diff --git a/python/mock-1.0.0/mock.py b/python/mock-1.0.0/mock.py
new file mode 100644
index 000000000..1be4e6e45
--- /dev/null
+++ b/python/mock-1.0.0/mock.py
@@ -0,0 +1,2356 @@
+# mock.py
+# Test tools for mocking and patching.
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+
+# mock 1.0
+# http://www.voidspace.org.uk/python/mock/
+
+# Released subject to the BSD License
+# Please see http://www.voidspace.org.uk/python/license.shtml
+
+# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
+# Comments, suggestions and bug reports welcome.
+
+
+__all__ = (
+ 'Mock',
+ 'MagicMock',
+ 'patch',
+ 'sentinel',
+ 'DEFAULT',
+ 'ANY',
+ 'call',
+ 'create_autospec',
+ 'FILTER_DIR',
+ 'NonCallableMock',
+ 'NonCallableMagicMock',
+ 'mock_open',
+ 'PropertyMock',
+)
+
+
+__version__ = '1.0.0'
+
+
+import pprint
+import sys
+
+try:
+ import inspect
+except ImportError:
+ # for alternative platforms that
+ # may not have inspect
+ inspect = None
+
+try:
+ from functools import wraps
+except ImportError:
+ # Python 2.4 compatibility
+ def wraps(original):
+ def inner(f):
+ f.__name__ = original.__name__
+ f.__doc__ = original.__doc__
+ f.__module__ = original.__module__
+ return f
+ return inner
+
+try:
+ unicode
+except NameError:
+ # Python 3
+ basestring = unicode = str
+
+try:
+ long
+except NameError:
+ # Python 3
+ long = int
+
+try:
+ BaseException
+except NameError:
+ # Python 2.4 compatibility
+ BaseException = Exception
+
+try:
+ next
+except NameError:
+ def next(obj):
+ return obj.next()
+
+
+BaseExceptions = (BaseException,)
+if 'java' in sys.platform:
+ # jython
+ import java
+ BaseExceptions = (BaseException, java.lang.Throwable)
+
+try:
+ _isidentifier = str.isidentifier
+except AttributeError:
+ # Python 2.X
+ import keyword
+ import re
+ regex = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
+ def _isidentifier(string):
+ if string in keyword.kwlist:
+ return False
+ return regex.match(string)
+
+
+inPy3k = sys.version_info[0] == 3
+
+# Needed to work around Python 3 bug where use of "super" interferes with
+# defining __class__ as a descriptor
+_super = super
+
+self = 'im_self'
+builtin = '__builtin__'
+if inPy3k:
+ self = '__self__'
+ builtin = 'builtins'
+
+FILTER_DIR = True
+
+
+def _is_instance_mock(obj):
+ # can't use isinstance on Mock objects because they override __class__
+ # The base class for all mocks is NonCallableMock
+ return issubclass(type(obj), NonCallableMock)
+
+
+def _is_exception(obj):
+ return (
+ isinstance(obj, BaseExceptions) or
+ isinstance(obj, ClassTypes) and issubclass(obj, BaseExceptions)
+ )
+
+
+class _slotted(object):
+ __slots__ = ['a']
+
+
+DescriptorTypes = (
+ type(_slotted.a),
+ property,
+)
+
+
+def _getsignature(func, skipfirst, instance=False):
+ if inspect is None:
+ raise ImportError('inspect module not available')
+
+ if isinstance(func, ClassTypes) and not instance:
+ try:
+ func = func.__init__
+ except AttributeError:
+ return
+ skipfirst = True
+ elif not isinstance(func, FunctionTypes):
+ # for classes where instance is True we end up here too
+ try:
+ func = func.__call__
+ except AttributeError:
+ return
+
+ if inPy3k:
+ try:
+ argspec = inspect.getfullargspec(func)
+ except TypeError:
+ # C function / method, possibly inherited object().__init__
+ return
+ regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec
+ else:
+ try:
+ regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
+ except TypeError:
+ # C function / method, possibly inherited object().__init__
+ return
+
+ # instance methods and classmethods need to lose the self argument
+ if getattr(func, self, None) is not None:
+ regargs = regargs[1:]
+ if skipfirst:
+ # this condition and the above one are never both True - why?
+ regargs = regargs[1:]
+
+ if inPy3k:
+ signature = inspect.formatargspec(
+ regargs, varargs, varkw, defaults,
+ kwonly, kwonlydef, ann, formatvalue=lambda value: "")
+ else:
+ signature = inspect.formatargspec(
+ regargs, varargs, varkwargs, defaults,
+ formatvalue=lambda value: "")
+ return signature[1:-1], func
+
+
+def _check_signature(func, mock, skipfirst, instance=False):
+ if not _callable(func):
+ return
+
+ result = _getsignature(func, skipfirst, instance)
+ if result is None:
+ return
+ signature, func = result
+
+    # can't name the parameter "self" because "self" is a common argument
+    # name for the functions being mocked - and not always the first one
+ src = "lambda _mock_self, %s: None" % signature
+ checksig = eval(src, {})
+ _copy_func_details(func, checksig)
+ type(mock)._mock_check_sig = checksig
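+    # Illustrative: for `def f(a, b=1)` the source built above is roughly
+    # "lambda _mock_self, a, b: None"; calling the mock with mismatched
+    # arguments then raises TypeError from this lambda via _mock_check_sig.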
+
+
+def _copy_func_details(func, funcopy):
+ funcopy.__name__ = func.__name__
+ funcopy.__doc__ = func.__doc__
+ #funcopy.__dict__.update(func.__dict__)
+ funcopy.__module__ = func.__module__
+ if not inPy3k:
+ funcopy.func_defaults = func.func_defaults
+ return
+ funcopy.__defaults__ = func.__defaults__
+ funcopy.__kwdefaults__ = func.__kwdefaults__
+
+
+def _callable(obj):
+ if isinstance(obj, ClassTypes):
+ return True
+ if getattr(obj, '__call__', None) is not None:
+ return True
+ return False
+
+
+def _is_list(obj):
+ # checks for list or tuples
+ # XXXX badly named!
+ return type(obj) in (list, tuple)
+
+
+def _instance_callable(obj):
+ """Given an object, return True if the object is callable.
+ For classes, return True if instances would be callable."""
+ if not isinstance(obj, ClassTypes):
+ # already an instance
+ return getattr(obj, '__call__', None) is not None
+
+ klass = obj
+ # uses __bases__ instead of __mro__ so that we work with old style classes
+ if klass.__dict__.get('__call__') is not None:
+ return True
+
+ for base in klass.__bases__:
+ if _instance_callable(base):
+ return True
+ return False
+
+
+def _set_signature(mock, original, instance=False):
+ # creates a function with signature (*args, **kwargs) that delegates to a
+ # mock. It still does signature checking by calling a lambda with the same
+ # signature as the original.
+ if not _callable(original):
+ return
+
+ skipfirst = isinstance(original, ClassTypes)
+ result = _getsignature(original, skipfirst, instance)
+ if result is None:
+ # was a C function (e.g. object().__init__ ) that can't be mocked
+ return
+
+ signature, func = result
+
+ src = "lambda %s: None" % signature
+ checksig = eval(src, {})
+ _copy_func_details(func, checksig)
+
+ name = original.__name__
+ if not _isidentifier(name):
+ name = 'funcopy'
+ context = {'_checksig_': checksig, 'mock': mock}
+ src = """def %s(*args, **kwargs):
+ _checksig_(*args, **kwargs)
+ return mock(*args, **kwargs)""" % name
+ exec (src, context)
+ funcopy = context[name]
+ _setup_func(funcopy, mock)
+ return funcopy
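+# Illustrative: for `def add(a, b)` the exec'd source above is roughly
+#   def add(*args, **kwargs):
+#       _checksig_(*args, **kwargs)   # a lambda with the real (a, b) signature
+#       return mock(*args, **kwargs)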
+
+
+def _setup_func(funcopy, mock):
+ funcopy.mock = mock
+
+ # can't use isinstance with mocks
+ if not _is_instance_mock(mock):
+ return
+
+ def assert_called_with(*args, **kwargs):
+ return mock.assert_called_with(*args, **kwargs)
+ def assert_called_once_with(*args, **kwargs):
+ return mock.assert_called_once_with(*args, **kwargs)
+ def assert_has_calls(*args, **kwargs):
+ return mock.assert_has_calls(*args, **kwargs)
+ def assert_any_call(*args, **kwargs):
+ return mock.assert_any_call(*args, **kwargs)
+ def reset_mock():
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+ mock.reset_mock()
+ ret = funcopy.return_value
+        if _is_instance_mock(ret) and ret is not mock:
+ ret.reset_mock()
+
+ funcopy.called = False
+ funcopy.call_count = 0
+ funcopy.call_args = None
+ funcopy.call_args_list = _CallList()
+ funcopy.method_calls = _CallList()
+ funcopy.mock_calls = _CallList()
+
+ funcopy.return_value = mock.return_value
+ funcopy.side_effect = mock.side_effect
+ funcopy._mock_children = mock._mock_children
+
+ funcopy.assert_called_with = assert_called_with
+ funcopy.assert_called_once_with = assert_called_once_with
+ funcopy.assert_has_calls = assert_has_calls
+ funcopy.assert_any_call = assert_any_call
+ funcopy.reset_mock = reset_mock
+
+ mock._mock_delegate = funcopy
+
+
+def _is_magic(name):
+ return '__%s__' % name[2:-2] == name
+
+
+class _SentinelObject(object):
+ "A unique, named, sentinel object."
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return 'sentinel.%s' % self.name
+
+
+class _Sentinel(object):
+ """Access attributes to return a named object, usable as a sentinel."""
+ def __init__(self):
+ self._sentinels = {}
+
+ def __getattr__(self, name):
+ if name == '__bases__':
+ # Without this help(mock) raises an exception
+ raise AttributeError
+ return self._sentinels.setdefault(name, _SentinelObject(name))
+
+
+sentinel = _Sentinel()
+
+DEFAULT = sentinel.DEFAULT
+_missing = sentinel.MISSING
+_deleted = sentinel.DELETED
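+# Illustrative: sentinel attributes are created on demand and are the
+# same object on every access:
+#   >>> sentinel.foo is sentinel.foo
+#   True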
+
+
+class OldStyleClass:
+ pass
+ClassType = type(OldStyleClass)
+
+
+def _copy(value):
+ if type(value) in (dict, list, tuple, set):
+ return type(value)(value)
+ return value
+
+
+ClassTypes = (type,)
+if not inPy3k:
+ ClassTypes = (type, ClassType)
+
+_allowed_names = set(
+ [
+ 'return_value', '_mock_return_value', 'side_effect',
+ '_mock_side_effect', '_mock_parent', '_mock_new_parent',
+ '_mock_name', '_mock_new_name'
+ ]
+)
+
+
+def _delegating_property(name):
+ _allowed_names.add(name)
+ _the_name = '_mock_' + name
+ def _get(self, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ return getattr(self, _the_name)
+ return getattr(sig, name)
+ def _set(self, value, name=name, _the_name=_the_name):
+ sig = self._mock_delegate
+ if sig is None:
+ self.__dict__[_the_name] = value
+ else:
+ setattr(sig, name, value)
+
+ return property(_get, _set)
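+# Illustrative: `called = _delegating_property('called')` reads and writes
+# `self._mock_called` until `_setup_func` installs a delegate on
+# `_mock_delegate`, after which the delegate's attribute is used instead.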
+
+
+
+class _CallList(list):
+
+ def __contains__(self, value):
+ if not isinstance(value, list):
+ return list.__contains__(self, value)
+ len_value = len(value)
+ len_self = len(self)
+ if len_value > len_self:
+ return False
+
+ for i in range(0, len_self - len_value + 1):
+ sub_list = self[i:i+len_value]
+ if sub_list == value:
+ return True
+ return False
+
+ def __repr__(self):
+ return pprint.pformat(list(self))
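+# Illustrative: `__contains__` gives `in` contiguous-subsequence semantics,
+# so `[call(1), call(2)] in mock.mock_calls` is True only if those two
+# calls were recorded consecutively.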
+
+
+def _check_and_set_parent(parent, value, name, new_name):
+ if not _is_instance_mock(value):
+ return False
+ if ((value._mock_name or value._mock_new_name) or
+ (value._mock_parent is not None) or
+ (value._mock_new_parent is not None)):
+ return False
+
+ _parent = parent
+ while _parent is not None:
+ # setting a mock (value) as a child or return value of itself
+ # should not modify the mock
+ if _parent is value:
+ return False
+ _parent = _parent._mock_new_parent
+
+ if new_name:
+ value._mock_new_parent = parent
+ value._mock_new_name = new_name
+ if name:
+ value._mock_parent = parent
+ value._mock_name = name
+ return True
+
+
+
+class Base(object):
+ _mock_return_value = DEFAULT
+ _mock_side_effect = None
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+
+class NonCallableMock(Base):
+ """A non-callable version of `Mock`"""
+
+ def __new__(cls, *args, **kw):
+ # every instance has its own class
+ # so we can create magic methods on the
+ # class without stomping on other mocks
+ new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
+ instance = object.__new__(new)
+ return instance
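+        # Illustrative: each instance gets its own subclass, so
+        # `type(Mock()) is not type(Mock())`; magic methods set on
+        # type(mock) therefore never leak onto other mocks.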
+
+
+ def __init__(
+ self, spec=None, wraps=None, name=None, spec_set=None,
+ parent=None, _spec_state=None, _new_name='', _new_parent=None,
+ **kwargs
+ ):
+ if _new_parent is None:
+ _new_parent = parent
+
+ __dict__ = self.__dict__
+ __dict__['_mock_parent'] = parent
+ __dict__['_mock_name'] = name
+ __dict__['_mock_new_name'] = _new_name
+ __dict__['_mock_new_parent'] = _new_parent
+
+ if spec_set is not None:
+ spec = spec_set
+ spec_set = True
+
+ self._mock_add_spec(spec, spec_set)
+
+ __dict__['_mock_children'] = {}
+ __dict__['_mock_wraps'] = wraps
+ __dict__['_mock_delegate'] = None
+
+ __dict__['_mock_called'] = False
+ __dict__['_mock_call_args'] = None
+ __dict__['_mock_call_count'] = 0
+ __dict__['_mock_call_args_list'] = _CallList()
+ __dict__['_mock_mock_calls'] = _CallList()
+
+ __dict__['method_calls'] = _CallList()
+
+ if kwargs:
+ self.configure_mock(**kwargs)
+
+ _super(NonCallableMock, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state
+ )
+
+
+ def attach_mock(self, mock, attribute):
+ """
+ Attach a mock as an attribute of this one, replacing its name and
+ parent. Calls to the attached mock will be recorded in the
+ `method_calls` and `mock_calls` attributes of this one."""
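+        # Illustrative: after parent.attach_mock(child, 'child'), calling
+        # child(1) records call.child(1) in parent.mock_calls.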
+ mock._mock_parent = None
+ mock._mock_new_parent = None
+ mock._mock_name = ''
+ mock._mock_new_name = None
+
+ setattr(self, attribute, mock)
+
+
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+
+
+ def _mock_add_spec(self, spec, spec_set):
+ _spec_class = None
+
+ if spec is not None and not _is_list(spec):
+ if isinstance(spec, ClassTypes):
+ _spec_class = spec
+ else:
+ _spec_class = _get_class(spec)
+
+ spec = dir(spec)
+
+ __dict__ = self.__dict__
+ __dict__['_spec_class'] = _spec_class
+ __dict__['_spec_set'] = spec_set
+ __dict__['_mock_methods'] = spec
+
+
+ def __get_return_value(self):
+ ret = self._mock_return_value
+ if self._mock_delegate is not None:
+ ret = self._mock_delegate.return_value
+
+ if ret is DEFAULT:
+ ret = self._get_child_mock(
+ _new_parent=self, _new_name='()'
+ )
+ self.return_value = ret
+ return ret
+
+
+ def __set_return_value(self, value):
+ if self._mock_delegate is not None:
+ self._mock_delegate.return_value = value
+ else:
+ self._mock_return_value = value
+ _check_and_set_parent(self, value, None, '()')
+
+ __return_value_doc = "The value to be returned when the mock is called."
+ return_value = property(__get_return_value, __set_return_value,
+ __return_value_doc)
+
+
+ @property
+ def __class__(self):
+ if self._spec_class is None:
+ return type(self)
+ return self._spec_class
+
+ called = _delegating_property('called')
+ call_count = _delegating_property('call_count')
+ call_args = _delegating_property('call_args')
+ call_args_list = _delegating_property('call_args_list')
+ mock_calls = _delegating_property('mock_calls')
+
+
+ def __get_side_effect(self):
+ sig = self._mock_delegate
+ if sig is None:
+ return self._mock_side_effect
+ return sig.side_effect
+
+ def __set_side_effect(self, value):
+ value = _try_iter(value)
+ sig = self._mock_delegate
+ if sig is None:
+ self._mock_side_effect = value
+ else:
+ sig.side_effect = value
+
+ side_effect = property(__get_side_effect, __set_side_effect)
+
+
+ def reset_mock(self):
+ "Restore the mock object to its initial state."
+ self.called = False
+ self.call_args = None
+ self.call_count = 0
+ self.mock_calls = _CallList()
+ self.call_args_list = _CallList()
+ self.method_calls = _CallList()
+
+ for child in self._mock_children.values():
+ if isinstance(child, _SpecState):
+ continue
+ child.reset_mock()
+
+ ret = self._mock_return_value
+ if _is_instance_mock(ret) and ret is not self:
+ ret.reset_mock()
+
+
+ def configure_mock(self, **kwargs):
+ """Set attributes on the mock through keyword arguments.
+
+ Attributes plus return values and side effects can be set on child
+ mocks using standard dot notation and unpacking a dictionary in the
+ method call:
+
+ >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
+ >>> mock.configure_mock(**attrs)"""
+ for arg, val in sorted(kwargs.items(),
+ # we sort on the number of dots so that
+ # attributes are set before we set attributes on
+ # attributes
+ key=lambda entry: entry[0].count('.')):
+ args = arg.split('.')
+ final = args.pop()
+ obj = self
+ for entry in args:
+ obj = getattr(obj, entry)
+ setattr(obj, final, val)
+
+
+ def __getattr__(self, name):
+ if name == '_mock_methods':
+ raise AttributeError(name)
+ elif self._mock_methods is not None:
+ if name not in self._mock_methods or name in _all_magics:
+ raise AttributeError("Mock object has no attribute %r" % name)
+ elif _is_magic(name):
+ raise AttributeError(name)
+
+ result = self._mock_children.get(name)
+ if result is _deleted:
+ raise AttributeError(name)
+ elif result is None:
+ wraps = None
+ if self._mock_wraps is not None:
+ # XXXX should we get the attribute without triggering code
+ # execution?
+ wraps = getattr(self._mock_wraps, name)
+
+ result = self._get_child_mock(
+ parent=self, name=name, wraps=wraps, _new_name=name,
+ _new_parent=self
+ )
+ self._mock_children[name] = result
+
+ elif isinstance(result, _SpecState):
+ result = create_autospec(
+ result.spec, result.spec_set, result.instance,
+ result.parent, result.name
+ )
+ self._mock_children[name] = result
+
+ return result
+
+
+ def __repr__(self):
+ _name_list = [self._mock_new_name]
+ _parent = self._mock_new_parent
+ last = self
+
+ dot = '.'
+ if _name_list == ['()']:
+ dot = ''
+ seen = set()
+ while _parent is not None:
+ last = _parent
+
+ _name_list.append(_parent._mock_new_name + dot)
+ dot = '.'
+ if _parent._mock_new_name == '()':
+ dot = ''
+
+ _parent = _parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ if id(_parent) in seen:
+ break
+ seen.add(id(_parent))
+
+ _name_list = list(reversed(_name_list))
+ _first = last._mock_name or 'mock'
+ if len(_name_list) > 1:
+ if _name_list[1] not in ('()', '().'):
+ _first += '.'
+ _name_list[0] = _first
+ name = ''.join(_name_list)
+
+ name_string = ''
+ if name not in ('mock', 'mock.'):
+ name_string = ' name=%r' % name
+
+ spec_string = ''
+ if self._spec_class is not None:
+ spec_string = ' spec=%r'
+ if self._spec_set:
+ spec_string = ' spec_set=%r'
+ spec_string = spec_string % self._spec_class.__name__
+ return "<%s%s%s id='%s'>" % (
+ type(self).__name__,
+ name_string,
+ spec_string,
+ id(self)
+ )
+
+
+ def __dir__(self):
+ """Filter the output of `dir(mock)` to only useful members.
+ XXXX
+ """
+ extras = self._mock_methods or []
+ from_type = dir(type(self))
+ from_dict = list(self.__dict__)
+
+ if FILTER_DIR:
+ from_type = [e for e in from_type if not e.startswith('_')]
+ from_dict = [e for e in from_dict if not e.startswith('_') or
+ _is_magic(e)]
+ return sorted(set(extras + from_type + from_dict +
+ list(self._mock_children)))
+
+
+ def __setattr__(self, name, value):
+ if name in _allowed_names:
+ # property setters go through here
+ return object.__setattr__(self, name, value)
+ elif (self._spec_set and self._mock_methods is not None and
+ name not in self._mock_methods and
+ name not in self.__dict__):
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+ elif name in _unsupported_magics:
+ msg = 'Attempting to set unsupported magic method %r.' % name
+ raise AttributeError(msg)
+ elif name in _all_magics:
+ if self._mock_methods is not None and name not in self._mock_methods:
+ raise AttributeError("Mock object has no attribute '%s'" % name)
+
+ if not _is_instance_mock(value):
+ setattr(type(self), name, _get_method(name, value))
+ original = value
+ value = lambda *args, **kw: original(self, *args, **kw)
+ else:
+ # only set _new_name and not name so that mock_calls is tracked
+ # but not method calls
+ _check_and_set_parent(self, value, None, name)
+ setattr(type(self), name, value)
+ self._mock_children[name] = value
+ elif name == '__class__':
+ self._spec_class = value
+ return
+ else:
+ if _check_and_set_parent(self, value, name, name):
+ self._mock_children[name] = value
+ return object.__setattr__(self, name, value)
+
+
+ def __delattr__(self, name):
+ if name in _all_magics and name in type(self).__dict__:
+ delattr(type(self), name)
+ if name not in self.__dict__:
+ # for magic methods that are still MagicProxy objects and
+ # not set on the instance itself
+ return
+
+ if name in self.__dict__:
+ object.__delattr__(self, name)
+
+ obj = self._mock_children.get(name, _missing)
+ if obj is _deleted:
+ raise AttributeError(name)
+ if obj is not _missing:
+ del self._mock_children[name]
+ self._mock_children[name] = _deleted
+
+
+
+ def _format_mock_call_signature(self, args, kwargs):
+ name = self._mock_name or 'mock'
+ return _format_call_signature(name, args, kwargs)
+
+
+ def _format_mock_failure_message(self, args, kwargs):
+ message = 'Expected call: %s\nActual call: %s'
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ call_args = self.call_args
+ if len(call_args) == 3:
+ call_args = call_args[1:]
+ actual_string = self._format_mock_call_signature(*call_args)
+ return message % (expected_string, actual_string)
+
+
+ def assert_called_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called with the specified arguments.
+
+ Raises an AssertionError if the args and keyword args passed in are
+ different to the last call to the mock."""
+ self = _mock_self
+ if self.call_args is None:
+ expected = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError('Expected call: %s\nNot called' % (expected,))
+
+ if self.call_args != (args, kwargs):
+ msg = self._format_mock_failure_message(args, kwargs)
+ raise AssertionError(msg)
+
+
+ def assert_called_once_with(_mock_self, *args, **kwargs):
+ """assert that the mock was called exactly once and with the specified
+ arguments."""
+ self = _mock_self
+        if self.call_count != 1:
+ msg = ("Expected to be called once. Called %s times." %
+ self.call_count)
+ raise AssertionError(msg)
+ return self.assert_called_with(*args, **kwargs)
+
+
+ def assert_has_calls(self, calls, any_order=False):
+ """assert the mock has been called with the specified calls.
+ The `mock_calls` list is checked for the calls.
+
+ If `any_order` is False (the default) then the calls must be
+ sequential. There can be extra calls before or after the
+ specified calls.
+
+ If `any_order` is True then the calls can be in any order, but
+ they must all appear in `mock_calls`."""
+ if not any_order:
+ if calls not in self.mock_calls:
+ raise AssertionError(
+ 'Calls not found.\nExpected: %r\n'
+ 'Actual: %r' % (calls, self.mock_calls)
+ )
+ return
+
+ all_calls = list(self.mock_calls)
+
+ not_found = []
+ for kall in calls:
+ try:
+ all_calls.remove(kall)
+ except ValueError:
+ not_found.append(kall)
+ if not_found:
+ raise AssertionError(
+ '%r not all found in call list' % (tuple(not_found),)
+ )
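+        # Illustrative: after m(1); m(2); m(3),
+        #   m.assert_has_calls([call(2), call(3)]) passes (consecutive), and
+        #   m.assert_has_calls([call(3), call(1)], any_order=True) also passes.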
+
+
+ def assert_any_call(self, *args, **kwargs):
+ """assert the mock has been called with the specified arguments.
+
+ The assert passes if the mock has *ever* been called, unlike
+ `assert_called_with` and `assert_called_once_with` that only pass if
+ the call is the most recent one."""
+ kall = call(*args, **kwargs)
+ if kall not in self.call_args_list:
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError(
+ '%s call not found' % expected_string
+ )
+
+
+ def _get_child_mock(self, **kw):
+ """Create the child mocks for attributes and return value.
+ By default child mocks will be the same type as the parent.
+ Subclasses of Mock may want to override this to customize the way
+ child mocks are made.
+
+ For non-callable mocks the callable variant will be used (rather than
+ any custom subclass)."""
+ _type = type(self)
+ if not issubclass(_type, CallableMixin):
+ if issubclass(_type, NonCallableMagicMock):
+ klass = MagicMock
+            elif issubclass(_type, NonCallableMock):
+ klass = Mock
+ else:
+ klass = _type.__mro__[1]
+ return klass(**kw)
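+        # Illustrative: a MagicMock's children (attributes and return_value)
+        # are MagicMocks; a NonCallableMagicMock still produces callable
+        # MagicMock children so that its return value can be called.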
+
+
+
+def _try_iter(obj):
+ if obj is None:
+ return obj
+ if _is_exception(obj):
+ return obj
+ if _callable(obj):
+ return obj
+ try:
+ return iter(obj)
+ except TypeError:
+ # XXXX backwards compatibility
+ # but this will blow up on first call - so maybe we should fail early?
+ return obj
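+# Illustrative: side_effect values pass through _try_iter, so a plain
+# iterable becomes an iterator and each call to the mock consumes the
+# next item; callables and exceptions are passed through unchanged.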
+
+
+
+class CallableMixin(Base):
+
+ def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
+ wraps=None, name=None, spec_set=None, parent=None,
+ _spec_state=None, _new_name='', _new_parent=None, **kwargs):
+ self.__dict__['_mock_return_value'] = return_value
+
+ _super(CallableMixin, self).__init__(
+ spec, wraps, name, spec_set, parent,
+ _spec_state, _new_name, _new_parent, **kwargs
+ )
+
+ self.side_effect = side_effect
+
+
+ def _mock_check_sig(self, *args, **kwargs):
+ # stub method that can be replaced with one with a specific signature
+ pass
+
+
+ def __call__(_mock_self, *args, **kwargs):
+        # can't use self in case a function / method we are mocking uses self
+ # in the signature
+ _mock_self._mock_check_sig(*args, **kwargs)
+ return _mock_self._mock_call(*args, **kwargs)
+
+
+ def _mock_call(_mock_self, *args, **kwargs):
+ self = _mock_self
+ self.called = True
+ self.call_count += 1
+ self.call_args = _Call((args, kwargs), two=True)
+ self.call_args_list.append(_Call((args, kwargs), two=True))
+
+ _new_name = self._mock_new_name
+ _new_parent = self._mock_new_parent
+ self.mock_calls.append(_Call(('', args, kwargs)))
+
+ seen = set()
+ skip_next_dot = _new_name == '()'
+ do_method_calls = self._mock_parent is not None
+ name = self._mock_name
+ while _new_parent is not None:
+ this_mock_call = _Call((_new_name, args, kwargs))
+ if _new_parent._mock_new_name:
+ dot = '.'
+ if skip_next_dot:
+ dot = ''
+
+ skip_next_dot = False
+ if _new_parent._mock_new_name == '()':
+ skip_next_dot = True
+
+ _new_name = _new_parent._mock_new_name + dot + _new_name
+
+ if do_method_calls:
+ if _new_name == name:
+ this_method_call = this_mock_call
+ else:
+ this_method_call = _Call((name, args, kwargs))
+ _new_parent.method_calls.append(this_method_call)
+
+ do_method_calls = _new_parent._mock_parent is not None
+ if do_method_calls:
+ name = _new_parent._mock_name + '.' + name
+
+ _new_parent.mock_calls.append(this_mock_call)
+ _new_parent = _new_parent._mock_new_parent
+
+ # use ids here so as not to call __hash__ on the mocks
+ _new_parent_id = id(_new_parent)
+ if _new_parent_id in seen:
+ break
+ seen.add(_new_parent_id)
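+        # Illustrative: for parent.child.grandchild(1) this loop appends
+        # call.child.grandchild(1) to parent.mock_calls and
+        # call.grandchild(1) to child.mock_calls.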
+
+ ret_val = DEFAULT
+ effect = self.side_effect
+ if effect is not None:
+ if _is_exception(effect):
+ raise effect
+
+ if not _callable(effect):
+ result = next(effect)
+ if _is_exception(result):
+ raise result
+ return result
+
+ ret_val = effect(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+
+ if (self._mock_wraps is not None and
+ self._mock_return_value is DEFAULT):
+ return self._mock_wraps(*args, **kwargs)
+ if ret_val is DEFAULT:
+ ret_val = self.return_value
+ return ret_val
+
+
+
+class Mock(CallableMixin, NonCallableMock):
+ """
+ Create a new `Mock` object. `Mock` takes several optional arguments
+ that specify the behaviour of the Mock object:
+
+ * `spec`: This can be either a list of strings or an existing object (a
+ class or instance) that acts as the specification for the mock object. If
+ you pass in an object then a list of strings is formed by calling dir on
+ the object (excluding unsupported magic attributes and methods). Accessing
+ any attribute not in this list will raise an `AttributeError`.
+
+ If `spec` is an object (rather than a list of strings) then
+ `mock.__class__` returns the class of the spec object. This allows mocks
+ to pass `isinstance` tests.
+
+ * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
+ or get an attribute on the mock that isn't on the object passed as
+ `spec_set` will raise an `AttributeError`.
+
+ * `side_effect`: A function to be called whenever the Mock is called. See
+ the `side_effect` attribute. Useful for raising exceptions or
+ dynamically changing return values. The function is called with the same
+ arguments as the mock, and unless it returns `DEFAULT`, the return
+ value of this function is used as the return value.
+
+ Alternatively `side_effect` can be an exception class or instance. In
+ this case the exception will be raised when the mock is called.
+
+ If `side_effect` is an iterable then each call to the mock will return
+ the next value from the iterable. If any of the members of the iterable
+ are exceptions they will be raised instead of returned.
+
+ * `return_value`: The value returned when the mock is called. By default
+ this is a new Mock (created on first access). See the
+ `return_value` attribute.
+
+ * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
+ calling the Mock will pass the call through to the wrapped object
+ (returning the real result). Attribute access on the mock will return a
+ Mock object that wraps the corresponding attribute of the wrapped object
+ (so attempting to access an attribute that doesn't exist will raise an
+ `AttributeError`).
+
+ If the mock has an explicit `return_value` set then calls are not passed
+ to the wrapped object and the `return_value` is returned instead.
+
+ * `name`: If the mock has a name then it will be used in the repr of the
+ mock. This can be useful for debugging. The name is propagated to child
+ mocks.
+
+ Mocks can also be called with arbitrary keyword arguments. These will be
+ used to set attributes on the mock after it is created.
+ """
+
+
+
+def _dot_lookup(thing, comp, import_path):
+ try:
+ return getattr(thing, comp)
+ except AttributeError:
+ __import__(import_path)
+ return getattr(thing, comp)
+
+
+def _importer(target):
+ components = target.split('.')
+ import_path = components.pop(0)
+ thing = __import__(import_path)
+
+ for comp in components:
+ import_path += ".%s" % comp
+ thing = _dot_lookup(thing, comp, import_path)
+ return thing
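+# Illustrative: _importer('package.module.Thing') imports 'package', walks
+# to 'package.module' (importing it on demand), and returns the final
+# attribute, e.g. the Thing class.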
+
+
+def _is_started(patcher):
+ # XXXX horrible
+ return hasattr(patcher, 'is_local')
+
+
+class _patch(object):
+
+ attribute_name = None
+ _active_patches = set()
+
+ def __init__(
+ self, getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ ):
+ if new_callable is not None:
+ if new is not DEFAULT:
+ raise ValueError(
+ "Cannot use 'new' and 'new_callable' together"
+ )
+ if autospec is not None:
+ raise ValueError(
+ "Cannot use 'autospec' and 'new_callable' together"
+ )
+
+ self.getter = getter
+ self.attribute = attribute
+ self.new = new
+ self.new_callable = new_callable
+ self.spec = spec
+ self.create = create
+ self.has_local = False
+ self.spec_set = spec_set
+ self.autospec = autospec
+ self.kwargs = kwargs
+ self.additional_patchers = []
+
+
+ def copy(self):
+ patcher = _patch(
+ self.getter, self.attribute, self.new, self.spec,
+ self.create, self.spec_set,
+ self.autospec, self.new_callable, self.kwargs
+ )
+ patcher.attribute_name = self.attribute_name
+ patcher.additional_patchers = [
+ p.copy() for p in self.additional_patchers
+ ]
+ return patcher
+
+
+ def __call__(self, func):
+ if isinstance(func, ClassTypes):
+ return self.decorate_class(func)
+ return self.decorate_callable(func)
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ if not attr.startswith(patch.TEST_PREFIX):
+ continue
+
+ attr_value = getattr(klass, attr)
+ if not hasattr(attr_value, "__call__"):
+ continue
+
+ patcher = self.copy()
+ setattr(klass, attr, patcher(attr_value))
+ return klass
+
+
+ def decorate_callable(self, func):
+ if hasattr(func, 'patchings'):
+ func.patchings.append(self)
+ return func
+
+ @wraps(func)
+ def patched(*args, **keywargs):
+            # don't use a with here (backwards compatibility with Python 2.4)
+ extra_args = []
+ entered_patchers = []
+
+ # can't use try...except...finally because of Python 2.4
+ # compatibility
+ exc_info = tuple()
+ try:
+ try:
+ for patching in patched.patchings:
+ arg = patching.__enter__()
+ entered_patchers.append(patching)
+ if patching.attribute_name is not None:
+ keywargs.update(arg)
+ elif patching.new is DEFAULT:
+ extra_args.append(arg)
+
+ args += tuple(extra_args)
+ return func(*args, **keywargs)
+ except:
+ if (patching not in entered_patchers and
+ _is_started(patching)):
+ # the patcher may have been started, but an exception
+ # raised whilst entering one of its additional_patchers
+ entered_patchers.append(patching)
+ # Pass the exception to __exit__
+ exc_info = sys.exc_info()
+ # re-raise the exception
+ raise
+ finally:
+ for patching in reversed(entered_patchers):
+ patching.__exit__(*exc_info)
+
+ patched.patchings = [self]
+ if hasattr(func, 'func_code'):
+ # not in Python 3
+ patched.compat_co_firstlineno = getattr(
+ func, "compat_co_firstlineno",
+ func.func_code.co_firstlineno
+ )
+ return patched
+
+
+ def get_original(self):
+ target = self.getter()
+ name = self.attribute
+
+ original = DEFAULT
+ local = False
+
+ try:
+ original = target.__dict__[name]
+ except (AttributeError, KeyError):
+ original = getattr(target, name, DEFAULT)
+ else:
+ local = True
+
+ if not self.create and original is DEFAULT:
+ raise AttributeError(
+ "%s does not have the attribute %r" % (target, name)
+ )
+ return original, local
+
+
+ def __enter__(self):
+ """Perform the patch."""
+ new, spec, spec_set = self.new, self.spec, self.spec_set
+ autospec, kwargs = self.autospec, self.kwargs
+ new_callable = self.new_callable
+ self.target = self.getter()
+
+ # normalise False to None
+ if spec is False:
+ spec = None
+ if spec_set is False:
+ spec_set = None
+ if autospec is False:
+ autospec = None
+
+ if spec is not None and autospec is not None:
+ raise TypeError("Can't specify spec and autospec")
+ if ((spec is not None or autospec is not None) and
+ spec_set not in (True, None)):
+ raise TypeError("Can't provide explicit spec_set *and* spec or autospec")
+
+ original, local = self.get_original()
+
+ if new is DEFAULT and autospec is None:
+ inherit = False
+ if spec is True:
+ # set spec to the object we are replacing
+ spec = original
+ if spec_set is True:
+ spec_set = original
+ spec = None
+ elif spec is not None:
+ if spec_set is True:
+ spec_set = spec
+ spec = None
+ elif spec_set is True:
+ spec_set = original
+
+ if spec is not None or spec_set is not None:
+ if original is DEFAULT:
+ raise TypeError("Can't use 'spec' with create=True")
+ if isinstance(original, ClassTypes):
+ # If we're patching out a class and there is a spec
+ inherit = True
+
+ Klass = MagicMock
+ _kwargs = {}
+ if new_callable is not None:
+ Klass = new_callable
+ elif spec is not None or spec_set is not None:
+ this_spec = spec
+ if spec_set is not None:
+ this_spec = spec_set
+ if _is_list(this_spec):
+ not_callable = '__call__' not in this_spec
+ else:
+ not_callable = not _callable(this_spec)
+ if not_callable:
+ Klass = NonCallableMagicMock
+
+ if spec is not None:
+ _kwargs['spec'] = spec
+ if spec_set is not None:
+ _kwargs['spec_set'] = spec_set
+
+ # add a name to mocks
+ if (isinstance(Klass, type) and
+ issubclass(Klass, NonCallableMock) and self.attribute):
+ _kwargs['name'] = self.attribute
+
+ _kwargs.update(kwargs)
+ new = Klass(**_kwargs)
+
+ if inherit and _is_instance_mock(new):
+ # we can only tell if the instance should be callable if the
+ # spec is not a list
+ this_spec = spec
+ if spec_set is not None:
+ this_spec = spec_set
+ if (not _is_list(this_spec) and not
+ _instance_callable(this_spec)):
+ Klass = NonCallableMagicMock
+
+ _kwargs.pop('name')
+ new.return_value = Klass(_new_parent=new, _new_name='()',
+ **_kwargs)
+ elif autospec is not None:
+ # spec is ignored, new *must* be default, spec_set is treated
+ # as a boolean. Should we check spec is not None and that spec_set
+ # is a bool?
+ if new is not DEFAULT:
+ raise TypeError(
+ "autospec creates the mock for you. Can't specify "
+ "autospec and new."
+ )
+ if original is DEFAULT:
+ raise TypeError("Can't use 'autospec' with create=True")
+ spec_set = bool(spec_set)
+ if autospec is True:
+ autospec = original
+
+ new = create_autospec(autospec, spec_set=spec_set,
+ _name=self.attribute, **kwargs)
+ elif kwargs:
+ # can't set keyword args when we aren't creating the mock
+ # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
+ raise TypeError("Can't pass kwargs to a mock we aren't creating")
+
+ new_attr = new
+
+ self.temp_original = original
+ self.is_local = local
+ setattr(self.target, self.attribute, new_attr)
+ if self.attribute_name is not None:
+ extra_args = {}
+ if self.new is DEFAULT:
+ extra_args[self.attribute_name] = new
+ for patching in self.additional_patchers:
+ arg = patching.__enter__()
+ if patching.new is DEFAULT:
+ extra_args.update(arg)
+ return extra_args
+
+ return new
+
+
+ def __exit__(self, *exc_info):
+ """Undo the patch."""
+ if not _is_started(self):
+ raise RuntimeError('stop called on unstarted patcher')
+
+ if self.is_local and self.temp_original is not DEFAULT:
+ setattr(self.target, self.attribute, self.temp_original)
+ else:
+ delattr(self.target, self.attribute)
+ if not self.create and not hasattr(self.target, self.attribute):
+ # needed for proxy objects like django settings
+ setattr(self.target, self.attribute, self.temp_original)
+
+ del self.temp_original
+ del self.is_local
+ del self.target
+ for patcher in reversed(self.additional_patchers):
+ if _is_started(patcher):
+ patcher.__exit__(*exc_info)
+
+
+ def start(self):
+ """Activate a patch, returning any created mock."""
+ result = self.__enter__()
+ self._active_patches.add(self)
+ return result
+
+
+ def stop(self):
+ """Stop an active patch."""
+ self._active_patches.discard(self)
+ return self.__exit__()
+
+
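+# Illustrative use of the start/stop pattern on a patcher returned by `patch`:
+#
+#     patcher = patch('package.module.ClassName')
+#     mock_thing = patcher.start()
+#     try:
+#         ...  # code under test sees the mock
+#     finally:
+#         patcher.stop()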
+
+def _get_target(target):
+ try:
+ target, attribute = target.rsplit('.', 1)
+ except (TypeError, ValueError):
+ raise TypeError("Need a valid target to patch. You supplied: %r" %
+ (target,))
+ getter = lambda: _importer(target)
+ return getter, attribute
+
+
+def _patch_object(
+ target, attribute, new=DEFAULT, spec=None,
+ create=False, spec_set=None, autospec=None,
+ new_callable=None, **kwargs
+ ):
+ """
+ patch.object(target, attribute, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=None, new_callable=None, **kwargs)
+
+ patch the named member (`attribute`) on an object (`target`) with a mock
+ object.
+
+ `patch.object` can be used as a decorator, class decorator or a context
+ manager. Arguments `new`, `spec`, `create`, `spec_set`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. Like
+ `patch`, `patch.object` takes arbitrary keyword arguments for configuring
+ the mock object it creates.
+
+ When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
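+
+    For example (illustrative)::
+
+        class Foo(object):
+            def method(self):
+                return 'original'
+
+        with patch.object(Foo, 'method', return_value='mocked'):
+            Foo().method()    # 'mocked'
+        Foo().method()        # 'original' again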
+ """
+ getter = lambda: target
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
+
+
+def _patch_multiple(target, spec=None, create=False, spec_set=None,
+ autospec=None, new_callable=None, **kwargs):
+ """Perform multiple patches in a single call. It takes the object to be
+ patched (either as an object or a string to fetch the object by importing)
+ and keyword arguments for the patches::
+
+ with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
+ ...
+
+ Use `DEFAULT` as the value if you want `patch.multiple` to create
+ mocks for you. In this case the created mocks are passed into a decorated
+ function by keyword, and a dictionary is returned when `patch.multiple` is
+ used as a context manager.
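+
+    For example (illustrative)::
+
+        with patch.multiple('os.path', exists=DEFAULT, isdir=DEFAULT) as mocks:
+            mocks['exists'].return_value = True
+            os.path.exists('anything')    # True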
+
+ `patch.multiple` can be used as a decorator, class decorator or a context
+ manager. The arguments `spec`, `spec_set`, `create`,
+ `autospec` and `new_callable` have the same meaning as for `patch`. These
+ arguments will be applied to *all* patches done by `patch.multiple`.
+
+ When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
+ for choosing which methods to wrap.
+ """
+ if type(target) in (unicode, str):
+ getter = lambda: _importer(target)
+ else:
+ getter = lambda: target
+
+ if not kwargs:
+ raise ValueError(
+ 'Must supply at least one keyword argument with patch.multiple'
+ )
+ # need to wrap in a list for python 3, where items is a view
+ items = list(kwargs.items())
+ attribute, new = items[0]
+ patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ patcher.attribute_name = attribute
+ for attribute, new in items[1:]:
+ this_patcher = _patch(
+ getter, attribute, new, spec, create, spec_set,
+ autospec, new_callable, {}
+ )
+ this_patcher.attribute_name = attribute
+ patcher.additional_patchers.append(this_patcher)
+ return patcher
+
+
+def patch(
+ target, new=DEFAULT, spec=None, create=False,
+ spec_set=None, autospec=None, new_callable=None, **kwargs
+ ):
+ """
+ `patch` acts as a function decorator, class decorator or a context
+ manager. Inside the body of the function or with statement, the `target`
+ is patched with a `new` object. When the function/with statement exits
+ the patch is undone.
+
+ If `new` is omitted, then the target is replaced with a
+ `MagicMock`. If `patch` is used as a decorator and `new` is
+ omitted, the created mock is passed in as an extra argument to the
+ decorated function. If `patch` is used as a context manager the created
+ mock is returned by the context manager.
+
+ `target` should be a string in the form `'package.module.ClassName'`. The
+ `target` is imported and the specified object replaced with the `new`
+ object, so the `target` must be importable from the environment you are
+ calling `patch` from. The target is imported when the decorated function
+ is executed, not at decoration time.
+
+ The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
+ if patch is creating one for you.
+
+ In addition you can pass `spec=True` or `spec_set=True`, which causes
+ patch to pass in the object being mocked as the spec/spec_set object.
+
+ `new_callable` allows you to specify a different class, or callable object,
+ that will be called to create the `new` object. By default `MagicMock` is
+ used.
+
+ A more powerful form of `spec` is `autospec`. If you set `autospec=True`
+    then the mock will be created with a spec from the object being replaced.
+ All attributes of the mock will also have the spec of the corresponding
+ attribute of the object being replaced. Methods and functions being
+ mocked will have their arguments checked and will raise a `TypeError` if
+ they are called with the wrong signature. For mocks replacing a class,
+ their return value (the 'instance') will have the same spec as the class.
+
+ Instead of `autospec=True` you can pass `autospec=some_object` to use an
+ arbitrary object as the spec instead of the one being replaced.
+
+ By default `patch` will fail to replace attributes that don't exist. If
+ you pass in `create=True`, and the attribute doesn't exist, patch will
+ create the attribute for you when the patched function is called, and
+ delete it again afterwards. This is useful for writing tests against
+    attributes that your production code creates at runtime. It is off by
+    default because it can be dangerous. With it switched on you can write
+ passing tests against APIs that don't actually exist!
+
+ Patch can be used as a `TestCase` class decorator. It works by
+ decorating each test method in the class. This reduces the boilerplate
+    code when your test methods share a common set of patches. `patch` finds
+ tests by looking for method names that start with `patch.TEST_PREFIX`.
+ By default this is `test`, which matches the way `unittest` finds tests.
+ You can specify an alternative prefix by setting `patch.TEST_PREFIX`.
+
+ Patch can be used as a context manager, with the with statement. Here the
+ patching applies to the indented block after the with statement. If you
+ use "as" then the patched object will be bound to the name after the
+ "as"; very useful if `patch` is creating a mock object for you.
+
+ `patch` takes arbitrary keyword arguments. These will be passed to
+ the `Mock` (or `new_callable`) on construction.
+
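+    For example (illustrative)::
+
+        @patch('os.getcwd', return_value='/patched')
+        def test_cwd(mock_getcwd):
+            assert os.getcwd() == '/patched'
+            mock_getcwd.assert_called_once_with()
+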
+ `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
+ available for alternate use-cases.
+ """
+ getter, attribute = _get_target(target)
+ return _patch(
+ getter, attribute, new, spec, create,
+ spec_set, autospec, new_callable, kwargs
+ )
+
+
+class _patch_dict(object):
+ """
+ Patch a dictionary, or dictionary like object, and restore the dictionary
+ to its original state after the test.
+
+ `in_dict` can be a dictionary or a mapping like container. If it is a
+ mapping then it must at least support getting, setting and deleting items
+ plus iterating over keys.
+
+ `in_dict` can also be a string specifying the name of the dictionary, which
+ will then be fetched by importing it.
+
+ `values` can be a dictionary of values to set in the dictionary. `values`
+ can also be an iterable of `(key, value)` pairs.
+
+ If `clear` is True then the dictionary will be cleared before the new
+ values are set.
+
+ `patch.dict` can also be called with arbitrary keyword arguments to set
+ values in the dictionary::
+
+ with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
+ ...
+
+ `patch.dict` can be used as a context manager, decorator or class
+ decorator. When used as a class decorator `patch.dict` honours
+ `patch.TEST_PREFIX` for choosing which methods to wrap.
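+
+    For example, with `clear=True` the dictionary is emptied for the duration
+    of the patch and restored afterwards (illustrative)::
+
+        foo = {'key': 'value'}
+        with patch.dict(foo, {'other': 'thing'}, clear=True):
+            assert foo == {'other': 'thing'}
+        assert foo == {'key': 'value'}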
+ """
+
+ def __init__(self, in_dict, values=(), clear=False, **kwargs):
+ if isinstance(in_dict, basestring):
+ in_dict = _importer(in_dict)
+ self.in_dict = in_dict
+ # support any argument supported by dict(...) constructor
+ self.values = dict(values)
+ self.values.update(kwargs)
+ self.clear = clear
+ self._original = None
+
+
+ def __call__(self, f):
+ if isinstance(f, ClassTypes):
+ return self.decorate_class(f)
+ @wraps(f)
+ def _inner(*args, **kw):
+ self._patch_dict()
+ try:
+ return f(*args, **kw)
+ finally:
+ self._unpatch_dict()
+
+ return _inner
+
+
+ def decorate_class(self, klass):
+ for attr in dir(klass):
+ attr_value = getattr(klass, attr)
+ if (attr.startswith(patch.TEST_PREFIX) and
+ hasattr(attr_value, "__call__")):
+ decorator = _patch_dict(self.in_dict, self.values, self.clear)
+ decorated = decorator(attr_value)
+ setattr(klass, attr, decorated)
+ return klass
+
+
+ def __enter__(self):
+ """Patch the dict."""
+ self._patch_dict()
+
+
+ def _patch_dict(self):
+ values = self.values
+ in_dict = self.in_dict
+ clear = self.clear
+
+ try:
+ original = in_dict.copy()
+ except AttributeError:
+ # dict like object with no copy method
+ # must support iteration over keys
+ original = {}
+ for key in in_dict:
+ original[key] = in_dict[key]
+ self._original = original
+
+ if clear:
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(values)
+ except AttributeError:
+ # dict like object with no update method
+ for key in values:
+ in_dict[key] = values[key]
+
+
+ def _unpatch_dict(self):
+ in_dict = self.in_dict
+ original = self._original
+
+ _clear_dict(in_dict)
+
+ try:
+ in_dict.update(original)
+ except AttributeError:
+ for key in original:
+ in_dict[key] = original[key]
+
+
+ def __exit__(self, *args):
+ """Unpatch the dict."""
+ self._unpatch_dict()
+ return False
+
+ start = __enter__
+ stop = __exit__
+
+
+def _clear_dict(in_dict):
+ try:
+ in_dict.clear()
+ except AttributeError:
+ keys = list(in_dict)
+ for key in keys:
+ del in_dict[key]
+
+
+def _patch_stopall():
+ """Stop all active patches."""
+ for patch in list(_patch._active_patches):
+ patch.stop()
+
+
+patch.object = _patch_object
+patch.dict = _patch_dict
+patch.multiple = _patch_multiple
+patch.stopall = _patch_stopall
+patch.TEST_PREFIX = 'test'
+
+magic_methods = (
+ "lt le gt ge eq ne "
+ "getitem setitem delitem "
+ "len contains iter "
+ "hash str sizeof "
+ "enter exit "
+ "divmod neg pos abs invert "
+ "complex int float index "
+ "trunc floor ceil "
+)
+
+numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
+inplace = ' '.join('i%s' % n for n in numerics.split())
+right = ' '.join('r%s' % n for n in numerics.split())
+extra = ''
+if inPy3k:
+ extra = 'bool next '
+else:
+ extra = 'unicode long nonzero oct hex truediv rtruediv '
+
+# not including __prepare__, __instancecheck__, __subclasscheck__
+# (as they are metaclass methods)
+# __del__ is not supported at all as it causes problems if it exists
+
+_non_defaults = set('__%s__' % method for method in [
+ 'cmp', 'getslice', 'setslice', 'coerce', 'subclasses',
+ 'format', 'get', 'set', 'delete', 'reversed',
+ 'missing', 'reduce', 'reduce_ex', 'getinitargs',
+ 'getnewargs', 'getstate', 'setstate', 'getformat',
+ 'setformat', 'repr', 'dir'
+])
+
+
+def _get_method(name, func):
+ "Turns a callable object (like a mock) into a real function"
+ def method(self, *args, **kw):
+ return func(self, *args, **kw)
+ method.__name__ = name
+ return method
+
+
+_magics = set(
+ '__%s__' % method for method in
+ ' '.join([magic_methods, numerics, inplace, right, extra]).split()
+)
+
+_all_magics = _magics | _non_defaults
+
+_unsupported_magics = set([
+ '__getattr__', '__setattr__',
+    '__init__', '__new__', '__prepare__',
+ '__instancecheck__', '__subclasscheck__',
+ '__del__'
+])
+
+_calculate_return_value = {
+ '__hash__': lambda self: object.__hash__(self),
+ '__str__': lambda self: object.__str__(self),
+ '__sizeof__': lambda self: object.__sizeof__(self),
+ '__unicode__': lambda self: unicode(object.__str__(self)),
+}
+
+_return_values = {
+ '__lt__': NotImplemented,
+ '__gt__': NotImplemented,
+ '__le__': NotImplemented,
+ '__ge__': NotImplemented,
+ '__int__': 1,
+ '__contains__': False,
+ '__len__': 0,
+ '__exit__': False,
+ '__complex__': 1j,
+ '__float__': 1.0,
+ '__bool__': True,
+ '__nonzero__': True,
+ '__oct__': '1',
+ '__hex__': '0x1',
+ '__long__': long(1),
+ '__index__': 1,
+}
+
+
+def _get_eq(self):
+ def __eq__(other):
+ ret_val = self.__eq__._mock_return_value
+ if ret_val is not DEFAULT:
+ return ret_val
+ return self is other
+ return __eq__
+
+def _get_ne(self):
+    def __ne__(other):
+        # use the configured return value if one has been set
+        ret_val = self.__ne__._mock_return_value
+        if ret_val is not DEFAULT:
+            return ret_val
+        return self is not other
+    return __ne__
+
+def _get_iter(self):
+ def __iter__():
+ ret_val = self.__iter__._mock_return_value
+ if ret_val is DEFAULT:
+ return iter([])
+ # if ret_val was already an iterator, then calling iter on it should
+ # return the iterator unchanged
+ return iter(ret_val)
+ return __iter__
+
+_side_effect_methods = {
+ '__eq__': _get_eq,
+ '__ne__': _get_ne,
+ '__iter__': _get_iter,
+}
+
+
+
+def _set_return_value(mock, method, name):
+ fixed = _return_values.get(name, DEFAULT)
+ if fixed is not DEFAULT:
+ method.return_value = fixed
+ return
+
+    return_calculator = _calculate_return_value.get(name)
+    if return_calculator is not None:
+        try:
+            return_value = return_calculator(mock)
+ except AttributeError:
+ # XXXX why do we return AttributeError here?
+ # set it as a side_effect instead?
+ return_value = AttributeError(name)
+ method.return_value = return_value
+ return
+
+ side_effector = _side_effect_methods.get(name)
+ if side_effector is not None:
+ method.side_effect = side_effector(mock)
+
+
+
+class MagicMixin(object):
+ def __init__(self, *args, **kw):
+ _super(MagicMixin, self).__init__(*args, **kw)
+ self._mock_set_magics()
+
+
+ def _mock_set_magics(self):
+ these_magics = _magics
+
+ if self._mock_methods is not None:
+ these_magics = _magics.intersection(self._mock_methods)
+
+        remove_magics = _magics - these_magics
+
+ for entry in remove_magics:
+ if entry in type(self).__dict__:
+ # remove unneeded magic methods
+ delattr(self, entry)
+
+ # don't overwrite existing attributes if called a second time
+ these_magics = these_magics - set(type(self).__dict__)
+
+ _type = type(self)
+ for entry in these_magics:
+ setattr(_type, entry, MagicProxy(entry, self))
+
+
+
+class NonCallableMagicMock(MagicMixin, NonCallableMock):
+ """A version of `MagicMock` that isn't callable."""
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
+
+class MagicMock(MagicMixin, Mock):
+ """
+ MagicMock is a subclass of Mock with default implementations
+ of most of the magic methods. You can use MagicMock without having to
+ configure the magic methods yourself.
+
+ If you use the `spec` or `spec_set` arguments then *only* magic
+ methods that exist in the spec will be created.
+
+ Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
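+
+    For example (illustrative)::
+
+        mock = MagicMock()
+        mock.__len__.return_value = 3
+        len(mock)    # 3
+        int(mock)    # 1, a default magic method return value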
+ """
+ def mock_add_spec(self, spec, spec_set=False):
+ """Add a spec to a mock. `spec` can either be an object or a
+ list of strings. Only attributes on the `spec` can be fetched as
+ attributes from the mock.
+
+ If `spec_set` is True then only attributes on the spec can be set."""
+ self._mock_add_spec(spec, spec_set)
+ self._mock_set_magics()
+
+
+
+class MagicProxy(object):
+ def __init__(self, name, parent):
+ self.name = name
+ self.parent = parent
+
+ def __call__(self, *args, **kwargs):
+ m = self.create_mock()
+ return m(*args, **kwargs)
+
+ def create_mock(self):
+ entry = self.name
+ parent = self.parent
+ m = parent._get_child_mock(name=entry, _new_name=entry,
+ _new_parent=parent)
+ setattr(parent, entry, m)
+ _set_return_value(parent, m, entry)
+ return m
+
+ def __get__(self, obj, _type=None):
+ return self.create_mock()
+
+
+
+class _ANY(object):
+ "A helper object that compares equal to everything."
+
+ def __eq__(self, other):
+ return True
+
+ def __ne__(self, other):
+ return False
+
+ def __repr__(self):
+ return '<ANY>'
+
+ANY = _ANY()
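+
+# Illustrative use of ANY in assertions where one argument cannot be
+# predicted:
+#
+#     mock = Mock(return_value=None)
+#     mock('fixed', token=object())
+#     mock.assert_called_with('fixed', token=ANY)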
+
+
+
+def _format_call_signature(name, args, kwargs):
+ message = '%s(%%s)' % name
+ formatted_args = ''
+ args_string = ', '.join([repr(arg) for arg in args])
+ kwargs_string = ', '.join([
+ '%s=%r' % (key, value) for key, value in kwargs.items()
+ ])
+ if args_string:
+ formatted_args = args_string
+ if kwargs_string:
+ if formatted_args:
+ formatted_args += ', '
+ formatted_args += kwargs_string
+
+ return message % formatted_args
+
+
+
+class _Call(tuple):
+ """
+ A tuple for holding the results of a call to a mock, either in the form
+ `(args, kwargs)` or `(name, args, kwargs)`.
+
+ If args or kwargs are empty then a call tuple will compare equal to
+ a tuple without those values. This makes comparisons less verbose::
+
+ _Call(('name', (), {})) == ('name',)
+ _Call(('name', (1,), {})) == ('name', (1,))
+ _Call(((), {'a': 'b'})) == ({'a': 'b'},)
+
+ The `_Call` object provides a useful shortcut for comparing with call::
+
+ _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
+ _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)
+
+ If the _Call has no name then it will match any name.
+ """
+ def __new__(cls, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ name = ''
+ args = ()
+ kwargs = {}
+ _len = len(value)
+ if _len == 3:
+ name, args, kwargs = value
+ elif _len == 2:
+ first, second = value
+ if isinstance(first, basestring):
+ name = first
+ if isinstance(second, tuple):
+ args = second
+ else:
+ kwargs = second
+ else:
+ args, kwargs = first, second
+ elif _len == 1:
+ value, = value
+ if isinstance(value, basestring):
+ name = value
+ elif isinstance(value, tuple):
+ args = value
+ else:
+ kwargs = value
+
+ if two:
+ return tuple.__new__(cls, (args, kwargs))
+
+ return tuple.__new__(cls, (name, args, kwargs))
+
+
+ def __init__(self, value=(), name=None, parent=None, two=False,
+ from_kall=True):
+ self.name = name
+ self.parent = parent
+ self.from_kall = from_kall
+
+
+ def __eq__(self, other):
+ if other is ANY:
+ return True
+ try:
+ len_other = len(other)
+ except TypeError:
+ return False
+
+ self_name = ''
+ if len(self) == 2:
+ self_args, self_kwargs = self
+ else:
+ self_name, self_args, self_kwargs = self
+
+ other_name = ''
+ if len_other == 0:
+ other_args, other_kwargs = (), {}
+ elif len_other == 3:
+ other_name, other_args, other_kwargs = other
+ elif len_other == 1:
+ value, = other
+ if isinstance(value, tuple):
+ other_args = value
+ other_kwargs = {}
+ elif isinstance(value, basestring):
+ other_name = value
+ other_args, other_kwargs = (), {}
+ else:
+ other_args = ()
+ other_kwargs = value
+ else:
+ # len 2
+ # could be (name, args) or (name, kwargs) or (args, kwargs)
+ first, second = other
+ if isinstance(first, basestring):
+ other_name = first
+ if isinstance(second, tuple):
+ other_args, other_kwargs = second, {}
+ else:
+ other_args, other_kwargs = (), second
+ else:
+ other_args, other_kwargs = first, second
+
+ if self_name and other_name != self_name:
+ return False
+
+ # this order is important for ANY to work!
+ return (other_args, other_kwargs) == (self_args, self_kwargs)
+
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+ def __call__(self, *args, **kwargs):
+ if self.name is None:
+ return _Call(('', args, kwargs), name='()')
+
+ name = self.name + '()'
+ return _Call((self.name, args, kwargs), name=name, parent=self)
+
+
+ def __getattr__(self, attr):
+ if self.name is None:
+ return _Call(name=attr, from_kall=False)
+ name = '%s.%s' % (self.name, attr)
+ return _Call(name=name, parent=self, from_kall=False)
+
+
+ def __repr__(self):
+ if not self.from_kall:
+ name = self.name or 'call'
+ if name.startswith('()'):
+ name = 'call%s' % name
+ return name
+
+ if len(self) == 2:
+ name = 'call'
+ args, kwargs = self
+ else:
+ name, args, kwargs = self
+ if not name:
+ name = 'call'
+ elif not name.startswith('()'):
+ name = 'call.%s' % name
+ else:
+ name = 'call%s' % name
+ return _format_call_signature(name, args, kwargs)
+
+
+ def call_list(self):
+ """For a call object that represents multiple calls, `call_list`
+ returns a list of all the intermediate calls as well as the
+ final call."""
+ vals = []
+ thing = self
+ while thing is not None:
+ if thing.from_kall:
+ vals.append(thing)
+ thing = thing.parent
+ return _CallList(reversed(vals))
+
+
+call = _Call(from_kall=False)
+
+
+
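+# Illustrative `create_autospec` usage on a plain function:
+#
+#     def add(a, b):
+#         return a + b
+#
+#     mock_add = create_autospec(add, return_value=3)
+#     mock_add(1, 2)    # returns 3
+#     mock_add(1)       # raises TypeError: the signature is enforced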
+def create_autospec(spec, spec_set=False, instance=False, _parent=None,
+ _name=None, **kwargs):
+ """Create a mock object using another object as a spec. Attributes on the
+ mock will use the corresponding attribute on the `spec` object as their
+ spec.
+
+ Functions or methods being mocked will have their arguments checked
+ to check that they are called with the correct signature.
+
+ If `spec_set` is True then attempting to set attributes that don't exist
+ on the spec object will raise an `AttributeError`.
+
+ If a class is used as a spec then the return value of the mock (the
+ instance of the class) will have the same spec. You can use a class as the
+ spec for an instance object by passing `instance=True`. The returned mock
+ will only be callable if instances of the mock are callable.
+
+ `create_autospec` also takes arbitrary keyword arguments that are passed to
+ the constructor of the created mock."""
+ if _is_list(spec):
+ # can't pass a list instance to the mock constructor as it will be
+ # interpreted as a list of strings
+ spec = type(spec)
+
+ is_type = isinstance(spec, ClassTypes)
+
+ _kwargs = {'spec': spec}
+ if spec_set:
+ _kwargs = {'spec_set': spec}
+    elif spec is None:
+        # a spec of None means a normal mock without a spec
+        _kwargs = {}
+
+ _kwargs.update(kwargs)
+
+ Klass = MagicMock
+ if type(spec) in DescriptorTypes:
+ # descriptors don't have a spec
+ # because we don't know what type they return
+ _kwargs = {}
+ elif not _callable(spec):
+ Klass = NonCallableMagicMock
+ elif is_type and instance and not _instance_callable(spec):
+ Klass = NonCallableMagicMock
+
+ _new_name = _name
+ if _parent is None:
+ # for a top level object no _new_name should be set
+ _new_name = ''
+
+ mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
+ name=_name, **_kwargs)
+
+ if isinstance(spec, FunctionTypes):
+ # should only happen at the top level because we don't
+ # recurse for functions
+ mock = _set_signature(mock, spec)
+ else:
+ _check_signature(spec, mock, is_type, instance)
+
+ if _parent is not None and not instance:
+ _parent._mock_children[_name] = mock
+
+ if is_type and not instance and 'return_value' not in kwargs:
+ mock.return_value = create_autospec(spec, spec_set, instance=True,
+ _name='()', _parent=mock)
+
+ for entry in dir(spec):
+ if _is_magic(entry):
+ # MagicMock already does the useful magic methods for us
+ continue
+
+ if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
+ # allow a mock to actually be a function
+ continue
+
+ # XXXX do we need a better way of getting attributes without
+ # triggering code execution (?) Probably not - we need the actual
+ # object to mock it so we would rather trigger a property than mock
+ # the property descriptor. Likewise we want to mock out dynamically
+ # provided attributes.
+ # XXXX what about attributes that raise exceptions other than
+ # AttributeError on being fetched?
+ # we could be resilient against it, or catch and propagate the
+ # exception when the attribute is fetched from the mock
+ try:
+ original = getattr(spec, entry)
+ except AttributeError:
+ continue
+
+ kwargs = {'spec': original}
+ if spec_set:
+ kwargs = {'spec_set': original}
+
+ if not isinstance(original, FunctionTypes):
+ new = _SpecState(original, spec_set, mock, entry, instance)
+ mock._mock_children[entry] = new
+ else:
+ parent = mock
+ if isinstance(spec, FunctionTypes):
+ parent = mock.mock
+
+ new = MagicMock(parent=parent, name=entry, _new_name=entry,
+ _new_parent=parent, **kwargs)
+ mock._mock_children[entry] = new
+ skipfirst = _must_skip(spec, entry, is_type)
+ _check_signature(original, new, skipfirst=skipfirst)
+
+ # so functions created with _set_signature become instance attributes,
+ # *plus* their underlying mock exists in _mock_children of the parent
+ # mock. Adding to _mock_children may be unnecessary where we are also
+ # setting as an instance attribute?
+ if isinstance(new, FunctionTypes):
+ setattr(mock, entry, new)
+
+ return mock
+
+
+def _must_skip(spec, entry, is_type):
+ if not isinstance(spec, ClassTypes):
+ if entry in getattr(spec, '__dict__', {}):
+ # instance attribute - shouldn't skip
+ return False
+ spec = spec.__class__
+ if not hasattr(spec, '__mro__'):
+ # old style class: can't have descriptors anyway
+ return is_type
+
+ for klass in spec.__mro__:
+ result = klass.__dict__.get(entry, DEFAULT)
+ if result is DEFAULT:
+ continue
+ if isinstance(result, (staticmethod, classmethod)):
+ return False
+ return is_type
+
+ # shouldn't get here unless function is a dynamically provided attribute
+ # XXXX untested behaviour
+ return is_type
+
+
+def _get_class(obj):
+ try:
+ return obj.__class__
+ except AttributeError:
+ # in Python 2, _sre.SRE_Pattern objects have no __class__
+ return type(obj)
+
+
+class _SpecState(object):
+
+ def __init__(self, spec, spec_set=False, parent=None,
+ name=None, ids=None, instance=False):
+ self.spec = spec
+ self.ids = ids
+ self.spec_set = spec_set
+ self.parent = parent
+ self.instance = instance
+ self.name = name
+
+
+FunctionTypes = (
+ # python function
+ type(create_autospec),
+ # instance method
+ type(ANY.__eq__),
+ # unbound method
+ type(_ANY.__eq__),
+)
+
+FunctionAttributes = set([
+ 'func_closure',
+ 'func_code',
+ 'func_defaults',
+ 'func_dict',
+ 'func_doc',
+ 'func_globals',
+ 'func_name',
+])
+
+
+file_spec = None
+
+
+def mock_open(mock=None, read_data=''):
+ """
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+ `read_data` is a string for the `read` method of the file handle to return.
+ This is an empty string by default.
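+
+    For example (illustrative)::
+
+        m = mock_open(read_data='file contents')
+        with patch('%s.open' % __name__, m, create=True):
+            with open('somefile') as handle:
+                handle.read()    # 'file contents'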
+ """
+ global file_spec
+ if file_spec is None:
+ # set on first use
+ if inPy3k:
+ import _io
+ file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
+ else:
+ file_spec = file
+
+ if mock is None:
+ mock = MagicMock(name='open', spec=open)
+
+ handle = MagicMock(spec=file_spec)
+ handle.write.return_value = None
+ handle.__enter__.return_value = handle
+ handle.read.return_value = read_data
+
+ mock.return_value = handle
+ return mock
+
+
+class PropertyMock(Mock):
+ """
+ A mock intended to be used as a property, or other descriptor, on a class.
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+ a return value when it is fetched.
+
+ Fetching a `PropertyMock` instance from an object calls the mock, with
+ no args. Setting it calls the mock with the value being set.
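+
+    For example (illustrative)::
+
+        class Foo(object):
+            @property
+            def foo(self):
+                return 'original'
+
+        with patch.object(Foo, 'foo', new_callable=PropertyMock) as mock_foo:
+            mock_foo.return_value = 'mocked'
+            Foo().foo    # 'mocked'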
+ """
+ def _get_child_mock(self, **kwargs):
+ return MagicMock(**kwargs)
+
+ def __get__(self, obj, obj_type):
+ return self()
+ def __set__(self, obj, val):
+ self(val)
diff --git a/python/mock-1.0.0/setup.cfg b/python/mock-1.0.0/setup.cfg
new file mode 100644
index 000000000..00948b7e4
--- /dev/null
+++ b/python/mock-1.0.0/setup.cfg
@@ -0,0 +1,12 @@
+[build_sphinx]
+source-dir = docs
+build-dir = html
+
+[sdist]
+force-manifest = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/mock-1.0.0/setup.py b/python/mock-1.0.0/setup.py
new file mode 100755
index 000000000..7c25e8f2d
--- /dev/null
+++ b/python/mock-1.0.0/setup.py
@@ -0,0 +1,72 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from mock import __version__
+
+import os
+
+
+NAME = 'mock'
+MODULES = ['mock']
+DESCRIPTION = 'A Python Mocking and Patching Library for Testing'
+
+URL = "http://www.voidspace.org.uk/python/mock/"
+
+readme = os.path.join(os.path.dirname(__file__), 'README.txt')
+LONG_DESCRIPTION = open(readme).read()
+
+CLASSIFIERS = [
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Programming Language :: Python :: Implementation :: Jython',
+ 'Operating System :: OS Independent',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Software Development :: Testing',
+]
+
+AUTHOR = 'Michael Foord'
+AUTHOR_EMAIL = 'michael@voidspace.org.uk'
+KEYWORDS = ("testing test mock mocking unittest patching "
+ "stubs fakes doubles").split(' ')
+
+params = dict(
+ name=NAME,
+ version=__version__,
+ py_modules=MODULES,
+
+ # metadata for upload to PyPI
+ author=AUTHOR,
+ author_email=AUTHOR_EMAIL,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ keywords=KEYWORDS,
+ url=URL,
+ classifiers=CLASSIFIERS,
+)
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+else:
+ params['tests_require'] = ['unittest2']
+ params['test_suite'] = 'unittest2.collector'
+
+setup(**params)
diff --git a/python/mock-1.0.0/tests/__init__.py b/python/mock-1.0.0/tests/__init__.py
new file mode 100644
index 000000000..54ddf2ecc
--- /dev/null
+++ b/python/mock-1.0.0/tests/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
diff --git a/python/mock-1.0.0/tests/_testwith.py b/python/mock-1.0.0/tests/_testwith.py
new file mode 100644
index 000000000..0b54780b8
--- /dev/null
+++ b/python/mock-1.0.0/tests/_testwith.py
@@ -0,0 +1,181 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from __future__ import with_statement
+
+from tests.support import unittest2, is_instance
+
+from mock import MagicMock, Mock, patch, sentinel, mock_open, call
+
+from tests.support_with import catch_warnings, nested
+
+something = sentinel.Something
+something_else = sentinel.SomethingElse
+
+
+
+class WithTest(unittest2.TestCase):
+
+ def test_with_statement(self):
+ with patch('tests._testwith.something', sentinel.Something2):
+ self.assertEqual(something, sentinel.Something2, "unpatched")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_with_statement_exception(self):
+ try:
+ with patch('tests._testwith.something', sentinel.Something2):
+ self.assertEqual(something, sentinel.Something2, "unpatched")
+ raise Exception('pow')
+ except Exception:
+ pass
+ else:
+ self.fail("patch swallowed exception")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_with_statement_as(self):
+ with patch('tests._testwith.something') as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertTrue(is_instance(mock_something, MagicMock),
+ "patching wrong type")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def test_patch_object_with_statement(self):
+ class Foo(object):
+ something = 'foo'
+ original = Foo.something
+ with patch.object(Foo, 'something'):
+ self.assertNotEqual(Foo.something, original, "unpatched")
+ self.assertEqual(Foo.something, original)
+
+
+ def test_with_statement_nested(self):
+ with catch_warnings(record=True):
+ # nested is deprecated in Python 2.7
+ with nested(patch('tests._testwith.something'),
+ patch('tests._testwith.something_else')) as (mock_something, mock_something_else):
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertEqual(something_else, mock_something_else,
+ "unpatched")
+ self.assertEqual(something, sentinel.Something)
+ self.assertEqual(something_else, sentinel.SomethingElse)
+
+
+ def test_with_statement_specified(self):
+ with patch('tests._testwith.something', sentinel.Patched) as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+ self.assertEqual(mock_something, sentinel.Patched, "wrong patch")
+ self.assertEqual(something, sentinel.Something)
+
+
+ def testContextManagerMocking(self):
+ mock = Mock()
+ mock.__enter__ = Mock()
+ mock.__exit__ = Mock()
+ mock.__exit__.return_value = False
+
+ with mock as m:
+ self.assertEqual(m, mock.__enter__.return_value)
+ mock.__enter__.assert_called_with()
+ mock.__exit__.assert_called_with(None, None, None)
+
+
+ def test_context_manager_with_magic_mock(self):
+ mock = MagicMock()
+
+ with self.assertRaises(TypeError):
+ with mock:
+ 'foo' + 3
+ mock.__enter__.assert_called_with()
+ self.assertTrue(mock.__exit__.called)
+
+
+ def test_with_statement_same_attribute(self):
+ with patch('tests._testwith.something', sentinel.Patched) as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+
+ with patch('tests._testwith.something') as mock_again:
+ self.assertEqual(something, mock_again, "unpatched")
+
+ self.assertEqual(something, mock_something,
+ "restored with wrong instance")
+
+ self.assertEqual(something, sentinel.Something, "not restored")
+
+
+ def test_with_statement_imbricated(self):
+ with patch('tests._testwith.something') as mock_something:
+ self.assertEqual(something, mock_something, "unpatched")
+
+ with patch('tests._testwith.something_else') as mock_something_else:
+ self.assertEqual(something_else, mock_something_else,
+ "unpatched")
+
+ self.assertEqual(something, sentinel.Something)
+ self.assertEqual(something_else, sentinel.SomethingElse)
+
+
+ def test_dict_context_manager(self):
+ foo = {}
+ with patch.dict(foo, {'a': 'b'}):
+ self.assertEqual(foo, {'a': 'b'})
+ self.assertEqual(foo, {})
+
+ with self.assertRaises(NameError):
+ with patch.dict(foo, {'a': 'b'}):
+ self.assertEqual(foo, {'a': 'b'})
+ raise NameError('Konrad')
+
+ self.assertEqual(foo, {})
+
+
+
+class TestMockOpen(unittest2.TestCase):
+
+ def test_mock_open(self):
+ mock = mock_open()
+ with patch('%s.open' % __name__, mock, create=True) as patched:
+ self.assertIs(patched, mock)
+ open('foo')
+
+ mock.assert_called_once_with('foo')
+
+
+ def test_mock_open_context_manager(self):
+ mock = mock_open()
+ handle = mock.return_value
+ with patch('%s.open' % __name__, mock, create=True):
+ with open('foo') as f:
+ f.read()
+
+ expected_calls = [call('foo'), call().__enter__(), call().read(),
+ call().__exit__(None, None, None)]
+ self.assertEqual(mock.mock_calls, expected_calls)
+ self.assertIs(f, handle)
+
+
+ def test_explicit_mock(self):
+ mock = MagicMock()
+ mock_open(mock)
+
+ with patch('%s.open' % __name__, mock, create=True) as patched:
+ self.assertIs(patched, mock)
+ open('foo')
+
+ mock.assert_called_once_with('foo')
+
+
+ def test_read_data(self):
+ mock = mock_open(read_data='foo')
+ with patch('%s.open' % __name__, mock, create=True):
+ h = open('bar')
+ result = h.read()
+
+ self.assertEqual(result, 'foo')
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/support.py b/python/mock-1.0.0/tests/support.py
new file mode 100644
index 000000000..1b10c3428
--- /dev/null
+++ b/python/mock-1.0.0/tests/support.py
@@ -0,0 +1,41 @@
+import sys
+
+info = sys.version_info
+if info[:3] >= (3, 2, 0):
+ # for Python 3.2 ordinary unittest is fine
+ import unittest as unittest2
+else:
+ import unittest2
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return hasattr(obj, '__call__')
+
+
+inPy3k = sys.version_info[0] == 3
+with_available = sys.version_info[:2] >= (2, 5)
+
+
+def is_instance(obj, klass):
+    """Version of isinstance that doesn't access __class__"""
+ return issubclass(type(obj), klass)
+
+
+class SomeClass(object):
+ class_attribute = None
+
+ def wibble(self):
+ pass
+
+
+class X(object):
+ pass
+
+try:
+ next = next
+except NameError:
+ def next(obj):
+ return obj.next()
diff --git a/python/mock-1.0.0/tests/support_with.py b/python/mock-1.0.0/tests/support_with.py
new file mode 100644
index 000000000..fa286122c
--- /dev/null
+++ b/python/mock-1.0.0/tests/support_with.py
@@ -0,0 +1,93 @@
+from __future__ import with_statement
+
+import sys
+
+__all__ = ['nested', 'catch_warnings', 'examine_warnings']
+
+
+try:
+ from contextlib import nested
+except ImportError:
+ from contextlib import contextmanager
+ @contextmanager
+ def nested(*managers):
+ exits = []
+ vars = []
+ exc = (None, None, None)
+ try:
+ for mgr in managers:
+ exit = mgr.__exit__
+ enter = mgr.__enter__
+ vars.append(enter())
+ exits.append(exit)
+ yield vars
+ except:
+ exc = sys.exc_info()
+ finally:
+ while exits:
+ exit = exits.pop()
+ try:
+ if exit(*exc):
+ exc = (None, None, None)
+ except:
+ exc = sys.exc_info()
+ if exc != (None, None, None):
+ raise exc[1]
+
+# copied from Python 2.6
+try:
+ from warnings import catch_warnings
+except ImportError:
+ class catch_warnings(object):
+ def __init__(self, record=False, module=None):
+ self._record = record
+ self._module = sys.modules['warnings']
+ self._entered = False
+
+ def __repr__(self):
+ args = []
+ if self._record:
+ args.append("record=True")
+ name = type(self).__name__
+ return "%s(%s)" % (name, ", ".join(args))
+
+ def __enter__(self):
+ if self._entered:
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._entered = True
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._showwarning = self._module.showwarning
+ if self._record:
+ log = []
+ def showwarning(*args, **kwargs):
+ log.append(WarningMessage(*args, **kwargs))
+ self._module.showwarning = showwarning
+ return log
+ else:
+ return None
+
+ def __exit__(self, *exc_info):
+ if not self._entered:
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ self._module.filters = self._filters
+ self._module.showwarning = self._showwarning
+
+ class WarningMessage(object):
+ _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
+ "line")
+ def __init__(self, message, category, filename, lineno, file=None,
+ line=None):
+ local_values = locals()
+ for attr in self._WARNING_DETAILS:
+ setattr(self, attr, local_values[attr])
+            self._category_name = None
+            if category:
+                self._category_name = category.__name__
+
+
+def examine_warnings(func):
+ def wrapper():
+ with catch_warnings(record=True) as ws:
+ func(ws)
+ return wrapper
diff --git a/python/mock-1.0.0/tests/testcallable.py b/python/mock-1.0.0/tests/testcallable.py
new file mode 100644
index 000000000..f7dcd5e1d
--- /dev/null
+++ b/python/mock-1.0.0/tests/testcallable.py
@@ -0,0 +1,158 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import is_instance, unittest2, X, SomeClass
+
+from mock import (
+ Mock, MagicMock, NonCallableMagicMock,
+ NonCallableMock, patch, create_autospec,
+ CallableMixin
+)
+
+
+
+class TestCallable(unittest2.TestCase):
+
+ def assertNotCallable(self, mock):
+ self.assertTrue(is_instance(mock, NonCallableMagicMock))
+ self.assertFalse(is_instance(mock, CallableMixin))
+
+
+ def test_non_callable(self):
+ for mock in NonCallableMagicMock(), NonCallableMock():
+ self.assertRaises(TypeError, mock)
+ self.assertFalse(hasattr(mock, '__call__'))
+ self.assertIn(mock.__class__.__name__, repr(mock))
+
+
+    def test_hierarchy(self):
+ self.assertTrue(issubclass(MagicMock, Mock))
+ self.assertTrue(issubclass(NonCallableMagicMock, NonCallableMock))
+
+
+ def test_attributes(self):
+ one = NonCallableMock()
+ self.assertTrue(issubclass(type(one.one), Mock))
+
+ two = NonCallableMagicMock()
+ self.assertTrue(issubclass(type(two.two), MagicMock))
+
+
+ def test_subclasses(self):
+ class MockSub(Mock):
+ pass
+
+ one = MockSub()
+ self.assertTrue(issubclass(type(one.one), MockSub))
+
+ class MagicSub(MagicMock):
+ pass
+
+ two = MagicSub()
+ self.assertTrue(issubclass(type(two.two), MagicSub))
+
+
+ def test_patch_spec(self):
+ patcher = patch('%s.X' % __name__, spec=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_set(self):
+ patcher = patch('%s.X' % __name__, spec_set=True)
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertNotCallable(instance)
+ self.assertRaises(TypeError, instance)
+
+
+ def test_patch_spec_instance(self):
+ patcher = patch('%s.X' % __name__, spec=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_set_instance(self):
+ patcher = patch('%s.X' % __name__, spec_set=X())
+ mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.assertNotCallable(mock)
+ self.assertRaises(TypeError, mock)
+
+
+ def test_patch_spec_callable_class(self):
+ class CallableX(X):
+ def __call__(self):
+ pass
+
+ class Sub(CallableX):
+ pass
+
+ class Multi(SomeClass, Sub):
+ pass
+
+ class OldStyle:
+ def __call__(self):
+ pass
+
+ class OldStyleSub(OldStyle):
+ pass
+
+ for arg in 'spec', 'spec_set':
+ for Klass in CallableX, Sub, Multi, OldStyle, OldStyleSub:
+ patcher = patch('%s.X' % __name__, **{arg: Klass})
+ mock = patcher.start()
+
+ try:
+ instance = mock()
+ mock.assert_called_once_with()
+
+ self.assertTrue(is_instance(instance, MagicMock))
+ # inherited spec
+ self.assertRaises(AttributeError, getattr, instance,
+ 'foobarbaz')
+
+ result = instance()
+ # instance is callable, result has no spec
+ instance.assert_called_once_with()
+
+ result(3, 2, 1)
+ result.assert_called_once_with(3, 2, 1)
+ result.foo(3, 2, 1)
+ result.foo.assert_called_once_with(3, 2, 1)
+ finally:
+ patcher.stop()
+
+
+    def test_create_autospec(self):
+ mock = create_autospec(X)
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+
+ mock = create_autospec(X())
+ self.assertRaises(TypeError, mock)
+
+
+ def test_create_autospec_instance(self):
+ mock = create_autospec(SomeClass, instance=True)
+
+ self.assertRaises(TypeError, mock)
+ mock.wibble()
+ mock.wibble.assert_called_once_with()
+
+ self.assertRaises(TypeError, mock.wibble, 'some', 'args')
diff --git a/python/mock-1.0.0/tests/testhelpers.py b/python/mock-1.0.0/tests/testhelpers.py
new file mode 100644
index 000000000..e788da844
--- /dev/null
+++ b/python/mock-1.0.0/tests/testhelpers.py
@@ -0,0 +1,940 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2, inPy3k
+
+from mock import (
+ call, _Call, create_autospec, MagicMock,
+ Mock, ANY, _CallList, patch, PropertyMock
+)
+
+from datetime import datetime
+
+class SomeClass(object):
+ def one(self, a, b):
+ pass
+ def two(self):
+ pass
+ def three(self, a=None):
+ pass
+
+
+
+class AnyTest(unittest2.TestCase):
+
+ def test_any(self):
+ self.assertEqual(ANY, object())
+
+ mock = Mock()
+ mock(ANY)
+ mock.assert_called_with(ANY)
+
+ mock = Mock()
+ mock(foo=ANY)
+ mock.assert_called_with(foo=ANY)
+
+ def test_repr(self):
+ self.assertEqual(repr(ANY), '<ANY>')
+ self.assertEqual(str(ANY), '<ANY>')
+
+
+ def test_any_and_datetime(self):
+ mock = Mock()
+ mock(datetime.now(), foo=datetime.now())
+
+ mock.assert_called_with(ANY, foo=ANY)
+
+
+ def test_any_mock_calls_comparison_order(self):
+ mock = Mock()
+ d = datetime.now()
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ def __ne__(self, other):
+ return True
+
+ for d in datetime.now(), Foo():
+ mock.reset_mock()
+
+ mock(d, foo=d, bar=d)
+ mock.method(d, zinga=d, alpha=d)
+ mock().method(a1=d, z99=d)
+
+ expected = [
+ call(ANY, foo=ANY, bar=ANY),
+ call.method(ANY, zinga=ANY, alpha=ANY),
+ call(), call().method(a1=ANY, z99=ANY)
+ ]
+ self.assertEqual(expected, mock.mock_calls)
+ self.assertEqual(mock.mock_calls, expected)
+
+
+
+class CallTest(unittest2.TestCase):
+
+ def test_call_with_call(self):
+ kall = _Call()
+ self.assertEqual(kall, _Call())
+ self.assertEqual(kall, _Call(('',)))
+ self.assertEqual(kall, _Call(((),)))
+ self.assertEqual(kall, _Call(({},)))
+ self.assertEqual(kall, _Call(('', ())))
+ self.assertEqual(kall, _Call(('', {})))
+ self.assertEqual(kall, _Call(('', (), {})))
+ self.assertEqual(kall, _Call(('foo',)))
+ self.assertEqual(kall, _Call(('bar', ())))
+ self.assertEqual(kall, _Call(('baz', {})))
+ self.assertEqual(kall, _Call(('spam', (), {})))
+
+ kall = _Call(((1, 2, 3),))
+ self.assertEqual(kall, _Call(((1, 2, 3),)))
+ self.assertEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertEqual(kall, _Call(((1, 2, 3), {})))
+ self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(((1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
+
+ kall = _Call(('foo', (1, 2, 4),))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
+ self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
+
+ kall = _Call(({'a': 3},))
+ self.assertEqual(kall, _Call(('', (), {'a': 3})))
+ self.assertEqual(kall, _Call(('', {'a': 3})))
+ self.assertEqual(kall, _Call(((), {'a': 3})))
+ self.assertEqual(kall, _Call(({'a': 3},)))
+
+
+ def test_empty__Call(self):
+ args = _Call()
+
+ self.assertEqual(args, ())
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ((),))
+ self.assertEqual(args, ('foo', ()))
+        self.assertEqual(args, ('foo', (), {}))
+ self.assertEqual(args, ('foo', {}))
+ self.assertEqual(args, ({},))
+
+
+ def test_named_empty_call(self):
+ args = _Call(('foo', (), {}))
+
+ self.assertEqual(args, ('foo',))
+ self.assertEqual(args, ('foo', ()))
+        self.assertEqual(args, ('foo', (), {}))
+ self.assertEqual(args, ('foo', {}))
+
+ self.assertNotEqual(args, ((),))
+ self.assertNotEqual(args, ())
+ self.assertNotEqual(args, ({},))
+ self.assertNotEqual(args, ('bar',))
+ self.assertNotEqual(args, ('bar', ()))
+ self.assertNotEqual(args, ('bar', {}))
+
+
+ def test_call_with_args(self):
+ args = _Call(((1, 2, 3), {}))
+
+ self.assertEqual(args, ((1, 2, 3),))
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+ self.assertEqual(args, ((1, 2, 3), {}))
+
+
+ def test_named_call_with_args(self):
+ args = _Call(('foo', (1, 2, 3), {}))
+
+ self.assertEqual(args, ('foo', (1, 2, 3)))
+ self.assertEqual(args, ('foo', (1, 2, 3), {}))
+
+ self.assertNotEqual(args, ((1, 2, 3),))
+ self.assertNotEqual(args, ((1, 2, 3), {}))
+
+
+ def test_call_with_kwargs(self):
+ args = _Call(((), dict(a=3, b=4)))
+
+ self.assertEqual(args, (dict(a=3, b=4),))
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+ self.assertEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_named_call_with_kwargs(self):
+ args = _Call(('foo', (), dict(a=3, b=4)))
+
+ self.assertEqual(args, ('foo', dict(a=3, b=4)))
+ self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
+
+ self.assertNotEqual(args, (dict(a=3, b=4),))
+ self.assertNotEqual(args, ((), dict(a=3, b=4)))
+
+
+ def test_call_with_args_call_empty_name(self):
+ args = _Call(((1, 2, 3), {}))
+ self.assertEqual(args, call(1, 2, 3))
+ self.assertEqual(call(1, 2, 3), args)
+ self.assertTrue(call(1, 2, 3) in [args])
+
+
+ def test_call_ne(self):
+ self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
+ self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
+ self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
+
+
+ def test_call_non_tuples(self):
+ kall = _Call(((1, 2, 3),))
+ for value in 1, None, self, int:
+ self.assertNotEqual(kall, value)
+ self.assertFalse(kall == value)
+
+
+ def test_repr(self):
+ self.assertEqual(repr(_Call()), 'call()')
+ self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
+
+ self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
+ "call(1, 2, 3, a='b')")
+ self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
+ "call.bar(1, 2, 3, a='b')")
+
+ self.assertEqual(repr(call), 'call')
+ self.assertEqual(str(call), 'call')
+
+ self.assertEqual(repr(call()), 'call()')
+ self.assertEqual(repr(call(1)), 'call(1)')
+ self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
+
+ self.assertEqual(repr(call().foo), 'call().foo')
+ self.assertEqual(repr(call(1).foo.bar(a=3).bing),
+ 'call().foo.bar().bing')
+ self.assertEqual(
+ repr(call().foo(1, 2, a=3)),
+ "call().foo(1, 2, a=3)"
+ )
+ self.assertEqual(repr(call()()), "call()()")
+ self.assertEqual(repr(call(1)(2)), "call()(2)")
+ self.assertEqual(
+ repr(call()().bar().baz.beep(1)),
+ "call()().bar().baz.beep(1)"
+ )
+
+
+ def test_call(self):
+ self.assertEqual(call(), ('', (), {}))
+ self.assertEqual(call('foo', 'bar', one=3, two=4),
+ ('', ('foo', 'bar'), {'one': 3, 'two': 4}))
+
+ mock = Mock()
+ mock(1, 2, 3)
+ mock(a=3, b=6)
+ self.assertEqual(mock.call_args_list,
+ [call(1, 2, 3), call(a=3, b=6)])
+
+ def test_attribute_call(self):
+ self.assertEqual(call.foo(1), ('foo', (1,), {}))
+ self.assertEqual(call.bar.baz(fish='eggs'),
+ ('bar.baz', (), {'fish': 'eggs'}))
+
+ mock = Mock()
+        mock.foo(1, 2, 3)
+ mock.bar.baz(a=3, b=6)
+ self.assertEqual(mock.method_calls,
+ [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
+
+
+ def test_extended_call(self):
+ result = call(1).foo(2).bar(3, a=4)
+ self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
+
+ mock = MagicMock()
+ mock(1, 2, a=3, b=4)
+ self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
+ self.assertNotEqual(mock.call_args, call(1, 2, 3))
+
+ self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
+ self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
+
+ mock = MagicMock()
+ mock.foo(1).bar()().baz.beep(a=6)
+
+ last_call = call.foo(1).bar()().baz.beep(a=6)
+ self.assertEqual(mock.mock_calls[-1], last_call)
+ self.assertEqual(mock.mock_calls, last_call.call_list())
+
+
+ def test_call_list(self):
+ mock = MagicMock()
+ mock(1)
+ self.assertEqual(call(1).call_list(), mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)
+ self.assertEqual(call(1).method(2).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ mock(1).method(2)(3)
+ self.assertEqual(call(1).method(2)(3).call_list(),
+ mock.mock_calls)
+
+ mock = MagicMock()
+ int(mock(1).method(2)(3).foo.bar.baz(4)(5))
+ kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
+ self.assertEqual(kall.call_list(), mock.mock_calls)
+
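+ # Editor's sketch (not from the original suite): call_list() expands a
+ # chained call into each intermediate call, which is why it compares
+ # equal to mock_calls above:
+ #
+ #     call(1).method(2).call_list()
+ #     # -> [call(1), call(1).method(2)]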
+
+ def test_call_any(self):
+ self.assertEqual(call, ANY)
+
+ m = MagicMock()
+ int(m)
+ self.assertEqual(m.mock_calls, [ANY])
+ self.assertEqual([ANY], m.mock_calls)
+
+
+ def test_two_args_call(self):
+ args = _Call(((1, 2), {'a': 3}), two=True)
+ self.assertEqual(len(args), 2)
+ self.assertEqual(args[0], (1, 2))
+ self.assertEqual(args[1], {'a': 3})
+
+ other_args = _Call(((1, 2), {'a': 3}))
+ self.assertEqual(args, other_args)
+
+
+class SpecSignatureTest(unittest2.TestCase):
+
+ def _check_someclass_mock(self, mock):
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+ mock.one(1, 2)
+ mock.one.assert_called_with(1, 2)
+ self.assertRaises(AssertionError,
+ mock.one.assert_called_with, 3, 4)
+ self.assertRaises(TypeError, mock.one, 1)
+
+ mock.two()
+ mock.two.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.two.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.two, 1)
+
+ mock.three()
+ mock.three.assert_called_with()
+ self.assertRaises(AssertionError,
+ mock.three.assert_called_with, 3)
+ self.assertRaises(TypeError, mock.three, 3, 2)
+
+ mock.three(1)
+ mock.three.assert_called_with(1)
+
+ mock.three(a=1)
+ mock.three.assert_called_with(a=1)
+
+
+ def test_basic(self):
+ for spec in (SomeClass, SomeClass()):
+ mock = create_autospec(spec)
+ self._check_someclass_mock(mock)
+
+
+ def test_create_autospec_return_value(self):
+ def f():
+ pass
+ mock = create_autospec(f, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+ class Foo(object):
+ pass
+
+ mock = create_autospec(Foo, return_value='foo')
+ self.assertEqual(mock(), 'foo')
+
+
+ def test_autospec_reset_mock(self):
+ m = create_autospec(int)
+ int(m)
+ m.reset_mock()
+ self.assertEqual(m.__int__.call_count, 0)
+
+
+ def test_mocking_unbound_methods(self):
+ class Foo(object):
+ def foo(self, foo):
+ pass
+ p = patch.object(Foo, 'foo')
+ mock_foo = p.start()
+ Foo().foo(1)
+
+ mock_foo.assert_called_with(1)
+
+
+ @unittest2.expectedFailure
+ def test_create_autospec_unbound_methods(self):
+ # see issue 128
+ class Foo(object):
+ def foo(self):
+ pass
+
+ klass = create_autospec(Foo)
+ instance = klass()
+ self.assertRaises(TypeError, instance.foo, 1)
+
+ # Note: no type checking on the "self" parameter
+ klass.foo(1)
+ klass.foo.assert_called_with(1)
+ self.assertRaises(TypeError, klass.foo)
+
+
+ def test_create_autospec_keyword_arguments(self):
+ class Foo(object):
+ a = 3
+ m = create_autospec(Foo, a='3')
+ self.assertEqual(m.a, '3')
+
+ @unittest2.skipUnless(inPy3k, "Keyword only arguments Python 3 specific")
+ def test_create_autospec_keyword_only_arguments(self):
+ func_def = "def foo(a, *, b=None):\n pass\n"
+ namespace = {}
+ exec(func_def, namespace)
+ foo = namespace['foo']
+
+ m = create_autospec(foo)
+ m(1)
+ m.assert_called_with(1)
+ self.assertRaises(TypeError, m, 1, 2)
+
+ m(2, b=3)
+ m.assert_called_with(2, b=3)
+
+ def test_function_as_instance_attribute(self):
+ obj = SomeClass()
+ def f(a):
+ pass
+ obj.f = f
+
+ mock = create_autospec(obj)
+ mock.f('bing')
+ mock.f.assert_called_with('bing')
+
+
+ def test_spec_as_list(self):
+ # because a spec given as a list of strings in the Mock constructor
+ # means something very different, we treat a list instance as the type
+ # (see the sketch after this test).
+ mock = create_autospec([])
+ mock.append('foo')
+ mock.append.assert_called_with('foo')
+
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+
+ class Foo(object):
+ foo = []
+
+ mock = create_autospec(Foo)
+ mock.foo.append(3)
+ mock.foo.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
+
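+ # Editor's sketch (an illustration, not from the original suite): a list
+ # of strings passed as ``spec`` to the Mock constructor restricts the
+ # allowed attribute *names*, which is the different behaviour meant above:
+ #
+ #     m = Mock(spec=['foo'])
+ #     m.foo       # allowed
+ #     m.append    # AttributeError: 'append' is not in the spec list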
+
+ def test_attributes(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ sub_mock = create_autospec(Sub)
+
+ for mock in (sub_mock, sub_mock.attr):
+ self._check_someclass_mock(mock)
+
+
+ def test_builtin_functions_types(self):
+ # we could replace builtin functions / methods with a function that
+ # has a *args / **kwargs signature; using the builtin method type
+ # as a spec seems to work fairly well though (see the sketch after
+ # this test).
+ class BuiltinSubclass(list):
+ def bar(self, arg):
+ pass
+ sorted = sorted
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ mock.append(3)
+ mock.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.append, 'foo')
+
+ mock.bar('foo')
+ mock.bar.assert_called_with('foo')
+ self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
+ self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
+
+ mock.sorted([1, 2])
+ mock.sorted.assert_called_with([1, 2])
+ self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
+
+ mock.attr.pop(3)
+ mock.attr.pop.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
+
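+ # Editor's sketch: what the comment above amounts to in practice --
+ # calls are recorded and attribute access is still guarded, even though
+ # the builtin's real signature is not introspectable:
+ #
+ #     m = create_autospec(BuiltinSubclass)
+ #     m.append(3); m.append.assert_called_with(3)
+ #     m.append.foo    # AttributeError: nothing beyond the method type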
+
+ def test_method_calls(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ mock = create_autospec(Sub)
+ mock.one(1, 2)
+ mock.two()
+ mock.three(3)
+
+ expected = [call.one(1, 2), call.two(), call.three(3)]
+ self.assertEqual(mock.method_calls, expected)
+
+ mock.attr.one(1, 2)
+ mock.attr.two()
+ mock.attr.three(3)
+
+ expected.extend(
+ [call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
+ )
+ self.assertEqual(mock.method_calls, expected)
+
+
+ def test_magic_methods(self):
+ class BuiltinSubclass(list):
+ attr = {}
+
+ mock = create_autospec(BuiltinSubclass)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(TypeError, int, mock)
+ self.assertRaises(TypeError, int, mock.attr)
+ self.assertEqual(list(mock), [])
+
+ self.assertIsInstance(mock['foo'], MagicMock)
+ self.assertIsInstance(mock.attr['foo'], MagicMock)
+
+
+ def test_spec_set(self):
+ class Sub(SomeClass):
+ attr = SomeClass()
+
+ for spec in (Sub, Sub()):
+ mock = create_autospec(spec, spec_set=True)
+ self._check_someclass_mock(mock)
+
+ self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
+ self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
+
+
+ def test_descriptors(self):
+ class Foo(object):
+ @classmethod
+ def f(cls, a, b):
+ pass
+ @staticmethod
+ def g(a, b):
+ pass
+
+ class Bar(Foo):
+ pass
+
+ class Baz(SomeClass, Bar):
+ pass
+
+ for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
+ mock = create_autospec(spec)
+ mock.f(1, 2)
+ mock.f.assert_called_once_with(1, 2)
+
+ mock.g(3, 4)
+ mock.g.assert_called_once_with(3, 4)
+
+
+ @unittest2.skipIf(inPy3k, "No old style classes in Python 3")
+ def test_old_style_classes(self):
+ class Foo:
+ def f(self, a, b):
+ pass
+
+ class Bar(Foo):
+ g = Foo()
+
+ for spec in (Foo, Foo(), Bar, Bar()):
+ mock = create_autospec(spec)
+ mock.f(1, 2)
+ mock.f.assert_called_once_with(1, 2)
+
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+ self.assertRaises(AttributeError, getattr, mock.f, 'foo')
+
+ mock.g.f(1, 2)
+ mock.g.f.assert_called_once_with(1, 2)
+ self.assertRaises(AttributeError, getattr, mock.g, 'foo')
+
+
+ def test_recursive(self):
+ class A(object):
+ def a(self):
+ pass
+ foo = 'foo bar baz'
+ bar = foo
+
+ A.B = A
+ mock = create_autospec(A)
+
+ mock()
+ self.assertFalse(mock.B.called)
+
+ mock.a()
+ mock.B.a()
+ self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
+
+ self.assertIs(A.foo, A.bar)
+ self.assertIsNot(mock.foo, mock.bar)
+ mock.foo.lower()
+ self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
+
+
+ def test_spec_inheritance_for_classes(self):
+ class Foo(object):
+ def a(self):
+ pass
+ class Bar(object):
+ def f(self):
+ pass
+
+ class_mock = create_autospec(Foo)
+
+ self.assertIsNot(class_mock, class_mock())
+
+ for this_mock in class_mock, class_mock():
+ this_mock.a()
+ this_mock.a.assert_called_with()
+ self.assertRaises(TypeError, this_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, this_mock, 'b')
+
+ instance_mock = create_autospec(Foo())
+ instance_mock.a()
+ instance_mock.a.assert_called_with()
+ self.assertRaises(TypeError, instance_mock.a, 'foo')
+ self.assertRaises(AttributeError, getattr, instance_mock, 'b')
+
+ # The return value isn't callable
+ self.assertRaises(TypeError, instance_mock)
+
+ instance_mock.Bar.f()
+ instance_mock.Bar.f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
+
+ instance_mock.Bar().f()
+ instance_mock.Bar().f.assert_called_with()
+ self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
+
+
+ def test_inherit(self):
+ class Foo(object):
+ a = 3
+
+ Foo.Foo = Foo
+
+ # class
+ mock = create_autospec(Foo)
+ instance = mock()
+ self.assertRaises(AttributeError, getattr, instance, 'b')
+
+ attr_instance = mock.Foo()
+ self.assertRaises(AttributeError, getattr, attr_instance, 'b')
+
+ # instance
+ mock = create_autospec(Foo())
+ self.assertRaises(AttributeError, getattr, mock, 'b')
+ self.assertRaises(TypeError, mock)
+
+ # attribute instance
+ call_result = mock.Foo()
+ self.assertRaises(AttributeError, getattr, call_result, 'b')
+
+
+ def test_builtins(self):
+ # used to fail with infinite recursion
+ create_autospec(1)
+
+ create_autospec(int)
+ create_autospec('foo')
+ create_autospec(str)
+ create_autospec({})
+ create_autospec(dict)
+ create_autospec([])
+ create_autospec(list)
+ create_autospec(set())
+ create_autospec(set)
+ create_autospec(1.0)
+ create_autospec(float)
+ create_autospec(1j)
+ create_autospec(complex)
+ create_autospec(False)
+ create_autospec(True)
+
+
+ def test_function(self):
+ def f(a, b):
+ pass
+
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock)
+ mock(1, 2)
+ mock.assert_called_with(1, 2)
+
+ f.f = f
+ mock = create_autospec(f)
+ self.assertRaises(TypeError, mock.f)
+ mock.f(3, 4)
+ mock.f.assert_called_with(3, 4)
+
+
+ def test_skip_attributeerrors(self):
+ class Raiser(object):
+ def __get__(self, obj, type=None):
+ if obj is None:
+ raise AttributeError('Can only be accessed via an instance')
+
+ class RaiserClass(object):
+ raiser = Raiser()
+
+ @staticmethod
+ def existing(a, b):
+ return a + b
+
+ s = create_autospec(RaiserClass)
+ self.assertRaises(TypeError, lambda: s.existing(1, 2, 3))
+ s.existing(1, 2)
+ self.assertRaises(AttributeError, lambda: s.nonexisting)
+
+ # check we can fetch the raiser attribute and it has no spec
+ obj = s.raiser
+ obj.foo, obj.bar
+
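+ # Editor's sketch: a descriptor that raises AttributeError on class
+ # access cannot be introspected, so create_autospec falls back to an
+ # ordinary, spec-free child mock for it:
+ #
+ #     s = create_autospec(RaiserClass)
+ #     s.raiser.anything    # fine, no spec was applied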
+
+ def test_signature_class(self):
+ class Foo(object):
+ def __init__(self, a, b=3):
+ pass
+
+ mock = create_autospec(Foo)
+
+ self.assertRaises(TypeError, mock)
+ mock(1)
+ mock.assert_called_once_with(1)
+
+ mock(4, 5)
+ mock.assert_called_with(4, 5)
+
+
+ @unittest2.skipIf(inPy3k, 'no old style classes in Python 3')
+ def test_signature_old_style_class(self):
+ class Foo:
+ def __init__(self, a, b=3):
+ pass
+
+ mock = create_autospec(Foo)
+
+ self.assertRaises(TypeError, mock)
+ mock(1)
+ mock.assert_called_once_with(1)
+
+ mock(4, 5)
+ mock.assert_called_with(4, 5)
+
+
+ def test_class_with_no_init(self):
+ # this used to raise an exception
+ # due to trying to get a signature from object.__init__
+ class Foo(object):
+ pass
+ create_autospec(Foo)
+
+
+ @unittest2.skipIf(inPy3k, 'no old style classes in Python 3')
+ def test_old_style_class_with_no_init(self):
+ # this used to raise an exception
+ # due to Foo.__init__ raising an AttributeError
+ class Foo:
+ pass
+ create_autospec(Foo)
+
+
+ def test_signature_callable(self):
+ class Callable(object):
+ def __init__(self):
+ pass
+ def __call__(self, a):
+ pass
+
+ mock = create_autospec(Callable)
+ mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+
+ instance = mock()
+ self.assertRaises(TypeError, instance)
+ instance(a='a')
+ instance.assert_called_once_with(a='a')
+ instance('a')
+ instance.assert_called_with('a')
+
+ mock = create_autospec(Callable())
+ mock(a='a')
+ mock.assert_called_once_with(a='a')
+ self.assertRaises(TypeError, mock)
+ mock('a')
+ mock.assert_called_with('a')
+
+
+ def test_signature_noncallable(self):
+ class NonCallable(object):
+ def __init__(self):
+ pass
+
+ mock = create_autospec(NonCallable)
+ instance = mock()
+ mock.assert_called_once_with()
+ self.assertRaises(TypeError, mock, 'a')
+ self.assertRaises(TypeError, instance)
+ self.assertRaises(TypeError, instance, 'a')
+
+ mock = create_autospec(NonCallable())
+ self.assertRaises(TypeError, mock)
+ self.assertRaises(TypeError, mock, 'a')
+
+
+ def test_create_autospec_none(self):
+ class Foo(object):
+ bar = None
+
+ mock = create_autospec(Foo)
+ none = mock.bar
+ self.assertNotIsInstance(none, type(None))
+
+ none.foo()
+ none.foo.assert_called_once_with()
+
+
+ def test_autospec_functions_with_self_in_odd_place(self):
+ class Foo(object):
+ def f(a, self):
+ pass
+
+ a = create_autospec(Foo)
+ a.f(self=10)
+ a.f.assert_called_with(self=10)
+
+
+ def test_autospec_property(self):
+ class Foo(object):
+ @property
+ def foo(self):
+ return 3
+
+ foo = create_autospec(Foo)
+ mock_property = foo.foo
+
+ # no spec on properties
+ self.assertTrue(isinstance(mock_property, MagicMock))
+ mock_property(1, 2, 3)
+ mock_property.abc(4, 5, 6)
+ mock_property.assert_called_once_with(1, 2, 3)
+ mock_property.abc.assert_called_once_with(4, 5, 6)
+
+
+ def test_autospec_slots(self):
+ class Foo(object):
+ __slots__ = ['a']
+
+ foo = create_autospec(Foo)
+ mock_slot = foo.a
+
+ # no spec on slots
+ mock_slot(1, 2, 3)
+ mock_slot.abc(4, 5, 6)
+ mock_slot.assert_called_once_with(1, 2, 3)
+ mock_slot.abc.assert_called_once_with(4, 5, 6)
+
+
+class TestCallList(unittest2.TestCase):
+
+ def test_args_list_contains_call_list(self):
+ mock = Mock()
+ self.assertIsInstance(mock.call_args_list, _CallList)
+
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+
+ for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
+ self.assertTrue(kall in mock.call_args_list)
+
+ calls = [call(a=3), call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(1, 2), call(a=3)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4), call(b=6)]
+ self.assertTrue(calls in mock.call_args_list)
+ calls = [call(3, 4)]
+ self.assertTrue(calls in mock.call_args_list)
+
+ self.assertFalse(call('fish') in mock.call_args_list)
+ self.assertFalse([call('fish')] in mock.call_args_list)
+
+
+ def test_call_list_str(self):
+ mock = Mock()
+ mock(1, 2)
+ mock.foo(a=3)
+ mock.foo.bar().baz('fish', cat='dog')
+
+ expected = (
+ "[call(1, 2),\n"
+ " call.foo(a=3),\n"
+ " call.foo.bar(),\n"
+ " call.foo.bar().baz('fish', cat='dog')]"
+ )
+ self.assertEqual(str(mock.mock_calls), expected)
+
+
+ def test_propertymock(self):
+ p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
+ mock = p.start()
+ try:
+ SomeClass.one
+ mock.assert_called_once_with()
+
+ s = SomeClass()
+ s.one
+ mock.assert_called_with()
+ self.assertEqual(mock.mock_calls, [call(), call()])
+
+ s.one = 3
+ self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
+ finally:
+ p.stop()
+
+
+ def test_propertymock_returnvalue(self):
+ m = MagicMock()
+ p = PropertyMock()
+ type(m).foo = p
+
+ returned = m.foo
+ p.assert_called_once_with()
+ self.assertIsInstance(returned, MagicMock)
+ self.assertNotIsInstance(returned, PropertyMock)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/testmagicmethods.py b/python/mock-1.0.0/tests/testmagicmethods.py
new file mode 100644
index 000000000..ef0f16d82
--- /dev/null
+++ b/python/mock-1.0.0/tests/testmagicmethods.py
@@ -0,0 +1,486 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2, inPy3k
+
+try:
+ unicode
+except NameError:
+ # Python 3
+ unicode = str
+ long = int
+
+import inspect
+import sys
+from mock import Mock, MagicMock, _magics
+
+
+
+class TestMockingMagicMethods(unittest2.TestCase):
+
+ def test_deleting_magic_methods(self):
+ mock = Mock()
+ self.assertFalse(hasattr(mock, '__getitem__'))
+
+ mock.__getitem__ = Mock()
+ self.assertTrue(hasattr(mock, '__getitem__'))
+
+ del mock.__getitem__
+ self.assertFalse(hasattr(mock, '__getitem__'))
+
+
+ def test_magicmock_del(self):
+ mock = MagicMock()
+ # before using getitem
+ del mock.__getitem__
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+ mock = MagicMock()
+ # this time use it first
+ mock['foo']
+ del mock.__getitem__
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+
+ def test_magic_method_wrapping(self):
+ mock = Mock()
+ def f(self, name):
+ return self, 'fish'
+
+ mock.__getitem__ = f
+ self.assertFalse(mock.__getitem__ is f)
+ self.assertEqual(mock['foo'], (mock, 'fish'))
+ self.assertEqual(mock.__getitem__('foo'), (mock, 'fish'))
+
+ mock.__getitem__ = mock
+ self.assertTrue(mock.__getitem__ is mock)
+
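+ # Editor's sketch: a plain function assigned to a magic method is
+ # wrapped as a real method on the mock's class, so it receives the mock
+ # itself as ``self``, exactly as the test above shows:
+ #
+ #     m = Mock()
+ #     m.__getitem__ = lambda self, key: (self, key)
+ #     m['a']    # == (m, 'a')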
+
+ def test_magic_methods_isolated_between_mocks(self):
+ mock1 = Mock()
+ mock2 = Mock()
+
+ mock1.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock1), [])
+ self.assertRaises(TypeError, lambda: list(mock2))
+
+
+ def test_repr(self):
+ mock = Mock()
+ self.assertEqual(repr(mock), "<Mock id='%s'>" % id(mock))
+ mock.__repr__ = lambda s: 'foo'
+ self.assertEqual(repr(mock), 'foo')
+
+
+ def test_str(self):
+ mock = Mock()
+ self.assertEqual(str(mock), object.__str__(mock))
+ mock.__str__ = lambda s: 'foo'
+ self.assertEqual(str(mock), 'foo')
+
+
+ @unittest2.skipIf(inPy3k, "no unicode in Python 3")
+ def test_unicode(self):
+ mock = Mock()
+ self.assertEqual(unicode(mock), unicode(str(mock)))
+
+ mock.__unicode__ = lambda s: unicode('foo')
+ self.assertEqual(unicode(mock), unicode('foo'))
+
+
+ def test_dict_methods(self):
+ mock = Mock()
+
+ self.assertRaises(TypeError, lambda: mock['foo'])
+ def _del():
+ del mock['foo']
+ def _set():
+ mock['foo'] = 3
+ self.assertRaises(TypeError, _del)
+ self.assertRaises(TypeError, _set)
+
+ _dict = {}
+ def getitem(s, name):
+ return _dict[name]
+ def setitem(s, name, value):
+ _dict[name] = value
+ def delitem(s, name):
+ del _dict[name]
+
+ mock.__setitem__ = setitem
+ mock.__getitem__ = getitem
+ mock.__delitem__ = delitem
+
+ self.assertRaises(KeyError, lambda: mock['foo'])
+ mock['foo'] = 'bar'
+ self.assertEqual(_dict, {'foo': 'bar'})
+ self.assertEqual(mock['foo'], 'bar')
+ del mock['foo']
+ self.assertEqual(_dict, {})
+
+
+ def test_numeric(self):
+ original = mock = Mock()
+ mock.value = 0
+
+ self.assertRaises(TypeError, lambda: mock + 3)
+
+ def add(self, other):
+ mock.value += other
+ return self
+ mock.__add__ = add
+ self.assertEqual(mock + 3, mock)
+ self.assertEqual(mock.value, 3)
+
+ del mock.__add__
+ def iadd(mock):
+ mock += 3
+ self.assertRaises(TypeError, iadd, mock)
+ mock.__iadd__ = add
+ mock += 6
+ self.assertEqual(mock, original)
+ self.assertEqual(mock.value, 9)
+
+ self.assertRaises(TypeError, lambda: 3 + mock)
+ mock.__radd__ = add
+ self.assertEqual(7 + mock, mock)
+ self.assertEqual(mock.value, 16)
+
+
+ @unittest2.skipIf(inPy3k, 'MagicMock does not configure __truediv__ on Python 3')
+ def test_truediv(self):
+ mock = MagicMock()
+ mock.__truediv__.return_value = 6
+
+ context = {'mock': mock}
+ code = 'from __future__ import division\nresult = mock / 7\n'
+ exec(code, context)
+ self.assertEqual(context['result'], 6)
+
+ mock.__rtruediv__.return_value = 3
+ code = 'from __future__ import division\nresult = 2 / mock\n'
+ exec(code, context)
+ self.assertEqual(context['result'], 3)
+
+
+ @unittest2.skipIf(not inPy3k, '__truediv__ is configured on Python 2')
+ def test_no_truediv(self):
+ self.assertRaises(
+ AttributeError, getattr, MagicMock(), '__truediv__'
+ )
+ self.assertRaises(
+ AttributeError, getattr, MagicMock(), '__rtruediv__'
+ )
+
+
+ def test_hash(self):
+ mock = Mock()
+ # test delegation
+ self.assertEqual(hash(mock), Mock.__hash__(mock))
+
+ def _hash(s):
+ return 3
+ mock.__hash__ = _hash
+ self.assertEqual(hash(mock), 3)
+
+
+ def test_nonzero(self):
+ m = Mock()
+ self.assertTrue(bool(m))
+
+ nonzero = lambda s: False
+ if not inPy3k:
+ m.__nonzero__ = nonzero
+ else:
+ m.__bool__ = nonzero
+
+ self.assertFalse(bool(m))
+
+
+ def test_comparison(self):
+ # note: this test fails with Jython 2.5.1 due to a Jython bug;
+ # it is fixed in Jython 2.5.2
+ if not inPy3k:
+ # incomparable in Python 3
+ self.assertEqual(Mock() < 3, object() < 3)
+ self.assertEqual(Mock() > 3, object() > 3)
+ self.assertEqual(Mock() <= 3, object() <= 3)
+ self.assertEqual(Mock() >= 3, object() >= 3)
+ else:
+ self.assertRaises(TypeError, lambda: MagicMock() < object())
+ self.assertRaises(TypeError, lambda: object() < MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() < MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() > object())
+ self.assertRaises(TypeError, lambda: object() > MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() > MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() <= object())
+ self.assertRaises(TypeError, lambda: object() <= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() <= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() >= object())
+ self.assertRaises(TypeError, lambda: object() >= MagicMock())
+ self.assertRaises(TypeError, lambda: MagicMock() >= MagicMock())
+
+ mock = Mock()
+ def comp(s, o):
+ return True
+ mock.__lt__ = mock.__gt__ = mock.__le__ = mock.__ge__ = comp
+ self.assertTrue(mock < 3)
+ self.assertTrue(mock > 3)
+ self.assertTrue(mock <= 3)
+ self.assertTrue(mock >= 3)
+
+
+ def test_equality(self):
+ for mock in Mock(), MagicMock():
+ self.assertEqual(mock == mock, True)
+ self.assertIsInstance(mock == mock, bool)
+ self.assertEqual(mock != mock, False)
+ self.assertIsInstance(mock != mock, bool)
+ self.assertEqual(mock == object(), False)
+ self.assertEqual(mock != object(), True)
+
+ def eq(self, other):
+ return other == 3
+ mock.__eq__ = eq
+ self.assertTrue(mock == 3)
+ self.assertFalse(mock == 4)
+
+ def ne(self, other):
+ return other == 3
+ mock.__ne__ = ne
+ self.assertTrue(mock != 3)
+ self.assertFalse(mock != 4)
+
+ mock = MagicMock()
+ mock.__eq__.return_value = True
+ self.assertIsInstance(mock == 3, bool)
+ self.assertEqual(mock == 3, True)
+
+ mock.__ne__.return_value = False
+ self.assertIsInstance(mock != 3, bool)
+ self.assertEqual(mock != 3, False)
+
+
+ def test_len_contains_iter(self):
+ mock = Mock()
+
+ self.assertRaises(TypeError, len, mock)
+ self.assertRaises(TypeError, iter, mock)
+ self.assertRaises(TypeError, lambda: 'foo' in mock)
+
+ mock.__len__ = lambda s: 6
+ self.assertEqual(len(mock), 6)
+
+ mock.__contains__ = lambda s, o: o == 3
+ self.assertTrue(3 in mock)
+ self.assertFalse(6 in mock)
+
+ mock.__iter__ = lambda s: iter('foobarbaz')
+ self.assertEqual(list(mock), list('foobarbaz'))
+
+
+ def test_magicmock(self):
+ mock = MagicMock()
+
+ mock.__iter__.return_value = iter([1, 2, 3])
+ self.assertEqual(list(mock), [1, 2, 3])
+
+ name = '__nonzero__'
+ other = '__bool__'
+ if inPy3k:
+ name, other = other, name
+ getattr(mock, name).return_value = False
+ self.assertFalse(hasattr(mock, other))
+ self.assertFalse(bool(mock))
+
+ for entry in _magics:
+ self.assertTrue(hasattr(mock, entry))
+ self.assertFalse(hasattr(mock, '__imaginary__'))
+
+
+ def test_magic_mock_equality(self):
+ mock = MagicMock()
+ self.assertIsInstance(mock == object(), bool)
+ self.assertIsInstance(mock != object(), bool)
+
+ self.assertEqual(mock == object(), False)
+ self.assertEqual(mock != object(), True)
+ self.assertEqual(mock == mock, True)
+ self.assertEqual(mock != mock, False)
+
+
+ def test_magicmock_defaults(self):
+ mock = MagicMock()
+ self.assertEqual(int(mock), 1)
+ self.assertEqual(complex(mock), 1j)
+ self.assertEqual(float(mock), 1.0)
+ self.assertEqual(long(mock), long(1))
+ self.assertNotIn(object(), mock)
+ self.assertEqual(len(mock), 0)
+ self.assertEqual(list(mock), [])
+ self.assertEqual(hash(mock), object.__hash__(mock))
+ self.assertEqual(str(mock), object.__str__(mock))
+ self.assertEqual(unicode(mock), object.__str__(mock))
+ self.assertIsInstance(unicode(mock), unicode)
+ self.assertTrue(bool(mock))
+ if not inPy3k:
+ self.assertEqual(oct(mock), '1')
+ else:
+ # in Python 3 oct and hex use __index__
+ # so these tests are for __index__ in py3k
+ self.assertEqual(oct(mock), '0o1')
+ self.assertEqual(hex(mock), '0x1')
+ # how to test __sizeof__ ?
+
+
+ @unittest2.skipIf(inPy3k, "no __cmp__ in Python 3")
+ def test_non_default_magic_methods(self):
+ mock = MagicMock()
+ self.assertRaises(AttributeError, lambda: mock.__cmp__)
+
+ mock = Mock()
+ mock.__cmp__ = lambda s, o: 0
+
+ self.assertEqual(mock, object())
+
+
+ def test_magic_methods_and_spec(self):
+ class Iterable(object):
+ def __iter__(self):
+ pass
+
+ mock = Mock(spec=Iterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ mock.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock), [])
+
+ class NonIterable(object):
+ pass
+ mock = Mock(spec=NonIterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ def set_int():
+ mock.__int__ = Mock(return_value=iter([]))
+ self.assertRaises(AttributeError, set_int)
+
+ mock = MagicMock(spec=Iterable)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(AttributeError, set_int)
+
+
+ def test_magic_methods_and_spec_set(self):
+ class Iterable(object):
+ def __iter__(self):
+ pass
+
+ mock = Mock(spec_set=Iterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ mock.__iter__ = Mock(return_value=iter([]))
+ self.assertEqual(list(mock), [])
+
+ class NonIterable(object):
+ pass
+ mock = Mock(spec_set=NonIterable)
+ self.assertRaises(AttributeError, lambda: mock.__iter__)
+
+ def set_int():
+ mock.__int__ = Mock(return_value=iter([]))
+ self.assertRaises(AttributeError, set_int)
+
+ mock = MagicMock(spec_set=Iterable)
+ self.assertEqual(list(mock), [])
+ self.assertRaises(AttributeError, set_int)
+
+
+ def test_setting_unsupported_magic_method(self):
+ mock = MagicMock()
+ def set_setattr():
+ mock.__setattr__ = lambda self, name: None
+ self.assertRaisesRegexp(AttributeError,
+ "Attempting to set unsupported magic method '__setattr__'.",
+ set_setattr
+ )
+
+
+ def test_attributes_and_return_value(self):
+ mock = MagicMock()
+ attr = mock.foo
+ def _get_type(obj):
+ # the type of every mock (or magicmock) is a custom subclass
+ # so the real type is the second in the mro
+ return type(obj).__mro__[1]
+ self.assertEqual(_get_type(attr), MagicMock)
+
+ returned = mock()
+ self.assertEqual(_get_type(returned), MagicMock)
+
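+ # Editor's sketch: every mock gets its own dynamically created subclass
+ # (so magic methods can be set per instance), hence the __mro__[1] trick
+ # above:
+ #
+ #     m = MagicMock()
+ #     type(m.foo) is MagicMock               # False: a private subclass
+ #     type(m.foo).__mro__[1] is MagicMock    # True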
+
+ def test_magic_methods_are_magic_mocks(self):
+ mock = MagicMock()
+ self.assertIsInstance(mock.__getitem__, MagicMock)
+
+ mock[1][2].__getitem__.return_value = 3
+ self.assertEqual(mock[1][2][3], 3)
+
+
+ def test_magic_method_reset_mock(self):
+ mock = MagicMock()
+ str(mock)
+ self.assertTrue(mock.__str__.called)
+ mock.reset_mock()
+ self.assertFalse(mock.__str__.called)
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir(self):
+ # overriding the default implementation
+ for mock in Mock(), MagicMock():
+ def _dir(self):
+ return ['foo']
+ mock.__dir__ = _dir
+ self.assertEqual(dir(mock), ['foo'])
+
+
+ @unittest2.skipIf('PyPy' in sys.version, "This fails differently on pypy")
+ def test_bound_methods(self):
+ m = Mock()
+
+ # XXXX should this be an expected failure instead?
+
+ # this seems like it should work, but is hard to do without introducing
+ # other API inconsistencies. The failure message could be better, though.
+ m.__iter__ = [3].__iter__
+ self.assertRaises(TypeError, iter, m)
+
+
+ def test_magic_method_type(self):
+ class Foo(MagicMock):
+ pass
+
+ foo = Foo()
+ self.assertIsInstance(foo.__int__, Foo)
+
+
+ def test_descriptor_from_class(self):
+ m = MagicMock()
+ type(m).__str__.return_value = 'foo'
+ self.assertEqual(str(m), 'foo')
+
+
+ def test_iterable_as_iter_return_value(self):
+ m = MagicMock()
+ m.__iter__.return_value = [1, 2, 3]
+ self.assertEqual(list(m), [1, 2, 3])
+ self.assertEqual(list(m), [1, 2, 3])
+
+ m.__iter__.return_value = iter([4, 5, 6])
+ self.assertEqual(list(m), [4, 5, 6])
+ self.assertEqual(list(m), [])
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/testmock.py b/python/mock-1.0.0/tests/testmock.py
new file mode 100644
index 000000000..f3ceea995
--- /dev/null
+++ b/python/mock-1.0.0/tests/testmock.py
@@ -0,0 +1,1351 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import (
+ callable, unittest2, inPy3k, is_instance, next
+)
+
+import copy
+import pickle
+import sys
+
+import mock
+from mock import (
+ call, DEFAULT, patch, sentinel,
+ MagicMock, Mock, NonCallableMock,
+ NonCallableMagicMock, _CallList,
+ create_autospec
+)
+
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+
+class Iter(object):
+ def __init__(self):
+ self.thing = iter(['this', 'is', 'an', 'iter'])
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ return next(self.thing)
+
+ __next__ = next
+
+
+class Subclass(MagicMock):
+ pass
+
+
+class Thing(object):
+ attribute = 6
+ foo = 'bar'
+
+
+
+class MockTest(unittest2.TestCase):
+
+ def test_all(self):
+ # if __all__ is badly defined then import * will raise an error.
+ # We have to exec it because you can't import * inside a method
+ # in Python 3.
+ exec("from mock import *")
+
+
+ def test_constructor(self):
+ mock = Mock()
+
+ self.assertFalse(mock.called, "called not initialised correctly")
+ self.assertEqual(mock.call_count, 0,
+ "call_count not initialised correctly")
+ self.assertTrue(is_instance(mock.return_value, Mock),
+ "return_value not initialised correctly")
+
+ self.assertEqual(mock.call_args, None,
+ "call_args not initialised correctly")
+ self.assertEqual(mock.call_args_list, [],
+ "call_args_list not initialised correctly")
+ self.assertEqual(mock.method_calls, [],
+ "method_calls not initialised correctly")
+
+ # Can't use hasattr for this test as it always returns True on a
+ # mock (see the sketch after this test)
+ self.assertFalse('_items' in mock.__dict__,
+ "default mock should not have '_items' attribute")
+
+ self.assertIsNone(mock._mock_parent,
+ "parent not initialised correctly")
+ self.assertIsNone(mock._mock_methods,
+ "methods not initialised correctly")
+ self.assertEqual(mock._mock_children, {},
+ "children not initialised incorrectly")
+
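+ # Editor's sketch: the reason hasattr is avoided above -- attribute
+ # access on a plain Mock auto-creates the child:
+ #
+ #     m = Mock()
+ #     hasattr(m, 'anything')    # True, and m.anything now exists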
+
+ def test_unicode_not_broken(self):
+ # This used to raise an exception with Python 2.5 and Mock 0.4
+ unicode(Mock())
+
+
+ def test_return_value_in_constructor(self):
+ mock = Mock(return_value=None)
+ self.assertIsNone(mock.return_value,
+ "return value in constructor not honoured")
+
+
+ def test_repr(self):
+ mock = Mock(name='foo')
+ self.assertIn('foo', repr(mock))
+ self.assertIn("'%s'" % id(mock), repr(mock))
+
+ mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
+ for mock, name in mocks:
+ self.assertIn('%s.bar' % name, repr(mock.bar))
+ self.assertIn('%s.foo()' % name, repr(mock.foo()))
+ self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
+ self.assertIn('%s()' % name, repr(mock()))
+ self.assertIn('%s()()' % name, repr(mock()()))
+ self.assertIn('%s()().foo.bar.baz().bing' % name,
+ repr(mock()().foo.bar.baz().bing))
+
+
+ def test_repr_with_spec(self):
+ class X(object):
+ pass
+
+ mock = Mock(spec=X)
+ self.assertIn(" spec='X' ", repr(mock))
+
+ mock = Mock(spec=X())
+ self.assertIn(" spec='X' ", repr(mock))
+
+ mock = Mock(spec_set=X)
+ self.assertIn(" spec_set='X' ", repr(mock))
+
+ mock = Mock(spec_set=X())
+ self.assertIn(" spec_set='X' ", repr(mock))
+
+ mock = Mock(spec=X, name='foo')
+ self.assertIn(" spec='X' ", repr(mock))
+ self.assertIn(" name='foo' ", repr(mock))
+
+ mock = Mock(name='foo')
+ self.assertNotIn("spec", repr(mock))
+
+ mock = Mock()
+ self.assertNotIn("spec", repr(mock))
+
+ mock = Mock(spec=['foo'])
+ self.assertNotIn("spec", repr(mock))
+
+
+ def test_side_effect(self):
+ mock = Mock()
+
+ def effect(*args, **kwargs):
+ raise SystemError('kablooie')
+
+ mock.side_effect = effect
+ self.assertRaises(SystemError, mock, 1, 2, fish=3)
+ mock.assert_called_with(1, 2, fish=3)
+
+ results = [1, 2, 3]
+ def effect():
+ return results.pop()
+ mock.side_effect = effect
+
+ self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
+ "side effect not used correctly")
+
+ mock = Mock(side_effect=sentinel.SideEffect)
+ self.assertEqual(mock.side_effect, sentinel.SideEffect,
+ "side effect in constructor not used")
+
+ def side_effect():
+ return DEFAULT
+ mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
+ self.assertEqual(mock(), sentinel.RETURN)
+
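+ # Editor's sketch: a side_effect function that returns DEFAULT defers to
+ # the configured return_value, which is what the last assertion above
+ # relies on:
+ #
+ #     m = Mock(return_value=42, side_effect=lambda: DEFAULT)
+ #     m()    # 42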
+
+ @unittest2.skipUnless('java' in sys.platform,
+ 'This test only applies to Jython')
+ def test_java_exception_side_effect(self):
+ import java
+ mock = Mock(side_effect=java.lang.RuntimeException("Boom!"))
+
+ # can't use assertRaises with java exceptions
+ try:
+ mock(1, 2, fish=3)
+ except java.lang.RuntimeException:
+ pass
+ else:
+ self.fail('java exception not raised')
+ mock.assert_called_with(1, 2, fish=3)
+
+
+ def test_reset_mock(self):
+ parent = Mock()
+ spec = ["something"]
+ mock = Mock(name="child", parent=parent, spec=spec)
+ mock(sentinel.Something, something=sentinel.SomethingElse)
+ something = mock.something
+ mock.something()
+ mock.side_effect = sentinel.SideEffect
+ return_value = mock.return_value
+ return_value()
+
+ mock.reset_mock()
+
+ self.assertEqual(mock._mock_name, "child",
+ "name incorrectly reset")
+ self.assertEqual(mock._mock_parent, parent,
+ "parent incorrectly reset")
+ self.assertEqual(mock._mock_methods, spec,
+ "methods incorrectly reset")
+
+ self.assertFalse(mock.called, "called not reset")
+ self.assertEqual(mock.call_count, 0, "call_count not reset")
+ self.assertEqual(mock.call_args, None, "call_args not reset")
+ self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
+ self.assertEqual(mock.method_calls, [],
+ "method_calls not initialised correctly: %r != %r" %
+ (mock.method_calls, []))
+ self.assertEqual(mock.mock_calls, [])
+
+ self.assertEqual(mock.side_effect, sentinel.SideEffect,
+ "side_effect incorrectly reset")
+ self.assertEqual(mock.return_value, return_value,
+ "return_value incorrectly reset")
+ self.assertFalse(return_value.called, "return value mock not reset")
+ self.assertEqual(mock._mock_children, {'something': something},
+ "children reset incorrectly")
+ self.assertEqual(mock.something, something,
+ "children incorrectly cleared")
+ self.assertFalse(mock.something.called, "child not reset")
+
+
+ def test_reset_mock_recursion(self):
+ mock = Mock()
+ mock.return_value = mock
+
+ # used to cause recursion
+ mock.reset_mock()
+
+
+ def test_call(self):
+ mock = Mock()
+ self.assertTrue(is_instance(mock.return_value, Mock),
+ "Default return_value should be a Mock")
+
+ result = mock()
+ self.assertEqual(mock(), result,
+ "different result from consecutive calls")
+ mock.reset_mock()
+
+ ret_val = mock(sentinel.Arg)
+ self.assertTrue(mock.called, "called not set")
+ self.assertEqual(mock.call_count, 1, "call_count incoreect")
+ self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
+ "call_args not set")
+ self.assertEqual(mock.call_args_list, [((sentinel.Arg,), {})],
+ "call_args_list not initialised correctly")
+
+ mock.return_value = sentinel.ReturnValue
+ ret_val = mock(sentinel.Arg, key=sentinel.KeyArg)
+ self.assertEqual(ret_val, sentinel.ReturnValue,
+ "incorrect return value")
+
+ self.assertEqual(mock.call_count, 2, "call_count incorrect")
+ self.assertEqual(mock.call_args,
+ ((sentinel.Arg,), {'key': sentinel.KeyArg}),
+ "call_args not set")
+ self.assertEqual(mock.call_args_list, [
+ ((sentinel.Arg,), {}),
+ ((sentinel.Arg,), {'key': sentinel.KeyArg})
+ ],
+ "call_args_list not set")
+
+
+ def test_call_args_comparison(self):
+ mock = Mock()
+ mock()
+ mock(sentinel.Arg)
+ mock(kw=sentinel.Kwarg)
+ mock(sentinel.Arg, kw=sentinel.Kwarg)
+ self.assertEqual(mock.call_args_list, [
+ (),
+ ((sentinel.Arg,),),
+ ({"kw": sentinel.Kwarg},),
+ ((sentinel.Arg,), {"kw": sentinel.Kwarg})
+ ])
+ self.assertEqual(mock.call_args,
+ ((sentinel.Arg,), {"kw": sentinel.Kwarg}))
+
+
+ def test_assert_called_with(self):
+ mock = Mock()
+ mock()
+
+ # Will raise an exception if it fails
+ mock.assert_called_with()
+ self.assertRaises(AssertionError, mock.assert_called_with, 1)
+
+ mock.reset_mock()
+ self.assertRaises(AssertionError, mock.assert_called_with)
+
+ mock(1, 2, 3, a='fish', b='nothing')
+ mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
+
+
+ def test_assert_called_once_with(self):
+ mock = Mock()
+ mock()
+
+ # Will raise an exception if it fails
+ mock.assert_called_once_with()
+
+ mock()
+ self.assertRaises(AssertionError, mock.assert_called_once_with)
+
+ mock.reset_mock()
+ self.assertRaises(AssertionError, mock.assert_called_once_with)
+
+ mock('foo', 'bar', baz=2)
+ mock.assert_called_once_with('foo', 'bar', baz=2)
+
+ mock.reset_mock()
+ mock('foo', 'bar', baz=2)
+ self.assertRaises(
+ AssertionError,
+ lambda: mock.assert_called_once_with('bob', 'bar', baz=2)
+ )
+
+
+ def test_attribute_access_returns_mocks(self):
+ mock = Mock()
+ something = mock.something
+ self.assertTrue(is_instance(something, Mock), "attribute isn't a mock")
+ self.assertEqual(mock.something, something,
+ "different attributes returned for same name")
+
+ # Usage example
+ mock = Mock()
+ mock.something.return_value = 3
+
+ self.assertEqual(mock.something(), 3, "method returned wrong value")
+ self.assertTrue(mock.something.called,
+ "method didn't record being called")
+
+
+ def test_attributes_have_name_and_parent_set(self):
+ mock = Mock()
+ something = mock.something
+
+ self.assertEqual(something._mock_name, "something",
+ "attribute name not set correctly")
+ self.assertEqual(something._mock_parent, mock,
+ "attribute parent not set correctly")
+
+
+ def test_method_calls_recorded(self):
+ mock = Mock()
+ mock.something(3, fish=None)
+ mock.something_else.something(6, cake=sentinel.Cake)
+
+ self.assertEqual(mock.something_else.method_calls,
+ [("something", (6,), {'cake': sentinel.Cake})],
+ "method calls not recorded correctly")
+ self.assertEqual(mock.method_calls, [
+ ("something", (3,), {'fish': None}),
+ ("something_else.something", (6,), {'cake': sentinel.Cake})
+ ],
+ "method calls not recorded correctly")
+
+
+ def test_method_calls_compare_easily(self):
+ mock = Mock()
+ mock.something()
+ self.assertEqual(mock.method_calls, [('something',)])
+ self.assertEqual(mock.method_calls, [('something', (), {})])
+
+ mock = Mock()
+ mock.something('different')
+ self.assertEqual(mock.method_calls, [('something', ('different',))])
+ self.assertEqual(mock.method_calls,
+ [('something', ('different',), {})])
+
+ mock = Mock()
+ mock.something(x=1)
+ self.assertEqual(mock.method_calls, [('something', {'x': 1})])
+ self.assertEqual(mock.method_calls, [('something', (), {'x': 1})])
+
+ mock = Mock()
+ mock.something('different', some='more')
+ self.assertEqual(mock.method_calls, [
+ ('something', ('different',), {'some': 'more'})
+ ])
+
+
+ def test_only_allowed_methods_exist(self):
+ for spec in ['something'], ('something',):
+ for arg in 'spec', 'spec_set':
+ mock = Mock(**{arg: spec})
+
+ # this should be allowed
+ mock.something
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute 'something_else'",
+ getattr, mock, 'something_else'
+ )
+
+
+ def test_from_spec(self):
+ class Something(object):
+ x = 3
+ __something__ = None
+ def y(self):
+ pass
+
+ def test_attributes(mock):
+ # should work
+ mock.x
+ mock.y
+ mock.__something__
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute 'z'",
+ getattr, mock, 'z'
+ )
+ self.assertRaisesRegexp(
+ AttributeError,
+ "Mock object has no attribute '__foobar__'",
+ getattr, mock, '__foobar__'
+ )
+
+ test_attributes(Mock(spec=Something))
+ test_attributes(Mock(spec=Something()))
+
+
+ def test_wraps_calls(self):
+ real = Mock()
+
+ mock = Mock(wraps=real)
+ self.assertEqual(mock(), real())
+
+ real.reset_mock()
+
+ mock(1, 2, fish=3)
+ real.assert_called_with(1, 2, fish=3)
+
+
+ def test_wraps_call_with_nondefault_return_value(self):
+ real = Mock()
+
+ mock = Mock(wraps=real)
+ mock.return_value = 3
+
+ self.assertEqual(mock(), 3)
+ self.assertFalse(real.called)
+
+
+ def test_wraps_attributes(self):
+ class Real(object):
+ attribute = Mock()
+
+ real = Real()
+
+ mock = Mock(wraps=real)
+ self.assertEqual(mock.attribute(), real.attribute())
+ self.assertRaises(AttributeError, lambda: mock.fish)
+
+ self.assertNotEqual(mock.attribute, real.attribute)
+ result = mock.attribute.frog(1, 2, fish=3)
+ Real.attribute.frog.assert_called_with(1, 2, fish=3)
+ self.assertEqual(result, Real.attribute.frog())
+
+
+ def test_exceptional_side_effect(self):
+ mock = Mock(side_effect=AttributeError)
+ self.assertRaises(AttributeError, mock)
+
+ mock = Mock(side_effect=AttributeError('foo'))
+ self.assertRaises(AttributeError, mock)
+
+
+ def test_baseexceptional_side_effect(self):
+ mock = Mock(side_effect=KeyboardInterrupt)
+ self.assertRaises(KeyboardInterrupt, mock)
+
+ mock = Mock(side_effect=KeyboardInterrupt('foo'))
+ self.assertRaises(KeyboardInterrupt, mock)
+
+
+ def test_assert_called_with_message(self):
+ mock = Mock()
+ self.assertRaisesRegexp(AssertionError, 'Not called',
+ mock.assert_called_with)
+
+
+ def test__name__(self):
+ mock = Mock()
+ self.assertRaises(AttributeError, lambda: mock.__name__)
+
+ mock.__name__ = 'foo'
+ self.assertEqual(mock.__name__, 'foo')
+
+
+ def test_spec_list_subclass(self):
+ class Sub(list):
+ pass
+ mock = Mock(spec=Sub(['foo']))
+
+ mock.append(3)
+ mock.append.assert_called_with(3)
+ self.assertRaises(AttributeError, getattr, mock, 'foo')
+
+
+ def test_spec_class(self):
+ class X(object):
+ pass
+
+ mock = Mock(spec=X)
+ self.assertTrue(isinstance(mock, X))
+
+ mock = Mock(spec=X())
+ self.assertTrue(isinstance(mock, X))
+
+ self.assertIs(mock.__class__, X)
+ self.assertEqual(Mock().__class__.__name__, 'Mock')
+
+ mock = Mock(spec_set=X)
+ self.assertTrue(isinstance(mock, X))
+
+ mock = Mock(spec_set=X())
+ self.assertTrue(isinstance(mock, X))
+
+
+ def test_setting_attribute_with_spec_set(self):
+ class X(object):
+ y = 3
+
+ mock = Mock(spec=X)
+ mock.x = 'foo'
+
+ mock = Mock(spec_set=X)
+ def set_attr():
+ mock.x = 'foo'
+
+ mock.y = 'foo'
+ self.assertRaises(AttributeError, set_attr)
+
+
+ def test_copy(self):
+ current = sys.getrecursionlimit()
+ self.addCleanup(sys.setrecursionlimit, current)
+
+ # can't use sys.maxint as this doesn't exist in Python 3
+ sys.setrecursionlimit(int(10e8))
+ # this segfaults without the fix in place
+ copy.copy(Mock())
+
+
+ @unittest2.skipIf(inPy3k, "no old style classes in Python 3")
+ def test_spec_old_style_classes(self):
+ class Foo:
+ bar = 7
+
+ mock = Mock(spec=Foo)
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ mock = Mock(spec=Foo())
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+
+ @unittest2.skipIf(inPy3k, "no old style classes in Python 3")
+ def test_spec_set_old_style_classes(self):
+ class Foo:
+ bar = 7
+
+ mock = Mock(spec_set=Foo)
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ def _set():
+ mock.foo = 3
+ self.assertRaises(AttributeError, _set)
+
+ mock = Mock(spec_set=Foo())
+ mock.bar = 6
+ self.assertRaises(AttributeError, lambda: mock.foo)
+
+ def _set():
+ mock.foo = 3
+ self.assertRaises(AttributeError, _set)
+
+
+ def test_subclass_with_properties(self):
+ class SubClass(Mock):
+ def _get(self):
+ return 3
+ def _set(self, value):
+ raise NameError('strange error')
+ some_attribute = property(_get, _set)
+
+ s = SubClass(spec_set=SubClass)
+ self.assertEqual(s.some_attribute, 3)
+
+ def test():
+ s.some_attribute = 3
+ self.assertRaises(NameError, test)
+
+ def test():
+ s.foo = 'bar'
+ self.assertRaises(AttributeError, test)
+
+
+ def test_setting_call(self):
+ mock = Mock()
+ def __call__(self, a):
+ return self._mock_call(a)
+
+ type(mock).__call__ = __call__
+ mock('one')
+ mock.assert_called_with('one')
+
+ self.assertRaises(TypeError, mock, 'one', 'two')
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir(self):
+ mock = Mock()
+ attrs = set(dir(mock))
+ type_attrs = set([m for m in dir(Mock) if not m.startswith('_')])
+
+ # all public attributes from the type are included
+ self.assertEqual(set(), type_attrs - attrs)
+
+ # creates these attributes
+ mock.a, mock.b
+ self.assertIn('a', dir(mock))
+ self.assertIn('b', dir(mock))
+
+ # instance attributes
+ mock.c = mock.d = None
+ self.assertIn('c', dir(mock))
+ self.assertIn('d', dir(mock))
+
+ # magic methods
+ mock.__iter__ = lambda s: iter([])
+ self.assertIn('__iter__', dir(mock))
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_dir_from_spec(self):
+ mock = Mock(spec=unittest2.TestCase)
+ testcase_attrs = set(dir(unittest2.TestCase))
+ attrs = set(dir(mock))
+
+ # all attributes from the spec are included
+ self.assertEqual(set(), testcase_attrs - attrs)
+
+ # shadow a sys attribute
+ mock.version = 3
+ self.assertEqual(dir(mock).count('version'), 1)
+
+
+ @unittest2.skipUnless(sys.version_info[:2] >= (2, 6),
+ "__dir__ not available until Python 2.6 or later")
+ def test_filter_dir(self):
+ patcher = patch.object(mock, 'FILTER_DIR', False)
+ patcher.start()
+ try:
+ attrs = set(dir(Mock()))
+ type_attrs = set(dir(Mock))
+
+ # ALL attributes from the type are included
+ self.assertEqual(set(), type_attrs - attrs)
+ finally:
+ patcher.stop()
+
+
+ def test_configure_mock(self):
+ mock = Mock(foo='bar')
+ self.assertEqual(mock.foo, 'bar')
+
+ mock = MagicMock(foo='bar')
+ self.assertEqual(mock.foo, 'bar')
+
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+ mock = Mock(**kwargs)
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+ mock = Mock()
+ mock.configure_mock(**kwargs)
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def assertRaisesWithMsg(self, exception, message, func, *args, **kwargs):
+ # needed because assertRaisesRegexp doesn't work easily with newlines
+ try:
+ func(*args, **kwargs)
+ except:
+ instance = sys.exc_info()[1]
+ self.assertIsInstance(instance, exception)
+ else:
+ self.fail('Exception %r not raised' % (exception,))
+
+ msg = str(instance)
+ self.assertEqual(msg, message)
+
+
+ def test_assert_called_with_failure_message(self):
+ mock = NonCallableMock()
+
+ expected = "mock(1, '2', 3, bar='foo')"
+ message = 'Expected call: %s\nNot called'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected,),
+ mock.assert_called_with, 1, '2', 3, bar='foo'
+ )
+
+ mock.foo(1, '2', 3, foo='foo')
+
+
+ asserters = [
+ mock.foo.assert_called_with, mock.foo.assert_called_once_with
+ ]
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(1, '2', 3, bar='foo')"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, 1, '2', 3, bar='foo'
+ )
+
+ # just kwargs
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(bar='foo')"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, bar='foo'
+ )
+
+ # just args
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo(1, 2, 3)"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual),
+ meth, 1, 2, 3
+ )
+
+ # empty
+ for meth in asserters:
+ actual = "foo(1, '2', 3, foo='foo')"
+ expected = "foo()"
+ message = 'Expected call: %s\nActual call: %s'
+ self.assertRaisesWithMsg(
+ AssertionError, message % (expected, actual), meth
+ )
+
+
+ def test_mock_calls(self):
+ mock = MagicMock()
+
+ # need to do this because MagicMock.mock_calls used to just return
+ # a MagicMock, which also returned a MagicMock when __eq__ was called
+ # (see the sketch after this test)
+
+ mock = MagicMock()
+ mock()
+ expected = [('', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+
+ mock.foo()
+ expected.append(call.foo())
+ self.assertEqual(mock.mock_calls, expected)
+ # intermediate mock_calls work too
+ self.assertEqual(mock.foo.mock_calls, [('', (), {})])
+
+ mock = MagicMock()
+ mock().foo(1, 2, 3, a=4, b=5)
+ expected = [
+ ('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
+ ]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock.return_value.foo.mock_calls,
+ [('', (1, 2, 3), dict(a=4, b=5))])
+ self.assertEqual(mock.return_value.mock_calls,
+ [('foo', (1, 2, 3), dict(a=4, b=5))])
+
+ mock = MagicMock()
+ mock().foo.bar().baz()
+ expected = [
+ ('', (), {}), ('().foo.bar', (), {}),
+ ('().foo.bar().baz', (), {})
+ ]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock().mock_calls,
+ call.foo.bar().baz().call_list())
+
+ for kwargs in dict(), dict(name='bar'):
+ mock = MagicMock(**kwargs)
+ int(mock.foo)
+ expected = [('foo.__int__', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+
+ mock = MagicMock(**kwargs)
+ mock.a()()
+ expected = [('a', (), {}), ('a()', (), {})]
+ self.assertEqual(mock.mock_calls, expected)
+ self.assertEqual(mock.a().mock_calls, [call()])
+
+ mock = MagicMock(**kwargs)
+ mock(1)(2)(3)
+ self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
+ self.assertEqual(mock().mock_calls, call(2)(3).call_list())
+ self.assertEqual(mock()().mock_calls, call(3).call_list())
+
+ mock = MagicMock(**kwargs)
+ mock(1)(2)(3).a.b.c(4)
+ self.assertEqual(mock.mock_calls,
+ call(1)(2)(3).a.b.c(4).call_list())
+ self.assertEqual(mock().mock_calls,
+ call(2)(3).a.b.c(4).call_list())
+ self.assertEqual(mock()().mock_calls,
+ call(3).a.b.c(4).call_list())
+
+ mock = MagicMock(**kwargs)
+ int(mock().foo.bar().baz())
+ last_call = ('().foo.bar().baz().__int__', (), {})
+ self.assertEqual(mock.mock_calls[-1], last_call)
+ self.assertEqual(mock().mock_calls,
+ call.foo.bar().baz().__int__().call_list())
+ self.assertEqual(mock().foo.bar().mock_calls,
+ call.baz().__int__().call_list())
+ self.assertEqual(mock().foo.bar().baz.mock_calls,
+ call().__int__().call_list())
+
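+ # Editor's sketch: mock_calls is a _CallList, a real list subclass, so
+ # comparing it yields a genuine bool rather than another mock:
+ #
+ #     MagicMock().mock_calls == []    # True (a real bool)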
+
+ def test_subclassing(self):
+ class Subclass(Mock):
+ pass
+
+ mock = Subclass()
+ self.assertIsInstance(mock.foo, Subclass)
+ self.assertIsInstance(mock(), Subclass)
+
+ class Subclass(Mock):
+ def _get_child_mock(self, **kwargs):
+ return Mock(**kwargs)
+
+ mock = Subclass()
+ self.assertNotIsInstance(mock.foo, Subclass)
+ self.assertNotIsInstance(mock(), Subclass)
+
+
+ def test_arg_lists(self):
+ mocks = [
+ Mock(),
+ MagicMock(),
+ NonCallableMock(),
+ NonCallableMagicMock()
+ ]
+
+ def assert_attrs(mock):
+ names = 'call_args_list', 'method_calls', 'mock_calls'
+ for name in names:
+ attr = getattr(mock, name)
+ self.assertIsInstance(attr, _CallList)
+ self.assertIsInstance(attr, list)
+ self.assertEqual(attr, [])
+
+ for mock in mocks:
+ assert_attrs(mock)
+
+ if callable(mock):
+ mock()
+ mock(1, 2)
+ mock(a=3)
+
+ mock.reset_mock()
+ assert_attrs(mock)
+
+ mock.foo()
+ mock.foo.bar(1, a=3)
+ mock.foo(1).bar().baz(3)
+
+ mock.reset_mock()
+ assert_attrs(mock)
+
+
+ def test_call_args_two_tuple(self):
+ mock = Mock()
+ mock(1, a=3)
+ mock(2, b=4)
+
+ self.assertEqual(len(mock.call_args), 2)
+ args, kwargs = mock.call_args
+ self.assertEqual(args, (2,))
+ self.assertEqual(kwargs, dict(b=4))
+
+ expected_list = [((1,), dict(a=3)), ((2,), dict(b=4))]
+ for expected, call_args in zip(expected_list, mock.call_args_list):
+ self.assertEqual(len(call_args), 2)
+ self.assertEqual(expected[0], call_args[0])
+ self.assertEqual(expected[1], call_args[1])
+
+
+ def test_side_effect_iterator(self):
+ mock = Mock(side_effect=iter([1, 2, 3]))
+ self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
+ self.assertRaises(StopIteration, mock)
+
+ mock = MagicMock(side_effect=['a', 'b', 'c'])
+ self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
+ self.assertRaises(StopIteration, mock)
+
+ mock = Mock(side_effect='ghi')
+ self.assertEqual([mock(), mock(), mock()], ['g', 'h', 'i'])
+ self.assertRaises(StopIteration, mock)
+
+ class Foo(object):
+ pass
+ mock = MagicMock(side_effect=Foo)
+ self.assertIsInstance(mock(), Foo)
+
+ mock = Mock(side_effect=Iter())
+ self.assertEqual([mock(), mock(), mock(), mock()],
+ ['this', 'is', 'an', 'iter'])
+ self.assertRaises(StopIteration, mock)
+
+
+ def test_side_effect_setting_iterator(self):
+ mock = Mock()
+ mock.side_effect = iter([1, 2, 3])
+ self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
+ self.assertRaises(StopIteration, mock)
+ side_effect = mock.side_effect
+ self.assertIsInstance(side_effect, type(iter([])))
+
+ mock.side_effect = ['a', 'b', 'c']
+ self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
+ self.assertRaises(StopIteration, mock)
+ side_effect = mock.side_effect
+ self.assertIsInstance(side_effect, type(iter([])))
+
+ this_iter = Iter()
+ mock.side_effect = this_iter
+ self.assertEqual([mock(), mock(), mock(), mock()],
+ ['this', 'is', 'an', 'iter'])
+ self.assertRaises(StopIteration, mock)
+ self.assertIs(mock.side_effect, this_iter)
+
+
+ def test_side_effect_iterator_exceptions(self):
+ for Klass in Mock, MagicMock:
+ iterable = (ValueError, 3, KeyError, 6)
+ m = Klass(side_effect=iterable)
+ self.assertRaises(ValueError, m)
+ self.assertEqual(m(), 3)
+ self.assertRaises(KeyError, m)
+ self.assertEqual(m(), 6)
+
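+ # Editor's sketch: exception classes (or instances) inside a side_effect
+ # iterable are raised instead of returned:
+ #
+ #     m = Mock(side_effect=[ValueError, 2])
+ #     m()    # raises ValueError
+ #     m()    # returns 2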
+
+ def test_assert_has_calls_any_order(self):
+ mock = Mock()
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+ mock(b=6)
+
+ kalls = [
+ call(1, 2), ({'a': 3},),
+ ((3, 4),), ((), {'a': 3}),
+ ('', (1, 2)), ('', {'a': 3}),
+ ('', (1, 2), {}), ('', (), {'a': 3})
+ ]
+ for kall in kalls:
+ mock.assert_has_calls([kall], any_order=True)
+
+ for kall in call(1, '2'), call(b=3), call(), 3, None, 'foo':
+ self.assertRaises(
+ AssertionError, mock.assert_has_calls,
+ [kall], any_order=True
+ )
+
+ kall_lists = [
+ [call(1, 2), call(b=6)],
+ [call(3, 4), call(1, 2)],
+ [call(b=6), call(b=6)],
+ ]
+
+ for kall_list in kall_lists:
+ mock.assert_has_calls(kall_list, any_order=True)
+
+ kall_lists = [
+ [call(b=6), call(b=6), call(b=6)],
+ [call(1, 2), call(1, 2)],
+ [call(3, 4), call(1, 2), call(5, 7)],
+ [call(b=6), call(3, 4), call(b=6), call(1, 2), call(b=6)],
+ ]
+ for kall_list in kall_lists:
+ self.assertRaises(
+ AssertionError, mock.assert_has_calls,
+ kall_list, any_order=True
+ )
+
+ def test_assert_has_calls(self):
+ kalls1 = [
+ call(1, 2), ({'a': 3},),
+ ((3, 4),), call(b=6),
+ ('', (1,), {'b': 6}),
+ ]
+ kalls2 = [call.foo(), call.bar(1)]
+ kalls2.extend(call.spam().baz(a=3).call_list())
+ kalls2.extend(call.bam(set(), foo={}).fish([1]).call_list())
+
+ mocks = []
+ for mock in Mock(), MagicMock():
+ mock(1, 2)
+ mock(a=3)
+ mock(3, 4)
+ mock(b=6)
+ mock(1, b=6)
+ mocks.append((mock, kalls1))
+
+ mock = Mock()
+ mock.foo()
+ mock.bar(1)
+ mock.spam().baz(a=3)
+ mock.bam(set(), foo={}).fish([1])
+ mocks.append((mock, kalls2))
+
+ for mock, kalls in mocks:
+ for i in range(len(kalls)):
+ for step in 1, 2, 3:
+ these = kalls[i:i+step]
+ mock.assert_has_calls(these)
+
+ if len(these) > 1:
+ self.assertRaises(
+ AssertionError,
+ mock.assert_has_calls,
+ list(reversed(these))
+ )
+
+
+ def test_assert_any_call(self):
+ mock = Mock()
+ mock(1, 2)
+ mock(a=3)
+ mock(1, b=6)
+
+ mock.assert_any_call(1, 2)
+ mock.assert_any_call(a=3)
+ mock.assert_any_call(1, b=6)
+
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call
+ )
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call,
+ 1, 3
+ )
+ self.assertRaises(
+ AssertionError,
+ mock.assert_any_call,
+ a=4
+ )
+
+
+ def test_mock_calls_create_autospec(self):
+ def f(a, b):
+ pass
+ obj = Iter()
+ obj.f = f
+
+ funcs = [
+ create_autospec(f),
+ create_autospec(obj).f
+ ]
+ for func in funcs:
+ func(1, 2)
+ func(3, 4)
+
+ self.assertEqual(
+ func.mock_calls, [call(1, 2), call(3, 4)]
+ )
+
+
+ def test_mock_add_spec(self):
+ class _One(object):
+ one = 1
+ class _Two(object):
+ two = 2
+ class Anything(object):
+ one = two = three = 'four'
+
+ klasses = [
+ Mock, MagicMock, NonCallableMock, NonCallableMagicMock
+ ]
+ for Klass in list(klasses):
+ klasses.append(lambda K=Klass: K(spec=Anything))
+ klasses.append(lambda K=Klass: K(spec_set=Anything))
+
+ for Klass in klasses:
+ for kwargs in dict(), dict(spec_set=True):
+ mock = Klass()
+                # no error
+ mock.one, mock.two, mock.three
+
+ for One, Two in [(_One, _Two), (['one'], ['two'])]:
+ for kwargs in dict(), dict(spec_set=True):
+ mock.mock_add_spec(One, **kwargs)
+
+ mock.one
+ self.assertRaises(
+ AttributeError, getattr, mock, 'two'
+ )
+ self.assertRaises(
+ AttributeError, getattr, mock, 'three'
+ )
+ if 'spec_set' in kwargs:
+ self.assertRaises(
+ AttributeError, setattr, mock, 'three', None
+ )
+
+ mock.mock_add_spec(Two, **kwargs)
+ self.assertRaises(
+ AttributeError, getattr, mock, 'one'
+ )
+ mock.two
+ self.assertRaises(
+ AttributeError, getattr, mock, 'three'
+ )
+ if 'spec_set' in kwargs:
+ self.assertRaises(
+ AttributeError, setattr, mock, 'three', None
+ )
+ # note that creating a mock, setting an instance attribute, and
+ # *then* setting a spec doesn't work. Not the intended use case
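+        # A minimal sketch of the unsupported pattern (illustrative only):
+        #   m = Mock()
+        #   m.extra = 1               # instance attribute set first...
+        #   m.mock_add_spec(['one'])
+        #   m.extra                   # ...still reachable despite the spec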
+
+
+ def test_mock_add_spec_magic_methods(self):
+ for Klass in MagicMock, NonCallableMagicMock:
+ mock = Klass()
+ int(mock)
+
+ mock.mock_add_spec(object)
+ self.assertRaises(TypeError, int, mock)
+
+ mock = Klass()
+ mock['foo']
+            mock.__int__.return_value = 4
+
+ mock.mock_add_spec(int)
+ self.assertEqual(int(mock), 4)
+ self.assertRaises(TypeError, lambda: mock['foo'])
+
+
+ def test_adding_child_mock(self):
+ for Klass in NonCallableMock, Mock, MagicMock, NonCallableMagicMock:
+ mock = Klass()
+
+ mock.foo = Mock()
+ mock.foo()
+
+ self.assertEqual(mock.method_calls, [call.foo()])
+ self.assertEqual(mock.mock_calls, [call.foo()])
+
+ mock = Klass()
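+            # a mock created with its own name is not attached as a child,
+            # so its calls are not recorded on the parent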
+ mock.bar = Mock(name='name')
+ mock.bar()
+ self.assertEqual(mock.method_calls, [])
+ self.assertEqual(mock.mock_calls, [])
+
+ # mock with an existing _new_parent but no name
+ mock = Klass()
+ mock.baz = MagicMock()()
+ mock.baz()
+ self.assertEqual(mock.method_calls, [])
+ self.assertEqual(mock.mock_calls, [])
+
+
+ def test_adding_return_value_mock(self):
+ for Klass in Mock, MagicMock:
+ mock = Klass()
+ mock.return_value = MagicMock()
+
+ mock()()
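+            # calling the return value is recorded as call()(), hence: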
+ self.assertEqual(mock.mock_calls, [call(), call()()])
+
+
+ def test_manager_mock(self):
+ class Foo(object):
+ one = 'one'
+ two = 'two'
+ manager = Mock()
+ p1 = patch.object(Foo, 'one')
+ p2 = patch.object(Foo, 'two')
+
+ mock_one = p1.start()
+ self.addCleanup(p1.stop)
+ mock_two = p2.start()
+ self.addCleanup(p2.stop)
+
+ manager.attach_mock(mock_one, 'one')
+ manager.attach_mock(mock_two, 'two')
+
+ Foo.two()
+ Foo.one()
+
+ self.assertEqual(manager.mock_calls, [call.two(), call.one()])
+
+
+ def test_magic_methods_mock_calls(self):
+ for Klass in Mock, MagicMock:
+ m = Klass()
+ m.__int__ = Mock(return_value=3)
+ m.__float__ = MagicMock(return_value=3.0)
+ int(m)
+ float(m)
+
+ self.assertEqual(m.mock_calls, [call.__int__(), call.__float__()])
+ self.assertEqual(m.method_calls, [])
+
+
+    def test_attribute_deletion(self):
+        # this behaviour isn't *useful*, but at least it's now tested...
+        # NB: shadowed by the second test_attribute_deletion defined
+        # further down, so unittest never actually runs this version
+ for Klass in Mock, MagicMock, NonCallableMagicMock, NonCallableMock:
+ m = Klass()
+ original = m.foo
+ m.foo = 3
+ del m.foo
+ self.assertEqual(m.foo, original)
+
+ new = m.foo = Mock()
+ del m.foo
+ self.assertEqual(m.foo, new)
+
+
+ def test_mock_parents(self):
+ for Klass in Mock, MagicMock:
+ m = Klass()
+ original_repr = repr(m)
+ m.return_value = m
+ self.assertIs(m(), m)
+ self.assertEqual(repr(m), original_repr)
+
+ m.reset_mock()
+ self.assertIs(m(), m)
+ self.assertEqual(repr(m), original_repr)
+
+ m = Klass()
+ m.b = m.a
+ self.assertIn("name='mock.a'", repr(m.b))
+ self.assertIn("name='mock.a'", repr(m.a))
+ m.reset_mock()
+ self.assertIn("name='mock.a'", repr(m.b))
+ self.assertIn("name='mock.a'", repr(m.a))
+
+ m = Klass()
+ original_repr = repr(m)
+ m.a = m()
+ m.a.return_value = m
+
+ self.assertEqual(repr(m), original_repr)
+ self.assertEqual(repr(m.a()), original_repr)
+
+
+ def test_attach_mock(self):
+ classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
+ for Klass in classes:
+ for Klass2 in classes:
+ m = Klass()
+
+ m2 = Klass2(name='foo')
+ m.attach_mock(m2, 'bar')
+
+ self.assertIs(m.bar, m2)
+ self.assertIn("name='mock.bar'", repr(m2))
+
+ m.bar.baz(1)
+ self.assertEqual(m.mock_calls, [call.bar.baz(1)])
+ self.assertEqual(m.method_calls, [call.bar.baz(1)])
+
+
+ def test_attach_mock_return_value(self):
+ classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
+ for Klass in Mock, MagicMock:
+ for Klass2 in classes:
+ m = Klass()
+
+ m2 = Klass2(name='foo')
+ m.attach_mock(m2, 'return_value')
+
+ self.assertIs(m(), m2)
+ self.assertIn("name='mock()'", repr(m2))
+
+ m2.foo()
+ self.assertEqual(m.mock_calls, call().foo().call_list())
+
+
+ def test_attribute_deletion(self):
+ for mock in Mock(), MagicMock():
+ self.assertTrue(hasattr(mock, 'm'))
+
+ del mock.m
+ self.assertFalse(hasattr(mock, 'm'))
+
+ del mock.f
+ self.assertFalse(hasattr(mock, 'f'))
+ self.assertRaises(AttributeError, getattr, mock, 'f')
+
+
+ def test_class_assignable(self):
+ for mock in Mock(), MagicMock():
+ self.assertNotIsInstance(mock, int)
+
+ mock.__class__ = int
+ self.assertIsInstance(mock, int)
+
+
+ @unittest2.expectedFailure
+ def test_pickle(self):
+ for Klass in (MagicMock, Mock, Subclass, NonCallableMagicMock):
+ mock = Klass(name='foo', attribute=3)
+ mock.foo(1, 2, 3)
+ data = pickle.dumps(mock)
+ new = pickle.loads(data)
+
+ new.foo.assert_called_once_with(1, 2, 3)
+ self.assertFalse(new.called)
+ self.assertTrue(is_instance(new, Klass))
+ self.assertIsInstance(new, Thing)
+ self.assertIn('name="foo"', repr(new))
+ self.assertEqual(new.attribute, 3)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/testpatch.py b/python/mock-1.0.0/tests/testpatch.py
new file mode 100644
index 000000000..1ebe67106
--- /dev/null
+++ b/python/mock-1.0.0/tests/testpatch.py
@@ -0,0 +1,1790 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+import os
+import sys
+
+from tests import support
+from tests.support import unittest2, inPy3k, SomeClass, is_instance, callable
+
+from mock import (
+ NonCallableMock, CallableMixin, patch, sentinel,
+ MagicMock, Mock, NonCallableMagicMock, patch, _patch,
+ DEFAULT, call, _get_target
+)
+
+builtin_string = '__builtin__'
+if inPy3k:
+ builtin_string = 'builtins'
+ unicode = str
+
+PTModule = sys.modules[__name__]
+MODNAME = '%s.PTModule' % __name__
+
+
+def _get_proxy(obj, get_only=True):
+ class Proxy(object):
+ def __getattr__(self, name):
+ return getattr(obj, name)
+ if not get_only:
+ def __setattr__(self, name, value):
+ setattr(obj, name, value)
+ def __delattr__(self, name):
+ delattr(obj, name)
+ Proxy.__setattr__ = __setattr__
+ Proxy.__delattr__ = __delattr__
+ return Proxy()
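+
+# Illustrative sketch (not used by the tests below): the proxy forwards
+# attribute access to the wrapped object, e.g.
+#
+#   class Thing(object):
+#       foo = 'foo'
+#   read_proxy = _get_proxy(Thing)
+#   read_proxy.foo                  # -> 'foo', forwarded to Thing
+#   rw_proxy = _get_proxy(Thing, get_only=False)
+#   rw_proxy.foo = 'bar'            # forwarded too: Thing.foo is now 'bar'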
+
+
+# for use in the test
+something = sentinel.Something
+something_else = sentinel.SomethingElse
+
+
+class Foo(object):
+ def __init__(self, a):
+ pass
+ def f(self, a):
+ pass
+ def g(self):
+ pass
+ foo = 'bar'
+
+ class Bar(object):
+ def a(self):
+ pass
+
+foo_name = '%s.Foo' % __name__
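+# patch() resolves a string target like this by importing the module
+# part and replacing the named attribute, so foo_name points patch()
+# at the Foo class defined above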
+
+
+def function(a, b=Foo):
+ pass
+
+
+class Container(object):
+ def __init__(self):
+ self.values = {}
+
+ def __getitem__(self, name):
+ return self.values[name]
+
+ def __setitem__(self, name, value):
+ self.values[name] = value
+
+ def __delitem__(self, name):
+ del self.values[name]
+
+ def __iter__(self):
+ return iter(self.values)
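+
+# A usage sketch (illustrative only): patch.dict needs just the mapping
+# protocol implemented above, so a Container can be patched like a dict:
+#
+#   c = Container()
+#   c['key'] = 'value'
+#   with patch.dict(c, {'key': 'patched'}):
+#       assert c['key'] == 'patched'
+#   assert c['key'] == 'value'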
+
+
+
+class PatchTest(unittest2.TestCase):
+
+ def assertNotCallable(self, obj, magic=True):
+ MockClass = NonCallableMagicMock
+ if not magic:
+ MockClass = NonCallableMock
+
+ self.assertRaises(TypeError, obj)
+ self.assertTrue(is_instance(obj, MockClass))
+ self.assertFalse(is_instance(obj, CallableMixin))
+
+
+ def test_single_patchobject(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched, "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patchobject_with_none(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ @patch.object(Something, 'attribute', None)
+ def test():
+ self.assertIsNone(Something.attribute, "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_multiple_patchobject(self):
+ class Something(object):
+ attribute = sentinel.Original
+ next_attribute = sentinel.Original2
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ @patch.object(Something, 'next_attribute', sentinel.Patched2)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched,
+ "unpatched")
+ self.assertEqual(Something.next_attribute, sentinel.Patched2,
+ "unpatched")
+
+ test()
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+ self.assertEqual(Something.next_attribute, sentinel.Original2,
+ "patch not restored")
+
+
+ def test_object_lookup_is_quite_lazy(self):
+ global something
+ original = something
+ @patch('%s.something' % __name__, sentinel.Something2)
+ def test():
+ pass
+
+ try:
+ something = sentinel.replacement_value
+ test()
+ self.assertEqual(something, sentinel.replacement_value)
+ finally:
+ something = original
+
+
+ def test_patch(self):
+ @patch('%s.something' % __name__, sentinel.Something2)
+ def test():
+ self.assertEqual(PTModule.something, sentinel.Something2,
+ "unpatched")
+
+ test()
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+
+ @patch('%s.something' % __name__, sentinel.Something2)
+ @patch('%s.something_else' % __name__, sentinel.SomethingElse)
+ def test():
+ self.assertEqual(PTModule.something, sentinel.Something2,
+ "unpatched")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "unpatched")
+
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "patch not restored")
+
+        # Test that patching and restoring works a second time
+ test()
+
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+ self.assertEqual(PTModule.something_else, sentinel.SomethingElse,
+ "patch not restored")
+
+ mock = Mock()
+ mock.return_value = sentinel.Handle
+ @patch('%s.open' % builtin_string, mock)
+ def test():
+ self.assertEqual(open('filename', 'r'), sentinel.Handle,
+ "open not patched")
+ test()
+ test()
+
+ self.assertNotEqual(open, mock, "patch not restored")
+
+
+ def test_patch_class_attribute(self):
+ @patch('%s.SomeClass.class_attribute' % __name__,
+ sentinel.ClassAttribute)
+ def test():
+ self.assertEqual(PTModule.SomeClass.class_attribute,
+ sentinel.ClassAttribute, "unpatched")
+ test()
+
+ self.assertIsNone(PTModule.SomeClass.class_attribute,
+ "patch not restored")
+
+
+ def test_patchobject_with_default_mock(self):
+ class Test(object):
+ something = sentinel.Original
+ something2 = sentinel.Original2
+
+ @patch.object(Test, 'something')
+ def test(mock):
+ self.assertEqual(mock, Test.something,
+ "Mock not passed into test function")
+ self.assertIsInstance(mock, MagicMock,
+ "patch with two arguments did not create a mock")
+
+ test()
+
+ @patch.object(Test, 'something')
+ @patch.object(Test, 'something2')
+ def test(this1, this2, mock1, mock2):
+ self.assertEqual(this1, sentinel.this1,
+ "Patched function didn't receive initial argument")
+ self.assertEqual(this2, sentinel.this2,
+ "Patched function didn't receive second argument")
+ self.assertEqual(mock1, Test.something2,
+ "Mock not passed into test function")
+ self.assertEqual(mock2, Test.something,
+ "Second Mock not passed into test function")
+ self.assertIsInstance(mock2, MagicMock,
+ "patch with two arguments did not create a mock")
+            self.assertIsInstance(mock1, MagicMock,
+                "patch with two arguments did not create a mock")
+
+ # A hack to test that new mocks are passed the second time
+ self.assertNotEqual(outerMock1, mock1, "unexpected value for mock1")
+            self.assertNotEqual(outerMock2, mock2, "unexpected value for mock2")
+ return mock1, mock2
+
+ outerMock1 = outerMock2 = None
+ outerMock1, outerMock2 = test(sentinel.this1, sentinel.this2)
+
+ # Test that executing a second time creates new mocks
+ test(sentinel.this1, sentinel.this2)
+
+
+ def test_patch_with_spec(self):
+ @patch('%s.SomeClass' % __name__, spec=SomeClass)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+
+ test()
+
+
+ def test_patchobject_with_spec(self):
+ @patch.object(SomeClass, 'class_attribute', spec=SomeClass)
+ def test(MockAttribute):
+ self.assertEqual(SomeClass.class_attribute, MockAttribute)
+ self.assertTrue(is_instance(SomeClass.class_attribute.wibble,
+ MagicMock))
+ self.assertRaises(AttributeError,
+ lambda: SomeClass.class_attribute.not_wibble)
+
+ test()
+
+
+ def test_patch_with_spec_as_list(self):
+ @patch('%s.SomeClass' % __name__, spec=['wibble'])
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+
+ test()
+
+
+ def test_patchobject_with_spec_as_list(self):
+ @patch.object(SomeClass, 'class_attribute', spec=['wibble'])
+ def test(MockAttribute):
+ self.assertEqual(SomeClass.class_attribute, MockAttribute)
+ self.assertTrue(is_instance(SomeClass.class_attribute.wibble,
+ MagicMock))
+ self.assertRaises(AttributeError,
+ lambda: SomeClass.class_attribute.not_wibble)
+
+ test()
+
+
+ def test_nested_patch_with_spec_as_list(self):
+ # regression test for nested decorators
+ @patch('%s.open' % builtin_string)
+ @patch('%s.SomeClass' % __name__, spec=['wibble'])
+ def test(MockSomeClass, MockOpen):
+ self.assertEqual(SomeClass, MockSomeClass)
+ self.assertTrue(is_instance(SomeClass.wibble, MagicMock))
+ self.assertRaises(AttributeError, lambda: SomeClass.not_wibble)
+ test()
+
+
+ def test_patch_with_spec_as_boolean(self):
+ @patch('%s.SomeClass' % __name__, spec=True)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ # Should not raise attribute error
+ MockSomeClass.wibble
+
+ self.assertRaises(AttributeError, lambda: MockSomeClass.not_wibble)
+
+ test()
+
+
+ def test_patch_object_with_spec_as_boolean(self):
+ @patch.object(PTModule, 'SomeClass', spec=True)
+ def test(MockSomeClass):
+ self.assertEqual(SomeClass, MockSomeClass)
+ # Should not raise attribute error
+ MockSomeClass.wibble
+
+ self.assertRaises(AttributeError, lambda: MockSomeClass.not_wibble)
+
+ test()
+
+
+ def test_patch_class_acts_with_spec_is_inherited(self):
+ @patch('%s.SomeClass' % __name__, spec=True)
+ def test(MockSomeClass):
+ self.assertTrue(is_instance(MockSomeClass, MagicMock))
+ instance = MockSomeClass()
+ self.assertNotCallable(instance)
+ # Should not raise attribute error
+ instance.wibble
+
+ self.assertRaises(AttributeError, lambda: instance.not_wibble)
+
+ test()
+
+
+ def test_patch_with_create_mocks_non_existent_attributes(self):
+ @patch('%s.frooble' % builtin_string, sentinel.Frooble, create=True)
+ def test():
+ self.assertEqual(frooble, sentinel.Frooble)
+
+ test()
+ self.assertRaises(NameError, lambda: frooble)
+
+
+ def test_patchobject_with_create_mocks_non_existent_attributes(self):
+ @patch.object(SomeClass, 'frooble', sentinel.Frooble, create=True)
+ def test():
+ self.assertEqual(SomeClass.frooble, sentinel.Frooble)
+
+ test()
+ self.assertFalse(hasattr(SomeClass, 'frooble'))
+
+
+ def test_patch_wont_create_by_default(self):
+ try:
+ @patch('%s.frooble' % builtin_string, sentinel.Frooble)
+ def test():
+ self.assertEqual(frooble, sentinel.Frooble)
+
+ test()
+ except AttributeError:
+ pass
+ else:
+ self.fail('Patching non existent attributes should fail')
+
+ self.assertRaises(NameError, lambda: frooble)
+
+
+ def test_patchobject_wont_create_by_default(self):
+ try:
+ @patch.object(SomeClass, 'frooble', sentinel.Frooble)
+ def test():
+ self.fail('Patching non existent attributes should fail')
+
+ test()
+ except AttributeError:
+ pass
+ else:
+ self.fail('Patching non existent attributes should fail')
+ self.assertFalse(hasattr(SomeClass, 'frooble'))
+
+
+ def test_patch_with_static_methods(self):
+ class Foo(object):
+ @staticmethod
+ def woot():
+ return sentinel.Static
+
+ @patch.object(Foo, 'woot', staticmethod(lambda: sentinel.Patched))
+ def anonymous():
+ self.assertEqual(Foo.woot(), sentinel.Patched)
+ anonymous()
+
+ self.assertEqual(Foo.woot(), sentinel.Static)
+
+
+ def test_patch_local(self):
+ foo = sentinel.Foo
+ @patch.object(sentinel, 'Foo', 'Foo')
+ def anonymous():
+ self.assertEqual(sentinel.Foo, 'Foo')
+ anonymous()
+
+ self.assertEqual(sentinel.Foo, foo)
+
+
+ def test_patch_slots(self):
+ class Foo(object):
+ __slots__ = ('Foo',)
+
+ foo = Foo()
+ foo.Foo = sentinel.Foo
+
+ @patch.object(foo, 'Foo', 'Foo')
+ def anonymous():
+ self.assertEqual(foo.Foo, 'Foo')
+ anonymous()
+
+ self.assertEqual(foo.Foo, sentinel.Foo)
+
+
+ def test_patchobject_class_decorator(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ class Foo(object):
+ def test_method(other_self):
+ self.assertEqual(Something.attribute, sentinel.Patched,
+ "unpatched")
+ def not_test_method(other_self):
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "non-test method patched")
+
+ Foo = patch.object(Something, 'attribute', sentinel.Patched)(Foo)
+
+ f = Foo()
+ f.test_method()
+ f.not_test_method()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patch_class_decorator(self):
+ class Something(object):
+ attribute = sentinel.Original
+
+ class Foo(object):
+ def test_method(other_self, mock_something):
+ self.assertEqual(PTModule.something, mock_something,
+ "unpatched")
+ def not_test_method(other_self):
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "non-test method patched")
+ Foo = patch('%s.something' % __name__)(Foo)
+
+ f = Foo()
+ f.test_method()
+ f.not_test_method()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+ self.assertEqual(PTModule.something, sentinel.Something,
+ "patch not restored")
+
+
+ def test_patchobject_twice(self):
+ class Something(object):
+ attribute = sentinel.Original
+ next_attribute = sentinel.Original2
+
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ @patch.object(Something, 'attribute', sentinel.Patched)
+ def test():
+ self.assertEqual(Something.attribute, sentinel.Patched, "unpatched")
+
+ test()
+
+ self.assertEqual(Something.attribute, sentinel.Original,
+ "patch not restored")
+
+
+ def test_patch_dict(self):
+ foo = {'initial': object(), 'other': 'something'}
+ original = foo.copy()
+
+ @patch.dict(foo)
+ def test():
+ foo['a'] = 3
+ del foo['initial']
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ self.assertEqual(len(foo), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, [('a', 'b')])
+ def test():
+ self.assertEqual(len(foo), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo, original)
+
+
+ def test_patch_dict_with_container_object(self):
+ foo = Container()
+ foo['initial'] = object()
+ foo['other'] = 'something'
+
+ original = foo.values.copy()
+
+ @patch.dict(foo)
+ def test():
+ foo['a'] = 3
+ del foo['initial']
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ self.assertEqual(len(foo.values), 3)
+ self.assertEqual(foo['a'], 'b')
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+
+ def test_patch_dict_with_clear(self):
+ foo = {'initial': object(), 'other': 'something'}
+ original = foo.copy()
+
+ @patch.dict(foo, clear=True)
+ def test():
+ self.assertEqual(foo, {})
+ foo['a'] = 3
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, {'a': 'b'}, clear=True)
+ def test():
+ self.assertEqual(foo, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo, original)
+
+ @patch.dict(foo, [('a', 'b')], clear=True)
+ def test():
+ self.assertEqual(foo, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo, original)
+
+
+ def test_patch_dict_with_container_object_and_clear(self):
+ foo = Container()
+ foo['initial'] = object()
+ foo['other'] = 'something'
+
+ original = foo.values.copy()
+
+ @patch.dict(foo, clear=True)
+ def test():
+ self.assertEqual(foo.values, {})
+ foo['a'] = 3
+ foo['other'] = 'something else'
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+ @patch.dict(foo, {'a': 'b'}, clear=True)
+ def test():
+ self.assertEqual(foo.values, {'a': 'b'})
+
+ test()
+
+ self.assertEqual(foo.values, original)
+
+
+ def test_name_preserved(self):
+ foo = {}
+
+ @patch('%s.SomeClass' % __name__, object())
+ @patch('%s.SomeClass' % __name__, object(), autospec=True)
+ @patch.object(SomeClass, object())
+ @patch.dict(foo)
+ def some_name():
+ pass
+
+ self.assertEqual(some_name.__name__, 'some_name')
+
+
+ def test_patch_with_exception(self):
+ foo = {}
+
+ @patch.dict(foo, {'a': 'b'})
+ def test():
+ raise NameError('Konrad')
+ try:
+ test()
+ except NameError:
+ pass
+ else:
+ self.fail('NameError not raised by test')
+
+ self.assertEqual(foo, {})
+
+
+ def test_patch_dict_with_string(self):
+ @patch.dict('os.environ', {'konrad_delong': 'some value'})
+ def test():
+ self.assertIn('konrad_delong', os.environ)
+
+ test()
+
+
+ @unittest2.expectedFailure
+ def test_patch_descriptor(self):
+ # would be some effort to fix this - we could special case the
+ # builtin descriptors: classmethod, property, staticmethod
+ class Nothing(object):
+ foo = None
+
+ class Something(object):
+ foo = {}
+
+ @patch.object(Nothing, 'foo', 2)
+ @classmethod
+ def klass(cls):
+ self.assertIs(cls, Something)
+
+ @patch.object(Nothing, 'foo', 2)
+ @staticmethod
+ def static(arg):
+ return arg
+
+ @patch.dict(foo)
+ @classmethod
+ def klass_dict(cls):
+ self.assertIs(cls, Something)
+
+ @patch.dict(foo)
+ @staticmethod
+ def static_dict(arg):
+ return arg
+
+ # these will raise exceptions if patching descriptors is broken
+ self.assertEqual(Something.static('f00'), 'f00')
+ Something.klass()
+ self.assertEqual(Something.static_dict('f00'), 'f00')
+ Something.klass_dict()
+
+ something = Something()
+ self.assertEqual(something.static('f00'), 'f00')
+ something.klass()
+ self.assertEqual(something.static_dict('f00'), 'f00')
+ something.klass_dict()
+
+
+ def test_patch_spec_set(self):
+ @patch('%s.SomeClass' % __name__, spec_set=SomeClass)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+ @patch.object(support, 'SomeClass', spec_set=SomeClass)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+ @patch('%s.SomeClass' % __name__, spec_set=True)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+ @patch.object(support, 'SomeClass', spec_set=True)
+ def test(MockClass):
+ MockClass.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+
+ def test_spec_set_inherit(self):
+ @patch('%s.SomeClass' % __name__, spec_set=True)
+ def test(MockClass):
+ instance = MockClass()
+ instance.z = 'foo'
+
+ self.assertRaises(AttributeError, test)
+
+
+ def test_patch_start_stop(self):
+ original = something
+ patcher = patch('%s.something' % __name__)
+ self.assertIs(something, original)
+ mock = patcher.start()
+ try:
+ self.assertIsNot(mock, original)
+ self.assertIs(something, mock)
+ finally:
+ patcher.stop()
+ self.assertIs(something, original)
+
+
+ def test_stop_without_start(self):
+ patcher = patch(foo_name, 'bar', 3)
+
+ # calling stop without start used to produce a very obscure error
+ self.assertRaises(RuntimeError, patcher.stop)
+
+
+ def test_patchobject_start_stop(self):
+ original = something
+ patcher = patch.object(PTModule, 'something', 'foo')
+ self.assertIs(something, original)
+ replaced = patcher.start()
+ try:
+ self.assertEqual(replaced, 'foo')
+ self.assertIs(something, replaced)
+ finally:
+ patcher.stop()
+ self.assertIs(something, original)
+
+
+ def test_patch_dict_start_stop(self):
+ d = {'foo': 'bar'}
+ original = d.copy()
+ patcher = patch.dict(d, [('spam', 'eggs')], clear=True)
+ self.assertEqual(d, original)
+
+ patcher.start()
+ try:
+ self.assertEqual(d, {'spam': 'eggs'})
+ finally:
+ patcher.stop()
+ self.assertEqual(d, original)
+
+
+ def test_patch_dict_class_decorator(self):
+ this = self
+ d = {'spam': 'eggs'}
+ original = d.copy()
+
+ class Test(object):
+ def test_first(self):
+ this.assertEqual(d, {'foo': 'bar'})
+ def test_second(self):
+ this.assertEqual(d, {'foo': 'bar'})
+
+ Test = patch.dict(d, {'foo': 'bar'}, clear=True)(Test)
+ self.assertEqual(d, original)
+
+ test = Test()
+
+ test.test_first()
+ self.assertEqual(d, original)
+
+ test.test_second()
+ self.assertEqual(d, original)
+
+ test = Test()
+
+ test.test_first()
+ self.assertEqual(d, original)
+
+ test.test_second()
+ self.assertEqual(d, original)
+
+
+ def test_get_only_proxy(self):
+ class Something(object):
+ foo = 'foo'
+ class SomethingElse:
+ foo = 'foo'
+
+        for thing in Something, SomethingElse, Something(), SomethingElse():
+ proxy = _get_proxy(thing)
+
+ @patch.object(proxy, 'foo', 'bar')
+ def test():
+ self.assertEqual(proxy.foo, 'bar')
+ test()
+ self.assertEqual(proxy.foo, 'foo')
+ self.assertEqual(thing.foo, 'foo')
+ self.assertNotIn('foo', proxy.__dict__)
+
+
+ def test_get_set_delete_proxy(self):
+ class Something(object):
+ foo = 'foo'
+ class SomethingElse:
+ foo = 'foo'
+
+        for thing in Something, SomethingElse, Something(), SomethingElse():
+            proxy = _get_proxy(thing, get_only=False)
+
+ @patch.object(proxy, 'foo', 'bar')
+ def test():
+ self.assertEqual(proxy.foo, 'bar')
+ test()
+ self.assertEqual(proxy.foo, 'foo')
+ self.assertEqual(thing.foo, 'foo')
+ self.assertNotIn('foo', proxy.__dict__)
+
+
+ def test_patch_keyword_args(self):
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+
+ patcher = patch(foo_name, **kwargs)
+ mock = patcher.start()
+ patcher.stop()
+
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def test_patch_object_keyword_args(self):
+ kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
+ 'foo': MagicMock()}
+
+ patcher = patch.object(Foo, 'f', **kwargs)
+ mock = patcher.start()
+ patcher.stop()
+
+ self.assertRaises(KeyError, mock)
+ self.assertEqual(mock.foo.bar(), 33)
+ self.assertIsInstance(mock.foo, MagicMock)
+
+
+ def test_patch_dict_keyword_args(self):
+ original = {'foo': 'bar'}
+ copy = original.copy()
+
+ patcher = patch.dict(original, foo=3, bar=4, baz=5)
+ patcher.start()
+
+ try:
+ self.assertEqual(original, dict(foo=3, bar=4, baz=5))
+ finally:
+ patcher.stop()
+
+ self.assertEqual(original, copy)
+
+
+ def test_autospec(self):
+ class Boo(object):
+ def __init__(self, a):
+ pass
+ def f(self, a):
+ pass
+ def g(self):
+ pass
+ foo = 'bar'
+
+ class Bar(object):
+ def a(self):
+ pass
+
+ def _test(mock):
+ mock(1)
+ mock.assert_called_with(1)
+ self.assertRaises(TypeError, mock)
+
+ def _test2(mock):
+ mock.f(1)
+ mock.f.assert_called_with(1)
+ self.assertRaises(TypeError, mock.f)
+
+ mock.g()
+ mock.g.assert_called_with()
+ self.assertRaises(TypeError, mock.g, 1)
+
+ self.assertRaises(AttributeError, getattr, mock, 'h')
+
+ mock.foo.lower()
+ mock.foo.lower.assert_called_with()
+ self.assertRaises(AttributeError, getattr, mock.foo, 'bar')
+
+ mock.Bar()
+ mock.Bar.assert_called_with()
+
+ mock.Bar.a()
+ mock.Bar.a.assert_called_with()
+ self.assertRaises(TypeError, mock.Bar.a, 1)
+
+ mock.Bar().a()
+ mock.Bar().a.assert_called_with()
+ self.assertRaises(TypeError, mock.Bar().a, 1)
+
+ self.assertRaises(AttributeError, getattr, mock.Bar, 'b')
+ self.assertRaises(AttributeError, getattr, mock.Bar(), 'b')
+
+ def function(mock):
+ _test(mock)
+ _test2(mock)
+ _test2(mock(1))
+ self.assertIs(mock, Foo)
+ return mock
+
+ test = patch(foo_name, autospec=True)(function)
+
+ mock = test()
+ self.assertIsNot(Foo, mock)
+ # test patching a second time works
+ test()
+
+ module = sys.modules[__name__]
+ test = patch.object(module, 'Foo', autospec=True)(function)
+
+ mock = test()
+ self.assertIsNot(Foo, mock)
+ # test patching a second time works
+ test()
+
+
+ def test_autospec_function(self):
+ @patch('%s.function' % __name__, autospec=True)
+ def test(mock):
+ function(1)
+ function.assert_called_with(1)
+ function(2, 3)
+ function.assert_called_with(2, 3)
+
+ self.assertRaises(TypeError, function)
+ self.assertRaises(AttributeError, getattr, function, 'foo')
+
+ test()
+
+
+ def test_autospec_keywords(self):
+ @patch('%s.function' % __name__, autospec=True,
+ return_value=3)
+ def test(mock_function):
+ #self.assertEqual(function.abc, 'foo')
+ return function(1, 2)
+
+ result = test()
+ self.assertEqual(result, 3)
+
+
+ def test_autospec_with_new(self):
+ patcher = patch('%s.function' % __name__, new=3, autospec=True)
+ self.assertRaises(TypeError, patcher.start)
+
+ module = sys.modules[__name__]
+ patcher = patch.object(module, 'function', new=3, autospec=True)
+ self.assertRaises(TypeError, patcher.start)
+
+
+ def test_autospec_with_object(self):
+ class Bar(Foo):
+ extra = []
+
+ patcher = patch(foo_name, autospec=Bar)
+ mock = patcher.start()
+ try:
+ self.assertIsInstance(mock, Bar)
+ self.assertIsInstance(mock.extra, list)
+ finally:
+ patcher.stop()
+
+
+ def test_autospec_inherits(self):
+ FooClass = Foo
+ patcher = patch(foo_name, autospec=True)
+ mock = patcher.start()
+ try:
+ self.assertIsInstance(mock, FooClass)
+ self.assertIsInstance(mock(3), FooClass)
+ finally:
+ patcher.stop()
+
+
+ def test_autospec_name(self):
+ patcher = patch(foo_name, autospec=True)
+ mock = patcher.start()
+
+ try:
+ self.assertIn(" name='Foo'", repr(mock))
+ self.assertIn(" name='Foo.f'", repr(mock.f))
+ self.assertIn(" name='Foo()'", repr(mock(None)))
+ self.assertIn(" name='Foo().f'", repr(mock(None).f))
+ finally:
+ patcher.stop()
+
+
+ def test_tracebacks(self):
+ @patch.object(Foo, 'f', object())
+ def test():
+ raise AssertionError
+ try:
+ test()
+ except:
+ err = sys.exc_info()
+
+ result = unittest2.TextTestResult(None, None, 0)
+ traceback = result._exc_info_to_string(err, self)
+ self.assertIn('raise AssertionError', traceback)
+
+
+ def test_new_callable_patch(self):
+ patcher = patch(foo_name, new_callable=NonCallableMagicMock)
+
+ m1 = patcher.start()
+ patcher.stop()
+ m2 = patcher.start()
+ patcher.stop()
+
+ self.assertIsNot(m1, m2)
+ for mock in m1, m2:
+            self.assertNotCallable(mock)
+
+
+ def test_new_callable_patch_object(self):
+ patcher = patch.object(Foo, 'f', new_callable=NonCallableMagicMock)
+
+ m1 = patcher.start()
+ patcher.stop()
+ m2 = patcher.start()
+ patcher.stop()
+
+ self.assertIsNot(m1, m2)
+ for mock in m1, m2:
+            self.assertNotCallable(mock)
+
+
+ def test_new_callable_keyword_arguments(self):
+ class Bar(object):
+ kwargs = None
+ def __init__(self, **kwargs):
+ Bar.kwargs = kwargs
+
+ patcher = patch(foo_name, new_callable=Bar, arg1=1, arg2=2)
+ m = patcher.start()
+ try:
+ self.assertIs(type(m), Bar)
+ self.assertEqual(Bar.kwargs, dict(arg1=1, arg2=2))
+ finally:
+ patcher.stop()
+
+
+ def test_new_callable_spec(self):
+ class Bar(object):
+ kwargs = None
+ def __init__(self, **kwargs):
+ Bar.kwargs = kwargs
+
+ patcher = patch(foo_name, new_callable=Bar, spec=Bar)
+ patcher.start()
+ try:
+ self.assertEqual(Bar.kwargs, dict(spec=Bar))
+ finally:
+ patcher.stop()
+
+ patcher = patch(foo_name, new_callable=Bar, spec_set=Bar)
+ patcher.start()
+ try:
+ self.assertEqual(Bar.kwargs, dict(spec_set=Bar))
+ finally:
+ patcher.stop()
+
+
+ def test_new_callable_create(self):
+ non_existent_attr = '%s.weeeee' % foo_name
+ p = patch(non_existent_attr, new_callable=NonCallableMock)
+ self.assertRaises(AttributeError, p.start)
+
+ p = patch(non_existent_attr, new_callable=NonCallableMock,
+ create=True)
+ m = p.start()
+ try:
+ self.assertNotCallable(m, magic=False)
+ finally:
+ p.stop()
+
+
+ def test_new_callable_incompatible_with_new(self):
+ self.assertRaises(
+ ValueError, patch, foo_name, new=object(), new_callable=MagicMock
+ )
+ self.assertRaises(
+ ValueError, patch.object, Foo, 'f', new=object(),
+ new_callable=MagicMock
+ )
+
+
+ def test_new_callable_incompatible_with_autospec(self):
+ self.assertRaises(
+ ValueError, patch, foo_name, new_callable=MagicMock,
+ autospec=True
+ )
+ self.assertRaises(
+ ValueError, patch.object, Foo, 'f', new_callable=MagicMock,
+ autospec=True
+ )
+
+
+ def test_new_callable_inherit_for_mocks(self):
+ class MockSub(Mock):
+ pass
+
+ MockClasses = (
+ NonCallableMock, NonCallableMagicMock, MagicMock, Mock, MockSub
+ )
+ for Klass in MockClasses:
+ for arg in 'spec', 'spec_set':
+ kwargs = {arg: True}
+ p = patch(foo_name, new_callable=Klass, **kwargs)
+ m = p.start()
+ try:
+ instance = m.return_value
+ self.assertRaises(AttributeError, getattr, instance, 'x')
+ finally:
+ p.stop()
+
+
+ def test_new_callable_inherit_non_mock(self):
+ class NotAMock(object):
+ def __init__(self, spec):
+ self.spec = spec
+
+ p = patch(foo_name, new_callable=NotAMock, spec=True)
+ m = p.start()
+ try:
+ self.assertTrue(is_instance(m, NotAMock))
+ self.assertRaises(AttributeError, getattr, m, 'return_value')
+ finally:
+ p.stop()
+
+ self.assertEqual(m.spec, Foo)
+
+
+ def test_new_callable_class_decorating(self):
+ test = self
+ original = Foo
+ class SomeTest(object):
+
+ def _test(self, mock_foo):
+ test.assertIsNot(Foo, original)
+ test.assertIs(Foo, mock_foo)
+ test.assertIsInstance(Foo, SomeClass)
+
+ def test_two(self, mock_foo):
+ self._test(mock_foo)
+ def test_one(self, mock_foo):
+ self._test(mock_foo)
+
+ SomeTest = patch(foo_name, new_callable=SomeClass)(SomeTest)
+ SomeTest().test_one()
+ SomeTest().test_two()
+ self.assertIs(Foo, original)
+
+
+ def test_patch_multiple(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher1 = patch.multiple(foo_name, f=1, g=2)
+ patcher2 = patch.multiple(Foo, f=1, g=2)
+
+ for patcher in patcher1, patcher2:
+ patcher.start()
+ try:
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, 1)
+ self.assertEqual(Foo.g, 2)
+ finally:
+ patcher.stop()
+
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ @patch.multiple(foo_name, f=3, g=4)
+ def test():
+ self.assertIs(Foo, original_foo)
+ self.assertEqual(Foo.f, 3)
+ self.assertEqual(Foo.g, 4)
+
+ test()
+
+
+ def test_patch_multiple_no_kwargs(self):
+ self.assertRaises(ValueError, patch.multiple, foo_name)
+ self.assertRaises(ValueError, patch.multiple, Foo)
+
+
+ def test_patch_multiple_create_mocks(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.multiple(foo_name, f=DEFAULT, g=3, foo=DEFAULT)
+ def test(f, foo):
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertEqual(Foo.g, 3)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+
+ test()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create_mocks_different_order(self):
+ # bug revealed by Jython!
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher = patch.object(Foo, 'f', 3)
+ patcher.attribute_name = 'f'
+
+ other = patch.object(Foo, 'g', DEFAULT)
+ other.attribute_name = 'g'
+ patcher.additional_patchers = [other]
+
+ @patcher
+ def test(g):
+ self.assertIs(Foo.g, g)
+ self.assertEqual(Foo.f, 3)
+
+ test()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_stacked_decorators(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch.multiple(foo_name, foo=DEFAULT)
+ @patch(foo_name + '.g')
+ def test1(g, **kwargs):
+ _test(g, **kwargs)
+
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch(foo_name + '.g')
+ @patch.multiple(foo_name, foo=DEFAULT)
+ def test2(g, **kwargs):
+ _test(g, **kwargs)
+
+ @patch(foo_name + '.g')
+ @patch.multiple(foo_name, f=DEFAULT)
+ @patch.multiple(foo_name, foo=DEFAULT)
+ def test3(g, **kwargs):
+ _test(g, **kwargs)
+
+ def _test(g, **kwargs):
+ f = kwargs.pop('f')
+ foo = kwargs.pop('foo')
+ self.assertFalse(kwargs)
+
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertIs(Foo.g, g)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(g, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+
+ test1()
+ test2()
+ test3()
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create_mocks_patcher(self):
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher = patch.multiple(foo_name, f=DEFAULT, g=3, foo=DEFAULT)
+
+ result = patcher.start()
+ try:
+ f = result['f']
+ foo = result['foo']
+ self.assertEqual(set(result), set(['f', 'foo']))
+
+ self.assertIs(Foo, original_foo)
+ self.assertIs(Foo.f, f)
+ self.assertIs(Foo.foo, foo)
+ self.assertTrue(is_instance(f, MagicMock))
+ self.assertTrue(is_instance(foo, MagicMock))
+ finally:
+ patcher.stop()
+
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_decorating_class(self):
+ test = self
+ original_foo = Foo
+ original_f = Foo.f
+ original_g = Foo.g
+
+ class SomeTest(object):
+
+ def _test(self, f, foo):
+ test.assertIs(Foo, original_foo)
+ test.assertIs(Foo.f, f)
+ test.assertEqual(Foo.g, 3)
+ test.assertIs(Foo.foo, foo)
+ test.assertTrue(is_instance(f, MagicMock))
+ test.assertTrue(is_instance(foo, MagicMock))
+
+ def test_two(self, f, foo):
+ self._test(f, foo)
+ def test_one(self, f, foo):
+ self._test(f, foo)
+
+ SomeTest = patch.multiple(
+ foo_name, f=DEFAULT, g=3, foo=DEFAULT
+ )(SomeTest)
+
+ thing = SomeTest()
+ thing.test_one()
+ thing.test_two()
+
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_create(self):
+ patcher = patch.multiple(Foo, blam='blam')
+ self.assertRaises(AttributeError, patcher.start)
+
+ patcher = patch.multiple(Foo, blam='blam', create=True)
+ patcher.start()
+ try:
+ self.assertEqual(Foo.blam, 'blam')
+ finally:
+ patcher.stop()
+
+ self.assertFalse(hasattr(Foo, 'blam'))
+
+
+ def test_patch_multiple_spec_set(self):
+ # if spec_set works then we can assume that spec and autospec also
+ # work as the underlying machinery is the same
+ patcher = patch.multiple(Foo, foo=DEFAULT, spec_set=['a', 'b'])
+ result = patcher.start()
+ try:
+ self.assertEqual(Foo.foo, result['foo'])
+ Foo.foo.a(1)
+ Foo.foo.b(2)
+ Foo.foo.a.assert_called_with(1)
+ Foo.foo.b.assert_called_with(2)
+ self.assertRaises(AttributeError, setattr, Foo.foo, 'c', None)
+ finally:
+ patcher.stop()
+
+
+ def test_patch_multiple_new_callable(self):
+ class Thing(object):
+ pass
+
+ patcher = patch.multiple(
+ Foo, f=DEFAULT, g=DEFAULT, new_callable=Thing
+ )
+ result = patcher.start()
+ try:
+ self.assertIs(Foo.f, result['f'])
+ self.assertIs(Foo.g, result['g'])
+ self.assertIsInstance(Foo.f, Thing)
+ self.assertIsInstance(Foo.g, Thing)
+ self.assertIsNot(Foo.f, Foo.g)
+ finally:
+ patcher.stop()
+
+
+ def test_nested_patch_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'missing', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing1():
+ pass
+
+ @patch.object(Foo, 'missing', 1)
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing2():
+ pass
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ @patch.object(Foo, 'missing', 1)
+ def thing3():
+ pass
+
+ for func in thing1, thing2, thing3:
+ self.assertRaises(AttributeError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_new_callable_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+ original_foo = Foo.foo
+
+ def crasher():
+ raise NameError('crasher')
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ @patch.object(Foo, 'f', 1)
+ def thing1():
+ pass
+
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ def thing2():
+ pass
+
+ @patch.object(Foo, 'g', 1)
+ @patch.object(Foo, 'f', 1)
+ @patch.object(Foo, 'foo', new_callable=crasher)
+ def thing3():
+ pass
+
+ for func in thing1, thing2, thing3:
+ self.assertRaises(NameError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+ self.assertEqual(Foo.foo, original_foo)
+
+
+ def test_patch_multiple_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+
+ patcher = patch.object(Foo, 'f', 1)
+ patcher.attribute_name = 'f'
+
+ good = patch.object(Foo, 'g', 1)
+ good.attribute_name = 'g'
+
+ bad = patch.object(Foo, 'missing', 1)
+ bad.attribute_name = 'missing'
+
+ for additionals in [good, bad], [bad, good]:
+ patcher.additional_patchers = additionals
+
+ @patcher
+ def func():
+ pass
+
+ self.assertRaises(AttributeError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+
+
+ def test_patch_multiple_new_callable_failure(self):
+ original_f = Foo.f
+ original_g = Foo.g
+ original_foo = Foo.foo
+
+ def crasher():
+ raise NameError('crasher')
+
+ patcher = patch.object(Foo, 'f', 1)
+ patcher.attribute_name = 'f'
+
+ good = patch.object(Foo, 'g', 1)
+ good.attribute_name = 'g'
+
+ bad = patch.object(Foo, 'foo', new_callable=crasher)
+ bad.attribute_name = 'foo'
+
+ for additionals in [good, bad], [bad, good]:
+ patcher.additional_patchers = additionals
+
+ @patcher
+ def func():
+ pass
+
+ self.assertRaises(NameError, func)
+ self.assertEqual(Foo.f, original_f)
+ self.assertEqual(Foo.g, original_g)
+ self.assertEqual(Foo.foo, original_foo)
+
+
+ def test_patch_multiple_string_subclasses(self):
+ for base in (str, unicode):
+ Foo = type('Foo', (base,), {'fish': 'tasty'})
+ foo = Foo()
+ @patch.multiple(foo, fish='nearly gone')
+ def test():
+ self.assertEqual(foo.fish, 'nearly gone')
+
+ test()
+ self.assertEqual(foo.fish, 'tasty')
+
+
+ @patch('mock.patch.TEST_PREFIX', 'foo')
+ def test_patch_test_prefix(self):
+ class Foo(object):
+ thing = 'original'
+
+ def foo_one(self):
+ return self.thing
+ def foo_two(self):
+ return self.thing
+ def test_one(self):
+ return self.thing
+ def test_two(self):
+ return self.thing
+
+ Foo = patch.object(Foo, 'thing', 'changed')(Foo)
+
+ foo = Foo()
+ self.assertEqual(foo.foo_one(), 'changed')
+ self.assertEqual(foo.foo_two(), 'changed')
+ self.assertEqual(foo.test_one(), 'original')
+ self.assertEqual(foo.test_two(), 'original')
+
+
+ @patch('mock.patch.TEST_PREFIX', 'bar')
+ def test_patch_dict_test_prefix(self):
+ class Foo(object):
+ def bar_one(self):
+ return dict(the_dict)
+ def bar_two(self):
+ return dict(the_dict)
+ def test_one(self):
+ return dict(the_dict)
+ def test_two(self):
+ return dict(the_dict)
+
+ the_dict = {'key': 'original'}
+ Foo = patch.dict(the_dict, key='changed')(Foo)
+
+        foo = Foo()
+ self.assertEqual(foo.bar_one(), {'key': 'changed'})
+ self.assertEqual(foo.bar_two(), {'key': 'changed'})
+ self.assertEqual(foo.test_one(), {'key': 'original'})
+ self.assertEqual(foo.test_two(), {'key': 'original'})
+
+
+ def test_patch_with_spec_mock_repr(self):
+ for arg in ('spec', 'autospec', 'spec_set'):
+ p = patch('%s.SomeClass' % __name__, **{arg: True})
+ m = p.start()
+ try:
+ self.assertIn(" name='SomeClass'", repr(m))
+ self.assertIn(" name='SomeClass.class_attribute'",
+ repr(m.class_attribute))
+ self.assertIn(" name='SomeClass()'", repr(m()))
+ self.assertIn(" name='SomeClass().class_attribute'",
+ repr(m().class_attribute))
+ finally:
+ p.stop()
+
+
+ def test_patch_nested_autospec_repr(self):
+ p = patch('tests.support', autospec=True)
+ m = p.start()
+ try:
+ self.assertIn(" name='support.SomeClass.wibble()'",
+ repr(m.SomeClass.wibble()))
+ self.assertIn(" name='support.SomeClass().wibble()'",
+ repr(m.SomeClass().wibble()))
+ finally:
+ p.stop()
+
+
+ def test_mock_calls_with_patch(self):
+ for arg in ('spec', 'autospec', 'spec_set'):
+ p = patch('%s.SomeClass' % __name__, **{arg: True})
+ m = p.start()
+ try:
+ m.wibble()
+
+ kalls = [call.wibble()]
+ self.assertEqual(m.mock_calls, kalls)
+ self.assertEqual(m.method_calls, kalls)
+ self.assertEqual(m.wibble.mock_calls, [call()])
+
+ result = m()
+ kalls.append(call())
+ self.assertEqual(m.mock_calls, kalls)
+
+ result.wibble()
+ kalls.append(call().wibble())
+ self.assertEqual(m.mock_calls, kalls)
+
+ self.assertEqual(result.mock_calls, [call.wibble()])
+ self.assertEqual(result.wibble.mock_calls, [call()])
+ self.assertEqual(result.method_calls, [call.wibble()])
+ finally:
+ p.stop()
+
+
+ def test_patch_imports_lazily(self):
+ sys.modules.pop('squizz', None)
+
+ p1 = patch('squizz.squozz')
+ self.assertRaises(ImportError, p1.start)
+
+ squizz = Mock()
+ squizz.squozz = 6
+ sys.modules['squizz'] = squizz
+ p1 = patch('squizz.squozz')
+ squizz.squozz = 3
+ p1.start()
+ p1.stop()
+ self.assertEqual(squizz.squozz, 3)
+
+
+    def test_patch_propagates_exc_on_exit(self):
+ class holder:
+ exc_info = None, None, None
+
+ class custom_patch(_patch):
+ def __exit__(self, etype=None, val=None, tb=None):
+ _patch.__exit__(self, etype, val, tb)
+ holder.exc_info = etype, val, tb
+ stop = __exit__
+
+ def with_custom_patch(target):
+ getter, attribute = _get_target(target)
+ return custom_patch(
+ getter, attribute, DEFAULT, None, False, None,
+ None, None, {}
+ )
+
+ @with_custom_patch('squizz.squozz')
+ def test(mock):
+ raise RuntimeError
+
+ self.assertRaises(RuntimeError, test)
+ self.assertIs(holder.exc_info[0], RuntimeError)
+        self.assertIsNotNone(holder.exc_info[1],
+                             'exception value not propagated')
+        self.assertIsNotNone(holder.exc_info[2],
+                             'exception traceback not propagated')
+
+
+ def test_create_and_specs(self):
+ for kwarg in ('spec', 'spec_set', 'autospec'):
+ p = patch('%s.doesnotexist' % __name__, create=True,
+ **{kwarg: True})
+ self.assertRaises(TypeError, p.start)
+ self.assertRaises(NameError, lambda: doesnotexist)
+
+ # check that spec with create is innocuous if the original exists
+ p = patch(MODNAME, create=True, **{kwarg: True})
+ p.start()
+ p.stop()
+
+
+ def test_multiple_specs(self):
+ original = PTModule
+ for kwarg in ('spec', 'spec_set'):
+ p = patch(MODNAME, autospec=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+ for kwarg in ('spec', 'autospec'):
+ p = patch(MODNAME, spec_set=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+ for kwarg in ('spec_set', 'autospec'):
+ p = patch(MODNAME, spec=0, **{kwarg: 0})
+ self.assertRaises(TypeError, p.start)
+ self.assertIs(PTModule, original)
+
+
+ def test_specs_false_instead_of_none(self):
+ p = patch(MODNAME, spec=False, spec_set=False, autospec=False)
+ mock = p.start()
+ try:
+ # no spec should have been set, so attribute access should not fail
+ mock.does_not_exist
+ mock.does_not_exist = 3
+ finally:
+ p.stop()
+
+
+ def test_falsey_spec(self):
+ for kwarg in ('spec', 'autospec', 'spec_set'):
+ p = patch(MODNAME, **{kwarg: 0})
+ m = p.start()
+ try:
+                self.assertRaises(AttributeError, getattr, m, 'doesnotexist')
+ finally:
+ p.stop()
+
+
+ def test_spec_set_true(self):
+ for kwarg in ('spec', 'autospec'):
+ p = patch(MODNAME, spec_set=True, **{kwarg: True})
+ m = p.start()
+ try:
+ self.assertRaises(AttributeError, setattr, m,
+ 'doesnotexist', 'something')
+ self.assertRaises(AttributeError, getattr, m, 'doesnotexist')
+ finally:
+ p.stop()
+
+
+ def test_callable_spec_as_list(self):
+ spec = ('__call__',)
+ p = patch(MODNAME, spec=spec)
+ m = p.start()
+ try:
+ self.assertTrue(callable(m))
+ finally:
+ p.stop()
+
+
+ def test_not_callable_spec_as_list(self):
+ spec = ('foo', 'bar')
+ p = patch(MODNAME, spec=spec)
+ m = p.start()
+ try:
+ self.assertFalse(callable(m))
+ finally:
+ p.stop()
+
+
+ def test_patch_stopall(self):
+ unlink = os.unlink
+ chdir = os.chdir
+ path = os.path
+ patch('os.unlink', something).start()
+ patch('os.chdir', something_else).start()
+
+ @patch('os.path')
+ def patched(mock_path):
+ patch.stopall()
+ self.assertIs(os.path, mock_path)
+ self.assertIs(os.unlink, unlink)
+ self.assertIs(os.chdir, chdir)
+
+ patched()
+ self.assertIs(os.path, path)
+
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/testsentinel.py b/python/mock-1.0.0/tests/testsentinel.py
new file mode 100644
index 000000000..981171a45
--- /dev/null
+++ b/python/mock-1.0.0/tests/testsentinel.py
@@ -0,0 +1,33 @@
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# http://www.voidspace.org.uk/python/mock/
+
+from tests.support import unittest2
+
+from mock import sentinel, DEFAULT
+
+
+class SentinelTest(unittest2.TestCase):
+
+ def testSentinels(self):
+ self.assertEqual(sentinel.whatever, sentinel.whatever,
+ 'sentinel not stored')
+ self.assertNotEqual(sentinel.whatever, sentinel.whateverelse,
+ 'sentinel should be unique')
+
+
+ def testSentinelName(self):
+ self.assertEqual(str(sentinel.whatever), 'sentinel.whatever',
+ 'sentinel name incorrect')
+
+
+ def testDEFAULT(self):
+ self.assertTrue(DEFAULT is sentinel.DEFAULT)
+
+ def testBases(self):
+ # If this doesn't raise an AttributeError then help(mock) is broken
+ self.assertRaises(AttributeError, lambda: sentinel.__bases__)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tests/testwith.py b/python/mock-1.0.0/tests/testwith.py
new file mode 100644
index 000000000..34529eb9f
--- /dev/null
+++ b/python/mock-1.0.0/tests/testwith.py
@@ -0,0 +1,16 @@
+import sys
+
+if sys.version_info[:2] >= (2, 5):
+ from tests._testwith import *
+else:
+ from tests.support import unittest2
+
+ class TestWith(unittest2.TestCase):
+
+ @unittest2.skip('tests using with statement skipped on Python 2.4')
+ def testWith(self):
+ pass
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/python/mock-1.0.0/tox.ini b/python/mock-1.0.0/tox.ini
new file mode 100644
index 000000000..554f87096
--- /dev/null
+++ b/python/mock-1.0.0/tox.ini
@@ -0,0 +1,40 @@
+[tox]
+envlist = py24,py25,py26,py27,py31,pypy,py32,py33,jython
+
+[testenv]
+deps=unittest2
+commands={envbindir}/unit2 discover []
+
+[testenv:py26]
+commands=
+ {envbindir}/unit2 discover []
+ {envbindir}/sphinx-build -E -b doctest docs html
+ {envbindir}/sphinx-build -E docs html
+deps =
+ unittest2
+ sphinx
+
+[testenv:py27]
+commands=
+ {envbindir}/unit2 discover []
+ {envbindir}/sphinx-build -E -b doctest docs html
+deps =
+ unittest2
+ sphinx
+
+[testenv:py31]
+deps =
+ unittest2py3k
+
+[testenv:py32]
+commands=
+ {envbindir}/python -m unittest discover []
+deps =
+
+[testenv:py33]
+commands=
+ {envbindir}/python -m unittest discover []
+deps =
+
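+# Usage sketch (assuming tox is installed): plain `tox` runs every
+# environment in the envlist above; `tox -e py27` runs a single one.
+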
+# note for jython. Execute in tests directory:
+# rm `find . -name '*$py.class'` \ No newline at end of file
diff --git a/python/moz.build b/python/moz.build
new file mode 100644
index 000000000..7f11f6535
--- /dev/null
+++ b/python/moz.build
@@ -0,0 +1,88 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files('mach/**'):
+ BUG_COMPONENT = ('Core', 'mach')
+
+with Files('mozbuild/**'):
+ BUG_COMPONENT = ('Core', 'Build Config')
+
+SPHINX_PYTHON_PACKAGE_DIRS += [
+ 'mach',
+ 'mozbuild/mozbuild',
+ 'mozbuild/mozpack',
+ 'mozlint/mozlint',
+ 'mozversioncontrol/mozversioncontrol',
+]
+
+SPHINX_TREES['mach'] = 'mach/docs'
+
+PYTHON_UNIT_TESTS += [
+ 'mach/mach/test/test_conditions.py',
+ 'mach/mach/test/test_config.py',
+ 'mach/mach/test/test_dispatcher.py',
+ 'mach/mach/test/test_entry_point.py',
+ 'mach/mach/test/test_error_output.py',
+ 'mach/mach/test/test_logger.py',
+ 'mozbuild/dumbmake/test/test_dumbmake.py',
+ 'mozbuild/mozbuild/test/action/test_buildlist.py',
+ 'mozbuild/mozbuild/test/action/test_generate_browsersearch.py',
+ 'mozbuild/mozbuild/test/action/test_package_fennec_apk.py',
+ 'mozbuild/mozbuild/test/backend/test_android_eclipse.py',
+ 'mozbuild/mozbuild/test/backend/test_build.py',
+ 'mozbuild/mozbuild/test/backend/test_configenvironment.py',
+ 'mozbuild/mozbuild/test/backend/test_recursivemake.py',
+ 'mozbuild/mozbuild/test/backend/test_visualstudio.py',
+ 'mozbuild/mozbuild/test/compilation/test_warnings.py',
+ 'mozbuild/mozbuild/test/configure/lint.py',
+ 'mozbuild/mozbuild/test/configure/test_checks_configure.py',
+ 'mozbuild/mozbuild/test/configure/test_compile_checks.py',
+ 'mozbuild/mozbuild/test/configure/test_configure.py',
+ 'mozbuild/mozbuild/test/configure/test_lint.py',
+ 'mozbuild/mozbuild/test/configure/test_moz_configure.py',
+ 'mozbuild/mozbuild/test/configure/test_options.py',
+ 'mozbuild/mozbuild/test/configure/test_toolchain_configure.py',
+ 'mozbuild/mozbuild/test/configure/test_toolchain_helpers.py',
+ 'mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py',
+ 'mozbuild/mozbuild/test/configure/test_util.py',
+ 'mozbuild/mozbuild/test/controller/test_ccachestats.py',
+ 'mozbuild/mozbuild/test/controller/test_clobber.py',
+ 'mozbuild/mozbuild/test/frontend/test_context.py',
+ 'mozbuild/mozbuild/test/frontend/test_emitter.py',
+ 'mozbuild/mozbuild/test/frontend/test_namespaces.py',
+ 'mozbuild/mozbuild/test/frontend/test_reader.py',
+ 'mozbuild/mozbuild/test/frontend/test_sandbox.py',
+ 'mozbuild/mozbuild/test/test_base.py',
+ 'mozbuild/mozbuild/test/test_containers.py',
+ 'mozbuild/mozbuild/test/test_dotproperties.py',
+ 'mozbuild/mozbuild/test/test_expression.py',
+ 'mozbuild/mozbuild/test/test_jarmaker.py',
+ 'mozbuild/mozbuild/test/test_line_endings.py',
+ 'mozbuild/mozbuild/test/test_makeutil.py',
+ 'mozbuild/mozbuild/test/test_mozconfig.py',
+ 'mozbuild/mozbuild/test/test_mozinfo.py',
+ 'mozbuild/mozbuild/test/test_preprocessor.py',
+ 'mozbuild/mozbuild/test/test_pythonutil.py',
+ 'mozbuild/mozbuild/test/test_testing.py',
+ 'mozbuild/mozbuild/test/test_util.py',
+ 'mozbuild/mozpack/test/test_chrome_flags.py',
+ 'mozbuild/mozpack/test/test_chrome_manifest.py',
+ 'mozbuild/mozpack/test/test_copier.py',
+ 'mozbuild/mozpack/test/test_errors.py',
+ 'mozbuild/mozpack/test/test_files.py',
+ 'mozbuild/mozpack/test/test_manifests.py',
+ 'mozbuild/mozpack/test/test_mozjar.py',
+ 'mozbuild/mozpack/test/test_packager.py',
+ 'mozbuild/mozpack/test/test_packager_formats.py',
+ 'mozbuild/mozpack/test/test_packager_l10n.py',
+ 'mozbuild/mozpack/test/test_packager_unpack.py',
+ 'mozbuild/mozpack/test/test_path.py',
+ 'mozbuild/mozpack/test/test_unify.py',
+ 'mozlint/test/test_formatters.py',
+ 'mozlint/test/test_parser.py',
+ 'mozlint/test/test_roller.py',
+ 'mozlint/test/test_types.py',
+]
diff --git a/python/mozboot/README.rst b/python/mozboot/README.rst
new file mode 100644
index 000000000..a1366eea2
--- /dev/null
+++ b/python/mozboot/README.rst
@@ -0,0 +1,19 @@
+mozboot - Bootstrap your system to build Mozilla projects
+=========================================================
+
+This package contains code used for bootstrapping a system to build
+mozilla-central.
+
+This code is not part of the build system per se. Instead, it is related
+to everything up to invoking the actual build system.
+
+If you have a copy of the source tree, you can run::
+
+ python bin/bootstrap.py
+
+If you don't have a copy of the source tree, you can run::
+
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/python/mozboot/bin/bootstrap.py | python -
+
+The bootstrap script will download everything it needs from hg.mozilla.org
+automatically!
diff --git a/python/mozboot/bin/bootstrap-msys2.vbs b/python/mozboot/bin/bootstrap-msys2.vbs
new file mode 100644
index 000000000..304d4f9df
--- /dev/null
+++ b/python/mozboot/bin/bootstrap-msys2.vbs
@@ -0,0 +1,116 @@
+' This Source Code Form is subject to the terms of the Mozilla Public
+' License, v. 2.0. If a copy of the MPL was not distributed with this
+' file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+' This script downloads and installs MSYS2 and ConEmu, the preferred terminal emulator
+
+Sub Download(uri, path)
+ Dim httpRequest, stream
+
+ Set httpRequest = CreateObject("MSXML2.ServerXMLHTTP.6.0")
+ Set stream = CreateObject("Adodb.Stream")
+
+ httpRequest.Open "GET", uri, False
+ httpRequest.Send
+
+ With stream
+ .type = 1
+ .open
+ .write httpRequest.responseBody
+ .savetofile path, 2
+ End With
+End Sub
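+
+' Illustrative call (hypothetical URL and destination):
+'   Download "https://example.org/msys2.zip", "C:\mozdev\msys2.zip"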
+
+Function GetInstallPath()
+ Dim message, title
+
+ message = "When you click OK, we will download and extract a build environment to the directory specified. You should see various windows appear. Do NOT interact with them until one explicitly prompts you to continue." & vbCrLf & vbCrLf & "Installation Path:"
+ title = "Select Installation Location"
+ GetInstallPath = InputBox(message, title, "c:\mozdev")
+End Function
+
+Dim installPath, msysPath, conemuPath, conemuSettingsPath, conemuExecutable, bashExecutable
+Dim conemuSettingsURI, settingsFile, settingsText, fso, shell, msysArchive, appShell, errorCode
+Dim mingwExecutable
+
+' Set up OS interaction like filesystem and shell
+Set fso = CreateObject("Scripting.FileSystemObject")
+Set shell = CreateObject("WScript.Shell")
+Set appShell = CreateObject("Shell.Application")
+
+' Get where MSYS2 and ConEmu should be installed, create directories if necessary
+installPath = GetInstallPath()
+msysPath = fso.BuildPath(installPath, "msys64")
+conemuPath = fso.BuildPath(installPath, "ConEmu")
+If NOT fso.FolderExists(installPath) Then
+ fso.CreateFolder(installPath)
+End If
+If NOT fso.FolderExists(msysPath) Then
+ fso.CreateFolder(msysPath)
+End If
+If NOT fso.FolderExists(installPath) OR NOT fso.FolderExists(msysPath) Then
+ MsgBox("Failed to create folder. Do you have permission to install in this directory?")
+ WScript.Quit 1
+End If
+
+On Error Resume Next
+' Download and move MSYS2 into the right place
+Download "https://api.pub.build.mozilla.org/tooltool/sha512/f93a685c8a10abbd349cbef5306441ba235c4cbfba1cc000299e11b58f258e9953cbe23463515407925eeca94c3f5d8e5f637c95be387e620845efa43cdcb0c0", "msys2.zip"
+Set FilesInZip = appShell.NameSpace(fso.GetAbsolutePathName("msys2.zip")).Items()
+appShell.NameSpace(msysPath).CopyHere(FilesInZip)
+' The MSYS2 archive doesn't include a tmp directory, so create one.
+fso.CreateFolder(fso.BuildPath(msysPath, "tmp"))
+fso.DeleteFile("msys2.zip")
+If Err.Number <> 0 Then
+ MsgBox("Error downloading and installing MSYS2. Make sure you have internet connection. If you think this is a bug, please file one in Bugzilla https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config")
+ WScript.Quit 1
+End If
+On Error GoTo 0
+
+' Install ConEmu
+' Download installer
+On Error Resume Next
+Download "https://conemu.github.io/install2.ps1", "install2.ps1"
+conemuSettingsURI = "https://api.pub.build.mozilla.org/tooltool/sha512/9aa384ecc8025a974999e913c83064b3b797e05d19806e62ef558c8300e4c3f72967e9464ace59759f76216fc2fc66f338a1e5cdea3b9aa264529487f091d929"
+' Run installer
+errorCode = shell.Run("powershell.exe -NoProfile -ExecutionPolicy Unrestricted set dst '" & conemuPath & "'; set ver 'stable'; set lnk 'Mozilla Development Shell'; set xml '" & conemuSettingsURI & "'; set run $FALSE; .\install2.ps1", 0, true)
+' Delete ConEmu installer
+fso.DeleteFile("install2.ps1")
+If Err.Number <> 0 Then
+ MsgBox("Error downloading and installing ConEmu. Make sure you have internet connection and Powershell installed. If you think this is a bug, please file one in Bugzilla https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config")
+ WScript.Quit 1
+End If
+On Error GoTo 0
+
+' Replace paths in ConEmu settings file
+conemuSettingsPath = fso.BuildPath(conemuPath, "ConEmu.xml")
+Set settingsFile = fso.OpenTextFile(conemuSettingsPath, 1)
+settingsText = settingsFile.ReadAll
+settingsFile.Close
+settingsText = Replace(settingsText, "%MSYS2_PATH", msysPath)
+Set settingsFile = fso.OpenTextFile(conemuSettingsPath, 2)
+settingsFile.WriteLine settingsText
+settingsFile.Close
+
+' Make MSYS2 Mozilla-ready
+bashExecutable = fso.BuildPath(msysPath, fso.BuildPath("usr", fso.BuildPath("bin", "bash.exe")))
+conemuExecutable = fso.BuildPath(conemuPath, "ConEmu.exe")
+' There may be spaces in the paths to the executables; quoting ensures they're parsed correctly
+bashExecutable = """" & bashExecutable & """"
+conemuExecutable = """" & conemuExecutable & """"
+
+errorCode = shell.Run(bashExecutable & " -l -c 'logout'", 1, true)
+If errorCode <> 0 Then
+ MsgBox("MSYS2 initial setup failed. Make sure you have full access to the path you specified. If you think this is a bug, please file one in Bugzilla at https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config")
+ WScript.Quit 1
+End If
+
+errorCode = shell.Run(bashExecutable & " -l -c 'pacman -Syu --noconfirm wget mingw-w64-x86_64-python2-pip && logout'", 1, true)
+If errorCode <> 0 Then
+ MsgBox("Package update failed. Make sure you have internet access. If you think this is a bug, please file one in Bugzilla at https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config")
+ WScript.Quit 1
+End If
+
+errorCode = shell.Run(conemuExecutable & " -run set CHERE_INVOKING=1 & set MSYSTEM=MINGW64 & " & bashExecutable & " -cil 'export MOZ_WINDOWS_BOOTSTRAP=1 && cd """ & installPath & """ && wget -q https://hg.mozilla.org/mozilla-central/raw-file/default/python/mozboot/bin/bootstrap.py -O /tmp/bootstrap.py && python /tmp/bootstrap.py'", 1, true)
+If errorCode <> 0 Then
+ MsgBox("Bootstrap failed. Make sure you have internet access. If you think this is a bug, please file one in Bugzilla https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config")
+ WScript.Quit 1
+End If
diff --git a/python/mozboot/bin/bootstrap.py b/python/mozboot/bin/bootstrap.py
new file mode 100755
index 000000000..d916351e7
--- /dev/null
+++ b/python/mozboot/bin/bootstrap.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script provides one-line bootstrap support to configure systems to build
+# the tree.
+#
+# The role of this script is to load the Python modules containing actual
+# bootstrap support. It does this through various means, including fetching
+# content from the upstream source repository.
+
+# If we add unicode_literals, optparse breaks on Python 2.6.1 (which is needed
+# to support OS X 10.6).
+
+from __future__ import print_function
+
+WRONG_PYTHON_VERSION_MESSAGE = '''
+Bootstrap currently only runs on Python 2.7 or Python 2.6. Please try re-running with python2.7 or python2.6.
+
+If these aren't available on your system, you may need to install them. Look for a "python2" or "python27" package in your package manager.
+'''
+
+import sys
+if sys.version_info[:2] not in [(2, 6), (2, 7)]:
+ print(WRONG_PYTHON_VERSION_MESSAGE)
+ sys.exit(1)
+
+import os
+import shutil
+from StringIO import StringIO
+import tempfile
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+import zipfile
+
+from optparse import OptionParser
+
+# The following variable defines where in the repository the Python files
+# reside. This is used to remotely download file content when it isn't
+# available locally.
+REPOSITORY_PATH_PREFIX = 'python/mozboot/'
+
+TEMPDIR = None
+
+
+def setup_proxy():
+ # Some Linux environments define ALL_PROXY, which is a SOCKS proxy
+ # intended for all protocols. Python doesn't currently automatically
+ # detect this like it does for http_proxy and https_proxy.
+ if 'ALL_PROXY' in os.environ and 'https_proxy' not in os.environ:
+ os.environ['https_proxy'] = os.environ['ALL_PROXY']
+ if 'ALL_PROXY' in os.environ and 'http_proxy' not in os.environ:
+ os.environ['http_proxy'] = os.environ['ALL_PROXY']
+
+
+def fetch_files(repo_url, repo_type):
+ setup_proxy()
+ repo_url = repo_url.rstrip('/')
+
+ files = {}
+
+ if repo_type == 'hgweb':
+ url = repo_url + '/archive/default.zip/python/mozboot'
+ req = urlopen(url=url, timeout=30)
+ data = StringIO(req.read())
+ data.seek(0)
+ zf = zipfile.ZipFile(data, 'r')
+ for f in zf.infolist():
+ # The paths are prefixed with the repo and revision name before the
+ # directory name.
+ offset = f.filename.find(REPOSITORY_PATH_PREFIX) + len(REPOSITORY_PATH_PREFIX)
+ name = f.filename[offset:]
+
+ # We only care about the Python modules.
+ if not name.startswith('mozboot/'):
+ continue
+
+ files[name] = zf.read(f)
+ else:
+ raise NotImplementedError('Not sure how to handle repo type.', repo_type)
+
+ return files
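+
+# Illustrative use (hypothetical session): with the defaults from main()
+# below, this returns a mapping of module paths to file contents, e.g.
+#
+#   files = fetch_files('https://hg.mozilla.org/mozilla-central/', 'hgweb')
+#   # sorted(files.keys()) -> ['mozboot/__init__.py', 'mozboot/android.py', ...]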
+
+
+def ensure_environment(repo_url=None, repo_type=None):
+ """Ensure we can load the Python modules necessary to perform bootstrap."""
+
+ try:
+ from mozboot.bootstrap import Bootstrapper
+ return Bootstrapper
+ except ImportError:
+ # The first fallback is to assume we are running from a tree checkout
+ # and have the files in a sibling directory.
+ pardir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+ include = os.path.normpath(pardir)
+
+ sys.path.append(include)
+ try:
+ from mozboot.bootstrap import Bootstrapper
+ return Bootstrapper
+ except ImportError:
+ sys.path.pop()
+
+ # The next fallback is to download the files from the source
+ # repository.
+ files = fetch_files(repo_url, repo_type)
+
+ # Install them into a temporary location. They will be deleted
+ # after this script has finished executing.
+ global TEMPDIR
+ TEMPDIR = tempfile.mkdtemp()
+
+ for relpath in files.keys():
+ destpath = os.path.join(TEMPDIR, relpath)
+ destdir = os.path.dirname(destpath)
+
+ if not os.path.exists(destdir):
+ os.makedirs(destdir)
+
+ with open(destpath, 'wb') as fh:
+ fh.write(files[relpath])
+
+ # This should always work.
+ sys.path.append(TEMPDIR)
+ from mozboot.bootstrap import Bootstrapper
+ return Bootstrapper
+
+
+def main(args):
+ parser = OptionParser()
+ parser.add_option('-r', '--repo-url', dest='repo_url',
+ default='https://hg.mozilla.org/mozilla-central/',
+ help='Base URL of source control repository where bootstrap files can '
+ 'be downloaded.')
+ parser.add_option('--repo-type', dest='repo_type',
+ default='hgweb',
+ help='The type of the repository. This defines how we fetch file '
+ 'content. Like --repo-url, you should not need to set this.')
+
+ parser.add_option('--application-choice', dest='application_choice',
+ help='Pass in an application choice (e.g. browser, mobile_android) '
+ 'instead of using the default interactive prompt.')
+ parser.add_option('--no-interactive', dest='no_interactive', action='store_true',
+ help='Answer yes to any (Y/n) interactive prompts.')
+
+ options, leftover = parser.parse_args(args)
+
+ try:
+ try:
+ cls = ensure_environment(options.repo_url, options.repo_type)
+ except Exception as e:
+ print('Could not load the bootstrap Python environment.\n')
+ print('This should never happen. Consider filing a bug.\n')
+ print('\n')
+ print(e)
+ return 1
+ dasboot = cls(choice=options.application_choice, no_interactive=options.no_interactive)
+ dasboot.bootstrap()
+
+ return 0
+ finally:
+ if TEMPDIR is not None:
+ shutil.rmtree(TEMPDIR)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/python/mozboot/mozboot/__init__.py b/python/mozboot/mozboot/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozboot/mozboot/__init__.py
diff --git a/python/mozboot/mozboot/android.py b/python/mozboot/mozboot/android.py
new file mode 100644
index 000000000..cac000610
--- /dev/null
+++ b/python/mozboot/mozboot/android.py
@@ -0,0 +1,270 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# If we add unicode_literals, Python 2.6.1 (required for OS X 10.6) breaks.
+from __future__ import print_function
+
+import errno
+import os
+import stat
+import subprocess
+import sys
+
+# These are the platform and build-tools versions for building
+# mobile/android, respectively. Try to keep these in sync with the
+# build system and Mozilla's automation.
+ANDROID_TARGET_SDK = '23'
+ANDROID_BUILD_TOOLS_VERSION = '23.0.3'
+
+# These are the "Android packages" needed for building Firefox for Android.
+# Use |android list sdk --extended| to see these identifiers.
+ANDROID_PACKAGES = [
+ 'tools',
+ 'platform-tools',
+ 'build-tools-%s' % ANDROID_BUILD_TOOLS_VERSION,
+ 'android-%s' % ANDROID_TARGET_SDK,
+ 'extra-google-m2repository',
+ 'extra-android-m2repository',
+]
+
+ANDROID_NDK_EXISTS = '''
+Looks like you have the Android NDK installed at:
+%s
+'''
+
+ANDROID_SDK_EXISTS = '''
+Looks like you have the Android SDK installed at:
+%s
+We will install all required Android packages.
+'''
+
+NOT_INSTALLING_ANDROID_PACKAGES = '''
+It looks like you already have the following Android packages:
+%s
+No need to update!
+'''
+
+INSTALLING_ANDROID_PACKAGES = '''
+We are now installing the following Android packages:
+%s
+You may be prompted to agree to the Android license. You may see some
+output as packages are downloaded and installed.
+'''
+
+MISSING_ANDROID_PACKAGES = '''
+We tried to install the following Android packages:
+%s
+But it looks like we couldn't install:
+%s
+Install these Android packages manually and run this bootstrapper again.
+'''
+
+MOBILE_ANDROID_MOZCONFIG_TEMPLATE = '''
+Paste the lines between the chevrons (>>> and <<<) into your mozconfig file:
+
+<<<
+# Build Firefox for Android:
+ac_add_options --enable-application=mobile/android
+ac_add_options --target=arm-linux-androideabi
+
+# With the following Android SDK and NDK:
+ac_add_options --with-android-sdk="%s"
+ac_add_options --with-android-ndk="%s"
+>>>
+'''
+
+MOBILE_ANDROID_ARTIFACT_MODE_MOZCONFIG_TEMPLATE = '''
+Paste the lines between the chevrons (>>> and <<<) into your mozconfig file:
+
+<<<
+# Build Firefox for Android Artifact Mode:
+ac_add_options --enable-application=mobile/android
+ac_add_options --target=arm-linux-androideabi
+ac_add_options --enable-artifact-builds
+
+# With the following Android SDK:
+ac_add_options --with-android-sdk="%s"
+
+# Write build artifacts to:
+mk_add_options MOZ_OBJDIR=./objdir-frontend
+>>>
+'''
+
+
+def check_output(*args, **kwargs):
+ """Run subprocess.check_output even if Python doesn't provide it."""
+ from base import BaseBootstrapper
+ fn = getattr(subprocess, 'check_output', BaseBootstrapper._check_output)
+
+ return fn(*args, **kwargs)
+
+
+def list_missing_android_packages(android_tool, packages):
+ '''
+ Use the given |android| tool to return the sub-list of the given
+ Android |packages| that are not installed.
+ '''
+ missing = []
+
+ # There's no obvious way to see what's been installed already,
+ # but packages that are installed don't appear in the list of
+ # available packages.
+ lines = check_output([android_tool,
+ 'list', 'sdk', '--no-ui', '--extended']).splitlines()
+
+ # Lines look like: 'id: 59 or "extra-google-simulators"'
+ for line in lines:
+ is_id_line = False
+ try:
+ is_id_line = line.startswith("id:")
+ except:
+ # Some lines contain non-ASCII characters. Ignore them.
+ pass
+ if not is_id_line:
+ continue
+
+ for package in packages:
+ if '"%s"' % package in line:
+ # Not installed!
+ missing.append(package)
+
+ return missing
+
+
+def install_mobile_android_sdk_or_ndk(url, path):
+ '''
+ Fetch an Android SDK or NDK from |url| and unpack it into
+ the given |path|.
+
+ We expect wget to be installed and found on the system path.
+
+ We use, and wget respects, https. We could also include SHAs for a
+ small improvement in the integrity guarantee we give. But this script is
+ bootstrapped over https anyway, so it's a really minor improvement.
+
+ We use |wget --continue| as a cheap cache of the downloaded artifacts,
+ writing into |path|/mozboot. We don't yet clean the cache; it's better
+ to waste disk and not require a long re-download than to wipe the cache
+ prematurely.
+ '''
+
+ old_path = os.getcwd()
+ try:
+ download_path = os.path.join(path, 'mozboot')
+ try:
+ os.makedirs(download_path)
+ except OSError as e:
+ if e.errno == errno.EEXIST and os.path.isdir(download_path):
+ pass
+ else:
+ raise
+
+ os.chdir(download_path)
+ subprocess.check_call(['wget', '--continue', url])
+ file = url.split('/')[-1]
+
+ os.chdir(path)
+ abspath = os.path.join(download_path, file)
+ if file.endswith('.tar.gz') or file.endswith('.tgz'):
+ cmd = ['tar', 'zxf', abspath]
+ elif file.endswith('.tar.bz2'):
+ cmd = ['tar', 'jxf', abspath]
+ elif file.endswith('.zip'):
+ cmd = ['unzip', '-q', abspath]
+ elif file.endswith('.bin'):
+ # Execute the .bin file, which unpacks the content.
+ mode = os.stat(path).st_mode
+ os.chmod(abspath, mode | stat.S_IXUSR)
+ cmd = [abspath]
+ else:
+ raise NotImplementedError("Don't know how to unpack file: %s" % file)
+
+ print('Unpacking %s...' % abspath)
+
+ with open(os.devnull, "w") as stdout:
+ # These unpack commands produce a ton of output; ignore it. The
+ # .bin files are 7z archives; there's no command line flag to quiet
+ # output, so we use this hammer.
+ subprocess.check_call(cmd, stdout=stdout)
+
+ print('Unpacking %s... DONE' % abspath)
+
+ finally:
+ os.chdir(old_path)
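+
+# Illustrative call (the SDK URL is the one the Linux bootstrappers in this
+# patch use; the destination is the default state directory):
+#   install_mobile_android_sdk_or_ndk(
+#       'https://dl.google.com/android/android-sdk_r24.0.1-linux.tgz',
+#       os.path.expanduser('~/.mozbuild'))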
+
+
+def ensure_android_sdk_and_ndk(path, sdk_path, sdk_url, ndk_path, ndk_url, artifact_mode):
+ '''
+ Ensure the Android SDK and NDK are found at the given paths. If not, fetch
+ and unpack the SDK and/or NDK from the given URLs into |path|.
+ '''
+
+ # It's not particularly bad to overwrite the NDK toolchain, but it does take
+ # a while to unpack, so let's avoid the disk activity if possible. The SDK
+ # may prompt about licensing, so we do this first.
+ # Check for Android NDK only if we are not in artifact mode.
+ if not artifact_mode:
+ if os.path.isdir(ndk_path):
+ print(ANDROID_NDK_EXISTS % ndk_path)
+ else:
+ install_mobile_android_sdk_or_ndk(ndk_url, path)
+
+ # We don't want to blindly overwrite, since we use the |android| tool to
+ # install additional parts of the Android toolchain. If we overwrite,
+ # we lose whatever Android packages the user may have already installed.
+ if os.path.isdir(sdk_path):
+ print(ANDROID_SDK_EXISTS % sdk_path)
+ else:
+ install_mobile_android_sdk_or_ndk(sdk_url, path)
+
+
+def ensure_android_packages(android_tool, packages=None):
+ '''
+ Use the given android tool (like 'android') to install required Android
+ packages.
+ '''
+
+ if not packages:
+ packages = ANDROID_PACKAGES
+
+ # Bug 1171232: The |android| tool behaviour has changed; we no longer can
+ # see what packages are installed easily. Force installing everything until
+ # we find a way to actually see the missing packages.
+ missing = packages
+ if not missing:
+ print(NOT_INSTALLING_ANDROID_PACKAGES % ', '.join(packages))
+ return
+
+ # This tries to install all the required Android packages. The user
+ # may be prompted to agree to the Android license.
+ print(INSTALLING_ANDROID_PACKAGES % ', '.join(missing))
+ subprocess.check_call([android_tool,
+ 'update', 'sdk', '--no-ui', '--all',
+ '--filter', ','.join(missing)])
+
+ # Bug 1171232: The |android| tool behaviour has changed; we no longer can
+ # see what packages are installed easily. Don't check until we find a way
+ # to actually verify.
+ failing = []
+ if failing:
+ raise Exception(MISSING_ANDROID_PACKAGES % (', '.join(missing), ', '.join(failing)))
+
+
+def suggest_mozconfig(sdk_path=None, ndk_path=None, artifact_mode=False):
+ if artifact_mode:
+ print(MOBILE_ANDROID_ARTIFACT_MODE_MOZCONFIG_TEMPLATE % (sdk_path))
+ else:
+ print(MOBILE_ANDROID_MOZCONFIG_TEMPLATE % (sdk_path, ndk_path))
+
+
+def android_ndk_url(os_name, ver='r11b'):
+ # Produce a URL like 'https://dl.google.com/android/repository/android-ndk-r11b-linux-x86_64.zip'.
+ base_url = 'https://dl.google.com/android/repository/android-ndk'
+
+ if sys.maxsize > 2**32:
+ arch = 'x86_64'
+ else:
+ arch = 'x86'
+
+ return '%s-%s-%s-%s.zip' % (base_url, ver, os_name, arch)
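+
+# For example, on a 64-bit Linux host:
+#   android_ndk_url('linux')
+#   -> 'https://dl.google.com/android/repository/android-ndk-r11b-linux-x86_64.zip'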
diff --git a/python/mozboot/mozboot/archlinux.py b/python/mozboot/mozboot/archlinux.py
new file mode 100644
index 000000000..12ce6b4a4
--- /dev/null
+++ b/python/mozboot/mozboot/archlinux.py
@@ -0,0 +1,223 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import tempfile
+import subprocess
+import glob
+
+from mozboot.base import BaseBootstrapper
+
+
+class ArchlinuxBootstrapper(BaseBootstrapper):
+ '''Archlinux experimental bootstrapper.'''
+
+ SYSTEM_PACKAGES = [
+ 'autoconf2.13',
+ 'base-devel',
+ 'ccache',
+ 'mercurial',
+ 'python2',
+ 'python2-setuptools',
+ 'unzip',
+ 'zip',
+ ]
+
+ BROWSER_PACKAGES = [
+ 'alsa-lib',
+ 'dbus-glib',
+ 'desktop-file-utils',
+ 'gconf',
+ 'gtk2',
+ 'gtk3',
+ 'hicolor-icon-theme',
+ 'hunspell',
+ 'icu',
+ 'libevent',
+ 'libvpx',
+ 'libxt',
+ 'mime-types',
+ 'mozilla-common',
+ 'nss',
+ 'sqlite',
+ 'startup-notification',
+ 'diffutils',
+ 'gst-plugins-base-libs',
+ 'imake',
+ 'inetutils',
+ 'libpulse',
+ 'mercurial',
+ 'mesa',
+ 'python2',
+ 'unzip',
+ 'xorg-server-xvfb',
+ 'yasm',
+ 'zip',
+ 'gst-libav',
+ 'gst-plugins-good',
+ 'networkmanager',
+ ]
+
+ BROWSER_AUR_PACKAGES = [
+ 'https://aur.archlinux.org/cgit/aur.git/snapshot/uuid.tar.gz',
+ ]
+
+ MOBILE_ANDROID_COMMON_PACKAGES = [
+ 'zlib', # mobile/android requires system zlib.
+ 'jdk7-openjdk', # It would be nice to handle alternative JDKs. See https://wiki.archlinux.org/index.php/Java.
+ 'wget', # For downloading the Android SDK and NDK.
+ 'multilib/lib32-libstdc++5', # See comment about 32 bit binaries and multilib below.
+ 'multilib/lib32-ncurses',
+ 'multilib/lib32-readline',
+ 'multilib/lib32-zlib',
+ ]
+
+ def __init__(self, version, dist_id, **kwargs):
+ print('Using an experimental bootstrapper for Archlinux.')
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ def install_system_packages(self):
+ self.pacman_install(*self.SYSTEM_PACKAGES)
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def install_mobile_android_packages(self):
+ self.ensure_mobile_android_packages()
+
+ def install_mobile_android_artifact_mode_packages(self):
+ self.ensure_mobile_android_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ self.aur_install(*self.BROWSER_AUR_PACKAGES)
+ self.pacman_install(*self.BROWSER_PACKAGES)
+
+ def ensure_mobile_android_packages(self, artifact_mode=False):
+ import android
+
+ # Multi-part process:
+ # 1. System packages.
+ # 2. Android SDK. Android NDK only if we are not in artifact mode.
+ # 3. Android packages.
+
+ # 1. This is hard to believe, but the Android SDK binaries are 32-bit
+ # and that conflicts with 64-bit Arch installations out of the box. The
+ # solution is to add the multilibs repository; unfortunately, this
+ # requires manual intervention.
+ try:
+ self.pacman_install(*self.MOBILE_ANDROID_COMMON_PACKAGES)
+ except Exception as e:
+ print('Failed to install all packages. The Android developer '
+ 'toolchain requires 32 bit binaries be enabled (see '
+ 'https://wiki.archlinux.org/index.php/Android). You may need to '
+ 'manually enable the multilib repository following the instructions '
+ 'at https://wiki.archlinux.org/index.php/Multilib.')
+ raise e
+
+ # 2. The user may have an external Android SDK (in which case we save
+ # them a lengthy download), or they may have already completed the
+ # download. We unpack to ~/.mozbuild/{android-sdk-linux, android-ndk-r11b}.
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH', os.path.expanduser(os.path.join('~', '.mozbuild')))
+ self.sdk_path = os.environ.get('ANDROID_SDK_HOME', os.path.join(mozbuild_path, 'android-sdk-linux'))
+ self.ndk_path = os.environ.get('ANDROID_NDK_HOME', os.path.join(mozbuild_path, 'android-ndk-r11b'))
+ self.sdk_url = 'https://dl.google.com/android/android-sdk_r24.0.1-linux.tgz'
+ self.ndk_url = android.android_ndk_url('linux')
+
+ android.ensure_android_sdk_and_ndk(path=mozbuild_path,
+ sdk_path=self.sdk_path, sdk_url=self.sdk_url,
+ ndk_path=self.ndk_path, ndk_url=self.ndk_url,
+ artifact_mode=artifact_mode)
+ android_tool = os.path.join(self.sdk_path, 'tools', 'android')
+ android.ensure_android_packages(android_tool=android_tool)
+
+ def suggest_mobile_android_mozconfig(self, artifact_mode=False):
+ import android
+ android.suggest_mozconfig(sdk_path=self.sdk_path,
+ ndk_path=self.ndk_path,
+ artifact_mode=artifact_mode)
+
+ def suggest_mobile_android_artifact_mode_mozconfig(self):
+ self.suggest_mobile_android_mozconfig(artifact_mode=True)
+
+ def _update_package_manager(self):
+ self.pacman_update()
+
+ def upgrade_mercurial(self, current):
+ self.pacman_install('mercurial')
+
+ def upgrade_python(self, current):
+ self.pacman_install('python2')
+
+ def pacman_install(self, *packages):
+ command = ['pacman', '-S', '--needed']
+ if self.no_interactive:
+ command.append('--noconfirm')
+
+ command.extend(packages)
+
+ self.run_as_root(command)
+
+ def pacman_update(self):
+ command = ['pacman', '-S', '--refresh']
+
+ self.run_as_root(command)
+
+ def run(self, command, env=None):
+ subprocess.check_call(command, stdin=sys.stdin, env=env)
+
+ def download(self, uri):
+ command = ['curl', '-L', '-O', uri]
+ self.run(command)
+
+ def unpack(self, path, name, ext):
+ if ext == 'gz':
+ compression = '-z'
+ elif ext == 'bz':
+ compression = '-j'
+ elif ext == 'xz':
+ compression = '-J'
+ else:
+ raise NotImplementedError('Unknown archive extension: %s' % ext)
+
+ name = os.path.join(path, name) + '.tar.' + ext
+ command = ['tar', '-x', compression, '-f', name, '-C', path]
+ self.run(command)
+
+ def makepkg(self, name):
+ command = ['makepkg', '-s']
+ makepkg_env = os.environ.copy()
+ makepkg_env['PKGEXT'] = '.pkg.tar.xz'
+ self.run(command, env=makepkg_env)
+ pack = glob.glob(name + '*.pkg.tar.xz')[0]
+ command = ['pacman', '-U']
+ if self.no_interactive:
+ command.append('--noconfirm')
+ command.append(pack)
+ self.run_as_root(command)
+
+ def aur_install(self, *packages):
+ path = tempfile.mkdtemp()
+ if not self.no_interactive:
+ print('WARNING! This script requires installing packages from the AUR. '
+ 'This is potentially insecure, so we recommend that you carefully '
+ 'read each package description and check the sources. '
+ 'These packages will be built in ' + path + '.')
+ choice = raw_input('Do you want to continue? (yes/no) [no]')
+ if choice != 'yes':
+ sys.exit(1)
+
+ base_dir = os.getcwd()
+ os.chdir(path)
+ for package in packages:
+ name, _, ext = package.split('/')[-1].split('.')
+ directory = os.path.join(path, name)
+ self.download(package)
+ self.unpack(path, name, ext)
+ os.chdir(directory)
+ self.makepkg(name)
+
+ os.chdir(base_dir)
diff --git a/python/mozboot/mozboot/base.py b/python/mozboot/mozboot/base.py
new file mode 100644
index 000000000..0e1871da9
--- /dev/null
+++ b/python/mozboot/mozboot/base.py
@@ -0,0 +1,452 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function, unicode_literals
+
+import hashlib
+import os
+import re
+import subprocess
+import sys
+import urllib2
+
+from distutils.version import LooseVersion
+
+
+NO_MERCURIAL = '''
+Could not find Mercurial (hg) in the current shell's path. Try starting a new
+shell and running the bootstrapper again.
+'''
+
+MERCURIAL_UNABLE_UPGRADE = '''
+You are currently running Mercurial %s. Running %s or newer is
+recommended for performance and stability reasons.
+
+Unfortunately, this bootstrapper currently does not know how to automatically
+upgrade Mercurial on your machine.
+
+You can usually install Mercurial through your package manager or by
+downloading a package from http://mercurial.selenic.com/.
+'''
+
+MERCURIAL_UPGRADE_FAILED = '''
+We attempted to upgrade Mercurial to a modern version (%s or newer).
+However, you appear to have version %s still.
+
+It's possible your package manager doesn't support a modern version of
+Mercurial. It's also possible Mercurial is not being installed in the search
+path for this shell. Try creating a new shell and running this bootstrapper again.
+
+If it continues to fail, consider installing Mercurial by following the
+instructions at http://mercurial.selenic.com/.
+'''
+
+PYTHON_UNABLE_UPGRADE = '''
+You are currently running Python %s. Running %s or newer (but
+not 3.x) is required.
+
+Unfortunately, this bootstrapper does not currently know how to automatically
+upgrade Python on your machine.
+
+Please search the Internet for how to upgrade your Python and try running this
+bootstrapper again to ensure your machine is up to date.
+'''
+
+PYTHON_UPGRADE_FAILED = '''
+We attempted to upgrade Python to a modern version (%s or newer).
+However, you appear to still have version %s.
+
+It's possible your package manager doesn't yet expose a modern version of
+Python. It's also possible Python is not being installed in the search path for
+this shell. Try creating a new shell and running this bootstrapper again.
+
+If this continues to fail and you are sure you have a modern Python on your
+system, ensure it is on the $PATH and try again. If that fails, you'll need to
+install Python manually and ensure the path with the python binary is listed in
+the $PATH environment variable.
+
+We recommend the following tools for installing Python:
+
+ pyenv -- https://github.com/yyuu/pyenv
+ pythonz -- https://github.com/saghul/pythonz
+ official installers -- http://www.python.org/
+'''
+
+BROWSER_ARTIFACT_MODE_MOZCONFIG = '''
+Paste the lines between the chevrons (>>> and <<<) into your mozconfig file:
+
+<<<
+# Automatically download and use compiled C++ components:
+ac_add_options --enable-artifact-builds
+>>>
+'''
+
+# Upgrade Mercurial older than this.
+# This should match OLDEST_NON_LEGACY_VERSION from
+# the hg setup wizard in version-control-tools.
+MODERN_MERCURIAL_VERSION = LooseVersion('3.7.3')
+
+# Upgrade Python older than this.
+MODERN_PYTHON_VERSION = LooseVersion('2.7.3')
+
+
+class BaseBootstrapper(object):
+ """Base class for system bootstrappers."""
+
+ def __init__(self, no_interactive=False):
+ self.package_manager_updated = False
+ self.no_interactive = no_interactive
+
+ def install_system_packages(self):
+ '''
+ Install packages shared by all applications. These are usually
+ packages required by the development (like mercurial) or the
+ build system (like autoconf).
+ '''
+ raise NotImplementedError('%s must implement install_system_packages()' %
+ __name__)
+
+ def install_browser_packages(self):
+ '''
+ Install packages required to build Firefox for Desktop (application
+ 'browser').
+ '''
+ raise NotImplementedError('Cannot bootstrap Firefox for Desktop: '
+ '%s does not yet implement install_browser_packages()' %
+ __name__)
+
+ def suggest_browser_mozconfig(self):
+ '''
+ Print a message to the console detailing what the user's mozconfig
+ should contain.
+
+ Firefox for Desktop can in simple cases determine its build environment
+ entirely from configure.
+ '''
+ pass
+
+ def install_browser_artifact_mode_packages(self):
+ '''
+ Install packages required to build Firefox for Desktop (application
+ 'browser') in Artifact Mode.
+ '''
+ raise NotImplementedError(
+ 'Cannot bootstrap Firefox for Desktop Artifact Mode: '
+ '%s does not yet implement install_browser_artifact_mode_packages()' %
+ __name__)
+
+ def suggest_browser_artifact_mode_mozconfig(self):
+ '''
+ Print a message to the console detailing what the user's mozconfig
+ should contain.
+
+ Firefox for Desktop Artifact Mode needs to enable artifact builds and
+ a path where the build artifacts will be written to.
+ '''
+ print(BROWSER_ARTIFACT_MODE_MOZCONFIG)
+
+ def install_mobile_android_packages(self):
+ '''
+ Install packages required to build Firefox for Android (application
+ 'mobile/android', also known as Fennec).
+ '''
+ raise NotImplementedError('Cannot bootstrap Firefox for Android: '
+ '%s does not yet implement install_mobile_android_packages()'
+ % __name__)
+
+ def suggest_mobile_android_mozconfig(self):
+ '''
+ Print a message to the console detailing what the user's mozconfig
+ should contain.
+
+ Firefox for Android needs an application and an ABI set, and it needs
+ paths to the Android SDK and NDK.
+ '''
+ raise NotImplementedError('%s does not yet implement suggest_mobile_android_mozconfig()' %
+ __name__)
+
+ def install_mobile_android_artifact_mode_packages(self):
+ '''
+ Install packages required to build Firefox for Android (application
+ 'mobile/android', also known as Fennec) in Artifact Mode.
+ '''
+ raise NotImplementedError(
+ 'Cannot bootstrap Firefox for Android Artifact Mode: '
+ '%s does not yet implement install_mobile_android_artifact_mode_packages()'
+ % __name__)
+
+ def suggest_mobile_android_artifact_mode_mozconfig(self):
+ '''
+ Print a message to the console detailing what the user's mozconfig
+ should contain.
+
+ Firefox for Android Artifact Mode needs an application and an ABI set,
+ and it needs paths to the Android SDK.
+ '''
+ raise NotImplementedError(
+ '%s does not yet implement suggest_mobile_android_artifact_mode_mozconfig()'
+ % __name__)
+
+ def which(self, name):
+ """Python implementation of which.
+
+ It returns the path of an executable or None if it couldn't be found.
+ """
+ for path in os.environ['PATH'].split(os.pathsep):
+ test = os.path.join(path, name)
+ if os.path.exists(test) and os.access(test, os.X_OK):
+ return test
+
+ return None
+
+ def run_as_root(self, command):
+ if os.geteuid() != 0:
+ if self.which('sudo'):
+ command.insert(0, 'sudo')
+ else:
+ command = ['su', 'root', '-c', ' '.join(command)]
+
+ print('Executing as root:', subprocess.list2cmdline(command))
+
+ subprocess.check_call(command, stdin=sys.stdin)
+
+ def dnf_install(self, *packages):
+ if self.which('dnf'):
+ command = ['dnf', 'install']
+ else:
+ command = ['yum', 'install']
+
+ if self.no_interactive:
+ command.append('-y')
+ command.extend(packages)
+
+ self.run_as_root(command)
+
+ def dnf_groupinstall(self, *packages):
+ if self.which('dnf'):
+ command = ['dnf', 'groupinstall']
+ else:
+ command = ['yum', 'groupinstall']
+
+ if self.no_interactive:
+ command.append('-y')
+ command.extend(packages)
+
+ self.run_as_root(command)
+
+ def dnf_update(self, *packages):
+ if self.which('dnf'):
+ command = ['dnf', 'update']
+ else:
+ command = ['yum', 'update']
+
+ if self.no_interactive:
+ command.append('-y')
+ command.extend(packages)
+
+ self.run_as_root(command)
+
+ def apt_install(self, *packages):
+ command = ['apt-get', 'install']
+ if self.no_interactive:
+ command.append('-y')
+ command.extend(packages)
+
+ self.run_as_root(command)
+
+ def apt_update(self):
+ command = ['apt-get', 'update']
+ if self.no_interactive:
+ command.append('-y')
+
+ self.run_as_root(command)
+
+ def apt_add_architecture(self, arch):
+ command = ['dpkg', '--add-architecture']
+ command.extend(arch)
+
+ self.run_as_root(command)
+
+ def check_output(self, *args, **kwargs):
+ """Run subprocess.check_output even if Python doesn't provide it."""
+ fn = getattr(subprocess, 'check_output', BaseBootstrapper._check_output)
+
+ return fn(*args, **kwargs)
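+
+ # Illustrative use (hypothetical): self.check_output(['hg', '--version'])
+ # returns the command's stdout, or raises CalledProcessError on failure.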
+
+ @staticmethod
+ def _check_output(*args, **kwargs):
+ """Python 2.6 compatible implementation of subprocess.check_output."""
+ proc = subprocess.Popen(stdout=subprocess.PIPE, *args, **kwargs)
+ output, unused_err = proc.communicate()
+ retcode = proc.poll()
+ if retcode:
+ cmd = kwargs.get('args', args[0])
+ e = subprocess.CalledProcessError(retcode, cmd)
+ e.output = output
+ raise e
+
+ return output
+
+ def prompt_int(self, prompt, low, high, limit=5):
+ ''' Prompts the user with prompt and requires an integer between low and high. '''
+ valid = False
+ while not valid and limit > 0:
+ try:
+ choice = int(raw_input(prompt))
+ if not low <= choice <= high:
+ print("ERROR! Please enter a valid option!")
+ limit -= 1
+ else:
+ valid = True
+ except ValueError:
+ print("ERROR! Please enter a valid option!")
+ limit -= 1
+
+ if limit > 0:
+ return choice
+ else:
+ raise Exception("Error! Reached max attempts of entering option.")
+
+ def _ensure_package_manager_updated(self):
+ if self.package_manager_updated:
+ return
+
+ self._update_package_manager()
+ self.package_manager_updated = True
+
+ def _update_package_manager(self):
+ """Updates the package manager's manifests/package list.
+
+ This should be defined in child classes.
+ """
+
+ def _hgplain_env(self):
+ """ Returns a copy of the current environment updated with the HGPLAIN
+ environment variable.
+
+ HGPLAIN prevents Mercurial from applying locale variations to the output,
+ making it suitable for use in scripts.
+ """
+ env = os.environ.copy()
+ env[b'HGPLAIN'] = b'1'
+
+ return env
+
+ def is_mercurial_modern(self):
+ hg = self.which('hg')
+ if not hg:
+ print(NO_MERCURIAL)
+ return False, False, None
+
+ info = self.check_output([hg, '--version'], env=self._hgplain_env()).splitlines()[0]
+
+ match = re.search('version ([^\+\)]+)', info)
+ if not match:
+ print('ERROR: Unable to identify Mercurial version.')
+ return True, False, None
+
+ our = LooseVersion(match.group(1))
+
+ return True, our >= MODERN_MERCURIAL_VERSION, our
+
+ def ensure_mercurial_modern(self):
+ installed, modern, version = self.is_mercurial_modern()
+
+ if modern:
+ print('Your version of Mercurial (%s) is sufficiently modern.' %
+ version)
+ return installed, modern
+
+ self._ensure_package_manager_updated()
+
+ if installed:
+ print('Your version of Mercurial (%s) is not modern enough.' %
+ version)
+ print('(Older versions of Mercurial have known security vulnerabilities. '
+ 'Unless you are running a patched Mercurial version, you may be '
+ 'vulnerable.)')
+ else:
+ print('You do not have Mercurial installed')
+
+ if self.upgrade_mercurial(version) is False:
+ return installed, modern
+
+ installed, modern, after = self.is_mercurial_modern()
+
+ if installed and not modern:
+ print(MERCURIAL_UPGRADE_FAILED % (MODERN_MERCURIAL_VERSION, after))
+
+ return installed, modern
+
+ def upgrade_mercurial(self, current):
+ """Upgrade Mercurial.
+
+ Child classes should reimplement this.
+
+ Return False to not perform a version check after the upgrade is
+ performed.
+ """
+ print(MERCURIAL_UNABLE_UPGRADE % (current, MODERN_MERCURIAL_VERSION))
+
+ def is_python_modern(self):
+ python = None
+
+ for test in ['python2.7', 'python']:
+ python = self.which(test)
+ if python:
+ break
+
+ assert python
+
+ info = self.check_output([python, '--version'],
+ stderr=subprocess.STDOUT)
+ match = re.search('Python ([a-z0-9\.]+)', info)
+ if not match:
+ print('ERROR: Unable to identify Python version.')
+ return False, None
+
+ our = LooseVersion(match.group(1))
+
+ return our >= MODERN_PYTHON_VERSION, our
+
+ def ensure_python_modern(self):
+ modern, version = self.is_python_modern()
+
+ if modern:
+ print('Your version of Python (%s) is new enough.' % version)
+ return
+
+ print('Your version of Python (%s) is too old. Will try to upgrade.' %
+ version)
+
+ self._ensure_package_manager_updated()
+ self.upgrade_python(version)
+
+ modern, after = self.is_python_modern()
+
+ if not modern:
+ print(PYTHON_UPGRADE_FAILED % (MODERN_PYTHON_VERSION, after))
+ sys.exit(1)
+
+ def upgrade_python(self, current):
+ """Upgrade Python.
+
+ Child classes should reimplement this.
+ """
+ print(PYTHON_UNABLE_UPGRADE % (current, MODERN_PYTHON_VERSION))
+
+ def http_download_and_save(self, url, dest, sha256hexhash):
+ f = urllib2.urlopen(url)
+ h = hashlib.sha256()
+ with open(dest, 'wb') as out:
+ while True:
+ data = f.read(4096)
+ if data:
+ out.write(data)
+ h.update(data)
+ else:
+ break
+ if h.hexdigest() != sha256hexhash:
+ os.remove(dest)
+ raise ValueError('Hash of downloaded file does not match expected hash')
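+
+ # Illustrative use (hypothetical URL and digest):
+ #   self.http_download_and_save('https://example.org/tool.tar.gz',
+ #                               '/tmp/tool.tar.gz', expected_sha256)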
diff --git a/python/mozboot/mozboot/bootstrap.py b/python/mozboot/mozboot/bootstrap.py
new file mode 100644
index 000000000..40bb7cc86
--- /dev/null
+++ b/python/mozboot/mozboot/bootstrap.py
@@ -0,0 +1,437 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# If we add unicode_literals, Python 2.6.1 (required for OS X 10.6) breaks.
+from __future__ import print_function
+
+import platform
+import sys
+import os
+import subprocess
+
+# Don't forget to add new mozboot modules to the bootstrap download
+# list in bin/bootstrap.py!
+from mozboot.centosfedora import CentOSFedoraBootstrapper
+from mozboot.debian import DebianBootstrapper
+from mozboot.freebsd import FreeBSDBootstrapper
+from mozboot.gentoo import GentooBootstrapper
+from mozboot.osx import OSXBootstrapper
+from mozboot.openbsd import OpenBSDBootstrapper
+from mozboot.archlinux import ArchlinuxBootstrapper
+from mozboot.windows import WindowsBootstrapper
+from mozboot.mozillabuild import MozillaBuildBootstrapper
+from mozboot.util import (
+ get_state_dir,
+)
+
+APPLICATION_CHOICE = '''
+Please choose the version of Firefox you want to build:
+%s
+
+Note on Artifact Mode:
+
+Firefox for Desktop and Android supports a fast build mode called
+artifact mode. Artifact mode downloads pre-built C++ components rather
+than building them locally, trading bandwidth for time.
+
+Artifact builds will be useful to many developers who are not working
+with compiled code. If you want to work on the look-and-feel of Firefox,
+you want "Firefox for Desktop Artifact Mode".
+
+Similarly, if you want to work on the look-and-feel of Firefox for Android,
+you want "Firefox for Android Artifact Mode".
+
+To work on the Gecko technology platform, you will need to opt for full,
+non-artifact mode. Gecko is Mozilla's web rendering engine, similar to Edge,
+Blink, and WebKit. Gecko is implemented in C++ and JavaScript. If you
+want to work on web rendering, you want "Firefox for Desktop", or
+"Firefox for Android".
+
+If you don't know what you want, start with just Artifact Mode of the desired
+platform. Your builds will be much faster than if you build Gecko as well.
+But don't worry! You can always switch configurations later.
+
+You can learn more about Artifact mode builds at
+https://developer.mozilla.org/en-US/docs/Artifact_builds.
+
+Your choice:
+'''
+
+APPLICATIONS_LIST = [
+ ('Firefox for Desktop Artifact Mode', 'browser_artifact_mode'),
+ ('Firefox for Desktop', 'browser'),
+ ('Firefox for Android Artifact Mode', 'mobile_android_artifact_mode'),
+ ('Firefox for Android', 'mobile_android'),
+]
+
+# This is a workaround for the fact that we must support python2.6 (which has
+# no OrderedDict)
+APPLICATIONS = dict(
+ browser_artifact_mode=APPLICATIONS_LIST[0],
+ browser=APPLICATIONS_LIST[1],
+ mobile_android_artifact_mode=APPLICATIONS_LIST[2],
+ mobile_android=APPLICATIONS_LIST[3],
+)
+
+STATE_DIR_INFO = '''
+The Firefox build system and related tools store shared, persistent state
+in a common directory on the filesystem. On this machine, that directory
+is:
+
+ {statedir}
+
+If you would like to use a different directory, hit CTRL+c and set the
+MOZBUILD_STATE_PATH environment variable to the directory you'd like to
+use and re-run the bootstrapper.
+
+Would you like to create this directory?
+
+ 1. Yes
+ 2. No
+
+Your choice:
+'''
+
+FINISHED = '''
+Your system should be ready to build %s!
+'''
+
+SOURCE_ADVERTISE = '''
+Source code can be obtained by running
+
+ hg clone https://hg.mozilla.org/mozilla-unified
+
+Or, if you prefer Git, you should install git-cinnabar, and follow the
+instructions here to clone from the Mercurial repository:
+
+ https://github.com/glandium/git-cinnabar/wiki/Mozilla:-A-git-workflow-for-Gecko-development
+
+Or, if you really prefer vanilla-flavored Git:
+
+ git clone https://git.mozilla.org/integration/gecko-dev.git
+'''
+
+CONFIGURE_MERCURIAL = '''
+Mozilla recommends a number of changes to Mercurial to enhance your
+experience with it.
+
+Would you like to run a configuration wizard to ensure Mercurial is
+optimally configured?
+
+ 1. Yes
+ 2. No
+
+Please enter your reply: '''.lstrip()
+
+CLONE_MERCURIAL = '''
+If you would like to clone the canonical Mercurial repository, please
+enter the destination path below.
+
+(If you prefer to use Git, leave this blank.)
+
+Destination directory for Mercurial clone (leave empty to not clone): '''.lstrip()
+
+
+DEBIAN_DISTROS = (
+ 'Debian',
+ 'debian',
+ 'Ubuntu',
+ # Most Linux Mint editions are based on Ubuntu. One is based on Debian.
+ # The difference is reported in dist_id from platform.linux_distribution.
+ # But it doesn't matter since we share a bootstrapper between Debian and
+ # Ubuntu.
+ 'Mint',
+ 'LinuxMint',
+ 'Elementary OS',
+ 'Elementary',
+ '"elementary OS"',
+)
+
+
+class Bootstrapper(object):
+ """Main class that performs system bootstrap."""
+
+ def __init__(self, finished=FINISHED, choice=None, no_interactive=False,
+ hg_configure=False):
+ self.instance = None
+ self.finished = finished
+ self.choice = choice
+ self.hg_configure = hg_configure
+ cls = None
+ args = {'no_interactive': no_interactive}
+
+ if sys.platform.startswith('linux'):
+ distro, version, dist_id = platform.linux_distribution()
+
+ if distro in ('CentOS', 'CentOS Linux', 'Fedora'):
+ cls = CentOSFedoraBootstrapper
+ args['distro'] = distro
+ elif distro in DEBIAN_DISTROS:
+ cls = DebianBootstrapper
+ elif distro == 'Gentoo Base System':
+ cls = GentooBootstrapper
+ elif os.path.exists('/etc/arch-release'):
+ # Even on archlinux, platform.linux_distribution() returns ['','','']
+ cls = ArchlinuxBootstrapper
+ else:
+ raise NotImplementedError('Bootstrap support for this Linux '
+ 'distro not yet available.')
+
+ args['version'] = version
+ args['dist_id'] = dist_id
+
+ elif sys.platform.startswith('darwin'):
+ # TODO Support Darwin platforms that aren't OS X.
+ osx_version = platform.mac_ver()[0]
+
+ cls = OSXBootstrapper
+ args['version'] = osx_version
+
+ elif sys.platform.startswith('openbsd'):
+ cls = OpenBSDBootstrapper
+ args['version'] = platform.uname()[2]
+
+ elif sys.platform.startswith('dragonfly') or \
+ sys.platform.startswith('freebsd'):
+ cls = FreeBSDBootstrapper
+ args['version'] = platform.release()
+ args['flavor'] = platform.system()
+
+ elif sys.platform.startswith('win32') or sys.platform.startswith('msys'):
+ if 'MOZILLABUILD' in os.environ:
+ cls = MozillaBuildBootstrapper
+ else:
+ cls = WindowsBootstrapper
+
+ if cls is None:
+ raise NotImplementedError('Bootstrap support is not yet available '
+ 'for your OS.')
+
+ self.instance = cls(**args)
+
+ def bootstrap(self):
+ if self.choice is None:
+ # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
+ labels = ['%s. %s' % (i + 1, name) for (i, (name, _)) in enumerate(APPLICATIONS_LIST)]
+ prompt = APPLICATION_CHOICE % '\n'.join(labels)
+ prompt_choice = self.instance.prompt_int(prompt=prompt, low=1, high=len(APPLICATIONS))
+ name, application = APPLICATIONS_LIST[prompt_choice-1]
+ elif self.choice not in APPLICATIONS.keys():
+ raise Exception('Please pick a valid application choice: (%s)' % '/'.join(APPLICATIONS.keys()))
+ else:
+ name, application = APPLICATIONS[self.choice]
+
+ self.instance.install_system_packages()
+
+ # Like 'install_browser_packages' or 'install_mobile_android_packages'.
+ getattr(self.instance, 'install_%s_packages' % application)()
+
+ hg_installed, hg_modern = self.instance.ensure_mercurial_modern()
+ self.instance.ensure_python_modern()
+
+ # The state directory code is largely duplicated from mach_bootstrap.py.
+ # We can't easily import mach_bootstrap.py because the bootstrapper may
+ # run in self-contained mode and only the files in this directory will
+ # be available. We /could/ refactor parts of mach_bootstrap.py to be
+ # part of this directory to avoid the code duplication.
+ state_dir, _ = get_state_dir()
+
+ if not os.path.exists(state_dir):
+ if not self.instance.no_interactive:
+ choice = self.instance.prompt_int(
+ prompt=STATE_DIR_INFO.format(statedir=state_dir),
+ low=1,
+ high=2)
+
+ if choice == 1:
+ print('Creating global state directory: %s' % state_dir)
+ os.makedirs(state_dir, mode=0o770)
+
+ state_dir_available = os.path.exists(state_dir)
+
+ # Possibly configure Mercurial if the user wants to.
+ # TODO offer to configure Git.
+ if hg_installed and state_dir_available:
+ configure_hg = False
+ if not self.instance.no_interactive:
+ choice = self.instance.prompt_int(prompt=CONFIGURE_MERCURIAL,
+ low=1, high=2)
+ if choice == 1:
+ configure_hg = True
+ else:
+ configure_hg = self.hg_configure
+
+ if configure_hg:
+ configure_mercurial(self.instance.which('hg'), state_dir)
+
+ # Offer to clone if we're not inside a clone.
+ checkout_type = current_firefox_checkout(check_output=self.instance.check_output,
+ hg=self.instance.which('hg'))
+ have_clone = False
+
+ if checkout_type:
+ have_clone = True
+ elif hg_installed and not self.instance.no_interactive:
+ dest = raw_input(CLONE_MERCURIAL)
+ dest = dest.strip()
+ if dest:
+ dest = os.path.expanduser(dest)
+ have_clone = clone_firefox(self.instance.which('hg'), dest)
+
+ if not have_clone:
+ print(SOURCE_ADVERTISE)
+
+ print(self.finished % name)
+
+ # Like 'suggest_browser_mozconfig' or 'suggest_mobile_android_mozconfig'.
+ getattr(self.instance, 'suggest_%s_mozconfig' % application)()
+
+
+def update_vct(hg, root_state_dir):
+ """Ensure version-control-tools in the state directory is up to date."""
+ vct_dir = os.path.join(root_state_dir, 'version-control-tools')
+
+ # Ensure the latest revision of version-control-tools is present.
+ update_mercurial_repo(hg, 'https://hg.mozilla.org/hgcustom/version-control-tools',
+ vct_dir, '@')
+
+ return vct_dir
+
+
+def configure_mercurial(hg, root_state_dir):
+ """Run the Mercurial configuration wizard."""
+ vct_dir = update_vct(hg, root_state_dir)
+
+ # Run the config wizard from v-c-t.
+ args = [
+ hg,
+ '--config', 'extensions.configwizard=%s/hgext/configwizard' % vct_dir,
+ 'configwizard',
+ ]
+ subprocess.call(args)
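+
+# The resulting invocation is, illustratively:
+#   hg --config extensions.configwizard=<vct_dir>/hgext/configwizard configwizard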
+
+
+def update_mercurial_repo(hg, url, dest, revision):
+ """Perform a clone/pull + update of a Mercurial repository."""
+ args = [hg]
+
+ # Disable common extensions whose older versions may cause `hg`
+ # invocations to abort.
+ disable_exts = [
+ 'bzexport',
+ 'bzpost',
+ 'firefoxtree',
+ 'hgwatchman',
+ 'mozext',
+ 'mqext',
+ 'qimportbz',
+ 'push-to-try',
+ 'reviewboard',
+ ]
+ for ext in disable_exts:
+ args.extend(['--config', 'extensions.%s=!' % ext])
+
+ if os.path.exists(dest):
+ args.extend(['pull', url])
+ cwd = dest
+ else:
+ args.extend(['clone', '--noupdate', url, dest])
+ cwd = '/'
+
+ print('=' * 80)
+ print('Ensuring %s is up to date at %s' % (url, dest))
+
+ try:
+ subprocess.check_call(args, cwd=cwd)
+ subprocess.check_call([hg, 'update', '-r', revision], cwd=dest)
+ finally:
+ print('=' * 80)
+
+
+def clone_firefox(hg, dest):
+ """Clone the Firefox repository to a specified destination."""
+ print('Cloning Firefox Mercurial repository to %s' % dest)
+
+ # We create an empty repo then modify the config before adding data.
+ # This is necessary to ensure storage settings are optimally
+ # configured.
+ args = [
+ hg,
+ # The unified repo is generaldelta, so ensure the client is as
+ # well.
+ '--config', 'format.generaldelta=true',
+ 'init',
+ dest
+ ]
+ res = subprocess.call(args)
+ if res:
+ print('unable to create destination repo; please try cloning manually')
+ return False
+
+ # Strictly speaking, this could overwrite a config based on a template
+ # the user has installed. Let's pretend this problem doesn't exist
+ # unless someone complains about it.
+ with open(os.path.join(dest, '.hg', 'hgrc'), 'ab') as fh:
+ fh.write('[paths]\n')
+ fh.write('default = https://hg.mozilla.org/mozilla-unified\n')
+ fh.write('\n')
+
+ # The server uses aggressivemergedeltas which can blow up delta chain
+ # length. This can cause performance to tank due to delta chains being
+ # too long. Limit the delta chain length to something reasonable
+ # to bound revlog read time.
+ fh.write('[format]\n')
+ fh.write('# This is necessary to keep performance in check\n')
+ fh.write('maxchainlen = 10000\n')
+
+ res = subprocess.call([hg, 'pull', 'https://hg.mozilla.org/mozilla-unified'], cwd=dest)
+ print('')
+ if res:
+ print('error pulling; try running `hg pull https://hg.mozilla.org/mozilla-unified` manually')
+ return False
+
+ print('updating to "central" - the development head of Gecko and Firefox')
+ res = subprocess.call([hg, 'update', '-r', 'central'], cwd=dest)
+ if res:
+ print('error updating; you will need to `hg update` manually')
+
+ print('Firefox source code available at %s' % dest)
+ return True
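+
+# Illustrative use: clone_firefox('/usr/bin/hg', os.path.expanduser('~/mozilla-unified'))
+# returns True on success, printing progress along the way.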
+
+
+def current_firefox_checkout(check_output, hg=None):
+ """Determine whether we're in a Firefox checkout.
+
+ Returns one of None, ``git``, or ``hg``.
+ """
+ HG_ROOT_REVISIONS = set([
+ # From mozilla-central.
+ '8ba995b74e18334ab3707f27e9eb8f4e37ba3d29',
+ ])
+
+ path = os.getcwd()
+ while path:
+ hg_dir = os.path.join(path, '.hg')
+ git_dir = os.path.join(path, '.git')
+ if hg and os.path.exists(hg_dir):
+ # Verify the hg repo is a Firefox repo by looking at rev 0.
+ try:
+ node = check_output([hg, 'log', '-r', '0', '--template', '{node}'], cwd=path)
+ if node in HG_ROOT_REVISIONS:
+ return 'hg'
+ # Else the root revision is different. There could be nested
+ # repos. So keep traversing the parents.
+ except subprocess.CalledProcessError:
+ pass
+
+ # TODO check git remotes or `git rev-parse -q --verify $sha1^{commit}`
+ # for signs of Firefox.
+ elif os.path.exists(git_dir):
+ return 'git'
+
+ path, child = os.path.split(path)
+ if child == '':
+ break
+
+ return None
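+
+# Illustrative use: inside a mozilla-central clone,
+#   current_firefox_checkout(check_output=subprocess.check_output, hg='/usr/bin/hg')
+# returns 'hg'; outside any recognized checkout it returns None.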
diff --git a/python/mozboot/mozboot/centosfedora.py b/python/mozboot/mozboot/centosfedora.py
new file mode 100644
index 000000000..111e5eb90
--- /dev/null
+++ b/python/mozboot/mozboot/centosfedora.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+
+from mozboot.base import BaseBootstrapper
+
+
+class CentOSFedoraBootstrapper(BaseBootstrapper):
+ def __init__(self, distro, version, dist_id, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ self.distro = distro
+ self.version = version
+ self.dist_id = dist_id
+
+ self.group_packages = []
+
+ self.packages = [
+ 'autoconf213',
+ 'mercurial',
+ ]
+
+ self.browser_group_packages = [
+ 'GNOME Software Development',
+ ]
+
+ self.browser_packages = [
+ 'alsa-lib-devel',
+ 'GConf2-devel',
+ 'glibc-static',
+ 'gtk2-devel', # It is optional in Fedora 20's GNOME Software
+ # Development group.
+ 'libstdc++-static',
+ 'libXt-devel',
+ 'mesa-libGL-devel',
+ 'pulseaudio-libs-devel',
+ 'wireless-tools-devel',
+ 'yasm',
+ ]
+
+ self.mobile_android_packages = []
+
+ if self.distro in ('CentOS', 'CentOS Linux'):
+ self.group_packages += [
+ 'Development Tools',
+ 'Development Libraries',
+ 'GNOME Software Development',
+ ]
+
+ self.packages += [
+ 'curl-devel',
+ ]
+
+ self.browser_packages += [
+ 'dbus-glib-devel',
+ 'gtk3-devel',
+ ]
+
+ elif self.distro == 'Fedora':
+ self.group_packages += [
+ 'C Development Tools and Libraries',
+ ]
+
+ self.packages += [
+ 'python2-devel',
+ ]
+
+ self.browser_packages += [
+ 'gcc-c++',
+ ]
+
+ self.mobile_android_packages += [
+ 'java-1.8.0-openjdk-devel',
+ 'ncurses-devel.i686',
+ 'libstdc++.i686',
+ 'zlib-devel.i686',
+ ]
+
+ def install_system_packages(self):
+ self.dnf_groupinstall(*self.group_packages)
+ self.dnf_install(*self.packages)
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def install_mobile_android_packages(self):
+ if self.distro in ('CentOS', 'CentOS Linux'):
+ BaseBootstrapper.install_mobile_android_packages(self)
+ elif self.distro == 'Fedora':
+ self.install_fedora_mobile_android_packages()
+
+ def install_mobile_android_artifact_mode_packages(self):
+ if self.distro in ('CentOS', 'CentOS Linux'):
+ BaseBootstrapper.install_mobile_android_artifact_mode_packages(self)
+ elif self.distro == 'Fedora':
+ self.install_fedora_mobile_android_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ self.dnf_groupinstall(*self.browser_group_packages)
+ self.dnf_install(*self.browser_packages)
+
+ if self.distro in ('CentOS', 'CentOS Linux'):
+ yasm = 'http://pkgs.repoforge.org/yasm/yasm-1.1.0-1.el6.rf.i686.rpm'
+ if platform.architecture()[0] == '64bit':
+ yasm = 'http://pkgs.repoforge.org/yasm/yasm-1.1.0-1.el6.rf.x86_64.rpm'
+
+ self.run_as_root(['rpm', '-ivh', yasm])
+
+ def install_fedora_mobile_android_packages(self, artifact_mode=False):
+ import android
+
+ # Install Android specific packages.
+ self.dnf_install(*self.mobile_android_packages)
+
+ # Fetch Android SDK and NDK.
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH', os.path.expanduser(os.path.join('~', '.mozbuild')))
+ self.sdk_path = os.environ.get('ANDROID_SDK_HOME', os.path.join(mozbuild_path, 'android-sdk-linux'))
+ self.ndk_path = os.environ.get('ANDROID_NDK_HOME', os.path.join(mozbuild_path, 'android-ndk-r11b'))
+ self.sdk_url = 'https://dl.google.com/android/android-sdk_r24.0.1-linux.tgz'
+ self.ndk_url = android.android_ndk_url('linux')
+
+ android.ensure_android_sdk_and_ndk(path=mozbuild_path,
+ sdk_path=self.sdk_path, sdk_url=self.sdk_url,
+ ndk_path=self.ndk_path, ndk_url=self.ndk_url,
+ artifact_mode=artifact_mode)
+
+ # Most recent version of build-tools appears to be 23.0.1 on Fedora
+ packages = [p for p in android.ANDROID_PACKAGES if not p.startswith('build-tools')]
+ packages.append('build-tools-23.0.1')
+
+ # We expect the |android| tool to be at
+ # ~/.mozbuild/android-sdk-linux/tools/android.
+ android_tool = os.path.join(self.sdk_path, 'tools', 'android')
+ android.ensure_android_packages(android_tool=android_tool, packages=packages)
+
+ def suggest_mobile_android_mozconfig(self, artifact_mode=False):
+ import android
+ android.suggest_mozconfig(sdk_path=self.sdk_path,
+ ndk_path=self.ndk_path,
+ artifact_mode=artifact_mode)
+
+ def suggest_mobile_android_artifact_mode_mozconfig(self):
+ self.suggest_mobile_android_mozconfig(artifact_mode=True)
+
+ def upgrade_mercurial(self, current):
+ self.dnf_update('mercurial')
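
dnf_groupinstall and dnf_install are BaseBootstrapper helpers defined elsewhere in this patch. Conceptually they shell out to dnf along these lines; this is a sketch of the idea, not the actual helpers, and the real flags and privilege handling may differ:

    import subprocess

    def dnf_groupinstall(*groups):
        # Package groups such as 'GNOME Software Development' install by
        # name (requires root).
        subprocess.check_call(['dnf', 'groupinstall', '-y'] + list(groups))

    def dnf_install(*packages):
        subprocess.check_call(['dnf', 'install', '-y'] + list(packages))
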
diff --git a/python/mozboot/mozboot/debian.py b/python/mozboot/mozboot/debian.py
new file mode 100644
index 000000000..6e33e9e5b
--- /dev/null
+++ b/python/mozboot/mozboot/debian.py
@@ -0,0 +1,188 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+from mozboot.base import BaseBootstrapper
+
+
+MERCURIAL_INSTALL_PROMPT = '''
+Mercurial releases a new version every 3 months and your distro's package
+may become out of date. This may cause incompatibility with some
+Mercurial extensions that rely on new Mercurial features. As a result,
+you may not have an optimal version control experience.
+
+To have the best Mercurial experience possible, we recommend installing
+Mercurial via the "pip" Python packaging utility. This will likely result
+in files being placed in /usr/local/bin and /usr/local/lib.
+
+How would you like to continue?
+
+1) Install a modern Mercurial via pip (recommended)
+2) Install a legacy Mercurial via apt
+3) Do not install Mercurial
+
+Choice:
+'''.strip()
+
+
+class DebianBootstrapper(BaseBootstrapper):
+ # These are common packages for all Debian-derived distros (such as
+ # Ubuntu).
+ COMMON_PACKAGES = [
+ 'autoconf2.13',
+ 'build-essential',
+ 'ccache',
+ 'python-dev',
+ 'python-pip',
+ 'python-setuptools',
+ 'unzip',
+ 'uuid',
+ 'zip',
+ ]
+
+ # Subclasses can add packages to this variable to have them installed.
+ DISTRO_PACKAGES = []
+
+ # These are common packages for building Firefox for Desktop
+ # (browser) for all Debian-derived distros (such as Ubuntu).
+ BROWSER_COMMON_PACKAGES = [
+ 'libasound2-dev',
+ 'libcurl4-openssl-dev',
+ 'libdbus-1-dev',
+ 'libdbus-glib-1-dev',
+ 'libgconf2-dev',
+ 'libgtk-3-dev',
+ 'libgtk2.0-dev',
+ 'libiw-dev',
+ 'libnotify-dev',
+ 'libpulse-dev',
+ 'libx11-xcb-dev',
+ 'libxt-dev',
+ 'mesa-common-dev',
+ 'python-dbus',
+ 'xvfb',
+ 'yasm',
+ ]
+
+ # Subclasses can add packages to this variable to have them installed.
+ BROWSER_DISTRO_PACKAGES = []
+
+ # These are common packages for building Firefox for Android
+ # (mobile/android) for all Debian-derived distros (such as Ubuntu).
+ MOBILE_ANDROID_COMMON_PACKAGES = [
+ 'zlib1g-dev', # mobile/android requires system zlib.
+ 'openjdk-7-jdk',
+ 'wget', # For downloading the Android SDK and NDK.
+ 'libncurses5:i386', # See comments about i386 below.
+ 'libstdc++6:i386',
+ 'zlib1g:i386',
+ ]
+
+ # Subclasses can add packages to this variable to have them installed.
+ MOBILE_ANDROID_DISTRO_PACKAGES = []
+
+ def __init__(self, version, dist_id, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ self.version = version
+ self.dist_id = dist_id
+
+ self.packages = self.COMMON_PACKAGES + self.DISTRO_PACKAGES
+ self.browser_packages = self.BROWSER_COMMON_PACKAGES + self.BROWSER_DISTRO_PACKAGES
+ self.mobile_android_packages = self.MOBILE_ANDROID_COMMON_PACKAGES + self.MOBILE_ANDROID_DISTRO_PACKAGES
+
+ def install_system_packages(self):
+ self.apt_install(*self.packages)
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def install_mobile_android_packages(self):
+ self.ensure_mobile_android_packages()
+
+ def install_mobile_android_artifact_mode_packages(self):
+ self.ensure_mobile_android_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ self.apt_install(*self.browser_packages)
+
+ def ensure_mobile_android_packages(self, artifact_mode=False):
+ import android
+
+ # Multi-part process:
+ # 1. System packages.
+ # 2. Android SDK. Android NDK only if we are not in artifact mode.
+ # 3. Android packages.
+
+ # 1. This is hard to believe, but the Android SDK binaries are 32-bit
+ # and that conflicts with 64-bit Debian and Ubuntu installations out of
+ # the box. The solution is to add the i386 architecture. See
+ # "Troubleshooting Ubuntu" at
+ # http://developer.android.com/sdk/installing/index.html?pkg=tools.
+ self.run_as_root(['dpkg', '--add-architecture', 'i386'])
+ # After adding a new arch, the list of packages has to be updated
+ self.apt_update()
+ self.apt_install(*self.mobile_android_packages)
+
+ # 2. The user may have an external Android SDK (in which case we save
+ # them a lengthy download), or they may have already completed the
+ # download. We unpack to ~/.mozbuild/{android-sdk-linux, android-ndk-r11b}.
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH', os.path.expanduser(os.path.join('~', '.mozbuild')))
+ self.sdk_path = os.environ.get('ANDROID_SDK_HOME', os.path.join(mozbuild_path, 'android-sdk-linux'))
+ self.ndk_path = os.environ.get('ANDROID_NDK_HOME', os.path.join(mozbuild_path, 'android-ndk-r11b'))
+ self.sdk_url = 'https://dl.google.com/android/android-sdk_r24.0.1-linux.tgz'
+ self.ndk_url = android.android_ndk_url('linux')
+
+ android.ensure_android_sdk_and_ndk(path=mozbuild_path,
+ sdk_path=self.sdk_path, sdk_url=self.sdk_url,
+ ndk_path=self.ndk_path, ndk_url=self.ndk_url,
+ artifact_mode=artifact_mode)
+
+ # 3. We expect the |android| tool to be at
+ # ~/.mozbuild/android-sdk-linux/tools/android.
+ android_tool = os.path.join(self.sdk_path, 'tools', 'android')
+ android.ensure_android_packages(android_tool=android_tool)
+
+ def suggest_mobile_android_mozconfig(self, artifact_mode=False):
+ import android
+ android.suggest_mozconfig(sdk_path=self.sdk_path,
+ ndk_path=self.ndk_path,
+ artifact_mode=artifact_mode)
+
+ def suggest_mobile_android_artifact_mode_mozconfig(self):
+ self.suggest_mobile_android_mozconfig(artifact_mode=True)
+
+ def _update_package_manager(self):
+ self.apt_update()
+
+ def upgrade_mercurial(self, current):
+ """Install Mercurial from pip because Debian packages typically lag."""
+ if self.no_interactive:
+ # Install via Apt in non-interactive mode because it is the more
+ # conservative option and less likely to make people upset.
+ self.apt_install('mercurial')
+ return
+
+ res = self.prompt_int(MERCURIAL_INSTALL_PROMPT, 1, 3)
+
+ # Apt.
+ if res == 2:
+ self.apt_install('mercurial')
+ return False
+
+ # No Mercurial.
+ if res == 3:
+ print('Not installing Mercurial.')
+ return False
+
+ # pip.
+ assert res == 1
+ self.run_as_root(['pip', 'install', '--upgrade', 'Mercurial'])
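
prompt_int is another BaseBootstrapper helper; all upgrade_mercurial relies on is the contract "return an integer between low and high". A Python 2-style sketch of that contract (the real implementation lives in base.py and may differ):

    def prompt_int(prompt, low, high):
        # Re-ask until the user types an integer in [low, high].
        while True:
            try:
                choice = int(raw_input(prompt + ' '))
            except ValueError:
                continue
            if low <= choice <= high:
                return choice
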
diff --git a/python/mozboot/mozboot/freebsd.py b/python/mozboot/mozboot/freebsd.py
new file mode 100644
index 000000000..c524d188c
--- /dev/null
+++ b/python/mozboot/mozboot/freebsd.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+
+
+class FreeBSDBootstrapper(BaseBootstrapper):
+ def __init__(self, version, flavor, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+ self.version = int(version.split('.')[0])
+ self.flavor = flavor.lower()
+
+ self.packages = [
+ 'autoconf213',
+ 'gmake',
+ 'gtar',
+ 'mercurial',
+ 'pkgconf',
+ 'watchman',
+ 'zip',
+ ]
+
+ self.browser_packages = [
+ 'dbus-glib',
+ 'gconf2',
+ 'gtk2',
+ 'gtk3',
+ 'pulseaudio',
+ 'v4l_compat',
+ 'yasm',
+ ]
+
+ if not self.which('unzip'):
+ self.packages.append('unzip')
+
+ # GCC 4.2 or Clang 3.4 in base are too old
+ if self.flavor == 'freebsd' and self.version < 11:
+ self.browser_packages.append('gcc')
+
+ def pkg_install(self, *packages):
+ command = ['pkg', 'install']
+ if self.no_interactive:
+ command.append('-y')
+
+ command.extend(packages)
+ self.run_as_root(command)
+
+ def install_system_packages(self):
+ self.pkg_install(*self.packages)
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ self.pkg_install(*self.browser_packages)
+
+ def upgrade_mercurial(self, current):
+ self.pkg_install('mercurial')
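
The compiler gate in __init__ keys off the major release number only, so, for example, FreeBSD 10.x pulls in the gcc package while 11.x and later trust the base system's toolchain:

    # int(version.split('.')[0]) extracts the major release number:
    assert int('10.3'.split('.')[0]) == 10  # < 11: install the gcc package
    assert int('11.0'.split('.')[0]) == 11  # >= 11: base compiler is new enough
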
diff --git a/python/mozboot/mozboot/gentoo.py b/python/mozboot/mozboot/gentoo.py
new file mode 100644
index 000000000..085f03ae3
--- /dev/null
+++ b/python/mozboot/mozboot/gentoo.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+
+
+class GentooBootstrapper(BaseBootstrapper):
+ def __init__(self, version, dist_id, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ self.version = version
+ self.dist_id = dist_id
+
+ def install_system_packages(self):
+ self.run_as_root(['emerge', '--quiet', 'dev-vcs/git', 'mercurial'])
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ self.run_as_root(['emerge', '--onlydeps', '--quiet', 'firefox'])
+ self.run_as_root(['emerge', '--quiet', 'gtk+'])
+
+ def _update_package_manager(self):
+ self.run_as_root(['emerge', '--sync'])
+
+ def upgrade_mercurial(self, current):
+ self.run_as_root(['emerge', '--update', 'mercurial'])
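
The --onlydeps flag is what lets Portage prepare a build environment without building the browser itself: emerge resolves and installs firefox's dependency graph, then skips the package. The same invocation outside the bootstrapper (run as root):

    import subprocess

    # Install firefox's build dependencies only; the package itself is skipped.
    subprocess.check_call(['emerge', '--onlydeps', '--quiet', 'firefox'])
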
diff --git a/python/mozboot/mozboot/mach_commands.py b/python/mozboot/mozboot/mach_commands.py
new file mode 100644
index 000000000..940ffabbb
--- /dev/null
+++ b/python/mozboot/mozboot/mach_commands.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import sys
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+
+@CommandProvider
+class Bootstrap(object):
+ """Bootstrap system and mach for optimal development experience."""
+
+ @Command('bootstrap', category='devenv',
+ description='Install required system packages for building.')
+ def bootstrap(self):
+ from mozboot.bootstrap import Bootstrapper
+
+ bootstrapper = Bootstrapper()
+ bootstrapper.bootstrap()
+
+
+@CommandProvider
+class VersionControlCommands(object):
+ def __init__(self, context):
+ self._context = context
+
+ @Command('mercurial-setup', category='devenv',
+ description='Help configure Mercurial for optimal development.')
+ @CommandArgument('-u', '--update-only', action='store_true',
+ help='Only update recommended extensions, don\'t run the wizard.')
+ def mercurial_setup(self, update_only=False):
+ """Ensure Mercurial is optimally configured.
+
+ This command will inspect your Mercurial configuration and
+ guide you through an interactive wizard helping you configure
+ Mercurial for optimal use on Mozilla projects.
+
+ User choice is respected: no changes are made without explicit
+ confirmation from you.
+
+ If "--update-only" is used, the interactive wizard is disabled
+ and this command only ensures that remote repositories providing
+ Mercurial extensions are up to date.
+ """
+ import which
+ import mozboot.bootstrap as bootstrap
+
+ # "hg" is an executable script with a shebang, which will be found
+ # be which.which. We need to pass a win32 executable to the function
+ # because we spawn a process
+ # from it.
+ if sys.platform in ('win32', 'msys'):
+ hg = which.which('hg.exe')
+ else:
+ hg = which.which('hg')
+
+ if update_only:
+ bootstrap.update_vct(hg, self._context.state_dir)
+ else:
+ bootstrap.configure_mercurial(hg, self._context.state_dir)
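
With these providers registered, mach maps each @Command name directly to a CLI subcommand, so the commands above surface as `./mach bootstrap` and `./mach mercurial-setup` (plus `-u`/`--update-only` to skip the wizard).
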
diff --git a/python/mozboot/mozboot/mozillabuild.py b/python/mozboot/mozboot/mozillabuild.py
new file mode 100644
index 000000000..6d4958812
--- /dev/null
+++ b/python/mozboot/mozboot/mozillabuild.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import subprocess
+import tempfile
+
+from mozboot.base import BaseBootstrapper
+
+
+class MozillaBuildBootstrapper(BaseBootstrapper):
+ '''Bootstrapper for MozillaBuild to install rustup.'''
+ def __init__(self, no_interactive=False):
+ BaseBootstrapper.__init__(self, no_interactive=no_interactive)
+ print("mach bootstrap is not fully implemented in MozillaBuild")
+
+ def which(self, name):
+ return BaseBootstrapper.which(self, name + '.exe')
+
+ def install_system_packages(self):
+ self.install_rustup()
+
+ def install_rustup(self):
+ try:
+ rustup_init = os.path.join(tempfile.gettempdir(), 'rustup-init.exe')
+ self.http_download_and_save(
+ 'https://static.rust-lang.org/rustup/archive/0.2.0/i686-pc-windows-msvc/rustup-init.exe',
+ rustup_init,
+ 'a45ab7462b567dacddaf6e9e48bb43a1b9c1db4404ba77868f7d6fc685282a46')
+ self.run([rustup_init, '--no-modify-path', '--default-host',
+ 'x86_64-pc-windows-msvc', '--default-toolchain', 'stable', '-y'])
+ mozillabuild_dir = os.environ['MOZILLABUILD']
+
+ with open(os.path.join(mozillabuild_dir, 'msys', 'etc', 'profile.d', 'profile-rustup.sh'), 'wb') as f:
+ f.write('#!/bin/sh\n')
+ f.write('if test -n "$MOZILLABUILD"; then\n')
+ f.write(' WIN_HOME=$(cd "$HOME" && pwd)\n')
+ f.write(' PATH="$WIN_HOME/.cargo/bin:$PATH"\n')
+ f.write(' export PATH\n')
+ f.write('fi')
+ finally:
+ try:
+ os.remove(rustup_init)
+ except OSError: # file already gone; FileNotFoundError is Python 3-only
+ pass
+
+ def upgrade_mercurial(self, current):
+ self.pip_install('mercurial')
+
+ def upgrade_python(self, current):
+ pass
+
+ def install_browser_packages(self):
+ pass
+
+ def install_browser_artifact_mode_packages(self):
+ pass
+
+ def install_mobile_android_packages(self):
+ pass
+
+ def install_mobile_android_artifact_mode_packages(self):
+ pass
+
+ def _update_package_manager(self):
+ pass
+
+ def run(self, command):
+ subprocess.check_call(command, stdin=sys.stdin)
+
+ def pip_install(self, *packages):
+ pip_dir = os.path.join(os.environ['MOZILLABUILD'], 'python', 'Scripts', 'pip.exe')
+ command = [pip_dir, 'install', '--upgrade']
+ command.extend(packages)
+ self.run(command)
+
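
pip_install resolves pip relative to the MozillaBuild installation rather than trusting PATH. With a typical install root (the value below is an assumption for illustration), the composed path looks like this:

    import ntpath  # Windows path semantics, runnable on any platform

    mozillabuild = 'C:\\mozilla-build'  # hypothetical MOZILLABUILD value
    print(ntpath.join(mozillabuild, 'python', 'Scripts', 'pip.exe'))
    # C:\mozilla-build\python\Scripts\pip.exe
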
diff --git a/python/mozboot/mozboot/openbsd.py b/python/mozboot/mozboot/openbsd.py
new file mode 100644
index 000000000..df6a195fd
--- /dev/null
+++ b/python/mozboot/mozboot/openbsd.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+
+
+class OpenBSDBootstrapper(BaseBootstrapper):
+ def __init__(self, version, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ self.packages = [
+ 'mercurial',
+ 'autoconf-2.13',
+ 'gmake',
+ 'gtar',
+ 'wget',
+ 'unzip',
+ 'zip',
+ ]
+
+ self.browser_packages = [
+ 'llvm',
+ 'yasm',
+ 'gconf2',
+ 'gtk+2',
+ 'gtk+3',
+ 'dbus-glib',
+ 'pulseaudio',
+ ]
+
+ def install_system_packages(self):
+ # we use -z because there's no other way to say "any autoconf-2.13"
+ self.run_as_root(['pkg_add', '-z'] + self.packages)
+
+ def install_browser_packages(self):
+ self.ensure_browser_packages()
+
+ def install_browser_artifact_mode_packages(self):
+ self.ensure_browser_packages(artifact_mode=True)
+
+ def ensure_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ # we use -z because there's no other way to say "any autoconf-2.13"
+ self.run_as_root(['pkg_add', '-z'] + self.browser_packages)
diff --git a/python/mozboot/mozboot/osx.py b/python/mozboot/mozboot/osx.py
new file mode 100644
index 000000000..d66d66d6b
--- /dev/null
+++ b/python/mozboot/mozboot/osx.py
@@ -0,0 +1,577 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function, unicode_literals
+
+import os
+import re
+import subprocess
+import sys
+import tempfile
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+
+from distutils.version import StrictVersion
+
+from mozboot.base import BaseBootstrapper
+
+HOMEBREW_BOOTSTRAP = 'https://raw.githubusercontent.com/Homebrew/install/master/install'
+XCODE_APP_STORE = 'macappstore://itunes.apple.com/app/id497799835?mt=12'
+XCODE_LEGACY = 'https://developer.apple.com/downloads/download.action?path=Developer_Tools/xcode_3.2.6_and_ios_sdk_4.3__final/xcode_3.2.6_and_ios_sdk_4.3.dmg'
+HOMEBREW_AUTOCONF213 = 'https://raw.github.com/Homebrew/homebrew-versions/master/autoconf213.rb'
+
+MACPORTS_URL = {'11': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.11-ElCapitan.pkg',
+ '10': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.10-Yosemite.pkg',
+ '9': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.9-Mavericks.pkg',
+ '8': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.8-MountainLion.pkg',
+ '7': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.7-Lion.pkg',
+ '6': 'https://distfiles.macports.org/MacPorts/MacPorts-2.3.4-10.6-SnowLeopard.pkg', }
+
+MACPORTS_CLANG_PACKAGE = 'clang-3.3'
+
+RE_CLANG_VERSION = re.compile(r'Apple (?:clang|LLVM) version (\d+\.\d+)')
+
+APPLE_CLANG_MINIMUM_VERSION = StrictVersion('4.2')
+
+XCODE_REQUIRED = '''
+Xcode is required to build Firefox. Please complete the install of Xcode
+through the App Store.
+
+It's possible Xcode is already installed on this machine but it isn't being
+detected. This is possible with developer preview releases of Xcode, for
+example. To correct this problem, run:
+
+ `xcode-select --switch /path/to/Xcode.app`.
+
+e.g. `sudo xcode-select --switch /Applications/Xcode.app`.
+'''
+
+XCODE_REQUIRED_LEGACY = '''
+You will need to download and install Xcode to build Firefox.
+
+Please complete the Xcode download and then relaunch this script.
+'''
+
+XCODE_NO_DEVELOPER_DIRECTORY = '''
+xcode-select says you don't have a developer directory configured. We think
+this is due to you not having Xcode installed (properly). We're going to
+attempt to install Xcode through the App Store. If the App Store thinks you
+have Xcode installed, please run xcode-select by hand until it stops
+complaining and then re-run this script.
+'''
+
+XCODE_COMMAND_LINE_TOOLS_MISSING = '''
+The Xcode command line tools are required to build Firefox.
+'''
+
+INSTALL_XCODE_COMMAND_LINE_TOOLS_STEPS = '''
+Perform the following steps to install the Xcode command line tools:
+
+ 1) Open Xcode.app
+ 2) Click through any first-run prompts
+ 3) From the main Xcode menu, select Preferences (Command ,)
+ 4) Go to the Download tab (near the right)
+ 5) Install the "Command Line Tools"
+
+When that has finished installing, please relaunch this script.
+'''
+
+UPGRADE_XCODE_COMMAND_LINE_TOOLS = '''
+An old version of the Xcode command line tools is installed. You will need to
+install a newer version in order to compile Firefox. If Xcode itself is old,
+its command line tools may be too old even if it claims there are no updates
+available, so if you are seeing this message multiple times, please update
+Xcode first.
+'''
+
+PACKAGE_MANAGER_INSTALL = '''
+We will install the %s package manager to install required packages.
+
+You will be prompted to install %s with its default settings. If you
+would prefer to do this manually, hit CTRL+c, install %s yourself, ensure
+"%s" is in your $PATH, and relaunch bootstrap.
+'''
+
+PACKAGE_MANAGER_PACKAGES = '''
+We are now installing all required packages via %s. You will see a lot of
+output as packages are built.
+'''
+
+PACKAGE_MANAGER_OLD_CLANG = '''
+We require a newer compiler than what is provided by your version of Xcode.
+
+We will install a modern version of Clang through %s.
+'''
+
+PACKAGE_MANAGER_CHOICE = '''
+Please choose a package manager you'd like:
+1. Homebrew
+2. MacPorts (Does not yet support bootstrapping Firefox for Android.)
+Your choice:
+'''
+
+NO_PACKAGE_MANAGER_WARNING = '''
+It seems you don't have any supported package manager installed.
+'''
+
+PACKAGE_MANAGER_EXISTS = '''
+Looks like you have %s installed. We will install all required packages via %s.
+'''
+
+MULTI_PACKAGE_MANAGER_EXISTS = '''
+It looks like you have multiple package managers installed.
+'''
+
+# We may add support for other package managers on OS X.
+PACKAGE_MANAGER = {'Homebrew': 'brew',
+ 'MacPorts': 'port'}
+
+PACKAGE_MANAGER_CHOICES = ['Homebrew', 'MacPorts']
+
+PACKAGE_MANAGER_BIN_MISSING = '''
+A package manager is installed. However, your current shell does
+not know where to find '%s' yet. You'll need to start a new shell
+to pick up the environment changes so it can be found.
+
+Please start a new shell or terminal window and run this
+bootstrapper again.
+
+If this problem persists, you will likely want to adjust your
+shell's init script (e.g. ~/.bash_profile) to export a PATH
+environment variable containing the location of your package
+manager binary. e.g.
+
+ export PATH=/usr/local/bin:$PATH
+'''
+
+BAD_PATH_ORDER = '''
+Your environment's PATH variable lists a system path directory (%s)
+before the path to your package manager's binaries (%s).
+This means that the package manager's binaries likely won't be
+detected properly.
+
+Modify your shell's configuration (e.g. ~/.profile or
+~/.bash_profile) to have %s appear in $PATH before %s. e.g.
+
+ export PATH=%s:$PATH
+
+Once this is done, start a new shell (likely Command+T) and run
+this bootstrap again.
+'''
+
+JAVA_LICENSE_NOTICE = '''
+We installed a recent Java toolchain for you. We agreed to the Oracle Java
+license for you by downloading the JDK. If this is unacceptable you should
+uninstall.
+'''
+
+
+class OSXBootstrapper(BaseBootstrapper):
+ def __init__(self, version, **kwargs):
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ self.os_version = StrictVersion(version)
+
+ if self.os_version < StrictVersion('10.6'):
+ raise Exception('OS X 10.6 or above is required.')
+
+ self.minor_version = version.split('.')[1]
+
+ def install_system_packages(self):
+ self.ensure_xcode()
+
+ choice = self.ensure_package_manager()
+ self.package_manager = choice
+ getattr(self, 'ensure_%s_system_packages' % self.package_manager)()
+
+ def install_browser_packages(self):
+ getattr(self, 'ensure_%s_browser_packages' % self.package_manager)()
+
+ def install_browser_artifact_mode_packages(self):
+ getattr(self, 'ensure_%s_browser_packages' % self.package_manager)(artifact_mode=True)
+
+ def install_mobile_android_packages(self):
+ getattr(self, 'ensure_%s_mobile_android_packages' % self.package_manager)()
+
+ def install_mobile_android_artifact_mode_packages(self):
+ getattr(self, 'ensure_%s_mobile_android_packages' % self.package_manager)(artifact_mode=True)
+
+ def suggest_mobile_android_mozconfig(self):
+ getattr(self, 'suggest_%s_mobile_android_mozconfig' % self.package_manager)()
+
+ def suggest_mobile_android_artifact_mode_mozconfig(self):
+ getattr(self, 'suggest_%s_mobile_android_mozconfig' % self.package_manager)(artifact_mode=True)
+
+ def ensure_xcode(self):
+ if self.os_version < StrictVersion('10.7'):
+ if not os.path.exists('/Developer/Applications/Xcode.app'):
+ print(XCODE_REQUIRED_LEGACY)
+
+ subprocess.check_call(['open', XCODE_LEGACY])
+ sys.exit(1)
+
+ # On OS X 10.7 and later, Xcode comes from the App Store. However, users can
+ # still install Xcode into any arbitrary location. We honor the
+ # location of Xcode as set by xcode-select. This should also pick up
+ # developer preview releases of Xcode, which can be installed into
+ # paths like /Applications/Xcode5-DP6.app.
+ elif self.os_version >= StrictVersion('10.7'):
+ select = self.which('xcode-select')
+ try:
+ output = self.check_output([select, '--print-path'],
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ # This seems to appear on fresh OS X machines before any Xcode
+ # has been installed. It may only occur on OS X 10.9 and later.
+ if b'unable to get active developer directory' in e.output:
+ print(XCODE_NO_DEVELOPER_DIRECTORY)
+ self._install_xcode_app_store()
+ assert False # Above should exit.
+
+ output = e.output
+
+ # This isn't the most robust check in the world. It relies on the
+ # default value not being in an application bundle, which seems to
+ # hold true on at least Mavericks.
+ if b'.app/' not in output:
+ print(XCODE_REQUIRED)
+ self._install_xcode_app_store()
+ assert False # Above should exit.
+
+ # Once Xcode is installed, you need to agree to the license before you can
+ # use it.
+ try:
+ output = self.check_output(['/usr/bin/xcrun', 'clang'],
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ if b'license' in e.output:
+ xcodebuild = self.which('xcodebuild')
+ try:
+ subprocess.check_call([xcodebuild, '-license'],
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ if b'requires admin privileges' in e.output:
+ self.run_as_root([xcodebuild, '-license'])
+
+ # Even then we're not done! We need to install the Xcode command line tools.
+ # As of Mountain Lion, apparently the only way to do this is to go through a
+ # menu dialog inside Xcode itself. We're not making this up.
+ if self.os_version >= StrictVersion('10.7'):
+ if not os.path.exists('/usr/bin/clang'):
+ print(XCODE_COMMAND_LINE_TOOLS_MISSING)
+ print(INSTALL_XCODE_COMMAND_LINE_TOOLS_STEPS)
+ sys.exit(1)
+
+ output = self.check_output(['/usr/bin/clang', '--version'])
+ match = RE_CLANG_VERSION.search(output)
+ if match is None:
+ raise Exception('Could not determine Clang version.')
+
+ version = StrictVersion(match.group(1))
+
+ if version < APPLE_CLANG_MINIMUM_VERSION:
+ print(UPGRADE_XCODE_COMMAND_LINE_TOOLS)
+ print(INSTALL_XCODE_COMMAND_LINE_TOOLS_STEPS)
+ sys.exit(1)
+
+ def _install_xcode_app_store(self):
+ subprocess.check_call(['open', XCODE_APP_STORE])
+ print('Once the install has finished, please relaunch this script.')
+ sys.exit(1)
+
+ def _ensure_homebrew_packages(self, packages, extra_brew_args=[]):
+ self.brew = self.which('brew')
+ assert self.brew is not None
+ cmd = [self.brew] + extra_brew_args
+
+ installed = self.check_output(cmd + ['list']).split()
+
+ printed = False
+
+ for name, package in packages:
+ if name in installed:
+ continue
+
+ if not printed:
+ print(PACKAGE_MANAGER_PACKAGES % ('Homebrew',))
+ printed = True
+
+ subprocess.check_call(cmd + ['install', package])
+
+ return printed
+
+ def _ensure_homebrew_casks(self, casks):
+ # Change |brew install cask| into |brew cask install cask|.
+ return self._ensure_homebrew_packages(casks, extra_brew_args=['cask'])
+
+ def ensure_homebrew_system_packages(self):
+ packages = [
+ # We need to install Python because Mercurial requires the Python
+ # development headers which are missing from OS X (at least on
+ # 10.8) and because the build system wants a version newer than
+ # what Apple ships.
+ ('python', 'python'),
+ ('mercurial', 'mercurial'),
+ ('git', 'git'),
+ ('autoconf213', HOMEBREW_AUTOCONF213),
+ ('gnu-tar', 'gnu-tar'),
+ ('watchman', 'watchman',),
+ ('terminal-notifier', 'terminal-notifier')
+ ]
+ self._ensure_homebrew_packages(packages)
+
+ def ensure_homebrew_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ packages = [
+ ('yasm', 'yasm'),
+ ]
+ self._ensure_homebrew_packages(packages)
+
+ installed = self.check_output([self.brew, 'list']).split()
+ if self.os_version < StrictVersion('10.7') and b'llvm' not in installed:
+ print(PACKAGE_MANAGER_OLD_CLANG % ('Homebrew',))
+
+ subprocess.check_call([self.brew, '-v', 'install', 'llvm',
+ '--with-clang', '--all-targets'])
+
+ def ensure_homebrew_mobile_android_packages(self, artifact_mode=False):
+ # Multi-part process:
+ # 1. System packages.
+ # 2. Android SDK. Android NDK only if we are not in artifact mode.
+ # 3. Android packages.
+
+ import android
+
+ # 1. System packages.
+ packages = [
+ ('brew-cask', 'caskroom/cask/brew-cask'), # For installing Java later.
+ ('wget', 'wget'),
+ ]
+ self._ensure_homebrew_packages(packages)
+
+ casks = [
+ ('java', 'java'),
+ ]
+ installed = self._ensure_homebrew_casks(casks)
+ if installed:
+ print(JAVA_LICENSE_NOTICE) # We accepted a license agreement for the user.
+
+ # 2. The user may have an external Android SDK (in which case we save
+ # them a lengthy download), or they may have already completed the
+ # download. We unpack to ~/.mozbuild/{android-sdk-linux, android-ndk-r11b}.
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH', os.path.expanduser(os.path.join('~', '.mozbuild')))
+ self.sdk_path = os.environ.get('ANDROID_SDK_HOME', os.path.join(mozbuild_path, 'android-sdk-macosx'))
+ self.ndk_path = os.environ.get('ANDROID_NDK_HOME', os.path.join(mozbuild_path, 'android-ndk-r11b'))
+ self.sdk_url = 'https://dl.google.com/android/android-sdk_r24.0.1-macosx.zip'
+ is_64bits = sys.maxsize > 2**32
+ if is_64bits:
+ self.ndk_url = android.android_ndk_url('darwin')
+ else:
+ raise Exception('You need a 64-bit version of Mac OS X to build Firefox for Android.')
+
+ android.ensure_android_sdk_and_ndk(path=mozbuild_path,
+ sdk_path=self.sdk_path, sdk_url=self.sdk_url,
+ ndk_path=self.ndk_path, ndk_url=self.ndk_url,
+ artifact_mode=artifact_mode)
+
+ # 3. We expect the |android| tool to be at
+ # ~/.mozbuild/android-sdk-macosx/tools/android.
+ android_tool = os.path.join(self.sdk_path, 'tools', 'android')
+ android.ensure_android_packages(android_tool=android_tool)
+
+ def suggest_homebrew_mobile_android_mozconfig(self, artifact_mode=False):
+ import android
+ android.suggest_mozconfig(sdk_path=self.sdk_path,
+ ndk_path=self.ndk_path,
+ artifact_mode=artifact_mode)
+
+ def _ensure_macports_packages(self, packages):
+ self.port = self.which('port')
+ assert self.port is not None
+
+ installed = set(self.check_output([self.port, 'installed']).split())
+
+ missing = [package for package in packages if package not in installed]
+ if missing:
+ print(PACKAGE_MANAGER_PACKAGES % ('MacPorts',))
+ self.run_as_root([self.port, '-v', 'install'] + missing)
+
+ def ensure_macports_system_packages(self):
+ packages = [
+ 'python27',
+ 'py27-readline',
+ 'mercurial',
+ 'autoconf213',
+ 'gnutar',
+ 'watchman',
+ ]
+
+ self._ensure_macports_packages(packages)
+ self.run_as_root([self.port, 'select', '--set', 'python', 'python27'])
+
+ def ensure_macports_browser_packages(self, artifact_mode=False):
+ # TODO: Figure out what not to install for artifact mode
+ packages = ['yasm']
+
+ self._ensure_macports_packages(packages)
+
+ installed = set(self.check_output([self.port, 'installed']).split())
+ if self.os_version < StrictVersion('10.7') and MACPORTS_CLANG_PACKAGE not in installed:
+ print(PACKAGE_MANAGER_OLD_CLANG % ('MacPorts',))
+ self.run_as_root([self.port, '-v', 'install', MACPORTS_CLANG_PACKAGE])
+ self.run_as_root([self.port, 'select', '--set', 'clang', 'mp-' + MACPORTS_CLANG_PACKAGE])
+
+ def ensure_macports_mobile_android_packages(self, artifact_mode=False):
+ # Multi-part process:
+ # 1. System packages.
+ # 2. Android SDK. Android NDK only if we are not in artifact mode.
+ # 3. Android packages.
+
+ import android
+
+ # 1. System packages.
+ packages = [
+ 'wget',
+ ]
+ self._ensure_macports_packages(packages)
+
+ # Verify the presence of java and javac.
+ if not self.which('java') or not self.which('javac'):
+ raise Exception('You need to have Java version 1.7 or later installed. Please visit http://www.java.com/en/download/mac_download.jsp to get the latest version.')
+
+ # 2. The user may have an external Android SDK (in which case we save
+ # them a lengthy download), or they may have already completed the
+ # download. We unpack to ~/.mozbuild/{android-sdk-linux, android-ndk-r11b}.
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH', os.path.expanduser(os.path.join('~', '.mozbuild')))
+ self.sdk_path = os.environ.get('ANDROID_SDK_HOME', os.path.join(mozbuild_path, 'android-sdk-macosx'))
+ self.ndk_path = os.environ.get('ANDROID_NDK_HOME', os.path.join(mozbuild_path, 'android-ndk-r11b'))
+ self.sdk_url = 'https://dl.google.com/android/android-sdk_r24.0.1-macosx.zip'
+ is_64bits = sys.maxsize > 2**32
+ if is_64bits:
+ self.ndk_url = android.android_ndk_url('darwin')
+ else:
+ raise Exception('You need a 64-bit version of Mac OS X to build Firefox for Android.')
+
+ android.ensure_android_sdk_and_ndk(path=mozbuild_path,
+ sdk_path=self.sdk_path, sdk_url=self.sdk_url,
+ ndk_path=self.ndk_path, ndk_url=self.ndk_url,
+ artifact_mode=artifact_mode)
+
+ # 3. We expect the |android| tool to be at
+ # ~/.mozbuild/android-sdk-macosx/tools/android.
+ android_tool = os.path.join(self.sdk_path, 'tools', 'android')
+ android.ensure_android_packages(android_tool=android_tool)
+
+ def suggest_macports_mobile_android_mozconfig(self, artifact_mode=False):
+ import android
+ android.suggest_mozconfig(sdk_path=self.sdk_path,
+ ndk_path=self.ndk_path,
+ artifact_mode=artifact_mode)
+
+ def ensure_package_manager(self):
+ '''
+ Search for a supported package manager on the PATH; if none is found,
+ prompt the user to install one. If only one is found, use that one.
+ If both are found, prompt the user to choose one.
+ '''
+ installed = []
+ for name, cmd in PACKAGE_MANAGER.iteritems():
+ if self.which(cmd) is not None:
+ installed.append(name)
+
+ active_name, active_cmd = None, None
+
+ if not installed:
+ print(NO_PACKAGE_MANAGER_WARNING)
+ choice = self.prompt_int(prompt=PACKAGE_MANAGER_CHOICE, low=1, high=2)
+ active_name = PACKAGE_MANAGER_CHOICES[choice - 1]
+ active_cmd = PACKAGE_MANAGER[active_name]
+ getattr(self, 'install_%s' % active_name.lower())()
+ elif len(installed) == 1:
+ print(PACKAGE_MANAGER_EXISTS % (installed[0], installed[0]))
+ active_name = installed[0]
+ active_cmd = PACKAGE_MANAGER[active_name]
+ else:
+ print(MULTI_PACKAGE_MANAGER_EXISTS)
+ choice = self.prompt_int(prompt=PACKAGE_MANAGER_CHOICE, low=1, high=2)
+
+ active_name = PACKAGE_MANAGER_CHOICES[choice - 1]
+ active_cmd = PACKAGE_MANAGER[active_name]
+
+ # Ensure the active package manager is in $PATH and it comes before
+ # /usr/bin. If it doesn't come before /usr/bin, we'll pick up system
+ # packages before package manager installed packages and the build may
+ # break.
+ p = self.which(active_cmd)
+ if not p:
+ print(PACKAGE_MANAGER_BIN_MISSING % active_cmd)
+ sys.exit(1)
+
+ p_dir = os.path.dirname(p)
+ for path in os.environ['PATH'].split(os.pathsep):
+ if path == p_dir:
+ break
+
+ for check in ('/bin', '/usr/bin'):
+ if path == check:
+ print(BAD_PATH_ORDER % (check, p_dir, p_dir, check, p_dir))
+ sys.exit(1)
+
+ return active_name.lower()
+
+ def install_homebrew(self):
+ print(PACKAGE_MANAGER_INSTALL % ('Homebrew', 'Homebrew', 'Homebrew', 'brew'))
+ bootstrap = urlopen(url=HOMEBREW_BOOTSTRAP, timeout=20).read()
+ with tempfile.NamedTemporaryFile() as tf:
+ tf.write(bootstrap)
+ tf.flush()
+
+ subprocess.check_call(['ruby', tf.name])
+
+ def install_macports(self):
+ url = MACPORTS_URL.get(self.minor_version, None)
+ if not url:
+ raise Exception('We do not have a MacPorts install URL for your '
+ 'OS X version. You will need to install MacPorts manually.')
+
+ print(PACKAGE_MANAGER_INSTALL % ('MacPorts', 'MacPorts', 'MacPorts', 'port'))
+ pkg = urlopen(url=url, timeout=300).read()
+ with tempfile.NamedTemporaryFile(suffix='.pkg') as tf:
+ tf.write(pkg)
+ tf.flush()
+
+ self.run_as_root(['installer', '-pkg', tf.name, '-target', '/'])
+
+ def _update_package_manager(self):
+ if self.package_manager == 'homebrew':
+ subprocess.check_call([self.brew, '-v', 'update'])
+ else:
+ assert self.package_manager == 'macports'
+ self.run_as_root([self.port, 'selfupdate'])
+
+ def _upgrade_package(self, package):
+ self._ensure_package_manager_updated()
+
+ if self.package_manager == 'homebrew':
+ try:
+ subprocess.check_output([self.brew, '-v', 'upgrade', package],
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ if b'already installed' not in e.output:
+ raise
+ else:
+ assert self.package_manager == 'macports'
+
+ self.run_as_root([self.port, 'upgrade', package])
+
+ def upgrade_mercurial(self, current):
+ self._upgrade_package('mercurial')
+
+ def upgrade_python(self, current):
+ if self.package_manager == 'homebrew':
+ self._upgrade_package('python')
+ else:
+ self._upgrade_package('python27')
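
The subtlest part of ensure_package_manager is the PATH-order check: the package manager's bin directory must precede /bin and /usr/bin, or system packages shadow the ones bootstrap installs. A self-contained restatement of that check:

    def package_manager_wins(pm_dir, path_env):
        # True when pm_dir appears in PATH before /bin and /usr/bin
        # (':' is the PATH separator on OS X).
        for entry in path_env.split(':'):
            if entry == pm_dir:
                return True
            if entry in ('/bin', '/usr/bin'):
                return False
        return False

    assert package_manager_wins('/usr/local/bin', '/usr/local/bin:/usr/bin:/bin')
    assert not package_manager_wins('/usr/local/bin', '/usr/bin:/usr/local/bin')
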
diff --git a/python/mozboot/mozboot/util.py b/python/mozboot/mozboot/util.py
new file mode 100644
index 000000000..f2bbb76db
--- /dev/null
+++ b/python/mozboot/mozboot/util.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+
+def get_state_dir():
+ """Obtain path to a directory to hold state.
+
+ Returns a tuple of the path and a bool indicating whether the
+ value came from an environment variable.
+ """
+ state_user_dir = os.path.expanduser('~/.mozbuild')
+ state_env_dir = os.environ.get('MOZBUILD_STATE_PATH')
+
+ if state_env_dir:
+ return state_env_dir, True
+ else:
+ return state_user_dir, False
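
Callers use the boolean to tell a user-chosen location apart from the default; for example (assuming mozboot is importable):

    import os

    from mozboot.util import get_state_dir

    os.environ.pop('MOZBUILD_STATE_PATH', None)
    path, from_env = get_state_dir()
    assert from_env is False  # fell back to the default
    assert path == os.path.expanduser('~/.mozbuild')
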
diff --git a/python/mozboot/mozboot/windows.py b/python/mozboot/mozboot/windows.py
new file mode 100644
index 000000000..c072b1b91
--- /dev/null
+++ b/python/mozboot/mozboot/windows.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import subprocess
+
+from mozboot.base import BaseBootstrapper
+
+
+class WindowsBootstrapper(BaseBootstrapper):
+ '''Bootstrapper for msys2 based environments for building in Windows.'''
+
+ SYSTEM_PACKAGES = [
+ 'mingw-w64-x86_64-make',
+ 'mingw-w64-x86_64-python2-pip',
+ 'mingw-w64-x86_64-perl',
+ 'patch',
+ 'patchutils',
+ 'diffutils',
+ 'autoconf2.13',
+ 'tar',
+ 'zip',
+ 'unzip',
+ 'mingw-w64-x86_64-toolchain', # TODO: Should be removed when Mercurial is installable from a wheel.
+ 'mingw-w64-i686-toolchain'
+ ]
+
+ BROWSER_PACKAGES = [
+ 'mingw-w64-x86_64-yasm',
+ 'mingw-w64-i686-nsis'
+ ]
+
+ MOBILE_ANDROID_COMMON_PACKAGES = [
+ 'wget'
+ ]
+
+ def __init__(self, **kwargs):
+ if 'MOZ_WINDOWS_BOOTSTRAP' not in os.environ or os.environ['MOZ_WINDOWS_BOOTSTRAP'] != '1':
+ raise NotImplementedError('Bootstrap support for Windows is under development. For now, use MozillaBuild '
+ 'to set up a build environment on Windows. If you are testing Windows Bootstrap support, '
+ 'try `export MOZ_WINDOWS_BOOTSTRAP=1`')
+ BaseBootstrapper.__init__(self, **kwargs)
+ if not self.which('pacman'):
+ raise NotImplementedError('The Windows bootstrapper only works with msys2 with pacman. Get msys2 at '
+ 'http://msys2.github.io/')
+ print('Using an experimental bootstrapper for Windows.')
+
+ def which(self, name):
+ return BaseBootstrapper.which(self, name + '.exe')
+
+ def install_system_packages(self):
+ self.pacman_install(*self.SYSTEM_PACKAGES)
+
+ def upgrade_mercurial(self, current):
+ self.pip_install('mercurial')
+
+ def upgrade_python(self, current):
+ self.pacman_install('mingw-w64-x86_64-python2')
+
+ def install_browser_packages(self):
+ self.pacman_install(*self.BROWSER_PACKAGES)
+
+ def install_mobile_android_packages(self):
+ raise NotImplementedError('We do not support building Android on Windows. Sorry!')
+
+ def install_mobile_android_artifact_mode_packages(self):
+ raise NotImplementedError('We do not support building Android on Windows. Sorry!')
+
+ def _update_package_manager(self):
+ self.pacman_update()
+
+ def run(self, command):
+ subprocess.check_call(command, stdin=sys.stdin)
+
+ def pacman_update(self):
+ command = ['pacman', '--sync', '--refresh']
+ self.run(command)
+
+ def pacman_upgrade(self):
+ command = ['pacman', '--sync', '--refresh', '--sysupgrade']
+ self.run(command)
+
+ def pacman_install(self, *packages):
+ command = ['pacman', '--sync', '--needed']
+ if self.no_interactive:
+ command.append('--noconfirm')
+
+ command.extend(packages)
+ self.run(command)
+
+ def pip_install(self, *packages):
+ command = ['pip', 'install', '--upgrade']
+ command.extend(packages)
+ self.run(command)
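
In pacman terms, --sync --needed skips packages that are already up to date, and --noconfirm answers every prompt automatically, which is why it is only appended when no_interactive is set.
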
diff --git a/python/mozboot/setup.py b/python/mozboot/setup.py
new file mode 100644
index 000000000..2ad2c63ec
--- /dev/null
+++ b/python/mozboot/setup.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from distutils.core import setup
+
+VERSION = '0.1'
+
+setup(
+ name='mozboot',
+ description='System bootstrap for building Mozilla projects.',
+ license='MPL 2.0',
+ packages=['mozboot'],
+ version=VERSION,
+ scripts=['bin/bootstrap.py'],
+)
diff --git a/python/mozboot/support/ConEmu.xml b/python/mozboot/support/ConEmu.xml
new file mode 100755
index 000000000..e2df514f1
--- /dev/null
+++ b/python/mozboot/support/ConEmu.xml
@@ -0,0 +1,897 @@
+<?xml version="1.0" encoding="utf-8"?>
+<key name="Software">
+ <key name="ConEmu">
+ <key name=".Vanilla" modified="2016-08-05 15:29:26" build="160724">
+ <value name="SingleInstance" type="hex" data="00"/>
+ <value name="KeyboardHooks" type="hex" data="01"/>
+ <value name="UseInjects" type="hex" data="01"/>
+ <value name="Update.CheckOnStartup" type="hex" data="00"/>
+ <value name="Update.CheckHourly" type="hex" data="00"/>
+ <value name="Update.ConfirmDownload" type="hex" data="01"/>
+ <value name="Update.UseBuilds" type="hex" data="02"/>
+ <value name="ColorTable00" type="dword" data="00000000"/>
+ <value name="ColorTable01" type="dword" data="00800000"/>
+ <value name="ColorTable02" type="dword" data="00008000"/>
+ <value name="ColorTable03" type="dword" data="00808000"/>
+ <value name="ColorTable04" type="dword" data="00000080"/>
+ <value name="ColorTable05" type="dword" data="00800080"/>
+ <value name="ColorTable06" type="dword" data="00008080"/>
+ <value name="ColorTable07" type="dword" data="00c0c0c0"/>
+ <value name="ColorTable08" type="dword" data="00808080"/>
+ <value name="ColorTable09" type="dword" data="00ff0000"/>
+ <value name="ColorTable10" type="dword" data="0000ff00"/>
+ <value name="ColorTable11" type="dword" data="00ffff00"/>
+ <value name="ColorTable12" type="dword" data="000000ff"/>
+ <value name="ColorTable13" type="dword" data="00ff00ff"/>
+ <value name="ColorTable14" type="dword" data="0000ffff"/>
+ <value name="ColorTable15" type="dword" data="00ffffff"/>
+ <value name="ColorTable16" type="dword" data="00000000"/>
+ <value name="ColorTable17" type="dword" data="00800000"/>
+ <value name="ColorTable18" type="dword" data="00008000"/>
+ <value name="ColorTable19" type="dword" data="00808000"/>
+ <value name="ColorTable20" type="dword" data="00000080"/>
+ <value name="ColorTable21" type="dword" data="00800080"/>
+ <value name="ColorTable22" type="dword" data="00008080"/>
+ <value name="ColorTable23" type="dword" data="00c0c0c0"/>
+ <value name="ColorTable24" type="dword" data="00808080"/>
+ <value name="ColorTable25" type="dword" data="00ff0000"/>
+ <value name="ColorTable26" type="dword" data="0000ff00"/>
+ <value name="ColorTable27" type="dword" data="00ffff00"/>
+ <value name="ColorTable28" type="dword" data="000000ff"/>
+ <value name="ColorTable29" type="dword" data="00ff00ff"/>
+ <value name="ColorTable30" type="dword" data="0000ffff"/>
+ <value name="ColorTable31" type="dword" data="00ffffff"/>
+ <value name="ExtendColors" type="hex" data="00"/>
+ <value name="ExtendColorIdx" type="hex" data="0e"/>
+ <value name="TextColorIdx" type="hex" data="10"/>
+ <value name="BackColorIdx" type="hex" data="10"/>
+ <value name="PopTextColorIdx" type="hex" data="10"/>
+ <value name="PopBackColorIdx" type="hex" data="10"/>
+ <value name="ExtendFonts" type="hex" data="00"/>
+ <value name="ExtendFontNormalIdx" type="hex" data="01"/>
+ <value name="ExtendFontBoldIdx" type="hex" data="0c"/>
+ <value name="ExtendFontItalicIdx" type="hex" data="0d"/>
+ <value name="CursorTypeActive" type="dword" data="000232c1"/>
+ <value name="CursorTypeInactive" type="dword" data="00823283"/>
+ <value name="ClipboardDetectLineEnd" type="hex" data="01"/>
+ <value name="ClipboardBashMargin" type="hex" data="00"/>
+ <value name="ClipboardTrimTrailing" type="hex" data="02"/>
+ <value name="ClipboardEOL" type="hex" data="00"/>
+ <value name="ClipboardArrowStart" type="hex" data="01"/>
+ <value name="ClipboardAllLines" type="hex" data="01"/>
+ <value name="ClipboardFirstLine" type="hex" data="01"/>
+ <value name="ClipboardClickPromptPosition" type="hex" data="02"/>
+ <value name="ClipboardDeleteLeftWord" type="hex" data="02"/>
+ <value name="TrueColorerSupport" type="hex" data="01"/>
+ <value name="FadeInactive" type="hex" data="01"/>
+ <value name="FadeInactiveLow" type="hex" data="00"/>
+ <value name="FadeInactiveHigh" type="hex" data="d0"/>
+ <value name="ConVisible" type="hex" data="00"/>
+ <value name="ConInMode" type="dword" data="ffffffff"/>
+ <value name="SetDefaultTerminal" type="hex" data="00"/>
+ <value name="SetDefaultTerminalStartup" type="hex" data="00"/>
+ <value name="SetDefaultTerminalStartupTSA" type="hex" data="01"/>
+ <value name="DefaultTerminalNoInjects" type="hex" data="00"/>
+ <value name="DefaultTerminalNewWindow" type="hex" data="00"/>
+ <value name="DefaultTerminalConfirm" type="hex" data="01"/>
+ <value name="DefaultTerminalApps" type="string" data="explorer.exe"/>
+ <value name="ProcessAnsi" type="hex" data="01"/>
+ <value name="AnsiLog" type="hex" data="00"/>
+ <value name="AnsiLogPath" type="string" data="%ConEmuDir%\Logs\"/>
+ <value name="ProcessNewConArg" type="hex" data="01"/>
+ <value name="ConsoleExceptionHandler" type="hex" data="00"/>
+ <value name="UseClink" type="hex" data="01"/>
+ <value name="StartType" type="hex" data="00"/>
+ <value name="CmdLine" type="string" data=""/>
+ <value name="StartTasksFile" type="string" data=""/>
+ <value name="StartTasksName" type="string" data=""/>
+ <value name="StartFarFolders" type="hex" data="00"/>
+ <value name="StartFarEditors" type="hex" data="00"/>
+ <value name="StoreTaskbarkTasks" type="hex" data="00"/>
+ <value name="StoreTaskbarCommands" type="hex" data="00"/>
+ <value name="SaveCmdHistory" type="hex" data="01"/>
+ <value name="CmdLineHistory" type="multi">
+ <line data="{Bash::mozdev}"/>
+ </value>
+ <value name="ShowHelpTooltips" type="hex" data="01"/>
+ <value name="Multi" type="hex" data="01"/>
+ <value name="Multi.ShowButtons" type="hex" data="01"/>
+ <value name="Multi.NumberInCaption" type="hex" data="00"/>
+ <value name="Multi.CloseConfirm" type="hex" data="01"/>
+ <value name="Multi.CloseEditViewConfirm" type="hex" data="00"/>
+ <value name="Multi.NewConfirm" type="hex" data="01"/>
+ <value name="Multi.UseArrows" type="hex" data="00"/>
+ <value name="Multi.UseNumbers" type="hex" data="01"/>
+ <value name="Multi.UseWinTab" type="hex" data="00"/>
+ <value name="Multi.AutoCreate" type="hex" data="00"/>
+ <value name="Multi.LeaveOnClose" type="hex" data="00"/>
+ <value name="Multi.HideOnClose" type="hex" data="00"/>
+ <value name="Multi.MinByEsc" type="hex" data="02"/>
+ <value name="MapShiftEscToEsc" type="hex" data="01"/>
+ <value name="Multi.Iterate" type="hex" data="01"/>
+ <value name="Multi.SplitWidth" type="hex" data="04"/>
+ <value name="Multi.SplitHeight" type="hex" data="04"/>
+ <value name="FontName" type="string" data="Consolas"/>
+ <value name="FontName2" type="string" data="Consolas"/>
+ <value name="FontAutoSize" type="hex" data="00"/>
+ <value name="FontSize" type="ulong" data="16"/>
+ <value name="FontSizeX" type="ulong" data="0"/>
+ <value name="FontSizeX2" type="ulong" data="0"/>
+ <value name="FontSizeX3" type="ulong" data="0"/>
+ <value name="FontCharSet" type="hex" data="cc"/>
+ <value name="Anti-aliasing" type="ulong" data="6"/>
+ <value name="FontBold" type="hex" data="00"/>
+ <value name="FontItalic" type="hex" data="00"/>
+ <value name="Monospace" type="hex" data="01"/>
+ <value name="BackGround Image show" type="hex" data="00"/>
+ <value name="BackGround Image" type="string" data="c:\back.bmp"/>
+ <value name="bgImageDarker" type="hex" data="ff"/>
+ <value name="bgImageColors" type="dword" data="ffffffff"/>
+ <value name="bgOperation" type="hex" data="00"/>
+ <value name="bgPluginAllowed" type="hex" data="01"/>
+ <value name="AlphaValue" type="hex" data="ff"/>
+ <value name="AlphaValueSeparate" type="hex" data="00"/>
+ <value name="AlphaValueInactive" type="hex" data="ff"/>
+ <value name="UserScreenTransparent" type="hex" data="00"/>
+ <value name="ColorKeyTransparent" type="hex" data="00"/>
+ <value name="ColorKeyValue" type="dword" data="00010101"/>
+ <value name="UseCurrentSizePos" type="hex" data="01"/>
+ <value name="WindowMode" type="dword" data="0000051f"/>
+ <value name="ConWnd Width" type="dword" data="00000050"/>
+ <value name="ConWnd Height" type="dword" data="00000019"/>
+ <value name="Cascaded" type="hex" data="01"/>
+ <value name="ConWnd X" type="long" data="164"/>
+ <value name="ConWnd Y" type="long" data="428"/>
+ <value name="16bit Height" type="ulong" data="0"/>
+ <value name="AutoSaveSizePos" type="hex" data="00"/>
+ <value name="IntegralSize" type="hex" data="00"/>
+ <value name="QuakeStyle" type="hex" data="00"/>
+ <value name="QuakeAnimation" type="ulong" data="300"/>
+ <value name="HideCaption" type="hex" data="00"/>
+ <value name="HideChildCaption" type="hex" data="01"/>
+ <value name="FocusInChildWindows" type="hex" data="01"/>
+ <value name="HideCaptionAlways" type="hex" data="00"/>
+ <value name="HideCaptionAlwaysFrame" type="hex" data="ff"/>
+ <value name="HideCaptionAlwaysDelay" type="ulong" data="2000"/>
+ <value name="HideCaptionAlwaysDisappear" type="ulong" data="2000"/>
+ <value name="DownShowHiddenMessage" type="hex" data="00"/>
+ <value name="ConsoleFontName" type="string" data="Lucida Console"/>
+ <value name="ConsoleFontWidth" type="long" data="3"/>
+ <value name="ConsoleFontHeight" type="long" data="5"/>
+ <value name="DefaultBufferHeight" type="long" data="1000"/>
+ <value name="AutoBufferHeight" type="hex" data="01"/>
+ <value name="CmdOutputCP" type="long" data="0"/>
+ <value name="ComSpec.Type" type="hex" data="00"/>
+ <value name="ComSpec.Bits" type="hex" data="00"/>
+ <value name="ComSpec.UpdateEnv" type="hex" data="00"/>
+ <value name="ComSpec.EnvAddPath" type="hex" data="01"/>
+ <value name="ComSpec.EnvAddExePath" type="hex" data="01"/>
+ <value name="ComSpec.UncPaths" type="hex" data="01"/>
+ <value name="ComSpec.Path" type="string" data=""/>
+ <value name="CTS.Intelligent" type="hex" data="01"/>
+ <value name="CTS.IntelligentExceptions" type="string" data="far|vim.exe"/>
+ <value name="CTS.AutoCopy" type="hex" data="01"/>
+ <value name="CTS.IBeam" type="hex" data="01"/>
+ <value name="CTS.EndOnTyping" type="hex" data="00"/>
+ <value name="CTS.EndOnKeyPress" type="hex" data="00"/>
+ <value name="CTS.Freeze" type="hex" data="00"/>
+ <value name="CTS.SelectBlock" type="hex" data="01"/>
+ <value name="CTS.SelectText" type="hex" data="01"/>
+ <value name="CTS.HtmlFormat" type="hex" data="00"/>
+ <value name="CTS.ActMode" type="hex" data="02"/>
+ <value name="CTS.RBtnAction" type="hex" data="03"/>
+ <value name="CTS.MBtnAction" type="hex" data="00"/>
+ <value name="CTS.ColorIndex" type="hex" data="e0"/>
+ <value name="ClipboardConfirmEnter" type="hex" data="01"/>
+ <value name="ClipboardConfirmLonger" type="ulong" data="200"/>
+ <value name="FarGotoEditorOpt" type="hex" data="01"/>
+ <value name="FarGotoEditorPath" type="string" data="far.exe /e%1:%2 &quot;%3&quot;"/>
+ <value name="HighlightMouseRow" type="hex" data="00"/>
+ <value name="HighlightMouseCol" type="hex" data="00"/>
+ <value name="FixFarBorders" type="hex" data="01"/>
+ <value name="FixFarBordersRanges" type="string" data="2013-25C4;"/>
+ <value name="ExtendUCharMap" type="hex" data="01"/>
+ <value name="EnhanceGraphics" type="hex" data="01"/>
+ <value name="EnhanceButtons" type="hex" data="00"/>
+ <value name="PartBrush75" type="hex" data="c8"/>
+ <value name="PartBrush50" type="hex" data="96"/>
+ <value name="PartBrush25" type="hex" data="5a"/>
+ <value name="PartBrushBlack" type="hex" data="20"/>
+ <value name="RightClick opens context menu" type="hex" data="02"/>
+ <value name="RightClickMacro2" type="string" data=""/>
+ <value name="SendAltTab" type="hex" data="00"/>
+ <value name="SendAltEsc" type="hex" data="00"/>
+ <value name="SendAltPrintScrn" type="hex" data="00"/>
+ <value name="SendPrintScrn" type="hex" data="00"/>
+ <value name="SendCtrlEsc" type="hex" data="00"/>
+ <value name="Min2Tray" type="hex" data="00"/>
+ <value name="AlwaysShowTrayIcon" type="hex" data="00"/>
+ <value name="SafeFarClose" type="hex" data="01"/>
+ <value name="SafeFarCloseMacro" type="string" data=""/>
+ <value name="FARuseASCIIsort" type="hex" data="00"/>
+ <value name="ShellNoZoneCheck" type="hex" data="00"/>
+ <value name="FixAltOnAltTab" type="hex" data="00"/>
+ <value name="DisableMouse" type="hex" data="00"/>
+ <value name="RSelectionFix" type="hex" data="01"/>
+ <value name="MouseSkipActivation" type="hex" data="01"/>
+ <value name="MouseSkipMoving" type="hex" data="01"/>
+ <value name="FarHourglass" type="hex" data="01"/>
+ <value name="FarHourglassDelay" type="ulong" data="500"/>
+ <value name="Dnd" type="hex" data="01"/>
+ <value name="DndDrop" type="hex" data="01"/>
+ <value name="DefCopy" type="hex" data="01"/>
+ <value name="DropUseMenu" type="hex" data="02"/>
+ <value name="DragOverlay" type="hex" data="01"/>
+ <value name="DragShowIcons" type="hex" data="01"/>
+ <value name="DebugSteps" type="hex" data="01"/>
+ <value name="DragPanel" type="hex" data="02"/>
+ <value name="DragPanelBothEdges" type="hex" data="00"/>
+ <value name="KeyBarRClick" type="hex" data="01"/>
+ <value name="StatusBar.Show" type="hex" data="01"/>
+ <value name="StatusBar.Flags" type="dword" data="00000002"/>
+ <value name="StatusFontFace" type="string" data="Segoe UI"/>
+ <value name="StatusFontCharSet" type="ulong" data="0"/>
+ <value name="StatusFontHeight" type="long" data="14"/>
+ <value name="StatusBar.Color.Back" type="dword" data="00404040"/>
+ <value name="StatusBar.Color.Light" type="dword" data="00ffffff"/>
+ <value name="StatusBar.Color.Dark" type="dword" data="00a0a0a0"/>
+ <value name="StatusBar.Hide.VCon" type="hex" data="00"/>
+ <value name="StatusBar.Hide.CapsL" type="hex" data="00"/>
+ <value name="StatusBar.Hide.NumL" type="hex" data="00"/>
+ <value name="StatusBar.Hide.ScrL" type="hex" data="00"/>
+ <value name="StatusBar.Hide.Lang" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WPos" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WSize" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WClient" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WWork" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Style" type="hex" data="01"/>
+ <value name="StatusBar.Hide.StyleEx" type="hex" data="01"/>
+ <value name="StatusBar.Hide.hFore" type="hex" data="01"/>
+ <value name="StatusBar.Hide.hFocus" type="hex" data="01"/>
+ <value name="StatusBar.Hide.ABuf" type="hex" data="00"/>
+ <value name="StatusBar.Hide.CPos" type="hex" data="00"/>
+ <value name="StatusBar.Hide.CSize" type="hex" data="01"/>
+ <value name="StatusBar.Hide.BSize" type="hex" data="00"/>
+ <value name="StatusBar.Hide.CurX" type="hex" data="01"/>
+ <value name="StatusBar.Hide.CurY" type="hex" data="01"/>
+ <value name="StatusBar.Hide.CurS" type="hex" data="01"/>
+ <value name="StatusBar.Hide.CurI" type="hex" data="00"/>
+ <value name="StatusBar.Hide.ConEmuPID" type="hex" data="01"/>
+ <value name="StatusBar.Hide.ConEmuHWND" type="hex" data="01"/>
+ <value name="StatusBar.Hide.ConEmuView" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Srv" type="hex" data="00"/>
+ <value name="StatusBar.Hide.SrvHWND" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Transparency" type="hex" data="00"/>
+ <value name="StatusBar.Hide.New" type="hex" data="00"/>
+ <value name="StatusBar.Hide.Sync" type="hex" data="00"/>
+ <value name="StatusBar.Hide.Proc" type="hex" data="00"/>
+ <value name="StatusBar.Hide.Title" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Time" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Resize" type="hex" data="00"/>
+ <value name="Tabs" type="hex" data="01"/>
+ <value name="TabsLocation" type="hex" data="00"/>
+ <value name="TabIcons" type="hex" data="01"/>
+ <value name="OneTabPerGroup" type="hex" data="00"/>
+ <value name="ActivateSplitMouseOver" type="hex" data="02"/>
+ <value name="TabSelf" type="hex" data="01"/>
+ <value name="TabLazy" type="hex" data="01"/>
+ <value name="TabRecent" type="hex" data="01"/>
+ <value name="TabDblClick" type="ulong" data="1"/>
+ <value name="TabBtnDblClick" type="ulong" data="0"/>
+ <value name="TabsOnTaskBar" type="hex" data="02"/>
+ <value name="TaskBarOverlay" type="hex" data="01"/>
+ <value name="TaskbarProgress" type="hex" data="01"/>
+ <value name="TabCloseMacro" type="string" data=""/>
+ <value name="TabFontFace" type="string" data="Segoe UI"/>
+ <value name="TabFontCharSet" type="ulong" data="0"/>
+ <value name="TabFontHeight" type="long" data="16"/>
+ <value name="SaveAllEditors" type="string" data=""/>
+ <value name="ToolbarAddSpace" type="long" data="0"/>
+ <value name="TabConsole" type="string" data="&lt;%c&gt; %s"/>
+ <value name="TabSkipWords" type="string" data="Administrator:|ÐдминиÑтратор:"/>
+ <value name="TabPanels" type="string" data="&lt;%c&gt; %s"/>
+ <value name="TabEditor" type="string" data="&lt;%c.%i&gt;{%s}"/>
+ <value name="TabEditorModified" type="string" data="&lt;%c.%i&gt;[%s] *"/>
+ <value name="TabViewer" type="string" data="&lt;%c.%i&gt;[%s]"/>
+ <value name="TabLenMax" type="ulong" data="20"/>
+ <value name="AdminTitleSuffix" type="string" data=" (Admin)"/>
+ <value name="AdminShowShield" type="hex" data="01"/>
+ <value name="HideInactiveConsoleTabs" type="hex" data="00"/>
+ <value name="ShowFarWindows" type="hex" data="01"/>
+ <value name="TryToCenter" type="hex" data="00"/>
+ <value name="CenterConsolePad" type="ulong" data="0"/>
+ <value name="ShowScrollbar" type="hex" data="02"/>
+ <value name="ScrollBarAppearDelay" type="ulong" data="100"/>
+ <value name="ScrollBarDisappearDelay" type="ulong" data="1000"/>
+ <value name="IconID" type="ulong" data="1"/>
+ <value name="MainTimerElapse" type="ulong" data="10"/>
+ <value name="MainTimerInactiveElapse" type="ulong" data="1000"/>
+ <value name="AffinityMask" type="dword" data="00000000"/>
+ <value name="SkipFocusEvents" type="hex" data="00"/>
+ <value name="MonitorConsoleLang" type="hex" data="03"/>
+ <value name="DesktopMode" type="hex" data="00"/>
+ <value name="SnapToDesktopEdges" type="hex" data="00"/>
+ <value name="AlwaysOnTop" type="hex" data="00"/>
+ <value name="SleepInBackground" type="hex" data="00"/>
+ <value name="RetardInactivePanes" type="hex" data="00"/>
+ <value name="MinimizeOnLoseFocus" type="hex" data="00"/>
+ <value name="DisableFarFlashing" type="hex" data="00"/>
+ <value name="DisableAllFlashing" type="hex" data="00"/>
+ <value name="FindText" type="string" data=""/>
+ <value name="FindMatchCase" type="hex" data="00"/>
+ <value name="FindMatchWholeWords" type="hex" data="00"/>
+ <value name="FindTransparent" type="hex" data="01"/>
+ <value name="PanView.BackColor" type="dword" data="30ffffff"/>
+ <value name="PanView.PFrame" type="long" data="1"/>
+ <value name="PanView.PFrameColor" type="dword" data="28808080"/>
+ <value name="PanView.SFrame" type="long" data="1"/>
+ <value name="PanView.SFrameColor" type="dword" data="07c0c0c0"/>
+ <value name="PanView.Thumbs.ImgSize" type="long" data="96"/>
+ <value name="PanView.Thumbs.SpaceX1" type="long" data="1"/>
+ <value name="PanView.Thumbs.SpaceY1" type="long" data="1"/>
+ <value name="PanView.Thumbs.SpaceX2" type="long" data="5"/>
+ <value name="PanView.Thumbs.SpaceY2" type="long" data="20"/>
+ <value name="PanView.Thumbs.LabelSpacing" type="long" data="2"/>
+ <value name="PanView.Thumbs.LabelPadding" type="long" data="0"/>
+ <value name="PanView.Thumbs.FontName" type="string" data="Segoe UI"/>
+ <value name="PanView.Thumbs.FontHeight" type="long" data="14"/>
+ <value name="PanView.Tiles.ImgSize" type="long" data="48"/>
+ <value name="PanView.Tiles.SpaceX1" type="long" data="4"/>
+ <value name="PanView.Tiles.SpaceY1" type="long" data="4"/>
+ <value name="PanView.Tiles.SpaceX2" type="long" data="172"/>
+ <value name="PanView.Tiles.SpaceY2" type="long" data="4"/>
+ <value name="PanView.Tiles.LabelSpacing" type="long" data="4"/>
+ <value name="PanView.Tiles.LabelPadding" type="long" data="1"/>
+ <value name="PanView.Tiles.FontName" type="string" data="Segoe UI"/>
+ <value name="PanView.Tiles.FontHeight" type="long" data="14"/>
+ <value name="PanView.LoadPreviews" type="hex" data="03"/>
+ <value name="PanView.LoadFolders" type="hex" data="01"/>
+ <value name="PanView.LoadTimeout" type="ulong" data="15"/>
+ <value name="PanView.MaxZoom" type="ulong" data="600"/>
+ <value name="PanView.UsePicView2" type="hex" data="01"/>
+ <value name="PanView.RestoreOnStartup" type="hex" data="00"/>
+ <value name="Update.VerLocation" type="string" data=""/>
+ <value name="Update.UseProxy" type="hex" data="00"/>
+ <value name="Update.Proxy" type="string" data=""/>
+ <value name="Update.ProxyUser" type="string" data=""/>
+ <value name="Update.ProxyPassword" type="string" data=""/>
+ <value name="Update.ExeCmdLine" type="string" data=""/>
+ <value name="Update.ArcCmdLine" type="string" data=""/>
+ <value name="Update.DownloadPath" type="string" data="%TEMP%\ConEmu"/>
+ <value name="Update.LeavePackages" type="hex" data="00"/>
+ <value name="Update.PostUpdateCmd" type="string" data="echo Last successful update&gt;ConEmuUpdate.info &amp;&amp; date /t&gt;&gt;ConEmuUpdate.info &amp;&amp; time /t&gt;&gt;ConEmuUpdate.info"/>
+ <value name="Multi.Modifier" type="dword" data="0000005b"/>
+ <value name="Multi.ArrowsModifier" type="dword" data="0000005b"/>
+ <value name="KeyMacroVersion" type="hex" data="02"/>
+ <value name="MinimizeRestore" type="dword" data="000011c0"/>
+ <value name="MinimizeRestore2" type="dword" data="00000000"/>
+ <value name="GlobalRestore" type="dword" data="00000000"/>
+ <value name="ForcedFullScreen" type="dword" data="125b110d"/>
+ <value name="SwitchGuiFocus" type="dword" data="00000000"/>
+ <value name="SetFocusGui" type="dword" data="00000000"/>
+ <value name="SetFocusChild" type="dword" data="00000000"/>
+ <value name="ChildSystemMenu" type="dword" data="00000000"/>
+ <value name="Multi.NewSplitV" type="dword" data="0010114f"/>
+ <value name="Multi.NewSplitH" type="dword" data="00101145"/>
+ <value name="Multi.SplitSizeVU" type="dword" data="00105d26"/>
+ <value name="Multi.SplitSizeVD" type="dword" data="00105d28"/>
+ <value name="Multi.SplitSizeHL" type="dword" data="00105d25"/>
+ <value name="Multi.SplitSizeHR" type="dword" data="00105d27"/>
+ <value name="Key.TabPane1" type="dword" data="00005d09"/>
+ <value name="Key.TabPane2" type="dword" data="00105d09"/>
+ <value name="Multi.SplitFocusU" type="dword" data="00005d26"/>
+ <value name="Multi.SplitFocusD" type="dword" data="00005d28"/>
+ <value name="Multi.SplitFocusL" type="dword" data="00005d25"/>
+ <value name="Multi.SplitFocusR" type="dword" data="00005d27"/>
+ <value name="Multi.NewConsole" type="dword" data="00005b57"/>
+ <value name="Multi.NewConsoleShift" type="dword" data="00105b57"/>
+ <value name="Multi.NewConsolePopup" type="dword" data="00005b4e"/>
+ <value name="Multi.NewConsolePopup2" type="dword" data="00000000"/>
+ <value name="Multi.NewWindow" type="dword" data="00000000"/>
+ <value name="Multi.NewAttach" type="dword" data="00005b47"/>
+ <value name="Multi.Next" type="dword" data="00005b51"/>
+ <value name="Multi.NextShift" type="dword" data="00105b51"/>
+ <value name="Multi.Recreate" type="dword" data="00005bc0"/>
+ <value name="Multi.AltCon" type="dword" data="00005b41"/>
+ <value name="Multi.Scroll" type="dword" data="00005b53"/>
+ <value name="Multi.Close" type="dword" data="00005b2e"/>
+ <value name="CloseTabKey" type="dword" data="00125b2e"/>
+ <value name="CloseGroupKey" type="dword" data="00000000"/>
+ <value name="CloseGroupPrcKey" type="dword" data="00000000"/>
+ <value name="CloseAllConKey" type="dword" data="00000000"/>
+ <value name="CloseExceptConKey" type="dword" data="00000000"/>
+ <value name="TerminateProcessKey" type="dword" data="00105b2e"/>
+ <value name="DuplicateRootKey" type="dword" data="00000000"/>
+ <value name="CloseConEmuKey" type="dword" data="00005b73"/>
+ <value name="Multi.Rename" type="dword" data="00005d52"/>
+ <value name="Multi.MoveLeft" type="dword" data="00125b25"/>
+ <value name="Multi.MoveRight" type="dword" data="00125b27"/>
+ <value name="Multi.CmdKey" type="dword" data="00005b58"/>
+ <value name="CTS.VkBlockStart" type="dword" data="00000000"/>
+ <value name="CTS.VkTextStart" type="dword" data="00000000"/>
+ <value name="CTS.VkCopyFmt0" type="dword" data="00001143"/>
+ <value name="CTS.VkCopyFmt1" type="dword" data="00101143"/>
+ <value name="CTS.VkCopyFmt2" type="dword" data="00000000"/>
+ <value name="CTS.VkCopyAll" type="dword" data="00000000"/>
+ <value name="HighlightMouseSwitch" type="dword" data="00005d4c"/>
+ <value name="Multi.ShowTabsList" type="dword" data="00000000"/>
+ <value name="Multi.ShowTabsList2" type="dword" data="00005d7b"/>
+ <value name="ClipboardVkAllLines" type="dword" data="0000102d"/>
+ <value name="ClipboardVkFirstLine" type="dword" data="00001156"/>
+ <value name="DeleteWordToLeft" type="dword" data="00001108"/>
+ <value name="FindTextKey" type="dword" data="00005d46"/>
+ <value name="ScreenshotKey" type="dword" data="00005b48"/>
+ <value name="ScreenshotFullKey" type="dword" data="00105b48"/>
+ <value name="ShowStatusBarKey" type="dword" data="00005d53"/>
+ <value name="ShowTabBarKey" type="dword" data="00005d54"/>
+ <value name="ShowCaptionKey" type="dword" data="00005d43"/>
+ <value name="AlwaysOnTopKey" type="dword" data="00000000"/>
+ <value name="TransparencyInc" type="dword" data="00000000"/>
+ <value name="TransparencyDec" type="dword" data="00000000"/>
+ <value name="Key.TabMenu" type="dword" data="00005d20"/>
+ <value name="Key.TabMenu2" type="dword" data="00001002"/>
+ <value name="Key.Maximize" type="dword" data="00001278"/>
+ <value name="Key.MaximizeWidth" type="dword" data="00000000"/>
+ <value name="Key.MaximizeHeight" type="dword" data="00000000"/>
+ <value name="Key.FullScreen" type="dword" data="0000120d"/>
+ <value name="Key.SysMenu" type="dword" data="00001220"/>
+ <value name="Key.SysMenu2" type="dword" data="00001102"/>
+ <value name="Key.BufUp" type="dword" data="00001126"/>
+ <value name="Key.BufDn" type="dword" data="00001128"/>
+ <value name="Key.BufPgUp" type="dword" data="00001121"/>
+ <value name="Key.BufPgDn" type="dword" data="00001122"/>
+ <value name="Key.PicViewSlide" type="dword" data="80808013"/>
+ <value name="Key.PicViewSlower" type="dword" data="808080bd"/>
+ <value name="Key.PicViewFaster" type="dword" data="808080bb"/>
+ <value name="FontLargerKey" type="dword" data="000011d0"/>
+ <value name="FontSmallerKey" type="dword" data="000011d1"/>
+ <value name="PasteFileKey" type="dword" data="00101146"/>
+ <value name="PastePathKey" type="dword" data="00101144"/>
+ <value name="PasteCygwinKey" type="dword" data="00005d2d"/>
+ <value name="Key.JumpPrevMonitor" type="dword" data="00105b25"/>
+ <value name="Key.JumpNextMonitor" type="dword" data="00105b27"/>
+ <value name="Key.TileToLeft" type="dword" data="00005b25"/>
+ <value name="Key.TileToRIght" type="dword" data="00005b27"/>
+ <value name="KeyMacro01" type="dword" data="00000000"/>
+ <value name="KeyMacro01.Text" type="string" data=""/>
+ <value name="KeyMacro02" type="dword" data="00000000"/>
+ <value name="KeyMacro02.Text" type="string" data=""/>
+ <value name="KeyMacro03" type="dword" data="00000000"/>
+ <value name="KeyMacro03.Text" type="string" data=""/>
+ <value name="KeyMacro04" type="dword" data="00000000"/>
+ <value name="KeyMacro04.Text" type="string" data=""/>
+ <value name="KeyMacro05" type="dword" data="00000000"/>
+ <value name="KeyMacro05.Text" type="string" data=""/>
+ <value name="KeyMacro06" type="dword" data="00000000"/>
+ <value name="KeyMacro06.Text" type="string" data=""/>
+ <value name="KeyMacro07" type="dword" data="00000000"/>
+ <value name="KeyMacro07.Text" type="string" data=""/>
+ <value name="KeyMacro08" type="dword" data="00000000"/>
+ <value name="KeyMacro08.Text" type="string" data=""/>
+ <value name="KeyMacro09" type="dword" data="00000000"/>
+ <value name="KeyMacro09.Text" type="string" data=""/>
+ <value name="KeyMacro10" type="dword" data="00000000"/>
+ <value name="KeyMacro10.Text" type="string" data=""/>
+ <value name="KeyMacro11" type="dword" data="00000000"/>
+ <value name="KeyMacro11.Text" type="string" data=""/>
+ <value name="KeyMacro12" type="dword" data="00000000"/>
+ <value name="KeyMacro12.Text" type="string" data=""/>
+ <value name="KeyMacro13" type="dword" data="00000000"/>
+ <value name="KeyMacro13.Text" type="string" data=""/>
+ <value name="KeyMacro14" type="dword" data="00000000"/>
+ <value name="KeyMacro14.Text" type="string" data=""/>
+ <value name="KeyMacro15" type="dword" data="00000000"/>
+ <value name="KeyMacro15.Text" type="string" data=""/>
+ <value name="KeyMacro16" type="dword" data="00000000"/>
+ <value name="KeyMacro16.Text" type="string" data=""/>
+ <value name="KeyMacro17" type="dword" data="00000000"/>
+ <value name="KeyMacro17.Text" type="string" data=""/>
+ <value name="KeyMacro18" type="dword" data="00000000"/>
+ <value name="KeyMacro18.Text" type="string" data=""/>
+ <value name="KeyMacro19" type="dword" data="00000000"/>
+ <value name="KeyMacro19.Text" type="string" data=""/>
+ <value name="KeyMacro20" type="dword" data="00000000"/>
+ <value name="KeyMacro20.Text" type="string" data=""/>
+ <value name="KeyMacro21" type="dword" data="00000000"/>
+ <value name="KeyMacro21.Text" type="string" data=""/>
+ <value name="KeyMacro22" type="dword" data="00000000"/>
+ <value name="KeyMacro22.Text" type="string" data=""/>
+ <value name="KeyMacro23" type="dword" data="00000000"/>
+ <value name="KeyMacro23.Text" type="string" data=""/>
+ <value name="KeyMacro24" type="dword" data="00000000"/>
+ <value name="KeyMacro24.Text" type="string" data=""/>
+ <value name="KeyMacro25" type="dword" data="00000000"/>
+ <value name="KeyMacro25.Text" type="string" data=""/>
+ <value name="KeyMacro26" type="dword" data="00000000"/>
+ <value name="KeyMacro26.Text" type="string" data=""/>
+ <value name="KeyMacro27" type="dword" data="00000000"/>
+ <value name="KeyMacro27.Text" type="string" data=""/>
+ <value name="KeyMacro28" type="dword" data="00000000"/>
+ <value name="KeyMacro28.Text" type="string" data=""/>
+ <value name="KeyMacro29" type="dword" data="00000000"/>
+ <value name="KeyMacro29.Text" type="string" data=""/>
+ <value name="KeyMacro30" type="dword" data="00000000"/>
+ <value name="KeyMacro30.Text" type="string" data=""/>
+ <value name="KeyMacro31" type="dword" data="00000000"/>
+ <value name="KeyMacro31.Text" type="string" data=""/>
+ <value name="KeyMacro32" type="dword" data="00000000"/>
+ <value name="KeyMacro32.Text" type="string" data=""/>
+ <value name="CTS.VkBlock" type="hex" data="a4"/>
+ <value name="CTS.VkText" type="hex" data="a0"/>
+ <value name="CTS.VkAct" type="hex" data="00"/>
+ <value name="CTS.VkPrompt" type="hex" data="00"/>
+ <value name="FarGotoEditorVk" type="hex" data="a2"/>
+ <value name="DndLKey" type="hex" data="00"/>
+ <value name="DndRKey" type="hex" data="a2"/>
+ <value name="WndDragKey" type="dword" data="00121101"/>
+ <value name="StartCreateDelay" type="ulong" data="100"/>
+ <value name="VividColors" type="hex" data="01"/>
+ <value name="DefaultTerminalAgressive" type="hex" data="01"/>
+ <value name="DefaultTerminalDebugLog" type="hex" data="00"/>
+ <value name="AnsiExecution" type="hex" data="01"/>
+ <value name="AnsiAllowedCommands" type="multi">
+ <line data="cmd -cur_console:R /cGitShowBranch.cmd"/>
+ </value>
+ <value name="ProcessCmdStart" type="hex" data="00"/>
+ <value name="ProcessCtrlZ" type="hex" data="00"/>
+ <value name="SuppressBells" type="hex" data="01"/>
+ <value name="JumpListAutoUpdate" type="hex" data="01"/>
+ <value name="Multi.ShowSearch" type="hex" data="01"/>
+ <value name="Multi.CloseConfirmFlags" type="hex" data="07"/>
+ <value name="Multi.DupConfirm" type="hex" data="01"/>
+ <value name="Multi.DetachConfirm" type="hex" data="01"/>
+ <value name="FontUseDpi" type="hex" data="01"/>
+ <value name="FontUseUnits" type="hex" data="00"/>
+ <value name="CompressLongStrings" type="hex" data="01"/>
+ <value name="LastMonitor" type="string" data="0,0,2880,1540"/>
+ <value name="Restore2ActiveMon" type="hex" data="00"/>
+ <value name="DownShowExOnTopMessage" type="hex" data="00"/>
+ <value name="UseScrollLock" type="hex" data="01"/>
+ <value name="EnvironmentSet" type="multi">
+ <line data="set PATH=%ConEmuBaseDir%\Scripts;%PATH%"/>
+ </value>
+ <value name="CTS.ResetOnRelease" type="hex" data="00"/>
+ <value name="CTS.EraseBeforeReset" type="hex" data="01"/>
+ <value name="CTS.ForceLocale" type="dword" data="00000000"/>
+ <value name="Anti-aliasing2" type="hex" data="00"/>
+ <value name="UseAltGrayPlus" type="hex" data="01"/>
+ <value name="MouseDragWindow" type="hex" data="01"/>
+ <value name="DebugLog" type="hex" data="00"/>
+ <value name="StatusBar.Hide.VisL" type="hex" data="01"/>
+ <value name="StatusBar.Hide.KeyHooks" type="hex" data="01"/>
+ <value name="StatusBar.Hide.TMode" type="hex" data="01"/>
+ <value name="StatusBar.Hide.RMode" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WVBack" type="hex" data="01"/>
+ <value name="StatusBar.Hide.WVDC" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Zoom" type="hex" data="01"/>
+ <value name="StatusBar.Hide.Dpi" type="hex" data="01"/>
+ <value name="TabFlashChanged" type="long" data="8"/>
+ <value name="TabModifiedSuffix" type="string" data="[*]"/>
+ <value name="Update.InetTool" type="hex" data="00"/>
+ <value name="Update.InetToolCmd" type="string" data=""/>
+ <key name="HotKeys" modified="2016-08-05 15:29:11" build="160724">
+ <value name="KeyMacroVersion" type="hex" data="02"/>
+ <value name="Multi.Modifier" type="dword" data="0000005b"/>
+ <value name="Multi.ArrowsModifier" type="dword" data="0000005b"/>
+ <value name="MinimizeRestore" type="dword" data="000011c0"/>
+ <value name="MinimizeRestore2" type="dword" data="00000000"/>
+ <value name="GlobalRestore" type="dword" data="00000000"/>
+ <value name="CdExplorerPath" type="dword" data="00000000"/>
+ <value name="ForcedFullScreen" type="dword" data="12115b0d"/>
+ <value name="SwitchGuiFocus" type="dword" data="00000000"/>
+ <value name="SetFocusGui" type="dword" data="00000000"/>
+ <value name="SetFocusChild" type="dword" data="00000000"/>
+ <value name="ChildSystemMenu" type="dword" data="00000000"/>
+ <value name="Multi.NewConsole" type="dword" data="00005b57"/>
+ <value name="Multi.NewConsoleShift" type="dword" data="00105b57"/>
+ <value name="Multi.CmdKey" type="dword" data="00005b58"/>
+ <value name="Multi.NewWindow" type="dword" data="00000000"/>
+ <value name="Multi.NewConsolePopup" type="dword" data="00005b4e"/>
+ <value name="Multi.NewConsolePopup2" type="dword" data="00000000"/>
+ <value name="Multi.NewAttach" type="dword" data="00005b47"/>
+ <value name="Multi.NewSplitV" type="dword" data="0010114f"/>
+ <value name="Multi.NewSplitH" type="dword" data="00101145"/>
+ <value name="Multi.SplitMaximize" type="dword" data="00005d0d"/>
+ <value name="Multi.SplitSizeVU" type="dword" data="00105d26"/>
+ <value name="Multi.SplitSizeVD" type="dword" data="00105d28"/>
+ <value name="Multi.SplitSizeHL" type="dword" data="00105d25"/>
+ <value name="Multi.SplitSizeHR" type="dword" data="00105d27"/>
+ <value name="Key.TabPane1" type="dword" data="00005d09"/>
+ <value name="Key.TabPane2" type="dword" data="00105d09"/>
+ <value name="Multi.SplitFocusU" type="dword" data="00005d26"/>
+ <value name="Multi.SplitFocusD" type="dword" data="00005d28"/>
+ <value name="Multi.SplitFocusL" type="dword" data="00005d25"/>
+ <value name="Multi.SplitFocusR" type="dword" data="00005d27"/>
+ <value name="Multi.Next" type="dword" data="00005b51"/>
+ <value name="Multi.NextShift" type="dword" data="00105b51"/>
+ <value name="Multi.Recreate" type="dword" data="00005bc0"/>
+ <value name="Multi.AltCon" type="dword" data="00005b41"/>
+ <value name="Multi.Pause" type="dword" data="80808013"/>
+ <value name="Multi.Scroll" type="dword" data="00005b53"/>
+ <value name="Multi.GroupInput" type="dword" data="00005d47"/>
+ <value name="Multi.Detach" type="dword" data="00000000"/>
+ <value name="Multi.Unfasten" type="dword" data="00000000"/>
+ <value name="Multi.Close" type="dword" data="00005b2e"/>
+ <value name="CloseTabKey" type="dword" data="00125b2e"/>
+ <value name="CloseGroupKey" type="dword" data="00000000"/>
+ <value name="CloseGroupPrcKey" type="dword" data="00000000"/>
+ <value name="CloseAllConKey" type="dword" data="00000000"/>
+ <value name="CloseZombiesKey" type="dword" data="00000000"/>
+ <value name="CloseExceptConKey" type="dword" data="00000000"/>
+ <value name="KillProcessKey" type="dword" data="00121103"/>
+ <value name="KillAllButShellKey" type="dword" data="00125b13"/>
+ <value name="DuplicateRootKey" type="dword" data="00000000"/>
+ <value name="CloseConEmuKey" type="dword" data="00005b73"/>
+ <value name="Multi.Rename" type="dword" data="00005d52"/>
+ <value name="AffinityPriorityKey" type="dword" data="00005d41"/>
+ <value name="Multi.MoveLeft" type="dword" data="00125b25"/>
+ <value name="Multi.MoveRight" type="dword" data="00125b27"/>
+ <value name="CTS.VkBlockStart" type="dword" data="00000000"/>
+ <value name="CTS.VkTextStart" type="dword" data="00000000"/>
+ <value name="CTS.VkCopyFmt0" type="dword" data="00001143"/>
+ <value name="CTS.VkCopyFmt1" type="dword" data="00101143"/>
+ <value name="CTS.VkCopyFmt2" type="dword" data="00000000"/>
+ <value name="CTS.VkCopyAll" type="dword" data="00000000"/>
+ <value name="HighlightMouseSwitch" type="dword" data="00005d4c"/>
+ <value name="HighlightMouseSwitchX" type="dword" data="00005d58"/>
+ <value name="Multi.ShowTabsList" type="dword" data="00000000"/>
+ <value name="Multi.ShowTabsList2" type="dword" data="00005d7b"/>
+ <value name="ClipboardVkAllLines" type="dword" data="0000102d"/>
+ <value name="ClipboardVkFirstLine" type="dword" data="00001156"/>
+ <value name="Key.AltNumpad" type="dword" data="00000000"/>
+ <value name="DeleteWordToLeft" type="dword" data="00001108"/>
+ <value name="FindTextKey" type="dword" data="00005d46"/>
+ <value name="ScreenshotKey" type="dword" data="00005b48"/>
+ <value name="ScreenshotFullKey" type="dword" data="00105b48"/>
+ <value name="ShowStatusBarKey" type="dword" data="00005d53"/>
+ <value name="ShowTabBarKey" type="dword" data="00005d54"/>
+ <value name="ShowCaptionKey" type="dword" data="00005d43"/>
+ <value name="AlwaysOnTopKey" type="dword" data="00000000"/>
+ <value name="TransparencyInc" type="dword" data="00000000"/>
+ <value name="TransparencyDec" type="dword" data="00000000"/>
+ <value name="Key.TabMenu" type="dword" data="00005d20"/>
+ <value name="Key.TabMenu2" type="dword" data="00001002"/>
+ <value name="Key.Maximize" type="dword" data="00001278"/>
+ <value name="Key.MaximizeWidth" type="dword" data="00000000"/>
+ <value name="Key.MaximizeHeight" type="dword" data="00000000"/>
+ <value name="Key.TileToLeft" type="dword" data="00005b25"/>
+ <value name="Key.TileToRight" type="dword" data="00005b27"/>
+ <value name="Key.JumpActiveMonitor" type="dword" data="00000000"/>
+ <value name="Key.JumpPrevMonitor" type="dword" data="00105b25"/>
+ <value name="Key.JumpNextMonitor" type="dword" data="00105b27"/>
+ <value name="Key.FullScreen" type="dword" data="0000120d"/>
+ <value name="Key.SysMenu" type="dword" data="00001220"/>
+ <value name="Key.SysMenu2" type="dword" data="00001102"/>
+ <value name="Key.DebugProcess" type="dword" data="00105b44"/>
+ <value name="Key.DumpProcess" type="dword" data="00000000"/>
+ <value name="Key.DumpTree" type="dword" data="00000000"/>
+ <value name="Key.BufUp" type="dword" data="00001126"/>
+ <value name="Key.BufDn" type="dword" data="00001128"/>
+ <value name="Key.BufPgUp" type="dword" data="00001121"/>
+ <value name="Key.BufPgDn" type="dword" data="00001122"/>
+ <value name="Key.BufHfPgUp" type="dword" data="00005d21"/>
+ <value name="Key.BufHfPgDn" type="dword" data="00005d22"/>
+ <value name="Key.BufTop" type="dword" data="00005d24"/>
+ <value name="Key.BufBottom" type="dword" data="00005d23"/>
+ <value name="Key.BufCursor" type="dword" data="00005d08"/>
+ <value name="Key.ResetTerm" type="dword" data="00000000"/>
+ <value name="FontLargerKey" type="dword" data="000011d0"/>
+ <value name="FontSmallerKey" type="dword" data="000011d1"/>
+ <value name="FontOriginalKey" type="dword" data="00001104"/>
+ <value name="PasteFileKey" type="dword" data="00101146"/>
+ <value name="PastePathKey" type="dword" data="00101144"/>
+ <value name="PasteCygwinKey" type="dword" data="00005d2d"/>
+ <value name="KeyMacro01" type="dword" data="00000000"/>
+ <value name="KeyMacro01.Text" type="string" data=""/>
+ <value name="KeyMacro02" type="dword" data="00000000"/>
+ <value name="KeyMacro02.Text" type="string" data=""/>
+ <value name="KeyMacro03" type="dword" data="00000000"/>
+ <value name="KeyMacro03.Text" type="string" data=""/>
+ <value name="KeyMacro04" type="dword" data="00000000"/>
+ <value name="KeyMacro04.Text" type="string" data=""/>
+ <value name="KeyMacro05" type="dword" data="00000000"/>
+ <value name="KeyMacro05.Text" type="string" data=""/>
+ <value name="KeyMacro06" type="dword" data="00000000"/>
+ <value name="KeyMacro06.Text" type="string" data=""/>
+ <value name="KeyMacro07" type="dword" data="00000000"/>
+ <value name="KeyMacro07.Text" type="string" data=""/>
+ <value name="KeyMacro08" type="dword" data="00000000"/>
+ <value name="KeyMacro08.Text" type="string" data=""/>
+ <value name="KeyMacro09" type="dword" data="00000000"/>
+ <value name="KeyMacro09.Text" type="string" data=""/>
+ <value name="KeyMacro10" type="dword" data="00000000"/>
+ <value name="KeyMacro10.Text" type="string" data=""/>
+ <value name="KeyMacro11" type="dword" data="00000000"/>
+ <value name="KeyMacro11.Text" type="string" data=""/>
+ <value name="KeyMacro12" type="dword" data="00000000"/>
+ <value name="KeyMacro12.Text" type="string" data=""/>
+ <value name="KeyMacro13" type="dword" data="00000000"/>
+ <value name="KeyMacro13.Text" type="string" data=""/>
+ <value name="KeyMacro14" type="dword" data="00000000"/>
+ <value name="KeyMacro14.Text" type="string" data=""/>
+ <value name="KeyMacro15" type="dword" data="00000000"/>
+ <value name="KeyMacro15.Text" type="string" data=""/>
+ <value name="KeyMacro16" type="dword" data="00000000"/>
+ <value name="KeyMacro16.Text" type="string" data=""/>
+ <value name="KeyMacro17" type="dword" data="00000000"/>
+ <value name="KeyMacro17.Text" type="string" data=""/>
+ <value name="KeyMacro18" type="dword" data="00000000"/>
+ <value name="KeyMacro18.Text" type="string" data=""/>
+ <value name="KeyMacro19" type="dword" data="00000000"/>
+ <value name="KeyMacro19.Text" type="string" data=""/>
+ <value name="KeyMacro20" type="dword" data="00000000"/>
+ <value name="KeyMacro20.Text" type="string" data=""/>
+ <value name="KeyMacro21" type="dword" data="00000000"/>
+ <value name="KeyMacro21.Text" type="string" data=""/>
+ <value name="KeyMacro22" type="dword" data="00000000"/>
+ <value name="KeyMacro22.Text" type="string" data=""/>
+ <value name="KeyMacro23" type="dword" data="00000000"/>
+ <value name="KeyMacro23.Text" type="string" data=""/>
+ <value name="KeyMacro24" type="dword" data="00000000"/>
+ <value name="KeyMacro24.Text" type="string" data=""/>
+ <value name="KeyMacro25" type="dword" data="00000000"/>
+ <value name="KeyMacro25.Text" type="string" data=""/>
+ <value name="KeyMacro26" type="dword" data="00000000"/>
+ <value name="KeyMacro26.Text" type="string" data=""/>
+ <value name="KeyMacro27" type="dword" data="00000000"/>
+ <value name="KeyMacro27.Text" type="string" data=""/>
+ <value name="KeyMacro28" type="dword" data="00000000"/>
+ <value name="KeyMacro28.Text" type="string" data=""/>
+ <value name="KeyMacro29" type="dword" data="00000000"/>
+ <value name="KeyMacro29.Text" type="string" data=""/>
+ <value name="KeyMacro30" type="dword" data="00000000"/>
+ <value name="KeyMacro30.Text" type="string" data=""/>
+ <value name="KeyMacro31" type="dword" data="00000000"/>
+ <value name="KeyMacro31.Text" type="string" data=""/>
+ <value name="KeyMacro32" type="dword" data="00000000"/>
+ <value name="KeyMacro32.Text" type="string" data=""/>
+ <value name="CTS.VkBlock" type="hex" data="a4"/>
+ <value name="CTS.VkText" type="hex" data="a0"/>
+ <value name="CTS.VkAct" type="hex" data="00"/>
+ <value name="CTS.VkPrompt" type="hex" data="00"/>
+ <value name="FarGotoEditorVk" type="hex" data="a2"/>
+ <value name="DndLKey" type="hex" data="00"/>
+ <value name="DndRKey" type="hex" data="a2"/>
+ <value name="WndDragKey" type="dword" data="00121101"/>
+ </key>
+ <key name="Tasks" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Count" type="long" data="14"/>
+ <!-- MOZ: This task launches Mozilla's MSYS2 development environment -->
+ <key name="Task1" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Bash::mozdev}"/>
+ <value name="Flags" type="dword" data="00000005"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="set CHERE_INVOKING=1 &amp; set MSYSTEM=MINGW64 &amp; %MSYS2_PATH\usr\bin\bash.exe --login -i -new_console:C:&quot;%MSYS2_PATH\msys2.ico&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <!-- MOZ: End of Mozilla code. -->
+ <key name="Task2" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::cmd}"/>
+ <value name="Flags" type="dword" data="00000002"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd.exe /k &quot;%ConEmuBaseDir%\CmdInit.cmd&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task3" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::cmd (Admin)}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd.exe /k &quot;%ConEmuBaseDir%\CmdInit.cmd&quot; -new_console:a"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task4" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::cmd-32}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="&quot;%windir%\syswow64\cmd.exe&quot; /k &quot;%ConEmuBaseDir%\CmdInit.cmd&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task5" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::cmd 64/32}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="&gt; &quot;%windir%\system32\cmd.exe&quot; /k &quot;&quot;%ConEmuBaseDir%\CmdInit.cmd&quot; &amp; echo This is Native cmd.exe&quot;"/>
+ <value name="Cmd2" type="string" data="&quot;%windir%\syswow64\cmd.exe&quot; /k &quot;&quot;%ConEmuBaseDir%\CmdInit.cmd&quot; &amp; echo This is 32 bit cmd.exe -new_console:s50V&quot;"/>
+ <value name="Active" type="long" data="1"/>
+ <value name="Count" type="long" data="2"/>
+ </key>
+ <key name="Task6" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::PowerShell}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="powershell.exe"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task7" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Shells::PowerShell (Admin)}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="powershell.exe -new_console:a"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task8" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Bash::bash}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data="-icon &quot;%USERPROFILE%\AppData\Local\lxss\bash.ico&quot;"/>
+ <value name="Cmd1" type="string" data="%windir%\system32\bash.exe -cur_console:p"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task9" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Bash::Msys2-64}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="set CHERE_INVOKING=1 &amp; %ConEmuDrive%\msys64\usr\bin\bash.exe --login -i -new_console:C:&quot;%ConEmuDrive%\msys64\msys2.ico&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task10" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Helper::Show ANSI colors}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd.exe /k type &quot;%ConEmuBaseDir%\Addons\AnsiColors16t.ans&quot; -cur_console:n"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task11" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{SDK::VS 11.0 x86 tools prompt}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd /k &quot;&quot;C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\vcvarsall.bat&quot;&quot; x86 -new_console:t:&quot;VS 11.0&quot; -new_console:C:&quot;%CommonProgramFiles(x86)%\microsoft shared\MSEnv\VSFileHandler.dll,23&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task12" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{SDK::VS 12.0 x86 tools prompt}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd /k &quot;&quot;C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat&quot;&quot; x86 -new_console:t:&quot;VS 12.0&quot; -new_console:C:&quot;%CommonProgramFiles(x86)%\microsoft shared\MSEnv\VSFileHandler.dll,28&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task13" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{SDK::VS 14.0 x86 tools prompt}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="cmd /k &quot;&quot;C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat&quot;&quot; x86 -new_console:t:&quot;VS 14.0&quot; -new_console:C:&quot;%CommonProgramFiles(x86)%\microsoft shared\MSEnv\VSFileHandler.dll,33&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ <key name="Task14" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Name" type="string" data="{Tools::Chocolatey (Admin)}"/>
+ <value name="Flags" type="dword" data="00000004"/>
+ <value name="Hotkey" type="dword" data="00000000"/>
+ <value name="GuiArgs" type="string" data=""/>
+ <value name="Cmd1" type="string" data="*cmd.exe /k Title Chocolatey &amp; &quot;%ConEmuBaseDir%\Addons\ChocolateyAbout.cmd&quot;"/>
+ <value name="Active" type="long" data="0"/>
+ <value name="Count" type="long" data="1"/>
+ </key>
+ </key>
+ <key name="Apps" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Count" type="long" data="0"/>
+ </key>
+ <key name="Colors" modified="2016-08-05 15:29:11" build="160724">
+ <value name="Count" type="long" data="0"/>
+ </key>
+ </key>
+ </key>
+</key>
diff --git a/python/mozbuild/TODO b/python/mozbuild/TODO
new file mode 100644
index 000000000..4f519f9dd
--- /dev/null
+++ b/python/mozbuild/TODO
@@ -0,0 +1,3 @@
+dom/imptests Makefile.in files are autogenerated. See
+dom/imptests/writeMakefile.py and bug 782651. We will need to update
+writeMakefile.py to produce mozbuild files.
diff --git a/python/mozbuild/dumbmake/__init__.py b/python/mozbuild/dumbmake/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/dumbmake/__init__.py
diff --git a/python/mozbuild/dumbmake/dumbmake.py b/python/mozbuild/dumbmake/dumbmake.py
new file mode 100644
index 000000000..5457c8b0a
--- /dev/null
+++ b/python/mozbuild/dumbmake/dumbmake.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+from collections import OrderedDict
+from itertools import groupby
+from operator import itemgetter
+from os.path import dirname
+
+WHITESPACE_CHARACTERS = ' \t'
+
+def indentation(line):
+ """Number of whitespace (tab and space) characters at start of |line|."""
+ i = 0
+ while i < len(line):
+ if line[i] not in WHITESPACE_CHARACTERS:
+ break
+ i += 1
+ return i
+
+def dependency_map(lines):
+ """Return a dictionary with keys that are targets and values that
+ are ordered lists of targets that should also be built.
+
+ This implementation is O(n^2), but lovely and simple! We walk the
+ targets in the list, and for each target we walk backwards
+ collecting its dependencies. To make the walking easier, we
+ reverse the list so that we are always walking forwards.
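+
+ For example, ["a", "\tb", "\t\tc"] maps to
+ {"a": [], "b": ["a"], "c": ["b", "a"]}: "c" is nested under "b",
+ which is nested under "a".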
+
+ """
+ pairs = [(indentation(line), line.strip()) for line in lines]
+ pairs.reverse()
+
+ deps = {}
+
+ for i, (indent, target) in enumerate(pairs):
+ if target not in deps:
+ deps[target] = []
+
+ for j in range(i+1, len(pairs)):
+ ind, tar = pairs[j]
+ if ind < indent:
+ indent = ind
+ if tar not in deps[target]:
+ deps[target].append(tar)
+
+ return deps
+
+def all_dependencies(*targets, **kwargs):
+ """Return a list containing all the dependencies of |targets|.
+
+ The relative order of targets is maintained if possible.
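+
+ For example, with dm = {"a": [], "b": ["a"], "c": ["b", "a"]},
+ all_dependencies("c", "a", "b", dependency_map=dm) returns ["b", "a"].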
+
+ """
+ dm = kwargs.pop('dependency_map', None)
+ if dm is None:
+ dm = dependency_map(targets)
+
+ all_targets = OrderedDict() # Used as an ordered set.
+
+ for target in targets:
+ if target in dm:
+ for dependency in dm[target]:
+ # Move element back in the ordered set.
+ if dependency in all_targets:
+ del all_targets[dependency]
+ all_targets[dependency] = True
+
+ return all_targets.keys()
+
+def get_components(path):
+ """Take a path and return all the components of the path."""
+ paths = [path]
+ while True:
+ parent = dirname(paths[-1])
+ if parent == "":
+ break
+ paths.append(parent)
+
+ paths.reverse()
+ return paths
+
+def add_extra_dependencies(target_pairs, dependency_map):
+ """Take a list [(make_dir, make_target)] and expand (make_dir, None)
+ entries with extra make dependencies from |dependency_map|.
+
+ Returns an iterator of pairs (make_dir, make_target).
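+
+ For example, with dependency_map {"b": ["c", "d"]}, the single pair
+ ("b", None) expands to ("b", None), ("c", None), ("d", None).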
+
+ """
+ all_targets = OrderedDict() # Used as an ordered set.
+ make_dirs = OrderedDict() # Used as an ordered set.
+
+ for make_target, group in groupby(target_pairs, itemgetter(1)):
+ # Return non-simple directory targets untouched.
+ if make_target is not None:
+ for pair in group:
+ # Generate dependencies for all components of a path.
+ # Given path a/b/c, examine a, a/b, and a/b/c in that order.
+ paths = get_components(pair[1])
+ # For each component of a path, find and add all dependencies
+ # to the final target list.
+ for target in paths:
+ if target not in all_targets:
+ yield pair[0], target
+ all_targets[target] = True
+ continue
+
+ # Add extra dumbmake dependencies to simple directory targets.
+ for make_dir, _ in group:
+ if make_dir not in make_dirs:
+ yield make_dir, None
+ make_dirs[make_dir] = True
+
+ all_components = []
+ for make_dir in make_dirs:
+ all_components.extend(get_components(make_dir))
+
+ for i in all_dependencies(*all_components, dependency_map=dependency_map):
+ if i not in make_dirs:
+ yield i, None
diff --git a/python/mozbuild/dumbmake/test/__init__.py b/python/mozbuild/dumbmake/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/dumbmake/test/__init__.py
diff --git a/python/mozbuild/dumbmake/test/test_dumbmake.py b/python/mozbuild/dumbmake/test/test_dumbmake.py
new file mode 100644
index 000000000..1172117aa
--- /dev/null
+++ b/python/mozbuild/dumbmake/test/test_dumbmake.py
@@ -0,0 +1,106 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import unicode_literals
+
+import unittest
+
+from mozunit import (
+ main,
+)
+
+from dumbmake.dumbmake import (
+ add_extra_dependencies,
+ all_dependencies,
+ dependency_map,
+ indentation,
+)
+
+class TestDumbmake(unittest.TestCase):
+ def test_indentation(self):
+ self.assertEqual(indentation(""), 0)
+ self.assertEqual(indentation("x"), 0)
+ self.assertEqual(indentation(" x"), 1)
+ self.assertEqual(indentation("\tx"), 1)
+ self.assertEqual(indentation(" \tx"), 2)
+ self.assertEqual(indentation("\t x"), 2)
+ self.assertEqual(indentation(" x "), 1)
+ self.assertEqual(indentation("\tx\t"), 1)
+ self.assertEqual(indentation(" x"), 2)
+ self.assertEqual(indentation(" x"), 4)
+
+ def test_dependency_map(self):
+ self.assertEqual(dependency_map([]), {})
+ self.assertEqual(dependency_map(["a"]), {"a": []})
+ self.assertEqual(dependency_map(["a", "b"]), {"a": [], "b": []})
+ self.assertEqual(dependency_map(["a", "b", "c"]), {"a": [], "b": [], "c": []})
+ # indentation
+ self.assertEqual(dependency_map(["a", "\tb", "a", "\tc"]), {"a": [], "b": ["a"], "c": ["a"]})
+ self.assertEqual(dependency_map(["a", "\tb", "\t\tc"]), {"a": [], "b": ["a"], "c": ["b", "a"]})
+ self.assertEqual(dependency_map(["a", "\tb", "\t\tc", "\td", "\te", "f"]), {"a": [], "b": ["a"], "c": ["b", "a"], "d": ["a"], "e": ["a"], "f": []})
+ # irregular indentation
+ self.assertEqual(dependency_map(["\ta", "b"]), {"a": [], "b": []})
+ self.assertEqual(dependency_map(["a", "\t\t\tb", "\t\tc"]), {"a": [], "b": ["a"], "c": ["a"]})
+ self.assertEqual(dependency_map(["a", "\t\tb", "\t\t\tc", "\t\td", "\te", "f"]), {"a": [], "b": ["a"], "c": ["b", "a"], "d": ["a"], "e": ["a"], "f": []})
+ # repetitions
+ self.assertEqual(dependency_map(["a", "\tb", "a", "\tb"]), {"a": [], "b": ["a"]})
+ self.assertEqual(dependency_map(["a", "\tb", "\t\tc", "b", "\td", "\t\te"]), {"a": [], "b": ["a"], "d": ["b"], "e": ["d", "b"], "c": ["b", "a"]})
+ # cycles are okay
+ self.assertEqual(dependency_map(["a", "\tb", "\t\ta"]), {"a": ["b", "a"], "b": ["a"]})
+
+ def test_all_dependencies(self):
+ dm = {"a": [], "b": ["a"], "c": ["b", "a"], "d": ["a"], "e": ["a"], "f": []}
+ self.assertEqual(all_dependencies("a", dependency_map=dm), [])
+ self.assertEqual(all_dependencies("b", dependency_map=dm), ["a"])
+ self.assertEqual(all_dependencies("c", "a", "b", dependency_map=dm), ["b", "a"])
+ self.assertEqual(all_dependencies("d", dependency_map=dm), ["a"])
+ self.assertEqual(all_dependencies("d", "f", "c", dependency_map=dm), ["b", "a"])
+ self.assertEqual(all_dependencies("a", "b", dependency_map=dm), ["a"])
+ self.assertEqual(all_dependencies("b", "b", dependency_map=dm), ["a"])
+
+ def test_missing_entry(self):
+ # a depends on b, which is missing
+ dm = {"a": ["b"]}
+ self.assertEqual(all_dependencies("a", dependency_map=dm), ["b"])
+ self.assertEqual(all_dependencies("a", "b", dependency_map=dm), ["b"])
+ self.assertEqual(all_dependencies("b", dependency_map=dm), [])
+
+ def test_two_dependencies(self):
+ dm = {"a": ["c"], "b": ["c"], "c": []}
+ # suppose a and b both depend on c. Then we want to build a and b before c...
+ self.assertEqual(all_dependencies("a", "b", dependency_map=dm), ["c"])
+ # ... but relative order is preserved.
+ self.assertEqual(all_dependencies("b", "a", dependency_map=dm), ["c"])
+
+ def test_nested_dependencies(self):
+ # a depends on b depends on c depends on d
+ dm = {"a": ["b", "c", "d"], "b": ["c", "d"], "c": ["d"]}
+ self.assertEqual(all_dependencies("b", "a", dependency_map=dm), ["b", "c", "d"])
+ self.assertEqual(all_dependencies("c", "a", dependency_map=dm), ["b", "c", "d"])
+
+ def test_add_extra_dependencies(self):
+ # a depends on b depends on c depends on d
+ dm = {"a": ["b", "c", "d"], "b": ["c", "d"], "c": ["d"]}
+ # Edge cases.
+ self.assertEqual(list(add_extra_dependencies([], dependency_map=dm)),
+ [])
+ self.assertEqual(list(add_extra_dependencies([(None, "package")], dependency_map=dm)),
+ [(None, "package")])
+ # Easy expansion.
+ self.assertEqual(list(add_extra_dependencies([("b", None)], dependency_map=dm)),
+ [("b", None), ("c", None), ("d", None)])
+ # Expansion with two groups -- each group is handled independently.
+ self.assertEqual(list(add_extra_dependencies([("b", None),
+ (None, "package"),
+ ("c", None)], dependency_map=dm)),
+ [("b", None), (None, "package"),
+ ("c", None), ("d", None)])
+ # Two groups, no duplicate dependencies in each group.
+ self.assertEqual(list(add_extra_dependencies([("a", None), ("b", None),
+ (None, "package"), (None, "install"),
+ ("c", None), ("d", None)], dependency_map=dm)),
+ [("a", None), ("b", None), (None, "package"),
+ (None, "install"), ("c", None), ("d", None)])
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/__init__.py b/python/mozbuild/mozbuild/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/__init__.py
diff --git a/python/mozbuild/mozbuild/action/__init__.py b/python/mozbuild/mozbuild/action/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/__init__.py
diff --git a/python/mozbuild/mozbuild/action/buildlist.py b/python/mozbuild/mozbuild/action/buildlist.py
new file mode 100644
index 000000000..9d601d69a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/buildlist.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''A generic script to add entries to a file
+if the entry does not already exist.
+
+Usage: buildlist.py <filename> <entry> [<entry> ...]
+'''
+from __future__ import absolute_import, print_function
+
+import sys
+import os
+
+from mozbuild.util import (
+ ensureParentDir,
+ lock_file,
+)
+
+def addEntriesToListFile(listFile, entries):
+ """Given a file |listFile| containing one entry per line,
+ add each entry in |entries| to the file, unless it is already
+ present."""
+ ensureParentDir(listFile)
+ lock = lock_file(listFile + ".lck")
+ try:
+ if os.path.exists(listFile):
+ f = open(listFile)
+ existing = set(x.strip() for x in f.readlines())
+ f.close()
+ else:
+ existing = set()
+ for e in entries:
+ if e not in existing:
+ existing.add(e)
+ with open(listFile, 'wb') as f:
+ f.write("\n".join(sorted(existing))+"\n")
+ finally:
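+ # lock_file releases the lock when the returned object is
+ # garbage collected, so dropping the reference unlocks listFile.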
+ lock = None
+
+
+def main(args):
+ if len(args) < 2:
+ print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
+ file=sys.stderr)
+ return 1
+
+ return addEntriesToListFile(args[0], args[1:])
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/cl.py b/python/mozbuild/mozbuild/action/cl.py
new file mode 100644
index 000000000..1840d7d85
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/cl.py
@@ -0,0 +1,124 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import ctypes
+import os
+import sys
+
+from mozprocess.processhandler import ProcessHandlerMixin
+from mozbuild.makeutil import Makefile
+
+CL_INCLUDES_PREFIX = os.environ.get("CL_INCLUDES_PREFIX", "Note: including file:")
+
+GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
+GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW
+
+
+# cl.exe likes to print inconsistent paths in the showIncludes output
+# (some lowercased, some not, with different directions of slashes),
+# and we need the original file case for make/pymake to be happy.
+# As this is slow and needs to be called a lot of times, use a cache
+# to speed things up.
+_normcase_cache = {}
+
+def normcase(path):
+ # Get*PathName want paths with backslashes
+ path = path.replace('/', os.sep)
+ dir = os.path.dirname(path)
+ # name is fortunately always going to have the right case,
+ # so we can use a cache for the directory part only.
+ name = os.path.basename(path)
+ if dir in _normcase_cache:
+ result = _normcase_cache[dir]
+ else:
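+ # Round-trip the directory through its short (8.3) name;
+ # converting back with GetLongPathName yields the on-disk case.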
+ path = ctypes.create_unicode_buffer(dir)
+ length = GetShortPathName(path, None, 0)
+ shortpath = ctypes.create_unicode_buffer(length)
+ GetShortPathName(path, shortpath, length)
+ length = GetLongPathName(shortpath, None, 0)
+ if length > len(path):
+ path = ctypes.create_unicode_buffer(length)
+ GetLongPathName(shortpath, path, length)
+ result = _normcase_cache[dir] = path.value
+ return os.path.join(result, name)
+
+
+def InvokeClWithDependencyGeneration(cmdline):
+ target = ""
+ # Figure out what the target is
+ for arg in cmdline:
+ if arg.startswith("-Fo"):
+ target = arg[3:]
+ break
+
+ if not target:
+ print >>sys.stderr, "No target set"
+ return 1
+
+ # Assume the source file is the last argument
+ source = cmdline[-1]
+ assert not source.startswith('-')
+
+ # The deps target lives here
+ depstarget = os.path.basename(target) + ".pp"
+
+ cmdline += ['-showIncludes']
+
+ mk = Makefile()
+ rule = mk.create_rule([target])
+ rule.add_dependencies([normcase(source)])
+
+ def on_line(line):
+ # cl -showIncludes prefixes every header with "Note: including file:"
+ # and an indentation corresponding to the depth (which we don't need)
+ if line.startswith(CL_INCLUDES_PREFIX):
+ dep = line[len(CL_INCLUDES_PREFIX):].strip()
+ # We can't handle paths with spaces properly in mddepend.pl, but
+ # we can assume that anything in a path with spaces is a system
+ # header and throw it away.
+ dep = normcase(dep)
+ if ' ' not in dep:
+ rule.add_dependencies([dep])
+ else:
+ # Make sure we preserve the relevant output from cl. mozprocess
+ # swallows the newline delimiter, so we need to re-add it.
+ sys.stdout.write(line)
+ sys.stdout.write('\n')
+
+ # We need to ignore children because MSVC can fire up a background process
+ # during compilation. This process is cleaned up on its own. If we kill it,
+ # we can run into weird compilation issues.
+ p = ProcessHandlerMixin(cmdline, processOutputLine=[on_line],
+ ignore_children=True)
+ p.run()
+ p.processOutput()
+ ret = p.wait()
+
+ if ret != 0 or target == "":
+ # p.wait() returns a long. sys.exit() treats only plain ints as
+ # status codes; anything else (even long(0)) is printed and exits
+ # with status 1, so coerce to int first.
+ return int(ret)
+
+ depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
+ depstarget = os.path.join(depsdir, depstarget)
+ if not os.path.isdir(depsdir):
+ try:
+ os.makedirs(depsdir)
+ except OSError:
+ pass # This suppresses the error we get when the dir exists, at the
+ # cost of masking failure to create the directory. We'll just
+ # die on the next line though, so it's not that much of a loss.
+
+ with open(depstarget, "w") as f:
+ mk.dump(f)
+
+ return 0
+
+def main(args):
+ return InvokeClWithDependencyGeneration(args)
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/dump_env.py b/python/mozbuild/mozbuild/action/dump_env.py
new file mode 100644
index 000000000..a6fa19f3a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/dump_env.py
@@ -0,0 +1,10 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# We invoke a Python program to dump our environment in order to get
+# native paths printed on Windows so that these paths can be incorporated
+# into Python configure's environment.
+import os
+for key, value in os.environ.items():
+ print('%s=%s' % (key, value))
diff --git a/python/mozbuild/mozbuild/action/explode_aar.py b/python/mozbuild/mozbuild/action/explode_aar.py
new file mode 100644
index 000000000..fcaf594c1
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/explode_aar.py
@@ -0,0 +1,72 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import errno
+import os
+import shutil
+import sys
+import zipfile
+
+from mozpack.files import FileFinder
+import mozpack.path as mozpath
+from mozbuild.util import ensureParentDir
+
+def explode(aar, destdir):
+ # Keep just the "support-v4-22.2.1" part of e.g. support-v4-22.2.1.aar.
+ name, _ = os.path.splitext(os.path.basename(aar))
+
+ destdir = mozpath.join(destdir, name)
+ if os.path.exists(destdir):
+ # We always want to start fresh.
+ shutil.rmtree(destdir)
+ ensureParentDir(destdir)
+ with zipfile.ZipFile(aar) as zf:
+ zf.extractall(destdir)
+
+ # classes.jar is always present. However, multiple JAR files with the same
+ # name confuses our staged Proguard process in
+ # mobile/android/base/Makefile.in, so we make the names unique here.
+ classes_jar = mozpath.join(destdir, name + '-classes.jar')
+ os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)
+
+ # Embedded JAR libraries are optional.
+ finder = FileFinder(mozpath.join(destdir, 'libs'), find_executables=False)
+ for p, _ in finder.find('*.jar'):
+ jar = mozpath.join(finder.base, name + '-' + p)
+ os.rename(mozpath.join(finder.base, p), jar)
+
+ # Frequently assets/ is present but empty. Protect against meaningless
+ # changes to the AAR files by deleting empty assets/ directories.
+ assets = mozpath.join(destdir, 'assets')
+ try:
+ os.rmdir(assets)
+    except OSError as e:
+ if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
+ pass
+ else:
+ raise
+
+ return True
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Explode Android AAR file.')
+
+ parser.add_argument('--destdir', required=True, help='Destination directory.')
+ parser.add_argument('aars', nargs='+', help='Path to AAR file(s).')
+
+ args = parser.parse_args(argv)
+
+ for aar in args.aars:
+ if not explode(aar, args.destdir):
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
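
A minimal sketch of driving explode() directly (paths are hypothetical).
Given support-v4-22.2.1.aar, the function unpacks into
<destdir>/support-v4-22.2.1/ and renames classes.jar to
support-v4-22.2.1-classes.jar so later Proguard inputs stay unique:

    from mozbuild.action.explode_aar import explode

    explode('downloads/support-v4-22.2.1.aar', 'objdir/exploded-aars')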
diff --git a/python/mozbuild/mozbuild/action/file_generate.py b/python/mozbuild/mozbuild/action/file_generate.py
new file mode 100644
index 000000000..3bdbc264b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/file_generate.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Given a Python script and arguments describing the output file, and
+# the arguments that can be used to generate the output file, call the
+# script's |main| method with appropriate arguments.
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import imp
+import os
+import sys
+import traceback
+
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.makeutil import Makefile
+from mozbuild.util import FileAvoidWrite
+import buildconfig
+
+
+def main(argv):
+ parser = argparse.ArgumentParser('Generate a file from a Python script',
+ add_help=False)
+ parser.add_argument('python_script', metavar='python-script', type=str,
+ help='The Python script to run')
+ parser.add_argument('method_name', metavar='method-name', type=str,
+ help='The method of the script to invoke')
+ parser.add_argument('output_file', metavar='output-file', type=str,
+ help='The file to generate')
+ parser.add_argument('dep_file', metavar='dep-file', type=str,
+ help='File to write any additional make dependencies to')
+ parser.add_argument('additional_arguments', metavar='arg',
+ nargs=argparse.REMAINDER,
+ help="Additional arguments to the script's main() method")
+
+ args = parser.parse_args(argv)
+
+ script = args.python_script
+ # Permit the script to import modules from the same directory in which it
+ # resides. The justification for doing this is that if we were invoking
+ # the script as:
+ #
+ # python script arg1...
+ #
+ # then importing modules from the script's directory would come for free.
+ # Since we're invoking the script in a roundabout way, we provide this
+ # bit of convenience.
+ sys.path.append(os.path.dirname(script))
+ with open(script, 'r') as fh:
+ module = imp.load_module('script', fh, script,
+ ('.py', 'r', imp.PY_SOURCE))
+ method = args.method_name
+ if not hasattr(module, method):
+ print('Error: script "{0}" is missing a {1} method'.format(script, method),
+ file=sys.stderr)
+ return 1
+
+ ret = 1
+ try:
+ with FileAvoidWrite(args.output_file) as output:
+ ret = module.__dict__[method](output, *args.additional_arguments)
+            # The following return values indicate success:
+ # - a set() (see below)
+ # - 0
+ # - False
+ # - None
+ #
+ # Everything else is an error (so scripts can conveniently |return
+ # 1| or similar). If a set is returned, the elements of the set
+ # indicate additional dependencies that will be listed in the deps
+ # file. Python module imports are automatically included as
+ # dependencies.
+ if isinstance(ret, set):
+ deps = ret
+ # The script succeeded, so reset |ret| to indicate that.
+ ret = None
+ else:
+ deps = set()
+
+ # Only write out the dependencies if the script was successful
+ if not ret:
+ # Add dependencies on any python modules that were imported by
+ # the script.
+ deps |= set(iter_modules_in_path(buildconfig.topsrcdir,
+ buildconfig.topobjdir))
+ mk = Makefile()
+ mk.create_rule([args.output_file]).add_dependencies(deps)
+ with FileAvoidWrite(args.dep_file) as dep_file:
+ mk.dump(dep_file)
+ # Even when our file's contents haven't changed, we want to update
+ # the file's mtime so make knows this target isn't still older than
+ # whatever prerequisite caused it to be built this time around.
+ try:
+ os.utime(args.output_file, None)
+ except:
+ print('Error processing file "{0}"'.format(args.output_file),
+ file=sys.stderr)
+ traceback.print_exc()
+ except IOError as e:
+ print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
+ traceback.print_exc()
+ return 1
+ return ret
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
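
For reference, a script consumed by this action follows the contract
described above: the named method receives the FileAvoidWrite for the output
file as its first argument and signals success by returning None, 0, False,
or a set of extra dependencies. A hypothetical generator
(generate_greeting.py and its emit method are invented for illustration):

    # Invoked as:
    #   python file_generate.py generate_greeting.py emit \
    #       greeting.h greeting.h.pp World
    def emit(output, name):
        # |output| is the FileAvoidWrite wrapping greeting.h.
        output.write('#define GREETING "Hello, %s"\n' % name)
        # No extra dependencies beyond the auto-detected module imports.
        return None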
diff --git a/python/mozbuild/mozbuild/action/generate_browsersearch.py b/python/mozbuild/mozbuild/action/generate_browsersearch.py
new file mode 100644
index 000000000..231abe9be
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_browsersearch.py
@@ -0,0 +1,131 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''
+Script to generate the browsersearch.json file for Fennec.
+
+This script follows these steps:
+
+1. Read the region.properties file in all the given source directories (see
+srcdir option). Merge all properties into a single dict accounting for the
+priority of source directories.
+
+2. Read the default search plugin from 'browser.search.defaultenginename'.
+
+3. Read the list of search plugins from the 'browser.search.order.INDEX'
+properties with values identifying particular search plugins by name.
+
+4. Read each region-specific default search plugin from each property named like
+'browser.search.defaultenginename.REGION'.
+
+5. Read the list of region-specific search plugins from the
+'browser.search.order.REGION.INDEX' properties with values identifying
+particular search plugins by name. Here, REGION is one of the regions for
+which a region-specific default plugin was found in step 4.
+
+6. Generate a JSON representation of the above information, and write the result
+to browsersearch.json in the locale-specific raw resource directory
+e.g. raw/browsersearch.json, raw-pt-rBR/browsersearch.json.
+'''
+
+from __future__ import (
+ absolute_import,
+ print_function,
+ unicode_literals,
+)
+
+import argparse
+import codecs
+import json
+import sys
+import os
+
+from mozbuild.dotproperties import (
+ DotProperties,
+)
+from mozbuild.util import (
+ FileAvoidWrite,
+)
+import mozpack.path as mozpath
+
+
+def merge_properties(filename, srcdirs):
+ """Merges properties from the given file in the given source directories."""
+ properties = DotProperties()
+ for srcdir in srcdirs:
+ path = mozpath.join(srcdir, filename)
+ try:
+ properties.update(path)
+ except IOError:
+ # Ignore non-existing files
+ continue
+ return properties
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--verbose', '-v', default=False, action='store_true',
+ help='be verbose')
+ parser.add_argument('--silent', '-s', default=False, action='store_true',
+ help='be silent')
+ parser.add_argument('--srcdir', metavar='SRCDIR',
+ action='append', required=True,
+ help='directories to read inputs from, in order of priority')
+ parser.add_argument('output', metavar='OUTPUT',
+ help='output')
+ opts = parser.parse_args(args)
+
+ # Use reversed order so that the first srcdir has higher priority to override keys.
+ properties = merge_properties('region.properties', reversed(opts.srcdir))
+
+ # Default, not region-specific.
+ default = properties.get('browser.search.defaultenginename')
+ engines = properties.get_list('browser.search.order')
+
+ writer = codecs.getwriter('utf-8')(sys.stdout)
+ if opts.verbose:
+ print('Read {len} engines: {engines}'.format(len=len(engines), engines=engines), file=writer)
+ print("Default engine is '{default}'.".format(default=default), file=writer)
+
+ browsersearch = {}
+ browsersearch['default'] = default
+ browsersearch['engines'] = engines
+
+ # This gets defaults, yes; but it also gets the list of regions known.
+ regions = properties.get_dict('browser.search.defaultenginename')
+
+ browsersearch['regions'] = {}
+ for region in regions.keys():
+ region_default = regions[region]
+ region_engines = properties.get_list('browser.search.order.{region}'.format(region=region))
+
+ if opts.verbose:
+ print("Region '{region}': Read {len} engines: {region_engines}".format(
+ len=len(region_engines), region=region, region_engines=region_engines), file=writer)
+ print("Region '{region}': Default engine is '{region_default}'.".format(
+ region=region, region_default=region_default), file=writer)
+
+ browsersearch['regions'][region] = {
+ 'default': region_default,
+ 'engines': region_engines,
+ }
+
+ # FileAvoidWrite creates its parent directories.
+ output = os.path.abspath(opts.output)
+ fh = FileAvoidWrite(output)
+ json.dump(browsersearch, fh)
+ existed, updated = fh.close()
+
+ if not opts.silent:
+ if updated:
+ print('{output} updated'.format(output=output))
+ else:
+ print('{output} already up-to-date'.format(output=output))
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
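
To make the steps above concrete, here is a hypothetical region.properties
(engine names invented):

    browser.search.defaultenginename=engineA
    browser.search.order.1=engineA
    browser.search.order.2=engineB
    browser.search.defaultenginename.US=engineB
    browser.search.order.US.1=engineB

which would produce (the script emits compact JSON; pretty-printed here):

    {
      "default": "engineA",
      "engines": ["engineA", "engineB"],
      "regions": {
        "US": {"default": "engineB", "engines": ["engineB"]}
      }
    }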
diff --git a/python/mozbuild/mozbuild/action/generate_searchjson.py b/python/mozbuild/mozbuild/action/generate_searchjson.py
new file mode 100644
index 000000000..765a3550a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_searchjson.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import json
+
+engines = []
+
+locale = sys.argv[2]
+output_file = sys.argv[3]
+
+output = open(output_file, 'w')
+
+with open(sys.argv[1]) as f:
+ searchinfo = json.load(f)
+
+if locale in searchinfo["locales"]:
+ output.write(json.dumps(searchinfo["locales"][locale]))
+else:
+ output.write(json.dumps(searchinfo["default"]))
+
+output.close()
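
A hypothetical invocation, to show the selection rule (file names are
invented):

    python generate_searchjson.py search-lists.json en-US searchjson.json

If "en-US" appears under "locales" in the input, its entire subtree is
written verbatim to the output; any other locale falls back to the top-level
"default" subtree.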
diff --git a/python/mozbuild/mozbuild/action/generate_suggestedsites.py b/python/mozbuild/mozbuild/action/generate_suggestedsites.py
new file mode 100644
index 000000000..96d824cc2
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_suggestedsites.py
@@ -0,0 +1,147 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+''' Script to generate the suggestedsites.json file for Fennec.
+
+This script follows these steps:
+
+1. Read the region.properties file in all the given source directories
+(see srcdir option). Merge all properties into a single dict accounting for
+the priority of source directories.
+
+2. Read the list of sites from the 'browser.suggestedsites.list.INDEX' and
+'browser.suggestedsites.restricted.list.INDEX' properties; the value of each
+key is an identifier for a suggested site, e.g. browser.suggestedsites.list.0=mozilla,
+browser.suggestedsites.list.1=fxmarketplace.
+
+3. For each site identifier defined by the list keys, look for matching branches
+containing the respective properties i.e. url, title, etc. For example,
+for a 'mozilla' identifier, we'll look for keys like:
+browser.suggestedsites.mozilla.url, browser.suggestedsites.mozilla.title, etc.
+
+4. Generate a JSON representation of each site, join them in a JSON array, and
+write the result to suggestedsites.json in the locale-specific raw resource
+directory e.g. raw/suggestedsites.json, raw-pt-rBR/suggestedsites.json.
+'''
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import copy
+import json
+import sys
+import os
+
+from mozbuild.dotproperties import (
+ DotProperties,
+)
+from mozbuild.util import (
+ FileAvoidWrite,
+)
+from mozpack.files import (
+ FileFinder,
+)
+import mozpack.path as mozpath
+
+
+def merge_properties(filename, srcdirs):
+ """Merges properties from the given file in the given source directories."""
+ properties = DotProperties()
+ for srcdir in srcdirs:
+ path = mozpath.join(srcdir, filename)
+ try:
+ properties.update(path)
+ except IOError:
+ # Ignore non-existing files
+ continue
+ return properties
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--verbose', '-v', default=False, action='store_true',
+ help='be verbose')
+ parser.add_argument('--silent', '-s', default=False, action='store_true',
+ help='be silent')
+ parser.add_argument('--android-package-name', metavar='NAME',
+ required=True,
+ help='Android package name')
+ parser.add_argument('--resources', metavar='RESOURCES',
+ default=None,
+ help='optional Android resource directory to find drawables in')
+ parser.add_argument('--srcdir', metavar='SRCDIR',
+ action='append', required=True,
+ help='directories to read inputs from, in order of priority')
+ parser.add_argument('output', metavar='OUTPUT',
+ help='output')
+ opts = parser.parse_args(args)
+
+ # Use reversed order so that the first srcdir has higher priority to override keys.
+ properties = merge_properties('region.properties', reversed(opts.srcdir))
+
+ # Keep these two in sync.
+ image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
+ drawables_template = 'drawable*/suggestedsites_{name}.*'
+
+ # Load properties corresponding to each site name and define their
+ # respective image URL.
+ sites = []
+
+ def add_names(names, defaults={}):
+ for name in names:
+ site = copy.deepcopy(defaults)
+ site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
+ site['imageurl'] = image_url_template.format(name=name)
+ sites.append(site)
+
+ # Now check for existence of an appropriately named drawable. If none
+ # exists, throw. This stops a locale discovering, at runtime, that the
+ # corresponding drawable was not added to en-US.
+ if not opts.resources:
+ continue
+ resources = os.path.abspath(opts.resources)
+ finder = FileFinder(resources)
+ matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
+ if not matches:
+ raise Exception("Could not find drawable in '{resources}' for '{name}'"
+ .format(resources=resources, name=name))
+ else:
+ if opts.verbose:
+ print("Found {len} drawables in '{resources}' for '{name}': {matches}"
+ .format(len=len(matches), resources=resources, name=name, matches=matches))
+
+ # We want the lists to be ordered for reproducibility. Each list has a
+ # "default" JSON list item which will be extended by the properties read.
+ lists = [
+ ('browser.suggestedsites.list', {}),
+ ('browser.suggestedsites.restricted.list', {'restricted': True}),
+ ]
+ if opts.verbose:
+ print('Reading {len} suggested site lists: {lists}'.format(len=len(lists), lists=[list_name for list_name, _ in lists]))
+
+ for (list_name, list_item_defaults) in lists:
+ names = properties.get_list(list_name)
+ if opts.verbose:
+ print('Reading {len} suggested sites from {list}: {names}'.format(len=len(names), list=list_name, names=names))
+ add_names(names, list_item_defaults)
+
+
+ # FileAvoidWrite creates its parent directories.
+ output = os.path.abspath(opts.output)
+ fh = FileAvoidWrite(output)
+ json.dump(sites, fh)
+ existed, updated = fh.close()
+
+ if not opts.silent:
+ if updated:
+ print('{output} updated'.format(output=output))
+ else:
+ print('{output} already up-to-date'.format(output=output))
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
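
A hypothetical region.properties fragment (identifiers and values invented):

    browser.suggestedsites.list.0=mozilla
    browser.suggestedsites.restricted.list.0=example
    browser.suggestedsites.mozilla.title=Mozilla
    browser.suggestedsites.mozilla.url=https://www.mozilla.org
    browser.suggestedsites.mozilla.bgcolor=#ce6efc

The 'mozilla' entry becomes a JSON object with those three keys plus an
imageurl of android.resource://<package-name>/drawable/suggestedsites_mozilla;
entries read from the restricted list additionally carry "restricted": true.
If --resources is given and no matching suggestedsites_mozilla drawable exists
there, the build fails rather than letting a locale discover the gap at
runtime.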
diff --git a/python/mozbuild/mozbuild/action/generate_symbols_file.py b/python/mozbuild/mozbuild/action/generate_symbols_file.py
new file mode 100644
index 000000000..ff6136bb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_symbols_file.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import buildconfig
+import os
+from StringIO import StringIO
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import DefinesAction
+
+
+def generate_symbols_file(output, *args):
+    '''Generate a symbols file for the linker from a preprocessed input.'''
+ parser = argparse.ArgumentParser()
+ parser.add_argument('input')
+ parser.add_argument('-D', action=DefinesAction)
+ parser.add_argument('-U', action='append', default=[])
+ args = parser.parse_args(args)
+ input = os.path.abspath(args.input)
+
+ pp = Preprocessor()
+ pp.context.update(buildconfig.defines)
+ if args.D:
+ pp.context.update(args.D)
+ for undefine in args.U:
+ if undefine in pp.context:
+ del pp.context[undefine]
+ # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
+ if buildconfig.substs['MOZ_DEBUG']:
+ pp.context['DEBUG'] = '1'
+ # Ensure @DATA@ works as expected (see the Windows section further below)
+ if buildconfig.substs['OS_TARGET'] == 'WINNT':
+ pp.context['DATA'] = 'DATA'
+ else:
+ pp.context['DATA'] = ''
+ pp.out = StringIO()
+ pp.do_filter('substitution')
+ pp.do_include(input)
+
+ symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]
+
+ if buildconfig.substs['OS_TARGET'] == 'WINNT':
+ # A def file is generated for MSVC link.exe that looks like the
+ # following:
+ # LIBRARY library.dll
+ # EXPORTS
+ # symbol1
+ # symbol2
+ # ...
+ #
+ # link.exe however requires special markers for data symbols, so in
+ # that case the symbols look like:
+ # data_symbol1 DATA
+ # data_symbol2 DATA
+ # ...
+ #
+ # In the input file, this is just annotated with the following syntax:
+ # data_symbol1 @DATA@
+ # data_symbol2 @DATA@
+ # ...
+ # The DATA variable is "simply" expanded by the preprocessor, to
+ # nothing on non-Windows, such that we only get the symbol name on
+ # those platforms, and to DATA on Windows, so that the "DATA" part
+ # is, in fact, part of the symbol name as far as the symbols variable
+ # is concerned.
+ libname, ext = os.path.splitext(os.path.basename(output.name))
+ assert ext == '.def'
+ output.write('LIBRARY %s\nEXPORTS\n %s\n'
+ % (libname, '\n '.join(symbols)))
+ elif buildconfig.substs['GCC_USE_GNU_LD']:
+ # A linker version script is generated for GNU LD that looks like the
+ # following:
+ # {
+ # global:
+ # symbol1;
+ # symbol2;
+ # ...
+ # local:
+ # *;
+ # };
+ output.write('{\nglobal:\n %s;\nlocal:\n *;\n};'
+ % ';\n '.join(symbols))
+ elif buildconfig.substs['OS_TARGET'] == 'Darwin':
+ # A list of symbols is generated for Apple ld that simply lists all
+ # symbols, with an underscore prefix.
+ output.write(''.join('_%s\n' % s for s in symbols))
+
+ return set(pp.includes)
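
Note that generate_symbols_file() matches the |main| contract used by
file_generate.py earlier in this patch: it takes the output file object as
its first argument, and returning set(pp.includes) feeds the preprocessor's
include files back as make dependencies. A hypothetical invocation through
that action (file names and the define are invented):

    python file_generate.py generate_symbols_file.py generate_symbols_file \
        libfoo.def libfoo.def.pp symbols.def.in -DMOZ_FOO=1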
diff --git a/python/mozbuild/mozbuild/action/jar_maker.py b/python/mozbuild/mozbuild/action/jar_maker.py
new file mode 100644
index 000000000..3e3c3c83e
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/jar_maker.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import sys
+
+import mozbuild.jar
+
+
+def main(args):
+ return mozbuild.jar.main(args)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/make_dmg.py b/python/mozbuild/mozbuild/action/make_dmg.py
new file mode 100644
index 000000000..8d77bf374
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_dmg.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function
+
+from mozbuild.base import MozbuildObject
+from mozpack import dmg
+
+import os
+import sys
+
+
+def make_dmg(source_directory, output_dmg):
+ build = MozbuildObject.from_environment()
+ extra_files = [
+ (os.path.join(build.distdir, 'branding', 'dsstore'), '.DS_Store'),
+ (os.path.join(build.distdir, 'branding', 'background.png'),
+ '.background/background.png'),
+ (os.path.join(build.distdir, 'branding', 'disk.icns'),
+ '.VolumeIcon.icns'),
+ ]
+ volume_name = build.substs['MOZ_APP_DISPLAYNAME']
+ dmg.create_dmg(source_directory, output_dmg, volume_name, extra_files)
+
+
+def main(args):
+ if len(args) != 2:
+ print('Usage: make_dmg.py <source directory> <output dmg>',
+ file=sys.stderr)
+ return 1
+ make_dmg(args[0], args[1])
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/output_searchplugins_list.py b/python/mozbuild/mozbuild/action/output_searchplugins_list.py
new file mode 100644
index 000000000..c20e2c732
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/output_searchplugins_list.py
@@ -0,0 +1,21 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import json
+
+engines = []
+
+locale = sys.argv[2]
+
+with open(sys.argv[1]) as f:
+ searchinfo = json.load(f)
+
+if locale in searchinfo["locales"]:
+ for region in searchinfo["locales"][locale]:
+ engines = list(set(engines)|set(searchinfo["locales"][locale][region]["visibleDefaultEngines"]))
+else:
+ engines = searchinfo["default"]["visibleDefaultEngines"]
+
+print '\n'.join(engines)

A hypothetical input (engine names invented) showing why the script takes a
union:

    {
      "default": {"visibleDefaultEngines": ["engineA"]},
      "locales": {
        "de": {
          "default": {"visibleDefaultEngines": ["engineA", "engineB"]},
          "experimental": {"visibleDefaultEngines": ["engineC"]}
        }
      }
    }

For locale "de" this prints engineA, engineB, and engineC, one per line and
in no guaranteed order (the union is computed with sets); any other locale
prints just engineA.
diff --git a/python/mozbuild/mozbuild/action/package_fennec_apk.py b/python/mozbuild/mozbuild/action/package_fennec_apk.py
new file mode 100644
index 000000000..ecd5a9af3
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/package_fennec_apk.py
@@ -0,0 +1,150 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''
+Script to produce an Android package (.apk) for Fennec.
+'''
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import buildconfig
+import os
+import subprocess
+import sys
+
+from mozpack.copier import Jarrer
+from mozpack.files import (
+ DeflatedFile,
+ File,
+ FileFinder,
+)
+from mozpack.mozjar import JarReader
+import mozpack.path as mozpath
+
+
+def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None,
+ lib_dirs=[],
+ assets_dirs=[],
+ features_dirs=[],
+ root_files=[],
+ verbose=False):
+ jarrer = Jarrer(optimize=False)
+
+    # First, take input files. The contents of later files overwrite the
+    # contents of earlier files.
+ for input in inputs:
+ jar = JarReader(input)
+ for file in jar:
+ path = file.filename
+ if jarrer.contains(path):
+ jarrer.remove(path)
+ jarrer.add(path, DeflatedFile(file), compress=file.compressed)
+
+ def add(path, file, compress=None):
+ abspath = os.path.abspath(file.path)
+ if verbose:
+ print('Packaging %s from %s' % (path, file.path))
+ if not os.path.exists(abspath):
+            raise ValueError('File %s not found (looked for %s)'
+                             % (file.path, abspath))
+ if jarrer.contains(path):
+ jarrer.remove(path)
+ jarrer.add(path, file, compress=compress)
+
+ for features_dir in features_dirs:
+ finder = FileFinder(features_dir, find_executables=False)
+ for p, f in finder.find('**'):
+ add(mozpath.join('assets', 'features', p), f, False)
+
+ for assets_dir in assets_dirs:
+ finder = FileFinder(assets_dir, find_executables=False)
+ for p, f in finder.find('**'):
+ compress = None # Take default from Jarrer.
+ if p.endswith('.so'):
+ # Asset libraries are special.
+ if f.open().read(5)[1:] == '7zXZ':
+ print('%s is already compressed' % p)
+ # We need to store (rather than deflate) compressed libraries
+ # (even if we don't compress them ourselves).
+ compress = False
+ elif buildconfig.substs.get('XZ'):
+ cmd = [buildconfig.substs.get('XZ'), '-zkf',
+ mozpath.join(finder.base, p)]
+
+ bcj = None
+ if buildconfig.substs.get('MOZ_THUMB2'):
+ bcj = '--armthumb'
+ elif buildconfig.substs.get('CPU_ARCH') == 'arm':
+ bcj = '--arm'
+ elif buildconfig.substs.get('CPU_ARCH') == 'x86':
+ bcj = '--x86'
+
+ if bcj:
+ cmd.extend([bcj, '--lzma2'])
+ print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
+ subprocess.check_output(cmd)
+ os.rename(f.path + '.xz', f.path)
+ compress = False
+
+ add(mozpath.join('assets', p), f, compress=compress)
+
+ for lib_dir in lib_dirs:
+ finder = FileFinder(lib_dir, find_executables=False)
+ for p, f in finder.find('**'):
+ add(mozpath.join('lib', p), f)
+
+ for root_file in root_files:
+ add(os.path.basename(root_file), File(root_file))
+
+ if omni_ja:
+ add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)
+
+ if classes_dex:
+ add('classes.dex', File(classes_dex))
+
+ return jarrer
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--verbose', '-v', default=False, action='store_true',
+ help='be verbose')
+ parser.add_argument('--inputs', nargs='+',
+ help='Input skeleton AP_ or APK file(s).')
+ parser.add_argument('-o', '--output',
+ help='Output APK file.')
+ parser.add_argument('--omnijar', default=None,
+ help='Optional omni.ja to pack into APK file.')
+ parser.add_argument('--classes-dex', default=None,
+ help='Optional classes.dex to pack into APK file.')
+ parser.add_argument('--lib-dirs', nargs='*', default=[],
+ help='Optional lib/ dirs to pack into APK file.')
+ parser.add_argument('--assets-dirs', nargs='*', default=[],
+ help='Optional assets/ dirs to pack into APK file.')
+ parser.add_argument('--features-dirs', nargs='*', default=[],
+ help='Optional features/ dirs to pack into APK file.')
+ parser.add_argument('--root-files', nargs='*', default=[],
+ help='Optional files to pack into APK file root.')
+ args = parser.parse_args(args)
+
+ if buildconfig.substs.get('OMNIJAR_NAME') != 'assets/omni.ja':
+ raise ValueError("Don't know how package Fennec APKs when "
+ " OMNIJAR_NAME is not 'assets/omni.jar'.")
+
+ jarrer = package_fennec_apk(inputs=args.inputs,
+ omni_ja=args.omnijar,
+ classes_dex=args.classes_dex,
+ lib_dirs=args.lib_dirs,
+ assets_dirs=args.assets_dirs,
+ features_dirs=args.features_dirs,
+ root_files=args.root_files,
+ verbose=args.verbose)
+ jarrer.copy(args.output)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
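
The five-byte read above recognizes the xz container format, whose streams
begin with the six-byte magic FD 37 7A 58 5A 00; bytes 1 through 4 spell
'7zXZ'. The same check as a standalone sketch (is_xz_compressed is an
illustrative helper):

    def is_xz_compressed(path):
        # xz streams start with FD 37 7A 58 5A 00; matching bytes 1..4
        # against '7zXZ' is enough to identify them.
        with open(path, 'rb') as fh:
            return fh.read(5)[1:] == b'7zXZ'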
diff --git a/python/mozbuild/mozbuild/action/preprocessor.py b/python/mozbuild/mozbuild/action/preprocessor.py
new file mode 100644
index 000000000..e5a4d576b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/preprocessor.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import sys
+
+from mozbuild.preprocessor import Preprocessor
+
+
+def main(args):
+ pp = Preprocessor()
+ pp.handleCommandLine(args, True)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/process_define_files.py b/python/mozbuild/mozbuild/action/process_define_files.py
new file mode 100644
index 000000000..f6d0c1695
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_define_files.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import os
+import re
+import sys
+from buildconfig import topobjdir
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.util import FileAvoidWrite
+import mozpack.path as mozpath
+
+
+def process_define_file(output, input):
+ '''Creates the given config header. A config header is generated by
+ taking the corresponding source file and replacing some #define/#undef
+    occurrences:
+ "#undef NAME" is turned into "#define NAME VALUE"
+ "#define NAME" is unchanged
+ "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
+ "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
+    Whitespace is preserved.
+
+ As a special rule, "#undef ALLDEFINES" is turned into "#define NAME
+ VALUE" for all the defined variables.
+ '''
+
+ path = os.path.abspath(input)
+
+ config = ConfigEnvironment.from_config_status(
+ mozpath.join(topobjdir, 'config.status'))
+
+ if mozpath.basedir(path,
+ [mozpath.join(config.topsrcdir, 'js/src')]) and \
+ not config.substs.get('JS_STANDALONE'):
+ config = ConfigEnvironment.from_config_status(
+ mozpath.join(topobjdir, 'js', 'src', 'config.status'))
+
+ with open(path, 'rU') as input:
+        r = re.compile(r'^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?', re.U)
+ for l in input:
+ m = r.match(l)
+ if m:
+ cmd = m.group('cmd')
+ name = m.group('name')
+ value = m.group('value')
+ if name:
+ if name == 'ALLDEFINES':
+ if cmd == 'define':
+ raise Exception(
+ '`#define ALLDEFINES` is not allowed in a '
+ 'CONFIGURE_DEFINE_FILE')
+ defines = '\n'.join(sorted(
+ '#define %s %s' % (name, val)
+ for name, val in config.defines.iteritems()
+ if name not in config.non_global_defines))
+ l = l[:m.start('cmd') - 1] \
+ + defines + l[m.end('name'):]
+ elif name in config.defines:
+ if cmd == 'define' and value:
+ l = l[:m.start('value')] \
+ + str(config.defines[name]) \
+ + l[m.end('value'):]
+ elif cmd == 'undef':
+ l = l[:m.start('cmd')] \
+ + 'define' \
+ + l[m.end('cmd'):m.end('name')] \
+ + ' ' \
+ + str(config.defines[name]) \
+ + l[m.end('name'):]
+ elif cmd == 'undef':
+ l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
+
+ output.write(l)
+
+ return {path, config.source}
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Process define files.')
+
+ parser.add_argument('input', help='Input define file.')
+
+ args = parser.parse_args(argv)
+
+ return process_define_file(sys.stdout, args.input)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
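
A before/after sketch using hypothetical defines (assume config.status
defines MOZ_WIDGET_TOOLKIT as "gtk3" and leaves MOZ_FOO undefined):

    /* input */
    #undef MOZ_WIDGET_TOOLKIT
    #undef MOZ_FOO

    /* output */
    #define MOZ_WIDGET_TOOLKIT "gtk3"
    /* #undef MOZ_FOO */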
diff --git a/python/mozbuild/mozbuild/action/process_install_manifest.py b/python/mozbuild/mozbuild/action/process_install_manifest.py
new file mode 100644
index 000000000..e19fe4eda
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_install_manifest.py
@@ -0,0 +1,120 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import os
+import sys
+import time
+
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+)
+from mozpack.files import (
+ BaseFile,
+ FileFinder,
+)
+from mozpack.manifests import (
+ InstallManifest,
+ InstallManifestNoSymlinks,
+)
+from mozbuild.util import DefinesAction
+
+
+COMPLETE = 'Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; ' \
+ 'Added/updated {updated}; ' \
+ 'Removed {rm_files} files and {rm_dirs} directories.'
+
+
+def process_manifest(destdir, paths, track=None,
+ remove_unaccounted=True,
+ remove_all_directory_symlinks=True,
+ remove_empty_directories=True,
+ no_symlinks=False,
+ defines={}):
+
+ if track:
+ if os.path.exists(track):
+ # We use the same format as install manifests for the tracking
+ # data.
+ manifest = InstallManifest(path=track)
+ remove_unaccounted = FileRegistry()
+ dummy_file = BaseFile()
+
+ finder = FileFinder(destdir, find_executables=False,
+ find_dotfiles=True)
+ for dest in manifest._dests:
+ for p, f in finder.find(dest):
+ remove_unaccounted.add(p, dummy_file)
+
+ else:
+ # If tracking is enabled and there is no file, we don't want to
+ # be removing anything.
+            remove_unaccounted = False
+            remove_empty_directories = False
+            remove_all_directory_symlinks = False
+
+ manifest_cls = InstallManifestNoSymlinks if no_symlinks else InstallManifest
+ manifest = manifest_cls()
+ for path in paths:
+ manifest |= manifest_cls(path=path)
+
+ copier = FileCopier()
+ manifest.populate_registry(copier, defines_override=defines)
+ result = copier.copy(destdir,
+ remove_unaccounted=remove_unaccounted,
+ remove_all_directory_symlinks=remove_all_directory_symlinks,
+ remove_empty_directories=remove_empty_directories)
+
+ if track:
+ manifest.write(path=track)
+
+ return result
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Process install manifest files.')
+
+ parser.add_argument('destdir', help='Destination directory.')
+ parser.add_argument('manifests', nargs='+', help='Path to manifest file(s).')
+ parser.add_argument('--no-remove', action='store_true',
+ help='Do not remove unaccounted files from destination.')
+ parser.add_argument('--no-remove-all-directory-symlinks', action='store_true',
+ help='Do not remove all directory symlinks from destination.')
+ parser.add_argument('--no-remove-empty-directories', action='store_true',
+ help='Do not remove empty directories from destination.')
+ parser.add_argument('--no-symlinks', action='store_true',
+ help='Do not install symbolic links. Always copy files')
+ parser.add_argument('--track', metavar="PATH",
+ help='Use installed files tracking information from the given path.')
+ parser.add_argument('-D', action=DefinesAction,
+ dest='defines', metavar="VAR[=VAL]",
+ help='Define a variable to override what is specified in the manifest')
+
+ args = parser.parse_args(argv)
+
+ start = time.time()
+
+ result = process_manifest(args.destdir, args.manifests,
+ track=args.track, remove_unaccounted=not args.no_remove,
+ remove_all_directory_symlinks=not args.no_remove_all_directory_symlinks,
+ remove_empty_directories=not args.no_remove_empty_directories,
+ no_symlinks=args.no_symlinks,
+ defines=args.defines)
+
+ elapsed = time.time() - start
+
+ print(COMPLETE.format(
+ elapsed=elapsed,
+ dest=args.destdir,
+ existing=result.existing_files_count,
+ updated=result.updated_files_count,
+ rm_files=result.removed_files_count,
+ rm_dirs=result.removed_directories_count))
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
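
A hypothetical invocation (paths are invented):

    python process_install_manifest.py --track objdir/install.track \
        -DAB_CD=en-US dist/bin install_dist_bin.manifest

With --track, removal is confined to files recorded by the previous run's
tracking manifest, so files in dist/bin that this tool never installed
survive; when no track file exists yet, nothing is removed on the first run.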
diff --git a/python/mozbuild/mozbuild/action/test_archive.py b/python/mozbuild/mozbuild/action/test_archive.py
new file mode 100644
index 000000000..8ec4dd2a9
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/test_archive.py
@@ -0,0 +1,565 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This action is used to produce test archives.
+#
+# Ideally, the data in this file should be defined in moz.build files.
+# It is defined inline because this was easiest to make test archive
+# generation faster.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import itertools
+import os
+import sys
+import time
+
+from manifestparser import TestManifest
+from reftest import ReftestManifest
+
+from mozbuild.util import ensureParentDir
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+import mozpack.path as mozpath
+
+import buildconfig
+
+STAGE = mozpath.join(buildconfig.topobjdir, 'dist', 'test-stage')
+
+TEST_HARNESS_BINS = [
+ 'BadCertServer',
+ 'GenerateOCSPResponse',
+ 'OCSPStaplingServer',
+ 'SmokeDMD',
+ 'certutil',
+ 'crashinject',
+ 'fileid',
+ 'minidumpwriter',
+ 'pk12util',
+ 'screenshot',
+ 'screentopng',
+ 'ssltunnel',
+ 'xpcshell',
+]
+
+# The fileid utility depends on mozglue. See bug 1069556.
+TEST_HARNESS_DLLS = [
+ 'crashinjectdll',
+ 'mozglue'
+]
+
+TEST_PLUGIN_DLLS = [
+ 'npctrltest',
+ 'npsecondtest',
+ 'npswftest',
+ 'nptest',
+ 'nptestjava',
+ 'npthirdtest',
+]
+
+TEST_PLUGIN_DIRS = [
+ 'JavaTest.plugin/**',
+ 'SecondTest.plugin/**',
+ 'Test.plugin/**',
+ 'ThirdTest.plugin/**',
+ 'npctrltest.plugin/**',
+ 'npswftest.plugin/**',
+]
+
+GMP_TEST_PLUGIN_DIRS = [
+ 'gmp-clearkey/**',
+ 'gmp-fake/**',
+ 'gmp-fakeopenh264/**',
+]
+
+
+ARCHIVE_FILES = {
+ 'common': [
+ {
+ 'source': STAGE,
+ 'base': '',
+ 'pattern': '**',
+ 'ignore': [
+ 'cppunittest/**',
+ 'gtest/**',
+ 'mochitest/**',
+ 'reftest/**',
+ 'talos/**',
+ 'web-platform/**',
+ 'xpcshell/**',
+ ],
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests',
+ 'pattern': 'modules/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing/marionette',
+ 'patterns': [
+ 'client/**',
+ 'harness/**',
+ 'puppeteer/**',
+ 'mach_test_package_commands.py',
+ ],
+ 'dest': 'marionette',
+ 'ignore': [
+ 'client/docs',
+ 'harness/marionette_harness/tests',
+ 'puppeteer/firefox/docs',
+ ],
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': '',
+ 'manifests': [
+ 'testing/marionette/harness/marionette_harness/tests/unit-tests.ini',
+ 'testing/marionette/harness/marionette_harness/tests/webapi-tests.ini',
+ ],
+ # We also need the manifests and harness_unit tests
+ 'pattern': 'testing/marionette/harness/marionette_harness/tests/**',
+ 'dest': 'marionette/tests',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests',
+ 'pattern': 'mozbase/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'firefox-ui/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'dom/media/test/external',
+ 'pattern': '**',
+ 'dest': 'external-media-tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'js/src',
+ 'pattern': 'jit-test/**',
+ 'dest': 'jit-test',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'js/src/tests',
+ 'pattern': 'ecma_6/**',
+ 'dest': 'jit-test/tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'js/src/tests',
+ 'pattern': 'js1_8_5/**',
+ 'dest': 'jit-test/tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'js/src/tests',
+ 'pattern': 'lib/**',
+ 'dest': 'jit-test/tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'js/src',
+ 'pattern': 'jsapi.h',
+ 'dest': 'jit-test',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'tps/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'services/sync/',
+ 'pattern': 'tps/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'services/sync/tests/tps',
+ 'pattern': '**',
+ 'dest': 'tps/tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing/web-platform/tests/tools/wptserve',
+ 'pattern': '**',
+ 'dest': 'tools/wptserve',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/bin',
+ 'patterns': [
+ '%s%s' % (f, buildconfig.substs['BIN_SUFFIX'])
+ for f in TEST_HARNESS_BINS
+ ] + [
+ '%s%s%s' % (buildconfig.substs['DLL_PREFIX'], f, buildconfig.substs['DLL_SUFFIX'])
+ for f in TEST_HARNESS_DLLS
+ ],
+ 'dest': 'bin',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/plugins',
+ 'patterns': [
+ '%s%s%s' % (buildconfig.substs['DLL_PREFIX'], f, buildconfig.substs['DLL_SUFFIX'])
+ for f in TEST_PLUGIN_DLLS
+ ],
+ 'dest': 'bin/plugins',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/plugins',
+ 'patterns': TEST_PLUGIN_DIRS,
+ 'dest': 'bin/plugins',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/bin',
+ 'patterns': GMP_TEST_PLUGIN_DIRS,
+ 'dest': 'bin/plugins',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/bin',
+ 'patterns': [
+ 'dmd.py',
+ 'fix_linux_stack.py',
+ 'fix_macosx_stack.py',
+ 'fix_stack_using_bpsyms.py',
+ ],
+ 'dest': 'bin',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'dist/bin/components',
+ 'patterns': [
+ 'httpd.js',
+ 'httpd.manifest',
+ 'test_necko.xpt',
+ ],
+ 'dest': 'bin/components',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'build/pgo/certs',
+ 'pattern': '**',
+ 'dest': 'certs',
+ }
+ ],
+ 'cppunittest': [
+ {
+ 'source': STAGE,
+ 'base': '',
+ 'pattern': 'cppunittest/**',
+ },
+ # We don't ship these files if startup cache is disabled, which is
+ # rare. But it shouldn't matter for test archives.
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'startupcache/test',
+ 'pattern': 'TestStartupCacheTelemetry.*',
+ 'dest': 'cppunittest',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'runcppunittests.py',
+ 'dest': 'cppunittest',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'remotecppunittests.py',
+ 'dest': 'cppunittest',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'cppunittest.ini',
+ 'dest': 'cppunittest',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ 'dest': 'cppunittest',
+ },
+ ],
+ 'gtest': [
+ {
+ 'source': STAGE,
+ 'base': '',
+ 'pattern': 'gtest/**',
+ },
+ ],
+ 'mochitest': [
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests/testing',
+ 'pattern': 'mochitest/**',
+ },
+ {
+ 'source': STAGE,
+ 'base': '',
+ 'pattern': 'mochitest/**',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ 'dest': 'mochitest'
+ }
+ ],
+ 'mozharness': [
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'mozharness/**',
+ },
+ ],
+ 'reftest': [
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests',
+ 'pattern': 'reftest/**',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ 'dest': 'reftest',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': '',
+ 'manifests': [
+ 'layout/reftests/reftest.list',
+ 'testing/crashtest/crashtests.list',
+ ],
+ 'dest': 'reftest/tests',
+ }
+ ],
+ 'talos': [
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'talos/**',
+ },
+ ],
+ 'web-platform': [
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'web-platform/meta/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'web-platform/mozilla/**',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing',
+ 'pattern': 'web-platform/tests/**',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests',
+ 'pattern': 'web-platform/**',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ 'dest': 'web-platform',
+ },
+ ],
+ 'xpcshell': [
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '_tests/xpcshell',
+ 'pattern': '**',
+ 'dest': 'xpcshell/tests',
+ },
+ {
+ 'source': buildconfig.topsrcdir,
+ 'base': 'testing/xpcshell',
+ 'patterns': [
+ 'head.js',
+ 'mach_test_package_commands.py',
+ 'moz-http2/**',
+ 'moz-spdy/**',
+ 'node-http2/**',
+ 'node-spdy/**',
+ 'remotexpcshelltests.py',
+ 'runtestsb2g.py',
+ 'runxpcshelltests.py',
+ 'xpcshellcommandline.py',
+ ],
+ 'dest': 'xpcshell',
+ },
+ {
+ 'source': STAGE,
+ 'base': '',
+ 'pattern': 'xpcshell/**',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': '',
+ 'pattern': 'mozinfo.json',
+ 'dest': 'xpcshell',
+ },
+ {
+ 'source': buildconfig.topobjdir,
+ 'base': 'build',
+ 'pattern': 'automation.py',
+ 'dest': 'xpcshell',
+ },
+ ],
+}
+
+
+# "common" is our catch all archive and it ignores things from other archives.
+# Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion
+# rule in the "common" archive.
+for k, v in ARCHIVE_FILES.items():
+ # Skip mozharness because it isn't staged.
+ if k in ('common', 'mozharness'):
+ continue
+
+ ignores = set(itertools.chain(*(e.get('ignore', [])
+ for e in ARCHIVE_FILES['common'])))
+
+ if not any(p.startswith('%s/' % k) for p in ignores):
+ raise Exception('"common" ignore list probably should contain %s' % k)
+
+
+def find_files(archive):
+ for entry in ARCHIVE_FILES[archive]:
+ source = entry['source']
+ dest = entry.get('dest')
+ base = entry.get('base', '')
+
+ pattern = entry.get('pattern')
+ patterns = entry.get('patterns', [])
+ if pattern:
+ patterns.append(pattern)
+
+ manifest = entry.get('manifest')
+ manifests = entry.get('manifests', [])
+ if manifest:
+ manifests.append(manifest)
+ if manifests:
+ dirs = find_manifest_dirs(buildconfig.topsrcdir, manifests)
+ patterns.extend({'{}/**'.format(d) for d in dirs})
+
+ ignore = list(entry.get('ignore', []))
+ ignore.extend([
+ '**/.flake8',
+ '**/.mkdir.done',
+ '**/*.pyc',
+ ])
+
+ common_kwargs = {
+ 'find_executables': False,
+ 'find_dotfiles': True,
+ 'ignore': ignore,
+ }
+
+ finder = FileFinder(os.path.join(source, base), **common_kwargs)
+
+ for pattern in patterns:
+ for p, f in finder.find(pattern):
+ if dest:
+ p = mozpath.join(dest, p)
+ yield p, f
+
+
+def find_manifest_dirs(topsrcdir, manifests):
+ """Routine to retrieve directories specified in a manifest, relative to topsrcdir.
+
+ It does not recurse into manifests, as we currently have no need for that.
+ """
+ dirs = set()
+
+ for p in manifests:
+ p = os.path.join(topsrcdir, p)
+
+ if p.endswith('.ini'):
+ test_manifest = TestManifest()
+ test_manifest.read(p)
+ dirs |= set([os.path.dirname(m) for m in test_manifest.manifests()])
+
+ elif p.endswith('.list'):
+ m = ReftestManifest()
+ m.load(p)
+ dirs |= m.dirs
+
+ else:
+ raise Exception('"{}" is not a supported manifest format.'.format(
+ os.path.splitext(p)[1]))
+
+ dirs = {mozpath.normpath(d[len(topsrcdir):]).lstrip('/') for d in dirs}
+
+ # Filter out children captured by parent directories because duplicates
+ # will confuse things later on.
+ def parents(p):
+ while True:
+ p = mozpath.dirname(p)
+ if not p:
+ break
+ yield p
+
+ seen = set()
+ for d in sorted(dirs, key=len):
+ if not any(p in seen for p in parents(d)):
+ seen.add(d)
+
+ return sorted(seen)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description='Produce test archives')
+ parser.add_argument('archive', help='Which archive to generate')
+ parser.add_argument('outputfile', help='File to write output to')
+
+ args = parser.parse_args(argv)
+
+ if not args.outputfile.endswith('.zip'):
+ raise Exception('expected zip output file')
+
+ file_count = 0
+ t_start = time.time()
+ ensureParentDir(args.outputfile)
+ with open(args.outputfile, 'wb') as fh:
+        # Experimentation revealed that level 5 is significantly faster than
+        # higher values and only marginally larger, making it the sweet spot
+        # in the speed/size trade-off. Read the detailed commit message that
+        # introduced this for raw numbers.
+ with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
+ res = find_files(args.archive)
+ for p, f in res:
+ writer.add(p.encode('utf-8'), f.read(), mode=f.mode, skip_duplicates=True)
+ file_count += 1
+
+ duration = time.time() - t_start
+ zip_size = os.path.getsize(args.outputfile)
+ basename = os.path.basename(args.outputfile)
+ print('Wrote %d files in %d bytes to %s in %.2fs' % (
+ file_count, zip_size, basename, duration))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
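
A hypothetical invocation (the output path is invented; the script assumes a
configured build so that buildconfig resolves):

    python test_archive.py mochitest /tmp/test-stage/target.mochitest.tests.zip

The archive argument selects a key of ARCHIVE_FILES, and the output file name
must end in .zip.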
diff --git a/python/mozbuild/mozbuild/action/webidl.py b/python/mozbuild/mozbuild/action/webidl.py
new file mode 100644
index 000000000..d595c728e
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/webidl.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import sys
+
+from mozwebidlcodegen import BuildSystemWebIDL
+
+
+def main(argv):
+ """Perform WebIDL code generation required by the build system."""
+ manager = BuildSystemWebIDL.from_environment().manager
+ manager.generate_build_files()
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/xpccheck.py b/python/mozbuild/mozbuild/action/xpccheck.py
new file mode 100644
index 000000000..c3170a8da
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpccheck.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''A generic script to verify all test files are in the
+corresponding .ini file.
+
+Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]
+'''
+
+from __future__ import absolute_import
+
+import sys
+import os
+from glob import glob
+import manifestparser
+
+def getIniTests(testdir):
+ mp = manifestparser.ManifestParser(strict=False)
+ mp.read(os.path.join(testdir, 'xpcshell.ini'))
+ return mp.tests
+
+def verifyDirectory(initests, directory):
+ files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+ for f in files:
+ if (not os.path.isfile(f)):
+ continue
+
+ name = os.path.basename(f)
+ if name.endswith('.in'):
+ name = name[:-3]
+
+ if not name.endswith('.js'):
+ continue
+
+ found = False
+ for test in initests:
+ if os.path.join(os.path.abspath(directory), name) == test['path']:
+ found = True
+ break
+
+ if not found:
+ print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | test %s is missing from test manifest %s!" % (name, os.path.join(directory, 'xpcshell.ini'))
+ sys.exit(1)
+
+def verifyIniFile(initests, directory):
+ files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+ for test in initests:
+ name = test['path'].split('/')[-1]
+
+ found = False
+ for f in files:
+
+ fname = f.split('/')[-1]
+ if fname.endswith('.in'):
+ fname = '.in'.join(fname.split('.in')[:-1])
+
+ if os.path.join(os.path.abspath(directory), fname) == test['path']:
+ found = True
+ break
+
+ if not found:
+ print >>sys.stderr, "TEST-UNEXPECTED-FAIL | xpccheck | found %s in xpcshell.ini and not in directory '%s'" % (name, directory)
+ sys.exit(1)
+
+def main(argv):
+ if len(argv) < 2:
+ print >>sys.stderr, "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]"
+ sys.exit(1)
+
+ topsrcdir = argv[0]
+ for d in argv[1:]:
+    # xpcshell-unpack is a copy of its sibling xpcshell directory, and in the
+    # Makefile we copy all files (including xpcshell.ini) from that sibling.
+ if d.endswith('toolkit/mozapps/extensions/test/xpcshell-unpack'):
+ continue
+
+ initests = getIniTests(d)
+ verifyDirectory(initests, d)
+ verifyIniFile(initests, d)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/xpidl-process.py b/python/mozbuild/mozbuild/action/xpidl-process.py
new file mode 100755
index 000000000..07ea3cf96
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpidl-process.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script is used to generate an output header and xpt file for
+# input IDL file(s). Its purpose is to directly support the build
+# system. The API will change to meet the needs of the build system.
+
+from __future__ import absolute_import
+
+import argparse
+import os
+import sys
+
+from io import BytesIO
+
+from buildconfig import topsrcdir
+from xpidl.header import print_header
+from xpidl.typelib import write_typelib
+from xpidl.xpidl import IDLParser
+from xpt import xpt_link
+
+from mozbuild.makeutil import Makefile
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.util import FileAvoidWrite
+
+
+def process(input_dir, inc_paths, cache_dir, header_dir, xpt_dir, deps_dir, module, stems):
+ p = IDLParser(outputdir=cache_dir)
+
+ xpts = {}
+ mk = Makefile()
+ rule = mk.create_rule()
+
+ # Write out dependencies for Python modules we import. If this list isn't
+ # up to date, we will not re-process XPIDL files if the processor changes.
+ rule.add_dependencies(iter_modules_in_path(topsrcdir))
+
+ for stem in stems:
+ path = os.path.join(input_dir, '%s.idl' % stem)
+        with open(path) as idl_file:
+            idl_data = idl_file.read()
+
+ idl = p.parse(idl_data, filename=path)
+ idl.resolve([input_dir] + inc_paths, p)
+
+ header_path = os.path.join(header_dir, '%s.h' % stem)
+
+ xpt = BytesIO()
+ write_typelib(idl, xpt, path)
+ xpt.seek(0)
+ xpts[stem] = xpt
+
+ rule.add_dependencies(idl.deps)
+
+ with FileAvoidWrite(header_path) as fh:
+ print_header(idl, fh, path)
+
+ # TODO use FileAvoidWrite once it supports binary mode.
+ xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
+ xpt_link(xpts.values()).write(xpt_path)
+
+ rule.add_targets([xpt_path])
+ if deps_dir:
+ deps_path = os.path.join(deps_dir, '%s.pp' % module)
+ with FileAvoidWrite(deps_path) as fh:
+ mk.dump(fh)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--cache-dir',
+ help='Directory in which to find or write cached lexer data.')
+ parser.add_argument('--depsdir',
+ help='Directory in which to write dependency files.')
+ parser.add_argument('inputdir',
+ help='Directory in which to find source .idl files.')
+ parser.add_argument('headerdir',
+ help='Directory in which to write header files.')
+ parser.add_argument('xptdir',
+ help='Directory in which to write xpt file.')
+ parser.add_argument('module',
+ help='Final module name to use for linked output xpt file.')
+ parser.add_argument('idls', nargs='+',
+ help='Source .idl file(s). Specified as stems only.')
+ parser.add_argument('-I', dest='incpath', action='append', default=[],
+ help='Extra directories where to look for included .idl files.')
+
+ args = parser.parse_args(argv)
+ process(args.inputdir, args.incpath, args.cache_dir, args.headerdir,
+ args.xptdir, args.depsdir, args.module, args.idls)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
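
A hypothetical invocation (directories and stems invented). The trailing
arguments are stems, so nsIFoo names nsIFoo.idl inside inputdir:

    python xpidl-process.py --cache-dir objdir/xpidl --depsdir objdir/deps \
        -I other/idl srcdir/idl objdir/include objdir/xpt necko nsIFoo nsIBar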
diff --git a/python/mozbuild/mozbuild/action/zip.py b/python/mozbuild/mozbuild/action/zip.py
new file mode 100644
index 000000000..143d7766e
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/zip.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script creates a zip file, but will also strip any binaries
+# it finds before adding them to the zip.
+
+from __future__ import absolute_import
+
+from mozpack.files import FileFinder
+from mozpack.copier import Jarrer
+from mozpack.errors import errors
+
+import argparse
+import mozpack.path as mozpath
+import sys
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-C", metavar='DIR', default=".",
+ help="Change to given directory before considering "
+ "other paths")
+ parser.add_argument("zip", help="Path to zip file to write")
+ parser.add_argument("input", nargs="+",
+ help="Path to files to add to zip")
+ args = parser.parse_args(args)
+
+ jarrer = Jarrer(optimize=False)
+
+ with errors.accumulate():
+ finder = FileFinder(args.C)
+ for path in args.input:
+ for p, f in finder.find(path):
+ jarrer.add(p, f)
+ jarrer.copy(mozpath.join(args.C, args.zip))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
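
A hypothetical invocation (paths invented). The inputs are FileFinder match
patterns evaluated relative to the -C directory, and note that the zip path
itself is also joined to -C:

    python zip.py -C dist/bin package.zip "chrome/**" "*.exe"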
diff --git a/python/mozbuild/mozbuild/android_version_code.py b/python/mozbuild/mozbuild/android_version_code.py
new file mode 100644
index 000000000..69ce22b8e
--- /dev/null
+++ b/python/mozbuild/mozbuild/android_version_code.py
@@ -0,0 +1,167 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import math
+import sys
+import time
+
+# Builds before this build ID use the v0 version scheme. Builds after this
+# build ID use the v1 version scheme.
+V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
+
+def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+ base = int(str(buildid)[:10])
+ # None is interpreted as arm.
+ if not cpu_arch or cpu_arch in ['armeabi', 'armeabi-v7a']:
+ # Increment by MIN_SDK_VERSION -- this adds 9 to every build ID as a
+ # minimum. Our split APK starts at 15.
+ return base + min_sdk + 0
+ elif cpu_arch in ['x86']:
+ # Increment the version code by 3 for x86 builds so they are offered to
+ # x86 phones that have ARM emulators, beating the 2-point advantage that
+ # the v15+ ARMv7 APK has. If we change our splits in the future, we'll
+ # need to do this further still.
+ return base + min_sdk + 3
+ else:
+ raise ValueError("Don't know how to compute android:versionCode "
+ "for CPU arch %s" % cpu_arch)
+
+def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+ '''Generate a v1 android:versionCode.
+
+ The important consideration is that version codes be monotonically
+ increasing (per Android package name) for all published builds. The input
+ build IDs are based on timestamps and hence are always monotonically
+ increasing.
+
+ The generated v1 version codes look like (in binary):
+
+ 0111 1000 0010 tttt tttt tttt tttt txpg
+
+ The 17 bits labelled 't' represent the number of hours since midnight on
+    August 1, 2015 (2015080100 in YYYYMMDDHH format; see V1_CUTOFF). This
+    yields a little under 15 years' worth of hourly build identifiers, since
+    2**17 / (366 * 24) =~ 14.92.
+
+ The bits labelled 'x', 'p', and 'g' are feature flags.
+
+ The bit labelled 'x' is 1 if the build is for an x86 architecture and 0
+    otherwise, in which case the build is for an ARM architecture. (Fennec no
+ longer supports ARMv6, so ARM is equivalent to ARMv7 and above.)
+
+ The bit labelled 'p' is a placeholder that is always 0 (for now).
+
+    The bit labelled 'g' relates to the API-level split. Firefox no longer
+    supports API 14 or earlier, but this version code computation still
+    allows for a split on API levels, like the one that let us ship builds
+    specifically for Gingerbread (API 9-10); we preserve that functionality
+    for sanity's sake, and to allow us to reintroduce a split in the future.
+
+ At present, the bit labelled 'g' is 1 if the build is an ARM build
+ targeting API 15+, which will always be the case.
+
+ We throw an explanatory exception when we are within one calendar year of
+ running out of build events. This gives lots of time to update the version
+ scheme. The responsible individual should then bump the range (to allow
+ builds to continue) and use the time remaining to update the version scheme
+ via the reserved high order bits.
+
+ N.B.: the reserved 0 bit to the left of the highest order 't' bit can,
+ sometimes, be used to bump the version scheme. In addition, by reducing the
+ granularity of the build identifiers (for example, moving to identifying
+ builds every 2 or 4 hours), the version scheme may be adjusted further still
+ without losing a (valuable) high order bit.
+ '''
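+    # Illustrative example (hypothetical buildid, not from the original
+    # source): 20150801120000 is 12 hours past V1_CUTOFF, so base == 12, and
+    # an ARM build with min_sdk == 15 yields
+    # 0b1111000001000000000000000000000 | (12 << 3) | 0b001.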
+ def hours_since_cutoff(buildid):
+ # The ID is formatted like YYYYMMDDHHMMSS (using
+ # datetime.now().strftime('%Y%m%d%H%M%S'); see build/variables.py).
+ # The inverse function is time.strptime.
+ # N.B.: the time module expresses time as decimal seconds since the
+ # epoch.
+ fmt = '%Y%m%d%H%M%S'
+ build = time.strptime(str(buildid), fmt)
+ cutoff = time.strptime(str(V1_CUTOFF), fmt)
+ return int(math.floor((time.mktime(build) - time.mktime(cutoff)) / (60.0 * 60.0)))
+
+ # Of the 21 low order bits, we take 17 bits for builds.
+ base = hours_since_cutoff(buildid)
+ if base < 0:
+ raise ValueError("Something has gone horribly wrong: cannot calculate "
+ "android:versionCode from build ID %s: hours underflow "
+ "bits allotted!" % buildid)
+    if base >= 2**17:  # 17 bits can hold the values 0 through 2**17 - 1.
+ raise ValueError("Something has gone horribly wrong: cannot calculate "
+ "android:versionCode from build ID %s: hours overflow "
+ "bits allotted!" % buildid)
+ if base > 2**17 - 366 * 24:
+ raise ValueError("Running out of low order bits calculating "
+                         "android:versionCode from build ID %s; "
+                         "YOU HAVE ONE YEAR TO UPDATE THE VERSION SCHEME." % buildid)
+
+ version = 0b1111000001000000000000000000000
+ # We reserve 1 "middle" high order bit for the future, and 3 low order bits
+ # for architecture and APK splits.
+ version |= base << 3
+
+ # None is interpreted as arm.
+ if not cpu_arch or cpu_arch in ['armeabi', 'armeabi-v7a']:
+ # 0 is interpreted as SDK 9.
+ if not min_sdk or min_sdk == 9:
+ pass
+ # This used to compare to 11. The 15+ APK directly supersedes 11+, so
+ # we reuse this check.
+ elif min_sdk == 15:
+ version |= 1 << 0
+ else:
+ raise ValueError("Don't know how to compute android:versionCode "
+ "for CPU arch %s and min SDK %s" % (cpu_arch, min_sdk))
+ elif cpu_arch in ['x86']:
+ version |= 1 << 2
+ else:
+ raise ValueError("Don't know how to compute android:versionCode "
+ "for CPU arch %s" % cpu_arch)
+
+ return version
+
+def android_version_code(buildid, *args, **kwargs):
+ base = int(str(buildid))
+ if base < V1_CUTOFF:
+ return android_version_code_v0(buildid, *args, **kwargs)
+ else:
+ return android_version_code_v1(buildid, *args, **kwargs)
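+
+# Illustrative dispatch (hypothetical build IDs): IDs before V1_CUTOFF take
+# the v0 scheme, later IDs take v1:
+#
+#     android_version_code(20150101000000, cpu_arch='x86')  # v0 path
+#     android_version_code(20160101000000, cpu_arch='x86')  # v1 path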
+
+
+def main(argv):
+ parser = argparse.ArgumentParser('Generate an android:versionCode',
+ add_help=False)
+ parser.add_argument('--verbose', action='store_true',
+ default=False,
+ help='Be verbose')
+ parser.add_argument('--with-android-cpu-arch', dest='cpu_arch',
+ choices=['armeabi', 'armeabi-v7a', 'mips', 'x86'],
+ help='The target CPU architecture')
+ parser.add_argument('--with-android-min-sdk-version', dest='min_sdk',
+ type=int, default=0,
+ help='The minimum target SDK')
+ parser.add_argument('--with-android-max-sdk-version', dest='max_sdk',
+ type=int, default=0,
+ help='The maximum target SDK')
+ parser.add_argument('buildid', type=int,
+ help='The input build ID')
+
+ args = parser.parse_args(argv)
+ code = android_version_code(args.buildid,
+ cpu_arch=args.cpu_arch,
+ min_sdk=args.min_sdk,
+ max_sdk=args.max_sdk)
+ print(code)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/artifacts.py b/python/mozbuild/mozbuild/artifacts.py
new file mode 100644
index 000000000..02538938f
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -0,0 +1,1089 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+'''
+Fetch build artifacts from a Firefox tree.
+
+This provides an (at-the-moment special purpose) interface to download Android
+artifacts from Mozilla's Task Cluster.
+
+This module performs the following steps:
+
+* find a candidate hg parent revision. At one time we used the local pushlog,
+ which required the mozext hg extension. This isn't feasible with git, and it
+ is only mildly less efficient to not use the pushlog, so we don't use it even
+ when querying hg.
+
+* map the candidate parent to candidate Task Cluster tasks and artifact
+ locations. Pushlog entries might not correspond to tasks (yet), and those
+ tasks might not produce the desired class of artifacts.
+
+* fetch fresh Task Cluster artifacts and purge old artifacts, using a simple
+ Least Recently Used cache.
+
+* post-process fresh artifacts, to speed future installation. In particular,
+ extract relevant files from Mac OS X DMG files into a friendly archive format
+ so we don't have to mount DMG files frequently.
+
+The bulk of the complexity is in managing and persisting several caches. If
+we found a Python LRU cache that pickled cleanly, we could remove a lot of
+this code! Sadly, I found no such candidate implementations, so we pickle
+pylru caches manually.
+
+None of the instances (or the underlying caches) are safe for concurrent use.
+A future need, perhaps.
+
+This module requires certain modules be importable from the ambient Python
+environment. |mach artifact| ensures these modules are available, but other
+consumers will need to arrange this themselves.
+'''
+
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import collections
+import functools
+import glob
+import hashlib
+import logging
+import operator
+import os
+import pickle
+import re
+import requests
+import shutil
+import stat
+import subprocess
+import tarfile
+import tempfile
+import urlparse
+import zipfile
+
+import pylru
+import taskcluster
+
+from mozbuild.util import (
+ ensureParentDir,
+ FileAvoidWrite,
+)
+import mozinstall
+from mozpack.files import (
+ JarFinder,
+ TarFinder,
+)
+from mozpack.mozjar import (
+ JarReader,
+ JarWriter,
+)
+from mozpack.packager.unpack import UnpackFinder
+import mozpack.path as mozpath
+from mozregression.download_manager import (
+ DownloadManager,
+)
+from mozregression.persist_limit import (
+ PersistLimit,
+)
+
+NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
+
+# Number of parent changesets to consider as possible pushheads.
+# There isn't really such a thing as a reasonable default here, because we don't
+# know how many pushheads we'll need to look at to find a build with our artifacts,
+# and we don't know how many changesets will be in each push. For now we assume
+# we'll find a build in the last 50 pushes, assuming each push contains 10 changesets.
+NUM_REVISIONS_TO_QUERY = 500
+
+MAX_CACHED_TASKS = 400 # Number of pushheads to cache Task Cluster task data for.
+
+# Number of downloaded artifacts to cache. Each artifact can be very large,
+# so don't make this too large! TODO: make this a size (like 500 megs) rather
+# than an artifact count.
+MAX_CACHED_ARTIFACTS = 6
+
+# Downloaded artifacts are cached, and a subset of their contents extracted for
+# easy installation. This is most noticeable on Mac OS X: since mounting and
+# copying from DMG files is very slow, we extract the desired binaries to a
+# separate archive for fast re-installation.
+PROCESSED_SUFFIX = '.processed.jar'
+
+CANDIDATE_TREES = (
+ 'mozilla-central',
+ 'integration/mozilla-inbound',
+ 'releases/mozilla-aurora'
+)
+
+class ArtifactJob(object):
+ # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
+    # Each item is a pair of (pattern, (src_prefix, dest_prefix)), where src_prefix
+ # is the prefix of the pattern relevant to its location in the archive, and
+ # dest_prefix is the prefix to be added that will yield the final path relative
+ # to dist/.
+ test_artifact_patterns = {
+ ('bin/BadCertServer', ('bin', 'bin')),
+ ('bin/GenerateOCSPResponse', ('bin', 'bin')),
+ ('bin/OCSPStaplingServer', ('bin', 'bin')),
+ ('bin/certutil', ('bin', 'bin')),
+ ('bin/fileid', ('bin', 'bin')),
+ ('bin/pk12util', ('bin', 'bin')),
+ ('bin/ssltunnel', ('bin', 'bin')),
+ ('bin/xpcshell', ('bin', 'bin')),
+ ('bin/plugins/*', ('bin/plugins', 'plugins'))
+ }
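+    # For example (illustrative entry name): an archive path
+    # 'bin/plugins/secondplugin.so' matches 'bin/plugins/*'; relativizing
+    # against src_prefix 'bin/plugins' and joining with dest_prefix 'plugins'
+    # installs it at dist/plugins/secondplugin.so.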
+
+ # We can tell our input is a test archive by this suffix, which happens to
+ # be the same across platforms.
+ _test_archive_suffix = '.common.tests.zip'
+
+ def __init__(self, package_re, tests_re, log=None, download_symbols=False):
+ self._package_re = re.compile(package_re)
+ self._tests_re = None
+ if tests_re:
+ self._tests_re = re.compile(tests_re)
+ self._log = log
+ self._symbols_archive_suffix = None
+ if download_symbols:
+ self._symbols_archive_suffix = 'crashreporter-symbols.zip'
+
+ def log(self, *args, **kwargs):
+ if self._log:
+ self._log(*args, **kwargs)
+
+ def find_candidate_artifacts(self, artifacts):
+ # TODO: Handle multiple artifacts, taking the latest one.
+ tests_artifact = None
+ for artifact in artifacts:
+ name = artifact['name']
+ if self._package_re and self._package_re.match(name):
+ yield name
+ elif self._tests_re and self._tests_re.match(name):
+ tests_artifact = name
+ yield name
+ elif self._symbols_archive_suffix and name.endswith(self._symbols_archive_suffix):
+ yield name
+ else:
+ self.log(logging.DEBUG, 'artifact',
+ {'name': name},
+ 'Not yielding artifact named {name} as a candidate artifact')
+ if self._tests_re and not tests_artifact:
+ raise ValueError('Expected tests archive matching "{re}", but '
+ 'found none!'.format(re=self._tests_re))
+
+ def process_artifact(self, filename, processed_filename):
+ if filename.endswith(ArtifactJob._test_archive_suffix) and self._tests_re:
+ return self.process_tests_artifact(filename, processed_filename)
+ if self._symbols_archive_suffix and filename.endswith(self._symbols_archive_suffix):
+ return self.process_symbols_archive(filename, processed_filename)
+ return self.process_package_artifact(filename, processed_filename)
+
+ def process_package_artifact(self, filename, processed_filename):
+ raise NotImplementedError("Subclasses must specialize process_package_artifact!")
+
+ def process_tests_artifact(self, filename, processed_filename):
+ added_entry = False
+
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ reader = JarReader(filename)
+ for filename, entry in reader.entries.iteritems():
+ for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
+ if not mozpath.match(filename, pattern):
+ continue
+ destpath = mozpath.relpath(filename, src_prefix)
+ destpath = mozpath.join(dest_prefix, destpath)
+ self.log(logging.INFO, 'artifact',
+ {'destpath': destpath},
+ 'Adding {destpath} to processed archive')
+ mode = entry['external_attr'] >> 16
+ writer.add(destpath.encode('utf-8'), reader[filename], mode=mode)
+ added_entry = True
+
+ if not added_entry:
+            raise ValueError('Archive format changed! No pattern from "{patterns}" '
+                             'matched an archive path.'.format(
+                                 patterns=self.test_artifact_patterns))
+
+ def process_symbols_archive(self, filename, processed_filename):
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ reader = JarReader(filename)
+ for filename in reader.entries:
+ destpath = mozpath.join('crashreporter-symbols', filename)
+ self.log(logging.INFO, 'artifact',
+ {'destpath': destpath},
+ 'Adding {destpath} to processed archive')
+ writer.add(destpath.encode('utf-8'), reader[filename])
+
+class AndroidArtifactJob(ArtifactJob):
+
+ product = 'mobile'
+
+ package_artifact_patterns = {
+ 'application.ini',
+ 'platform.ini',
+ '**/*.so',
+ '**/interfaces.xpt',
+ }
+
+ def process_artifact(self, filename, processed_filename):
+ # Extract all .so files into the root, which will get copied into dist/bin.
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
+ if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
+ continue
+
+ dirname, basename = os.path.split(p)
+ self.log(logging.INFO, 'artifact',
+ {'basename': basename},
+ 'Adding {basename} to processed archive')
+
+                basedir = 'bin'
+                if not basename.endswith('.so'):
+                    # str.lstrip strips a character set, not a prefix, so
+                    # remove the leading 'assets/' component explicitly.
+                    if dirname.startswith('assets/'):
+                        dirname = dirname[len('assets/'):]
+                    basedir = mozpath.join('bin', dirname)
+                basename = mozpath.join(basedir, basename)
+ writer.add(basename.encode('utf-8'), f.open())
+
+
+class LinuxArtifactJob(ArtifactJob):
+
+ product = 'firefox'
+
+ package_artifact_patterns = {
+ 'firefox/application.ini',
+ 'firefox/crashreporter',
+ 'firefox/dependentlibs.list',
+ 'firefox/firefox',
+ 'firefox/firefox-bin',
+ 'firefox/minidump-analyzer',
+ 'firefox/platform.ini',
+ 'firefox/plugin-container',
+ 'firefox/updater',
+ 'firefox/**/*.so',
+ 'firefox/**/interfaces.xpt',
+ }
+
+ def process_package_artifact(self, filename, processed_filename):
+ added_entry = False
+
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ with tarfile.open(filename) as reader:
+ for p, f in UnpackFinder(TarFinder(filename, reader)):
+ if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
+ continue
+
+ # We strip off the relative "firefox/" bit from the path,
+ # but otherwise preserve it.
+ destpath = mozpath.join('bin',
+ mozpath.relpath(p, "firefox"))
+ self.log(logging.INFO, 'artifact',
+ {'destpath': destpath},
+ 'Adding {destpath} to processed archive')
+ writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
+ added_entry = True
+
+ if not added_entry:
+ raise ValueError('Archive format changed! No pattern from "{patterns}" '
+ 'matched an archive path.'.format(
+ patterns=LinuxArtifactJob.package_artifact_patterns))
+
+
+class MacArtifactJob(ArtifactJob):
+
+ product = 'firefox'
+
+ def process_package_artifact(self, filename, processed_filename):
+ tempdir = tempfile.mkdtemp()
+ try:
+ self.log(logging.INFO, 'artifact',
+ {'tempdir': tempdir},
+ 'Unpacking DMG into {tempdir}')
+ mozinstall.install(filename, tempdir) # Doesn't handle already mounted DMG files nicely:
+
+ # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"
+
+ # File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
+ # return artifacts.install_from(source, self.distdir)
+ # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
+ # return self.install_from_hg(source, distdir)
+ # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
+ # return self.install_from_url(url, distdir)
+ # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
+ # return self.install_from_file(filename, distdir)
+ # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
+ # mozinstall.install(filename, tempdir)
+ # File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
+ # install_dir = _install_dmg(src, dest)
+ # File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
+ # subprocess.call('hdiutil detach %s -quiet' % appDir,
+
+ bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
+ if len(bundle_dirs) != 1:
+ raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
+ [source] = bundle_dirs
+
+ # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
+ paths_no_keep_path = ('Contents/MacOS', [
+ 'crashreporter.app/Contents/MacOS/crashreporter',
+ 'crashreporter.app/Contents/MacOS/minidump-analyzer',
+ 'firefox',
+ 'firefox-bin',
+ 'libfreebl3.dylib',
+ 'liblgpllibs.dylib',
+ # 'liblogalloc.dylib',
+ 'libmozglue.dylib',
+ 'libnss3.dylib',
+ 'libnssckbi.dylib',
+ 'libnssdbm3.dylib',
+ 'libplugin_child_interpose.dylib',
+ # 'libreplace_jemalloc.dylib',
+ # 'libreplace_malloc.dylib',
+ 'libmozavutil.dylib',
+ 'libmozavcodec.dylib',
+ 'libsoftokn3.dylib',
+ 'plugin-container.app/Contents/MacOS/plugin-container',
+ 'updater.app/Contents/MacOS/org.mozilla.updater',
+ # 'xpcshell',
+ 'XUL',
+ ])
+
+ # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
+ paths_keep_path = ('Contents/Resources', [
+ 'browser/components/libbrowsercomps.dylib',
+ 'dependentlibs.list',
+ # 'firefox',
+ 'gmp-clearkey/0.1/libclearkey.dylib',
+ # 'gmp-fake/1.0/libfake.dylib',
+ # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
+ '**/interfaces.xpt',
+ ])
+
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ root, paths = paths_no_keep_path
+ finder = UnpackFinder(mozpath.join(source, root))
+ for path in paths:
+ for p, f in finder.find(path):
+ self.log(logging.INFO, 'artifact',
+ {'path': p},
+ 'Adding {path} to processed archive')
+ destpath = mozpath.join('bin', os.path.basename(p))
+ writer.add(destpath.encode('utf-8'), f, mode=f.mode)
+
+ root, paths = paths_keep_path
+ finder = UnpackFinder(mozpath.join(source, root))
+ for path in paths:
+ for p, f in finder.find(path):
+ self.log(logging.INFO, 'artifact',
+ {'path': p},
+ 'Adding {path} to processed archive')
+ destpath = mozpath.join('bin', p)
+ writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
+
+ finally:
+ try:
+ shutil.rmtree(tempdir)
+ except (OSError, IOError):
+ self.log(logging.WARN, 'artifact',
+ {'tempdir': tempdir},
+ 'Unable to delete {tempdir}')
+ pass
+
+
+class WinArtifactJob(ArtifactJob):
+ package_artifact_patterns = {
+ 'firefox/dependentlibs.list',
+ 'firefox/platform.ini',
+ 'firefox/application.ini',
+ 'firefox/**/*.dll',
+ 'firefox/*.exe',
+ 'firefox/**/interfaces.xpt',
+ }
+
+ product = 'firefox'
+
+ # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
+ test_artifact_patterns = {
+ ('bin/BadCertServer.exe', ('bin', 'bin')),
+ ('bin/GenerateOCSPResponse.exe', ('bin', 'bin')),
+ ('bin/OCSPStaplingServer.exe', ('bin', 'bin')),
+ ('bin/certutil.exe', ('bin', 'bin')),
+ ('bin/fileid.exe', ('bin', 'bin')),
+ ('bin/pk12util.exe', ('bin', 'bin')),
+ ('bin/ssltunnel.exe', ('bin', 'bin')),
+ ('bin/xpcshell.exe', ('bin', 'bin')),
+ ('bin/plugins/*', ('bin/plugins', 'plugins'))
+ }
+
+ def process_package_artifact(self, filename, processed_filename):
+ added_entry = False
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
+ if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
+ continue
+
+ # strip off the relative "firefox/" bit from the path:
+ basename = mozpath.relpath(p, "firefox")
+ basename = mozpath.join('bin', basename)
+ self.log(logging.INFO, 'artifact',
+ {'basename': basename},
+ 'Adding {basename} to processed archive')
+ writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
+ added_entry = True
+
+ if not added_entry:
+            raise ValueError('Archive format changed! No pattern from "{patterns}" '
+                             'matched an archive path.'.format(
+                                 patterns=self.package_artifact_patterns))
+
+# Keep the keys of this map in sync with the |mach artifact| --job
+# options. The keys of this map correspond to entries at
+# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
+# The values correspond to a pair of (<package regex>, <test archive regex>).
+JOB_DETAILS = {
+ 'android-api-15-opt': (AndroidArtifactJob, ('public/build/target.apk',
+ None)),
+ 'android-api-15-debug': (AndroidArtifactJob, ('public/build/target.apk',
+ None)),
+ 'android-x86-opt': (AndroidArtifactJob, ('public/build/target.apk',
+ None)),
+ 'linux-opt': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-i686\.tar\.bz2',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'linux-debug': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-i686\.tar\.bz2',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'linux64-opt': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-x86_64\.tar\.bz2',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'linux64-debug': (LinuxArtifactJob, ('public/build/target\.tar\.bz2',
+ 'public/build/target\.common\.tests\.zip')),
+ 'macosx64-opt': (MacArtifactJob, ('public/build/firefox-(.*)\.mac\.dmg',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'macosx64-debug': (MacArtifactJob, ('public/build/firefox-(.*)\.mac64\.dmg',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'win32-opt': (WinArtifactJob, ('public/build/firefox-(.*)\.win32.zip',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'win32-debug': (WinArtifactJob, ('public/build/firefox-(.*)\.win32.zip',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'win64-opt': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+ 'win64-debug': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
+ 'public/build/firefox-(.*)\.common\.tests\.zip')),
+}
+
+
+def get_job_details(job, log=None, download_symbols=False):
+ cls, (package_re, tests_re) = JOB_DETAILS[job]
+ return cls(package_re, tests_re, log=log, download_symbols=download_symbols)
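+
+# Illustrative lookup using a key from JOB_DETAILS above:
+#
+#     job = get_job_details('linux64-opt')  # -> a LinuxArtifactJob instance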
+
+def cachedmethod(cachefunc):
+ '''Decorator to wrap a class or instance method with a memoizing callable that
+ saves results in a (possibly shared) cache.
+ '''
+ def decorator(method):
+ def wrapper(self, *args, **kwargs):
+ mapping = cachefunc(self)
+ if mapping is None:
+ return method(self, *args, **kwargs)
+ key = (method.__name__, args, tuple(sorted(kwargs.items())))
+ try:
+ value = mapping[key]
+ return value
+ except KeyError:
+ pass
+ result = method(self, *args, **kwargs)
+ mapping[key] = result
+ return result
+ return functools.update_wrapper(wrapper, method)
+ return decorator
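+
+# Illustrative use of cachedmethod (hypothetical class, not part of this
+# module):
+#
+#     class Example(object):
+#         def __init__(self):
+#             self._cache = pylru.lrucache(16)
+#
+#         @cachedmethod(operator.attrgetter('_cache'))
+#         def slow(self, arg):
+#             ...  # computed once per distinct (args, kwargs), then cached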
+
+
+class CacheManager(object):
+ '''Maintain an LRU cache. Provide simple persistence, including support for
+ loading and saving the state using a "with" block. Allow clearing the cache
+ and printing the cache for debugging.
+
+ Provide simple logging.
+ '''
+
+ def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
+ self._skip_cache = skip_cache
+ self._cache = pylru.lrucache(cache_size, callback=cache_callback)
+ self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
+ self._log = log
+
+ def log(self, *args, **kwargs):
+ if self._log:
+ self._log(*args, **kwargs)
+
+ def load_cache(self):
+ if self._skip_cache:
+ self.log(logging.DEBUG, 'artifact',
+ {},
+ 'Skipping cache: ignoring load_cache!')
+ return
+
+ try:
+ items = pickle.load(open(self._cache_filename, 'rb'))
+ for key, value in items:
+ self._cache[key] = value
+ except Exception as e:
+ # Corrupt cache, perhaps? Sadly, pickle raises many different
+            # exceptions, so it's not worth trying to be fine-grained here.
+ # We ignore any exception, so the cache is effectively dropped.
+ self.log(logging.INFO, 'artifact',
+ {'filename': self._cache_filename, 'exception': repr(e)},
+ 'Ignoring exception unpickling cache file {filename}: {exception}')
+ pass
+
+ def dump_cache(self):
+ if self._skip_cache:
+ self.log(logging.DEBUG, 'artifact',
+ {},
+ 'Skipping cache: ignoring dump_cache!')
+ return
+
+ ensureParentDir(self._cache_filename)
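+        # pylru iterates from most to least recently used; persist in
+        # reversed (oldest-first) order so that load_cache, which re-inserts
+        # items in sequence, restores the same recency ordering.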
+ pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
+
+ def clear_cache(self):
+ if self._skip_cache:
+ self.log(logging.DEBUG, 'artifact',
+ {},
+ 'Skipping cache: ignoring clear_cache!')
+ return
+
+ with self:
+ self._cache.clear()
+
+ def print_cache(self):
+ with self:
+ for item in self._cache.items():
+ self.log(logging.INFO, 'artifact',
+ {'item': item},
+ '{item}')
+
+ def print_last_item(self, args, sorted_kwargs, result):
+ # By default, show nothing.
+ pass
+
+ def print_last(self):
+ # We use the persisted LRU caches to our advantage. The first item is
+ # most recent.
+ with self:
+ item = next(self._cache.items(), None)
+ if item is not None:
+ (name, args, sorted_kwargs), result = item
+ self.print_last_item(args, sorted_kwargs, result)
+ else:
+ self.log(logging.WARN, 'artifact',
+ {},
+ 'No last cached item found.')
+
+ def __enter__(self):
+ self.load_cache()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.dump_cache()
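+
+# Illustrative persistence protocol (using the PushheadCache subclass defined
+# below): entering the "with" block calls load_cache() and leaving it calls
+# dump_cache():
+#
+#     with PushheadCache(cache_dir) as cache:
+#         pushid = cache.parent_pushhead_id(tree, rev)
+#     # LRU state is pickled back to disk here.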
+
+class PushheadCache(CacheManager):
+ '''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''
+
+ def __init__(self, cache_dir, log=None, skip_cache=False):
+ CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+
+ @cachedmethod(operator.attrgetter('_cache'))
+ def parent_pushhead_id(self, tree, revision):
+ cset_url_tmpl = ('https://hg.mozilla.org/{tree}/json-pushes?'
+ 'changeset={changeset}&version=2&tipsonly=1')
+ req = requests.get(cset_url_tmpl.format(tree=tree, changeset=revision),
+ headers={'Accept': 'application/json'})
+ if req.status_code not in range(200, 300):
+ raise ValueError
+ result = req.json()
+ [found_pushid] = result['pushes'].keys()
+ return int(found_pushid)
+
+ @cachedmethod(operator.attrgetter('_cache'))
+ def pushid_range(self, tree, start, end):
+ pushid_url_tmpl = ('https://hg.mozilla.org/{tree}/json-pushes?'
+ 'startID={start}&endID={end}&version=2&tipsonly=1')
+
+ req = requests.get(pushid_url_tmpl.format(tree=tree, start=start,
+ end=end),
+ headers={'Accept': 'application/json'})
+ result = req.json()
+ return [
+ p['changesets'][-1] for p in result['pushes'].values()
+ ]
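+
+    # The version=2, tipsonly=1 pushlog responses consumed above are expected
+    # to look roughly like this (abbreviated, illustrative):
+    #
+    #     {"pushes": {"12345": {"changesets": ["<40-hex-digit hash>"]}}}
+    #
+    # parent_pushhead_id extracts the single push ID; pushid_range takes the
+    # tip changeset of each push in the ID range.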
+
+class TaskCache(CacheManager):
+ '''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
+
+ def __init__(self, cache_dir, log=None, skip_cache=False):
+ CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
+ self._index = taskcluster.Index()
+ self._queue = taskcluster.Queue()
+
+ @cachedmethod(operator.attrgetter('_cache'))
+ def artifact_urls(self, tree, job, rev, download_symbols):
+ try:
+ artifact_job = get_job_details(job, log=self._log, download_symbols=download_symbols)
+ except KeyError:
+ self.log(logging.INFO, 'artifact',
+ {'job': job},
+ 'Unknown job {job}')
+ raise KeyError("Unknown job")
+
+ # Grab the second part of the repo name, which is generally how things
+ # are indexed. Eg: 'integration/mozilla-inbound' is indexed as
+ # 'mozilla-inbound'
+ tree = tree.split('/')[1] if '/' in tree else tree
+
+ namespace = 'gecko.v2.{tree}.revision.{rev}.{product}.{job}'.format(
+ rev=rev,
+ tree=tree,
+ product=artifact_job.product,
+ job=job,
+ )
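+        # e.g. (illustrative):
+        #     gecko.v2.mozilla-central.revision.<rev>.firefox.linux64-opt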
+ self.log(logging.DEBUG, 'artifact',
+ {'namespace': namespace},
+ 'Searching Taskcluster index with namespace: {namespace}')
+ try:
+ task = self._index.findTask(namespace)
+ except Exception:
+ # Not all revisions correspond to pushes that produce the job we
+ # care about; and even those that do may not have completed yet.
+ raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
+ taskId = task['taskId']
+
+ artifacts = self._queue.listLatestArtifacts(taskId)['artifacts']
+
+ urls = []
+ for artifact_name in artifact_job.find_candidate_artifacts(artifacts):
+ # We can easily extract the task ID from the URL. We can't easily
+ # extract the build ID; we use the .ini files embedded in the
+ # downloaded artifact for this. We could also use the uploaded
+ # public/build/buildprops.json for this purpose.
+ url = self._queue.buildUrl('getLatestArtifact', taskId, artifact_name)
+ urls.append(url)
+ if not urls:
+ raise ValueError('Task for {namespace} existed, but no artifacts found!'.format(namespace=namespace))
+ return urls
+
+ def print_last_item(self, args, sorted_kwargs, result):
+ tree, job, rev = args
+ self.log(logging.INFO, 'artifact',
+ {'rev': rev},
+ 'Last installed binaries from hg parent revision {rev}')
+
+
+class ArtifactCache(CacheManager):
+ '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
+
+ def __init__(self, cache_dir, log=None, skip_cache=False):
+ # TODO: instead of storing N artifact packages, store M megabytes.
+ CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS, cache_callback=self.delete_file, log=log, skip_cache=skip_cache)
+ self._cache_dir = cache_dir
+        size_limit = 1024 * 1024 * 1024 # 1 GB in bytes.
+ file_limit = 4 # But always keep at least 4 old artifacts around.
+ persist_limit = PersistLimit(size_limit, file_limit)
+ self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)
+ self._last_dl_update = -1
+
+ def delete_file(self, key, value):
+ try:
+ os.remove(value)
+ self.log(logging.INFO, 'artifact',
+ {'filename': value},
+ 'Purged artifact {filename}')
+ except (OSError, IOError):
+ pass
+
+ try:
+ os.remove(value + PROCESSED_SUFFIX)
+ self.log(logging.INFO, 'artifact',
+ {'filename': value + PROCESSED_SUFFIX},
+ 'Purged processed artifact {filename}')
+ except (OSError, IOError):
+ pass
+
+ @cachedmethod(operator.attrgetter('_cache'))
+ def fetch(self, url, force=False):
+ # We download to a temporary name like HASH[:16]-basename to
+ # differentiate among URLs with the same basenames. We used to then
+ # extract the build ID from the downloaded artifact and use it to make a
+ # human readable unique name, but extracting build IDs is time consuming
+ # (especially on Mac OS X, where we must mount a large DMG file).
+ hash = hashlib.sha256(url).hexdigest()[:16]
+ fname = hash + '-' + os.path.basename(url)
+
+ path = os.path.abspath(mozpath.join(self._cache_dir, fname))
+ if self._skip_cache and os.path.exists(path):
+ self.log(logging.DEBUG, 'artifact',
+ {'path': path},
+ 'Skipping cache: removing cached downloaded artifact {path}')
+ os.remove(path)
+
+ self.log(logging.INFO, 'artifact',
+ {'path': path},
+ 'Downloading to temporary location {path}')
+ try:
+ dl = self._download_manager.download(url, fname)
+
+ def download_progress(dl, bytes_so_far, total_size):
+ percent = (float(bytes_so_far) / total_size) * 100
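+                # Only log when the download crosses a 5% bucket boundary, to
+                # avoid emitting a log line for every downloaded chunk.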
+ now = int(percent / 5)
+ if now == self._last_dl_update:
+ return
+ self._last_dl_update = now
+ self.log(logging.INFO, 'artifact',
+ {'bytes_so_far': bytes_so_far, 'total_size': total_size, 'percent': percent},
+ 'Downloading... {percent:02.1f} %')
+
+ if dl:
+ dl.set_progress(download_progress)
+ dl.wait()
+ self.log(logging.INFO, 'artifact',
+ {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
+ 'Downloaded artifact to {path}')
+ return os.path.abspath(mozpath.join(self._cache_dir, fname))
+ finally:
+ # Cancel any background downloads in progress.
+ self._download_manager.cancel()
+
+ def print_last_item(self, args, sorted_kwargs, result):
+ url, = args
+ self.log(logging.INFO, 'artifact',
+ {'url': url},
+ 'Last installed binaries from url {url}')
+ self.log(logging.INFO, 'artifact',
+ {'filename': result},
+ 'Last installed binaries from local file {filename}')
+ self.log(logging.INFO, 'artifact',
+ {'filename': result + PROCESSED_SUFFIX},
+ 'Last installed binaries from local processed file {filename}')
+
+
+class Artifacts(object):
+ '''Maintain state to efficiently fetch build artifacts from a Firefox tree.'''
+
+ def __init__(self, tree, substs, defines, job=None, log=None,
+ cache_dir='.', hg=None, git=None, skip_cache=False,
+ topsrcdir=None):
+ if (hg and git) or (not hg and not git):
+ raise ValueError("Must provide path to exactly one of hg and git")
+
+ self._substs = substs
+ self._download_symbols = self._substs.get('MOZ_ARTIFACT_BUILD_SYMBOLS', False)
+ self._defines = defines
+ self._tree = tree
+ self._job = job or self._guess_artifact_job()
+ self._log = log
+ self._hg = hg
+ self._git = git
+ self._cache_dir = cache_dir
+ self._skip_cache = skip_cache
+ self._topsrcdir = topsrcdir
+
+ try:
+ self._artifact_job = get_job_details(self._job, log=self._log, download_symbols=self._download_symbols)
+ except KeyError:
+ self.log(logging.INFO, 'artifact',
+ {'job': self._job},
+ 'Unknown job {job}')
+ raise KeyError("Unknown job")
+
+ self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+ self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+ self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
+
+ def log(self, *args, **kwargs):
+ if self._log:
+ self._log(*args, **kwargs)
+
+ def _guess_artifact_job(self):
+ # Add the "-debug" suffix to the guessed artifact job name
+ # if MOZ_DEBUG is enabled.
+ if self._substs.get('MOZ_DEBUG'):
+ target_suffix = '-debug'
+ else:
+ target_suffix = '-opt'
+
+ if self._substs.get('MOZ_BUILD_APP', '') == 'mobile/android':
+ if self._substs['ANDROID_CPU_ARCH'] == 'x86':
+ return 'android-x86-opt'
+ return 'android-api-15' + target_suffix
+
+ target_64bit = False
+ if self._substs['target_cpu'] == 'x86_64':
+ target_64bit = True
+
+ if self._defines.get('XP_LINUX', False):
+ return ('linux64' if target_64bit else 'linux') + target_suffix
+ if self._defines.get('XP_WIN', False):
+ return ('win64' if target_64bit else 'win32') + target_suffix
+ if self._defines.get('XP_MACOSX', False):
+ # We only produce unified builds in automation, so the target_cpu
+ # check is not relevant.
+ return 'macosx64' + target_suffix
+ raise Exception('Cannot determine default job for |mach artifact|!')
+
+ def _pushheads_from_rev(self, rev, count):
+ """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby
+        ancestors of `rev`. Multiple trees are queried, as the `rev` may
+ already have been pushed to multiple repositories. For each repository
+ containing `rev`, the pushhead introducing `rev` and the previous
+ `count` pushheads from that point are included in the output.
+ """
+
+ with self._pushhead_cache as pushhead_cache:
+ found_pushids = {}
+ for tree in CANDIDATE_TREES:
+ self.log(logging.INFO, 'artifact',
+ {'tree': tree,
+ 'rev': rev},
+ 'Attempting to find a pushhead containing {rev} on {tree}.')
+ try:
+ pushid = pushhead_cache.parent_pushhead_id(tree, rev)
+ found_pushids[tree] = pushid
+ except ValueError:
+ continue
+
+ candidate_pushheads = collections.defaultdict(list)
+
+ for tree, pushid in found_pushids.iteritems():
+ end = pushid
+ start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT
+
+ self.log(logging.INFO, 'artifact',
+ {'tree': tree,
+ 'pushid': pushid,
+ 'num': NUM_PUSHHEADS_TO_QUERY_PER_PARENT},
+ 'Retrieving the last {num} pushheads starting with id {pushid} on {tree}')
+ for pushhead in pushhead_cache.pushid_range(tree, start, end):
+ candidate_pushheads[pushhead].append(tree)
+
+ return candidate_pushheads
+
+ def _get_hg_revisions_from_git(self):
+ rev_list = subprocess.check_output([
+ self._git, 'rev-list', '--topo-order',
+ '--max-count={num}'.format(num=NUM_REVISIONS_TO_QUERY),
+ 'HEAD',
+ ], cwd=self._topsrcdir)
+
+ hg_hash_list = subprocess.check_output([
+ self._git, 'cinnabar', 'git2hg'
+ ] + rev_list.splitlines(), cwd=self._topsrcdir)
+
+ zeroes = "0" * 40
+
+ hashes = []
+ for hg_hash in hg_hash_list.splitlines():
+ hg_hash = hg_hash.strip()
+ if not hg_hash or hg_hash == zeroes:
+ continue
+ hashes.append(hg_hash)
+ return hashes
+
+ def _get_recent_public_revisions(self):
+ """Returns recent ancestors of the working parent that are likely to
+ to be known to Mozilla automation.
+
+ If we're using git, retrieves hg revisions from git-cinnabar.
+ """
+ if self._git:
+ return self._get_hg_revisions_from_git()
+
+ return subprocess.check_output([
+ self._hg, 'log',
+ '--template', '{node}\n',
+ '-r', 'last(public() and ::., {num})'.format(
+ num=NUM_REVISIONS_TO_QUERY)
+ ], cwd=self._topsrcdir).splitlines()
+
+ def _find_pushheads(self):
+ """Returns an iterator of recent pushhead revisions, starting with the
+ working parent.
+ """
+
+ last_revs = self._get_recent_public_revisions()
+ candidate_pushheads = self._pushheads_from_rev(last_revs[0].rstrip(),
+ NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
+ count = 0
+ for rev in last_revs:
+ rev = rev.rstrip()
+ if not rev:
+ continue
+ if rev not in candidate_pushheads:
+ continue
+ count += 1
+ yield candidate_pushheads[rev], rev
+
+ if not count:
+            raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
+                            'Search started with {rev}, which must be known to Mozilla automation.\n\n'
+                            'See https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
+                                rev=last_revs[0], num=NUM_REVISIONS_TO_QUERY))
+
+ def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
+ try:
+ urls = task_cache.artifact_urls(tree, job, pushhead, self._download_symbols)
+ except ValueError:
+ return None
+ if urls:
+ self.log(logging.INFO, 'artifact',
+ {'pushhead': pushhead,
+ 'tree': tree},
+ 'Installing from remote pushhead {pushhead} on {tree}')
+ return urls
+ return None
+
+ def install_from_file(self, filename, distdir):
+ self.log(logging.INFO, 'artifact',
+ {'filename': filename},
+ 'Installing from {filename}')
+
+ # Do we need to post-process?
+ processed_filename = filename + PROCESSED_SUFFIX
+
+ if self._skip_cache and os.path.exists(processed_filename):
+ self.log(logging.DEBUG, 'artifact',
+ {'path': processed_filename},
+ 'Skipping cache: removing cached processed artifact {path}')
+ os.remove(processed_filename)
+
+ if not os.path.exists(processed_filename):
+ self.log(logging.INFO, 'artifact',
+ {'filename': filename},
+ 'Processing contents of {filename}')
+ self.log(logging.INFO, 'artifact',
+ {'processed_filename': processed_filename},
+ 'Writing processed {processed_filename}')
+ self._artifact_job.process_artifact(filename, processed_filename)
+
+ self.log(logging.INFO, 'artifact',
+ {'processed_filename': processed_filename},
+ 'Installing from processed {processed_filename}')
+
+ # Copy all .so files, avoiding modification where possible.
+ ensureParentDir(mozpath.join(distdir, '.dummy'))
+
+ with zipfile.ZipFile(processed_filename) as zf:
+ for info in zf.infolist():
+ if info.filename.endswith('.ini'):
+ continue
+ n = mozpath.join(distdir, info.filename)
+ fh = FileAvoidWrite(n, mode='rb')
+ shutil.copyfileobj(zf.open(info), fh)
+ file_existed, file_updated = fh.close()
+ self.log(logging.INFO, 'artifact',
+ {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
+ '{updating} {filename}')
+ if not file_existed or file_updated:
+ # Libraries and binaries may need to be marked executable,
+ # depending on platform.
+ perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
+ perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
+ os.chmod(n, perms)
+ return 0
+
+ def install_from_url(self, url, distdir):
+ self.log(logging.INFO, 'artifact',
+ {'url': url},
+ 'Installing from {url}')
+ with self._artifact_cache as artifact_cache: # The with block handles persistence.
+ filename = artifact_cache.fetch(url)
+ return self.install_from_file(filename, distdir)
+
+ def _install_from_hg_pushheads(self, hg_pushheads, distdir):
+ """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
+ and tree-sets they are known to be in, trying to download and
+ install from each.
+ """
+
+ urls = None
+ count = 0
+        # The with block handles persistence.
+ with self._task_cache as task_cache:
+ for trees, hg_hash in hg_pushheads:
+ for tree in trees:
+ count += 1
+ self.log(logging.DEBUG, 'artifact',
+ {'hg_hash': hg_hash,
+ 'tree': tree},
+ 'Trying to find artifacts for hg revision {hg_hash} on tree {tree}.')
+ urls = self.find_pushhead_artifacts(task_cache, self._job, tree, hg_hash)
+ if urls:
+ for url in urls:
+ if self.install_from_url(url, distdir):
+ return 1
+ return 0
+
+ self.log(logging.ERROR, 'artifact',
+ {'count': count},
+ 'Tried {count} pushheads, no built artifacts found.')
+ return 1
+
+ def install_from_recent(self, distdir):
+ hg_pushheads = self._find_pushheads()
+ return self._install_from_hg_pushheads(hg_pushheads, distdir)
+
+ def install_from_revset(self, revset, distdir):
+ if self._hg:
+ revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
+ '-r', revset], cwd=self._topsrcdir).strip()
+ if len(revision.split('\n')) != 1:
+ raise ValueError('hg revision specification must resolve to exactly one commit')
+ else:
+ revision = subprocess.check_output([self._git, 'rev-parse', revset], cwd=self._topsrcdir).strip()
+ revision = subprocess.check_output([self._git, 'cinnabar', 'git2hg', revision], cwd=self._topsrcdir).strip()
+ if len(revision.split('\n')) != 1:
+ raise ValueError('hg revision specification must resolve to exactly one commit')
+ if revision == "0" * 40:
+ raise ValueError('git revision specification must resolve to a commit known to hg')
+
+ self.log(logging.INFO, 'artifact',
+ {'revset': revset,
+ 'revision': revision},
+ 'Will only accept artifacts from a pushhead at {revision} '
+ '(matched revset "{revset}").')
+ pushheads = [(list(CANDIDATE_TREES), revision)]
+ return self._install_from_hg_pushheads(pushheads, distdir)
+
+ def install_from(self, source, distdir):
+ """Install artifacts from a ``source`` into the given ``distdir``.
+ """
+ if source and os.path.isfile(source):
+ return self.install_from_file(source, distdir)
+ elif source and urlparse.urlparse(source).scheme:
+ return self.install_from_url(source, distdir)
+ else:
+ if source is None and 'MOZ_ARTIFACT_REVISION' in os.environ:
+ source = os.environ['MOZ_ARTIFACT_REVISION']
+
+ if source:
+ return self.install_from_revset(source, distdir)
+
+ return self.install_from_recent(distdir)
+
+
+ def print_last(self):
+ self.log(logging.INFO, 'artifact',
+ {},
+ 'Printing last used artifact details.')
+ self._task_cache.print_last()
+ self._artifact_cache.print_last()
+ self._pushhead_cache.print_last()
+
+ def clear_cache(self):
+ self.log(logging.INFO, 'artifact',
+ {},
+ 'Deleting cached artifacts and caches.')
+ self._task_cache.clear_cache()
+ self._artifact_cache.clear_cache()
+ self._pushhead_cache.clear_cache()
+
+ def print_cache(self):
+ self.log(logging.INFO, 'artifact',
+ {},
+ 'Printing cached artifacts and caches.')
+ self._task_cache.print_cache()
+ self._artifact_cache.print_cache()
+ self._pushhead_cache.print_cache()
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
new file mode 100644
index 000000000..64bcb87d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+backends = {
+ 'AndroidEclipse': 'mozbuild.backend.android_eclipse',
+ 'ChromeMap': 'mozbuild.codecoverage.chrome_map',
+ 'CompileDB': 'mozbuild.compilation.database',
+ 'CppEclipse': 'mozbuild.backend.cpp_eclipse',
+ 'FasterMake': 'mozbuild.backend.fastermake',
+ 'FasterMake+RecursiveMake': None,
+ 'RecursiveMake': 'mozbuild.backend.recursivemake',
+ 'Tup': 'mozbuild.backend.tup',
+ 'VisualStudio': 'mozbuild.backend.visualstudio',
+}
+
+
+def get_backend_class(name):
+ if '+' in name:
+ from mozbuild.backend.base import HybridBackend
+ return HybridBackend(*(get_backend_class(name)
+ for name in name.split('+')))
+
+ class_name = '%sBackend' % name
+ module = __import__(backends[name], globals(), locals(), [class_name])
+ return getattr(module, class_name)
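+
+# Illustrative lookups (note the asymmetry: a plain name resolves to a class,
+# while a '+' name constructs a HybridBackend instance):
+#
+#     get_backend_class('RecursiveMake')             # -> RecursiveMakeBackend
+#     get_backend_class('FasterMake+RecursiveMake')  # -> HybridBackend(...)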
diff --git a/python/mozbuild/mozbuild/backend/android_eclipse.py b/python/mozbuild/mozbuild/backend/android_eclipse.py
new file mode 100644
index 000000000..f17eb8d34
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/android_eclipse.py
@@ -0,0 +1,267 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import itertools
+import os
+import time
+import types
+import xml.dom.minidom as minidom
+import xml.etree.ElementTree as ET
+
+from mozpack.copier import FileCopier
+from mozpack.files import (FileFinder, PreprocessedFile)
+from mozpack.manifests import InstallManifest
+import mozpack.path as mozpath
+
+from .common import CommonBackend
+from ..frontend.data import (
+ AndroidEclipseProjectData,
+ ContextDerived,
+ ContextWrapped,
+)
+from ..makeutil import Makefile
+from ..util import ensureParentDir
+from mozbuild.base import (
+ ExecutionSummary,
+ MachCommandConditions,
+)
+
+
+def pretty_print(element):
+ """Return a pretty-printed XML string for an Element.
+ """
+ s = ET.tostring(element, 'utf-8')
+ # minidom wraps element in a Document node; firstChild strips it.
+ return minidom.parseString(s).firstChild.toprettyxml(indent=' ')
+
+
+class AndroidEclipseBackend(CommonBackend):
+ """Backend that generates Android Eclipse project files.
+ """
+ def __init__(self, environment):
+ if not MachCommandConditions.is_android(environment):
+ raise Exception(
+ 'The Android Eclipse backend is not available with this '
+ 'configuration.')
+
+ super(AndroidEclipseBackend, self).__init__(environment)
+
+ def summary(self):
+ return ExecutionSummary(
+ 'AndroidEclipse backend executed in {execution_time:.2f}s\n'
+ 'Wrote {projects:d} Android Eclipse projects to {path:s}; '
+ '{created:d} created; {updated:d} updated',
+ execution_time=self._execution_time,
+ projects=self._created_count + self._updated_count,
+ path=mozpath.join(self.environment.topobjdir, 'android_eclipse'),
+ created=self._created_count,
+ updated=self._updated_count,
+ )
+
+ def consume_object(self, obj):
+ """Write out Android Eclipse project files."""
+
+ if not isinstance(obj, ContextDerived):
+ return False
+
+ if CommonBackend.consume_object(self, obj):
+ # If CommonBackend acknowledged the object, we're done with it.
+ return True
+
+ # Handle the one case we care about specially.
+ if isinstance(obj, ContextWrapped) and isinstance(obj.wrapped, AndroidEclipseProjectData):
+ self._process_android_eclipse_project_data(obj.wrapped, obj.srcdir, obj.objdir)
+
+        # We don't want to handle most things, so we just acknowledge all objects.
+ return True
+
+ def consume_finished(self):
+ """The common backend handles WebIDL and test files. We don't handle
+ these, so we don't call our superclass.
+ """
+
+ def _Element_for_classpathentry(self, cpe):
+ """Turn a ClassPathEntry into an XML Element, like one of:
+ <classpathentry including="**/*.java" kind="src" path="preprocessed"/>
+ <classpathentry including="**/*.java" excluding="org/mozilla/gecko/Excluded.java|org/mozilla/gecko/SecondExcluded.java" kind="src" path="src"/>
+ <classpathentry including="**/*.java" kind="src" path="thirdparty">
+ <attributes>
+ <attribute name="ignore_optional_problems" value="true"/>
+ </attributes>
+ </classpathentry>
+ """
+ e = ET.Element('classpathentry')
+ e.set('kind', 'src')
+ e.set('including', '**/*.java')
+ e.set('path', cpe.path)
+ if cpe.exclude_patterns:
+ e.set('excluding', '|'.join(sorted(cpe.exclude_patterns)))
+ if cpe.ignore_warnings:
+ attrs = ET.SubElement(e, 'attributes')
+ attr = ET.SubElement(attrs, 'attribute')
+ attr.set('name', 'ignore_optional_problems')
+ attr.set('value', 'true')
+ return e
+
+ def _Element_for_referenced_project(self, name):
+ """Turn a referenced project name into an XML Element, like:
+ <classpathentry combineaccessrules="false" kind="src" path="/Fennec"/>
+ """
+ e = ET.Element('classpathentry')
+ e.set('kind', 'src')
+ e.set('combineaccessrules', 'false')
+ # All project directories are in the same root; this
+ # reference is absolute in the Eclipse namespace.
+ e.set('path', '/' + name)
+ return e
+
+ def _Element_for_extra_jar(self, name):
+ """Turn a referenced JAR name into an XML Element, like:
+ <classpathentry exported="true" kind="lib" path="/Users/nalexander/Mozilla/gecko-dev/build/mobile/robocop/robotium-solo-4.3.1.jar"/>
+ """
+ e = ET.Element('classpathentry')
+ e.set('kind', 'lib')
+ e.set('exported', 'true')
+ e.set('path', name)
+ return e
+
+ def _Element_for_filtered_resources(self, filtered_resources):
+ """Turn a list of filtered resource arguments like
+ ['1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**']
+ into an XML Element, like:
+ <filteredResources>
+ <filter>
+ <id>1393009101322</id>
+ <name></name>
+ <type>30</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**</arguments>
+ </matcher>
+ </filter>
+ </filteredResources>
+
+        The id is derived from the current time; the values are magic."""
+
+ id = int(1000 * time.time())
+ filteredResources = ET.Element('filteredResources')
+ for arg in sorted(filtered_resources):
+ e = ET.SubElement(filteredResources, 'filter')
+ ET.SubElement(e, 'id').text = str(id)
+ id += 1
+ ET.SubElement(e, 'name')
+ ET.SubElement(e, 'type').text = '30' # It's magic!
+ matcher = ET.SubElement(e, 'matcher')
+ ET.SubElement(matcher, 'id').text = 'org.eclipse.ui.ide.multiFilter'
+ ET.SubElement(matcher, 'arguments').text = str(arg)
+ return filteredResources
+
+ def _manifest_for_project(self, srcdir, project):
+ manifest = InstallManifest()
+
+ if project.manifest:
+ manifest.add_copy(mozpath.join(srcdir, project.manifest), 'AndroidManifest.xml')
+
+ if project.res:
+ manifest.add_symlink(mozpath.join(srcdir, project.res), 'res')
+ else:
+ # Eclipse expects a res directory no matter what, so we
+ # make an empty directory if the project doesn't specify.
+ res = os.path.abspath(mozpath.join(os.path.dirname(__file__),
+ 'templates', 'android_eclipse_empty_resource_directory'))
+ manifest.add_pattern_copy(res, '.**', 'res')
+
+ if project.assets:
+ manifest.add_symlink(mozpath.join(srcdir, project.assets), 'assets')
+
+ for cpe in project._classpathentries:
+ manifest.add_symlink(mozpath.join(srcdir, cpe.srcdir), cpe.dstdir)
+
+ # JARs and native libraries go in the same place. For now, we're adding
+ # class path entries with the full path to required JAR files (which
+ # makes sense for JARs in the source directory, but probably doesn't for
+ # JARs in the object directory). This could be a problem because we only
+ # know the contents of (a subdirectory of) libs/ after a successful
+ # build and package, which is after build-backend time. At the cost of
+ # some flexibility, we explicitly copy certain libraries here; if the
+ # libraries aren't present -- namely, when the tree hasn't been packaged
+ # -- this fails. That's by design, to avoid crashes on device caused by
+ # missing native libraries.
+ for src, dst in project.libs:
+ manifest.add_copy(mozpath.join(srcdir, src), dst)
+
+ return manifest
+
+ def _process_android_eclipse_project_data(self, data, srcdir, objdir):
+ # This can't be relative to the environment's topsrcdir,
+ # because during testing topsrcdir is faked.
+ template_directory = os.path.abspath(mozpath.join(os.path.dirname(__file__),
+ 'templates', 'android_eclipse'))
+
+ project_directory = mozpath.join(self.environment.topobjdir, 'android_eclipse', data.name)
+ manifest_path = mozpath.join(self.environment.topobjdir, 'android_eclipse', '%s.manifest' % data.name)
+
+ manifest = self._manifest_for_project(srcdir, data)
+ ensureParentDir(manifest_path)
+ manifest.write(path=manifest_path)
+
+ classpathentries = []
+ for cpe in sorted(data._classpathentries, key=lambda x: x.path):
+ e = self._Element_for_classpathentry(cpe)
+ classpathentries.append(ET.tostring(e))
+
+ for name in sorted(data.referenced_projects):
+ e = self._Element_for_referenced_project(name)
+ classpathentries.append(ET.tostring(e))
+
+ for name in sorted(data.extra_jars):
+ e = self._Element_for_extra_jar(mozpath.join(srcdir, name))
+ classpathentries.append(ET.tostring(e))
+
+ defines = {}
+ defines['IDE_OBJDIR'] = objdir
+ defines['IDE_TOPOBJDIR'] = self.environment.topobjdir
+ defines['IDE_SRCDIR'] = srcdir
+ defines['IDE_TOPSRCDIR'] = self.environment.topsrcdir
+ defines['IDE_PROJECT_NAME'] = data.name
+ defines['IDE_PACKAGE_NAME'] = data.package_name
+ defines['IDE_PROJECT_DIRECTORY'] = project_directory
+ defines['IDE_RELSRCDIR'] = mozpath.relpath(srcdir, self.environment.topsrcdir)
+ defines['IDE_CLASSPATH_ENTRIES'] = '\n'.join('\t' + cpe for cpe in classpathentries)
+ defines['IDE_RECURSIVE_MAKE_TARGETS'] = ' '.join(sorted(data.recursive_make_targets))
+ # Like android.library=true
+ defines['IDE_PROJECT_LIBRARY_SETTING'] = 'android.library=true' if data.is_library else ''
+ # Like android.library.reference.1=FennecBrandingResources
+ defines['IDE_PROJECT_LIBRARY_REFERENCES'] = '\n'.join(
+ 'android.library.reference.%s=%s' % (i + 1, ref)
+ for i, ref in enumerate(sorted(data.included_projects)))
+ if data.filtered_resources:
+ filteredResources = self._Element_for_filtered_resources(data.filtered_resources)
+ defines['IDE_PROJECT_FILTERED_RESOURCES'] = pretty_print(filteredResources).strip()
+ else:
+ defines['IDE_PROJECT_FILTERED_RESOURCES'] = ''
+ defines['ANDROID_TARGET_SDK'] = self.environment.substs['ANDROID_TARGET_SDK']
+ defines['MOZ_ANDROID_MIN_SDK_VERSION'] = self.environment.defines['MOZ_ANDROID_MIN_SDK_VERSION']
+
+ copier = FileCopier()
+ finder = FileFinder(template_directory)
+ for input_filename, f in itertools.chain(finder.find('**'), finder.find('.**')):
+ if input_filename == 'AndroidManifest.xml' and not data.is_library:
+ # Main projects supply their own manifests.
+ continue
+ copier.add(input_filename, PreprocessedFile(
+ mozpath.join(finder.base, input_filename),
+ depfile_path=None,
+ marker='#',
+ defines=defines,
+ extra_depends={mozpath.join(finder.base, input_filename)}))
+
+ # When we re-create the build backend, we kill everything that was there.
+ if os.path.isdir(project_directory):
+ self._updated_count += 1
+ else:
+ self._created_count += 1
+ copier.copy(project_directory, skip_if_older=False, remove_unaccounted=True)
diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
new file mode 100644
index 000000000..f5e0c2d3c
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -0,0 +1,317 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+from abc import (
+ ABCMeta,
+ abstractmethod,
+)
+
+import errno
+import itertools
+import os
+import time
+
+from contextlib import contextmanager
+
+from mach.mixin.logging import LoggingMixin
+
+import mozpack.path as mozpath
+from ..preprocessor import Preprocessor
+from ..pythonutil import iter_modules_in_path
+from ..util import (
+ FileAvoidWrite,
+ simple_diff,
+)
+from ..frontend.data import ContextDerived
+from .configenvironment import ConfigEnvironment
+from mozbuild.base import ExecutionSummary
+
+
+class BuildBackend(LoggingMixin):
+ """Abstract base class for build backends.
+
+ A build backend is merely a consumer of the build configuration (the output
+ of the frontend processing). It does something with said data; what it
+ does exactly is at the discretion of the specific implementation.
+ """
+
+ __metaclass__ = ABCMeta
+
+ def __init__(self, environment):
+ assert isinstance(environment, ConfigEnvironment)
+
+ self.populate_logger()
+
+ self.environment = environment
+
+ # Files whose modification should cause a new read and backend
+ # generation.
+ self.backend_input_files = set()
+
+ # Files generated by the backend.
+ self._backend_output_files = set()
+
+ self._environments = {}
+ self._environments[environment.topobjdir] = environment
+
+ # The number of backend files created.
+ self._created_count = 0
+
+ # The number of backend files updated.
+ self._updated_count = 0
+
+ # The number of unchanged backend files.
+ self._unchanged_count = 0
+
+ # The number of deleted backend files.
+ self._deleted_count = 0
+
+ # The total wall time spent in the backend. This counts the time the
+ # backend writes out files, etc.
+ self._execution_time = 0.0
+
+ # Mapping of changed file paths to diffs of the changes.
+ self.file_diffs = {}
+
+ self.dry_run = False
+
+ self._init()
+
+ def summary(self):
+ return ExecutionSummary(
+ self.__class__.__name__.replace('Backend', '') +
+ ' backend executed in {execution_time:.2f}s\n '
+ '{total:d} total backend files; '
+ '{created:d} created; '
+ '{updated:d} updated; '
+ '{unchanged:d} unchanged; '
+ '{deleted:d} deleted',
+ execution_time=self._execution_time,
+ total=self._created_count + self._updated_count +
+ self._unchanged_count,
+ created=self._created_count,
+ updated=self._updated_count,
+ unchanged=self._unchanged_count,
+ deleted=self._deleted_count)
+
+ def _init(self):
+ """Hook point for child classes to perform actions during __init__.
+
+ This exists so child classes don't need to implement __init__.
+ """
+
+ def consume(self, objs):
+ """Consume a stream of TreeMetadata instances.
+
+ This is the main method of the interface. This is what takes the
+ frontend output and does something with it.
+
+ Child classes are not expected to implement this method. Instead, the
+ base class consumes objects and calls methods (possibly) implemented by
+ child classes.
+ """
+
+ # Previously generated files.
+ list_file = mozpath.join(self.environment.topobjdir, 'backend.%s'
+ % self.__class__.__name__)
+ backend_output_list = set()
+ if os.path.exists(list_file):
+ with open(list_file) as fh:
+ backend_output_list.update(mozpath.normsep(p)
+ for p in fh.read().splitlines())
+
+ for obj in objs:
+ obj_start = time.time()
+ if (not self.consume_object(obj) and
+ not isinstance(self, PartialBackend)):
+ raise Exception('Unhandled object of type %s' % type(obj))
+ self._execution_time += time.time() - obj_start
+
+ if (isinstance(obj, ContextDerived) and
+ not isinstance(self, PartialBackend)):
+ self.backend_input_files |= obj.context_all_paths
+
+ # Pull in all loaded Python as dependencies so any Python changes that
+ # could influence our output result in a rescan.
+ self.backend_input_files |= set(iter_modules_in_path(
+ self.environment.topsrcdir, self.environment.topobjdir))
+
+ finished_start = time.time()
+ self.consume_finished()
+ self._execution_time += time.time() - finished_start
+
+ # Purge backend files created in a previous run but not created anymore.
+ delete_files = backend_output_list - self._backend_output_files
+ for path in delete_files:
+ full_path = mozpath.join(self.environment.topobjdir, path)
+ try:
+ with open(full_path, 'r') as existing:
+ old_content = existing.read()
+ if old_content:
+ self.file_diffs[full_path] = simple_diff(
+ full_path, old_content.splitlines(), None)
+ except IOError:
+ pass
+ try:
+ if not self.dry_run:
+ os.unlink(full_path)
+ self._deleted_count += 1
+ except OSError:
+ pass
+ # Remove directories that are now empty.
+ for d in set(mozpath.dirname(p) for p in delete_files):
+ try:
+ os.removedirs(d)
+ except OSError:
+ pass
+
+ # Write out the list of backend files generated, if it changed.
+ if self._deleted_count or self._created_count or \
+ not os.path.exists(list_file):
+ with self._write_file(list_file) as fh:
+ fh.write('\n'.join(sorted(self._backend_output_files)))
+ else:
+ # Always update its mtime.
+ with open(list_file, 'a'):
+ os.utime(list_file, None)
+
+ # Write out the list of input files for the backend
+ with self._write_file('%s.in' % list_file) as fh:
+ fh.write('\n'.join(sorted(
+ mozpath.normsep(f) for f in self.backend_input_files)))
+
+ @abstractmethod
+ def consume_object(self, obj):
+ """Consumes an individual TreeMetadata instance.
+
+ This is the main method used by child classes to react to build
+ metadata.
+ """
+
+ def consume_finished(self):
+ """Called when consume() has completed handling all objects."""
+
+ def build(self, config, output, jobs, verbose):
+ """Called when 'mach build' is executed.
+
+ This should return the status value of a subprocess, where 0 denotes
+ success and any other value is an error code. A return value of None
+ indicates that the default 'make -f client.mk' should run.
+ """
+ return None
+
+ @contextmanager
+ def _write_file(self, path=None, fh=None, mode='rU'):
+ """Context manager to write a file.
+
+ This is a glorified wrapper around FileAvoidWrite with integration to
+ update the summary data on this instance.
+
+ Example usage:
+
+ with self._write_file('foo.txt') as fh:
+ fh.write('hello world')
+ """
+
+ if path is not None:
+ assert fh is None
+ fh = FileAvoidWrite(path, capture_diff=True, dry_run=self.dry_run,
+ mode=mode)
+ else:
+ assert fh is not None
+
+ dirname = mozpath.dirname(fh.name)
+ try:
+ os.makedirs(dirname)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+ yield fh
+
+ self._backend_output_files.add(mozpath.relpath(fh.name, self.environment.topobjdir))
+ existed, updated = fh.close()
+ if fh.diff:
+ self.file_diffs[fh.name] = fh.diff
+ if not existed:
+ self._created_count += 1
+ elif updated:
+ self._updated_count += 1
+ else:
+ self._unchanged_count += 1
+
+ @contextmanager
+ def _get_preprocessor(self, obj):
+ '''Returns a preprocessor with a few predefined values depending on
+ the given BaseConfigSubstitution(-like) object, and all the substs
+ in the current environment.'''
+ pp = Preprocessor()
+ srcdir = mozpath.dirname(obj.input_path)
+ pp.context.update({
+ k: ' '.join(v) if isinstance(v, list) else v
+ for k, v in obj.config.substs.iteritems()
+ })
+ pp.context.update(
+ top_srcdir=obj.topsrcdir,
+ topobjdir=obj.topobjdir,
+ srcdir=srcdir,
+ relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or '.',
+ DEPTH=mozpath.relpath(obj.topobjdir, mozpath.dirname(obj.output_path)) or '.',
+ )
+ pp.do_filter('attemptSubstitution')
+ pp.setMarker(None)
+ with self._write_file(obj.output_path) as fh:
+ pp.out = fh
+ yield pp
+
+
+class PartialBackend(BuildBackend):
+ """A PartialBackend is a BuildBackend declaring that its consume_object
+ method may not handle all build configuration objects it's passed, and
+ that this is expected."""
+
+
+def HybridBackend(*backends):
+ """A HybridBackend is the combination of one or more PartialBackends
+ with a non-partial BuildBackend.
+
+ Build configuration objects are passed to each backend, stopping at the
+ first of them that declares having handled them.
+ """
+ assert len(backends) >= 2
+ assert all(issubclass(b, PartialBackend) for b in backends[:-1])
+ assert not issubclass(backends[-1], PartialBackend)
+ assert all(issubclass(b, BuildBackend) for b in backends)
+
+ class TheHybridBackend(BuildBackend):
+ def __init__(self, environment):
+ self._backends = [b(environment) for b in backends]
+ super(TheHybridBackend, self).__init__(environment)
+
+ def consume_object(self, obj):
+ return any(b.consume_object(obj) for b in self._backends)
+
+ def consume_finished(self):
+ for backend in self._backends:
+ backend.consume_finished()
+
+ for attr in ('_execution_time', '_created_count', '_updated_count',
+ '_unchanged_count', '_deleted_count'):
+ setattr(self, attr,
+ sum(getattr(b, attr) for b in self._backends))
+
+ for b in self._backends:
+ self.file_diffs.update(b.file_diffs)
+ for attr in ('backend_input_files', '_backend_output_files'):
+ files = getattr(self, attr)
+ files |= getattr(b, attr)
+
+ name = '+'.join(itertools.chain(
+ (b.__name__.replace('Backend', '') for b in backends[:-1]),
+ (b.__name__ for b in backends[-1:])
+ ))
+
+ return type(str(name), (TheHybridBackend,), {})
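+
+# An illustrative sketch (not part of this module) of how concrete backends
+# plug into this interface and how HybridBackend composes them; the class
+# names below are hypothetical placeholders, not real mozbuild backends:
+#
+# class CountingBackend(PartialBackend):
+# def _init(self):
+# self.seen = 0
+# def consume_object(self, obj):
+# # Count every object but never claim it; returning False lets
+# # the object fall through to the next backend in the hybrid.
+# self.seen += 1
+# return False
+#
+# class FullBackend(BuildBackend):
+# def consume_object(self, obj):
+# return True
+# def consume_finished(self):
+# with self._write_file('out.txt') as fh:
+# fh.write('done')
+#
+# # Partial backends come first, the full backend last; each object stops
+# # at the first backend whose consume_object() returns True.
+# Counting_Full = HybridBackend(CountingBackend, FullBackend)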
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
new file mode 100644
index 000000000..12b2a27c4
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -0,0 +1,567 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import cPickle as pickle
+import itertools
+import json
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.backend.base import BuildBackend
+
+from mozbuild.frontend.context import (
+ Context,
+ Path,
+ RenamedSourcePath,
+ VARIABLES,
+)
+from mozbuild.frontend.data import (
+ BaseProgram,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ ExampleWebIDLInterface,
+ IPDLFile,
+ FinalTargetPreprocessedFiles,
+ FinalTargetFiles,
+ GeneratedEventWebIDLFile,
+ GeneratedWebIDLFile,
+ PreprocessedTestWebIDLFile,
+ PreprocessedWebIDLFile,
+ SharedLibrary,
+ TestManifest,
+ TestWebIDLFile,
+ UnifiedSources,
+ XPIDLFile,
+ WebIDLFile,
+)
+from mozbuild.jar import (
+ DeprecatedJarManifest,
+ JarManifestParser,
+)
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import parse_manifest_line
+
+from collections import defaultdict
+
+from mozbuild.util import group_unified_files
+
+class XPIDLManager(object):
+ """Helps manage XPCOM IDLs in the context of the build system."""
+ def __init__(self, config):
+ self.config = config
+ self.topsrcdir = config.topsrcdir
+ self.topobjdir = config.topobjdir
+
+ self.idls = {}
+ self.modules = {}
+ self.interface_manifests = {}
+ self.chrome_manifests = set()
+
+ def register_idl(self, idl, allow_existing=False):
+ """Registers an IDL file with this instance.
+
+ The IDL file will be built, installed, etc.
+ """
+ basename = mozpath.basename(idl.source_path)
+ root = mozpath.splitext(basename)[0]
+ xpt = '%s.xpt' % idl.module
+ manifest = mozpath.join(idl.install_target, 'components', 'interfaces.manifest')
+ chrome_manifest = mozpath.join(idl.install_target, 'chrome.manifest')
+
+ entry = {
+ 'source': idl.source_path,
+ 'module': idl.module,
+ 'basename': basename,
+ 'root': root,
+ 'manifest': manifest,
+ }
+
+ if not allow_existing and entry['basename'] in self.idls:
+ raise Exception('IDL already registered: %s' % entry['basename'])
+
+ self.idls[entry['basename']] = entry
+ t = self.modules.setdefault(entry['module'], (idl.install_target, set()))
+ t[1].add(entry['root'])
+
+ if idl.add_to_manifest:
+ self.interface_manifests.setdefault(manifest, set()).add(xpt)
+ self.chrome_manifests.add(chrome_manifest)
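+
+ # For illustration (hypothetical values): registering an IDL with
+ # source_path '.../nsIFoo.idl', module 'necko' and install_target
+ # 'dist/bin' records basename 'nsIFoo.idl' and root 'nsIFoo', groups the
+ # root under the 'necko' module (built into necko.xpt) and, when
+ # add_to_manifest is set, lists necko.xpt in
+ # dist/bin/components/interfaces.manifest and notes dist/bin/chrome.manifest.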
+
+
+class WebIDLCollection(object):
+ """Collects WebIDL info referenced during the build."""
+
+ def __init__(self):
+ self.sources = set()
+ self.generated_sources = set()
+ self.generated_events_sources = set()
+ self.preprocessed_sources = set()
+ self.test_sources = set()
+ self.preprocessed_test_sources = set()
+ self.example_interfaces = set()
+
+ def all_regular_sources(self):
+ return self.sources | self.generated_sources | \
+ self.generated_events_sources | self.preprocessed_sources
+
+ def all_regular_basenames(self):
+ return [os.path.basename(source) for source in self.all_regular_sources()]
+
+ def all_regular_stems(self):
+ return [os.path.splitext(b)[0] for b in self.all_regular_basenames()]
+
+ def all_regular_bindinggen_stems(self):
+ for stem in self.all_regular_stems():
+ yield '%sBinding' % stem
+
+ for source in self.generated_events_sources:
+ yield os.path.splitext(os.path.basename(source))[0]
+
+ def all_regular_cpp_basenames(self):
+ for stem in self.all_regular_bindinggen_stems():
+ yield '%s.cpp' % stem
+
+ def all_test_sources(self):
+ return self.test_sources | self.preprocessed_test_sources
+
+ def all_test_basenames(self):
+ return [os.path.basename(source) for source in self.all_test_sources()]
+
+ def all_test_stems(self):
+ return [os.path.splitext(b)[0] for b in self.all_test_basenames()]
+
+ def all_test_cpp_basenames(self):
+ return ['%sBinding.cpp' % s for s in self.all_test_stems()]
+
+ def all_static_sources(self):
+ return self.sources | self.generated_events_sources | \
+ self.test_sources
+
+ def all_non_static_sources(self):
+ return self.generated_sources | self.all_preprocessed_sources()
+
+ def all_non_static_basenames(self):
+ return [os.path.basename(s) for s in self.all_non_static_sources()]
+
+ def all_preprocessed_sources(self):
+ return self.preprocessed_sources | self.preprocessed_test_sources
+
+ def all_sources(self):
+ return set(self.all_regular_sources()) | set(self.all_test_sources())
+
+ def all_basenames(self):
+ return [os.path.basename(source) for source in self.all_sources()]
+
+ def all_stems(self):
+ return [os.path.splitext(b)[0] for b in self.all_basenames()]
+
+ def generated_events_basenames(self):
+ return [os.path.basename(s) for s in self.generated_events_sources]
+
+ def generated_events_stems(self):
+ return [os.path.splitext(b)[0] for b in self.generated_events_basenames()]
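+
+ # A worked example of the derivations above (hypothetical file names): a
+ # regular source 'Foo.webidl' has basename 'Foo.webidl', stem 'Foo',
+ # bindinggen stem 'FooBinding' and C++ basename 'FooBinding.cpp'; a
+ # generated-events source 'FooEvent.webidl' additionally contributes the
+ # bare 'FooEvent' bindinggen stem, hence 'FooEvent.cpp'.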
+
+
+class TestManager(object):
+ """Helps hold state related to tests."""
+
+ def __init__(self, config):
+ self.config = config
+ self.topsrcdir = mozpath.normpath(config.topsrcdir)
+
+ self.tests_by_path = defaultdict(list)
+ self.installs_by_path = defaultdict(list)
+ self.deferred_installs = set()
+ self.manifest_defaults = {}
+
+ def add(self, t, flavor, topsrcdir):
+ t = dict(t)
+ t['flavor'] = flavor
+
+ path = mozpath.normpath(t['path'])
+ assert mozpath.basedir(path, [topsrcdir])
+
+ key = path[len(topsrcdir)+1:]
+ t['file_relpath'] = key
+ t['dir_relpath'] = mozpath.dirname(key)
+
+ self.tests_by_path[key].append(t)
+
+ def add_defaults(self, manifest):
+ if not hasattr(manifest, 'manifest_defaults'):
+ return
+ for sub_manifest, defaults in manifest.manifest_defaults.items():
+ self.manifest_defaults[sub_manifest] = defaults
+
+ def add_installs(self, obj, topsrcdir):
+ for src, (dest, _) in obj.installs.iteritems():
+ key = src[len(topsrcdir)+1:]
+ self.installs_by_path[key].append((src, dest))
+ for src, pat, dest in obj.pattern_installs:
+ key = mozpath.join(src[len(topsrcdir)+1:], pat)
+ self.installs_by_path[key].append((src, pat, dest))
+ for path in obj.deferred_installs:
+ self.deferred_installs.add(path[2:])
+
+
+class BinariesCollection(object):
+ """Tracks state of binaries produced by the build."""
+
+ def __init__(self):
+ self.shared_libraries = []
+ self.programs = []
+
+
+class CommonBackend(BuildBackend):
+ """Holds logic common to all build backends."""
+
+ def _init(self):
+ self._idl_manager = XPIDLManager(self.environment)
+ self._test_manager = TestManager(self.environment)
+ self._webidls = WebIDLCollection()
+ self._binaries = BinariesCollection()
+ self._configs = set()
+ self._ipdl_sources = set()
+
+ def consume_object(self, obj):
+ self._configs.add(obj.config)
+
+ if isinstance(obj, TestManifest):
+ for test in obj.tests:
+ self._test_manager.add(test, obj.flavor, obj.topsrcdir)
+ self._test_manager.add_defaults(obj.manifest)
+ self._test_manager.add_installs(obj, obj.topsrcdir)
+
+ elif isinstance(obj, XPIDLFile):
+ # TODO bug 1240134 tracks not processing XPIDL files during
+ # artifact builds.
+ self._idl_manager.register_idl(obj)
+
+ elif isinstance(obj, ConfigFileSubstitution):
+ # Do not handle ConfigFileSubstitution for Makefiles. Leave that
+ # to other backends.
+ if mozpath.basename(obj.output_path) == 'Makefile':
+ return False
+ with self._get_preprocessor(obj) as pp:
+ pp.do_include(obj.input_path)
+ self.backend_input_files.add(obj.input_path)
+
+ # We should consider aggregating WebIDL types in emitter.py.
+ elif isinstance(obj, WebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
+
+ elif isinstance(obj, GeneratedEventWebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.generated_events_sources.add(mozpath.join(
+ obj.srcdir, obj.basename))
+
+ elif isinstance(obj, TestWebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.test_sources.add(mozpath.join(obj.srcdir,
+ obj.basename))
+
+ elif isinstance(obj, PreprocessedTestWebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.preprocessed_test_sources.add(mozpath.join(
+ obj.srcdir, obj.basename))
+
+ elif isinstance(obj, GeneratedWebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.generated_sources.add(mozpath.join(obj.srcdir,
+ obj.basename))
+
+ elif isinstance(obj, PreprocessedWebIDLFile):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.preprocessed_sources.add(mozpath.join(
+ obj.srcdir, obj.basename))
+
+ elif isinstance(obj, ExampleWebIDLInterface):
+ # WebIDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._webidls.example_interfaces.add(obj.name)
+
+ elif isinstance(obj, IPDLFile):
+ # IPDL isn't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
+
+ elif isinstance(obj, UnifiedSources):
+ # Unified sources aren't relevant to artifact builds.
+ if self.environment.is_artifact_build:
+ return True
+
+ if obj.have_unified_mapping:
+ self._write_unified_files(obj.unified_source_mapping, obj.objdir)
+ if hasattr(self, '_process_unified_sources'):
+ self._process_unified_sources(obj)
+
+ elif isinstance(obj, BaseProgram):
+ self._binaries.programs.append(obj)
+ return False
+
+ elif isinstance(obj, SharedLibrary):
+ self._binaries.shared_libraries.append(obj)
+ return False
+
+ else:
+ return False
+
+ return True
+
+ def consume_finished(self):
+ if len(self._idl_manager.idls):
+ self._handle_idl_manager(self._idl_manager)
+
+ self._handle_webidl_collection(self._webidls)
+
+ sorted_ipdl_sources = sorted(self._ipdl_sources)
+
+ def files_from(ipdl):
+ base = mozpath.basename(ipdl)
+ root, ext = mozpath.splitext(base)
+
+ # Both .ipdl and .ipdlh become .cpp files
+ files = ['%s.cpp' % root]
+ if ext == '.ipdl':
+ # .ipdl also becomes Child/Parent.cpp files
+ files.extend(['%sChild.cpp' % root,
+ '%sParent.cpp' % root])
+ return files
+
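+ # For example (hypothetical protocol names): files_from('PFoo.ipdl')
+ # returns ['PFoo.cpp', 'PFooChild.cpp', 'PFooParent.cpp'], while
+ # files_from('Bar.ipdlh') returns just ['Bar.cpp'].
+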
+ ipdl_dir = mozpath.join(self.environment.topobjdir, 'ipc', 'ipdl')
+
+ ipdl_cppsrcs = list(itertools.chain(*[files_from(p) for p in sorted_ipdl_sources]))
+ unified_source_mapping = list(group_unified_files(ipdl_cppsrcs,
+ unified_prefix='UnifiedProtocols',
+ unified_suffix='cpp',
+ files_per_unified_file=16))
+
+ self._write_unified_files(unified_source_mapping, ipdl_dir, poison_windows_h=False)
+ self._handle_ipdl_sources(ipdl_dir, sorted_ipdl_sources, unified_source_mapping)
+
+ for config in self._configs:
+ self.backend_input_files.add(config.source)
+
+ # Write out a machine-readable file describing every test.
+ topobjdir = self.environment.topobjdir
+ with self._write_file(mozpath.join(topobjdir, 'all-tests.pkl'), mode='rb') as fh:
+ pickle.dump(dict(self._test_manager.tests_by_path), fh, protocol=2)
+
+ with self._write_file(mozpath.join(topobjdir, 'test-defaults.pkl'), mode='rb') as fh:
+ pickle.dump(self._test_manager.manifest_defaults, fh, protocol=2)
+
+ path = mozpath.join(self.environment.topobjdir, 'test-installs.pkl')
+ with self._write_file(path, mode='rb') as fh:
+ pickle.dump({k: v for k, v in self._test_manager.installs_by_path.items()
+ if k in self._test_manager.deferred_installs},
+ fh,
+ protocol=2)
+
+ # Write out a machine-readable file describing binaries.
+ with self._write_file(mozpath.join(topobjdir, 'binaries.json')) as fh:
+ d = {
+ 'shared_libraries': [s.to_dict() for s in self._binaries.shared_libraries],
+ 'programs': [p.to_dict() for p in self._binaries.programs],
+ }
+ json.dump(d, fh, sort_keys=True, indent=4)
+
+ def _handle_webidl_collection(self, webidls):
+ if not webidls.all_stems():
+ return
+
+ bindings_dir = mozpath.join(self.environment.topobjdir, 'dom', 'bindings')
+
+ all_inputs = set(webidls.all_static_sources())
+ for s in webidls.all_non_static_basenames():
+ all_inputs.add(mozpath.join(bindings_dir, s))
+
+ generated_events_stems = webidls.generated_events_stems()
+ exported_stems = webidls.all_regular_stems()
+
+ # The WebIDL manager reads configuration from a JSON file. So, we
+ # need to write this file early.
+ o = dict(
+ webidls=sorted(all_inputs),
+ generated_events_stems=sorted(generated_events_stems),
+ exported_stems=sorted(exported_stems),
+ example_interfaces=sorted(webidls.example_interfaces),
+ )
+
+ file_lists = mozpath.join(bindings_dir, 'file-lists.json')
+ with self._write_file(file_lists) as fh:
+ json.dump(o, fh, sort_keys=True, indent=2)
+
+ import mozwebidlcodegen
+
+ manager = mozwebidlcodegen.create_build_system_manager(
+ self.environment.topsrcdir,
+ self.environment.topobjdir,
+ mozpath.join(self.environment.topobjdir, 'dist')
+ )
+
+ # Bindings are compiled in unified mode to speed up compilation and
+ # to reduce linker memory usage. Note that test bindings are kept
+ # separate from regular ones so that test bindings aren't shipped.
+ unified_source_mapping = list(group_unified_files(webidls.all_regular_cpp_basenames(),
+ unified_prefix='UnifiedBindings',
+ unified_suffix='cpp',
+ files_per_unified_file=32))
+ self._write_unified_files(unified_source_mapping, bindings_dir,
+ poison_windows_h=True)
+ self._handle_webidl_build(bindings_dir, unified_source_mapping,
+ webidls,
+ manager.expected_build_output_files(),
+ manager.GLOBAL_DEFINE_FILES)
+
+ def _write_unified_file(self, unified_file, source_filenames,
+ output_directory, poison_windows_h=False):
+ with self._write_file(mozpath.join(output_directory, unified_file)) as f:
+ f.write('#define MOZ_UNIFIED_BUILD\n')
+ includeTemplate = '#include "%(cppfile)s"'
+ if poison_windows_h:
+ includeTemplate += (
+ '\n'
+ '#ifdef _WINDOWS_\n'
+ '#error "%(cppfile)s included windows.h"\n'
+ "#endif")
+ includeTemplate += (
+ '\n'
+ '#ifdef PL_ARENA_CONST_ALIGN_MASK\n'
+ '#error "%(cppfile)s uses PL_ARENA_CONST_ALIGN_MASK, '
+ 'so it cannot be built in unified mode."\n'
+ '#undef PL_ARENA_CONST_ALIGN_MASK\n'
+ '#endif\n'
+ '#ifdef INITGUID\n'
+ '#error "%(cppfile)s defines INITGUID, '
+ 'so it cannot be built in unified mode."\n'
+ '#undef INITGUID\n'
+ '#endif')
+ f.write('\n'.join(includeTemplate % { "cppfile": s } for
+ s in source_filenames))
+
+ def _write_unified_files(self, unified_source_mapping, output_directory,
+ poison_windows_h=False):
+ for unified_file, source_filenames in unified_source_mapping:
+ self._write_unified_file(unified_file, source_filenames,
+ output_directory, poison_windows_h)
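+
+ # A unified file produced by the two methods above looks like this
+ # (illustrative excerpt with hypothetical source names):
+ #
+ # #define MOZ_UNIFIED_BUILD
+ # #include "FooBinding.cpp"
+ # #ifdef PL_ARENA_CONST_ALIGN_MASK
+ # #error "FooBinding.cpp uses PL_ARENA_CONST_ALIGN_MASK, ..."
+ # #undef PL_ARENA_CONST_ALIGN_MASK
+ # #endif
+ # #include "BarBinding.cpp"
+ # ...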
+
+ def _consume_jar_manifest(self, obj):
+ # Ideally, this would all be handled somehow in the emitter, but
+ # this would require all the magic surrounding l10n and addons in
+ # the recursive make backend to die, which is not going to happen
+ # any time soon enough.
+ # Notably missing:
+ # - DEFINES from config/config.mk
+ # - L10n support
+ # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
+ # moz.build, but it doesn't matter in dist/bin.
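+ #
+ # For reference, a jar.mn manifest looks roughly like this (hypothetical
+ # entries; '%' lines are chrome manifest entries, a leading '*' requests
+ # preprocessing, and the parenthesized path is the source file):
+ #
+ # foo.jar:
+ # % content foo %content/foo/
+ # content/foo/foo.xul (content/foo.xul)
+ # * content/foo/pref.js (content/pref.js)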
+ pp = Preprocessor()
+ if obj.defines:
+ pp.context.update(obj.defines.defines)
+ pp.context.update(self.environment.defines)
+ pp.context.update(
+ AB_CD='en-US',
+ BUILD_FASTER=1,
+ )
+ pp.out = JarManifestParser()
+ try:
+ pp.do_include(obj.path.full_path)
+ except DeprecatedJarManifest as e:
+ raise DeprecatedJarManifest('Parsing error while processing %s: %s'
+ % (obj.path.full_path, e.message))
+ self.backend_input_files |= pp.includes
+
+ for jarinfo in pp.out:
+ jar_context = Context(
+ allowed_variables=VARIABLES, config=obj._context.config)
+ jar_context.push_source(obj._context.main_path)
+ jar_context.push_source(obj.path.full_path)
+
+ install_target = obj.install_target
+ if jarinfo.base:
+ install_target = mozpath.normpath(
+ mozpath.join(install_target, jarinfo.base))
+ jar_context['FINAL_TARGET'] = install_target
+ if obj.defines:
+ jar_context['DEFINES'] = obj.defines.defines
+ files = jar_context['FINAL_TARGET_FILES']
+ files_pp = jar_context['FINAL_TARGET_PP_FILES']
+
+ for e in jarinfo.entries:
+ if e.is_locale:
+ if jarinfo.relativesrcdir:
+ src = '/%s' % jarinfo.relativesrcdir
+ else:
+ src = ''
+ src = mozpath.join(src, 'en-US', e.source)
+ else:
+ src = e.source
+
+ src = Path(jar_context, src)
+
+ if '*' not in e.source and not os.path.exists(src.full_path):
+ if e.is_locale:
+ raise Exception(
+ '%s: Cannot find %s' % (obj.path, e.source))
+ if e.source.startswith('/'):
+ src = Path(jar_context, '!' + e.source)
+ else:
+ # This actually gets awkward if the jar.mn is not
+ # in the same directory as the moz.build declaring
+ # it, but that is how the recursive make backend works;
+ # nothing relies on it, but it keeps things simpler.
+ src = Path(obj._context, '!' + e.source)
+
+ output_basename = mozpath.basename(e.output)
+ if output_basename != src.target_basename:
+ src = RenamedSourcePath(jar_context,
+ (src, output_basename))
+ path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))
+
+ if e.preprocess:
+ if '*' in e.source:
+ raise Exception('%s: Wildcards are not supported with '
+ 'preprocessing' % obj.path)
+ files_pp[path] += [src]
+ else:
+ files[path] += [src]
+
+ if files:
+ self.consume_object(FinalTargetFiles(jar_context, files))
+ if files_pp:
+ self.consume_object(
+ FinalTargetPreprocessedFiles(jar_context, files_pp))
+
+ for m in jarinfo.chrome_manifests:
+ entry = parse_manifest_line(
+ mozpath.dirname(jarinfo.name),
+ m.replace('%', mozpath.basename(jarinfo.name) + '/'))
+ self.consume_object(ChromeManifestEntry(
+ jar_context, '%s.manifest' % jarinfo.name, entry))
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
new file mode 100644
index 000000000..331309af6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -0,0 +1,199 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+from collections import Iterable
+from types import StringTypes, ModuleType
+
+import mozpack.path as mozpath
+
+from mozbuild.util import ReadOnlyDict
+from mozbuild.shellutil import quote as shell_quote
+
+
+if sys.version_info.major == 2:
+ text_type = unicode
+else:
+ text_type = str
+
+
+class BuildConfig(object):
+ """Represents the output of configure."""
+
+ _CODE_CACHE = {}
+
+ def __init__(self):
+ self.topsrcdir = None
+ self.topobjdir = None
+ self.defines = {}
+ self.non_global_defines = []
+ self.substs = {}
+ self.files = []
+ self.mozconfig = None
+
+ @classmethod
+ def from_config_status(cls, path):
+ """Create an instance from a config.status file."""
+ code_cache = cls._CODE_CACHE
+ mtime = os.path.getmtime(path)
+
+ # Cache the compiled code, as it can be reused: we cache it the
+ # first time, or whenever the file changes.
+ if path not in code_cache or code_cache[path][0] != mtime:
+ # Add config.status manually to sys.modules so it gets picked up by
+ # iter_modules_in_path() for automatic dependencies.
+ mod = ModuleType('config.status')
+ mod.__file__ = path
+ sys.modules['config.status'] = mod
+
+ with open(path, 'rt') as fh:
+ source = fh.read()
+ code_cache[path] = (
+ mtime,
+ compile(source, path, 'exec', dont_inherit=1)
+ )
+
+ g = {
+ '__builtins__': __builtins__,
+ '__file__': path,
+ }
+ l = {}
+ exec(code_cache[path][1], g, l)
+
+ config = BuildConfig()
+
+ for name in l['__all__']:
+ setattr(config, name, l[name])
+
+ return config
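+
+ # For illustration: config.status is a Python script that is expected to
+ # define module-level names such as topsrcdir, topobjdir, defines, substs
+ # and non_global_defines, and to list them in __all__; the loop above
+ # copies every name in __all__ onto the BuildConfig instance.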
+
+
+class ConfigEnvironment(object):
+ """Perform actions associated with a configured but bare objdir.
+
+ The purpose of this class is to preprocess files from the source directory
+ and output results in the object directory.
+
+ There are two types of files: config files and config headers,
+ each treated through a different member function.
+
+ Creating a ConfigEnvironment requires a few arguments:
+ - topsrcdir and topobjdir are, respectively, the top source and
+ the top object directory.
+ - defines is a dict filled from AC_DEFINE and AC_DEFINE_UNQUOTED in
+ autoconf.
+ - non_global_defines are a list of names appearing in defines above
+ that are not meant to be exported in ACDEFINES (see below)
+ - substs is a dict filled from AC_SUBST in autoconf.
+
+ ConfigEnvironment automatically defines one additional substs variable
+ from all the defines not appearing in non_global_defines:
+ - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
+ preprocessor command lines. The order in which defines were given
+ when creating the ConfigEnvironment is preserved.
+ and two other additional subst variables from all the other substs:
+ - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
+ order, for use in autoconf.mk. It includes ACDEFINES
+ Only substs with a VALUE are included, such that the resulting file
+ doesn't change when new empty substs are added.
+ This results in less invalidation of build dependencies in the case
+ of autoconf.mk.
+ - ALLEMPTYSUBSTS contains the substs with an empty value, in the form
+ NAME =.
+
+ ConfigEnvironment expects a "top_srcdir" subst to be set with the top
+ source directory, in msys format on Windows. It is used to derive a
+ "srcdir" subst when treating config files. It can either be an absolute
+ path or a path relative to the topobjdir.
+ """
+
+ def __init__(self, topsrcdir, topobjdir, defines=None,
+ non_global_defines=None, substs=None, source=None, mozconfig=None):
+
+ if not source:
+ source = mozpath.join(topobjdir, 'config.status')
+ self.source = source
+ self.defines = ReadOnlyDict(defines or {})
+ self.non_global_defines = non_global_defines or []
+ self.substs = dict(substs or {})
+ self.topsrcdir = mozpath.abspath(topsrcdir)
+ self.topobjdir = mozpath.abspath(topobjdir)
+ self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None
+ self.lib_prefix = self.substs.get('LIB_PREFIX', '')
+ if 'LIB_SUFFIX' in self.substs:
+ self.lib_suffix = '.%s' % self.substs['LIB_SUFFIX']
+ self.dll_prefix = self.substs.get('DLL_PREFIX', '')
+ self.dll_suffix = self.substs.get('DLL_SUFFIX', '')
+ if self.substs.get('IMPORT_LIB_SUFFIX'):
+ self.import_prefix = self.lib_prefix
+ self.import_suffix = '.%s' % self.substs['IMPORT_LIB_SUFFIX']
+ else:
+ self.import_prefix = self.dll_prefix
+ self.import_suffix = self.dll_suffix
+
+ global_defines = [name for name in self.defines
+ if not name in self.non_global_defines]
+ self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
+ shell_quote(self.defines[name]).replace('$', '$$'))
+ for name in sorted(global_defines)])
+ def serialize(obj):
+ if isinstance(obj, StringTypes):
+ return obj
+ if isinstance(obj, Iterable):
+ return ' '.join(obj)
+ raise Exception('Unhandled type %s' % type(obj))
+ self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
+ serialize(self.substs[name])) for name in self.substs if self.substs[name]]))
+ self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
+ for name in self.substs if not self.substs[name]]))
+
+ self.substs = ReadOnlyDict(self.substs)
+
+ self.external_source_dir = None
+ external = self.substs.get('EXTERNAL_SOURCE_DIR', '')
+ if external:
+ external = mozpath.normpath(external)
+ if not os.path.isabs(external):
+ external = mozpath.join(self.topsrcdir, external)
+ self.external_source_dir = mozpath.normpath(external)
+
+ # Populate a Unicode version of substs. This is an optimization to make
+ # moz.build reading faster, since each sandbox needs a Unicode version
+ # of these variables and doing it over a thousand times is a hotspot
+ # during sandbox execution!
+ # Bug 844509 tracks moving everything to Unicode.
+ self.substs_unicode = {}
+
+ def decode(v):
+ # Text values pass through unchanged.
+ if isinstance(v, text_type):
+ return v
+ try:
+ return v.decode('utf-8')
+ except UnicodeDecodeError:
+ return v.decode('utf-8', 'replace')
+
+ for k, v in self.substs.items():
+ if not isinstance(v, StringTypes):
+ if isinstance(v, Iterable):
+ # Decode each element, preserving the container type.
+ v = type(v)(decode(i) for i in v)
+ elif not isinstance(v, text_type):
+ v = decode(v)
+
+ self.substs_unicode[k] = v
+
+ self.substs_unicode = ReadOnlyDict(self.substs_unicode)
+
+ @property
+ def is_artifact_build(self):
+ return self.substs.get('MOZ_ARTIFACT_BUILDS', False)
+
+ @staticmethod
+ def from_config_status(path):
+ config = BuildConfig.from_config_status(path)
+
+ return ConfigEnvironment(config.topsrcdir, config.topobjdir,
+ config.defines, config.non_global_defines, config.substs, path)
diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
new file mode 100644
index 000000000..cbdbdde8c
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
@@ -0,0 +1,698 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import errno
+import random
+import os
+import subprocess
+import types
+import xml.etree.ElementTree as ET
+from .common import CommonBackend
+
+from ..frontend.data import (
+ Defines,
+)
+from mozbuild.base import ExecutionSummary
+
+# TODO Have ./mach eclipse generate the workspace and index it:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
+# Open eclipse:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
+
+class CppEclipseBackend(CommonBackend):
+ """Backend that generates Cpp Eclipse project files.
+ """
+
+ def __init__(self, environment):
+ if os.name == 'nt':
+ raise Exception('Eclipse is not supported on Windows. '
+ 'Consider using Visual Studio instead.')
+ super(CppEclipseBackend, self).__init__(environment)
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ self._paths_to_defines = {}
+ self._project_name = 'Gecko'
+ self._workspace_dir = self._get_workspace_path()
+ self._project_dir = os.path.join(self._workspace_dir, self._project_name)
+ self._overwriting_workspace = os.path.isdir(self._workspace_dir)
+
+ self._macbundle = self.environment.substs['MOZ_MACBUNDLE_NAME']
+ self._appname = self.environment.substs['MOZ_APP_NAME']
+ self._bin_suffix = self.environment.substs['BIN_SUFFIX']
+ self._cxx = self.environment.substs['CXX']
+ # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
+ self._cppflags = self.environment.substs.get('CPPFLAGS', '')
+
+ def summary(self):
+ return ExecutionSummary(
+ 'CppEclipse backend executed in {execution_time:.2f}s\n'
+ 'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
+ 'If missing, import the project using File > Import > General > Existing Projects into Workspace\n'
+ '\n'
+ 'Run with: eclipse -data {workspace:s}\n',
+ execution_time=self._execution_time,
+ workspace=self._workspace_dir)
+
+ def _get_workspace_path(self):
+ return CppEclipseBackend.get_workspace_path(self.environment.topsrcdir, self.environment.topobjdir)
+
+ @staticmethod
+ def get_workspace_path(topsrcdir, topobjdir):
+ # Eclipse doesn't support having the workspace inside the srcdir.
+ # Since most people have their objdir inside their srcdir it's easier
+ # and more consistent to just put the workspace alongside the srcdir.
+ srcdir_parent = os.path.dirname(topsrcdir)
+ workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
+ return os.path.join(srcdir_parent, workspace_dirname)
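+ # e.g. (hypothetical paths): topsrcdir '/home/ann/mozilla-central' and
+ # topobjdir '/home/ann/mozilla-central/obj-ff' yield the workspace
+ # '/home/ann/eclipse_obj-ff'.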
+
+ def consume_object(self, obj):
+ reldir = getattr(obj, 'relativedir', None)
+
+ # Note that unlike VS, Eclipse's indexer seems to crawl the headers and
+ # isn't picky about the local includes.
+ if isinstance(obj, Defines):
+ self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
+
+ return True
+
+ def consume_finished(self):
+ settings_dir = os.path.join(self._project_dir, '.settings')
+ launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
+ workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
+ workspace_language_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.cdt.core')
+
+ for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, workspace_language_dir]:
+ try:
+ os.makedirs(dir_name)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ project_path = os.path.join(self._project_dir, '.project')
+ with open(project_path, 'wb') as fh:
+ self._write_project(fh)
+
+ cproject_path = os.path.join(self._project_dir, '.cproject')
+ with open(cproject_path, 'wb') as fh:
+ self._write_cproject(fh)
+
+ language_path = os.path.join(settings_dir, 'language.settings.xml')
+ with open(language_path, 'wb') as fh:
+ self._write_language_settings(fh)
+
+ workspace_language_path = os.path.join(workspace_language_dir, 'language.settings.xml')
+ with open(workspace_language_path, 'wb') as fh:
+ workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
+ workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
+ fh.write(workspace_lang_settings)
+
+ self._write_launch_files(launch_dir)
+
+ # This will show up as an 'unmanaged' formatter. It can be given a name
+ # by generating another file.
+ formatter_prefs_path = os.path.join(settings_dir, 'org.eclipse.cdt.core.prefs')
+ with open(formatter_prefs_path, 'wb') as fh:
+ fh.write(FORMATTER_SETTINGS)
+
+ editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
+ with open(editor_prefs_path, 'wb') as fh:
+ fh.write(EDITOR_SETTINGS)
+
+ # Now import the project into the workspace
+ self._import_project()
+
+ def _import_project(self):
+ # If the workspace already exists then don't import the project again,
+ # because Eclipse doesn't handle this properly.
+ if self._overwriting_workspace:
+ return
+
+ # We disable the indexer; otherwise we're forced to index the whole
+ # codebase when importing the project, which can take 20 minutes.
+ self._write_noindex()
+
+ try:
+ subprocess.check_call(
+ ["eclipse", "-application", "-nosplash",
+ "org.eclipse.cdt.managedbuilder.core.headlessbuild",
+ "-data", self._workspace_dir, "-importAll", self._project_dir])
+ finally:
+ self._remove_noindex()
+
+ def _write_noindex(self):
+ noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
+ with open(noindex_path, 'wb') as fh:
+ fh.write(NOINDEX_TEMPLATE)
+
+ def _remove_noindex(self):
+ noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
+ os.remove(noindex_path)
+
+ def _define_entry(self, name, value):
+ define = ET.Element('entry')
+ define.set('kind', 'macro')
+ define.set('name', name)
+ define.set('value', value)
+ return ET.tostring(define)
+
+ def _write_language_settings(self, fh):
+ settings = LANGUAGE_SETTINGS_TEMPLATE
+
+ settings = settings.replace('@GLOBAL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include'))
+ settings = settings.replace('@NSPR_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/nspr'))
+ settings = settings.replace('@IPDL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'ipc/ipdl/_ipdlheaders'))
+ settings = settings.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
+ settings = settings.replace('@DEFINE_MOZILLA_INTERNAL_API@', self._define_entry('MOZILLA_INTERNAL_API', '1'))
+ settings = settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
+
+ fh.write(settings)
+
+ def _write_launch_files(self, launch_dir):
+ bin_dir = os.path.join(self.environment.topobjdir, 'dist')
+
+ # TODO Improve binary detection
+ if self._macbundle:
+ exe_path = os.path.join(bin_dir, self._macbundle, 'Contents/MacOS')
+ else:
+ exe_path = os.path.join(bin_dir, 'bin')
+
+ exe_path = os.path.join(exe_path, self._appname + self._bin_suffix)
+
+ if self.environment.substs['MOZ_WIDGET_TOOLKIT'] != 'gonk':
+ main_gecko_launch = os.path.join(launch_dir, 'gecko.launch')
+ with open(main_gecko_launch, 'wb') as fh:
+ launch = GECKO_LAUNCH_CONFIG_TEMPLATE
+ launch = launch.replace('@LAUNCH_PROGRAM@', exe_path)
+ launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
+ fh.write(launch)
+
+ if self.environment.substs['MOZ_WIDGET_TOOLKIT'] == 'gonk':
+ b2g_flash = os.path.join(launch_dir, 'b2g-flash.launch')
+ with open(b2g_flash, 'wb') as fh:
+ # We assume that the srcdir is inside the b2g tree.
+ # If that's not the case the user can always adjust the path
+ # from the eclipse IDE.
+ fastxul_path = os.path.join(self.environment.topsrcdir, '..', 'scripts', 'fastxul.sh')
+ launch = B2GFLASH_LAUNCH_CONFIG_TEMPLATE
+ launch = launch.replace('@LAUNCH_PROGRAM@', fastxul_path)
+ launch = launch.replace('@OBJDIR@', self.environment.topobjdir)
+ fh.write(launch)
+
+ # TODO: Add more launch configs (and delegate calls to mach)
+
+ def _write_project(self, fh):
+ project = PROJECT_TEMPLATE
+
+ project = project.replace('@PROJECT_NAME@', self._project_name)
+ project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
+ fh.write(project)
+
+ def _write_cproject(self, fh):
+ cproject_header = CPROJECT_TEMPLATE_HEADER
+ cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
+ cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
+ fh.write(cproject_header)
+
+ for path, defines in self._paths_to_defines.items():
+ folderinfo = CPROJECT_TEMPLATE_FOLDER_INFO_HEADER
+ folderinfo = folderinfo.replace('@FOLDER_ID@', str(random.randint(1000000, 99999999999)))
+ folderinfo = folderinfo.replace('@FOLDER_NAME@', 'tree/' + path)
+ fh.write(folderinfo)
+ for k, v in defines.items():
+ define = ET.Element('listOptionValue')
+ define.set('builtIn', 'false')
+ define.set('value', str(k) + "=" + str(v))
+ fh.write(ET.tostring(define))
+ fh.write(CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER)
+
+
+ fh.write(CPROJECT_TEMPLATE_FOOTER)
+
+
+PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>@PROJECT_NAME@</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
+ <triggers>clean,full,incremental,</triggers>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
+ <triggers></triggers>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.cdt.core.cnature</nature>
+ <nature>org.eclipse.cdt.core.ccnature</nature>
+ <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
+ <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>tree</name>
+ <type>2</type>
+ <location>@PROJECT_TOPSRCDIR@</location>
+ </link>
+ </linkedResources>
+ <filteredResources>
+ <filter>
+ <id>17111971</id>
+ <name>tree</name>
+ <type>30</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-name-matches-false-false-obj-*</arguments>
+ </matcher>
+ </filter>
+ <filter>
+ <id>14081994</id>
+ <name>tree</name>
+ <type>22</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-name-matches-false-false-*.rej</arguments>
+ </matcher>
+ </filter>
+ <filter>
+ <id>25121970</id>
+ <name>tree</name>
+ <type>22</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-name-matches-false-false-*.orig</arguments>
+ </matcher>
+ </filter>
+ <filter>
+ <id>10102004</id>
+ <name>tree</name>
+ <type>10</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-name-matches-false-false-.hg</arguments>
+ </matcher>
+ </filter>
+ <filter>
+ <id>23122002</id>
+ <name>tree</name>
+ <type>22</type>
+ <matcher>
+ <id>org.eclipse.ui.ide.multiFilter</id>
+ <arguments>1.0-name-matches-false-false-*.pyc</arguments>
+ </matcher>
+ </filter>
+ </filteredResources>
+</projectDescription>
+"""
+
+CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?fileVersion 4.0.0?>
+
+<cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
+ <storageModule moduleId="org.eclipse.cdt.core.settings">
+ <cconfiguration id="0.1674256904">
+ <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1674256904" moduleId="org.eclipse.cdt.core.settings" name="Default">
+ <externalSettings/>
+ <extensions>
+ <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ </extensions>
+ </storageModule>
+ <storageModule moduleId="cdtBuildSystem" version="4.0.0">
+ <configuration artifactName="${ProjName}" buildProperties="" description="" id="0.1674256904" name="Default" parent="org.eclipse.cdt.build.core.prefbase.cfg">
+ <folderInfo id="0.1674256904." name="/" resourcePath="">
+ <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
+ <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
+ <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
+ </toolChain>
+ </folderInfo>
+"""
+CPROJECT_TEMPLATE_FOLDER_INFO_HEADER = """
+ <folderInfo id="0.1674256904.@FOLDER_ID@" name="/" resourcePath="@FOLDER_NAME@">
+ <toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.1022318069" name="No ToolChain" superClass="org.eclipse.cdt.build.core.prefbase.toolchain" unusedChildren="">
+ <tool id="org.eclipse.cdt.build.core.settings.holder.libs.1259030812" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs.1800697532"/>
+ <tool id="org.eclipse.cdt.build.core.settings.holder.1407291069" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
+ <option id="org.eclipse.cdt.build.core.settings.holder.symbols.1907658087" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
+"""
+CPROJECT_TEMPLATE_FOLDER_INFO_DEFINE = """
+ <listOptionValue builtIn="false" value="@FOLDER_DEFINE@"/>
+"""
+CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER = """
+ </option>
+ <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.440601711" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
+ </tool>
+ </toolChain>
+ </folderInfo>
+"""
+CPROJECT_TEMPLATE_FILEINFO = """ <fileInfo id="0.1674256904.474736658" name="Layers.cpp" rcbsApplicability="disable" resourcePath="tree/gfx/layers/Layers.cpp" toolsToInvoke="org.eclipse.cdt.build.core.settings.holder.582514939.463639939">
+ <tool id="org.eclipse.cdt.build.core.settings.holder.582514939.463639939" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
+ <option id="org.eclipse.cdt.build.core.settings.holder.symbols.232300236" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
+ <listOptionValue builtIn="false" value="BENWA=BENWAVAL"/>
+ </option>
+ <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.1942876228" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
+ </tool>
+ </fileInfo>
+"""
+CPROJECT_TEMPLATE_FOOTER = """ </configuration>
+ </storageModule>
+ <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
+ </cconfiguration>
+ </storageModule>
+ <storageModule moduleId="cdtBuildSystem" version="4.0.0">
+ <project id="Empty.null.1281234804" name="Empty"/>
+ </storageModule>
+ <storageModule moduleId="scannerConfiguration">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ <scannerConfigBuildInfo instanceId="0.1674256904">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ </scannerConfigBuildInfo>
+ </storageModule>
+ <storageModule moduleId="refreshScope" versionNumber="2">
+ <configuration configurationName="Default"/>
+ </storageModule>
+ <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
+</cproject>
+"""
+
+WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<plugin>
+ <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
+ <provider class="org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector" console="true" id="org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot;">
+ <language-scope id="org.eclipse.cdt.core.gcc"/>
+ <language-scope id="org.eclipse.cdt.core.g++"/>
+ </provider>
+ </extension>
+</plugin>
+"""
+
+LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<project>
+ <configuration id="0.1674256904" name="Default">
+ <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
+ <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
+ <language id="org.eclipse.cdt.core.g++">
+ <resource project-relative-path="">
+ <entry kind="includePath" name="@GLOBAL_INCLUDE_PATH@">
+ <flag value="LOCAL"/>
+ </entry>
+ <entry kind="includePath" name="@NSPR_INCLUDE_PATH@">
+ <flag value="LOCAL"/>
+ </entry>
+ <entry kind="includePath" name="@IPDL_INCLUDE_PATH@">
+ <flag value="LOCAL"/>
+ </entry>
+ <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
+ <flag value="LOCAL"/>
+ </entry>
+ <!--
+ Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once
+ we need to make sure headers are parsed with MOZILLA_INTERNAL_API to make sure
+ the indexer gets the version that is used in most of the tree. This means that
+ MOZILLA_EXTERNAL_API code will suffer.
+ -->
+ @DEFINE_MOZILLA_INTERNAL_API@
+ </resource>
+ </language>
+ </provider>
+ <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
+ <language-scope id="org.eclipse.cdt.core.gcc"/>
+ <language-scope id="org.eclipse.cdt.core.g++"/>
+ </provider>
+ <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
+ </extension>
+ </configuration>
+</project>
+"""
+
+GECKO_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
+<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
+<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
+<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
+<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
+<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_ARGUMENTS" value="@LAUNCH_ARGS@"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
+<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
+<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
+<listEntry value="/gecko"/>
+</listAttribute>
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
+<listEntry value="4"/>
+</listAttribute>
+<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
+<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
+</launchConfiguration>
+"""
+
+B2GFLASH_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
+<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
+<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
+<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
+<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
+<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
+<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
+<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
+<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
+<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
+<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
+<stringAttribute key="org.eclipse.cdt.launch.WORKING_DIRECTORY" value="@OBJDIR@"/>
+<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
+<listEntry value="/gecko"/>
+</listAttribute>
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
+<listEntry value="4"/>
+</listAttribute>
+<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
+<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
+</launchConfiguration>
+"""
+
+
+EDITOR_SETTINGS = """eclipse.preferences.version=1
+lineNumberRuler=true
+overviewRuler_migration=migrated_3.1
+printMargin=true
+printMarginColumn=80
+showCarriageReturn=false
+showEnclosedSpaces=false
+showLeadingSpaces=false
+showLineFeed=false
+showWhitespaceCharacters=true
+spacesForTabs=true
+tabWidth=2
+undoHistorySize=200
+"""
+
+FORMATTER_SETTINGS = """eclipse.preferences.version=1
+org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.cdt.core.formatter.alignment_for_assignment=16
+org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
+org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34
+org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18
+org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48
+org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16
+org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48
+org.eclipse.cdt.core.formatter.alignment_for_expression_list=0
+org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.cdt.core.formatter.alignment_for_member_access=0
+org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16
+org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted
+org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line
+org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line
+org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1
+org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true
+org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true
+org.eclipse.cdt.core.formatter.compact_else_if=true
+org.eclipse.cdt.core.formatter.continuation_indentation=2
+org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false
+org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0
+org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true
+org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false
+org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true
+org.eclipse.cdt.core.formatter.indent_empty_lines=false
+org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false
+org.eclipse.cdt.core.formatter.indentation.size=2
+org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert
+org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.join_wrapped_lines=false
+org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.cdt.core.formatter.lineSplit=80
+org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.cdt.core.formatter.tabulation.char=space
+org.eclipse.cdt.core.formatter.tabulation.size=2
+org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
+"""
+
+NOINDEX_TEMPLATE = """eclipse.preferences.version=1
+indexer/indexerId=org.eclipse.cdt.core.nullIndexer
+"""
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
new file mode 100644
index 000000000..d55928e8c
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/fastermake.py
@@ -0,0 +1,165 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals, print_function
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.backend.common import CommonBackend
+from mozbuild.frontend.context import (
+ ObjDirPath,
+)
+from mozbuild.frontend.data import (
+ ChromeManifestEntry,
+ FinalTargetPreprocessedFiles,
+ FinalTargetFiles,
+ JARManifest,
+ XPIDLFile,
+)
+from mozbuild.makeutil import Makefile
+from mozbuild.util import OrderedDefaultDict
+from mozpack.manifests import InstallManifest
+import mozpack.path as mozpath
+
+
+class FasterMakeBackend(CommonBackend, PartialBackend):
+ def _init(self):
+ super(FasterMakeBackend, self)._init()
+
+ self._manifest_entries = OrderedDefaultDict(set)
+
+ self._install_manifests = OrderedDefaultDict(InstallManifest)
+
+ self._dependencies = OrderedDefaultDict(list)
+
+ self._has_xpidl = False
+
+ def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
+ if target is None:
+ target = mozpath.basename(path)
+ # This matches what PP_TARGETS does in config/rules.mk.
+ if target.endswith('.in'):
+ target = target[:-3]
+ if target.endswith('.css'):
+ kwargs['marker'] = '%'
+ depfile = mozpath.join(
+ self.environment.topobjdir, 'faster', '.deps',
+ mozpath.join(obj.install_target, dest, target).replace('/', '_'))
+ self._install_manifests[obj.install_target].add_preprocess(
+ mozpath.join(obj.srcdir, path),
+ mozpath.join(dest, target),
+ depfile,
+ **kwargs)
+
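+ # Worked example (names hypothetical): a preprocessed file 'foo.css.in'
+ # destined for 'res' has its '.in' suffix stripped, giving the target
+ # 'foo.css'; because the target ends in '.css', the preprocessor marker is
+ # switched to '%' (CSS uses '#' for color literals), and 'res/foo.css' is
+ # recorded in the install manifest with a depfile under faster/.deps/.
+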
+ def consume_object(self, obj):
+ if isinstance(obj, JARManifest) and \
+ obj.install_target.startswith('dist/bin'):
+ self._consume_jar_manifest(obj)
+
+ elif isinstance(obj, (FinalTargetFiles,
+ FinalTargetPreprocessedFiles)) and \
+ obj.install_target.startswith('dist/bin'):
+ defines = obj.defines or {}
+ if defines:
+ defines = defines.defines
+ for path, files in obj.files.walk():
+ for f in files:
+ if isinstance(obj, FinalTargetPreprocessedFiles):
+ self._add_preprocess(obj, f.full_path, path,
+ target=f.target_basename,
+ defines=defines)
+ elif '*' in f:
+ def _prefix(s):
+ for p in mozpath.split(s):
+ if '*' not in p:
+ yield p + '/'
+ prefix = ''.join(_prefix(f.full_path))
+
+ self._install_manifests[obj.install_target] \
+ .add_pattern_symlink(
+ prefix,
+ f.full_path[len(prefix):],
+ mozpath.join(path, f.target_basename))
+ else:
+ self._install_manifests[obj.install_target].add_symlink(
+ f.full_path,
+ mozpath.join(path, f.target_basename)
+ )
+ if isinstance(f, ObjDirPath):
+ dep_target = 'install-%s' % obj.install_target
+ self._dependencies[dep_target].append(
+ mozpath.relpath(f.full_path,
+ self.environment.topobjdir))
+
+ elif isinstance(obj, ChromeManifestEntry) and \
+ obj.install_target.startswith('dist/bin'):
+ top_level = mozpath.join(obj.install_target, 'chrome.manifest')
+ if obj.path != top_level:
+ entry = 'manifest %s' % mozpath.relpath(obj.path,
+ obj.install_target)
+ self._manifest_entries[top_level].add(entry)
+ self._manifest_entries[obj.path].add(str(obj.entry))
+
+ elif isinstance(obj, XPIDLFile):
+ self._has_xpidl = True
+ # We're not actually handling XPIDL files.
+ return False
+
+ else:
+ return False
+
+ return True
+
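+ # Because FasterMakeBackend is a PartialBackend, returning False from
+ # consume_object (as for XPIDLFile above) leaves the object to be handled
+ # by the primary backend it is paired with; returning True marks it as
+ # fully consumed here.
+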
+ def consume_finished(self):
+ mk = Makefile()
+ # Add the default rule at the very beginning.
+ mk.create_rule(['default'])
+ mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
+ mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
+ if not self._has_xpidl:
+ mk.add_statement('NO_XPIDL = 1')
+
+ # Add a few necessary variables inherited from configure
+ for var in (
+ 'PYTHON',
+ 'ACDEFINES',
+ 'MOZ_BUILD_APP',
+ 'MOZ_WIDGET_TOOLKIT',
+ ):
+ value = self.environment.substs.get(var)
+ if value is not None:
+ mk.add_statement('%s = %s' % (var, value))
+
+ install_manifests_bases = self._install_manifests.keys()
+
+ # Add information for chrome manifest generation
+ manifest_targets = []
+
+ for target, entries in self._manifest_entries.iteritems():
+ manifest_targets.append(target)
+ install_target = mozpath.basedir(target, install_manifests_bases)
+ self._install_manifests[install_target].add_content(
+ ''.join('%s\n' % e for e in sorted(entries)),
+ mozpath.relpath(target, install_target))
+
+ # Add information for install manifests.
+ mk.add_statement('INSTALL_MANIFESTS = %s'
+ % ' '.join(self._install_manifests.keys()))
+
+ # Add the dependencies we inferred:
+ for target, deps in self._dependencies.iteritems():
+ mk.create_rule([target]).add_dependencies(
+ '$(TOPOBJDIR)/%s' % d for d in deps)
+
+ mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
+
+ for base, install_manifest in self._install_manifests.iteritems():
+ with self._write_file(
+ mozpath.join(self.environment.topobjdir, 'faster',
+ 'install_%s' % base.replace('/', '_'))) as fh:
+ install_manifest.write(fileobj=fh)
+
+ with self._write_file(
+ mozpath.join(self.environment.topobjdir, 'faster',
+ 'Makefile')) as fh:
+ mk.dump(fh, removal_guard=False)
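+
+# For reference (paths and values hypothetical), the generated
+# faster/Makefile is expected to look roughly like:
+#
+#   default:
+#   TOPSRCDIR = /src/mozilla
+#   TOPOBJDIR = /src/mozilla/obj-ff
+#   PYTHON = /usr/bin/python
+#   INSTALL_MANIFESTS = dist_bin
+#   install-dist/bin: $(TOPOBJDIR)/dom/generated.js
+#   include $(TOPSRCDIR)/config/faster/rules.mk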
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
new file mode 100644
index 000000000..5608d40b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/mach_commands.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import os
+import sys
+import subprocess
+import which
+
+from mozbuild.base import (
+ MachCommandBase,
+)
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+@CommandProvider
+class MachCommands(MachCommandBase):
+ @Command('ide', category='devenv',
+ description='Generate a project and launch an IDE.')
+ @CommandArgument('ide', choices=['eclipse', 'visualstudio', 'androidstudio', 'intellij'])
+ @CommandArgument('args', nargs=argparse.REMAINDER)
+ def eclipse(self, ide, args):
+ if ide == 'eclipse':
+ backend = 'CppEclipse'
+ elif ide == 'visualstudio':
+ backend = 'VisualStudio'
+ elif ide == 'androidstudio' or ide == 'intellij':
+ # The build backend for Android Studio and IntelliJ is just the regular one.
+ backend = 'RecursiveMake'
+
+ if ide == 'eclipse':
+ try:
+ which.which('eclipse')
+ except which.WhichError:
+ print('Eclipse CDT 8.4 or later must be installed in your PATH.')
+ print('Download: http://www.eclipse.org/cdt/downloads.php')
+ return 1
+ elif ide == 'androidstudio' or ide == 'intellij':
+ studio = ['studio'] if ide == 'androidstudio' else ['idea']
+ if sys.platform != 'darwin':
+ try:
+ which.which(studio[0])
+ except which.WhichError:
+ self.print_ide_error(ide)
+ return 1
+ else:
+ # In order of preference!
+ for d in self.get_mac_ide_preferences(ide):
+ if os.path.isdir(d):
+ studio = ['open', '-a', d]
+ break
+ else:
+ print('Android Studio or IntelliJ IDEA 14 is not installed in /Applications.')
+ return 1
+
+ # Here we refresh the whole build. 'build export' would be sufficient
+ # here and is probably more correct, but it's also nice to have a single
+ # target that produces a fully built and indexed project (an easy target
+ # to run before going out to lunch).
+ res = self._mach_context.commands.dispatch('build', self._mach_context)
+ if res != 0:
+ return 1
+
+ if ide in ('androidstudio', 'intellij'):
+ res = self._mach_context.commands.dispatch('package', self._mach_context)
+ if res != 0:
+ return 1
+ else:
+ # Generate or refresh the IDE backend.
+ python = self.virtualenv_manager.python_path
+ config_status = os.path.join(self.topobjdir, 'config.status')
+ backend_args = [python, config_status, '--backend=%s' % backend]
+ res = self._run_command_in_objdir(args=backend_args, pass_thru=True, ensure_exit_code=False)
+ if res != 0:
+ return 1
+
+
+ if ide == 'eclipse':
+ eclipse_workspace_dir = self.get_eclipse_workspace_path()
+ subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
+ elif ide == 'visualstudio':
+ visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
+ subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])
+ elif ide == 'androidstudio' or ide == 'intellij':
+ gradle_dir = None
+ if self.is_gradle_project_already_imported():
+ gradle_dir = self.get_gradle_project_path()
+ else:
+ gradle_dir = self.get_gradle_import_path()
+ subprocess.check_call(studio + [gradle_dir])
+
+ def get_eclipse_workspace_path(self):
+ from mozbuild.backend.cpp_eclipse import CppEclipseBackend
+ return CppEclipseBackend.get_workspace_path(self.topsrcdir, self.topobjdir)
+
+ def get_visualstudio_workspace_path(self):
+ return os.path.join(self.topobjdir, 'msvc', 'mozilla.sln')
+
+ def get_gradle_project_path(self):
+ return os.path.join(self.topobjdir, 'mobile', 'android', 'gradle')
+
+ def get_gradle_import_path(self):
+ return os.path.join(self.get_gradle_project_path(), 'build.gradle')
+
+ def is_gradle_project_already_imported(self):
+ gradle_project_path = os.path.join(self.get_gradle_project_path(), '.idea')
+ return os.path.exists(gradle_project_path)
+
+ def get_mac_ide_preferences(self, ide):
+ if sys.platform == 'darwin':
+ if ide == 'androidstudio':
+ return ['/Applications/Android Studio.app']
+ else:
+ return [
+ '/Applications/IntelliJ IDEA 14 EAP.app',
+ '/Applications/IntelliJ IDEA 14.app',
+ '/Applications/IntelliJ IDEA 14 CE EAP.app',
+ '/Applications/IntelliJ IDEA 14 CE.app']
+
+ def print_ide_error(self, ide):
+ if ide == 'androidstudio':
+ print('Android Studio is not installed in your PATH.')
+ print('You can generate a command-line launcher from Android Studio->Tools->Create Command-line launcher with script name \'studio\'')
+ elif ide == 'intellij':
+ print('IntelliJ is not installed in your PATH.')
+ print('You can generate a command-line launcher from IntelliJ IDEA->Tools->Create Command-line launcher with script name \'idea\'')
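+
+# Typical invocations (for illustration): 'mach ide eclipse' generates the
+# CppEclipse backend and opens Eclipse on the generated workspace, while
+# 'mach ide intellij' builds and packages, then opens the Gradle project.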
diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py
new file mode 100644
index 000000000..132dcf944
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -0,0 +1,1513 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import logging
+import os
+import re
+
+from collections import (
+ defaultdict,
+ namedtuple,
+)
+from StringIO import StringIO
+from itertools import chain
+
+from mozpack.manifests import (
+ InstallManifest,
+)
+import mozpack.path as mozpath
+
+from mozbuild.frontend.context import (
+ AbsolutePath,
+ Path,
+ RenamedSourcePath,
+ SourcePath,
+ ObjDirPath,
+)
+from .common import CommonBackend
+from ..frontend.data import (
+ AndroidAssetsDirs,
+ AndroidResDirs,
+ AndroidExtraResDirs,
+ AndroidExtraPackages,
+ AndroidEclipseProjectData,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ ContextDerived,
+ ContextWrapped,
+ Defines,
+ DirectoryTraversal,
+ ExternalLibrary,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ GeneratedSources,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ JARManifest,
+ JavaJarData,
+ Library,
+ LocalInclude,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ Program,
+ RustLibrary,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestManifest,
+ VariablePassthru,
+ XPIDLFile,
+)
+from ..util import (
+ ensureParentDir,
+ FileAvoidWrite,
+)
+from ..makeutil import Makefile
+from mozbuild.shellutil import quote as shell_quote
+
+MOZBUILD_VARIABLES = [
+ b'ANDROID_APK_NAME',
+ b'ANDROID_APK_PACKAGE',
+ b'ANDROID_ASSETS_DIRS',
+ b'ANDROID_EXTRA_PACKAGES',
+ b'ANDROID_EXTRA_RES_DIRS',
+ b'ANDROID_GENERATED_RESFILES',
+ b'ANDROID_RES_DIRS',
+ b'ASFLAGS',
+ b'CMSRCS',
+ b'CMMSRCS',
+ b'CPP_UNIT_TESTS',
+ b'DIRS',
+ b'DIST_INSTALL',
+ b'EXTRA_DSO_LDOPTS',
+ b'EXTRA_JS_MODULES',
+ b'EXTRA_PP_COMPONENTS',
+ b'EXTRA_PP_JS_MODULES',
+ b'FORCE_SHARED_LIB',
+ b'FORCE_STATIC_LIB',
+ b'FINAL_LIBRARY',
+ b'HOST_CFLAGS',
+ b'HOST_CSRCS',
+ b'HOST_CMMSRCS',
+ b'HOST_CXXFLAGS',
+ b'HOST_EXTRA_LIBS',
+ b'HOST_LIBRARY_NAME',
+ b'HOST_PROGRAM',
+ b'HOST_SIMPLE_PROGRAMS',
+ b'IS_COMPONENT',
+ b'JAR_MANIFEST',
+ b'JAVA_JAR_TARGETS',
+ b'LD_VERSION_SCRIPT',
+ b'LIBRARY_NAME',
+ b'LIBS',
+ b'MAKE_FRAMEWORK',
+ b'MODULE',
+ b'NO_DIST_INSTALL',
+ b'NO_EXPAND_LIBS',
+ b'NO_INTERFACES_MANIFEST',
+ b'NO_JS_MANIFEST',
+ b'OS_LIBS',
+ b'PARALLEL_DIRS',
+ b'PREF_JS_EXPORTS',
+ b'PROGRAM',
+ b'PYTHON_UNIT_TESTS',
+ b'RESOURCE_FILES',
+ b'SDK_HEADERS',
+ b'SDK_LIBRARY',
+ b'SHARED_LIBRARY_LIBS',
+ b'SHARED_LIBRARY_NAME',
+ b'SIMPLE_PROGRAMS',
+ b'SONAME',
+ b'STATIC_LIBRARY_NAME',
+ b'TEST_DIRS',
+ b'TOOL_DIRS',
+ # XXX config/Makefile.in specifies this in a make invocation
+ #'USE_EXTENSION_MANIFEST',
+ b'XPCSHELL_TESTS',
+ b'XPIDL_MODULE',
+]
+
+DEPRECATED_VARIABLES = [
+ b'ANDROID_RESFILES',
+ b'EXPORT_LIBRARY',
+ b'EXTRA_LIBS',
+ b'HOST_LIBS',
+ b'LIBXUL_LIBRARY',
+ b'MOCHITEST_A11Y_FILES',
+ b'MOCHITEST_BROWSER_FILES',
+ b'MOCHITEST_BROWSER_FILES_PARTS',
+ b'MOCHITEST_CHROME_FILES',
+ b'MOCHITEST_FILES',
+ b'MOCHITEST_FILES_PARTS',
+ b'MOCHITEST_METRO_FILES',
+ b'MOCHITEST_ROBOCOP_FILES',
+ b'MODULE_OPTIMIZE_FLAGS',
+ b'MOZ_CHROME_FILE_FORMAT',
+ b'SHORT_LIBNAME',
+ b'TESTING_JS_MODULES',
+ b'TESTING_JS_MODULE_DIR',
+]
+
+MOZBUILD_VARIABLES_MESSAGE = 'It should only be defined in moz.build files.'
+
+DEPRECATED_VARIABLES_MESSAGE = (
+ 'This variable has been deprecated. It does nothing. It must be removed '
+ 'in order to build.'
+)
+
+
+def make_quote(s):
+ return s.replace('#', '\\#').replace('$', '$$')
+
+
+class BackendMakeFile(object):
+ """Represents a generated backend.mk file.
+
+ This is both a wrapper around a file handle as well as a container that
+ holds accumulated state.
+
+ It's worth taking a moment to explain the make dependencies. The
+ generated backend.mk as well as the Makefile.in (if it exists) are in the
+ GLOBAL_DEPS list. This means that if one of them changes, all targets
+ in that Makefile are invalidated. backend.mk also depends on all of its
+ input files.
+
+ It's worth considering the effect of file mtimes on build behavior.
+
+ Since we perform an "all or none" traversal of moz.build files (the whole
+ tree is scanned as opposed to individual files), if we were to blindly
+ write backend.mk files, the net effect of updating a single mozbuild file
+ in the tree is all backend.mk files have new mtimes. This would in turn
+ invalidate all make targets across the whole tree! This would effectively
+ undermine incremental builds as any mozbuild change would cause the entire
+ tree to rebuild!
+
+ The solution is to not update the mtimes of backend.mk files unless they
+ actually change. We use FileAvoidWrite to accomplish this.
+ """
+
+ def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir):
+ self.topsrcdir = topsrcdir
+ self.srcdir = srcdir
+ self.objdir = objdir
+ self.relobjdir = mozpath.relpath(objdir, topobjdir)
+ self.environment = environment
+ self.name = mozpath.join(objdir, 'backend.mk')
+
+ self.xpt_name = None
+
+ self.fh = FileAvoidWrite(self.name, capture_diff=True)
+ self.fh.write('# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n')
+ self.fh.write('\n')
+
+ def write(self, buf):
+ self.fh.write(buf)
+
+ def write_once(self, buf):
+ if isinstance(buf, unicode):
+ buf = buf.encode('utf-8')
+ if b'\n' + buf not in self.fh.getvalue():
+ self.write(buf)
+
+ # For compatibility with makeutil.Makefile
+ def add_statement(self, stmt):
+ self.write('%s\n' % stmt)
+
+ def close(self):
+ if self.xpt_name:
+ # We just recompile all xpidls because it's easier and less error
+ # prone.
+ self.fh.write('NONRECURSIVE_TARGETS += export\n')
+ self.fh.write('NONRECURSIVE_TARGETS_export += xpidl\n')
+ self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = '
+ '$(DEPTH)/xpcom/xpidl\n')
+ self.fh.write('NONRECURSIVE_TARGETS_export_xpidl_TARGETS += '
+ 'export\n')
+
+ return self.fh.close()
+
+ @property
+ def diff(self):
+ return self.fh.diff
+
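+# A minimal sketch of the FileAvoidWrite idea described in the docstring
+# above (simplified; the real class in mozbuild.util buffers writes and can
+# also capture a diff):
+#
+#     def avoid_write(path, new_content):
+#         try:
+#             with open(path, 'rb') as fh:
+#                 if fh.read() == new_content:
+#                     return  # unchanged: leave the mtime alone
+#         except IOError:
+#             pass
+#         with open(path, 'wb') as fh:
+#             fh.write(new_content)
+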
+
+class RecursiveMakeTraversal(object):
+ """
+ Helper class to keep track of how the "traditional" recursive make backend
+ recurses into subdirectories. This is useful until all ad hoc rules are
+ removed from Makefiles.
+
+ Each directory may have one or more types of subdirectories:
+ - (normal) dirs
+ - tests
+ """
+ SubDirectoryCategories = ['dirs', 'tests']
+ SubDirectoriesTuple = namedtuple('SubDirectories', SubDirectoryCategories)
+ class SubDirectories(SubDirectoriesTuple):
+ def __new__(cls):
+ return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(cls, [], [])
+
+ def __init__(self):
+ self._traversal = {}
+
+ def add(self, dir, dirs=[], tests=[]):
+ """
+ Add a directory to the traversal, registering its subdirectories
+ sorted by category. If the directory was already added, the new
+ subdirectories are appended to the lists already known for it.
+ """
+ subdirs = self._traversal.setdefault(dir, self.SubDirectories())
+ for key, value in (('dirs', dirs), ('tests', tests)):
+ assert key in self.SubDirectoryCategories
+ getattr(subdirs, key).extend(value)
+
+ @staticmethod
+ def default_filter(current, subdirs):
+ """
+ Default filter for use with compute_dependencies and traverse.
+ """
+ return current, [], subdirs.dirs + subdirs.tests
+
+ def call_filter(self, current, filter):
+ """
+ Helper function to call a filter from compute_dependencies and
+ traverse.
+ """
+ return filter(current, self._traversal.get(current,
+ self.SubDirectories()))
+
+ def compute_dependencies(self, filter=None):
+ """
+ Compute make dependencies corresponding to the registered directory
+ traversal.
+
+ filter is a function with the following signature:
+ def filter(current, subdirs)
+ where current is the directory being traversed, and subdirs the
+ SubDirectories instance corresponding to it.
+ The filter function returns a tuple (filtered_current, filtered_parallel,
+ filtered_dirs) where filtered_current is either current or None if
+ the current directory is to be skipped, and filtered_parallel and
+ filtered_dirs are lists of parallel directories and sequential
+ directories, which can be rearranged from whatever is given in the
+ SubDirectories members.
+
+ The default filter corresponds to a default recursive traversal.
+ """
+ filter = filter or self.default_filter
+
+ deps = {}
+
+ def recurse(start_node, prev_nodes=None):
+ current, parallel, sequential = self.call_filter(start_node, filter)
+ if current is not None:
+ if start_node != '':
+ deps[start_node] = prev_nodes
+ prev_nodes = (start_node,)
+ if start_node not in self._traversal:
+ return prev_nodes
+ parallel_nodes = []
+ for node in parallel:
+ nodes = recurse(node, prev_nodes)
+ if nodes and nodes != ('',):
+ parallel_nodes.extend(nodes)
+ if parallel_nodes:
+ prev_nodes = tuple(parallel_nodes)
+ for dir in sequential:
+ prev_nodes = recurse(dir, prev_nodes)
+ return prev_nodes
+
+ return recurse(''), deps
+
+ def traverse(self, start, filter=None):
+ """
+ Iterate over the filtered subdirectories, following the traditional
+ make traversal order.
+ """
+ if filter is None:
+ filter = self.default_filter
+
+ current, parallel, sequential = self.call_filter(start, filter)
+ if current is not None:
+ yield start
+ if start not in self._traversal:
+ return
+ for node in parallel:
+ for n in self.traverse(node, filter):
+ yield n
+ for dir in sequential:
+ for d in self.traverse(dir, filter):
+ yield d
+
+ def get_subdirs(self, dir):
+ """
+ Returns all direct subdirectories under the given directory.
+ """
+ return self._traversal.get(dir, self.SubDirectories())
+
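+# Example usage (illustrative): registering a small tree and computing its
+# make dependencies with the default filter.
+#
+#     t = RecursiveMakeTraversal()
+#     t.add('', dirs=['a', 'b'])
+#     t.add('a', dirs=['a/sub'])
+#     main, deps = t.compute_dependencies()
+#     # The default filter is fully sequential here, so
+#     # deps == {'a': ('',), 'a/sub': ('a',), 'b': ('a/sub',)}
+#     # and main == ('b',).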
+
+class RecursiveMakeBackend(CommonBackend):
+ """Backend that integrates with the existing recursive make build system.
+
+ This backend facilitates the transition from Makefile.in to moz.build
+ files.
+
+ This backend performs Makefile.in -> Makefile conversion. It also writes
+ out .mk files containing content derived from moz.build files. Both are
+ consumed by the recursive make builder.
+
+ This backend may eventually evolve to write out non-recursive make files.
+ However, as long as there are Makefile.in files in the tree, we are tied to
+ recursive make and thus will need this backend.
+ """
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ self._backend_files = {}
+ self._idl_dirs = set()
+
+ self._makefile_in_count = 0
+ self._makefile_out_count = 0
+
+ self._test_manifests = {}
+
+ self.backend_input_files.add(mozpath.join(self.environment.topobjdir,
+ 'config', 'autoconf.mk'))
+
+ self._install_manifests = defaultdict(InstallManifest)
+ # The build system relies on some install manifests always existing
+ # even if they are empty, because the directories are still filled
+ # by the build system itself, and the install manifests are only
+ # used for a "magic" rm -rf.
+ self._install_manifests['dist_public']
+ self._install_manifests['dist_private']
+ self._install_manifests['dist_sdk']
+
+ self._traversal = RecursiveMakeTraversal()
+ self._compile_graph = defaultdict(set)
+
+ self._no_skip = {
+ 'export': set(),
+ 'libs': set(),
+ 'misc': set(),
+ 'tools': set(),
+ }
+
+ def summary(self):
+ summary = super(RecursiveMakeBackend, self).summary()
+ summary.extend('; {makefile_in:d} -> {makefile_out:d} Makefile',
+ makefile_in=self._makefile_in_count,
+ makefile_out=self._makefile_out_count)
+ return summary
+
+ def _get_backend_file_for(self, obj):
+ if obj.objdir not in self._backend_files:
+ self._backend_files[obj.objdir] = \
+ BackendMakeFile(obj.srcdir, obj.objdir, obj.config,
+ obj.topsrcdir, self.environment.topobjdir)
+ return self._backend_files[obj.objdir]
+
+ def consume_object(self, obj):
+ """Write out build files necessary to build with recursive make."""
+
+ if not isinstance(obj, ContextDerived):
+ return False
+
+ backend_file = self._get_backend_file_for(obj)
+
+ consumed = CommonBackend.consume_object(self, obj)
+
+ # CommonBackend handles XPIDLFile and TestManifest, but we want to do
+ # some extra things for them.
+ if isinstance(obj, XPIDLFile):
+ backend_file.xpt_name = '%s.xpt' % obj.module
+ self._idl_dirs.add(obj.relobjdir)
+
+ elif isinstance(obj, TestManifest):
+ self._process_test_manifest(obj, backend_file)
+
+ # If CommonBackend acknowledged the object, we're done with it.
+ if consumed:
+ return True
+
+ if not isinstance(obj, Defines):
+ self.consume_object(obj.defines)
+
+ if isinstance(obj, DirectoryTraversal):
+ self._process_directory_traversal(obj, backend_file)
+ elif isinstance(obj, ConfigFileSubstitution):
+ # Other ConfigFileSubstitution should have been acked by
+ # CommonBackend.
+ assert os.path.basename(obj.output_path) == 'Makefile'
+ self._create_makefile(obj)
+ elif isinstance(obj, (Sources, GeneratedSources)):
+ suffix_map = {
+ '.s': 'ASFILES',
+ '.c': 'CSRCS',
+ '.m': 'CMSRCS',
+ '.mm': 'CMMSRCS',
+ '.cpp': 'CPPSRCS',
+ '.rs': 'RSSRCS',
+ '.S': 'SSRCS',
+ }
+ variables = [suffix_map[obj.canonical_suffix]]
+ if isinstance(obj, GeneratedSources):
+ variables.append('GARBAGE')
+ base = backend_file.objdir
+ else:
+ base = backend_file.srcdir
+ for f in sorted(obj.files):
+ f = mozpath.relpath(f, base)
+ for var in variables:
+ backend_file.write('%s += %s\n' % (var, f))
+ elif isinstance(obj, HostSources):
+ suffix_map = {
+ '.c': 'HOST_CSRCS',
+ '.mm': 'HOST_CMMSRCS',
+ '.cpp': 'HOST_CPPSRCS',
+ }
+ var = suffix_map[obj.canonical_suffix]
+ for f in sorted(obj.files):
+ backend_file.write('%s += %s\n' % (
+ var, mozpath.relpath(f, backend_file.srcdir)))
+ elif isinstance(obj, VariablePassthru):
+ # Sorted so output is consistent and we don't bump mtimes.
+ for k, v in sorted(obj.variables.items()):
+ if k == 'HAS_MISC_RULE':
+ self._no_skip['misc'].add(backend_file.relobjdir)
+ continue
+ if isinstance(v, list):
+ for item in v:
+ backend_file.write(
+ '%s += %s\n' % (k, make_quote(shell_quote(item))))
+ elif isinstance(v, bool):
+ if v:
+ backend_file.write('%s := 1\n' % k)
+ else:
+ backend_file.write('%s := %s\n' % (k, v))
+ elif isinstance(obj, HostDefines):
+ self._process_defines(obj, backend_file, which='HOST_DEFINES')
+ elif isinstance(obj, Defines):
+ self._process_defines(obj, backend_file)
+
+ elif isinstance(obj, GeneratedFile):
+ export_suffixes = (
+ '.c',
+ '.cpp',
+ '.h',
+ '.inc',
+ '.py',
+ )
+ tier = 'export' if any(f.endswith(export_suffixes) for f in obj.outputs) else 'misc'
+ self._no_skip[tier].add(backend_file.relobjdir)
+ first_output = obj.outputs[0]
+ dep_file = "%s.pp" % first_output
+ backend_file.write('%s:: %s\n' % (tier, first_output))
+ for output in obj.outputs:
+ if output != first_output:
+ backend_file.write('%s: %s ;\n' % (output, first_output))
+ backend_file.write('GARBAGE += %s\n' % output)
+ backend_file.write('EXTRA_MDDEPEND_FILES += %s\n' % dep_file)
+ if obj.script:
+ backend_file.write("""{output}: {script}{inputs}{backend}
+\t$(REPORT_BUILD)
+\t$(call py_action,file_generate,{script} {method} {output} $(MDDEPDIR)/{dep_file}{inputs}{flags})
+
+""".format(output=first_output,
+ dep_file=dep_file,
+ inputs=' ' + ' '.join([self._pretty_path(f, backend_file) for f in obj.inputs]) if obj.inputs else '',
+ flags=' ' + ' '.join(obj.flags) if obj.flags else '',
+ backend=' backend.mk' if obj.flags else '',
+ script=obj.script,
+ method=obj.method))
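+ # For a GeneratedFile with script 'gen.py', method 'main' and a single
+ # output 'foo.h' (hypothetical values, no extra inputs or flags), the
+ # emitted rule looks roughly like:
+ #
+ #   foo.h: gen.py
+ #           $(REPORT_BUILD)
+ #           $(call py_action,file_generate,gen.py main foo.h $(MDDEPDIR)/foo.h.pp)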
+
+ elif isinstance(obj, JARManifest):
+ self._no_skip['libs'].add(backend_file.relobjdir)
+ backend_file.write('JAR_MANIFEST := %s\n' % obj.path.full_path)
+
+ elif isinstance(obj, Program):
+ self._process_program(obj.program, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, HostProgram):
+ self._process_host_program(obj.program, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, SimpleProgram):
+ self._process_simple_program(obj, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, HostSimpleProgram):
+ self._process_host_simple_program(obj.program, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, LocalInclude):
+ self._process_local_include(obj.path, backend_file)
+
+ elif isinstance(obj, PerSourceFlag):
+ self._process_per_source_flag(obj, backend_file)
+
+ elif isinstance(obj, InstallationTarget):
+ self._process_installation_target(obj, backend_file)
+
+ elif isinstance(obj, ContextWrapped):
+ # Process a rich build system object from the front-end
+ # as-is. Please follow precedent and handle CamelCaseData
+ # in a function named _process_camel_case_data. At some
+ # point in the future, this unwrapping process may be
+ # automated.
+ if isinstance(obj.wrapped, JavaJarData):
+ self._process_java_jar_data(obj.wrapped, backend_file)
+ elif isinstance(obj.wrapped, AndroidEclipseProjectData):
+ self._process_android_eclipse_project_data(obj.wrapped, backend_file)
+ else:
+ return False
+
+ elif isinstance(obj, RustLibrary):
+ self.backend_input_files.add(obj.cargo_file)
+ self._process_rust_library(obj, backend_file)
+ # No need to call _process_linked_libraries, because Rust
+ # libraries are self-contained objects at this point.
+
+ elif isinstance(obj, SharedLibrary):
+ self._process_shared_library(obj, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, StaticLibrary):
+ self._process_static_library(obj, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, HostLibrary):
+ self._process_host_library(obj, backend_file)
+ self._process_linked_libraries(obj, backend_file)
+
+ elif isinstance(obj, FinalTargetFiles):
+ self._process_final_target_files(obj, obj.files, backend_file)
+
+ elif isinstance(obj, FinalTargetPreprocessedFiles):
+ self._process_final_target_pp_files(obj, obj.files, backend_file, 'DIST_FILES')
+
+ elif isinstance(obj, ObjdirFiles):
+ self._process_objdir_files(obj, obj.files, backend_file)
+
+ elif isinstance(obj, ObjdirPreprocessedFiles):
+ self._process_final_target_pp_files(obj, obj.files, backend_file, 'OBJDIR_PP_FILES')
+
+ elif isinstance(obj, AndroidResDirs):
+ # Order matters.
+ for p in obj.paths:
+ backend_file.write('ANDROID_RES_DIRS += %s\n' % p.full_path)
+
+ elif isinstance(obj, AndroidAssetsDirs):
+ # Order matters.
+ for p in obj.paths:
+ backend_file.write('ANDROID_ASSETS_DIRS += %s\n' % p.full_path)
+
+ elif isinstance(obj, AndroidExtraResDirs):
+ # Order does not matter.
+ for p in sorted(set(p.full_path for p in obj.paths)):
+ backend_file.write('ANDROID_EXTRA_RES_DIRS += %s\n' % p)
+
+ elif isinstance(obj, AndroidExtraPackages):
+ # Order does not matter.
+ for p in sorted(set(obj.packages)):
+ backend_file.write('ANDROID_EXTRA_PACKAGES += %s\n' % p)
+
+ elif isinstance(obj, ChromeManifestEntry):
+ self._process_chrome_manifest_entry(obj, backend_file)
+
+ else:
+ return False
+
+ return True
+
+ def _fill_root_mk(self):
+ """
+ Create two files, root.mk and root-deps.mk: the first contains
+ convenience variables, the second the dependency definitions for a
+ hopefully proper directory traversal.
+ """
+ for tier, no_skip in self._no_skip.items():
+ self.log(logging.DEBUG, 'fill_root_mk', {
+ 'number': len(no_skip), 'tier': tier
+ }, 'Using {number} directories during {tier}')
+
+ def should_skip(tier, dir):
+ if tier in self._no_skip:
+ return dir not in self._no_skip[tier]
+ return False
+
+ # Traverse directories in parallel, and skip static dirs
+ def parallel_filter(current, subdirs):
+ all_subdirs = subdirs.dirs + subdirs.tests
+ if should_skip(tier, current) or current.startswith('subtiers/'):
+ current = None
+ return current, all_subdirs, []
+
+ # build everything in parallel, including static dirs
+ # Because of bug 925236 and possible other unknown race conditions,
+ # don't parallelize the libs tier.
+ def libs_filter(current, subdirs):
+ if should_skip('libs', current) or current.startswith('subtiers/'):
+ current = None
+ return current, [], subdirs.dirs + subdirs.tests
+
+ # Because of bug 925236 and possible other unknown race conditions,
+ # don't parallelize the tools tier. There aren't many directories for
+ # this tier anyways.
+ def tools_filter(current, subdirs):
+ if should_skip('tools', current) or current.startswith('subtiers/'):
+ current = None
+ return current, [], subdirs.dirs + subdirs.tests
+
+ filters = [
+ ('export', parallel_filter),
+ ('libs', libs_filter),
+ ('misc', parallel_filter),
+ ('tools', tools_filter),
+ ]
+
+ root_deps_mk = Makefile()
+
+ # Fill the dependencies for traversal of each tier.
+ for tier, filter in filters:
+ main, all_deps = \
+ self._traversal.compute_dependencies(filter)
+ for dir, deps in all_deps.items():
+ if deps is not None or (dir in self._idl_dirs
+ and tier == 'export'):
+ rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
+ if deps:
+ rule.add_dependencies('%s/%s' % (d, tier) for d in deps if d)
+ if dir in self._idl_dirs and tier == 'export':
+ rule.add_dependencies(['xpcom/xpidl/%s' % tier])
+ rule = root_deps_mk.create_rule(['recurse_%s' % tier])
+ if main:
+ rule.add_dependencies('%s/%s' % (d, tier) for d in main)
+
+ all_compile_deps = reduce(lambda x, y: x | y,
+ self._compile_graph.values()) if self._compile_graph else set()
+ compile_roots = set(self._compile_graph.keys()) - all_compile_deps
+
+ rule = root_deps_mk.create_rule(['recurse_compile'])
+ rule.add_dependencies(compile_roots)
+ for target, deps in sorted(self._compile_graph.items()):
+ if deps:
+ rule = root_deps_mk.create_rule([target])
+ rule.add_dependencies(deps)
+
+ root_mk = Makefile()
+
+ # Fill root.mk with the convenience variables.
+ for tier, filter in filters:
+ all_dirs = self._traversal.traverse('', filter)
+ root_mk.add_statement('%s_dirs := %s' % (tier, ' '.join(all_dirs)))
+
+ # Need a list of compile targets because we can't use pattern rules:
+ # https://savannah.gnu.org/bugs/index.php?42833
+ root_mk.add_statement('compile_targets := %s' % ' '.join(sorted(
+ set(self._compile_graph.keys()) | all_compile_deps)))
+
+ root_mk.add_statement('include root-deps.mk')
+
+ with self._write_file(
+ mozpath.join(self.environment.topobjdir, 'root.mk')) as root:
+ root_mk.dump(root, removal_guard=False)
+
+ with self._write_file(
+ mozpath.join(self.environment.topobjdir, 'root-deps.mk')) as root_deps:
+ root_deps_mk.dump(root_deps, removal_guard=False)
+
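+ # The net result: root.mk holds per-tier convenience variables such as
+ # 'export_dirs := ...' plus 'compile_targets := ...' and ends with
+ # 'include root-deps.mk', while root-deps.mk carries the actual
+ # 'dir/tier: prerequisite/tier' ordering rules computed above.
+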
+ def _add_unified_build_rules(self, makefile, unified_source_mapping,
+ unified_files_makefile_variable='unified_files',
+ include_curdir_build_rules=True):
+
+ # In case it's a generator.
+ unified_source_mapping = sorted(unified_source_mapping)
+
+ explanation = "\n" \
+ "# We build files in 'unified' mode by including several files\n" \
+ "# together into a single source file. This cuts down on\n" \
+ "# compilation times and debug information size."
+ makefile.add_statement(explanation)
+
+ all_sources = ' '.join(source for source, _ in unified_source_mapping)
+ makefile.add_statement('%s := %s' % (unified_files_makefile_variable,
+ all_sources))
+
+ if include_curdir_build_rules:
+ makefile.add_statement('\n'
+ '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
+ '# Help it out by explicitly specifying dependencies.')
+ makefile.add_statement('all_absolute_unified_files := \\\n'
+ ' $(addprefix $(CURDIR)/,$(%s))'
+ % unified_files_makefile_variable)
+ rule = makefile.create_rule(['$(all_absolute_unified_files)'])
+ rule.add_dependencies(['$(CURDIR)/%: %'])
+
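+ # As an illustration (file names hypothetical), this emits roughly:
+ #
+ #   unified_files := UnifiedSource0.cpp UnifiedSource1.cpp
+ #   all_absolute_unified_files := \
+ #     $(addprefix $(CURDIR)/,$(unified_files))
+ #   $(all_absolute_unified_files): $(CURDIR)/%: %
+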
+ def _check_blacklisted_variables(self, makefile_in, makefile_content):
+ if b'EXTERNALLY_MANAGED_MAKE_FILE' in makefile_content:
+ # Bypass the variable restrictions for externally managed makefiles.
+ return
+
+ for l in makefile_content.splitlines():
+ l = l.strip()
+ # Don't check comments
+ if l.startswith(b'#'):
+ continue
+ for x in chain(MOZBUILD_VARIABLES, DEPRECATED_VARIABLES):
+ if x not in l:
+ continue
+
+ # Finding the variable name in the Makefile is not enough: it
+ # may just appear as part of something else, like DIRS appears
+ # in GENERATED_DIRS.
+ if re.search(r'\b%s\s*[:?+]?=' % x, l):
+ if x in MOZBUILD_VARIABLES:
+ message = MOZBUILD_VARIABLES_MESSAGE
+ else:
+ message = DEPRECATED_VARIABLES_MESSAGE
+ raise Exception('Variable %s is defined in %s. %s'
+ % (x, makefile_in, message))
+
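+ # For example, with x == b'DIRS' the pattern r'\bDIRS\s*[:?+]?=' matches
+ # 'DIRS += foo' or 'DIRS=foo' but not 'GENERATED_DIRS = foo': the
+ # underscore before 'DIRS' means there is no word boundary there.
+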
+ def consume_finished(self):
+ CommonBackend.consume_finished(self)
+
+ for objdir, backend_file in sorted(self._backend_files.items()):
+ srcdir = backend_file.srcdir
+ with self._write_file(fh=backend_file) as bf:
+ makefile_in = mozpath.join(srcdir, 'Makefile.in')
+ makefile = mozpath.join(objdir, 'Makefile')
+
+ # If Makefile.in exists, use it as a template. Otherwise,
+ # create a stub.
+ stub = not os.path.exists(makefile_in)
+ if not stub:
+ self.log(logging.DEBUG, 'substitute_makefile',
+ {'path': makefile}, 'Substituting makefile: {path}')
+ self._makefile_in_count += 1
+
+ # In the export and libs tiers, we don't skip directories
+ # containing a Makefile.in.
+ # topobjdir is handled separately; don't do anything for
+ # it.
+ if bf.relobjdir:
+ for tier in ('export', 'libs',):
+ self._no_skip[tier].add(bf.relobjdir)
+ else:
+ self.log(logging.DEBUG, 'stub_makefile',
+ {'path': makefile}, 'Creating stub Makefile: {path}')
+
+ obj = self.Substitution()
+ obj.output_path = makefile
+ obj.input_path = makefile_in
+ obj.topsrcdir = backend_file.topsrcdir
+ obj.topobjdir = bf.environment.topobjdir
+ obj.config = bf.environment
+ self._create_makefile(obj, stub=stub)
+ with open(obj.output_path) as fh:
+ content = fh.read()
+ # Skip every directory but those with a Makefile
+ # containing a tools target, or XPI_PKGNAME or
+ # INSTALL_EXTENSION_ID.
+ for t in (b'XPI_PKGNAME', b'INSTALL_EXTENSION_ID',
+ b'tools'):
+ if t not in content:
+ continue
+ if t == b'tools' and not re.search(r'(?:^|\s)tools.*::', content, re.M):
+ continue
+ if objdir == self.environment.topobjdir:
+ continue
+ self._no_skip['tools'].add(mozpath.relpath(objdir,
+ self.environment.topobjdir))
+
+ # Detect any Makefile.ins that contain variables on the
+ # moz.build-only list
+ self._check_blacklisted_variables(makefile_in, content)
+
+ self._fill_root_mk()
+
+ # Make the master test manifest files.
+ for flavor, t in self._test_manifests.items():
+ install_prefix, manifests = t
+ manifest_stem = mozpath.join(install_prefix, '%s.ini' % flavor)
+ self._write_master_test_manifest(mozpath.join(
+ self.environment.topobjdir, '_tests', manifest_stem),
+ manifests)
+
+ # Catch duplicate inserts.
+ try:
+ self._install_manifests['_tests'].add_optional_exists(manifest_stem)
+ except ValueError:
+ pass
+
+ self._write_manifests('install', self._install_manifests)
+
+ ensureParentDir(mozpath.join(self.environment.topobjdir, 'dist', 'foo'))
+
+ def _pretty_path_parts(self, path, backend_file):
+ assert isinstance(path, Path)
+ if isinstance(path, SourcePath):
+ if path.full_path.startswith(backend_file.srcdir):
+ return '$(srcdir)', path.full_path[len(backend_file.srcdir):]
+ if path.full_path.startswith(backend_file.topsrcdir):
+ return '$(topsrcdir)', path.full_path[len(backend_file.topsrcdir):]
+ elif isinstance(path, ObjDirPath):
+ if path.full_path.startswith(backend_file.objdir):
+ return '', path.full_path[len(backend_file.objdir) + 1:]
+ if path.full_path.startswith(self.environment.topobjdir):
+ return '$(DEPTH)', path.full_path[len(self.environment.topobjdir):]
+
+ return '', path.full_path
+
+ def _pretty_path(self, path, backend_file):
+ return ''.join(self._pretty_path_parts(path, backend_file))
+
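+ # Worked example (paths hypothetical): a SourcePath /src/gfx/foo.cpp with
+ # backend srcdir /src/gfx pretty-prints as '$(srcdir)/foo.cpp'; an
+ # ObjDirPath under the current objdir prints bare ('foo.h'), while one
+ # elsewhere under topobjdir prints as '$(DEPTH)/dist/include/foo.h'.
+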
+ def _process_unified_sources(self, obj):
+ backend_file = self._get_backend_file_for(obj)
+
+ suffix_map = {
+ '.c': 'UNIFIED_CSRCS',
+ '.mm': 'UNIFIED_CMMSRCS',
+ '.cpp': 'UNIFIED_CPPSRCS',
+ }
+
+ var = suffix_map[obj.canonical_suffix]
+ non_unified_var = var[len('UNIFIED_'):]
+
+ if obj.have_unified_mapping:
+ self._add_unified_build_rules(backend_file,
+ obj.unified_source_mapping,
+ unified_files_makefile_variable=var,
+ include_curdir_build_rules=False)
+ backend_file.write('%s += $(%s)\n' % (non_unified_var, var))
+ else:
+ # Sorted so output is consistent and we don't bump mtimes.
+ source_files = list(sorted(obj.files))
+
+ backend_file.write('%s += %s\n' % (
+ non_unified_var, ' '.join(source_files)))
+
+ def _process_directory_traversal(self, obj, backend_file):
+ """Process a data.DirectoryTraversal instance."""
+ fh = backend_file.fh
+
+ def relativize(base, dirs):
+ return (mozpath.relpath(d.translated, base) for d in dirs)
+
+ if obj.dirs:
+ fh.write('DIRS := %s\n' % ' '.join(
+ relativize(backend_file.objdir, obj.dirs)))
+ self._traversal.add(backend_file.relobjdir,
+ dirs=relativize(self.environment.topobjdir, obj.dirs))
+
+ # The directory needs to be registered whether subdirectories have been
+ # registered or not.
+ self._traversal.add(backend_file.relobjdir)
+
+ def _process_defines(self, obj, backend_file, which='DEFINES'):
+ """Output the DEFINES rules to the given backend file."""
+ defines = list(obj.get_defines())
+ if defines:
+ defines = ' '.join(shell_quote(d) for d in defines)
+ backend_file.write_once('%s += %s\n' % (which, defines))
+
+ def _process_installation_target(self, obj, backend_file):
+ # A few makefiles need to be able to override the following rules via
+ # make XPI_NAME=blah commands, so we default to the lazy evaluation as
+ # much as possible here to avoid breaking things.
+ if obj.xpiname:
+ backend_file.write('XPI_NAME = %s\n' % (obj.xpiname))
+ if obj.subdir:
+ backend_file.write('DIST_SUBDIR = %s\n' % (obj.subdir))
+ if obj.target and not obj.is_custom():
+ backend_file.write('FINAL_TARGET = $(DEPTH)/%s\n' % (obj.target))
+ else:
+ backend_file.write('FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n')
+
+ if not obj.enabled:
+ backend_file.write('NO_DIST_INSTALL := 1\n')
+
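+ # The '=' (rather than ':=') assignments above keep evaluation lazy, so an
+ # invocation like 'make XPI_NAME=blah' can still override XPI_NAME and
+ # have FINAL_TARGET expand accordingly.
+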
+ def _handle_idl_manager(self, manager):
+ build_files = self._install_manifests['xpidl']
+
+ for p in ('Makefile', 'backend.mk', '.deps/.mkdir.done'):
+ build_files.add_optional_exists(p)
+
+ for idl in manager.idls.values():
+ self._install_manifests['dist_idl'].add_symlink(idl['source'],
+ idl['basename'])
+ self._install_manifests['dist_include'].add_optional_exists('%s.h'
+ % idl['root'])
+
+ for module in manager.modules:
+ build_files.add_optional_exists(mozpath.join('.deps',
+ '%s.pp' % module))
+
+ modules = manager.modules
+ xpt_modules = sorted(modules.keys())
+ xpt_files = set()
+ registered_xpt_files = set()
+
+ mk = Makefile()
+
+ for module in xpt_modules:
+ install_target, sources = modules[module]
+ deps = sorted(sources)
+
+ # It may seem strange to have the .idl files listed as
+ # prerequisites both here and in the auto-generated .pp files.
+ # It is necessary to list them here to handle the case where a
+ # new .idl is added to an xpt. If we add a new .idl and nothing
+ # else has changed, the new .idl won't be referenced anywhere
+ # except in the command invocation. Therefore, the .xpt won't
+ # be rebuilt because the dependencies say it is up to date. By
+ # listing the .idls here, we ensure the make file has a
+ # reference to the new .idl. Since the new .idl presumably has
+ # an mtime newer than the .xpt, it will trigger xpt generation.
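+            # Sketch for a hypothetical module 'foo' installed to dist/bin
+            # and built from a.idl and b.idl (paths abbreviated) -- the
+            # statement below becomes 'foo_deps = a.idl b.idl' and xpt_path
+            # becomes '$(DEPTH)/dist/bin/components/foo.xpt'.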
+ xpt_path = '$(DEPTH)/%s/components/%s.xpt' % (install_target, module)
+ xpt_files.add(xpt_path)
+ mk.add_statement('%s_deps = %s' % (module, ' '.join(deps)))
+
+ if install_target.startswith('dist/'):
+ path = mozpath.relpath(xpt_path, '$(DEPTH)/dist')
+ prefix, subpath = path.split('/', 1)
+ key = 'dist_%s' % prefix
+
+ self._install_manifests[key].add_optional_exists(subpath)
+
+ rules = StringIO()
+ mk.dump(rules, removal_guard=False)
+
+ interfaces_manifests = []
+ dist_dir = mozpath.join(self.environment.topobjdir, 'dist')
+ for manifest, entries in manager.interface_manifests.items():
+ interfaces_manifests.append(mozpath.join('$(DEPTH)', manifest))
+ for xpt in sorted(entries):
+ registered_xpt_files.add(mozpath.join(
+ '$(DEPTH)', mozpath.dirname(manifest), xpt))
+
+ if install_target.startswith('dist/'):
+ path = mozpath.join(self.environment.topobjdir, manifest)
+ path = mozpath.relpath(path, dist_dir)
+ prefix, subpath = path.split('/', 1)
+ key = 'dist_%s' % prefix
+ self._install_manifests[key].add_optional_exists(subpath)
+
+ chrome_manifests = [mozpath.join('$(DEPTH)', m) for m in sorted(manager.chrome_manifests)]
+
+ # Create dependency for output header so we force regeneration if the
+ # header was deleted. This ideally should not be necessary. However,
+ # some processes (such as PGO at the time this was implemented) wipe
+ # out dist/include without regard to our install manifests.
+
+ obj = self.Substitution()
+ obj.output_path = mozpath.join(self.environment.topobjdir, 'config',
+ 'makefiles', 'xpidl', 'Makefile')
+ obj.input_path = mozpath.join(self.environment.topsrcdir, 'config',
+ 'makefiles', 'xpidl', 'Makefile.in')
+ obj.topsrcdir = self.environment.topsrcdir
+ obj.topobjdir = self.environment.topobjdir
+ obj.config = self.environment
+ self._create_makefile(obj, extra=dict(
+            chrome_manifests=' '.join(chrome_manifests),
+            interfaces_manifests=' '.join(interfaces_manifests),
+ xpidl_rules=rules.getvalue(),
+ xpidl_modules=' '.join(xpt_modules),
+ xpt_files=' '.join(sorted(xpt_files - registered_xpt_files)),
+ registered_xpt_files=' '.join(sorted(registered_xpt_files)),
+ ))
+
+ def _process_program(self, program, backend_file):
+ backend_file.write('PROGRAM = %s\n' % program)
+
+ def _process_host_program(self, program, backend_file):
+ backend_file.write('HOST_PROGRAM = %s\n' % program)
+
+ def _process_simple_program(self, obj, backend_file):
+ if obj.is_unit_test:
+ backend_file.write('CPP_UNIT_TESTS += %s\n' % obj.program)
+ else:
+ backend_file.write('SIMPLE_PROGRAMS += %s\n' % obj.program)
+
+ def _process_host_simple_program(self, program, backend_file):
+ backend_file.write('HOST_SIMPLE_PROGRAMS += %s\n' % program)
+
+ def _process_test_manifest(self, obj, backend_file):
+ # Much of the logic in this function could be moved to CommonBackend.
+ self.backend_input_files.add(mozpath.join(obj.topsrcdir,
+ obj.manifest_relpath))
+
+        # Don't allow a file to be installed more than once unless
+        # duplicates are explicitly permitted. We currently allow duplicates
+        # for non-test files, or for test files when the manifest is marked
+        # as a dupe manifest.
+ for source, (dest, is_test) in obj.installs.items():
+ try:
+ self._install_manifests['_test_files'].add_symlink(source, dest)
+ except ValueError:
+ if not obj.dupe_manifest and is_test:
+ raise
+
+ for base, pattern, dest in obj.pattern_installs:
+ try:
+ self._install_manifests['_test_files'].add_pattern_symlink(base,
+ pattern, dest)
+ except ValueError:
+ if not obj.dupe_manifest:
+ raise
+
+ for dest in obj.external_installs:
+ try:
+ self._install_manifests['_test_files'].add_optional_exists(dest)
+ except ValueError:
+ if not obj.dupe_manifest:
+ raise
+
+ m = self._test_manifests.setdefault(obj.flavor,
+ (obj.install_prefix, set()))
+ m[1].add(obj.manifest_obj_relpath)
+
+ try:
+ from reftest import ReftestManifest
+
+ if isinstance(obj.manifest, ReftestManifest):
+ # Mark included files as part of the build backend so changes
+ # result in re-config.
+ self.backend_input_files |= obj.manifest.manifests
+ except ImportError:
+ # Ignore errors caused by the reftest module not being present.
+ # This can happen when building SpiderMonkey standalone, for example.
+ pass
+
+ def _process_local_include(self, local_include, backend_file):
+ d, path = self._pretty_path_parts(local_include, backend_file)
+ if isinstance(local_include, ObjDirPath) and not d:
+ # path doesn't start with a slash in this case
+ d = '$(CURDIR)/'
+ elif d == '$(DEPTH)':
+ d = '$(topobjdir)'
+ quoted_path = shell_quote(path) if path else path
+ if quoted_path != path:
+ path = quoted_path[0] + d + quoted_path[1:]
+ else:
+ path = d + path
+ backend_file.write('LOCAL_INCLUDES += -I%s\n' % path)
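+        # Quoting sketch (assumed inputs): with d = '$(srcdir)' and path
+        # '/foo bar', shell_quote() returns "'/foo bar'", and the prefix is
+        # spliced inside the opening quote to give '$(srcdir)/foo bar',
+        # producing: LOCAL_INCLUDES += -I'$(srcdir)/foo bar'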
+
+ def _process_per_source_flag(self, per_source_flag, backend_file):
+ for flag in per_source_flag.flags:
+ backend_file.write('%s_FLAGS += %s\n' % (mozpath.basename(per_source_flag.file_name), flag))
+
+ def _process_java_jar_data(self, jar, backend_file):
+ target = jar.name
+ backend_file.write('JAVA_JAR_TARGETS += %s\n' % target)
+ backend_file.write('%s_DEST := %s.jar\n' % (target, jar.name))
+ if jar.sources:
+ backend_file.write('%s_JAVAFILES := %s\n' %
+ (target, ' '.join(jar.sources)))
+ if jar.generated_sources:
+ backend_file.write('%s_PP_JAVAFILES := %s\n' %
+ (target, ' '.join(mozpath.join('generated', f) for f in jar.generated_sources)))
+ if jar.extra_jars:
+ backend_file.write('%s_EXTRA_JARS := %s\n' %
+ (target, ' '.join(sorted(set(jar.extra_jars)))))
+ if jar.javac_flags:
+ backend_file.write('%s_JAVAC_FLAGS := %s\n' %
+ (target, ' '.join(jar.javac_flags)))
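+        # For a hypothetical jar named 'geckoview' with sources A.java and
+        # B.java, the writes above produce:
+        #   JAVA_JAR_TARGETS += geckoview
+        #   geckoview_DEST := geckoview.jar
+        #   geckoview_JAVAFILES := A.java B.java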
+
+ def _process_android_eclipse_project_data(self, project, backend_file):
+ # We add a single target to the backend.mk corresponding to
+ # the moz.build defining the Android Eclipse project. This
+ # target depends on some targets to be fresh, and installs a
+ # manifest generated by the Android Eclipse build backend. The
+ # manifests for all projects live in $TOPOBJDIR/android_eclipse
+ # and are installed into subdirectories thereof.
+
+ project_directory = mozpath.join(self.environment.topobjdir, 'android_eclipse', project.name)
+ manifest_path = mozpath.join(self.environment.topobjdir, 'android_eclipse', '%s.manifest' % project.name)
+
+ fragment = Makefile()
+ rule = fragment.create_rule(targets=['ANDROID_ECLIPSE_PROJECT_%s' % project.name])
+ rule.add_dependencies(project.recursive_make_targets)
+ args = ['--no-remove',
+ '--no-remove-all-directory-symlinks',
+ '--no-remove-empty-directories',
+ project_directory,
+ manifest_path]
+ rule.add_commands(['$(call py_action,process_install_manifest,%s)' % ' '.join(args)])
+ fragment.dump(backend_file.fh, removal_guard=False)
+
+ def _process_shared_library(self, libdef, backend_file):
+ backend_file.write_once('LIBRARY_NAME := %s\n' % libdef.basename)
+ backend_file.write('FORCE_SHARED_LIB := 1\n')
+ backend_file.write('IMPORT_LIBRARY := %s\n' % libdef.import_name)
+ backend_file.write('SHARED_LIBRARY := %s\n' % libdef.lib_name)
+ if libdef.variant == libdef.COMPONENT:
+ backend_file.write('IS_COMPONENT := 1\n')
+ if libdef.soname:
+ backend_file.write('DSO_SONAME := %s\n' % libdef.soname)
+ if libdef.is_sdk:
+ backend_file.write('SDK_LIBRARY := %s\n' % libdef.import_name)
+ if libdef.symbols_file:
+ backend_file.write('SYMBOLS_FILE := %s\n' % libdef.symbols_file)
+ if not libdef.cxx_link:
+ backend_file.write('LIB_IS_C_ONLY := 1\n')
+
+ def _process_static_library(self, libdef, backend_file):
+ backend_file.write_once('LIBRARY_NAME := %s\n' % libdef.basename)
+ backend_file.write('FORCE_STATIC_LIB := 1\n')
+ backend_file.write('REAL_LIBRARY := %s\n' % libdef.lib_name)
+ if libdef.is_sdk:
+ backend_file.write('SDK_LIBRARY := %s\n' % libdef.import_name)
+ if libdef.no_expand_lib:
+ backend_file.write('NO_EXPAND_LIBS := 1\n')
+
+ def _process_rust_library(self, libdef, backend_file):
+ backend_file.write_once('RUST_LIBRARY_FILE := %s\n' % libdef.import_name)
+        backend_file.write('CARGO_FILE := $(srcdir)/Cargo.toml\n')
+
+ def _process_host_library(self, libdef, backend_file):
+ backend_file.write('HOST_LIBRARY_NAME = %s\n' % libdef.basename)
+
+ def _build_target_for_obj(self, obj):
+ return '%s/%s' % (mozpath.relpath(obj.objdir,
+ self.environment.topobjdir), obj.KIND)
+
+ def _process_linked_libraries(self, obj, backend_file):
+ def write_shared_and_system_libs(lib):
+ for l in lib.linked_libraries:
+ if isinstance(l, (StaticLibrary, RustLibrary)):
+ write_shared_and_system_libs(l)
+ else:
+ backend_file.write_once('SHARED_LIBS += %s/%s\n'
+ % (pretty_relpath(l), l.import_name))
+ for l in lib.linked_system_libs:
+ backend_file.write_once('OS_LIBS += %s\n' % l)
+
+ def pretty_relpath(lib):
+ return '$(DEPTH)/%s' % mozpath.relpath(lib.objdir, topobjdir)
+
+ topobjdir = mozpath.normsep(obj.topobjdir)
+ # This will create the node even if there aren't any linked libraries.
+ build_target = self._build_target_for_obj(obj)
+ self._compile_graph[build_target]
+
+ for lib in obj.linked_libraries:
+ if not isinstance(lib, ExternalLibrary):
+ self._compile_graph[build_target].add(
+ self._build_target_for_obj(lib))
+ relpath = pretty_relpath(lib)
+ if isinstance(obj, Library):
+ if isinstance(lib, RustLibrary):
+ # We don't need to do anything here; we will handle
+ # linkage for any RustLibrary elsewhere.
+ continue
+ elif isinstance(lib, StaticLibrary):
+ backend_file.write_once('STATIC_LIBS += %s/%s\n'
+ % (relpath, lib.import_name))
+ if isinstance(obj, SharedLibrary):
+ write_shared_and_system_libs(lib)
+ elif isinstance(obj, SharedLibrary):
+ assert lib.variant != lib.COMPONENT
+ backend_file.write_once('SHARED_LIBS += %s/%s\n'
+ % (relpath, lib.import_name))
+ elif isinstance(obj, (Program, SimpleProgram)):
+ if isinstance(lib, StaticLibrary):
+ backend_file.write_once('STATIC_LIBS += %s/%s\n'
+ % (relpath, lib.import_name))
+ write_shared_and_system_libs(lib)
+ else:
+ assert lib.variant != lib.COMPONENT
+ backend_file.write_once('SHARED_LIBS += %s/%s\n'
+ % (relpath, lib.import_name))
+ elif isinstance(obj, (HostLibrary, HostProgram, HostSimpleProgram)):
+ assert isinstance(lib, HostLibrary)
+ backend_file.write_once('HOST_LIBS += %s/%s\n'
+ % (relpath, lib.import_name))
+
+ # We have to link any Rust libraries after all intermediate static
+ # libraries have been listed to ensure that the Rust libraries are
+ # searched after the C/C++ objects that might reference Rust symbols.
+ if isinstance(obj, SharedLibrary):
+ self._process_rust_libraries(obj, backend_file, pretty_relpath)
+
+ for lib in obj.linked_system_libs:
+ if obj.KIND == 'target':
+ backend_file.write_once('OS_LIBS += %s\n' % lib)
+ else:
+ backend_file.write_once('HOST_EXTRA_LIBS += %s\n' % lib)
+
+ # Process library-based defines
+ self._process_defines(obj.lib_defines, backend_file)
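+        # Recursion sketch (hypothetical graph): for a Program that links a
+        # StaticLibrary which in turn links a SharedLibrary, the logic above
+        # emits STATIC_LIBS for the static archive and then recurses through
+        # write_shared_and_system_libs() to emit
+        #   SHARED_LIBS += $(DEPTH)/<objdir>/<import_name>
+        #   OS_LIBS += <each linked system library>
+        # for everything reachable beneath it.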
+
+ def _process_rust_libraries(self, obj, backend_file, pretty_relpath):
+ assert isinstance(obj, SharedLibrary)
+
+ # If this library does not depend on any Rust libraries, then we are done.
+ direct_linked = [l for l in obj.linked_libraries if isinstance(l, RustLibrary)]
+ if not direct_linked:
+ return
+
+ # We should have already checked this in Linkable.link_library.
+ assert len(direct_linked) == 1
+
+ # TODO: see bug 1310063 for checking dependencies are set up correctly.
+
+ direct_linked = direct_linked[0]
+ backend_file.write('RUST_STATIC_LIB_FOR_SHARED_LIB := %s/%s\n' %
+ (pretty_relpath(direct_linked), direct_linked.import_name))
+
+ def _process_final_target_files(self, obj, files, backend_file):
+ target = obj.install_target
+ path = mozpath.basedir(target, (
+ 'dist/bin',
+ 'dist/xpi-stage',
+ '_tests',
+ 'dist/include',
+ 'dist/branding',
+ 'dist/sdk',
+ ))
+ if not path:
+ raise Exception("Cannot install to " + target)
+
+ manifest = path.replace('/', '_')
+ install_manifest = self._install_manifests[manifest]
+ reltarget = mozpath.relpath(target, path)
+
+ # Also emit the necessary rules to create $(DIST)/branding during
+ # partial tree builds. The locale makefiles rely on this working.
+ if path == 'dist/branding':
+ backend_file.write('NONRECURSIVE_TARGETS += export\n')
+ backend_file.write('NONRECURSIVE_TARGETS_export += branding\n')
+ backend_file.write('NONRECURSIVE_TARGETS_export_branding_DIRECTORY = $(DEPTH)\n')
+ backend_file.write('NONRECURSIVE_TARGETS_export_branding_TARGETS += install-dist/branding\n')
+
+ for path, files in files.walk():
+ target_var = (mozpath.join(target, path)
+ if path else target).replace('/', '_')
+ have_objdir_files = False
+ for f in files:
+ assert not isinstance(f, RenamedSourcePath)
+ dest = mozpath.join(reltarget, path, f.target_basename)
+ if not isinstance(f, ObjDirPath):
+ if '*' in f:
+ if f.startswith('/') or isinstance(f, AbsolutePath):
+ basepath, wild = os.path.split(f.full_path)
+ if '*' in basepath:
+ raise Exception("Wildcards are only supported in the filename part of "
+ "srcdir-relative or absolute paths.")
+
+ install_manifest.add_pattern_symlink(basepath, wild, path)
+ else:
+ install_manifest.add_pattern_symlink(f.srcdir, f, path)
+ else:
+ install_manifest.add_symlink(f.full_path, dest)
+ else:
+ install_manifest.add_optional_exists(dest)
+ backend_file.write('%s_FILES += %s\n' % (
+ target_var, self._pretty_path(f, backend_file)))
+ have_objdir_files = True
+ if have_objdir_files:
+ tier = 'export' if obj.install_target == 'dist/include' else 'misc'
+ self._no_skip[tier].add(backend_file.relobjdir)
+ backend_file.write('%s_DEST := $(DEPTH)/%s\n'
+ % (target_var,
+ mozpath.join(target, path)))
+ backend_file.write('%s_TARGET := %s\n' % (target_var, tier))
+ backend_file.write('INSTALL_TARGETS += %s\n' % target_var)
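+        # Illustrative backend.mk output for objdir files installed to
+        # dist/bin (the file name is an assumption):
+        #   dist_bin_FILES += generated.js
+        #   dist_bin_DEST := $(DEPTH)/dist/bin
+        #   dist_bin_TARGET := misc
+        #   INSTALL_TARGETS += dist_bin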
+
+ def _process_final_target_pp_files(self, obj, files, backend_file, name):
+ # Bug 1177710 - We'd like to install these via manifests as
+ # preprocessed files. But they currently depend on non-standard flags
+ # being added via some Makefiles, so for now we just pass them through
+ # to the underlying Makefile.in.
+ #
+ # Note that if this becomes a manifest, OBJDIR_PP_FILES will likely
+ # still need to use PP_TARGETS internally because we can't have an
+ # install manifest for the root of the objdir.
+ for i, (path, files) in enumerate(files.walk()):
+ self._no_skip['misc'].add(backend_file.relobjdir)
+ var = '%s_%d' % (name, i)
+ for f in files:
+ backend_file.write('%s += %s\n' % (
+ var, self._pretty_path(f, backend_file)))
+ backend_file.write('%s_PATH := $(DEPTH)/%s\n'
+ % (var, mozpath.join(obj.install_target, path)))
+ backend_file.write('%s_TARGET := misc\n' % var)
+ backend_file.write('PP_TARGETS += %s\n' % var)
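+        # e.g. (hypothetical) with name 'MY_PP', i == 0 and install_target
+        # 'dist/bin', the loop above writes:
+        #   MY_PP_0 += $(srcdir)/prefs.js.in
+        #   MY_PP_0_PATH := $(DEPTH)/dist/bin
+        #   MY_PP_0_TARGET := misc
+        #   PP_TARGETS += MY_PP_0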
+
+ def _process_objdir_files(self, obj, files, backend_file):
+ # We can't use an install manifest for the root of the objdir, since it
+ # would delete all the other files that get put there by the build
+ # system.
+ for i, (path, files) in enumerate(files.walk()):
+ self._no_skip['misc'].add(backend_file.relobjdir)
+ for f in files:
+ backend_file.write('OBJDIR_%d_FILES += %s\n' % (
+ i, self._pretty_path(f, backend_file)))
+ backend_file.write('OBJDIR_%d_DEST := $(topobjdir)/%s\n' % (i, path))
+ backend_file.write('OBJDIR_%d_TARGET := misc\n' % i)
+ backend_file.write('INSTALL_TARGETS += OBJDIR_%d\n' % i)
+
+ def _process_chrome_manifest_entry(self, obj, backend_file):
+ fragment = Makefile()
+ rule = fragment.create_rule(targets=['misc:'])
+
+ top_level = mozpath.join(obj.install_target, 'chrome.manifest')
+ if obj.path != top_level:
+ args = [
+ mozpath.join('$(DEPTH)', top_level),
+ make_quote(shell_quote('manifest %s' %
+ mozpath.relpath(obj.path,
+ obj.install_target))),
+ ]
+ rule.add_commands(['$(call py_action,buildlist,%s)' %
+ ' '.join(args)])
+ args = [
+ mozpath.join('$(DEPTH)', obj.path),
+ make_quote(shell_quote(str(obj.entry))),
+ ]
+ rule.add_commands(['$(call py_action,buildlist,%s)' % ' '.join(args)])
+ fragment.dump(backend_file.fh, removal_guard=False)
+
+ self._no_skip['misc'].add(obj.relativedir)
+
+ def _write_manifests(self, dest, manifests):
+ man_dir = mozpath.join(self.environment.topobjdir, '_build_manifests',
+ dest)
+
+ for k, manifest in manifests.items():
+ with self._write_file(mozpath.join(man_dir, k)) as fh:
+ manifest.write(fileobj=fh)
+
+ def _write_master_test_manifest(self, path, manifests):
+ with self._write_file(path) as master:
+ master.write(
+ '; THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n\n')
+
+ for manifest in sorted(manifests):
+ master.write('[include:%s]\n' % manifest)
+
+ class Substitution(object):
+ """BaseConfigSubstitution-like class for use with _create_makefile."""
+ __slots__ = (
+ 'input_path',
+ 'output_path',
+ 'topsrcdir',
+ 'topobjdir',
+ 'config',
+ )
+
+ def _create_makefile(self, obj, stub=False, extra=None):
+ '''Creates the given makefile. Makefiles are treated the same as
+ config files, but some additional header and footer is added to the
+ output.
+
+ When the stub argument is True, no source file is used, and a stub
+ makefile with the default header and footer only is created.
+ '''
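+        # Sketch of the result (paths illustrative): after preprocessing,
+        # the generated Makefile starts with the header written below, e.g.
+        #   # THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.
+        #   DEPTH := ../..
+        #   include $(DEPTH)/config/autoconf.mk
+        # then the preprocessed Makefile.in body (unless stub is True), and
+        # finally 'include $(topsrcdir)/config/recurse.mk'.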
+ with self._get_preprocessor(obj) as pp:
+ if extra:
+ pp.context.update(extra)
+ if not pp.context.get('autoconfmk', ''):
+ pp.context['autoconfmk'] = 'autoconf.mk'
+            pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n')
+ pp.handleLine(b'DEPTH := @DEPTH@\n')
+ pp.handleLine(b'topobjdir := @topobjdir@\n')
+ pp.handleLine(b'topsrcdir := @top_srcdir@\n')
+ pp.handleLine(b'srcdir := @srcdir@\n')
+ pp.handleLine(b'VPATH := @srcdir@\n')
+ pp.handleLine(b'relativesrcdir := @relativesrcdir@\n')
+ pp.handleLine(b'include $(DEPTH)/config/@autoconfmk@\n')
+ if not stub:
+ pp.do_include(obj.input_path)
+ # Empty line to avoid failures when last line in Makefile.in ends
+ # with a backslash.
+ pp.handleLine(b'\n')
+ pp.handleLine(b'include $(topsrcdir)/config/recurse.mk\n')
+ if not stub:
+ # Adding the Makefile.in here has the desired side-effect
+ # that if the Makefile.in disappears, this will force
+ # moz.build traversal. This means that when we remove empty
+ # Makefile.in files, the old file will get replaced with
+ # the autogenerated one automatically.
+ self.backend_input_files.add(obj.input_path)
+
+ self._makefile_out_count += 1
+
+ def _handle_linked_rust_crates(self, obj, extern_crate_file):
+ backend_file = self._get_backend_file_for(obj)
+
+ backend_file.write('RS_STATICLIB_CRATE_SRC := %s\n' % extern_crate_file)
+
+ def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
+ unified_ipdl_cppsrcs_mapping):
+ # Write out a master list of all IPDL source files.
+ mk = Makefile()
+
+ mk.add_statement('ALL_IPDLSRCS := %s' % ' '.join(sorted_ipdl_sources))
+
+ self._add_unified_build_rules(mk, unified_ipdl_cppsrcs_mapping,
+ unified_files_makefile_variable='CPPSRCS')
+
+ mk.add_statement('IPDLDIRS := %s' % ' '.join(sorted(set(mozpath.dirname(p)
+ for p in self._ipdl_sources))))
+
+ with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
+ mk.dump(ipdls, removal_guard=False)
+
+ def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
+ webidls, expected_build_output_files,
+ global_define_files):
+ include_dir = mozpath.join(self.environment.topobjdir, 'dist',
+ 'include')
+ for f in expected_build_output_files:
+ if f.startswith(include_dir):
+ self._install_manifests['dist_include'].add_optional_exists(
+ mozpath.relpath(f, include_dir))
+
+ # We pass WebIDL info to make via a completely generated make file.
+ mk = Makefile()
+ mk.add_statement('nonstatic_webidl_files := %s' % ' '.join(
+ sorted(webidls.all_non_static_basenames())))
+ mk.add_statement('globalgen_sources := %s' % ' '.join(
+ sorted(global_define_files)))
+ mk.add_statement('test_sources := %s' % ' '.join(
+ sorted('%sBinding.cpp' % s for s in webidls.all_test_stems())))
+
+ # Add rules to preprocess bindings.
+ # This should ideally be using PP_TARGETS. However, since the input
+ # filenames match the output filenames, the existing PP_TARGETS rules
+ # result in circular dependencies and other make weirdness. One
+        # solution is to rename the input or output files respectively. See
+ # bug 928195 comment 129.
+ for source in sorted(webidls.all_preprocessed_sources()):
+ basename = os.path.basename(source)
+ rule = mk.create_rule([basename])
+ rule.add_dependencies([source, '$(GLOBAL_DEPS)'])
+ rule.add_commands([
+ # Remove the file before writing so bindings that go from
+ # static to preprocessed don't end up writing to a symlink,
+ # which would modify content in the source directory.
+ '$(RM) $@',
+ '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
+ '$< -o $@)'
+ ])
+
+ self._add_unified_build_rules(mk,
+ unified_source_mapping,
+ unified_files_makefile_variable='unified_binding_cpp_files')
+
+ webidls_mk = mozpath.join(bindings_dir, 'webidlsrcs.mk')
+ with self._write_file(webidls_mk) as fh:
+ mk.dump(fh, removal_guard=False)
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/.classpath b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.classpath
new file mode 100644
index 000000000..7c51c539c
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.classpath
@@ -0,0 +1,10 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="gen"/>
+ <classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
+ <classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
+ <classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.DEPENDENCIES"/>
+ <classpathentry kind="output" path="bin/classes"/>
+@IDE_CLASSPATH_ENTRIES@
+</classpath>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ApkBuilder.launch b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ApkBuilder.launch
new file mode 100644
index 000000000..3005dee45
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ApkBuilder.launch
@@ -0,0 +1,8 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="false"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="com.android.ide.eclipse.adt.ApkBuilder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.PreCompilerBuilder.launch b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.PreCompilerBuilder.launch
new file mode 100644
index 000000000..9fa599f5f
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.PreCompilerBuilder.launch
@@ -0,0 +1,8 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="false"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="com.android.ide.eclipse.adt.PreCompilerBuilder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ResourceManagerBuilder.launch b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ResourceManagerBuilder.launch
new file mode 100644
index 000000000..20d1c3f4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ResourceManagerBuilder.launch
@@ -0,0 +1,8 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="false"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="com.android.ide.eclipse.adt.ResourceManagerBuilder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/org.eclipse.jdt.core.javabuilder.launch b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/org.eclipse.jdt.core.javabuilder.launch
new file mode 100644
index 000000000..ed5bf6885
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/org.eclipse.jdt.core.javabuilder.launch
@@ -0,0 +1,8 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="true"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="org.eclipse.jdt.core.javabuilder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/AndroidManifest.xml b/python/mozbuild/mozbuild/backend/templates/android_eclipse/AndroidManifest.xml
new file mode 100644
index 000000000..57d8aca8c
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/AndroidManifest.xml
@@ -0,0 +1,11 @@
+#filter substitution
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="@IDE_PACKAGE_NAME@"
+ android:versionCode="1"
+ android:versionName="1.0" >
+
+ <uses-sdk
+ android:minSdkVersion="@MOZ_ANDROID_MIN_SDK_VERSION@"
+ android:targetSdkVersion="@ANDROID_TARGET_SDK@" />
+
+</manifest>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/gen/tmp b/python/mozbuild/mozbuild/backend/templates/android_eclipse/gen/tmp
new file mode 100644
index 000000000..c1c78936f
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/gen/tmp
@@ -0,0 +1 @@
+#filter substitution
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/lint.xml b/python/mozbuild/mozbuild/backend/templates/android_eclipse/lint.xml
new file mode 100644
index 000000000..43ad15dc9
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/lint.xml
@@ -0,0 +1,5 @@
+#filter substitution
+<?xml version="1.0" encoding="UTF-8"?>
+<lint>
+ <issue id="NewApi" severity="ignore" />
+</lint>
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse/project.properties b/python/mozbuild/mozbuild/backend/templates/android_eclipse/project.properties
new file mode 100644
index 000000000..2106d9646
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse/project.properties
@@ -0,0 +1,14 @@
+#filter substitution
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+
+# Project target.
+target=android-L
+@IDE_PROJECT_LIBRARY_SETTING@
+@IDE_PROJECT_LIBRARY_REFERENCES@
diff --git a/python/mozbuild/mozbuild/backend/templates/android_eclipse_empty_resource_directory/.not_an_android_resource b/python/mozbuild/mozbuild/backend/templates/android_eclipse_empty_resource_directory/.not_an_android_resource
new file mode 100644
index 000000000..8ffce0692
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/templates/android_eclipse_empty_resource_directory/.not_an_android_resource
@@ -0,0 +1,5 @@
+This file is named such that it is ignored by Android aapt. The file
+itself ensures that the AndroidEclipse build backend can create an
+empty res/ directory for projects that explicitly specify they have
+no resource directory. This is necessary because the Android Eclipse
+plugin requires that each project have a res/ directory.
diff --git a/python/mozbuild/mozbuild/backend/tup.py b/python/mozbuild/mozbuild/backend/tup.py
new file mode 100644
index 000000000..0f7250eb0
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/tup.py
@@ -0,0 +1,344 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+import mozpack.path as mozpath
+from mozbuild.base import MozbuildObject
+from mozbuild.backend.base import PartialBackend, HybridBackend
+from mozbuild.backend.recursivemake import RecursiveMakeBackend
+from mozbuild.shellutil import quote as shell_quote
+
+from .common import CommonBackend
+from ..frontend.data import (
+ ContextDerived,
+ Defines,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostDefines,
+ ObjdirPreprocessedFiles,
+)
+from ..util import (
+ FileAvoidWrite,
+)
+
+
+class BackendTupfile(object):
+ """Represents a generated Tupfile.
+ """
+
+ def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir):
+ self.topsrcdir = topsrcdir
+ self.srcdir = srcdir
+ self.objdir = objdir
+ self.relobjdir = mozpath.relpath(objdir, topobjdir)
+ self.environment = environment
+ self.name = mozpath.join(objdir, 'Tupfile')
+ self.rules_included = False
+ self.shell_exported = False
+ self.defines = []
+ self.host_defines = []
+ self.delayed_generated_files = []
+
+ self.fh = FileAvoidWrite(self.name, capture_diff=True)
+ self.fh.write('# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n')
+ self.fh.write('\n')
+
+ def write(self, buf):
+ self.fh.write(buf)
+
+ def include_rules(self):
+ if not self.rules_included:
+ self.write('include_rules\n')
+ self.rules_included = True
+
+ def rule(self, cmd, inputs=None, outputs=None, display=None, extra_outputs=None, check_unchanged=False):
+ inputs = inputs or []
+ outputs = outputs or []
+ display = display or ""
+ self.include_rules()
+ flags = ""
+ if check_unchanged:
+ # This flag causes tup to compare the outputs with the previous run
+ # of the command, and skip the rest of the DAG for any that are the
+ # same.
+ flags += "o"
+
+ if display:
+ caret_text = flags + ' ' + display
+ else:
+ caret_text = flags
+
+ self.write(': %(inputs)s |> %(display)s%(cmd)s |> %(outputs)s%(extra_outputs)s\n' % {
+ 'inputs': ' '.join(inputs),
+ 'display': '^%s^ ' % caret_text if caret_text else '',
+ 'cmd': ' '.join(cmd),
+ 'outputs': ' '.join(outputs),
+ 'extra_outputs': ' | ' + ' '.join(extra_outputs) if extra_outputs else '',
+ })
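+        # Example (hypothetical values): rule(cmd=['touch', 'out.txt'],
+        # inputs=['in.txt'], outputs=['out.txt'], display='TOUCH',
+        # check_unchanged=True) writes the tup rule
+        #   : in.txt |> ^o TOUCH^ touch out.txt |> out.txt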
+
+ def export_shell(self):
+ if not self.shell_exported:
+ # These are used by mach/mixin/process.py to determine the current
+ # shell.
+ for var in ('SHELL', 'MOZILLABUILD', 'COMSPEC'):
+ self.write('export %s\n' % var)
+ self.shell_exported = True
+
+ def close(self):
+ return self.fh.close()
+
+ @property
+ def diff(self):
+ return self.fh.diff
+
+
+class TupOnly(CommonBackend, PartialBackend):
+ """Backend that generates Tupfiles for the tup build system.
+ """
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ self._backend_files = {}
+ self._cmd = MozbuildObject.from_environment()
+
+ def _get_backend_file(self, relativedir):
+ objdir = mozpath.join(self.environment.topobjdir, relativedir)
+ srcdir = mozpath.join(self.environment.topsrcdir, relativedir)
+ if objdir not in self._backend_files:
+ self._backend_files[objdir] = \
+ BackendTupfile(srcdir, objdir, self.environment,
+ self.environment.topsrcdir, self.environment.topobjdir)
+ return self._backend_files[objdir]
+
+ def _get_backend_file_for(self, obj):
+ return self._get_backend_file(obj.relativedir)
+
+ def _py_action(self, action):
+ cmd = [
+ '$(PYTHON)',
+ '-m',
+ 'mozbuild.action.%s' % action,
+ ]
+ return cmd
+
+ def consume_object(self, obj):
+ """Write out build files necessary to build with tup."""
+
+ if not isinstance(obj, ContextDerived):
+ return False
+
+ consumed = CommonBackend.consume_object(self, obj)
+
+ # Even if CommonBackend acknowledged the object, we still need to let
+ # the RecursiveMake backend also handle these objects.
+ if consumed:
+ return False
+
+ backend_file = self._get_backend_file_for(obj)
+
+ if isinstance(obj, GeneratedFile):
+ # These files are already generated by make before tup runs.
+ skip_files = (
+ 'buildid.h',
+ 'source-repo.h',
+ )
+ if any(f in skip_files for f in obj.outputs):
+ # Let the RecursiveMake backend handle these.
+ return False
+
+ if 'application.ini.h' in obj.outputs:
+ # application.ini.h is a special case since we need to process
+ # the FINAL_TARGET_PP_FILES for application.ini before running
+ # the GENERATED_FILES script, and tup doesn't handle the rules
+ # out of order.
+ backend_file.delayed_generated_files.append(obj)
+ else:
+ self._process_generated_file(backend_file, obj)
+ elif isinstance(obj, Defines):
+ self._process_defines(backend_file, obj)
+ elif isinstance(obj, HostDefines):
+ self._process_defines(backend_file, obj, host=True)
+ elif isinstance(obj, FinalTargetPreprocessedFiles):
+ self._process_final_target_pp_files(obj, backend_file)
+ elif isinstance(obj, ObjdirPreprocessedFiles):
+ self._process_final_target_pp_files(obj, backend_file)
+
+ return True
+
+ def consume_finished(self):
+ CommonBackend.consume_finished(self)
+
+ for objdir, backend_file in sorted(self._backend_files.items()):
+ for obj in backend_file.delayed_generated_files:
+ self._process_generated_file(backend_file, obj)
+ with self._write_file(fh=backend_file):
+ pass
+
+ with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
+ acdefines = [name for name in self.environment.defines
+                         if name not in self.environment.non_global_defines]
+ acdefines_flags = ' '.join(['-D%s=%s' % (name,
+ shell_quote(self.environment.defines[name]))
+ for name in sorted(acdefines)])
+ # TODO: AB_CD only exists in Makefiles at the moment.
+ acdefines_flags += ' -DAB_CD=en-US'
+
+ fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
+ fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
+ fh.write('ACDEFINES = %s\n' % acdefines_flags)
+ fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (
+ os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir)
+ ))
+ fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n')
+ fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n')
+ fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
+ fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
+ fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n')
+
+ # Run 'tup init' if necessary.
+ if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")):
+ tup = self.environment.substs.get('TUP', 'tup')
+ self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init'])
+
+ def _process_generated_file(self, backend_file, obj):
+ # TODO: These are directories that don't work in the tup backend
+ # yet, because things they depend on aren't built yet.
+ skip_directories = (
+ 'layout/style/test', # HostSimplePrograms
+ 'toolkit/library', # libxul.so
+ )
+ if obj.script and obj.method and obj.relobjdir not in skip_directories:
+ backend_file.export_shell()
+ cmd = self._py_action('file_generate')
+ cmd.extend([
+ obj.script,
+ obj.method,
+ obj.outputs[0],
+ '%s.pp' % obj.outputs[0], # deps file required
+ ])
+ full_inputs = [f.full_path for f in obj.inputs]
+ cmd.extend(full_inputs)
+
+ outputs = []
+ outputs.extend(obj.outputs)
+ outputs.append('%s.pp' % obj.outputs[0])
+
+ backend_file.rule(
+ display='python {script}:{method} -> [%o]'.format(script=obj.script, method=obj.method),
+ cmd=cmd,
+ inputs=full_inputs,
+ outputs=outputs,
+ )
+
+ def _process_defines(self, backend_file, obj, host=False):
+ defines = list(obj.get_defines())
+ if defines:
+ if host:
+ backend_file.host_defines = defines
+ else:
+ backend_file.defines = defines
+
+ def _process_final_target_pp_files(self, obj, backend_file):
+ for i, (path, files) in enumerate(obj.files.walk()):
+ for f in files:
+ self._preprocess(backend_file, f.full_path,
+ destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path))
+
+ def _handle_idl_manager(self, manager):
+ backend_file = self._get_backend_file('xpcom/xpidl')
+ backend_file.export_shell()
+
+ for module, data in sorted(manager.modules.iteritems()):
+ dest, idls = data
+ cmd = [
+ '$(PYTHON_PATH)',
+ '$(PLY_INCLUDE)',
+ '-I$(IDL_PARSER_DIR)',
+ '-I$(IDL_PARSER_CACHE_DIR)',
+ '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py',
+ '--cache-dir', '$(IDL_PARSER_CACHE_DIR)',
+ '$(DIST)/idl',
+ '$(DIST)/include',
+ '$(MOZ_OBJ_ROOT)/%s/components' % dest,
+ module,
+ ]
+ cmd.extend(sorted(idls))
+
+ outputs = ['$(MOZ_OBJ_ROOT)/%s/components/%s.xpt' % (dest, module)]
+ outputs.extend(['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in sorted(idls)])
+ backend_file.rule(
+ inputs=[
+ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py',
+ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py',
+ ],
+ display='XPIDL %s' % module,
+ cmd=cmd,
+ outputs=outputs,
+ )
+
+ def _preprocess(self, backend_file, input_file, destdir=None):
+ cmd = self._py_action('preprocessor')
+ cmd.extend(backend_file.defines)
+ cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o'])
+
+ base_input = mozpath.basename(input_file)
+ if base_input.endswith('.in'):
+ base_input = mozpath.splitext(base_input)[0]
+ output = mozpath.join(destdir, base_input) if destdir else base_input
+
+ backend_file.rule(
+ inputs=[input_file],
+ display='Preprocess %o',
+ cmd=cmd,
+ outputs=[output],
+ )
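+        # e.g. (assumed input) _preprocess(backend_file, 'confvars.sh.in')
+        # strips the '.in' suffix and emits a rule that runs
+        # mozbuild.action.preprocessor with this Tupfile's defines plus
+        # $(ACDEFINES), producing 'confvars.sh' next to the Tupfile.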
+
+ def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
+ unified_ipdl_cppsrcs_mapping):
+ # TODO: This isn't implemented yet in the tup backend, but it is called
+ # by the CommonBackend.
+ pass
+
+ def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
+ webidls, expected_build_output_files,
+ global_define_files):
+ backend_file = self._get_backend_file('dom/bindings')
+ backend_file.export_shell()
+
+ for source in sorted(webidls.all_preprocessed_sources()):
+ self._preprocess(backend_file, source)
+
+ cmd = self._py_action('webidl')
+ cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings'))
+
+ # The WebIDLCodegenManager knows all of the .cpp and .h files that will
+ # be created (expected_build_output_files), but there are a few
+ # additional files that are also created by the webidl py_action.
+ outputs = [
+ '_cache/webidlyacc.py',
+ 'codegen.json',
+ 'codegen.pp',
+ 'parser.out',
+ ]
+ outputs.extend(expected_build_output_files)
+
+ backend_file.rule(
+ display='WebIDL code generation',
+ cmd=cmd,
+ inputs=webidls.all_non_static_basenames(),
+ outputs=outputs,
+ check_unchanged=True,
+ )
+
+
+class TupBackend(HybridBackend(TupOnly, RecursiveMakeBackend)):
+ def build(self, config, output, jobs, verbose):
+ status = config._run_make(directory=self.environment.topobjdir, target='tup',
+ line_handler=output.on_line, log=False, print_directory=False,
+ ensure_exit_code=False, num_jobs=jobs, silent=not verbose)
+ return status
diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py
new file mode 100644
index 000000000..86e97d13d
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/visualstudio.py
@@ -0,0 +1,582 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains a build backend for generating Visual Studio project
+# files.
+
+from __future__ import absolute_import, unicode_literals
+
+import errno
+import os
+import re
+import types
+import uuid
+
+from xml.dom import getDOMImplementation
+
+from mozpack.files import FileFinder
+
+from .common import CommonBackend
+from ..frontend.data import (
+ Defines,
+ GeneratedSources,
+ HostProgram,
+ HostSources,
+ Library,
+ LocalInclude,
+ Program,
+ Sources,
+ UnifiedSources,
+)
+from mozbuild.base import ExecutionSummary
+
+
+MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
+
+def get_id(name):
+ return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
+
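+# Note (illustrative): uuid5 hashes the name under a fixed namespace, so
+# get_id() is deterministic -- e.g. get_id('target_full') yields the same
+# GUID on every backend run, keeping regenerated projects stable on disk.
+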
+def visual_studio_product_to_solution_version(version):
+ if version == '2015':
+ return '12.00', '14'
+ else:
+ raise Exception('Unknown version seen: %s' % version)
+
+def visual_studio_product_to_platform_toolset_version(version):
+ if version == '2015':
+ return 'v140'
+ else:
+ raise Exception('Unknown version seen: %s' % version)
+
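+# e.g. (sketch) visual_studio_product_to_platform_toolset_version('2015')
+# returns 'v140', the platform toolset that ships with Visual Studio 2015.
+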
+class VisualStudioBackend(CommonBackend):
+ """Generate Visual Studio project files.
+
+ This backend is used to produce Visual Studio projects and a solution
+ to foster developing Firefox with Visual Studio.
+
+ This backend is currently considered experimental. There are many things
+ not optimal about how it works.
+ """
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ # These should eventually evolve into parameters.
+ self._out_dir = os.path.join(self.environment.topobjdir, 'msvc')
+ self._projsubdir = 'projects'
+
+ self._version = self.environment.substs.get('MSVS_VERSION', '2015')
+
+ self._paths_to_sources = {}
+ self._paths_to_includes = {}
+ self._paths_to_defines = {}
+ self._paths_to_configs = {}
+ self._libs_to_paths = {}
+ self._progs_to_paths = {}
+
+ def summary(self):
+ return ExecutionSummary(
+ 'VisualStudio backend executed in {execution_time:.2f}s\n'
+ 'Generated Visual Studio solution at {path:s}',
+ execution_time=self._execution_time,
+ path=os.path.join(self._out_dir, 'mozilla.sln'))
+
+ def consume_object(self, obj):
+ reldir = getattr(obj, 'relativedir', None)
+
+ if hasattr(obj, 'config') and reldir not in self._paths_to_configs:
+ self._paths_to_configs[reldir] = obj.config
+
+ if isinstance(obj, Sources):
+ self._add_sources(reldir, obj)
+
+ elif isinstance(obj, HostSources):
+ self._add_sources(reldir, obj)
+
+ elif isinstance(obj, GeneratedSources):
+ self._add_sources(reldir, obj)
+
+ elif isinstance(obj, UnifiedSources):
+ # XXX we should be letting CommonBackend.consume_object call this
+ # for us instead.
+            self._process_unified_sources(obj)
+
+ elif isinstance(obj, Library):
+ self._libs_to_paths[obj.basename] = reldir
+
+ elif isinstance(obj, Program) or isinstance(obj, HostProgram):
+ self._progs_to_paths[obj.program] = reldir
+
+ elif isinstance(obj, Defines):
+ self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
+
+ elif isinstance(obj, LocalInclude):
+ includes = self._paths_to_includes.setdefault(reldir, [])
+ includes.append(obj.path.full_path)
+
+ # Just acknowledge everything.
+ return True
+
+ def _add_sources(self, reldir, obj):
+ s = self._paths_to_sources.setdefault(reldir, set())
+ s.update(obj.files)
+
+ def _process_unified_sources(self, obj):
+ reldir = getattr(obj, 'relativedir', None)
+
+ s = self._paths_to_sources.setdefault(reldir, set())
+ s.update(obj.files)
+
+ def consume_finished(self):
+ out_dir = self._out_dir
+ out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
+
+ projects = self._write_projects_for_sources(self._libs_to_paths,
+ "library", out_proj_dir)
+ projects.update(self._write_projects_for_sources(self._progs_to_paths,
+ "binary", out_proj_dir))
+
+ # Generate projects that can be used to build common targets.
+ for target in ('export', 'binaries', 'tools', 'full'):
+ basename = 'target_%s' % target
+ command = '$(SolutionDir)\\mach.bat build'
+ if target != 'full':
+ command += ' %s' % target
+
+ project_id = self._write_vs_project(out_proj_dir, basename, target,
+ build_command=command,
+ clean_command='$(SolutionDir)\\mach.bat build clean')
+
+ projects[basename] = (project_id, basename, target)
+
+ # A project that can be used to regenerate the visual studio projects.
+ basename = 'target_vs'
+ project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
+ build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
+ projects[basename] = (project_id, basename, 'visual-studio')
+
+ # Write out a shared property file with common variables.
+ props_path = os.path.join(out_proj_dir, 'mozilla.props')
+ with self._write_file(props_path, mode='rb') as fh:
+ self._write_props(fh)
+
+ # Generate some wrapper scripts that allow us to invoke mach inside
+ # a MozillaBuild-like environment. We currently only use the batch
+ # script. We'd like to use the PowerShell script. However, it seems
+ # to buffer output from within Visual Studio (surely this is
+ # configurable) and the default execution policy of PowerShell doesn't
+ # allow custom scripts to be executed.
+ with self._write_file(os.path.join(out_dir, 'mach.bat'), mode='rb') as fh:
+ self._write_mach_batch(fh)
+
+ with self._write_file(os.path.join(out_dir, 'mach.ps1'), mode='rb') as fh:
+ self._write_mach_powershell(fh)
+
+ # Write out a solution file to tie it all together.
+ solution_path = os.path.join(out_dir, 'mozilla.sln')
+ with self._write_file(solution_path, mode='rb') as fh:
+ self._write_solution(fh, projects)
+
+ def _write_projects_for_sources(self, sources, prefix, out_dir):
+ projects = {}
+ for item, path in sorted(sources.items()):
+ config = self._paths_to_configs.get(path, None)
+ sources = self._paths_to_sources.get(path, set())
+ sources = set(os.path.join('$(TopSrcDir)', path, s) for s in sources)
+ sources = set(os.path.normpath(s) for s in sources)
+
+ finder = FileFinder(os.path.join(self.environment.topsrcdir, path),
+ find_executables=False)
+
+ headers = [t[0] for t in finder.find('*.h')]
+ headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
+ path, f)) for f in headers]
+
+ includes = [
+ os.path.join('$(TopSrcDir)', path),
+ os.path.join('$(TopObjDir)', path),
+ ]
+ includes.extend(self._paths_to_includes.get(path, []))
+ includes.append('$(TopObjDir)\\dist\\include\\nss')
+ includes.append('$(TopObjDir)\\dist\\include')
+
+ for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
+ 'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
+ if not config:
+ break
+
+ args = config.substs.get(v, [])
+
+ for i, arg in enumerate(args):
+ if arg.startswith('-I'):
+ includes.append(os.path.normpath(arg[2:]))
+
+ # Pull in system defaults.
+ includes.append('$(DefaultIncludes)')
+
+ includes = [os.path.normpath(i) for i in includes]
+
+ defines = []
+ for k, v in self._paths_to_defines.get(path, {}).items():
+ if v is True:
+ defines.append(k)
+ else:
+ defines.append('%s=%s' % (k, v))
+
+            debugger = None
+ if prefix == 'binary':
+ if item.startswith(self.environment.substs['MOZ_APP_NAME']):
+ debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '-no-remote')
+ else:
+ debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '')
+
+ basename = '%s_%s' % (prefix, item)
+
+ project_id = self._write_vs_project(out_dir, basename, item,
+ includes=includes,
+ forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
+ defines=defines,
+ headers=headers,
+ sources=sources,
+ debugger=debugger)
+
+ projects[basename] = (project_id, basename, item)
+
+ return projects
+
+ def _write_solution(self, fh, projects):
+ # Visual Studio appears to write out its current version in the
+ # solution file. Instead of trying to figure out what version it will
+ # write, try to parse the version out of the existing file and use it
+ # verbatim.
+ vs_version = None
+ try:
+ with open(fh.name, 'rb') as sfh:
+ for line in sfh:
+ if line.startswith(b'VisualStudioVersion = '):
+ vs_version = line.split(b' = ', 1)[1].strip()
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ format_version, comment_version = visual_studio_product_to_solution_version(self._version)
+ # This is a Visual C++ Project type.
+ project_type = '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+
+ # Visual Studio seems to require this header.
+ fh.write('Microsoft Visual Studio Solution File, Format Version %s\r\n' %
+ format_version)
+ fh.write('# Visual Studio %s\r\n' % comment_version)
+
+ if vs_version:
+ fh.write('VisualStudioVersion = %s\r\n' % vs_version)
+
+ # Corresponds to VS2013.
+ fh.write('MinimumVisualStudioVersion = 12.0.31101.0\r\n')
+
+ binaries_id = projects['target_binaries'][0]
+
+ # Write out entries for each project.
+ for key in sorted(projects):
+ project_id, basename, name = projects[key]
+ path = os.path.join(self._projsubdir, '%s.vcxproj' % basename)
+
+ fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
+ project_type, name, path, project_id))
+
+ # Make all libraries depend on the binaries target.
+ if key.startswith('library_'):
+ fh.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
+ fh.write('\t\t{%s} = {%s}\r\n' % (binaries_id, binaries_id))
+ fh.write('\tEndProjectSection\r\n')
+
+ fh.write('EndProject\r\n')
+
+ # Write out solution folders for organizing things.
+
+ # This is the UUID you use for solution folders.
+ container_id = '2150E333-8FDC-42A3-9474-1A3956D46DE8'
+
+ def write_container(desc):
+ cid = get_id(desc.encode('utf-8'))
+ fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
+ container_id, desc, desc, cid))
+ fh.write('EndProject\r\n')
+
+ return cid
+
+ library_id = write_container('Libraries')
+ target_id = write_container('Build Targets')
+ binary_id = write_container('Binaries')
+
+ fh.write('Global\r\n')
+
+ # Make every project a member of our one configuration.
+ fh.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
+ fh.write('\t\tBuild|Win32 = Build|Win32\r\n')
+ fh.write('\tEndGlobalSection\r\n')
+
+ # Set every project's active configuration to the one configuration and
+ # set up the default build project.
+ fh.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
+ for name, project in sorted(projects.items()):
+ fh.write('\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n' % project[0])
+
+ # Only build the full build target by default.
+ # It's important we don't write multiple entries here because they
+ # conflict!
+ if name == 'target_full':
+ fh.write('\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n' % project[0])
+
+ fh.write('\tEndGlobalSection\r\n')
+
+ fh.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
+ fh.write('\t\tHideSolutionNode = FALSE\r\n')
+ fh.write('\tEndGlobalSection\r\n')
+
+ # Associate projects with containers.
+ fh.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
+ for key in sorted(projects):
+ project_id = projects[key][0]
+
+ if key.startswith('library_'):
+ container_id = library_id
+ elif key.startswith('target_'):
+ container_id = target_id
+ elif key.startswith('binary_'):
+ container_id = binary_id
+ else:
+ raise Exception('Unknown project type: %s' % key)
+
+ fh.write('\t\t{%s} = {%s}\r\n' % (project_id, container_id))
+ fh.write('\tEndGlobalSection\r\n')
+
+ fh.write('EndGlobal\r\n')
+
+ def _write_props(self, fh):
+ impl = getDOMImplementation()
+ doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
+
+ project = doc.documentElement
+ project.setAttribute('xmlns', MSBUILD_NAMESPACE)
+ project.setAttribute('ToolsVersion', '4.0')
+
+ ig = project.appendChild(doc.createElement('ImportGroup'))
+ ig.setAttribute('Label', 'PropertySheets')
+
+ pg = project.appendChild(doc.createElement('PropertyGroup'))
+ pg.setAttribute('Label', 'UserMacros')
+
+ ig = project.appendChild(doc.createElement('ItemGroup'))
+
+ def add_var(k, v):
+ e = pg.appendChild(doc.createElement(k))
+ e.appendChild(doc.createTextNode(v))
+
+ e = ig.appendChild(doc.createElement('BuildMacro'))
+ e.setAttribute('Include', k)
+
+ e = e.appendChild(doc.createElement('Value'))
+ e.appendChild(doc.createTextNode('$(%s)' % k))
+
+ add_var('TopObjDir', os.path.normpath(self.environment.topobjdir))
+ add_var('TopSrcDir', os.path.normpath(self.environment.topsrcdir))
+ add_var('PYTHON', '$(TopObjDir)\\_virtualenv\\Scripts\\python.exe')
+ add_var('MACH', '$(TopSrcDir)\\mach')
+
+ # From MozillaBuild.
+ add_var('DefaultIncludes', os.environ.get('INCLUDE', ''))
+
+ fh.write(b'\xef\xbb\xbf')
+ doc.writexml(fh, addindent=' ', newl='\r\n')
+
+ def _relevant_environment_variables(self):
+ # Write out the environment variables, presumably coming from
+ # MozillaBuild.
+ for k, v in sorted(os.environ.items()):
+ if not re.match('^[a-zA-Z0-9_]+$', k):
+ continue
+
+ if k in ('OLDPWD', 'PS1'):
+ continue
+
+ if k.startswith('_'):
+ continue
+
+ yield k, v
+
+ yield 'TOPSRCDIR', self.environment.topsrcdir
+ yield 'TOPOBJDIR', self.environment.topobjdir
+
+ def _write_mach_powershell(self, fh):
+ for k, v in self._relevant_environment_variables():
+ fh.write(b'$env:%s = "%s"\r\n' % (k, v))
+
+ relpath = os.path.relpath(self.environment.topsrcdir,
+ self.environment.topobjdir).replace('\\', '/')
+
+ fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
+ fh.write(b'$bashargs = $bashargs + $args\r\n')
+
+ fh.write(b"$expanded = $bashargs -join ' '\r\n")
+ fh.write(b'$procargs = "-c", $expanded\r\n')
+
+ fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
+ b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
+ b'-ArgumentList $procargs '
+ b'-Wait -NoNewWindow\r\n')
+
+ def _write_mach_batch(self, fh):
+ """Write out a batch script that builds the tree.
+
+ The script "bootstraps" into the MozillaBuild environment by setting
+ the environment variables that are active in the current MozillaBuild
+ environment. Then, it builds the tree.
+ """
+ for k, v in self._relevant_environment_variables():
+ fh.write(b'SET "%s=%s"\r\n' % (k, v))
+
+ fh.write(b'cd %TOPOBJDIR%\r\n')
+
+ # We need to convert Windows-native paths to msys paths. Easiest way is
+ # relative paths, since munging c:\ to /c/ is slightly more
+ # complicated.
+ relpath = os.path.relpath(self.environment.topsrcdir,
+ self.environment.topobjdir).replace('\\', '/')
+
+ # We go through mach because it has the logic for choosing the most
+ # appropriate build tool.
+ fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
+ b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
+
+ def _write_vs_project(self, out_dir, basename, name, **kwargs):
+ root = '%s.vcxproj' % basename
+ project_id = get_id(basename.encode('utf-8'))
+
+ with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
+ project_id, name = VisualStudioBackend.write_vs_project(fh,
+ self._version, project_id, name, **kwargs)
+
+ with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
+ fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
+ fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
+ MSBUILD_NAMESPACE)
+ fh.write('</Project>\r\n')
+
+ return project_id
+
+ @staticmethod
+ def write_vs_project(fh, version, project_id, name, includes=[],
+ forced_includes=[], defines=[],
+ build_command=None, clean_command=None,
+ debugger=None, headers=[], sources=[]):
+
+ impl = getDOMImplementation()
+ doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
+
+ project = doc.documentElement
+ project.setAttribute('DefaultTargets', 'Build')
+ project.setAttribute('ToolsVersion', '4.0')
+ project.setAttribute('xmlns', MSBUILD_NAMESPACE)
+
+ ig = project.appendChild(doc.createElement('ItemGroup'))
+ ig.setAttribute('Label', 'ProjectConfigurations')
+
+ pc = ig.appendChild(doc.createElement('ProjectConfiguration'))
+ pc.setAttribute('Include', 'Build|Win32')
+
+ c = pc.appendChild(doc.createElement('Configuration'))
+ c.appendChild(doc.createTextNode('Build'))
+
+ p = pc.appendChild(doc.createElement('Platform'))
+ p.appendChild(doc.createTextNode('Win32'))
+
+ pg = project.appendChild(doc.createElement('PropertyGroup'))
+ pg.setAttribute('Label', 'Globals')
+
+ n = pg.appendChild(doc.createElement('ProjectName'))
+ n.appendChild(doc.createTextNode(name))
+
+ k = pg.appendChild(doc.createElement('Keyword'))
+ k.appendChild(doc.createTextNode('MakeFileProj'))
+
+ g = pg.appendChild(doc.createElement('ProjectGuid'))
+ g.appendChild(doc.createTextNode('{%s}' % project_id))
+
+ rn = pg.appendChild(doc.createElement('RootNamespace'))
+ rn.appendChild(doc.createTextNode('mozilla'))
+
+ pts = pg.appendChild(doc.createElement('PlatformToolset'))
+ pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
+
+ i = project.appendChild(doc.createElement('Import'))
+ i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
+
+ ig = project.appendChild(doc.createElement('ImportGroup'))
+ ig.setAttribute('Label', 'ExtensionTargets')
+
+ ig = project.appendChild(doc.createElement('ImportGroup'))
+ ig.setAttribute('Label', 'ExtensionSettings')
+
+ ig = project.appendChild(doc.createElement('ImportGroup'))
+ ig.setAttribute('Label', 'PropertySheets')
+ i = ig.appendChild(doc.createElement('Import'))
+ i.setAttribute('Project', 'mozilla.props')
+
+ pg = project.appendChild(doc.createElement('PropertyGroup'))
+ pg.setAttribute('Label', 'Configuration')
+ ct = pg.appendChild(doc.createElement('ConfigurationType'))
+ ct.appendChild(doc.createTextNode('Makefile'))
+
+ pg = project.appendChild(doc.createElement('PropertyGroup'))
+ pg.setAttribute('Condition', "'$(Configuration)|$(Platform)'=='Build|Win32'")
+
+ if build_command:
+ n = pg.appendChild(doc.createElement('NMakeBuildCommandLine'))
+ n.appendChild(doc.createTextNode(build_command))
+
+ if clean_command:
+ n = pg.appendChild(doc.createElement('NMakeCleanCommandLine'))
+ n.appendChild(doc.createTextNode(clean_command))
+
+ if includes:
+ n = pg.appendChild(doc.createElement('NMakeIncludeSearchPath'))
+ n.appendChild(doc.createTextNode(';'.join(includes)))
+
+ if forced_includes:
+ n = pg.appendChild(doc.createElement('NMakeForcedIncludes'))
+ n.appendChild(doc.createTextNode(';'.join(forced_includes)))
+
+ if defines:
+ n = pg.appendChild(doc.createElement('NMakePreprocessorDefinitions'))
+ n.appendChild(doc.createTextNode(';'.join(defines)))
+
+ if debugger:
+ n = pg.appendChild(doc.createElement('LocalDebuggerCommand'))
+ n.appendChild(doc.createTextNode(debugger[0]))
+
+ n = pg.appendChild(doc.createElement('LocalDebuggerCommandArguments'))
+ n.appendChild(doc.createTextNode(debugger[1]))
+
+ i = project.appendChild(doc.createElement('Import'))
+ i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.props')
+
+ i = project.appendChild(doc.createElement('Import'))
+ i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.targets')
+
+ # Now add files to the project.
+ ig = project.appendChild(doc.createElement('ItemGroup'))
+ for header in sorted(headers or []):
+ n = ig.appendChild(doc.createElement('ClInclude'))
+ n.setAttribute('Include', header)
+
+ ig = project.appendChild(doc.createElement('ItemGroup'))
+ for source in sorted(sources or []):
+ n = ig.appendChild(doc.createElement('ClCompile'))
+ n.setAttribute('Include', source)
+
+ fh.write(b'\xef\xbb\xbf')
+ doc.writexml(fh, addindent=' ', newl='\r\n')
+
+ return project_id, name
diff --git a/python/mozbuild/mozbuild/base.py b/python/mozbuild/mozbuild/base.py
new file mode 100644
index 000000000..a50b8ff89
--- /dev/null
+++ b/python/mozbuild/mozbuild/base.py
@@ -0,0 +1,850 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import logging
+import mozpack.path as mozpath
+import multiprocessing
+import os
+import subprocess
+import sys
+import which
+
+from mach.mixin.logging import LoggingMixin
+from mach.mixin.process import ProcessExecutionMixin
+from mozversioncontrol import get_repository_object
+
+from .backend.configenvironment import ConfigEnvironment
+from .controller.clobber import Clobberer
+from .mozconfig import (
+ MozconfigFindException,
+ MozconfigLoadException,
+ MozconfigLoader,
+)
+from .util import memoized_property
+from .virtualenv import VirtualenvManager
+
+
+_config_guess_output = []
+
+
+def ancestors(path):
+ """Emit the parent directories of a path."""
+ while path:
+ yield path
+ newpath = os.path.dirname(path)
+ if newpath == path:
+ break
+ path = newpath
+
+def samepath(path1, path2):
+ if hasattr(os.path, 'samefile'):
+ return os.path.samefile(path1, path2)
+ return os.path.normcase(os.path.realpath(path1)) == \
+ os.path.normcase(os.path.realpath(path2))
+
+class BadEnvironmentException(Exception):
+ """Base class for errors raised when the build environment is not sane."""
+
+
+class BuildEnvironmentNotFoundException(BadEnvironmentException):
+ """Raised when we could not find a build environment."""
+
+
+class ObjdirMismatchException(BadEnvironmentException):
+ """Raised when the current dir is an objdir and doesn't match the mozconfig."""
+ def __init__(self, objdir1, objdir2):
+ self.objdir1 = objdir1
+ self.objdir2 = objdir2
+
+ def __str__(self):
+ return "Objdir mismatch: %s != %s" % (self.objdir1, self.objdir2)
+
+
+class MozbuildObject(ProcessExecutionMixin):
+ """Base class providing basic functionality useful to many modules.
+
+ Modules in this package typically require common functionality such as
+ accessing the current config, getting the location of the source directory,
+ running processes, etc. This class provides that functionality. Other
+ modules can inherit from this class to obtain this functionality easily.
+ """
+ def __init__(self, topsrcdir, settings, log_manager, topobjdir=None,
+ mozconfig=MozconfigLoader.AUTODETECT):
+ """Create a new Mozbuild object instance.
+
+ Instances are bound to a source directory, a ConfigSettings instance,
+ and a LogManager instance. The topobjdir may be passed in as well. If
+ it isn't, it will be calculated from the active mozconfig.
+ """
+ self.topsrcdir = mozpath.normsep(topsrcdir)
+ self.settings = settings
+
+ self.populate_logger()
+ self.log_manager = log_manager
+
+ self._make = None
+ self._topobjdir = mozpath.normsep(topobjdir) if topobjdir else topobjdir
+ self._mozconfig = mozconfig
+ self._config_environment = None
+ self._virtualenv_manager = None
+
+ @classmethod
+ def from_environment(cls, cwd=None, detect_virtualenv_mozinfo=True):
+ """Create a MozbuildObject by detecting the proper one from the env.
+
+ This examines environment state like the current working directory and
+ creates a MozbuildObject from the found source directory, mozconfig, etc.
+
+ The role of this function is to identify a topsrcdir, topobjdir, and
+ mozconfig file.
+
+ If the current working directory is inside a known objdir, we always
+ use the topsrcdir and mozconfig associated with that objdir.
+
+ If the current working directory is inside a known srcdir, we use that
+ topsrcdir and look for mozconfigs using the default mechanism, which
+ looks inside environment variables.
+
+ If the current Python interpreter is running from a virtualenv inside
+ an objdir, we use that as our objdir.
+
+ If we're not inside a srcdir or objdir, an exception is raised.
+
+ detect_virtualenv_mozinfo determines whether we should look for a
+ mozinfo.json file relative to the virtualenv directory. This was
+ added to facilitate testing. Callers likely shouldn't change the
+ default.
+ """
+
+ cwd = cwd or os.getcwd()
+ topsrcdir = None
+ topobjdir = None
+ mozconfig = MozconfigLoader.AUTODETECT
+
+ def load_mozinfo(path):
+ with open(path, 'rt') as fh:
+ info = json.load(fh)
+ topsrcdir = info.get('topsrcdir')
+ topobjdir = os.path.dirname(path)
+ mozconfig = info.get('mozconfig')
+ return topsrcdir, topobjdir, mozconfig
+
+ for dir_path in ancestors(cwd):
+ # If we find a mozinfo.json, we are in the objdir.
+ mozinfo_path = os.path.join(dir_path, 'mozinfo.json')
+ if os.path.isfile(mozinfo_path):
+ topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+ break
+
+ # We choose an arbitrary file as an indicator that this is a
+ # srcdir. We go with ourself because why not!
+ our_path = os.path.join(dir_path, 'python', 'mozbuild', 'mozbuild', 'base.py')
+ if os.path.isfile(our_path):
+ topsrcdir = dir_path
+ break
+
+ # See if we're running from a Python virtualenv that's inside an objdir.
+ mozinfo_path = os.path.join(os.path.dirname(sys.prefix), "mozinfo.json")
+ if detect_virtualenv_mozinfo and os.path.isfile(mozinfo_path):
+ topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+
+ # If we were successful, we're only guaranteed to find a topsrcdir. If
+ # we couldn't find that, there's nothing we can do.
+ if not topsrcdir:
+ raise BuildEnvironmentNotFoundException(
+ 'Could not find Mozilla source tree or build environment.')
+
+ topsrcdir = mozpath.normsep(topsrcdir)
+ if topobjdir:
+ topobjdir = mozpath.normsep(os.path.normpath(topobjdir))
+
+ if topsrcdir == topobjdir:
+ raise BadEnvironmentException('The object directory appears '
+ 'to be the same as your source directory (%s). This build '
+ 'configuration is not supported.' % topsrcdir)
+
+ # If we can't resolve topobjdir, oh well. We'll figure out when we need
+ # one.
+ return cls(topsrcdir, None, None, topobjdir=topobjdir,
+ mozconfig=mozconfig)
+
+ def resolve_mozconfig_topobjdir(self, default=None):
+ topobjdir = self.mozconfig['topobjdir'] or default
+ if not topobjdir:
+ return None
+
+ if '@CONFIG_GUESS@' in topobjdir:
+ topobjdir = topobjdir.replace('@CONFIG_GUESS@',
+ self.resolve_config_guess())
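+ # Illustrative: a mozconfig line such as
+ #   mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/obj-@CONFIG_GUESS@
+ # typically expands here to <topsrcdir>/obj-x86_64-pc-linux-gnu; the exact
+ # triplet comes from build/autoconf/config.guess.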
+
+ if not os.path.isabs(topobjdir):
+ topobjdir = os.path.abspath(os.path.join(self.topsrcdir, topobjdir))
+
+ return mozpath.normsep(os.path.normpath(topobjdir))
+
+ @property
+ def topobjdir(self):
+ if self._topobjdir is None:
+ self._topobjdir = self.resolve_mozconfig_topobjdir(
+ default='obj-@CONFIG_GUESS@')
+
+ return self._topobjdir
+
+ @property
+ def virtualenv_manager(self):
+ if self._virtualenv_manager is None:
+ self._virtualenv_manager = VirtualenvManager(self.topsrcdir,
+ self.topobjdir, os.path.join(self.topobjdir, '_virtualenv'),
+ sys.stdout, os.path.join(self.topsrcdir, 'build',
+ 'virtualenv_packages.txt'))
+
+ return self._virtualenv_manager
+
+ @property
+ def mozconfig(self):
+ """Returns information about the current mozconfig file.
+
+ This is a dict, as returned by MozconfigLoader.read_mozconfig().
+ """
+ if not isinstance(self._mozconfig, dict):
+ loader = MozconfigLoader(self.topsrcdir)
+ self._mozconfig = loader.read_mozconfig(path=self._mozconfig,
+ moz_build_app=os.environ.get('MOZ_CURRENT_PROJECT'))
+
+ return self._mozconfig
+
+ @property
+ def config_environment(self):
+ """Returns the ConfigEnvironment for the current build configuration.
+
+ This property is only available once configure has executed.
+
+ If configure's output is not available, this will raise.
+ """
+ if self._config_environment:
+ return self._config_environment
+
+ config_status = os.path.join(self.topobjdir, 'config.status')
+
+ if not os.path.exists(config_status):
+ raise BuildEnvironmentNotFoundException('config.status not available. Run configure.')
+
+ self._config_environment = \
+ ConfigEnvironment.from_config_status(config_status)
+
+ return self._config_environment
+
+ @property
+ def defines(self):
+ return self.config_environment.defines
+
+ @property
+ def non_global_defines(self):
+ return self.config_environment.non_global_defines
+
+ @property
+ def substs(self):
+ return self.config_environment.substs
+
+ @property
+ def distdir(self):
+ return os.path.join(self.topobjdir, 'dist')
+
+ @property
+ def bindir(self):
+ return os.path.join(self.topobjdir, 'dist', 'bin')
+
+ @property
+ def includedir(self):
+ return os.path.join(self.topobjdir, 'dist', 'include')
+
+ @property
+ def statedir(self):
+ return os.path.join(self.topobjdir, '.mozbuild')
+
+ @memoized_property
+ def extra_environment_variables(self):
+ '''Some extra environment variables are stored in .mozconfig.mk.
+ This function extracts and returns them.'''
+ from mozbuild import shellutil
+ mozconfig_mk = os.path.join(self.topobjdir, '.mozconfig.mk')
+ env = {}
+ with open(mozconfig_mk) as fh:
+ for line in fh:
+ if line.startswith('export '):
+ exports = shellutil.split(line)[1:]
+ for e in exports:
+ if '=' in e:
+ key, value = e.split('=', 1)
+ env[key] = value
+ return env
+
+ @memoized_property
+ def repository(self):
+ '''Get a `mozversioncontrol.Repository` object for the
+ top source directory.'''
+ return get_repository_object(self.topsrcdir)
+
+ def is_clobber_needed(self):
+ if not os.path.exists(self.topobjdir):
+ return False
+ return Clobberer(self.topsrcdir, self.topobjdir).clobber_needed()
+
+ def get_binary_path(self, what='app', validate_exists=True, where='default'):
+ """Obtain the path to a compiled binary for this build configuration.
+
+ The what argument is the program or tool being sought after. See the
+ code implementation for supported values.
+
+ If validate_exists is True (the default), we will ensure the found path
+ exists before returning, raising an exception if it doesn't.
+
+ If where is 'staged-package', we will return the path to the binary in
+ the package staging directory.
+
+ If no arguments are specified, we will return the main binary for the
+ configured XUL application.
+ """
+
+ if where not in ('default', 'staged-package'):
+ raise Exception("Don't know location %s" % where)
+
+ substs = self.substs
+
+ stem = self.distdir
+ if where == 'staged-package':
+ stem = os.path.join(stem, substs['MOZ_APP_NAME'])
+
+ if substs['OS_ARCH'] == 'Darwin':
+ if substs['MOZ_BUILD_APP'] == 'xulrunner':
+ stem = os.path.join(stem, 'XUL.framework')
+ else:
+ stem = os.path.join(stem, substs['MOZ_MACBUNDLE_NAME'], 'Contents',
+ 'MacOS')
+ elif where == 'default':
+ stem = os.path.join(stem, 'bin')
+
+ leaf = None
+
+ leaf = (substs['MOZ_APP_NAME'] if what == 'app' else what) + substs['BIN_SUFFIX']
+ path = os.path.join(stem, leaf)
+
+ if validate_exists and not os.path.exists(path):
+ raise Exception('Binary expected at %s does not exist.' % path)
+
+ return path
+
+ def resolve_config_guess(self):
+ make_extra = self.mozconfig['make_extra'] or []
+ make_extra = dict(m.split('=', 1) for m in make_extra)
+
+ config_guess = make_extra.get('CONFIG_GUESS', None)
+
+ if config_guess:
+ return config_guess
+
+ # config.guess results should be constant for process lifetime. Cache
+ # it.
+ if _config_guess_output:
+ return _config_guess_output[0]
+
+ p = os.path.join(self.topsrcdir, 'build', 'autoconf', 'config.guess')
+
+ # This is a little kludgy. We need access to the normalize_command
+ # function. However, that's a method of a mach mixin, so we need a
+ # class instance. Ideally the function should be accessible as a
+ # standalone function.
+ o = MozbuildObject(self.topsrcdir, None, None, None)
+ args = o._normalize_command([p], True)
+
+ _config_guess_output.append(
+ subprocess.check_output(args, cwd=self.topsrcdir).strip())
+ return _config_guess_output[0]
+
+ def notify(self, msg):
+ """Show a desktop notification with the supplied message
+
+ On Linux and Mac, this will show a desktop notification with the message,
+ but on Windows we can only flash the screen.
+ """
+ moz_nospam = os.environ.get('MOZ_NOSPAM')
+ if moz_nospam:
+ return
+
+ try:
+ if sys.platform.startswith('darwin'):
+ try:
+ notifier = which.which('terminal-notifier')
+ except which.WhichError:
+ raise Exception('Install terminal-notifier to get '
+ 'a notification when the build finishes.')
+ self.run_process([notifier, '-title',
+ 'Mozilla Build System', '-group', 'mozbuild',
+ '-message', msg], ensure_exit_code=False)
+ elif sys.platform.startswith('linux'):
+ try:
+ import dbus
+ except ImportError:
+ raise Exception('Install the python dbus module to '
+ 'get a notification when the build finishes.')
+ bus = dbus.SessionBus()
+ notify = bus.get_object('org.freedesktop.Notifications',
+ '/org/freedesktop/Notifications')
+ method = notify.get_dbus_method('Notify',
+ 'org.freedesktop.Notifications')
+ method('Mozilla Build System', 0, '', msg, '', [], [], -1)
+ elif sys.platform.startswith('win'):
+ from ctypes import Structure, windll, POINTER, sizeof
+ from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT
+ class FLASHWINDOW(Structure):
+ _fields_ = [("cbSize", UINT),
+ ("hwnd", HANDLE),
+ ("dwFlags", DWORD),
+ ("uCount", UINT),
+ ("dwTimeout", DWORD)]
+ FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
+ FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
+ FLASHW_CAPTION = 0x01
+ FLASHW_TRAY = 0x02
+ FLASHW_TIMERNOFG = 0x0C
+
+ # GetConsoleWindow returns NULL if no console is attached. We
+ # can't flash a window that doesn't exist.
+ console = windll.kernel32.GetConsoleWindow()
+ if not console:
+ return
+
+ params = FLASHWINDOW(sizeof(FLASHWINDOW),
+ console,
+ FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
+ FlashWindowEx(params)
+ except Exception as e:
+ self.log(logging.WARNING, 'notifier-failed', {'error':
+ e.message}, 'Notification center failed: {error}')
+
+ def _ensure_objdir_exists(self):
+ if os.path.isdir(self.statedir):
+ return
+
+ os.makedirs(self.statedir)
+
+ def _ensure_state_subdir_exists(self, subdir):
+ path = os.path.join(self.statedir, subdir)
+
+ if os.path.isdir(path):
+ return
+
+ os.makedirs(path)
+
+ def _get_state_filename(self, filename, subdir=None):
+ path = self.statedir
+
+ if subdir:
+ path = os.path.join(path, subdir)
+
+ return os.path.join(path, filename)
+
+ def _wrap_path_argument(self, arg):
+ return PathArgument(arg, self.topsrcdir, self.topobjdir)
+
+ def _run_make(self, directory=None, filename=None, target=None, log=True,
+ srcdir=False, allow_parallel=True, line_handler=None,
+ append_env=None, explicit_env=None, ignore_errors=False,
+ ensure_exit_code=0, silent=True, print_directory=True,
+ pass_thru=False, num_jobs=0):
+ """Invoke make.
+
+ directory -- Relative directory to look for Makefile in.
+ filename -- Explicit makefile to run.
+ target -- Makefile target(s) to make. Can be a string or iterable of
+ strings.
+ srcdir -- If True, invoke make from the source directory tree.
+ Otherwise, make will be invoked from the object directory.
+ silent -- If True (the default), run make in silent mode.
+ print_directory -- If True (the default), have make print directories
+ while doing traversal.
+ """
+ self._ensure_objdir_exists()
+
+ args = self._make_path()
+
+ if directory:
+ args.extend(['-C', directory.replace(os.sep, '/')])
+
+ if filename:
+ args.extend(['-f', filename])
+
+ if num_jobs == 0 and self.mozconfig['make_flags']:
+ flags = iter(self.mozconfig['make_flags'])
+ for flag in flags:
+ if flag == '-j':
+ try:
+ flag = flags.next()
+ except StopIteration:
+ break
+ try:
+ num_jobs = int(flag)
+ except ValueError:
+ args.append(flag)
+ elif flag.startswith('-j'):
+ try:
+ num_jobs = int(flag[2:])
+ except (ValueError, IndexError):
+ break
+ else:
+ args.append(flag)
+
+ if allow_parallel:
+ if num_jobs > 0:
+ args.append('-j%d' % num_jobs)
+ else:
+ args.append('-j%d' % multiprocessing.cpu_count())
+ elif num_jobs > 0:
+ args.append('MOZ_PARALLEL_BUILD=%d' % num_jobs)
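+
+ # Illustrative: MOZ_MAKE_FLAGS="-j8 -s" in the mozconfig yields num_jobs=8
+ # with "-s" passed through; with no -j flag, the allow_parallel branch
+ # falls back to multiprocessing.cpu_count().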
+
+ if ignore_errors:
+ args.append('-k')
+
+ if silent:
+ args.append('-s')
+
+ # Print entering/leaving directory messages. Some consumers look at
+ # these to measure progress.
+ if print_directory:
+ args.append('-w')
+
+ if isinstance(target, list):
+ args.extend(target)
+ elif target:
+ args.append(target)
+
+ fn = self._run_command_in_objdir
+
+ if srcdir:
+ fn = self._run_command_in_srcdir
+
+ append_env = dict(append_env or ())
+ append_env[b'MACH'] = '1'
+
+ params = {
+ 'args': args,
+ 'line_handler': line_handler,
+ 'append_env': append_env,
+ 'explicit_env': explicit_env,
+ 'log_level': logging.INFO,
+ 'require_unix_environment': False,
+ 'ensure_exit_code': ensure_exit_code,
+ 'pass_thru': pass_thru,
+
+ # Make manages its children, so mozprocess doesn't need to bother.
+ # Having mozprocess manage children can also have side-effects when
+ # building on Windows. See bug 796840.
+ 'ignore_children': True,
+ }
+
+ if log:
+ params['log_name'] = 'make'
+
+ return fn(**params)
+
+ def _make_path(self):
+ baseconfig = os.path.join(self.topsrcdir, 'config', 'baseconfig.mk')
+
+ def is_xcode_license_error(output):
+ return self._is_osx() and 'Agreeing to the Xcode' in output
+
+ def validate_make(make):
+ if os.path.exists(baseconfig) and os.path.exists(make):
+ cmd = [make, '-f', baseconfig]
+ if self._is_windows():
+ cmd.append('HOST_OS_ARCH=WINNT')
+ try:
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ return False, is_xcode_license_error(e.output)
+ return True, False
+ return False, False
+
+ xcode_license_error = False
+ possible_makes = ['gmake', 'make', 'mozmake', 'gnumake', 'mingw32-make']
+
+ if 'MAKE' in os.environ:
+ make = os.environ['MAKE']
+ possible_makes.insert(0, make)
+
+ for test in possible_makes:
+ if os.path.isabs(test):
+ make = test
+ else:
+ try:
+ make = which.which(test)
+ except which.WhichError:
+ continue
+ result, xcode_license_error_tmp = validate_make(make)
+ if result:
+ return [make]
+ if xcode_license_error_tmp:
+ xcode_license_error = True
+
+ if xcode_license_error:
+ raise Exception('Xcode requires accepting the license agreement.\n'
+ 'Please run Xcode and accept the license agreement.')
+
+ if self._is_windows():
+ raise Exception('Could not find a suitable make implementation.\n'
+ 'Please use MozillaBuild 1.9 or newer')
+ else:
+ raise Exception('Could not find a suitable make implementation.')
+
+ def _run_command_in_srcdir(self, **args):
+ return self.run_process(cwd=self.topsrcdir, **args)
+
+ def _run_command_in_objdir(self, **args):
+ return self.run_process(cwd=self.topobjdir, **args)
+
+ def _is_windows(self):
+ return os.name in ('nt', 'ce')
+
+ def _is_osx(self):
+ return 'darwin' in str(sys.platform).lower()
+
+ def _spawn(self, cls):
+ """Create a new MozbuildObject-derived class instance from ourselves.
+
+ This is used as a convenience method to create other
+ MozbuildObject-derived class instances. It can only be used on
+ classes that have the same constructor arguments as us.
+ """
+
+ return cls(self.topsrcdir, self.settings, self.log_manager,
+ topobjdir=self.topobjdir)
+
+ def _activate_virtualenv(self):
+ self.virtualenv_manager.ensure()
+ self.virtualenv_manager.activate()
+
+
+class MachCommandBase(MozbuildObject):
+ """Base class for mach command providers that wish to be MozbuildObjects.
+
+ This provides a level of indirection so MozbuildObject can be refactored
+ without having to change everything that inherits from it.
+ """
+
+ def __init__(self, context):
+ # Attempt to discover topobjdir through environment detection, as it is
+ # more reliable than mozconfig when cwd is inside an objdir.
+ topsrcdir = context.topdir
+ topobjdir = None
+ detect_virtualenv_mozinfo = True
+ if hasattr(context, 'detect_virtualenv_mozinfo'):
+ detect_virtualenv_mozinfo = getattr(context,
+ 'detect_virtualenv_mozinfo')
+ try:
+ dummy = MozbuildObject.from_environment(cwd=context.cwd,
+ detect_virtualenv_mozinfo=detect_virtualenv_mozinfo)
+ topsrcdir = dummy.topsrcdir
+ topobjdir = dummy._topobjdir
+ if topobjdir:
+ # If we're inside an objdir and the found mozconfig resolves to
+ # another objdir, we abort. The reasoning here is that if you
+ # are inside an objdir you probably want to perform actions on
+ # that objdir, not another one. This prevents accidental usage
+ # of the wrong objdir when the current objdir is ambiguous.
+ config_topobjdir = dummy.resolve_mozconfig_topobjdir()
+
+ try:
+ universal_bin = dummy.substs.get('UNIVERSAL_BINARY')
+ except Exception:
+ universal_bin = False
+
+ if config_topobjdir and not (samepath(topobjdir, config_topobjdir) or
+ universal_bin and topobjdir.startswith(config_topobjdir)):
+ raise ObjdirMismatchException(topobjdir, config_topobjdir)
+ except BuildEnvironmentNotFoundException:
+ pass
+ except ObjdirMismatchException as e:
+ print('Ambiguous object directory detected. We detected that '
+ 'both %s and %s could be object directories. This is '
+ 'typically caused by having a mozconfig pointing to a '
+ 'different object directory from the current working '
+ 'directory. To solve this problem, ensure you do not have a '
+ 'default mozconfig in searched paths.' % (e.objdir1,
+ e.objdir2))
+ sys.exit(1)
+
+ except MozconfigLoadException as e:
+ print('Error loading mozconfig: ' + e.path)
+ print('')
+ print(e.message)
+ if e.output:
+ print('')
+ print('mozconfig output:')
+ print('')
+ for line in e.output:
+ print(line)
+
+ sys.exit(1)
+
+ MozbuildObject.__init__(self, topsrcdir, context.settings,
+ context.log_manager, topobjdir=topobjdir)
+
+ self._mach_context = context
+
+ # Incur mozconfig processing now so we have unified error handling.
+ # Otherwise, the exceptions could bubble back to mach's error
+ # handler.
+ try:
+ self.mozconfig
+
+ except MozconfigFindException as e:
+ print(e.message)
+ sys.exit(1)
+
+ except MozconfigLoadException as e:
+ print('Error loading mozconfig: ' + e.path)
+ print('')
+ print(e.message)
+ if e.output:
+ print('')
+ print('mozconfig output:')
+ print('')
+ for line in e.output:
+ print(line)
+
+ sys.exit(1)
+
+ # Always keep a log of the last command, but don't do that for mach
+ # invocations from scripts (especially not the ones done by the build
+ # system itself).
+ if (os.isatty(sys.stdout.fileno()) and
+ not getattr(self, 'NO_AUTO_LOG', False)):
+ self._ensure_state_subdir_exists('.')
+ logfile = self._get_state_filename('last_log.json')
+ try:
+ fd = open(logfile, "wb")
+ self.log_manager.add_json_handler(fd)
+ except Exception as e:
+ self.log(logging.WARNING, 'mach', {'error': e},
+ 'Log will not be kept for this command: {error}.')
+
+
+class MachCommandConditions(object):
+ """A series of commonly used condition functions which can be applied to
+ mach commands with providers deriving from MachCommandBase.
+ """
+ @staticmethod
+ def is_firefox(cls):
+ """Must have a Firefox build."""
+ if hasattr(cls, 'substs'):
+ return cls.substs.get('MOZ_BUILD_APP') == 'browser'
+ return False
+
+ @staticmethod
+ def is_mulet(cls):
+ """Must have a Mulet build."""
+ if hasattr(cls, 'substs'):
+ return cls.substs.get('MOZ_BUILD_APP') == 'b2g/dev'
+ return False
+
+ @staticmethod
+ def is_b2g(cls):
+ """Must have a B2G build."""
+ if hasattr(cls, 'substs'):
+ return cls.substs.get('MOZ_WIDGET_TOOLKIT') == 'gonk'
+ return False
+
+ @staticmethod
+ def is_b2g_desktop(cls):
+ """Must have a B2G desktop build."""
+ if hasattr(cls, 'substs'):
+ return cls.substs.get('MOZ_BUILD_APP') == 'b2g' and \
+ cls.substs.get('MOZ_WIDGET_TOOLKIT') != 'gonk'
+ return False
+
+ @staticmethod
+ def is_emulator(cls):
+ """Must have a B2G build with an emulator configured."""
+ try:
+ return MachCommandConditions.is_b2g(cls) and \
+ cls.device_name.startswith('emulator')
+ except AttributeError:
+ return False
+
+ @staticmethod
+ def is_android(cls):
+ """Must have an Android build."""
+ if hasattr(cls, 'substs'):
+ return cls.substs.get('MOZ_WIDGET_TOOLKIT') == 'android'
+ return False
+
+ @staticmethod
+ def is_hg(cls):
+ """Must have a mercurial source checkout."""
+ if hasattr(cls, 'substs'):
+ top_srcdir = cls.substs.get('top_srcdir')
+ return top_srcdir and os.path.isdir(os.path.join(top_srcdir, '.hg'))
+ return False
+
+ @staticmethod
+ def is_git(cls):
+ """Must have a git source checkout."""
+ if hasattr(cls, 'substs'):
+ top_srcdir = cls.substs.get('top_srcdir')
+ return top_srcdir and os.path.isdir(os.path.join(top_srcdir, '.git'))
+ return False
+
+
+class PathArgument(object):
+ """Parse a filesystem path argument and transform it in various ways."""
+
+ def __init__(self, arg, topsrcdir, topobjdir, cwd=None):
+ self.arg = arg
+ self.topsrcdir = topsrcdir
+ self.topobjdir = topobjdir
+ self.cwd = os.getcwd() if cwd is None else cwd
+
+ def relpath(self):
+ """Return a path relative to the topsrcdir or topobjdir.
+
+ If the argument is a path to a location in one of the base directories
+ (topsrcdir or topobjdir), then strip off the base directory part and
+ just return the path within the base directory."""
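+ # Illustrative (hypothetical paths): with topsrcdir='/src' and
+ # topobjdir='/obj', PathArgument('base/foo.cpp', '/src', '/obj',
+ # cwd='/src/dom').relpath() returns 'dom/base/foo.cpp'.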
+
+ abspath = os.path.abspath(os.path.join(self.cwd, self.arg))
+
+ # If that path is within topsrcdir or topobjdir, return an equivalent
+ # path relative to that base directory.
+ for base_dir in [self.topobjdir, self.topsrcdir]:
+ if abspath.startswith(os.path.abspath(base_dir)):
+ return mozpath.relpath(abspath, base_dir)
+
+ return mozpath.normsep(self.arg)
+
+ def srcdir_path(self):
+ return mozpath.join(self.topsrcdir, self.relpath())
+
+ def objdir_path(self):
+ return mozpath.join(self.topobjdir, self.relpath())
+
+
+class ExecutionSummary(dict):
+ """Helper for execution summaries."""
+
+ def __init__(self, summary_format, **data):
+ self._summary_format = ''
+ assert 'execution_time' in data
+ self.extend(summary_format, **data)
+
+ def extend(self, summary_format, **data):
+ self._summary_format += summary_format
+ self.update(data)
+
+ def __str__(self):
+ return self._summary_format.format(**self)
+
+ def __getattr__(self, key):
+ return self[key]
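+
+# Illustrative usage: str(ExecutionSummary('Built in {execution_time:.1f}s.',
+# execution_time=3.2)) produces 'Built in 3.2s.'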
diff --git a/python/mozbuild/mozbuild/codecoverage/__init__.py b/python/mozbuild/mozbuild/codecoverage/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/__init__.py
diff --git a/python/mozbuild/mozbuild/codecoverage/chrome_map.py b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
new file mode 100644
index 000000000..81c3c9a07
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
@@ -0,0 +1,105 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+import json
+import os
+import urlparse
+
+from mach.config import ConfigSettings
+from mach.logging import LoggingManager
+from mozbuild.backend.common import CommonBackend
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.data import (
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+)
+from mozbuild.frontend.data import JARManifest, ChromeManifestEntry
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestChrome,
+ ManifestOverride,
+ ManifestResource,
+ parse_manifest,
+)
+import mozpack.path as mozpath
+
+
+class ChromeManifestHandler(object):
+ def __init__(self):
+ self.overrides = {}
+ self.chrome_mapping = defaultdict(set)
+
+ def handle_manifest_entry(self, entry):
+ format_strings = {
+ "content": "chrome://%s/content/",
+ "resource": "resource://%s/",
+ "locale": "chrome://%s/locale/",
+ "skin": "chrome://%s/skin/",
+ }
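+ # Illustrative: a "content mozapps /content/mozapps/" entry yields the
+ # base URI chrome://mozapps/content/ mapped to its normalized target path.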
+
+ if isinstance(entry, (ManifestChrome, ManifestResource)):
+ if isinstance(entry, ManifestResource):
+ dest = entry.target
+ url = urlparse.urlparse(dest)
+ if not url.scheme:
+ dest = mozpath.normpath(mozpath.join(entry.base, dest))
+ if url.scheme == 'file':
+ dest = mozpath.normpath(url.path)
+ else:
+ dest = mozpath.normpath(entry.path)
+
+ base_uri = format_strings[entry.type] % entry.name
+ self.chrome_mapping[base_uri].add(dest)
+ if isinstance(entry, ManifestOverride):
+ self.overrides[entry.overloaded] = entry.overload
+ if isinstance(entry, Manifest):
+ for e in parse_manifest(None, entry.path):
+ self.handle_manifest_entry(e)
+
+class ChromeMapBackend(CommonBackend):
+ def _init(self):
+ CommonBackend._init(self)
+
+ log_manager = LoggingManager()
+ self._cmd = MozbuildObject(self.environment.topsrcdir, ConfigSettings(),
+ log_manager, self.environment.topobjdir)
+ self._install_mapping = {}
+ self.manifest_handler = ChromeManifestHandler()
+
+ def consume_object(self, obj):
+ if isinstance(obj, JARManifest):
+ self._consume_jar_manifest(obj)
+ if isinstance(obj, ChromeManifestEntry):
+ self.manifest_handler.handle_manifest_entry(obj.entry)
+ if isinstance(obj, (FinalTargetFiles,
+ FinalTargetPreprocessedFiles)):
+ self._handle_final_target_files(obj)
+ return True
+
+ def _handle_final_target_files(self, obj):
+ for path, files in obj.files.walk():
+ for f in files:
+ dest = mozpath.join(obj.install_target, path, f.target_basename)
+ is_pp = isinstance(obj,
+ FinalTargetPreprocessedFiles)
+ self._install_mapping[dest] = f.full_path, is_pp
+
+ def consume_finished(self):
+ # Our result has three parts:
+ # A map from url prefixes to objdir directories:
+ # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
+ # A map of overrides.
+ # A map from objdir paths to sourcedir paths, and a flag for whether the source was preprocessed:
+ # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
+ # [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", false ], ... }
+ outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
+ with self._write_file(outputfile) as fh:
+ chrome_mapping = self.manifest_handler.chrome_mapping
+ overrides = self.manifest_handler.overrides
+ json.dump([
+ {k: list(v) for k, v in chrome_mapping.iteritems()},
+ overrides,
+ self._install_mapping,
+ ], fh, sort_keys=True, indent=2)
diff --git a/python/mozbuild/mozbuild/codecoverage/packager.py b/python/mozbuild/mozbuild/codecoverage/packager.py
new file mode 100644
index 000000000..3a4f359f6
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/packager.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import sys
+
+from mozpack.files import FileFinder
+from mozpack.copier import Jarrer
+
+def package_gcno_tree(root, output_file):
+ # XXX JarWriter doesn't support unicode strings, see bug 1056859
+ if isinstance(root, unicode):
+ root = root.encode('utf-8')
+
+ finder = FileFinder(root)
+ jarrer = Jarrer(optimize=False)
+ for p, f in finder.find("**/*.gcno"):
+ jarrer.add(p, f)
+ jarrer.copy(output_file)
+
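+# Illustrative: package_gcno_tree('/obj', 'gcno.zip') archives every
+# **/*.gcno file found under /obj into gcno.zip.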
+
+def cli(args=sys.argv[1:]):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-o', '--output-file',
+ dest='output_file',
+ help='Path to save packaged data to.')
+ parser.add_argument('--root',
+ dest='root',
+ default=None,
+ help='Root directory to search from.')
+ args = parser.parse_args(args)
+
+ if not args.root:
+ from buildconfig import topobjdir
+ args.root = topobjdir
+
+ return package_gcno_tree(args.root, args.output_file)
+
+if __name__ == '__main__':
+ sys.exit(cli())
diff --git a/python/mozbuild/mozbuild/compilation/__init__.py b/python/mozbuild/mozbuild/compilation/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/__init__.py
diff --git a/python/mozbuild/mozbuild/compilation/codecomplete.py b/python/mozbuild/mozbuild/compilation/codecomplete.py
new file mode 100644
index 000000000..05583961a
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/codecomplete.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for dealing with code completion.
+
+from __future__ import absolute_import
+
+import os
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from mozbuild.base import MachCommandBase
+from mozbuild.shellutil import (
+ split as shell_split,
+ quote as shell_quote,
+)
+
+
+@CommandProvider
+class Introspection(MachCommandBase):
+ """Instropection commands."""
+
+ @Command('compileflags', category='devenv',
+ description='Display the compilation flags for a given source file')
+ @CommandArgument('what', default=None,
+ help='Source file to display compilation flags for')
+ def compileflags(self, what):
+ from mozbuild.util import resolve_target_to_make
+ from mozbuild.compilation import util
+
+ if not util.check_top_objdir(self.topobjdir):
+ return 1
+
+ path_arg = self._wrap_path_argument(what)
+
+ make_dir, make_target = resolve_target_to_make(self.topobjdir,
+ path_arg.relpath())
+
+ if make_dir is None and make_target is None:
+ return 1
+
+ build_vars = util.get_build_vars(make_dir, self)
+
+ if what.endswith('.c'):
+ cc = 'CC'
+ name = 'COMPILE_CFLAGS'
+ else:
+ cc = 'CXX'
+ name = 'COMPILE_CXXFLAGS'
+
+ if name not in build_vars:
+ return
+
+ # Drop the first flag since that is the pathname of the compiler.
+ flags = (shell_split(build_vars[cc]) + shell_split(build_vars[name]))[1:]
+
+ print(' '.join(shell_quote(arg)
+ for arg in util.sanitize_cflags(flags)))
diff --git a/python/mozbuild/mozbuild/compilation/database.py b/python/mozbuild/mozbuild/compilation/database.py
new file mode 100644
index 000000000..4193e1bcf
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/database.py
@@ -0,0 +1,252 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for generating a compilation database.
+
+import os
+import types
+
+from mozbuild.compilation import util
+from mozbuild.backend.common import CommonBackend
+from mozbuild.frontend.data import (
+ Sources,
+ GeneratedSources,
+ DirectoryTraversal,
+ Defines,
+ Linkable,
+ LocalInclude,
+ VariablePassthru,
+ SimpleProgram,
+)
+from mozbuild.shellutil import (
+ quote as shell_quote,
+)
+from mozbuild.util import expand_variables
+import mozpack.path as mozpath
+from collections import (
+ defaultdict,
+ OrderedDict,
+)
+
+
+class CompileDBBackend(CommonBackend):
+ def _init(self):
+ CommonBackend._init(self)
+ if not util.check_top_objdir(self.environment.topobjdir):
+ raise Exception()
+
+ # The database we're going to dump out to.
+ self._db = OrderedDict()
+
+ # The cache for per-directory flags
+ self._flags = {}
+
+ self._envs = {}
+ self._includes = defaultdict(list)
+ self._defines = defaultdict(list)
+ self._local_flags = defaultdict(dict)
+ self._extra_includes = defaultdict(list)
+ self._gyp_dirs = set()
+ self._dist_include_testing = '-I%s' % mozpath.join(
+ self.environment.topobjdir, 'dist', 'include', 'testing')
+
+ def consume_object(self, obj):
+ # These are difficult directories that will be handled later.
+ if obj.relativedir in (
+ 'build/unix/elfhack',
+ 'build/unix/elfhack/inject',
+ 'build/clang-plugin',
+ 'build/clang-plugin/tests',
+ 'security/sandbox/win/wow_helper',
+ 'toolkit/crashreporter/google-breakpad/src/common'):
+ return True
+
+ consumed = CommonBackend.consume_object(self, obj)
+
+ if consumed:
+ return True
+
+ if isinstance(obj, DirectoryTraversal):
+ self._envs[obj.objdir] = obj.config
+ for var in ('STL_FLAGS', 'VISIBILITY_FLAGS', 'WARNINGS_AS_ERRORS'):
+ value = obj.config.substs.get(var)
+ if value:
+ self._local_flags[obj.objdir][var] = value
+
+ elif isinstance(obj, (Sources, GeneratedSources)):
+ # For other sources, include each source file.
+ for f in obj.files:
+ self._build_db_line(obj.objdir, obj.relativedir, obj.config, f,
+ obj.canonical_suffix)
+
+ elif isinstance(obj, LocalInclude):
+ self._includes[obj.objdir].append('-I%s' % mozpath.normpath(
+ obj.path.full_path))
+
+ elif isinstance(obj, Linkable):
+ if isinstance(obj.defines, Defines): # As opposed to HostDefines
+ for d in obj.defines.get_defines():
+ if d not in self._defines[obj.objdir]:
+ self._defines[obj.objdir].append(d)
+ self._defines[obj.objdir].extend(obj.lib_defines.get_defines())
+ if isinstance(obj, SimpleProgram) and obj.is_unit_test:
+ if (self._dist_include_testing not in
+ self._extra_includes[obj.objdir]):
+ self._extra_includes[obj.objdir].append(
+ self._dist_include_testing)
+
+ elif isinstance(obj, VariablePassthru):
+ if obj.variables.get('IS_GYP_DIR'):
+ self._gyp_dirs.add(obj.objdir)
+ for var in ('MOZBUILD_CFLAGS', 'MOZBUILD_CXXFLAGS',
+ 'MOZBUILD_CMFLAGS', 'MOZBUILD_CMMFLAGS',
+ 'RTL_FLAGS', 'VISIBILITY_FLAGS'):
+ if var in obj.variables:
+ self._local_flags[obj.objdir][var] = obj.variables[var]
+ if (obj.variables.get('DISABLE_STL_WRAPPING') and
+ 'STL_FLAGS' in self._local_flags[obj.objdir]):
+ del self._local_flags[obj.objdir]['STL_FLAGS']
+ if (obj.variables.get('ALLOW_COMPILER_WARNINGS') and
+ 'WARNINGS_AS_ERRORS' in self._local_flags[obj.objdir]):
+ del self._local_flags[obj.objdir]['WARNINGS_AS_ERRORS']
+
+ return True
+
+ def consume_finished(self):
+ CommonBackend.consume_finished(self)
+
+ db = []
+
+ for (directory, filename), cmd in self._db.iteritems():
+ env = self._envs[directory]
+ cmd = list(cmd)
+ cmd.append(filename)
+ local_extra = list(self._extra_includes[directory])
+ if directory not in self._gyp_dirs:
+ for var in (
+ 'NSPR_CFLAGS',
+ 'NSS_CFLAGS',
+ 'MOZ_JPEG_CFLAGS',
+ 'MOZ_PNG_CFLAGS',
+ 'MOZ_ZLIB_CFLAGS',
+ 'MOZ_PIXMAN_CFLAGS',
+ ):
+ f = env.substs.get(var)
+ if f:
+ local_extra.extend(f)
+ variables = {
+ 'LOCAL_INCLUDES': self._includes[directory],
+ 'DEFINES': self._defines[directory],
+ 'EXTRA_INCLUDES': local_extra,
+ 'DIST': mozpath.join(env.topobjdir, 'dist'),
+ 'DEPTH': env.topobjdir,
+ 'MOZILLA_DIR': env.topsrcdir,
+ 'topsrcdir': env.topsrcdir,
+ 'topobjdir': env.topobjdir,
+ }
+ variables.update(self._local_flags[directory])
+ c = []
+ for a in cmd:
+ a = expand_variables(a, variables).split()
+ if not a:
+ continue
+ if isinstance(a, types.StringTypes):
+ c.append(a)
+ else:
+ c.extend(a)
+ db.append({
+ 'directory': directory,
+ 'command': ' '.join(shell_quote(a) for a in c),
+ 'file': filename,
+ })
+
+ import json
+ # Output the database (a JSON file) to objdir/compile_commands.json
+ outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
+ with self._write_file(outputfile) as jsonout:
+ json.dump(db, jsonout, indent=0)
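+ # Each entry in the dumped database looks roughly like (illustrative):
+ #   {"directory": "/obj/dom/base", "file": "nsDocument.cpp",
+ #    "command": "c++ -o /dev/null -c ... nsDocument.cpp"}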
+
+ def _process_unified_sources(self, obj):
+ # For unified sources, only include the unified source file.
+ # Note that unified sources are never used for host sources.
+ for f in obj.unified_source_mapping:
+ self._build_db_line(obj.objdir, obj.relativedir, obj.config, f[0],
+ obj.canonical_suffix)
+
+ def _handle_idl_manager(self, idl_manager):
+ pass
+
+ def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
+ unified_ipdl_cppsrcs_mapping):
+ for f in unified_ipdl_cppsrcs_mapping:
+ self._build_db_line(ipdl_dir, None, self.environment, f[0],
+ '.cpp')
+
+ def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
+ webidls, expected_build_output_files,
+ global_define_files):
+ for f in unified_source_mapping:
+ self._build_db_line(bindings_dir, None, self.environment, f[0],
+ '.cpp')
+
+ COMPILERS = {
+ '.c': 'CC',
+ '.cpp': 'CXX',
+ '.m': 'CC',
+ '.mm': 'CXX',
+ }
+
+ CFLAGS = {
+ '.c': 'CFLAGS',
+ '.cpp': 'CXXFLAGS',
+ '.m': 'CFLAGS',
+ '.mm': 'CXXFLAGS',
+ }
+
+ def _build_db_line(self, objdir, reldir, cenv, filename, canonical_suffix):
+ if canonical_suffix not in self.COMPILERS:
+ return
+ db = self._db.setdefault((objdir, filename),
+ cenv.substs[self.COMPILERS[canonical_suffix]].split() +
+ ['-o', '/dev/null', '-c'])
+ reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)
+
+ def append_var(name):
+ value = cenv.substs.get(name)
+ if not value:
+ return
+ if isinstance(value, types.StringTypes):
+ value = value.split()
+ db.extend(value)
+
+ if canonical_suffix in ('.mm', '.cpp'):
+ db.append('$(STL_FLAGS)')
+
+ db.extend((
+ '$(VISIBILITY_FLAGS)',
+ '$(DEFINES)',
+ '-I%s' % mozpath.join(cenv.topsrcdir, reldir),
+ '-I%s' % objdir,
+ '$(LOCAL_INCLUDES)',
+ '-I%s/dist/include' % cenv.topobjdir,
+ '$(EXTRA_INCLUDES)',
+ ))
+ append_var('DSO_CFLAGS')
+ append_var('DSO_PIC_CFLAGS')
+ if canonical_suffix in ('.c', '.cpp'):
+ db.append('$(RTL_FLAGS)')
+ append_var('OS_COMPILE_%s' % self.CFLAGS[canonical_suffix])
+ append_var('OS_CPPFLAGS')
+ append_var('OS_%s' % self.CFLAGS[canonical_suffix])
+ append_var('MOZ_DEBUG_FLAGS')
+ append_var('MOZ_OPTIMIZE_FLAGS')
+ append_var('MOZ_FRAMEPTR_FLAGS')
+ db.append('$(WARNINGS_AS_ERRORS)')
+ db.append('$(MOZBUILD_%s)' % self.CFLAGS[canonical_suffix])
+ if canonical_suffix == '.m':
+ append_var('OS_COMPILE_CMFLAGS')
+ db.append('$(MOZBUILD_CMFLAGS)')
+ elif canonical_suffix == '.mm':
+ append_var('OS_COMPILE_CMMFLAGS')
+ db.append('$(MOZBUILD_CMMFLAGS)')
diff --git a/python/mozbuild/mozbuild/compilation/util.py b/python/mozbuild/mozbuild/compilation/util.py
new file mode 100644
index 000000000..32ff2f876
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/util.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from mozbuild import shellutil
+
+def check_top_objdir(topobjdir):
+ top_make = os.path.join(topobjdir, 'Makefile')
+ if not os.path.exists(top_make):
+ print('Your tree has not been built yet. Please run '
+ '|mach build| with no arguments.')
+ return False
+ return True
+
+def get_build_vars(directory, cmd):
+ build_vars = {}
+
+ def on_line(line):
+ elements = [s.strip() for s in line.split('=', 1)]
+
+ if len(elements) != 2:
+ return
+
+ build_vars[elements[0]] = elements[1]
+
+ try:
+ old_logger = cmd.log_manager.replace_terminal_handler(None)
+ cmd._run_make(directory=directory, target='showbuild', log=False,
+ print_directory=False, allow_parallel=False, silent=True,
+ line_handler=on_line)
+ finally:
+ cmd.log_manager.replace_terminal_handler(old_logger)
+
+ return build_vars
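+
+# Illustrative: get_build_vars('dom/base', cmd) runs |make showbuild| in that
+# directory and returns a dict like {'CXX': '/usr/bin/c++ ...',
+# 'COMPILE_CXXFLAGS': '-I... -DNDEBUG ...'}.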
+
+def sanitize_cflags(flags):
+ # We filter out -Xclang arguments as clang based tools typically choke on
+ # passing these flags down to the clang driver. -Xclang tells the clang
+ # driver driver to pass whatever comes after it down to clang cc1, which is
+ # why we skip -Xclang and the argument immediately after it. Here is an
+ # example: the following two invocations pass |-foo -bar -baz| to cc1:
+ # clang -cc1 -foo -bar -baz
+ # clang -Xclang -foo -Xclang -bar -Xclang -baz
+ sanitized = []
+ saw_xclang = False
+ for flag in flags:
+ if flag == '-Xclang':
+ saw_xclang = True
+ elif saw_xclang:
+ saw_xclang = False
+ else:
+ sanitized.append(flag)
+ return sanitized
diff --git a/python/mozbuild/mozbuild/compilation/warnings.py b/python/mozbuild/mozbuild/compilation/warnings.py
new file mode 100644
index 000000000..8fb20ccbf
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/warnings.py
@@ -0,0 +1,376 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for dealing with compiler warnings.
+
+from __future__ import absolute_import, unicode_literals
+
+import errno
+import json
+import os
+import re
+
+from mozbuild.util import hash_file
+import mozpack.path as mozpath
+
+
+# Regular expression to strip ANSI color sequences from a string. This is
+# needed to properly analyze Clang compiler output, which may be colorized.
+# It assumes ANSI escape sequences.
+RE_STRIP_COLORS = re.compile(r'\x1b\[[\d;]+m')
+
+# This captures Clang diagnostics with the standard formatting.
+RE_CLANG_WARNING = re.compile(r"""
+ (?P<file>[^:]+)
+ :
+ (?P<line>\d+)
+ :
+ (?P<column>\d+)
+ :
+ \swarning:\s
+ (?P<message>.+)
+ \[(?P<flag>[^\]]+)
+ """, re.X)
+
+# This captures Visual Studio's warning format.
+RE_MSVC_WARNING = re.compile(r"""
+ (?P<file>.*)
+ \((?P<line>\d+)\)
+ \s?:\swarning\s
+ (?P<flag>[^:]+)
+ :\s
+ (?P<message>.*)
+ """, re.X)
+
+IN_FILE_INCLUDED_FROM = 'In file included from '
+
+
+class CompilerWarning(dict):
+ """Represents an individual compiler warning."""
+
+ def __init__(self):
+ dict.__init__(self)
+
+ self['filename'] = None
+ self['line'] = None
+ self['column'] = None
+ self['message'] = None
+ self['flag'] = None
+
+ # Since we inherit from dict, functools.total_ordering gets confused.
+ # Thus, we define a key function, a generic comparison, and then
+ # implement all the rich operators with those; approach is from:
+ # http://regebro.wordpress.com/2010/12/13/python-implementing-rich-comparison-the-correct-way/
+ def _cmpkey(self):
+ return (self['filename'], self['line'], self['column'])
+
+ def _compare(self, other, func):
+ if not isinstance(other, CompilerWarning):
+ return NotImplemented
+
+ return func(self._cmpkey(), other._cmpkey())
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s,o: s == o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s,o: s != o)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s,o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s,o: s <= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s,o: s > o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s,o: s >= o)
+
+ def __hash__(self):
+ """Define so this can exist inside a set, etc."""
+ return hash(tuple(sorted(self.items())))
+
+
+class WarningsDatabase(object):
+ """Holds a collection of warnings.
+
+ The warnings database is a semi-intelligent container that holds warnings
+ encountered during builds.
+
+ The warnings database is backed by a JSON file, but that is transparent
+ to consumers.
+
+ Under most circumstances, the warnings database is insert only. When a
+ warning is encountered, the caller blindly inserts it into the
+ database. The database figures out whether it is a dupe, etc.
+
+ During the course of development, it is common for warnings to change
+ slightly as source code changes. For example, line numbers will disagree.
+ The WarningsDatabase handles this by storing the hash of a file a warning
+ occurred in. At warning insert time, if the hash of the file does not match
+ what is stored in the database, the existing warnings for that file are
+ purged from the database.
+
+ Callers should periodically prune old, invalid warnings from the database
+ by calling prune(). A good time to do this is at the end of a build.
+ """
+ def __init__(self):
+ """Create an empty database."""
+ self._files = {}
+
+ def __len__(self):
+ i = 0
+ for value in self._files.values():
+ i += len(value['warnings'])
+
+ return i
+
+ def __iter__(self):
+ for value in self._files.values():
+ for warning in value['warnings']:
+ yield warning
+
+ def __contains__(self, item):
+ for value in self._files.values():
+ for warning in value['warnings']:
+ if warning == item:
+ return True
+
+ return False
+
+ @property
+ def warnings(self):
+ """All the CompilerWarning instances in this database."""
+ for value in self._files.values():
+ for w in value['warnings']:
+ yield w
+
+ def type_counts(self, dirpath=None):
+ """Returns a mapping of warning types to their counts."""
+
+ types = {}
+ for value in self._files.values():
+ for warning in value['warnings']:
+ if dirpath and not mozpath.normsep(warning['filename']).startswith(dirpath):
+ continue
+ flag = warning['flag']
+ count = types.get(flag, 0)
+ count += 1
+
+ types[flag] = count
+
+ return types
+
+ def has_file(self, filename):
+ """Whether we have any warnings for the specified file."""
+ return filename in self._files
+
+ def warnings_for_file(self, filename):
+ """Obtain the warnings for the specified file."""
+ f = self._files.get(filename, {'warnings': []})
+
+ for warning in f['warnings']:
+ yield warning
+
+ def insert(self, warning, compute_hash=True):
+ assert isinstance(warning, CompilerWarning)
+
+ filename = warning['filename']
+
+ new_hash = None
+
+ if compute_hash:
+ new_hash = hash_file(filename)
+
+ if filename in self._files:
+ if new_hash != self._files[filename]['hash']:
+ del self._files[filename]
+
+ value = self._files.get(filename, {
+ 'hash': new_hash,
+ 'warnings': set(),
+ })
+
+ value['warnings'].add(warning)
+
+ self._files[filename] = value
+
+ def prune(self):
+ """Prune the contents of the database.
+
+ This removes warnings that are no longer valid. A warning is no longer
+ valid if the file it was in no longer exists or if the content has
+ changed.
+
+ The check for changed content catches the case where a file previously
+ contained warnings but no longer does.
+ """
+
+ # Compute the key list up front since we mutate the dict as we iterate.
+ filenames = self._files.keys()
+ for filename in filenames:
+ if not os.path.exists(filename):
+ del self._files[filename]
+ continue
+
+ if self._files[filename]['hash'] is None:
+ continue
+
+ current_hash = hash_file(filename)
+ if current_hash != self._files[filename]['hash']:
+ del self._files[filename]
+ continue
+
+ def serialize(self, fh):
+ """Serialize the database to an open file handle."""
+ obj = {'files': {}}
+
+ # All this hackery because JSON can't handle sets.
+ for k, v in self._files.iteritems():
+ obj['files'][k] = {}
+
+ for k2, v2 in v.iteritems():
+ normalized = v2
+
+ if k2 == 'warnings':
+ normalized = [w for w in v2]
+
+ obj['files'][k][k2] = normalized
+
+ json.dump(obj, fh, indent=2)
+
+ def deserialize(self, fh):
+ """Load serialized content from a handle into the current instance."""
+ obj = json.load(fh)
+
+ self._files = obj['files']
+
+ # Normalize data types.
+ for filename, value in self._files.iteritems():
+ for k, v in value.iteritems():
+ if k != 'warnings':
+ continue
+
+ normalized = set()
+ for d in v:
+ w = CompilerWarning()
+ w.update(d)
+ normalized.add(w)
+
+ self._files[filename]['warnings'] = normalized
+
+ def load_from_file(self, filename):
+ """Load the database from a file."""
+ with open(filename, 'rb') as fh:
+ self.deserialize(fh)
+
+ def save_to_file(self, filename):
+ """Save the database to a file."""
+ try:
+ # Ensure the directory exists
+ os.makedirs(os.path.dirname(filename))
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ with open(filename, 'wb') as fh:
+ self.serialize(fh)
+
+
+class WarningsCollector(object):
+ """Collects warnings from text data.
+
+ Instances of this class receive data (usually the output of compiler
+ invocations) and parse it into warnings and add these warnings to a
+ database.
+
+ The collector works by incrementally receiving data, usually line-by-line
+ output from the compiler. Therefore, it can maintain state to parse
+ multi-line warning messages.
+ """
+ def __init__(self, database=None, objdir=None, resolve_files=True):
+ self.database = database
+ self.objdir = objdir
+ self.resolve_files = resolve_files
+ self.included_from = []
+
+ if database is None:
+ self.database = WarningsDatabase()
+
+ def process_line(self, line):
+ """Take a line of text and process it for a warning."""
+
+ filtered = RE_STRIP_COLORS.sub('', line)
+
+ # Clang warnings in files included from the one(s) being compiled will
+ # start with "In file included from /path/to/file:line:". Here, we
+ # record those.
+ if filtered.startswith(IN_FILE_INCLUDED_FROM):
+ included_from = filtered[len(IN_FILE_INCLUDED_FROM):]
+
+ parts = included_from.split(':')
+
+ self.included_from.append(parts[0])
+
+ return
+
+ warning = CompilerWarning()
+ filename = None
+
+ # TODO: make this more efficient by running fewer regexp matches.
+ match_clang = RE_CLANG_WARNING.match(filtered)
+ match_msvc = RE_MSVC_WARNING.match(filtered)
+ if match_clang:
+ d = match_clang.groupdict()
+
+ filename = d['file']
+ warning['line'] = int(d['line'])
+ warning['column'] = int(d['column'])
+ warning['flag'] = d['flag']
+ warning['message'] = d['message'].rstrip()
+
+ elif match_msvc:
+ d = match_msvc.groupdict()
+
+ filename = d['file']
+ warning['line'] = int(d['line'])
+ warning['flag'] = d['flag']
+ warning['message'] = d['message'].rstrip()
+ else:
+ self.included_from = []
+ return None
+
+ filename = os.path.normpath(filename)
+
+ # Sometimes we get relative includes. These typically point to files in
+ # the object directory. We try to resolve the relative path.
+ if not os.path.isabs(filename):
+ filename = self._normalize_relative_path(filename)
+
+ if not os.path.exists(filename) and self.resolve_files:
+ raise Exception('Could not find file containing warning: %s' %
+ filename)
+
+ warning['filename'] = filename
+
+ self.database.insert(warning, compute_hash=self.resolve_files)
+
+ return warning
+
+ def _normalize_relative_path(self, filename):
+ # Special case files in dist/include.
+ idx = filename.find('/dist/include')
+ if idx != -1:
+ return self.objdir + filename[idx:]
+
+ for included_from in self.included_from:
+ source_dir = os.path.dirname(included_from)
+
+ candidate = os.path.normpath(os.path.join(source_dir, filename))
+
+ if os.path.exists(candidate):
+ return candidate
+
+ return filename
diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py
new file mode 100644
index 000000000..343dcc3a2
--- /dev/null
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -0,0 +1,182 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Combined with build/autoconf/config.status.m4, ConfigStatus is an almost
+# drop-in replacement for autoconf 2.13's config.status, with features
+# borrowed from autoconf > 2.5, plus some additional features of its own.
+
+from __future__ import absolute_import, print_function
+
+import logging
+import os
+import subprocess
+import sys
+import time
+
+from argparse import ArgumentParser
+
+from mach.logging import LoggingManager
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.base import MachCommandConditions
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.mozinfo import write_mozinfo
+from itertools import chain
+
+from mozbuild.backend import (
+ backends,
+ get_backend_class,
+)
+
+
+log_manager = LoggingManager()
+
+
+ANDROID_IDE_ADVERTISEMENT = '''
+=============
+ADVERTISEMENT
+
+You are building Firefox for Android. After your build completes, you can open
+the top source directory in IntelliJ or Android Studio directly and build using
+Gradle. See the documentation at
+
+https://developer.mozilla.org/en-US/docs/Simple_Firefox_for_Android_build
+
+PLEASE BE AWARE THAT GRADLE AND INTELLIJ/ANDROID STUDIO SUPPORT IS EXPERIMENTAL.
+You should verify any changes using |mach build|.
+=============
+'''.strip()
+
+VISUAL_STUDIO_ADVERTISEMENT = '''
+===============================
+Visual Studio Support Available
+
+You are building Firefox on Windows. You can generate Visual Studio
+files by running:
+
+ mach build-backend --backend=VisualStudio
+
+===============================
+'''.strip()
+
+
+def config_status(topobjdir='.', topsrcdir='.', defines=None,
+ non_global_defines=None, substs=None, source=None,
+ mozconfig=None, args=sys.argv[1:]):
+ '''Main function, providing config.status functionality.
+
+    Unlike config.status, it doesn't use the CONFIG_FILES or CONFIG_HEADERS
+    environment variables.
+
+ Without the -n option, this program acts as config.status and considers
+ the current directory as the top object directory, even when config.status
+ is in a different directory. It will, however, treat the directory
+ containing config.status as the top object directory with the -n option.
+
+ The options to this function are passed when creating the
+ ConfigEnvironment. These lists, as well as the actual wrapper script
+ around this function, are meant to be generated by configure.
+ See build/autoconf/config.status.m4.
+ '''
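+    # A sketch of the wrapper script configure generates around this
+    # function (hypothetical values), typically written as
+    # $objdir/config.status:
+    #
+    #     #!/usr/bin/env python
+    #     from mozbuild.config_status import config_status
+    #     config_status(topobjdir='.', topsrcdir='/path/to/source',
+    #                   defines=[...], substs=[...])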
+
+ if 'CONFIG_FILES' in os.environ:
+ raise Exception('Using the CONFIG_FILES environment variable is not '
+ 'supported.')
+ if 'CONFIG_HEADERS' in os.environ:
+ raise Exception('Using the CONFIG_HEADERS environment variable is not '
+ 'supported.')
+
+ if not os.path.isabs(topsrcdir):
+ raise Exception('topsrcdir must be defined as an absolute directory: '
+ '%s' % topsrcdir)
+
+    default_backends = (substs or {}).get('BUILD_BACKENDS', ['RecursiveMake'])
+
+ parser = ArgumentParser()
+ parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
+ help='display verbose output')
+ parser.add_argument('-n', dest='not_topobjdir', action='store_true',
+ help='do not consider current directory as top object directory')
+ parser.add_argument('-d', '--diff', action='store_true',
+ help='print diffs of changed files.')
+ parser.add_argument('-b', '--backend', nargs='+', choices=sorted(backends),
+ default=default_backends,
+ help='what backend to build (default: %s).' %
+ ' '.join(default_backends))
+ parser.add_argument('--dry-run', action='store_true',
+ help='do everything except writing files out.')
+ options = parser.parse_args(args)
+
+ # Without -n, the current directory is meant to be the top object directory
+ if not options.not_topobjdir:
+ topobjdir = os.path.abspath('.')
+
+ env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
+ non_global_defines=non_global_defines, substs=substs,
+ source=source, mozconfig=mozconfig)
+
+    # mozinfo.json only needs to be written when configure changes, and
+    # configure always passes this environment variable.
+ if 'WRITE_MOZINFO' in os.environ:
+ write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)
+
+ cpu_start = time.clock()
+ time_start = time.time()
+
+ # Make appropriate backend instances, defaulting to RecursiveMakeBackend,
+ # or what is in BUILD_BACKENDS.
+ selected_backends = [get_backend_class(b)(env) for b in options.backend]
+
+ if options.dry_run:
+ for b in selected_backends:
+ b.dry_run = True
+
+ reader = BuildReader(env)
+ emitter = TreeMetadataEmitter(env)
+ # This won't actually do anything because of the magic of generators.
+ definitions = emitter.emit(reader.read_topsrcdir())
+
+ log_level = logging.DEBUG if options.verbose else logging.INFO
+ log_manager.add_terminal_logging(level=log_level)
+ log_manager.enable_unstructured()
+
+ print('Reticulating splines...', file=sys.stderr)
+ if len(selected_backends) > 1:
+ definitions = list(definitions)
+
+ for the_backend in selected_backends:
+ the_backend.consume(definitions)
+
+ execution_time = 0.0
+ for obj in chain((reader, emitter), selected_backends):
+ summary = obj.summary()
+ print(summary, file=sys.stderr)
+ execution_time += summary.execution_time
+
+ cpu_time = time.clock() - cpu_start
+ wall_time = time.time() - time_start
+    # Avoid rendering a nonsense 10000% when wall_time is zero.
+    efficiency = cpu_time / wall_time if wall_time else 1.0
+ untracked = wall_time - execution_time
+
+ print(
+ 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
+ '{:.0%}; Untracked: {:.2f}s'.format(
+ wall_time, cpu_time, efficiency, untracked),
+ file=sys.stderr
+ )
+
+ if options.diff:
+ for the_backend in selected_backends:
+ for path, diff in sorted(the_backend.file_diffs.items()):
+ print('\n'.join(diff))
+
+ # Advertise Visual Studio if appropriate.
+ if os.name == 'nt' and 'VisualStudio' not in options.backend:
+ print(VISUAL_STUDIO_ADVERTISEMENT)
+
+    # Advertise the Android IDE integration if appropriate.
+ if MachCommandConditions.is_android(env):
+ if 'AndroidEclipse' not in options.backend:
+ print(ANDROID_IDE_ADVERTISEMENT)
diff --git a/python/mozbuild/mozbuild/configure/__init__.py b/python/mozbuild/mozbuild/configure/__init__.py
new file mode 100644
index 000000000..0fe640cae
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/__init__.py
@@ -0,0 +1,935 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import inspect
+import logging
+import os
+import re
+import sys
+import types
+from collections import OrderedDict
+from contextlib import contextmanager
+from functools import wraps
+from mozbuild.configure.options import (
+ CommandLineHelper,
+ ConflictingOptionError,
+ InvalidOptionError,
+ NegativeOptionValue,
+ Option,
+ OptionValue,
+ PositiveOptionValue,
+)
+from mozbuild.configure.help import HelpFormatter
+from mozbuild.configure.util import (
+ ConfigureOutputHandler,
+ getpreferredencoding,
+ LineIO,
+)
+from mozbuild.util import (
+ exec_,
+ memoize,
+ memoized_property,
+ ReadOnlyDict,
+ ReadOnlyNamespace,
+)
+
+import mozpack.path as mozpath
+
+
+class ConfigureError(Exception):
+ pass
+
+
+class SandboxDependsFunction(object):
+ '''Sandbox-visible representation of @depends functions.'''
+ def __call__(self, *arg, **kwargs):
+ raise ConfigureError('The `%s` function may not be called'
+ % self.__name__)
+
+
+class DependsFunction(object):
+ __slots__ = (
+ 'func', 'dependencies', 'when', 'sandboxed', 'sandbox', '_result')
+
+ def __init__(self, sandbox, func, dependencies, when=None):
+ assert isinstance(sandbox, ConfigureSandbox)
+ self.func = func
+ self.dependencies = dependencies
+ self.sandboxed = wraps(func)(SandboxDependsFunction())
+ self.sandbox = sandbox
+ self.when = when
+ sandbox._depends[self.sandboxed] = self
+
+ # Only @depends functions with a dependency on '--help' are executed
+ # immediately. Everything else is queued for later execution.
+ if sandbox._help_option in dependencies:
+ sandbox._value_for(self)
+ elif not sandbox._help:
+ sandbox._execution_queue.append((sandbox._value_for, (self,)))
+
+ @property
+ def name(self):
+ return self.func.__name__
+
+ @property
+ def sandboxed_dependencies(self):
+ return [
+ d.sandboxed if isinstance(d, DependsFunction) else d
+ for d in self.dependencies
+ ]
+
+ @memoized_property
+ def result(self):
+ if self.when and not self.sandbox._value_for(self.when):
+ return None
+
+ resolved_args = [self.sandbox._value_for(d) for d in self.dependencies]
+ return self.func(*resolved_args)
+
+ def __repr__(self):
+ return '<%s.%s %s(%s)>' % (
+ self.__class__.__module__,
+ self.__class__.__name__,
+ self.name,
+ ', '.join(repr(d) for d in self.dependencies),
+ )
+
+
+class CombinedDependsFunction(DependsFunction):
+ def __init__(self, sandbox, func, dependencies):
+ @memoize
+ @wraps(func)
+ def wrapper(*args):
+ return func(args)
+
+ flatten_deps = []
+ for d in dependencies:
+ if isinstance(d, CombinedDependsFunction) and d.func == wrapper:
+ for d2 in d.dependencies:
+ if d2 not in flatten_deps:
+ flatten_deps.append(d2)
+ elif d not in flatten_deps:
+ flatten_deps.append(d)
+
+ # Automatically add a --help dependency if one of the dependencies
+ # depends on it.
+ for d in flatten_deps:
+ if (isinstance(d, DependsFunction) and
+ sandbox._help_option in d.dependencies):
+ flatten_deps.insert(0, sandbox._help_option)
+ break
+
+ super(CombinedDependsFunction, self).__init__(
+ sandbox, wrapper, flatten_deps)
+
+ @memoized_property
+ def result(self):
+ # Ignore --help for the combined result
+ deps = self.dependencies
+ if deps[0] == self.sandbox._help_option:
+ deps = deps[1:]
+ resolved_args = [self.sandbox._value_for(d) for d in deps]
+ return self.func(*resolved_args)
+
+ def __eq__(self, other):
+ return (isinstance(other, self.__class__) and
+ self.func == other.func and
+ set(self.dependencies) == set(other.dependencies))
+
+ def __ne__(self, other):
+ return not self == other
+
+
+class SandboxedGlobal(dict):
+ '''Identifiable dict type for use as function global'''
+
+
+def forbidden_import(*args, **kwargs):
+ raise ImportError('Importing modules is forbidden')
+
+
+class ConfigureSandbox(dict):
+ """Represents a sandbox for executing Python code for build configuration.
+ This is a different kind of sandboxing than the one used for moz.build
+ processing.
+
+ The sandbox has 9 primitives:
+ - option
+ - depends
+ - template
+ - imports
+ - include
+ - set_config
+ - set_define
+ - imply_option
+ - only_when
+
+ `option`, `include`, `set_config`, `set_define` and `imply_option` are
+ functions. `depends`, `template`, and `imports` are decorators. `only_when`
+ is a context_manager.
+
+    These primitives are declared as name_impl methods of this class, and
+    the mapping name -> name_impl is done automatically in __getitem__.
+
+ Additional primitives should be frowned upon to keep the sandbox itself as
+ simple as possible. Instead, helpers should be created within the sandbox
+ with the existing primitives.
+
+ The sandbox is given, at creation, a dict where the yielded configuration
+ will be stored.
+
+ config = {}
+ sandbox = ConfigureSandbox(config)
+ sandbox.run(path)
+ do_stuff(config)
+ """
+
+ # The default set of builtins. We expose unicode as str to make sandboxed
+ # files more python3-ready.
+ BUILTINS = ReadOnlyDict({
+ b: __builtins__[b]
+ for b in ('None', 'False', 'True', 'int', 'bool', 'any', 'all', 'len',
+ 'list', 'tuple', 'set', 'dict', 'isinstance', 'getattr',
+ 'hasattr', 'enumerate', 'range', 'zip')
+ }, __import__=forbidden_import, str=unicode)
+
+ # Expose a limited set of functions from os.path
+ OS = ReadOnlyNamespace(path=ReadOnlyNamespace(**{
+ k: getattr(mozpath, k, getattr(os.path, k))
+ for k in ('abspath', 'basename', 'dirname', 'isabs', 'join',
+ 'normcase', 'normpath', 'realpath', 'relpath')
+ }))
+
+ def __init__(self, config, environ=os.environ, argv=sys.argv,
+ stdout=sys.stdout, stderr=sys.stderr, logger=None):
+ dict.__setitem__(self, '__builtins__', self.BUILTINS)
+
+ self._paths = []
+ self._all_paths = set()
+ self._templates = set()
+ # Associate SandboxDependsFunctions to DependsFunctions.
+ self._depends = {}
+ self._seen = set()
+ # Store the @imports added to a given function.
+ self._imports = {}
+
+ self._options = OrderedDict()
+ # Store raw option (as per command line or environment) for each Option
+ self._raw_options = OrderedDict()
+
+ # Store options added with `imply_option`, and the reason they were
+        # added (which can either have been given to `imply_option`, or
+        # inferred). Their order matters, so use a list.
+ self._implied_options = []
+
+ # Store all results from _prepare_function
+ self._prepared_functions = set()
+
+ # Queue of functions to execute, with their arguments
+ self._execution_queue = []
+
+ # Store the `when`s associated to some options.
+ self._conditions = {}
+
+ # A list of conditions to apply as a default `when` for every *_impl()
+ self._default_conditions = []
+
+ self._helper = CommandLineHelper(environ, argv)
+
+ assert isinstance(config, dict)
+ self._config = config
+
+ if logger is None:
+ logger = moz_logger = logging.getLogger('moz.configure')
+ logger.setLevel(logging.DEBUG)
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
+ handler = ConfigureOutputHandler(stdout, stderr)
+ handler.setFormatter(formatter)
+ queue_debug = handler.queue_debug
+ logger.addHandler(handler)
+
+ else:
+ assert isinstance(logger, logging.Logger)
+ moz_logger = None
+ @contextmanager
+ def queue_debug():
+ yield
+
+ # Some callers will manage to log a bytestring with characters in it
+ # that can't be converted to ascii. Make our log methods robust to this
+ # by detecting the encoding that a producer is likely to have used.
+ encoding = getpreferredencoding()
+ def wrapped_log_method(logger, key):
+ method = getattr(logger, key)
+ if not encoding:
+ return method
+ def wrapped(*args, **kwargs):
+ out_args = [
+ arg.decode(encoding) if isinstance(arg, str) else arg
+ for arg in args
+ ]
+ return method(*out_args, **kwargs)
+ return wrapped
+
+ log_namespace = {
+ k: wrapped_log_method(logger, k)
+ for k in ('debug', 'info', 'warning', 'error')
+ }
+ log_namespace['queue_debug'] = queue_debug
+ self.log_impl = ReadOnlyNamespace(**log_namespace)
+
+ self._help = None
+ self._help_option = self.option_impl('--help',
+ help='print this message')
+ self._seen.add(self._help_option)
+
+ self._always = DependsFunction(self, lambda: True, [])
+ self._never = DependsFunction(self, lambda: False, [])
+
+ if self._value_for(self._help_option):
+ self._help = HelpFormatter(argv[0])
+ self._help.add(self._help_option)
+ elif moz_logger:
+ handler = logging.FileHandler('config.log', mode='w', delay=True)
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+ def include_file(self, path):
+        '''Include one file in the sandbox. Users of this class probably want
+        to use `run` instead.
+
+ Note: this will execute all template invocations, as well as @depends
+ functions that depend on '--help', but nothing else.
+ '''
+
+ if self._paths:
+ path = mozpath.join(mozpath.dirname(self._paths[-1]), path)
+ path = mozpath.normpath(path)
+ if not mozpath.basedir(path, (mozpath.dirname(self._paths[0]),)):
+ raise ConfigureError(
+ 'Cannot include `%s` because it is not in a subdirectory '
+ 'of `%s`' % (path, mozpath.dirname(self._paths[0])))
+ else:
+ path = mozpath.realpath(mozpath.abspath(path))
+ if path in self._all_paths:
+ raise ConfigureError(
+ 'Cannot include `%s` because it was included already.' % path)
+ self._paths.append(path)
+ self._all_paths.add(path)
+
+ source = open(path, 'rb').read()
+
+ code = compile(source, path, 'exec')
+
+ exec_(code, self)
+
+ self._paths.pop(-1)
+
+ def run(self, path=None):
+ '''Executes the given file within the sandbox, as well as everything
+ pending from any other included file, and ensure the overall
+ consistency of the executed script(s).'''
+ if path:
+ self.include_file(path)
+
+ for option in self._options.itervalues():
+ # All options must be referenced by some @depends function
+ if option not in self._seen:
+ raise ConfigureError(
+                    'Option `%s` is not handled; reference it with a @depends'
+ % option.option
+ )
+
+ self._value_for(option)
+
+ # All implied options should exist.
+ for implied_option in self._implied_options:
+ value = self._resolve(implied_option.value,
+ need_help_dependency=False)
+ if value is not None:
+ raise ConfigureError(
+ '`%s`, emitted from `%s` line %d, is unknown.'
+ % (implied_option.option, implied_option.caller[1],
+ implied_option.caller[2]))
+
+ # All options should have been removed (handled) by now.
+ for arg in self._helper:
+ without_value = arg.split('=', 1)[0]
+ raise InvalidOptionError('Unknown option: %s' % without_value)
+
+ # Run the execution queue
+ for func, args in self._execution_queue:
+ func(*args)
+
+ if self._help:
+ with LineIO(self.log_impl.info) as out:
+ self._help.usage(out)
+
+ def __getitem__(self, key):
+ impl = '%s_impl' % key
+ func = getattr(self, impl, None)
+ if func:
+ return func
+
+ return super(ConfigureSandbox, self).__getitem__(key)
+
+ def __setitem__(self, key, value):
+ if (key in self.BUILTINS or key == '__builtins__' or
+ hasattr(self, '%s_impl' % key)):
+ raise KeyError('Cannot reassign builtins')
+
+ if inspect.isfunction(value) and value not in self._templates:
+ value, _ = self._prepare_function(value)
+
+ elif (not isinstance(value, SandboxDependsFunction) and
+ value not in self._templates and
+ not (inspect.isclass(value) and issubclass(value, Exception))):
+ raise KeyError('Cannot assign `%s` because it is neither a '
+ '@depends nor a @template' % key)
+
+ return super(ConfigureSandbox, self).__setitem__(key, value)
+
+ def _resolve(self, arg, need_help_dependency=True):
+ if isinstance(arg, SandboxDependsFunction):
+ return self._value_for_depends(self._depends[arg],
+ need_help_dependency)
+ return arg
+
+ def _value_for(self, obj, need_help_dependency=False):
+ if isinstance(obj, SandboxDependsFunction):
+ assert obj in self._depends
+ return self._value_for_depends(self._depends[obj],
+ need_help_dependency)
+
+ elif isinstance(obj, DependsFunction):
+ return self._value_for_depends(obj, need_help_dependency)
+
+ elif isinstance(obj, Option):
+ return self._value_for_option(obj)
+
+ assert False
+
+ @memoize
+ def _value_for_depends(self, obj, need_help_dependency=False):
+ assert not inspect.isgeneratorfunction(obj.func)
+ return obj.result
+
+ @memoize
+ def _value_for_option(self, option):
+ implied = {}
+ for implied_option in self._implied_options[:]:
+ if implied_option.name not in (option.name, option.env):
+ continue
+ self._implied_options.remove(implied_option)
+
+ if (implied_option.when and
+ not self._value_for(implied_option.when)):
+ continue
+
+ value = self._resolve(implied_option.value,
+ need_help_dependency=False)
+
+ if value is not None:
+ if isinstance(value, OptionValue):
+ pass
+ elif value is True:
+ value = PositiveOptionValue()
+ elif value is False or value == ():
+ value = NegativeOptionValue()
+ elif isinstance(value, types.StringTypes):
+ value = PositiveOptionValue((value,))
+ elif isinstance(value, tuple):
+ value = PositiveOptionValue(value)
+ else:
+ raise TypeError("Unexpected type: '%s'"
+ % type(value).__name__)
+
+ opt = value.format(implied_option.option)
+ self._helper.add(opt, 'implied')
+ implied[opt] = implied_option
+
+ try:
+ value, option_string = self._helper.handle(option)
+ except ConflictingOptionError as e:
+ reason = implied[e.arg].reason
+ if isinstance(reason, Option):
+ reason = self._raw_options.get(reason) or reason.option
+ reason = reason.split('=', 1)[0]
+ raise InvalidOptionError(
+ "'%s' implied by '%s' conflicts with '%s' from the %s"
+ % (e.arg, reason, e.old_arg, e.old_origin))
+
+ if option_string:
+ self._raw_options[option] = option_string
+
+ when = self._conditions.get(option)
+ if (when and not self._value_for(when, need_help_dependency=True) and
+ value is not None and value.origin != 'default'):
+ if value.origin == 'environment':
+ # The value we return doesn't really matter, because of the
+ # requirement for @depends to have the same when.
+ return None
+ raise InvalidOptionError(
+ '%s is not available in this configuration'
+ % option_string.split('=', 1)[0])
+
+ return value
+
+ def _dependency(self, arg, callee_name, arg_name=None):
+ if isinstance(arg, types.StringTypes):
+ prefix, name, values = Option.split_option(arg)
+ if values != ():
+ raise ConfigureError("Option must not contain an '='")
+ if name not in self._options:
+ raise ConfigureError("'%s' is not a known option. "
+ "Maybe it's declared too late?"
+ % arg)
+ arg = self._options[name]
+ self._seen.add(arg)
+ elif isinstance(arg, SandboxDependsFunction):
+ assert arg in self._depends
+ arg = self._depends[arg]
+ else:
+ raise TypeError(
+ "Cannot use object of type '%s' as %sargument to %s"
+ % (type(arg).__name__, '`%s` ' % arg_name if arg_name else '',
+ callee_name))
+ return arg
+
+ def _normalize_when(self, when, callee_name):
+ if when is True:
+ when = self._always
+ elif when is False:
+ when = self._never
+ elif when is not None:
+ when = self._dependency(when, callee_name, 'when')
+
+ if self._default_conditions:
+ # Create a pseudo @depends function for the combination of all
+ # default conditions and `when`.
+ dependencies = [when] if when else []
+ dependencies.extend(self._default_conditions)
+ if len(dependencies) == 1:
+ return dependencies[0]
+ return CombinedDependsFunction(self, all, dependencies)
+ return when
+
+ @contextmanager
+ def only_when_impl(self, when):
+ '''Implementation of only_when()
+
+        `only_when` is a context manager that essentially makes every call
+        to other sandbox functions within the context block conditional on
+        the given `when`.
+ '''
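+        # Sketch (hypothetical names): declarations in the block behave as
+        # if they had been given `when=building_foo`:
+        #
+        #     with only_when(building_foo):
+        #         option('--enable-bar', help='Enable bar')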
+ when = self._normalize_when(when, 'only_when')
+ if when and self._default_conditions[-1:] != [when]:
+ self._default_conditions.append(when)
+ yield
+ self._default_conditions.pop()
+ else:
+ yield
+
+ def option_impl(self, *args, **kwargs):
+ '''Implementation of option()
+ This function creates and returns an Option() object, passing it the
+        resolved arguments (using the result of @depends functions when such
+        functions are passed). In most cases, the result of this function is
+        not expected to be used.
+ Command line argument/environment variable parsing for this Option is
+ handled here.
+ '''
+ when = self._normalize_when(kwargs.get('when'), 'option')
+ args = [self._resolve(arg) for arg in args]
+ kwargs = {k: self._resolve(v) for k, v in kwargs.iteritems()
+ if k != 'when'}
+ option = Option(*args, **kwargs)
+ if when:
+ self._conditions[option] = when
+ if option.name in self._options:
+ raise ConfigureError('Option `%s` already defined' % option.option)
+ if option.env in self._options:
+ raise ConfigureError('Option `%s` already defined' % option.env)
+ if option.name:
+ self._options[option.name] = option
+ if option.env:
+ self._options[option.env] = option
+
+ if self._help and (when is None or
+ self._value_for(when, need_help_dependency=True)):
+ self._help.add(option)
+
+ return option
+
+ def depends_impl(self, *args, **kwargs):
+ '''Implementation of @depends()
+ This function is a decorator. It returns a function that subsequently
+ takes a function and returns a dummy function. The dummy function
+ identifies the actual function for the sandbox, while preventing
+ further function calls from within the sandbox.
+
+ @depends() takes a variable number of option strings or dummy function
+        references. The decorated function receives the OptionValue or
+        function results corresponding to each of the arguments to @depends.
+        Functions depending on '--help' are executed as soon as they are
+        declared; everything else is queued and executed when the sandbox
+        runs. When a HelpFormatter is attached, only functions that have
+        '--help' in their @depends argument list are called.
+
+ The decorated function is altered to use a different global namespace
+ for its execution. This different global namespace exposes a limited
+ set of functions from os.path.
+ '''
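+        # Sketch (hypothetical options): one resolved value is passed per
+        # dependency, in the order they are listed:
+        #
+        #     @depends('--enable-foo', '--with-bar')
+        #     def foobar(foo, bar):
+        #         if foo and bar:
+        #             return bar[0]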
+ for k in kwargs:
+ if k != 'when':
+ raise TypeError(
+ "depends_impl() got an unexpected keyword argument '%s'"
+ % k)
+
+ when = self._normalize_when(kwargs.get('when'), '@depends')
+
+ if not when and not args:
+ raise ConfigureError('@depends needs at least one argument')
+
+ dependencies = tuple(self._dependency(arg, '@depends') for arg in args)
+
+ conditions = [
+ self._conditions[d]
+ for d in dependencies
+ if d in self._conditions and isinstance(d, Option)
+ ]
+ for c in conditions:
+ if c != when:
+ raise ConfigureError('@depends function needs the same `when` '
+ 'as options it depends on')
+
+ def decorator(func):
+ if inspect.isgeneratorfunction(func):
+ raise ConfigureError(
+ 'Cannot decorate generator functions with @depends')
+ func, glob = self._prepare_function(func)
+ depends = DependsFunction(self, func, dependencies, when=when)
+ return depends.sandboxed
+
+ return decorator
+
+ def include_impl(self, what, when=None):
+ '''Implementation of include().
+        Allows external files to be included for execution in the sandbox.
+ It is possible to use a @depends function as argument, in which case
+ the result of the function is the file name to include. This latter
+ feature is only really meant for --enable-application/--enable-project.
+ '''
+ with self.only_when_impl(when):
+ what = self._resolve(what)
+ if what:
+ if not isinstance(what, types.StringTypes):
+ raise TypeError("Unexpected type: '%s'" % type(what).__name__)
+ self.include_file(what)
+
+ def template_impl(self, func):
+ '''Implementation of @template.
+ This function is a decorator. Template functions are called
+ immediately. They are altered so that their global namespace exposes
+ a limited set of functions from os.path, as well as `depends` and
+ `option`.
+        Templates make it possible to simplify repetitive constructs, or to
+        implement helper decorators and the like.
+ '''
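+        # Sketch of a helper template (hypothetical), reducing repeated
+        # option() boilerplate:
+        #
+        #     @template
+        #     def simple_option(name, help):
+        #         option(name, help=help)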
+ template, glob = self._prepare_function(func)
+ glob.update(
+ (k[:-len('_impl')], getattr(self, k))
+ for k in dir(self) if k.endswith('_impl') and k != 'template_impl'
+ )
+ glob.update((k, v) for k, v in self.iteritems() if k not in glob)
+
+ # Any function argument to the template must be prepared to be sandboxed.
+ # If the template itself returns a function (in which case, it's very
+ # likely a decorator), that function must be prepared to be sandboxed as
+ # well.
+ def wrap_template(template):
+ isfunction = inspect.isfunction
+
+ def maybe_prepare_function(obj):
+ if isfunction(obj):
+ func, _ = self._prepare_function(obj)
+ return func
+ return obj
+
+ # The following function may end up being prepared to be sandboxed,
+ # so it mustn't depend on anything from the global scope in this
+ # file. It can however depend on variables from the closure, thus
+ # maybe_prepare_function and isfunction are declared above to be
+ # available there.
+ @wraps(template)
+ def wrapper(*args, **kwargs):
+ args = [maybe_prepare_function(arg) for arg in args]
+ kwargs = {k: maybe_prepare_function(v)
+ for k, v in kwargs.iteritems()}
+ ret = template(*args, **kwargs)
+ if isfunction(ret):
+ # We can't expect the sandboxed code to think about all the
+ # details of implementing decorators, so do some of the
+ # work for them. If the function takes exactly one function
+ # as argument and returns a function, it must be a
+ # decorator, so mark the returned function as wrapping the
+ # function passed in.
+ if len(args) == 1 and not kwargs and isfunction(args[0]):
+ ret = wraps(args[0])(ret)
+ return wrap_template(ret)
+ return ret
+ return wrapper
+
+ wrapper = wrap_template(template)
+ self._templates.add(wrapper)
+ return wrapper
+
+    RE_MODULE = re.compile(r'^[a-zA-Z0-9_\.]+$')
+
+ def imports_impl(self, _import, _from=None, _as=None):
+ '''Implementation of @imports.
+ This decorator imports the given _import from the given _from module
+ optionally under a different _as name.
+ The options correspond to the various forms for the import builtin.
+ @imports('sys')
+ @imports(_from='mozpack', _import='path', _as='mozpath')
+ '''
+ for value, required in (
+ (_import, True), (_from, False), (_as, False)):
+
+ if not isinstance(value, types.StringTypes) and (
+ required or value is not None):
+ raise TypeError("Unexpected type: '%s'" % type(value).__name__)
+ if value is not None and not self.RE_MODULE.match(value):
+ raise ValueError("Invalid argument to @imports: '%s'" % value)
+ if _as and '.' in _as:
+ raise ValueError("Invalid argument to @imports: '%s'" % _as)
+
+ def decorator(func):
+ if func in self._templates:
+ raise ConfigureError(
+ '@imports must appear after @template')
+ if func in self._depends:
+ raise ConfigureError(
+ '@imports must appear after @depends')
+ # For the imports to apply in the order they appear in the
+ # .configure file, we accumulate them in reverse order and apply
+ # them later.
+ imports = self._imports.setdefault(func, [])
+ imports.insert(0, (_from, _import, _as))
+ return func
+
+ return decorator
+
+ def _apply_imports(self, func, glob):
+ for _from, _import, _as in self._imports.get(func, ()):
+ _from = '%s.' % _from if _from else ''
+ if _as:
+ glob[_as] = self._get_one_import('%s%s' % (_from, _import))
+ else:
+ what = _import.split('.')[0]
+ glob[what] = self._get_one_import('%s%s' % (_from, what))
+
+ def _get_one_import(self, what):
+ # The special `__sandbox__` module gives access to the sandbox
+ # instance.
+ if what == '__sandbox__':
+ return self
+ # Special case for the open() builtin, because otherwise, using it
+ # fails with "IOError: file() constructor not accessible in
+ # restricted mode"
+ if what == '__builtin__.open':
+ return lambda *args, **kwargs: open(*args, **kwargs)
+ # Until this proves to be a performance problem, just construct an
+ # import statement and execute it.
+ import_line = ''
+ if '.' in what:
+ _from, what = what.rsplit('.', 1)
+ import_line += 'from %s ' % _from
+ import_line += 'import %s as imported' % what
+ glob = {}
+ exec_(import_line, {}, glob)
+ return glob['imported']
+
+ def _resolve_and_set(self, data, name, value, when=None):
+ # Don't set anything when --help was on the command line
+ if self._help:
+ return
+ if when and not self._value_for(when):
+ return
+ name = self._resolve(name, need_help_dependency=False)
+ if name is None:
+ return
+ if not isinstance(name, types.StringTypes):
+ raise TypeError("Unexpected type: '%s'" % type(name).__name__)
+ if name in data:
+ raise ConfigureError(
+ "Cannot add '%s' to configuration: Key already "
+ "exists" % name)
+ value = self._resolve(value, need_help_dependency=False)
+ if value is not None:
+ data[name] = value
+
+ def set_config_impl(self, name, value, when=None):
+ '''Implementation of set_config().
+ Set the configuration items with the given name to the given value.
+ Both `name` and `value` can be references to @depends functions,
+ in which case the result from these functions is used. If the result
+ of either function is None, the configuration item is not set.
+ '''
+ when = self._normalize_when(when, 'set_config')
+
+ self._execution_queue.append((
+ self._resolve_and_set, (self._config, name, value, when)))
+
+ def set_define_impl(self, name, value, when=None):
+ '''Implementation of set_define().
+ Set the define with the given name to the given value. Both `name` and
+ `value` can be references to @depends functions, in which case the
+ result from these functions is used. If the result of either function
+ is None, the define is not set. If the result is False, the define is
+ explicitly undefined (-U).
+ '''
+ when = self._normalize_when(when, 'set_define')
+
+ defines = self._config.setdefault('DEFINES', {})
+ self._execution_queue.append((
+ self._resolve_and_set, (defines, name, value, when)))
+
+ def imply_option_impl(self, option, value, reason=None, when=None):
+ '''Implementation of imply_option().
+ Injects additional options as if they had been passed on the command
+ line. The `option` argument is a string as in option()'s `name` or
+ `env`. The option must be declared after `imply_option` references it.
+ The `value` argument indicates the value to pass to the option.
+ It can be:
+ - True. In this case `imply_option` injects the positive option
+ (--enable-foo/--with-foo).
+ imply_option('--enable-foo', True)
+ imply_option('--disable-foo', True)
+ are both equivalent to `--enable-foo` on the command line.
+
+ - False. In this case `imply_option` injects the negative option
+ (--disable-foo/--without-foo).
+ imply_option('--enable-foo', False)
+ imply_option('--disable-foo', False)
+ are both equivalent to `--disable-foo` on the command line.
+
+ - None. In this case `imply_option` does nothing.
+ imply_option('--enable-foo', None)
+ imply_option('--disable-foo', None)
+ are both equivalent to not passing any flag on the command line.
+
+ - a string or a tuple. In this case `imply_option` injects the positive
+ option with the given value(s).
+ imply_option('--enable-foo', 'a')
+ imply_option('--disable-foo', 'a')
+ are both equivalent to `--enable-foo=a` on the command line.
+ imply_option('--enable-foo', ('a', 'b'))
+ imply_option('--disable-foo', ('a', 'b'))
+ are both equivalent to `--enable-foo=a,b` on the command line.
+
+ Because imply_option('--disable-foo', ...) can be misleading, it is
+ recommended to use the positive form ('--enable' or '--with') for
+ `option`.
+
+ The `value` argument can also be (and usually is) a reference to a
+ @depends function, in which case the result of that function will be
+        used as per the mapping described above.
+
+ The `reason` argument indicates what caused the option to be implied.
+ It is necessary when it cannot be inferred from the `value`.
+ '''
+ # Don't do anything when --help was on the command line
+ if self._help:
+ return
+ if not reason and isinstance(value, SandboxDependsFunction):
+ deps = self._depends[value].dependencies
+ possible_reasons = [d for d in deps if d != self._help_option]
+ if len(possible_reasons) == 1:
+ if isinstance(possible_reasons[0], Option):
+ reason = possible_reasons[0]
+ if not reason and (isinstance(value, (bool, tuple)) or
+ isinstance(value, types.StringTypes)):
+ # A reason can be provided automatically when imply_option
+ # is called with an immediate value.
+ _, filename, line, _, _, _ = inspect.stack()[1]
+ reason = "imply_option at %s:%s" % (filename, line)
+
+ if not reason:
+ raise ConfigureError(
+ "Cannot infer what implies '%s'. Please add a `reason` to "
+ "the `imply_option` call."
+ % option)
+
+ when = self._normalize_when(when, 'imply_option')
+
+ prefix, name, values = Option.split_option(option)
+ if values != ():
+ raise ConfigureError("Implied option must not contain an '='")
+
+ self._implied_options.append(ReadOnlyNamespace(
+ option=option,
+ prefix=prefix,
+ name=name,
+ value=value,
+ caller=inspect.stack()[1],
+ reason=reason,
+ when=when,
+ ))
+
+ def _prepare_function(self, func):
+ '''Alter the given function global namespace with the common ground
+ for @depends, and @template.
+ '''
+ if not inspect.isfunction(func):
+ raise TypeError("Unexpected type: '%s'" % type(func).__name__)
+ if func in self._prepared_functions:
+ return func, func.func_globals
+
+ glob = SandboxedGlobal(
+ (k, v) for k, v in func.func_globals.iteritems()
+ if (inspect.isfunction(v) and v not in self._templates) or (
+ inspect.isclass(v) and issubclass(v, Exception))
+ )
+ glob.update(
+ __builtins__=self.BUILTINS,
+ __file__=self._paths[-1] if self._paths else '',
+ __name__=self._paths[-1] if self._paths else '',
+ os=self.OS,
+ log=self.log_impl,
+ )
+
+ # The execution model in the sandbox doesn't guarantee the execution
+ # order will always be the same for a given function, and if it uses
+ # variables from a closure that are changed after the function is
+ # declared, depending when the function is executed, the value of the
+ # variable can differ. For consistency, we force the function to use
+ # the value from the earliest it can be run, which is at declaration.
+ # Note this is not entirely bullet proof (if the value is e.g. a list,
+ # the list contents could have changed), but covers the bases.
+ closure = None
+ if func.func_closure:
+ def makecell(content):
+ def f():
+ content
+ return f.func_closure[0]
+
+ closure = tuple(makecell(cell.cell_contents)
+ for cell in func.func_closure)
+
+ new_func = wraps(func)(types.FunctionType(
+ func.func_code,
+ glob,
+ func.__name__,
+ func.func_defaults,
+ closure
+ ))
+ @wraps(new_func)
+ def wrapped(*args, **kwargs):
+ if func in self._imports:
+ self._apply_imports(func, glob)
+ del self._imports[func]
+ return new_func(*args, **kwargs)
+
+ self._prepared_functions.add(wrapped)
+ return wrapped, glob
diff --git a/python/mozbuild/mozbuild/configure/check_debug_ranges.py b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
new file mode 100644
index 000000000..ca312dff4
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script returns the number of items for the DW_AT_ranges corresponding
+# to a given compilation unit. This is used as a helper to find a bug in some
+# versions of GNU ld.
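+#
+# Usage sketch (hypothetical paths):
+#
+#   python check_debug_ranges.py libxul.so /src/foo.cpp
+#
+# prints the number of .debug_ranges entries for that compilation unit,
+# 0 if the debug sections are missing, or -1 if the unit has no ranges.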
+
+from __future__ import absolute_import
+
+import subprocess
+import sys
+import re
+
+def get_range_for(compilation_unit, debug_info):
+ '''Returns the range offset for a given compilation unit
+ in a given debug_info.'''
+ name = ranges = ''
+ search_cu = False
+ for nfo in debug_info.splitlines():
+ if 'DW_TAG_compile_unit' in nfo:
+ search_cu = True
+ elif 'DW_TAG_' in nfo or not nfo.strip():
+ if name == compilation_unit and ranges != '':
+ return int(ranges, 16)
+ name = ranges = ''
+ search_cu = False
+ if search_cu:
+ if 'DW_AT_name' in nfo:
+ name = nfo.rsplit(None, 1)[1]
+ elif 'DW_AT_ranges' in nfo:
+ ranges = nfo.rsplit(None, 1)[1]
+ return None
+
+def get_range_length(range, debug_ranges):
+ '''Returns the number of items in the range starting at the
+ given offset.'''
+ length = 0
+ for line in debug_ranges.splitlines():
+        m = re.match(r'\s*([0-9a-fA-F]+)\s+([0-9a-fA-F]+)\s+([0-9a-fA-F]+)', line)
+ if m and int(m.group(1), 16) == range:
+ length += 1
+ return length
+
+def main(bin, compilation_unit):
+    p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE,
+                         stderr=subprocess.PIPE)
+ (out, err) = p.communicate()
+ sections = re.split('\n(Contents of the|The section) ', out)
+ debug_info = [s for s in sections if s.startswith('.debug_info')]
+ debug_ranges = [s for s in sections if s.startswith('.debug_ranges')]
+ if not debug_ranges or not debug_info:
+ return 0
+
+ range = get_range_for(compilation_unit, debug_info[0])
+ if range is not None:
+ return get_range_length(range, debug_ranges[0])
+
+ return -1
+
+
+if __name__ == '__main__':
+ print main(*sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/configure/constants.py b/python/mozbuild/mozbuild/configure/constants.py
new file mode 100644
index 000000000..dfc7cf8ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/constants.py
@@ -0,0 +1,103 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from mozbuild.util import EnumString
+from collections import OrderedDict
+
+
+CompilerType = EnumString.subclass(
+ 'clang',
+ 'clang-cl',
+ 'gcc',
+ 'msvc',
+)
+
+OS = EnumString.subclass(
+ 'Android',
+ 'DragonFly',
+ 'FreeBSD',
+ 'GNU',
+ 'iOS',
+ 'NetBSD',
+ 'OpenBSD',
+ 'OSX',
+ 'WINNT',
+)
+
+Kernel = EnumString.subclass(
+ 'Darwin',
+ 'DragonFly',
+ 'FreeBSD',
+ 'kFreeBSD',
+ 'Linux',
+ 'NetBSD',
+ 'OpenBSD',
+ 'WINNT',
+)
+
+CPU_bitness = {
+ 'aarch64': 64,
+ 'Alpha': 32,
+ 'arm': 32,
+ 'hppa': 32,
+ 'ia64': 64,
+ 'mips32': 32,
+ 'mips64': 64,
+ 'ppc': 32,
+ 'ppc64': 64,
+ 's390': 32,
+ 's390x': 64,
+ 'sparc': 32,
+ 'sparc64': 64,
+ 'x86': 32,
+ 'x86_64': 64,
+}
+
+CPU = EnumString.subclass(*CPU_bitness.keys())
+
+Endianness = EnumString.subclass(
+ 'big',
+ 'little',
+)
+
+WindowsBinaryType = EnumString.subclass(
+ 'win32',
+ 'win64',
+)
+
+# The order of these checks matters
+CPU_preprocessor_checks = OrderedDict((
+ ('x86', '__i386__ || _M_IX86'),
+ ('x86_64', '__x86_64__ || _M_X64'),
+ ('arm', '__arm__ || _M_ARM'),
+ ('aarch64', '__aarch64__'),
+ ('ia64', '__ia64__'),
+ ('s390x', '__s390x__'),
+ ('s390', '__s390__'),
+ ('ppc64', '__powerpc64__'),
+ ('ppc', '__powerpc__'),
+ ('Alpha', '__alpha__'),
+ ('hppa', '__hppa__'),
+ ('sparc64', '__sparc__ && __arch64__'),
+ ('sparc', '__sparc__'),
+ ('mips64', '__mips64'),
+ ('mips32', '__mips__'),
+))
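+# For example, '__s390x__' must be tested before '__s390__', and '__mips64'
+# before '__mips__', because the 64-bit targets also define the generic
+# macro; the first matching check wins.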
+
+assert sorted(CPU_preprocessor_checks.keys()) == sorted(CPU.POSSIBLE_VALUES)
+
+kernel_preprocessor_checks = {
+ 'Darwin': '__APPLE__',
+ 'DragonFly': '__DragonFly__',
+ 'FreeBSD': '__FreeBSD__',
+ 'kFreeBSD': '__FreeBSD_kernel__',
+ 'Linux': '__linux__',
+ 'NetBSD': '__NetBSD__',
+ 'OpenBSD': '__OpenBSD__',
+ 'WINNT': '_WIN32 || __CYGWIN__',
+}
+
+assert sorted(kernel_preprocessor_checks.keys()) == sorted(Kernel.POSSIBLE_VALUES)
diff --git a/python/mozbuild/mozbuild/configure/help.py b/python/mozbuild/mozbuild/configure/help.py
new file mode 100644
index 000000000..cd7876fbd
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/help.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+from mozbuild.configure.options import Option
+
+
+class HelpFormatter(object):
+ def __init__(self, argv0):
+ self.intro = ['Usage: %s [options]' % os.path.basename(argv0)]
+ self.options = ['Options: [defaults in brackets after descriptions]']
+ self.env = ['Environment variables:']
+
+ def add(self, option):
+ assert isinstance(option, Option)
+
+ if option.possible_origins == ('implied',):
+ # Don't display help if our option can only be implied.
+ return
+
+ # TODO: improve formatting
+ target = self.options if option.name else self.env
+ opt = option.option
+ if option.choices:
+ opt += '={%s}' % ','.join(option.choices)
+ help = option.help or ''
+ if len(option.default):
+ if help:
+ help += ' '
+ help += '[%s]' % ','.join(option.default)
+
+ if len(opt) > 24 or not help:
+ target.append(' %s' % opt)
+ if help:
+ target.append('%s%s' % (' ' * 28, help))
+ else:
+ target.append(' %-24s %s' % (opt, help))
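+        # e.g. add(Option('--enable-foo', help='Enable foo')) appends a line
+        # like ' --enable-foo             Enable foo' to self.options.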
+
+ def usage(self, out):
+ print('\n\n'.join('\n'.join(t)
+ for t in (self.intro, self.options, self.env)),
+ file=out)
diff --git a/python/mozbuild/mozbuild/configure/libstdcxx.py b/python/mozbuild/mozbuild/configure/libstdcxx.py
new file mode 100644
index 000000000..cab0ccb11
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/libstdcxx.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+# This script finds the version of libstdc++ and prints it as a single number
+# with 8 bits per element. For example, GLIBCXX_3.4.10 becomes
+# 3 << 16 | 4 << 8 | 10 = 197642. This format is easy to use
+# in the C preprocessor.
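+#
+# A sketch of how the encoded value might be consumed from C, assuming the
+# printed value ends up as a preprocessor define:
+#
+#   #if MOZ_LIBSTDCXX_TARGET_VERSION < (3 << 16 | 4 << 8 | 10)
+#   #error libstdc++ 3.4.10 or newer is required
+#   #endif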
+
+# We find out both the host and target versions. Since the output
+# will be used from shell, we just print the two assignments and evaluate
+# them from shell.
+
+from __future__ import absolute_import
+
+import os
+import subprocess
+import re
+
+re_for_ld = re.compile(r'.*\((.*)\).*')
+
+def parse_readelf_line(x):
+ """Return the version from a readelf line that looks like:
+ 0x00ec: Rev: 1 Flags: none Index: 8 Cnt: 2 Name: GLIBCXX_3.4.6
+ """
+ return x.split(':')[-1].split('_')[-1].strip()
+
+def parse_ld_line(x):
+ """Parse a line from the output of ld -t. The output of gold is just
+ the full path, gnu ld prints "-lstdc++ (path)".
+ """
+ t = re_for_ld.match(x)
+ if t:
+ return t.groups()[0].strip()
+ return x.strip()
+
+def split_ver(v):
+ """Covert the string '1.2.3' into the list [1,2,3]
+ """
+ return [int(x) for x in v.split('.')]
+
+def cmp_ver(a, b):
+ """Compare versions in the form 'a.b.c'
+ """
+ for (i, j) in zip(split_ver(a), split_ver(b)):
+ if i != j:
+ return i - j
+ return 0
+
+def encode_ver(v):
+ """Encode the version as a single number.
+ """
+ t = split_ver(v)
+ return t[0] << 16 | t[1] << 8 | t[2]
+
+def find_version(e):
+ """Given the value of environment variable CXX or HOST_CXX, find the
+ version of the libstdc++ it uses.
+ """
+ args = e.split()
+ args += ['-shared', '-Wl,-t']
+ p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+ candidates = [x for x in p.stdout if 'libstdc++.so' in x]
+ if not candidates:
+ return ''
+ assert len(candidates) == 1
+ libstdcxx = parse_ld_line(candidates[-1])
+
+ p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)
+ versions = [parse_readelf_line(x)
+ for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]
+    last_version = sorted(versions, cmp=cmp_ver)[-1]
+ return encode_ver(last_version)
+
+if __name__ == '__main__':
+ cxx_env = os.environ['CXX']
+ print 'MOZ_LIBSTDCXX_TARGET_VERSION=%s' % find_version(cxx_env)
+ host_cxx_env = os.environ.get('HOST_CXX', cxx_env)
+ print 'MOZ_LIBSTDCXX_HOST_VERSION=%s' % find_version(host_cxx_env)
diff --git a/python/mozbuild/mozbuild/configure/lint.py b/python/mozbuild/mozbuild/configure/lint.py
new file mode 100644
index 000000000..e0a5c8328
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/lint.py
@@ -0,0 +1,78 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from StringIO import StringIO
+from . import (
+ CombinedDependsFunction,
+ ConfigureError,
+ ConfigureSandbox,
+ DependsFunction,
+)
+from .lint_util import disassemble_as_iter
+from mozbuild.util import memoize
+
+
+class LintSandbox(ConfigureSandbox):
+ def __init__(self, environ=None, argv=None, stdout=None, stderr=None):
+ out = StringIO()
+ stdout = stdout or out
+ stderr = stderr or out
+ environ = environ or {}
+ argv = argv or []
+ self._wrapped = {}
+ super(LintSandbox, self).__init__({}, environ=environ, argv=argv,
+ stdout=stdout, stderr=stderr)
+
+ def run(self, path=None):
+ if path:
+ self.include_file(path)
+
+ def _missing_help_dependency(self, obj):
+ if isinstance(obj, CombinedDependsFunction):
+ return False
+ if isinstance(obj, DependsFunction):
+ if (self._help_option in obj.dependencies or
+ obj in (self._always, self._never)):
+ return False
+ func, glob = self._wrapped[obj.func]
+ # We allow missing --help dependencies for functions that:
+ # - don't use @imports
+ # - don't have a closure
+ # - don't use global variables
+ if func in self._imports or func.func_closure:
+ return True
+ for op, arg in disassemble_as_iter(func):
+ if op in ('LOAD_GLOBAL', 'STORE_GLOBAL'):
+ # There is a fake os module when one is not imported,
+ # and it's allowed for functions without a --help
+ # dependency.
+ if arg == 'os' and glob.get('os') is self.OS:
+ continue
+ return True
+ return False
+
+ @memoize
+ def _value_for_depends(self, obj, need_help_dependency=False):
+ with_help = self._help_option in obj.dependencies
+ if with_help:
+ for arg in obj.dependencies:
+ if self._missing_help_dependency(arg):
+ raise ConfigureError(
+ "`%s` depends on '--help' and `%s`. "
+ "`%s` must depend on '--help'"
+ % (obj.name, arg.name, arg.name))
+ elif ((self._help or need_help_dependency) and
+ self._missing_help_dependency(obj)):
+ raise ConfigureError("Missing @depends for `%s`: '--help'" %
+ obj.name)
+ return super(LintSandbox, self)._value_for_depends(
+ obj, need_help_dependency)
+
+ def _prepare_function(self, func):
+ wrapped, glob = super(LintSandbox, self)._prepare_function(func)
+ if wrapped not in self._wrapped:
+ self._wrapped[wrapped] = func, glob
+ return wrapped, glob
diff --git a/python/mozbuild/mozbuild/configure/lint_util.py b/python/mozbuild/mozbuild/configure/lint_util.py
new file mode 100644
index 000000000..f1c2f8731
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/lint_util.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import dis
+import inspect
+
+
+# dis.dis only outputs to stdout. This is a modified version that
+# returns an iterator.
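+#
+# For example, under CPython 2, list(disassemble_as_iter(lambda: x)) yields
+# [('LOAD_GLOBAL', 'x'), ('RETURN_VALUE', None)].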
+def disassemble_as_iter(co):
+ if inspect.ismethod(co):
+ co = co.im_func
+ if inspect.isfunction(co):
+ co = co.func_code
+ code = co.co_code
+ n = len(code)
+ i = 0
+ extended_arg = 0
+ free = None
+ while i < n:
+ c = code[i]
+ op = ord(c)
+ opname = dis.opname[op]
+        i += 1
+ if op >= dis.HAVE_ARGUMENT:
+ arg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
+ extended_arg = 0
+ i += 2
+ if op == dis.EXTENDED_ARG:
+ extended_arg = arg * 65536L
+ continue
+ if op in dis.hasconst:
+ yield opname, co.co_consts[arg]
+ elif op in dis.hasname:
+ yield opname, co.co_names[arg]
+ elif op in dis.hasjrel:
+ yield opname, i + arg
+ elif op in dis.haslocal:
+ yield opname, co.co_varnames[arg]
+ elif op in dis.hascompare:
+ yield opname, dis.cmp_op[arg]
+ elif op in dis.hasfree:
+ if free is None:
+ free = co.co_cellvars + co.co_freevars
+ yield opname, free[arg]
+ else:
+ yield opname, None
+ else:
+ yield opname, None
diff --git a/python/mozbuild/mozbuild/configure/options.py b/python/mozbuild/mozbuild/configure/options.py
new file mode 100644
index 000000000..4310c8627
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/options.py
@@ -0,0 +1,485 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import sys
+import types
+from collections import OrderedDict
+
+
+def istupleofstrings(obj):
+ return isinstance(obj, tuple) and len(obj) and all(
+ isinstance(o, types.StringTypes) for o in obj)
+
+
+class OptionValue(tuple):
+ '''Represents the value of a configure option.
+
+ This class is not meant to be used directly. Use its subclasses instead.
+
+ The `origin` attribute holds where the option comes from (e.g. environment,
+    command line, or default).
+ '''
+ def __new__(cls, values=(), origin='unknown'):
+ return super(OptionValue, cls).__new__(cls, values)
+
+ def __init__(self, values=(), origin='unknown'):
+ self.origin = origin
+
+ def format(self, option):
+ if option.startswith('--'):
+ prefix, name, values = Option.split_option(option)
+ assert values == ()
+ for prefix_set in (
+ ('disable', 'enable'),
+ ('without', 'with'),
+ ):
+ if prefix in prefix_set:
+ prefix = prefix_set[int(bool(self))]
+ break
+ if prefix:
+ option = '--%s-%s' % (prefix, name)
+ elif self:
+ option = '--%s' % name
+ else:
+ return ''
+ if len(self):
+ return '%s=%s' % (option, ','.join(self))
+ return option
+ elif self and not len(self):
+ return '%s=1' % option
+ return '%s=%s' % (option, ','.join(self))
+
+ def __eq__(self, other):
+ if type(other) != type(self):
+ return False
+ return super(OptionValue, self).__eq__(other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return '%s%s' % (self.__class__.__name__,
+ super(OptionValue, self).__repr__())
+
+
+class PositiveOptionValue(OptionValue):
+ '''Represents the value for a positive option (--enable/--with/--foo)
+    in the form of a tuple for when values are given to the option (in the
+    form --option=value[,value2...]).
+ '''
+ def __nonzero__(self):
+ return True
+
+
+class NegativeOptionValue(OptionValue):
+ '''Represents the value for a negative option (--disable/--without)
+
+    This is effectively an empty tuple with an `origin` attribute.
+ '''
+ def __new__(cls, origin='unknown'):
+ return super(NegativeOptionValue, cls).__new__(cls, origin=origin)
+
+ def __init__(self, origin='unknown'):
+ return super(NegativeOptionValue, self).__init__(origin=origin)
+
+
+class InvalidOptionError(Exception):
+ pass
+
+
+class ConflictingOptionError(InvalidOptionError):
+ def __init__(self, message, **format_data):
+ if format_data:
+ message = message.format(**format_data)
+ super(ConflictingOptionError, self).__init__(message)
+ for k, v in format_data.iteritems():
+ setattr(self, k, v)
+
+
+class Option(object):
+ '''Represents a configure option
+
+ A configure option can be a command line flag or an environment variable
+ or both.
+
+ - `name` is the full command line flag (e.g. --enable-foo).
+ - `env` is the environment variable name (e.g. ENV)
+ - `nargs` is the number of arguments the option may take. It can be a
+ number or the special values '?' (0 or 1), '*' (0 or more), or '+' (1 or
+ more).
+ - `default` can be used to give a default value to the option. When the
+ `name` of the option starts with '--enable-' or '--with-', the implied
+ default is an empty PositiveOptionValue. When it starts with '--disable-'
+ or '--without-', the implied default is a NegativeOptionValue.
+ - `choices` restricts the set of values that can be given to the option.
+ - `help` is the option description for use in the --help output.
+ - `possible_origins` is a tuple of strings that are origins accepted for
+ this option. Example origins are 'mozconfig', 'implied', and 'environment'.
+ '''
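+    # Sketch (hypothetical flag): an option taking 0 or 1 values, enabled
+    # by default, so the help output shows it as --disable-foo:
+    #
+    #     Option('--enable-foo', nargs='?', default=True, help='Enable foo')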
+ __slots__ = (
+ 'id', 'prefix', 'name', 'env', 'nargs', 'default', 'choices', 'help',
+ 'possible_origins',
+ )
+
+ def __init__(self, name=None, env=None, nargs=None, default=None,
+ possible_origins=None, choices=None, help=None):
+ if not name and not env:
+ raise InvalidOptionError(
+ 'At least an option name or an environment variable name must '
+ 'be given')
+ if name:
+ if not isinstance(name, types.StringTypes):
+ raise InvalidOptionError('Option must be a string')
+ if not name.startswith('--'):
+ raise InvalidOptionError('Option must start with `--`')
+ if '=' in name:
+ raise InvalidOptionError('Option must not contain an `=`')
+ if not name.islower():
+ raise InvalidOptionError('Option must be all lowercase')
+ if env:
+ if not isinstance(env, types.StringTypes):
+ raise InvalidOptionError(
+ 'Environment variable name must be a string')
+ if not env.isupper():
+ raise InvalidOptionError(
+ 'Environment variable name must be all uppercase')
+ if nargs not in (None, '?', '*', '+') and not (
+ isinstance(nargs, int) and nargs >= 0):
+ raise InvalidOptionError(
+ "nargs must be a positive integer, '?', '*' or '+'")
+ if (not isinstance(default, types.StringTypes) and
+ not isinstance(default, (bool, types.NoneType)) and
+ not istupleofstrings(default)):
+ raise InvalidOptionError(
+ 'default must be a bool, a string or a tuple of strings')
+ if choices and not istupleofstrings(choices):
+ raise InvalidOptionError(
+ 'choices must be a tuple of strings')
+ if not help:
+ raise InvalidOptionError('A help string must be provided')
+ if possible_origins and not istupleofstrings(possible_origins):
+ raise InvalidOptionError(
+ 'possible_origins must be a tuple of strings')
+ self.possible_origins = possible_origins
+
+ if name:
+ prefix, name, values = self.split_option(name)
+ assert values == ()
+
+ # --disable and --without options mean the default is enabled.
+ # --enable and --with options mean the default is disabled.
+ # However, we allow a default to be given so that the default
+ # can be affected by other factors.
+ if prefix:
+ if default is None:
+ default = prefix in ('disable', 'without')
+ elif default is False:
+ prefix = {
+ 'disable': 'enable',
+ 'without': 'with',
+ }.get(prefix, prefix)
+ elif default is True:
+ prefix = {
+ 'enable': 'disable',
+ 'with': 'without',
+ }.get(prefix, prefix)
+ else:
+ prefix = ''
+
+ self.prefix = prefix
+ self.name = name
+ self.env = env
+ if default in (None, False):
+ self.default = NegativeOptionValue(origin='default')
+ elif isinstance(default, tuple):
+ self.default = PositiveOptionValue(default, origin='default')
+ elif default is True:
+ self.default = PositiveOptionValue(origin='default')
+ else:
+ self.default = PositiveOptionValue((default,), origin='default')
+ if nargs is None:
+ nargs = 0
+ if len(self.default) == 1:
+ nargs = '?'
+ elif len(self.default) > 1:
+ nargs = '*'
+ elif choices:
+ nargs = 1
+ self.nargs = nargs
+ has_choices = choices is not None
+ if isinstance(self.default, PositiveOptionValue):
+ if has_choices and len(self.default) == 0:
+ raise InvalidOptionError(
+ 'A `default` must be given along with `choices`')
+ if not self._validate_nargs(len(self.default)):
+ raise InvalidOptionError(
+ "The given `default` doesn't satisfy `nargs`")
+ if has_choices and not all(d in choices for d in self.default):
+ raise InvalidOptionError(
+ 'The `default` value must be one of %s' %
+ ', '.join("'%s'" % c for c in choices))
+ elif has_choices:
+ maxargs = self.maxargs
+ if len(choices) < maxargs and maxargs != sys.maxint:
+ raise InvalidOptionError('Not enough `choices` for `nargs`')
+ self.choices = choices
+ self.help = help
+
+ @staticmethod
+ def split_option(option):
+ '''Split a flag or variable into a prefix, a name and values
+
+ Variables come in the form NAME=values (no prefix).
+ Flags come in the form --name=values or --prefix-name=values
+ where prefix is one of 'with', 'without', 'enable' or 'disable'.
+ The '=values' part is optional. Values are separated with commas.
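+
+        Illustrative results (hypothetical inputs):
+
+            split_option('--enable-foo')    # ('enable', 'foo', ())
+            split_option('--with-bar=a,b')  # ('with', 'bar', ('a', 'b'))
+            split_option('MOZ_FOO=1')       # ('', 'MOZ_FOO', ('1',))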
+ '''
+ if not isinstance(option, types.StringTypes):
+ raise InvalidOptionError('Option must be a string')
+
+ elements = option.split('=', 1)
+ name = elements[0]
+ values = tuple(elements[1].split(',')) if len(elements) == 2 else ()
+ if name.startswith('--'):
+ name = name[2:]
+ if not name.islower():
+ raise InvalidOptionError('Option must be all lowercase')
+ elements = name.split('-', 1)
+ prefix = elements[0]
+ if len(elements) == 2 and prefix in ('enable', 'disable',
+ 'with', 'without'):
+ return prefix, elements[1], values
+ else:
+ if name.startswith('-'):
+ raise InvalidOptionError(
+ 'Option must start with two dashes instead of one')
+ if name.islower():
+ raise InvalidOptionError(
+ 'Environment variable name must be all uppercase')
+ return '', name, values
+
+ @staticmethod
+ def _join_option(prefix, name):
+ # The constraints around name and env in __init__ make it so that
+ # we can distinguish between flags and environment variables with
+ # islower/isupper.
+ if name.isupper():
+ assert not prefix
+ return name
+ elif prefix:
+ return '--%s-%s' % (prefix, name)
+ return '--%s' % name
+
+ @property
+ def option(self):
+ if self.prefix or self.name:
+ return self._join_option(self.prefix, self.name)
+ else:
+ return self.env
+
+ @property
+ def minargs(self):
+ if isinstance(self.nargs, int):
+ return self.nargs
+ return 1 if self.nargs == '+' else 0
+
+ @property
+ def maxargs(self):
+ if isinstance(self.nargs, int):
+ return self.nargs
+ return 1 if self.nargs == '?' else sys.maxint
+
+ def _validate_nargs(self, num):
+ minargs, maxargs = self.minargs, self.maxargs
+        return minargs <= num <= maxargs
+
+ def get_value(self, option=None, origin='unknown'):
+ '''Given a full command line option (e.g. --enable-foo=bar) or a
+ variable assignment (FOO=bar), returns the corresponding OptionValue.
+
+ Note: variable assignments can come from either the environment or
+ from the command line (e.g. `../configure CFLAGS=-O2`)
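+
+        Illustrative sketch, assuming Option('--with-foo', nargs='*',
+        help='foo'):
+
+            get_value('--with-foo=a,b')  # PositiveOptionValue(('a', 'b'))
+            get_value('--without-foo')   # NegativeOptionValue()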
+ '''
+ if not option:
+ return self.default
+
+ if self.possible_origins and origin not in self.possible_origins:
+ raise InvalidOptionError(
+                '%s cannot be set by %s. Values are accepted from: %s' %
+ (option, origin, ', '.join(self.possible_origins)))
+
+ prefix, name, values = self.split_option(option)
+ option = self._join_option(prefix, name)
+
+ assert name in (self.name, self.env)
+
+ if prefix in ('disable', 'without'):
+ if values != ():
+ raise InvalidOptionError('Cannot pass a value to %s' % option)
+ return NegativeOptionValue(origin=origin)
+
+ if name == self.env:
+ if values == ('',):
+ return NegativeOptionValue(origin=origin)
+ if self.nargs in (0, '?', '*') and values == ('1',):
+ return PositiveOptionValue(origin=origin)
+
+ values = PositiveOptionValue(values, origin=origin)
+
+ if not self._validate_nargs(len(values)):
+ raise InvalidOptionError('%s takes %s value%s' % (
+ option,
+ {
+ '?': '0 or 1',
+ '*': '0 or more',
+ '+': '1 or more',
+ }.get(self.nargs, str(self.nargs)),
+ 's' if (not isinstance(self.nargs, int) or
+ self.nargs != 1) else ''
+ ))
+
+ if len(values) and self.choices:
+ relative_result = None
+ for val in values:
+ if self.nargs in ('+', '*'):
+ if val.startswith(('+', '-')):
+ if relative_result is None:
+ relative_result = list(self.default)
+ sign = val[0]
+ val = val[1:]
+ if sign == '+':
+ if val not in relative_result:
+ relative_result.append(val)
+ else:
+ try:
+ relative_result.remove(val)
+ except ValueError:
+ pass
+
+ if val not in self.choices:
+ raise InvalidOptionError(
+ "'%s' is not one of %s"
+ % (val, ', '.join("'%s'" % c for c in self.choices)))
+
+ if relative_result is not None:
+ values = PositiveOptionValue(relative_result, origin=origin)
+
+ return values
+
+ def __repr__(self):
+ return '<%s.%s [%s]>' % (self.__class__.__module__,
+ self.__class__.__name__, self.option)
+
+
+class CommandLineHelper(object):
+    '''Helper class to handle the various ways options can be given, either
+    on the command line or through the environment.
+
+ For instance, an Option('--foo', env='FOO') can be passed as --foo on the
+ command line, or as FOO=1 in the environment *or* on the command line.
+
+    If multiple variants are given, the command line is preferred over the
+    environment, and if different values are given on the command line, the
+    last one wins. (This mimics the behavior of autoconf, and avoids breaking
+    existing mozconfigs that use valid options in weird ways.)
+
+ Extra options can be added afterwards through API calls. For those,
+ conflicting values will raise an exception.
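+
+    A minimal sketch of the intended flow (option name assumed for
+    illustration):
+
+        helper = CommandLineHelper({}, ['configure', '--enable-foo'])
+        foo = Option('--enable-foo', help='foo')
+        value, arg = helper.handle(foo)
+        # value is a PositiveOptionValue, arg is '--enable-foo'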
+ '''
+ def __init__(self, environ=os.environ, argv=sys.argv):
+ self._environ = dict(environ)
+ self._args = OrderedDict()
+ self._extra_args = OrderedDict()
+ self._origins = {}
+ self._last = 0
+
+ for arg in argv[1:]:
+ self.add(arg, 'command-line', self._args)
+
+ def add(self, arg, origin='command-line', args=None):
+ assert origin != 'default'
+ prefix, name, values = Option.split_option(arg)
+ if args is None:
+ args = self._extra_args
+ if args is self._extra_args and name in self._extra_args:
+ old_arg = self._extra_args[name][0]
+ old_prefix, _, old_values = Option.split_option(old_arg)
+ if prefix != old_prefix or values != old_values:
+ raise ConflictingOptionError(
+ "Cannot add '{arg}' to the {origin} set because it "
+ "conflicts with '{old_arg}' that was added earlier",
+ arg=arg, origin=origin, old_arg=old_arg,
+ old_origin=self._origins[old_arg])
+ self._last += 1
+ args[name] = arg, self._last
+ self._origins[arg] = origin
+
+ def _prepare(self, option, args):
+ arg = None
+ origin = 'command-line'
+ from_name = args.get(option.name)
+ from_env = args.get(option.env)
+ if from_name and from_env:
+ arg1, pos1 = from_name
+ arg2, pos2 = from_env
+ arg, pos = (arg1, pos1) if abs(pos1) > abs(pos2) else (arg2, pos2)
+ if args is self._extra_args and (option.get_value(arg1) !=
+ option.get_value(arg2)):
+ origin = self._origins[arg]
+ old_arg = arg2 if abs(pos1) > abs(pos2) else arg1
+ raise ConflictingOptionError(
+ "Cannot add '{arg}' to the {origin} set because it "
+ "conflicts with '{old_arg}' that was added earlier",
+ arg=arg, origin=origin, old_arg=old_arg,
+ old_origin=self._origins[old_arg])
+ elif from_name or from_env:
+ arg, pos = from_name if from_name else from_env
+ elif option.env and args is self._args:
+ env = self._environ.get(option.env)
+ if env is not None:
+ arg = '%s=%s' % (option.env, env)
+ origin = 'environment'
+
+ origin = self._origins.get(arg, origin)
+
+ for k in (option.name, option.env):
+ try:
+ del args[k]
+ except KeyError:
+ pass
+
+ return arg, origin
+
+ def handle(self, option):
+        '''Return a tuple of the OptionValue corresponding to the given
+        Option instance, depending on the command line, environment, and
+        extra arguments, and of the actual option or variable that set it.
+
+        Only works once for a given Option.
+ '''
+ assert isinstance(option, Option)
+
+ arg, origin = self._prepare(option, self._args)
+ ret = option.get_value(arg, origin)
+
+ extra_arg, extra_origin = self._prepare(option, self._extra_args)
+ extra_ret = option.get_value(extra_arg, extra_origin)
+
+ if extra_ret.origin == 'default':
+ return ret, arg
+
+ if ret.origin != 'default' and extra_ret != ret:
+ raise ConflictingOptionError(
+ "Cannot add '{arg}' to the {origin} set because it conflicts "
+ "with {old_arg} from the {old_origin} set", arg=extra_arg,
+ origin=extra_ret.origin, old_arg=arg, old_origin=ret.origin)
+
+ return extra_ret, extra_arg
+
+ def __iter__(self):
+ for d in (self._args, self._extra_args):
+ for arg, pos in d.itervalues():
+ yield arg
diff --git a/python/mozbuild/mozbuild/configure/util.py b/python/mozbuild/mozbuild/configure/util.py
new file mode 100644
index 000000000..c7a305282
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/util.py
@@ -0,0 +1,226 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import codecs
+import itertools
+import locale
+import logging
+import os
+import sys
+from collections import deque
+from contextlib import contextmanager
+from distutils.version import LooseVersion
+
+def getpreferredencoding():
+ # locale._parse_localename makes locale.getpreferredencoding
+ # return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
+ # 'ANSI_X3.4-1968' when it uses nl_langinfo.
+ encoding = None
+ try:
+ encoding = locale.getpreferredencoding()
+ except ValueError:
+ # On english OSX, LC_ALL is UTF-8 (not en-US.UTF-8), and
+ # that throws off locale._parse_localename, which ends up
+ # being used on e.g. homebrew python.
+ if os.environ.get('LC_ALL', '').upper() == 'UTF-8':
+ encoding = 'utf-8'
+ return encoding
+
+class Version(LooseVersion):
+ '''A simple subclass of distutils.version.LooseVersion.
+ Adds attributes for `major`, `minor`, `patch` for the first three
+ version components so users can easily pull out major/minor
+ versions, like:
+
+ v = Version('1.2b')
+ v.major == 1
+ v.minor == 2
+ v.patch == 0
+ '''
+ def __init__(self, version):
+ # Can't use super, LooseVersion's base class is not a new-style class.
+ LooseVersion.__init__(self, version)
+ # Take the first three integer components, stopping at the first
+ # non-integer and padding the rest with zeroes.
+ (self.major, self.minor, self.patch) = list(itertools.chain(
+            itertools.takewhile(lambda x: isinstance(x, int), self.version),
+ (0, 0, 0)))[:3]
+
+
+ def __cmp__(self, other):
+ # LooseVersion checks isinstance(StringType), so work around it.
+ if isinstance(other, unicode):
+ other = other.encode('ascii')
+ return LooseVersion.__cmp__(self, other)
+
+
+class ConfigureOutputHandler(logging.Handler):
+ '''A logging handler class that sends info messages to stdout and other
+ messages to stderr.
+
+ Messages sent to stdout are not formatted with the attached Formatter.
+ Additionally, if they end with '... ', no newline character is printed,
+ making the next message printed follow the '... '.
+
+    Only messages at log level INFO and above are logged.
+
+ Messages below that level can be kept until an ERROR message is received,
+ at which point the last `maxlen` accumulated messages below INFO are
+ printed out. This feature is only enabled under the `queue_debug` context
+ manager.
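+
+    Typical wiring, shown here for illustration only:
+
+        handler = ConfigureOutputHandler()
+        logger = logging.getLogger('configure')
+        logger.addHandler(handler)
+        with handler.queue_debug():
+            logger.debug('queued; only printed if an ERROR follows')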
+ '''
+ def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
+ super(ConfigureOutputHandler, self).__init__()
+
+ # Python has this feature where it sets the encoding of pipes to
+ # ascii, which blatantly fails when trying to print out non-ascii.
+ def fix_encoding(fh):
+ try:
+ isatty = fh.isatty()
+ except AttributeError:
+ isatty = True
+
+ if not isatty:
+ encoding = getpreferredencoding()
+ if encoding:
+ return codecs.getwriter(encoding)(fh)
+ return fh
+
+ self._stdout = fix_encoding(stdout)
+ self._stderr = fix_encoding(stderr) if stdout != stderr else self._stdout
+ try:
+ fd1 = self._stdout.fileno()
+ fd2 = self._stderr.fileno()
+ self._same_output = self._is_same_output(fd1, fd2)
+ except AttributeError:
+ self._same_output = self._stdout == self._stderr
+ self._stdout_waiting = None
+ self._debug = deque(maxlen=maxlen + 1)
+ self._keep_if_debug = self.THROW
+ self._queue_is_active = False
+
+ @staticmethod
+ def _is_same_output(fd1, fd2):
+ if fd1 == fd2:
+ return True
+ stat1 = os.fstat(fd1)
+ stat2 = os.fstat(fd2)
+ return stat1.st_ino == stat2.st_ino and stat1.st_dev == stat2.st_dev
+
+ # possible values for _stdout_waiting
+ WAITING = 1
+ INTERRUPTED = 2
+
+ # possible values for _keep_if_debug
+ THROW = 0
+ KEEP = 1
+ PRINT = 2
+
+ def emit(self, record):
+ try:
+ if record.levelno == logging.INFO:
+ stream = self._stdout
+ msg = record.getMessage()
+ if (self._stdout_waiting == self.INTERRUPTED and
+ self._same_output):
+ msg = ' ... %s' % msg
+                if msg.endswith('... '):
+                    self._stdout_waiting = self.WAITING
+                else:
+                    self._stdout_waiting = None
+                    msg = '%s\n' % msg
+ elif (record.levelno < logging.INFO and
+ self._keep_if_debug != self.PRINT):
+ if self._keep_if_debug == self.KEEP:
+ self._debug.append(record)
+ return
+ else:
+ if record.levelno >= logging.ERROR and len(self._debug):
+ self._emit_queue()
+
+ if self._stdout_waiting == self.WAITING and self._same_output:
+ self._stdout_waiting = self.INTERRUPTED
+ self._stdout.write('\n')
+ self._stdout.flush()
+ stream = self._stderr
+ msg = '%s\n' % self.format(record)
+ stream.write(msg)
+ stream.flush()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ self.handleError(record)
+
+ @contextmanager
+ def queue_debug(self):
+ if self._queue_is_active:
+ yield
+ return
+ self._queue_is_active = True
+ self._keep_if_debug = self.KEEP
+ try:
+ yield
+ except Exception:
+ self._emit_queue()
+ # The exception will be handled and very probably printed out by
+ # something upper in the stack.
+ raise
+ finally:
+ self._queue_is_active = False
+ self._keep_if_debug = self.THROW
+ self._debug.clear()
+
+ def _emit_queue(self):
+ self._keep_if_debug = self.PRINT
+ if len(self._debug) == self._debug.maxlen:
+ r = self._debug.popleft()
+ self.emit(logging.LogRecord(
+ r.name, r.levelno, r.pathname, r.lineno,
+ '<truncated - see config.log for full output>',
+ (), None))
+ while True:
+ try:
+ self.emit(self._debug.popleft())
+ except IndexError:
+ break
+ self._keep_if_debug = self.KEEP
+
+
+class LineIO(object):
+ '''File-like class that sends each line of the written data to a callback
+ (without carriage returns).
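+
+    For illustration, assuming a `callback` function:
+
+        with LineIO(callback) as out:
+            out.write('two\nlines\n')
+            # invokes callback('two'), then callback('lines')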
+ '''
+ def __init__(self, callback):
+ self._callback = callback
+ self._buf = ''
+ self._encoding = getpreferredencoding()
+
+ def write(self, buf):
+ if self._encoding and isinstance(buf, str):
+ buf = buf.decode(self._encoding)
+ lines = buf.splitlines()
+ if not lines:
+ return
+ if self._buf:
+ lines[0] = self._buf + lines[0]
+ self._buf = ''
+ if not buf.endswith('\n'):
+ self._buf = lines.pop()
+
+ for line in lines:
+ self._callback(line)
+
+ def close(self):
+ if self._buf:
+ self._callback(self._buf)
+ self._buf = ''
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
diff --git a/python/mozbuild/mozbuild/controller/__init__.py b/python/mozbuild/mozbuild/controller/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/__init__.py
diff --git a/python/mozbuild/mozbuild/controller/building.py b/python/mozbuild/mozbuild/controller/building.py
new file mode 100644
index 000000000..663f789b8
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/building.py
@@ -0,0 +1,680 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import getpass
+import json
+import logging
+import os
+import platform
+import subprocess
+import sys
+import time
+import which
+
+from collections import (
+ namedtuple,
+ OrderedDict,
+)
+
+try:
+ import psutil
+except Exception:
+ psutil = None
+
+from mozsystemmonitor.resourcemonitor import SystemResourceMonitor
+
+import mozpack.path as mozpath
+
+from ..base import MozbuildObject
+
+from ..testing import install_test_files
+
+from ..compilation.warnings import (
+ WarningsCollector,
+ WarningsDatabase,
+)
+
+from textwrap import TextWrapper
+
+INSTALL_TESTS_CLOBBER = ''.join([TextWrapper().fill(line) + '\n' for line in
+'''
+The build system was unable to install tests because the CLOBBER file has \
+been updated. This means if you edited any test files, your changes may not \
+be picked up until a full/clobber build is performed.
+
+The easiest and fastest way to perform a clobber build is to run:
+
+ $ mach clobber
+ $ mach build
+
+If you did not modify any test files, it is safe to ignore this message \
+and proceed with running tests. To do this run:
+
+ $ touch {clobber_file}
+'''.splitlines()])
+
+
+
+BuildOutputResult = namedtuple('BuildOutputResult',
+ ('warning', 'state_changed', 'for_display'))
+
+
+class TierStatus(object):
+ """Represents the state and progress of tier traversal.
+
+ The build system is organized into linear phases called tiers. Each tier
+ executes in the order it was defined, 1 at a time.
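+
+    Illustrative lifecycle, assuming a SystemResourceMonitor instance:
+
+        tiers = TierStatus(resource_monitor)
+        tiers.set_tiers(['export', 'compile', 'libs'])
+        tiers.begin_tier('export')
+        # ... tier runs ...
+        tiers.finish_tier('export')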
+ """
+
+ def __init__(self, resources):
+ """Accepts a SystemResourceMonitor to record results against."""
+ self.tiers = OrderedDict()
+ self.tier_status = OrderedDict()
+ self.resources = resources
+
+ def set_tiers(self, tiers):
+ """Record the set of known tiers."""
+ for tier in tiers:
+ self.tiers[tier] = dict(
+ begin_time=None,
+ finish_time=None,
+ duration=None,
+ )
+ self.tier_status[tier] = None
+
+ def begin_tier(self, tier):
+ """Record that execution of a tier has begun."""
+ self.tier_status[tier] = 'active'
+ t = self.tiers[tier]
+ # We should ideally use a monotonic clock here. Unfortunately, we won't
+ # have one until Python 3.
+ t['begin_time'] = time.time()
+ self.resources.begin_phase(tier)
+
+ def finish_tier(self, tier):
+ """Record that execution of a tier has finished."""
+ self.tier_status[tier] = 'finished'
+ t = self.tiers[tier]
+ t['finish_time'] = time.time()
+ t['duration'] = self.resources.finish_phase(tier)
+
+ def tiered_resource_usage(self):
+ """Obtains an object containing resource usage for tiers.
+
+ The returned object is suitable for serialization.
+ """
+ o = []
+
+ for tier, state in self.tiers.items():
+ t_entry = dict(
+ name=tier,
+ start=state['begin_time'],
+ end=state['finish_time'],
+ duration=state['duration'],
+ )
+
+ self.add_resources_to_dict(t_entry, phase=tier)
+
+ o.append(t_entry)
+
+ return o
+
+ def add_resources_to_dict(self, entry, start=None, end=None, phase=None):
+ """Helper function to append resource information to a dict."""
+ cpu_percent = self.resources.aggregate_cpu_percent(start=start,
+ end=end, phase=phase, per_cpu=False)
+ cpu_times = self.resources.aggregate_cpu_times(start=start, end=end,
+ phase=phase, per_cpu=False)
+ io = self.resources.aggregate_io(start=start, end=end, phase=phase)
+
+ if cpu_percent is None:
+ return entry
+
+ entry['cpu_percent'] = cpu_percent
+ entry['cpu_times'] = list(cpu_times)
+ entry['io'] = list(io)
+
+ return entry
+
+ def add_resource_fields_to_dict(self, d):
+ for usage in self.resources.range_usage():
+ cpu_times = self.resources.aggregate_cpu_times(per_cpu=False)
+
+ d['cpu_times_fields'] = list(cpu_times._fields)
+ d['io_fields'] = list(usage.io._fields)
+ d['virt_fields'] = list(usage.virt._fields)
+ d['swap_fields'] = list(usage.swap._fields)
+
+ return d
+
+
+class BuildMonitor(MozbuildObject):
+ """Monitors the output of the build."""
+
+ def init(self, warnings_path):
+ """Create a new monitor.
+
+ warnings_path is a path of a warnings database to use.
+ """
+ self._warnings_path = warnings_path
+ self.resources = SystemResourceMonitor(poll_interval=1.0)
+ self._resources_started = False
+
+ self.tiers = TierStatus(self.resources)
+
+ self.warnings_database = WarningsDatabase()
+ if os.path.exists(warnings_path):
+ try:
+ self.warnings_database.load_from_file(warnings_path)
+ except ValueError:
+ os.remove(warnings_path)
+
+ self._warnings_collector = WarningsCollector(
+ database=self.warnings_database, objdir=self.topobjdir)
+
+ self.build_objects = []
+
+ def start(self):
+ """Record the start of the build."""
+ self.start_time = time.time()
+ self._finder_start_cpu = self._get_finder_cpu_usage()
+
+ def start_resource_recording(self):
+ # This should be merged into start() once bug 892342 lands.
+ self.resources.start()
+ self._resources_started = True
+
+ def on_line(self, line):
+ """Consume a line of output from the build system.
+
+ This will parse the line for state and determine whether more action is
+ needed.
+
+ Returns a BuildOutputResult instance.
+
+ In this named tuple, warning will be an object describing a new parsed
+ warning. Otherwise it will be None.
+
+ state_changed indicates whether the build system changed state with
+ this line. If the build system changed state, the caller may want to
+ query this instance for the current state in order to update UI, etc.
+
+ for_display is a boolean indicating whether the line is relevant to the
+ user. This is typically used to filter whether the line should be
+ presented to the user.
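+
+        For example (illustrative), the line
+        'BUILDSTATUS TIER_START export' yields
+        BuildOutputResult(None, True, False), while an ordinary compiler
+        line yields BuildOutputResult(<warning or None>, False, True).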
+ """
+ if line.startswith('BUILDSTATUS'):
+ args = line.split()[1:]
+
+ action = args.pop(0)
+ update_needed = True
+
+ if action == 'TIERS':
+ self.tiers.set_tiers(args)
+ update_needed = False
+ elif action == 'TIER_START':
+ tier = args[0]
+ self.tiers.begin_tier(tier)
+ elif action == 'TIER_FINISH':
+ tier, = args
+ self.tiers.finish_tier(tier)
+ elif action == 'OBJECT_FILE':
+ self.build_objects.append(args[0])
+ update_needed = False
+ else:
+ raise Exception('Unknown build status: %s' % action)
+
+ return BuildOutputResult(None, update_needed, False)
+
+ warning = None
+
+ try:
+ warning = self._warnings_collector.process_line(line)
+ except:
+ pass
+
+ return BuildOutputResult(warning, False, True)
+
+ def stop_resource_recording(self):
+ if self._resources_started:
+ self.resources.stop()
+
+ self._resources_started = False
+
+ def finish(self, record_usage=True):
+ """Record the end of the build."""
+ self.stop_resource_recording()
+ self.end_time = time.time()
+ self._finder_end_cpu = self._get_finder_cpu_usage()
+ self.elapsed = self.end_time - self.start_time
+
+ self.warnings_database.prune()
+ self.warnings_database.save_to_file(self._warnings_path)
+
+ if not record_usage:
+ return
+
+ try:
+ usage = self.get_resource_usage()
+ if not usage:
+ return
+
+ self.log_resource_usage(usage)
+ with open(self._get_state_filename('build_resources.json'), 'w') as fh:
+ json.dump(self.resources.as_dict(), fh, indent=2)
+ except Exception as e:
+ self.log(logging.WARNING, 'build_resources_error',
+ {'msg': str(e)},
+ 'Exception when writing resource usage file: {msg}')
+
+ def _get_finder_cpu_usage(self):
+ """Obtain the CPU usage of the Finder app on OS X.
+
+ This is used to detect high CPU usage.
+ """
+ if not sys.platform.startswith('darwin'):
+ return None
+
+ if not psutil:
+ return None
+
+ for proc in psutil.process_iter():
+ if proc.name != 'Finder':
+ continue
+
+ if proc.username != getpass.getuser():
+ continue
+
+ # Try to isolate system finder as opposed to other "Finder"
+ # processes.
+ if not proc.exe.endswith('CoreServices/Finder.app/Contents/MacOS/Finder'):
+ continue
+
+ return proc.get_cpu_times()
+
+ return None
+
+ def have_high_finder_usage(self):
+ """Determine whether there was high Finder CPU usage during the build.
+
+        Returns a tuple of (high_usage, percent_used). high_usage is True if
+        there was high Finder CPU usage and False if there wasn't; both
+        elements are None if there is nothing to report.
+ """
+ if not self._finder_start_cpu:
+ return None, None
+
+ # We only measure if the measured range is sufficiently long.
+ if self.elapsed < 15:
+ return None, None
+
+ if not self._finder_end_cpu:
+ return None, None
+
+ start = self._finder_start_cpu
+ end = self._finder_end_cpu
+
+ start_total = start.user + start.system
+ end_total = end.user + end.system
+
+ cpu_seconds = end_total - start_total
+
+ # If Finder used more than 25% of 1 core during the build, report an
+ # error.
+ finder_percent = cpu_seconds / self.elapsed * 100
+
+ return finder_percent > 25, finder_percent
+
+ def have_excessive_swapping(self):
+ """Determine whether there was excessive swapping during the build.
+
+ Returns a tuple of (excessive, swap_in, swap_out). All values are None
+ if no swap information is available.
+ """
+ if not self.have_resource_usage:
+ return None, None, None
+
+ swap_in = sum(m.swap.sin for m in self.resources.measurements)
+ swap_out = sum(m.swap.sout for m in self.resources.measurements)
+
+        # The threshold of 512 MB has been arbitrarily chosen.
+ #
+ # Choosing a proper value that is ideal for everyone is hard. We will
+ # likely iterate on the logic until people are generally satisfied.
+ # If a value is too low, the eventual warning produced does not carry
+ # much meaning. If the threshold is too high, people may not see the
+ # warning and the warning will thus be ineffective.
+ excessive = swap_in > 512 * 1048576 or swap_out > 512 * 1048576
+ return excessive, swap_in, swap_out
+
+ @property
+ def have_resource_usage(self):
+ """Whether resource usage is available."""
+ return self.resources.start_time is not None
+
+ def get_resource_usage(self):
+        """Produce a data structure containing the low-level resource usage information.
+
+ This data structure can e.g. be serialized into JSON and saved for
+ subsequent analysis.
+
+ If no resource usage is available, None is returned.
+ """
+ if not self.have_resource_usage:
+ return None
+
+ cpu_percent = self.resources.aggregate_cpu_percent(phase=None,
+ per_cpu=False)
+ cpu_times = self.resources.aggregate_cpu_times(phase=None,
+ per_cpu=False)
+ io = self.resources.aggregate_io(phase=None)
+
+ o = dict(
+ version=3,
+ argv=sys.argv,
+ start=self.start_time,
+ end=self.end_time,
+ duration=self.end_time - self.start_time,
+ resources=[],
+ cpu_percent=cpu_percent,
+ cpu_times=cpu_times,
+ io=io,
+ objects=self.build_objects
+ )
+
+ o['tiers'] = self.tiers.tiered_resource_usage()
+
+ self.tiers.add_resource_fields_to_dict(o)
+
+ for usage in self.resources.range_usage():
+ cpu_percent = self.resources.aggregate_cpu_percent(usage.start,
+ usage.end, per_cpu=False)
+ cpu_times = self.resources.aggregate_cpu_times(usage.start,
+ usage.end, per_cpu=False)
+
+ entry = dict(
+ start=usage.start,
+ end=usage.end,
+ virt=list(usage.virt),
+ swap=list(usage.swap),
+ )
+
+ self.tiers.add_resources_to_dict(entry, start=usage.start,
+ end=usage.end)
+
+ o['resources'].append(entry)
+
+
+ # If the imports for this file ran before the in-tree virtualenv
+ # was bootstrapped (for instance, for a clobber build in automation),
+ # psutil might not be available.
+ #
+        # Treat psutil as optional to avoid an outright failure to log resources.
+ # TODO: it would be nice to collect data on the storage device as well
+ # in this case.
+ o['system'] = {}
+ if psutil:
+ o['system'].update(dict(
+ logical_cpu_count=psutil.cpu_count(),
+ physical_cpu_count=psutil.cpu_count(logical=False),
+ swap_total=psutil.swap_memory()[0],
+ vmem_total=psutil.virtual_memory()[0],
+ ))
+
+ return o
+
+ def log_resource_usage(self, usage):
+ """Summarize the resource usage of this build in a log message."""
+
+ if not usage:
+ return
+
+ params = dict(
+ duration=self.end_time - self.start_time,
+ cpu_percent=usage['cpu_percent'],
+ io_read_bytes=usage['io'].read_bytes,
+ io_write_bytes=usage['io'].write_bytes,
+ io_read_time=usage['io'].read_time,
+ io_write_time=usage['io'].write_time,
+ )
+
+ message = 'Overall system resources - Wall time: {duration:.0f}s; ' \
+ 'CPU: {cpu_percent:.0f}%; ' \
+ 'Read bytes: {io_read_bytes}; Write bytes: {io_write_bytes}; ' \
+ 'Read time: {io_read_time}; Write time: {io_write_time}'
+
+ self.log(logging.WARNING, 'resource_usage', params, message)
+
+ excessive, sin, sout = self.have_excessive_swapping()
+ if excessive is not None and (sin or sout):
+ sin /= 1048576
+ sout /= 1048576
+ self.log(logging.WARNING, 'swap_activity',
+ {'sin': sin, 'sout': sout},
+ 'Swap in/out (MB): {sin}/{sout}')
+
+ def ccache_stats(self):
+ ccache_stats = None
+
+ try:
+ ccache = which.which('ccache')
+ output = subprocess.check_output([ccache, '-s'])
+ ccache_stats = CCacheStats(output)
+ except which.WhichError:
+ pass
+ except ValueError as e:
+ self.log(logging.WARNING, 'ccache', {'msg': str(e)}, '{msg}')
+
+ return ccache_stats
+
+
+class CCacheStats(object):
+ """Holds statistics from ccache.
+
+ Instances can be subtracted from each other to obtain differences.
+ print() or str() the object to show a ``ccache -s`` like output
+ of the captured stats.
+
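+    Illustrative diffing, assuming `before` and `after` hold two snapshots
+    of ``ccache -s`` output:
+
+        delta = CCacheStats(after) - CCacheStats(before)
+        if delta:
+            print(delta.hit_rate_message())
+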
+ """
+ STATS_KEYS = [
+ # (key, description)
+ # Refer to stats.c in ccache project for all the descriptions.
+ ('cache_hit_direct', 'cache hit (direct)'),
+ ('cache_hit_preprocessed', 'cache hit (preprocessed)'),
+ ('cache_hit_rate', 'cache hit rate'),
+ ('cache_miss', 'cache miss'),
+ ('link', 'called for link'),
+ ('preprocessing', 'called for preprocessing'),
+ ('multiple', 'multiple source files'),
+ ('stdout', 'compiler produced stdout'),
+ ('no_output', 'compiler produced no output'),
+ ('empty_output', 'compiler produced empty output'),
+ ('failed', 'compile failed'),
+ ('error', 'ccache internal error'),
+ ('preprocessor_error', 'preprocessor error'),
+ ('cant_use_pch', "can't use precompiled header"),
+ ('compiler_missing', "couldn't find the compiler"),
+ ('cache_file_missing', 'cache file missing'),
+ ('bad_args', 'bad compiler arguments'),
+ ('unsupported_lang', 'unsupported source language'),
+ ('compiler_check_failed', 'compiler check failed'),
+ ('autoconf', 'autoconf compile/link'),
+ ('unsupported_compiler_option', 'unsupported compiler option'),
+ ('out_stdout', 'output to stdout'),
+ ('out_device', 'output to a non-regular file'),
+ ('no_input', 'no input file'),
+ ('bad_extra_file', 'error hashing extra file'),
+ ('num_cleanups', 'cleanups performed'),
+ ('cache_files', 'files in cache'),
+ ('cache_size', 'cache size'),
+ ('cache_max_size', 'max cache size'),
+ ]
+
+ DIRECTORY_DESCRIPTION = "cache directory"
+ PRIMARY_CONFIG_DESCRIPTION = "primary config"
+ SECONDARY_CONFIG_DESCRIPTION = "secondary config (readonly)"
+ ABSOLUTE_KEYS = {'cache_files', 'cache_size', 'cache_max_size'}
+ FORMAT_KEYS = {'cache_size', 'cache_max_size'}
+
+ GiB = 1024 ** 3
+ MiB = 1024 ** 2
+ KiB = 1024
+
+ def __init__(self, output=None):
+ """Construct an instance from the output of ccache -s."""
+ self._values = {}
+ self.cache_dir = ""
+ self.primary_config = ""
+ self.secondary_config = ""
+
+ if not output:
+ return
+
+ for line in output.splitlines():
+ line = line.strip()
+ if line:
+ self._parse_line(line)
+
+ def _parse_line(self, line):
+ if line.startswith(self.DIRECTORY_DESCRIPTION):
+ self.cache_dir = self._strip_prefix(line, self.DIRECTORY_DESCRIPTION)
+ elif line.startswith(self.PRIMARY_CONFIG_DESCRIPTION):
+ self.primary_config = self._strip_prefix(
+ line, self.PRIMARY_CONFIG_DESCRIPTION)
+ elif line.startswith(self.SECONDARY_CONFIG_DESCRIPTION):
+ self.secondary_config = self._strip_prefix(
+ line, self.SECONDARY_CONFIG_DESCRIPTION)
+ else:
+ for stat_key, stat_description in self.STATS_KEYS:
+ if line.startswith(stat_description):
+ raw_value = self._strip_prefix(line, stat_description)
+ self._values[stat_key] = self._parse_value(raw_value)
+ break
+ else:
+ raise ValueError('Failed to parse ccache stats output: %s' % line)
+
+ @staticmethod
+ def _strip_prefix(line, prefix):
+ return line[len(prefix):].strip() if line.startswith(prefix) else line
+
+ @staticmethod
+ def _parse_value(raw_value):
+ value = raw_value.split()
+ unit = ''
+ if len(value) == 1:
+ numeric = value[0]
+ elif len(value) == 2:
+ numeric, unit = value
+ else:
+ raise ValueError('Failed to parse ccache stats value: %s' % raw_value)
+
+ if '.' in numeric:
+ numeric = float(numeric)
+ else:
+ numeric = int(numeric)
+
+ if unit in ('GB', 'Gbytes'):
+ unit = CCacheStats.GiB
+ elif unit in ('MB', 'Mbytes'):
+ unit = CCacheStats.MiB
+ elif unit in ('KB', 'Kbytes'):
+ unit = CCacheStats.KiB
+ else:
+ unit = 1
+
+ return int(numeric * unit)
+
+ def hit_rate_message(self):
+        return ('ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: '
+                '{:.1%}; miss rate: {:.1%}'.format(*self.hit_rates()))
+
+ def hit_rates(self):
+ direct = self._values['cache_hit_direct']
+ preprocessed = self._values['cache_hit_preprocessed']
+ miss = self._values['cache_miss']
+ total = float(direct + preprocessed + miss)
+
+ if total > 0:
+ direct /= total
+ preprocessed /= total
+ miss /= total
+
+ return (direct, preprocessed, miss)
+
+ def __sub__(self, other):
+ result = CCacheStats()
+ result.cache_dir = self.cache_dir
+
+ for k, prefix in self.STATS_KEYS:
+ if k not in self._values and k not in other._values:
+ continue
+
+ our_value = self._values.get(k, 0)
+ other_value = other._values.get(k, 0)
+
+ if k in self.ABSOLUTE_KEYS:
+ result._values[k] = our_value
+ else:
+ result._values[k] = our_value - other_value
+
+ return result
+
+ def __str__(self):
+ LEFT_ALIGN = 34
+ lines = []
+
+ if self.cache_dir:
+ lines.append('%s%s' % (self.DIRECTORY_DESCRIPTION.ljust(LEFT_ALIGN),
+ self.cache_dir))
+
+ for stat_key, stat_description in self.STATS_KEYS:
+ if stat_key not in self._values:
+ continue
+
+ value = self._values[stat_key]
+
+ if stat_key in self.FORMAT_KEYS:
+ value = '%15s' % self._format_value(value)
+ else:
+ value = '%8u' % value
+
+ lines.append('%s%s' % (stat_description.ljust(LEFT_ALIGN), value))
+
+ return '\n'.join(lines)
+
+ def __nonzero__(self):
+ relative_values = [v for k, v in self._values.items()
+ if k not in self.ABSOLUTE_KEYS]
+ return (all(v >= 0 for v in relative_values) and
+ any(v > 0 for v in relative_values))
+
+ @staticmethod
+ def _format_value(v):
+ if v > CCacheStats.GiB:
+ return '%.1f Gbytes' % (float(v) / CCacheStats.GiB)
+ elif v > CCacheStats.MiB:
+ return '%.1f Mbytes' % (float(v) / CCacheStats.MiB)
+ else:
+ return '%.1f Kbytes' % (float(v) / CCacheStats.KiB)
+
+
+class BuildDriver(MozbuildObject):
+ """Provides a high-level API for build actions."""
+
+ def install_tests(self, test_objs):
+ """Install test files."""
+
+ if self.is_clobber_needed():
+ print(INSTALL_TESTS_CLOBBER.format(
+ clobber_file=os.path.join(self.topobjdir, 'CLOBBER')))
+ sys.exit(1)
+
+ if not test_objs:
+ # If we don't actually have a list of tests to install we install
+ # test and support files wholesale.
+ self._run_make(target='install-test-files', pass_thru=True,
+ print_directory=False)
+ else:
+ install_test_files(mozpath.normpath(self.topsrcdir), self.topobjdir,
+ '_tests', test_objs)
diff --git a/python/mozbuild/mozbuild/controller/clobber.py b/python/mozbuild/mozbuild/controller/clobber.py
new file mode 100644
index 000000000..02f75c6ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/clobber.py
@@ -0,0 +1,237 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function
+
+r'''This module contains code for managing clobbering of the tree.'''
+
+import errno
+import os
+import subprocess
+import sys
+
+from mozfile.mozfile import remove as mozfileremove
+from textwrap import TextWrapper
+
+
+CLOBBER_MESSAGE = ''.join([TextWrapper().fill(line) + '\n' for line in
+'''
+The CLOBBER file has been updated, indicating that an incremental build since \
+your last build will probably not work. A full/clobber build is required.
+
+The reason for the clobber is:
+
+{clobber_reason}
+
+Clobbering can be performed automatically. However, we didn't automatically \
+clobber this time because:
+
+{no_reason}
+
+The easiest and fastest way to clobber is to run:
+
+ $ mach clobber
+
+If you know this clobber doesn't apply to you or you're feeling lucky -- \
+Well, are ya? -- you can ignore this clobber requirement by running:
+
+ $ touch {clobber_file}
+'''.splitlines()])
+
+class Clobberer(object):
+ def __init__(self, topsrcdir, topobjdir):
+ """Create a new object to manage clobbering the tree.
+
+ It is bound to a top source directory and to a specific object
+ directory.
+ """
+ assert os.path.isabs(topsrcdir)
+ assert os.path.isabs(topobjdir)
+
+ self.topsrcdir = os.path.normpath(topsrcdir)
+ self.topobjdir = os.path.normpath(topobjdir)
+ self.src_clobber = os.path.join(topsrcdir, 'CLOBBER')
+ self.obj_clobber = os.path.join(topobjdir, 'CLOBBER')
+
+ # Try looking for mozilla/CLOBBER, for comm-central
+ if not os.path.isfile(self.src_clobber):
+ self.src_clobber = os.path.join(topsrcdir, 'mozilla', 'CLOBBER')
+
+ assert os.path.isfile(self.src_clobber)
+
+ def clobber_needed(self):
+ """Returns a bool indicating whether a tree clobber is required."""
+
+ # No object directory clobber file means we're good.
+ if not os.path.exists(self.obj_clobber):
+ return False
+
+ # Object directory clobber older than current is fine.
+ if os.path.getmtime(self.src_clobber) <= \
+ os.path.getmtime(self.obj_clobber):
+
+ return False
+
+ return True
+
+ def clobber_cause(self):
+ """Obtain the cause why a clobber is required.
+
+ This reads the cause from the CLOBBER file.
+
+ This returns a list of lines describing why the clobber was required.
+ Each line is stripped of leading and trailing whitespace.
+ """
+ with open(self.src_clobber, 'rt') as fh:
+ lines = [l.strip() for l in fh.readlines()]
+ return [l for l in lines if l and not l.startswith('#')]
+
+ def have_winrm(self):
+ # `winrm -h` should print 'winrm version ...' and exit 1
+ try:
+ p = subprocess.Popen(['winrm.exe', '-h'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ return p.wait() == 1 and p.stdout.read().startswith('winrm')
+ except:
+ return False
+
+ def remove_objdir(self, full=True):
+ """Remove the object directory.
+
+ ``full`` controls whether to fully delete the objdir. If False,
+ some directories (e.g. Visual Studio Project Files) will not be
+ deleted.
+ """
+ # Top-level files and directories to not clobber by default.
+ no_clobber = {
+ '.mozbuild',
+ 'msvc',
+ }
+
+ if full:
+ # mozfile doesn't like unicode arguments (bug 818783).
+ paths = [self.topobjdir.encode('utf-8')]
+ else:
+ try:
+ paths = []
+ for p in os.listdir(self.topobjdir):
+ if p not in no_clobber:
+ paths.append(os.path.join(self.topobjdir, p).encode('utf-8'))
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ return
+
+ procs = []
+ for p in sorted(paths):
+ path = os.path.join(self.topobjdir, p)
+ if sys.platform.startswith('win') and self.have_winrm() and os.path.isdir(path):
+ procs.append(subprocess.Popen(['winrm', '-rf', path]))
+ else:
+ # We use mozfile because it is faster than shutil.rmtree().
+ mozfileremove(path)
+
+ for p in procs:
+ p.wait()
+
+ def ensure_objdir_state(self):
+ """Ensure the CLOBBER file in the objdir exists.
+
+ This is called as part of the build to ensure the clobber information
+ is configured properly for the objdir.
+ """
+ if not os.path.exists(self.topobjdir):
+ os.makedirs(self.topobjdir)
+
+ if not os.path.exists(self.obj_clobber):
+ # Simply touch the file.
+ with open(self.obj_clobber, 'a'):
+ pass
+
+ def maybe_do_clobber(self, cwd, allow_auto=False, fh=sys.stderr):
+ """Perform a clobber if it is required. Maybe.
+
+ This is the API the build system invokes to determine if a clobber
+ is needed and to automatically perform that clobber if we can.
+
+ This returns a tuple of (bool, bool, str). The elements are:
+
+ - Whether a clobber was/is required.
+ - Whether a clobber was performed.
+ - The reason why the clobber failed or could not be performed. This
+ will be None if no clobber is required or if we clobbered without
+ error.
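+
+        Illustrative call (paths and flags assumed):
+
+            required, performed, message = clobberer.maybe_do_clobber(
+                os.getcwd(), allow_auto=False)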
+ """
+ assert cwd
+ cwd = os.path.normpath(cwd)
+
+ if not self.clobber_needed():
+ print('Clobber not needed.', file=fh)
+ self.ensure_objdir_state()
+ return False, False, None
+
+ # So a clobber is needed. We only perform a clobber if we are
+ # allowed to perform an automatic clobber (off by default) and if the
+ # current directory is not under the object directory. The latter is
+ # because operating systems, filesystems, and shell can throw fits
+ # if the current working directory is deleted from under you. While it
+ # can work in some scenarios, we take the conservative approach and
+ # never try.
+ if not allow_auto:
+ return True, False, \
+ self._message('Automatic clobbering is not enabled\n'
+ ' (add "mk_add_options AUTOCLOBBER=1" to your '
+ 'mozconfig).')
+
+ if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
+ return True, False, self._message(
+ 'Cannot clobber while the shell is inside the object directory.')
+
+ print('Automatically clobbering %s' % self.topobjdir, file=fh)
+ try:
+ self.remove_objdir(False)
+ self.ensure_objdir_state()
+ print('Successfully completed auto clobber.', file=fh)
+ return True, True, None
+        except IOError as error:
+ return True, False, self._message(
+ 'Error when automatically clobbering: ' + str(error))
+
+ def _message(self, reason):
+ lines = [' ' + line for line in self.clobber_cause()]
+
+ return CLOBBER_MESSAGE.format(clobber_reason='\n'.join(lines),
+ no_reason=' ' + reason, clobber_file=self.obj_clobber)
+
+
+def main(args, env, cwd, fh=sys.stderr):
+ if len(args) != 2:
+ print('Usage: clobber.py topsrcdir topobjdir', file=fh)
+ return 1
+
+ topsrcdir, topobjdir = args
+
+ if not os.path.isabs(topsrcdir):
+ topsrcdir = os.path.abspath(topsrcdir)
+
+ if not os.path.isabs(topobjdir):
+ topobjdir = os.path.abspath(topobjdir)
+
+ auto = True if env.get('AUTOCLOBBER', False) else False
+ clobber = Clobberer(topsrcdir, topobjdir)
+ required, performed, message = clobber.maybe_do_clobber(cwd, auto, fh)
+
+ if not required or performed:
+ if performed and env.get('TINDERBOX_OUTPUT'):
+ print('TinderboxPrint: auto clobber', file=fh)
+ return 0
+
+ print(message, file=fh)
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:], os.environ, os.getcwd(), sys.stdout))
+
diff --git a/python/mozbuild/mozbuild/doctor.py b/python/mozbuild/mozbuild/doctor.py
new file mode 100644
index 000000000..2175042bf
--- /dev/null
+++ b/python/mozbuild/mozbuild/doctor.py
@@ -0,0 +1,293 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import subprocess
+import sys
+
+import psutil
+
+from distutils.util import strtobool
+from distutils.version import LooseVersion
+import mozpack.path as mozpath
+
+# Minimum recommended logical processors in system.
+PROCESSORS_THRESHOLD = 4
+
+# Minimum recommended total system memory, in gigabytes.
+MEMORY_THRESHOLD = 7.4
+
+# Minimum recommended free space on each disk, in gigabytes.
+FREESPACE_THRESHOLD = 10
+
+# Latest MozillaBuild version
+LATEST_MOZILLABUILD_VERSION = '1.11.0'
+
+DISABLE_LASTACCESS_WIN = '''
+Disable the last access time feature?
+This improves the speed of file and
+directory access by deferring Last Access Time modification on disk by up to an
+hour. Backup programs that rely on this feature may be affected.
+https://technet.microsoft.com/en-us/library/cc785435.aspx
+'''
+
+class Doctor(object):
+ def __init__(self, srcdir, objdir, fix):
+ self.srcdir = mozpath.normpath(srcdir)
+ self.objdir = mozpath.normpath(objdir)
+ self.srcdir_mount = self.getmount(self.srcdir)
+ self.objdir_mount = self.getmount(self.objdir)
+ self.path_mounts = [
+ ('srcdir', self.srcdir, self.srcdir_mount),
+ ('objdir', self.objdir, self.objdir_mount)
+ ]
+ self.fix = fix
+ self.results = []
+
+ def check_all(self):
+ checks = [
+ 'cpu',
+ 'memory',
+ 'storage_freespace',
+ 'fs_lastaccess',
+ 'mozillabuild'
+ ]
+ for check in checks:
+ self.report(getattr(self, check))
+ good = True
+ fixable = False
+ denied = False
+ for result in self.results:
+ if result.get('status') != 'GOOD':
+ good = False
+ if result.get('fixable', False):
+ fixable = True
+ if result.get('denied', False):
+ denied = True
+ if denied:
+ print('run "mach doctor --fix" AS ADMIN to re-attempt fixing your system')
+ elif False: # elif fixable:
+ print('run "mach doctor --fix" as admin to attempt fixing your system')
+ return int(not good)
+
+ def getmount(self, path):
+ while path != '/' and not os.path.ismount(path):
+ path = mozpath.abspath(mozpath.join(path, os.pardir))
+ return path
+
+ def prompt_bool(self, prompt, limit=5):
+ ''' Prompts the user with prompt and requires a boolean value. '''
+ valid = False
+ while not valid and limit > 0:
+ try:
+ choice = strtobool(raw_input(prompt + '[Y/N]\n'))
+ valid = True
+ except ValueError:
+ print("ERROR! Please enter a valid option!")
+ limit -= 1
+
+ if limit > 0:
+ return choice
+ else:
+            raise Exception("Error! Reached the maximum number of attempts to enter an option.")
+
+ def report(self, results):
+ # Handle single dict result or list of results.
+ if isinstance(results, dict):
+ results = [results]
+ for result in results:
+ status = result.get('status', 'UNSURE')
+ if status == 'SKIPPED':
+ continue
+ self.results.append(result)
+            msg = '%s...\t%s\n' % (result.get('desc', ''), status)
+            print(msg.expandtabs(40))
+
+ @property
+ def platform(self):
+ platform = getattr(self, '_platform', None)
+ if not platform:
+ platform = sys.platform
+ while platform[-1].isdigit():
+ platform = platform[:-1]
+ setattr(self, '_platform', platform)
+ return platform
+
+ @property
+ def cpu(self):
+ cpu_count = psutil.cpu_count()
+ if cpu_count < PROCESSORS_THRESHOLD:
+ status = 'BAD'
+ desc = '%d logical processors detected, <%d' % (
+ cpu_count, PROCESSORS_THRESHOLD
+ )
+ else:
+ status = 'GOOD'
+ desc = '%d logical processors detected, >=%d' % (
+ cpu_count, PROCESSORS_THRESHOLD
+ )
+ return {'status': status, 'desc': desc}
+
+ @property
+ def memory(self):
+ memory = psutil.virtual_memory().total
+ # Convert to gigabytes.
+ memory_GB = memory / 1024**3.0
+ if memory_GB < MEMORY_THRESHOLD:
+ status = 'BAD'
+ desc = '%.1fGB of physical memory, <%.1fGB' % (
+ memory_GB, MEMORY_THRESHOLD
+ )
+ else:
+ status = 'GOOD'
+            desc = '%.1fGB of physical memory, >=%.1fGB' % (
+ memory_GB, MEMORY_THRESHOLD
+ )
+ return {'status': status, 'desc': desc}
+
+ @property
+ def storage_freespace(self):
+ results = []
+ desc = ''
+ mountpoint_line = self.srcdir_mount != self.objdir_mount
+ for (purpose, path, mount) in self.path_mounts:
+ desc += '%s = %s\n' % (purpose, path)
+ if not mountpoint_line:
+ mountpoint_line = True
+ continue
+ try:
+ usage = psutil.disk_usage(mount)
+ freespace, size = usage.free, usage.total
+ freespace_GB = freespace / 1024**3
+ size_GB = size / 1024**3
+ if freespace_GB < FREESPACE_THRESHOLD:
+ status = 'BAD'
+ desc += 'mountpoint = %s\n%dGB of %dGB free, <%dGB' % (
+ mount, freespace_GB, size_GB, FREESPACE_THRESHOLD
+ )
+ else:
+ status = 'GOOD'
+ desc += 'mountpoint = %s\n%dGB of %dGB free, >=%dGB' % (
+ mount, freespace_GB, size_GB, FREESPACE_THRESHOLD
+ )
+ except OSError:
+ status = 'UNSURE'
+ desc += 'path invalid'
+ results.append({'status': status, 'desc': desc})
+ return results
+
+ @property
+ def fs_lastaccess(self):
+ results = []
+ if self.platform == 'win':
+ fixable = False
+ denied = False
+ # See 'fsutil behavior':
+ # https://technet.microsoft.com/en-us/library/cc785435.aspx
+ try:
+ command = 'fsutil behavior query disablelastaccess'.split(' ')
+ fsutil_output = subprocess.check_output(command)
+ disablelastaccess = int(fsutil_output.partition('=')[2][1])
+ except subprocess.CalledProcessError:
+ disablelastaccess = -1
+ status = 'UNSURE'
+ desc = 'unable to check lastaccess behavior'
+ if disablelastaccess == 1:
+ status = 'GOOD'
+ desc = 'lastaccess disabled systemwide'
+ elif disablelastaccess == 0:
+ if False: # if self.fix:
+ choice = self.prompt_bool(DISABLE_LASTACCESS_WIN)
+ if not choice:
+ return {'status': 'BAD, NOT FIXED',
+ 'desc': 'lastaccess enabled systemwide'}
+ try:
+ command = 'fsutil behavior set disablelastaccess 1'.split(' ')
+ fsutil_output = subprocess.check_output(command)
+ status = 'GOOD, FIXED'
+ desc = 'lastaccess disabled systemwide'
+                    except subprocess.CalledProcessError as e:
+ desc = 'lastaccess enabled systemwide'
+ if e.output.find('denied') != -1:
+ status = 'BAD, FIX DENIED'
+ denied = True
+ else:
+ status = 'BAD, NOT FIXED'
+ else:
+ status = 'BAD, FIXABLE'
+ desc = 'lastaccess enabled'
+ fixable = True
+ results.append({'status': status, 'desc': desc, 'fixable': fixable,
+ 'denied': denied})
+ elif self.platform in ['darwin', 'freebsd', 'linux', 'openbsd']:
+ common_mountpoint = self.srcdir_mount == self.objdir_mount
+ for (purpose, path, mount) in self.path_mounts:
+ results.append(self.check_mount_lastaccess(mount))
+ if common_mountpoint:
+ break
+ else:
+ results.append({'status': 'SKIPPED'})
+ return results
+
+ def check_mount_lastaccess(self, mount):
+ partitions = psutil.disk_partitions()
+ atime_opts = {'atime', 'noatime', 'relatime', 'norelatime'}
+ option = ''
+ for partition in partitions:
+ if partition.mountpoint == mount:
+ mount_opts = set(partition.opts.split(','))
+ intersection = list(atime_opts & mount_opts)
+ if len(intersection) == 1:
+ option = intersection[0]
+ break
+ if not option:
+ status = 'BAD'
+ if self.platform == 'linux':
+ option = 'noatime/relatime'
+ else:
+ option = 'noatime'
+ desc = '%s has no explicit %s mount option' % (
+ mount, option
+ )
+ elif option == 'atime' or option == 'norelatime':
+ status = 'BAD'
+ desc = '%s has %s mount option' % (
+ mount, option
+ )
+ elif option == 'noatime' or option == 'relatime':
+ status = 'GOOD'
+ desc = '%s has %s mount option' % (
+ mount, option
+ )
+ return {'status': status, 'desc': desc}
+
+ @property
+ def mozillabuild(self):
+ if self.platform != 'win':
+ return {'status': 'SKIPPED'}
+ MOZILLABUILD = mozpath.normpath(os.environ.get('MOZILLABUILD', ''))
+ if not MOZILLABUILD or not os.path.exists(MOZILLABUILD):
+            return {'status': 'UNSURE', 'desc': 'not running under MozillaBuild'}
+ try:
+ with open(mozpath.join(MOZILLABUILD, 'VERSION'), 'r') as fh:
+ version = fh.readline()
+ if not version:
+ raise ValueError()
+ if LooseVersion(version) < LooseVersion(LATEST_MOZILLABUILD_VERSION):
+ status = 'BAD'
+ desc = 'MozillaBuild %s in use, <%s' % (
+ version, LATEST_MOZILLABUILD_VERSION
+ )
+ else:
+ status = 'GOOD'
+ desc = 'MozillaBuild %s in use' % version
+ except (IOError, ValueError):
+ status = 'UNSURE'
+ desc = 'MozillaBuild version not found'
+ return {'status': status, 'desc': desc}
diff --git a/python/mozbuild/mozbuild/dotproperties.py b/python/mozbuild/mozbuild/dotproperties.py
new file mode 100644
index 000000000..972ff2329
--- /dev/null
+++ b/python/mozbuild/mozbuild/dotproperties.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains utility functions for reading .properties files, like
+# region.properties.
+
+from __future__ import absolute_import, unicode_literals
+
+import codecs
+import re
+import sys
+
+if sys.version_info[0] == 3:
+ str_type = str
+else:
+ str_type = basestring
+
+class DotProperties:
+ r'''A thin representation of a key=value .properties file.'''
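+
+    # Illustrative only, assuming a file `f` with the hypothetical contents:
+    #
+    #   title=Foo
+    #   list.0=a
+    #   list.1=b
+    #
+    # DotProperties(f).get('title')     -> 'Foo'
+    # DotProperties(f).get_list('list') -> ['a', 'b']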
+
+ def __init__(self, file=None):
+ self._properties = {}
+ if file:
+ self.update(file)
+
+ def update(self, file):
+ '''Updates properties from a file name or file-like object.
+
+ Ignores empty lines and comment lines.'''
+
+ if isinstance(file, str_type):
+ f = codecs.open(file, 'r', 'utf-8')
+ else:
+ f = file
+
+ for l in f.readlines():
+ line = l.strip()
+ if not line or line.startswith('#'):
+ continue
+            (k, v) = re.split(r'\s*=\s*', line, 1)
+ self._properties[k] = v
+
+ def get(self, key, default=None):
+ return self._properties.get(key, default)
+
+ def get_list(self, prefix):
+ '''Turns {'list.0':'foo', 'list.1':'bar'} into ['foo', 'bar'].
+
+ Returns [] to indicate an empty or missing list.'''
+
+ if not prefix.endswith('.'):
+ prefix = prefix + '.'
+ indexes = []
+ for k, v in self._properties.iteritems():
+ if not k.startswith(prefix):
+ continue
+ key = k[len(prefix):]
+ if '.' in key:
+ # We have something like list.sublist.0.
+ continue
+ indexes.append(int(key))
+ return [self._properties[prefix + str(index)] for index in sorted(indexes)]
+
+ def get_dict(self, prefix, required_keys=[]):
+ '''Turns {'foo.title':'title', ...} into {'title':'title', ...}.
+
+ If |required_keys| is present, it must be an iterable of required key
+ names. If a required key is not present, ValueError is thrown.
+
+ Returns {} to indicate an empty or missing dict.'''
+
+ if not prefix.endswith('.'):
+ prefix = prefix + '.'
+
+ D = dict((k[len(prefix):], v) for k, v in self._properties.iteritems()
+ if k.startswith(prefix) and '.' not in k[len(prefix):])
+
+ for required_key in required_keys:
+            if required_key not in D:
+ raise ValueError('Required key %s not present' % required_key)
+
+ return D
diff --git a/python/mozbuild/mozbuild/frontend/__init__.py b/python/mozbuild/mozbuild/frontend/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/__init__.py
diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py
new file mode 100644
index 000000000..eb501dc66
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/context.py
@@ -0,0 +1,2292 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+######################################################################
+# DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. #
+######################################################################
+
+r"""This module contains the data structure (context) holding the configuration
+from a moz.build. The data emitted by the frontend derives from those contexts.
+
+It also defines the set of variables and functions available in moz.build.
+If you are looking for the absolute authority on what moz.build files can
+contain, you've come to the right place.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from collections import (
+ Counter,
+ OrderedDict,
+)
+from mozbuild.util import (
+ HierarchicalStringList,
+ KeyedDefaultDict,
+ List,
+ ListWithAction,
+ memoize,
+ memoized_property,
+ ReadOnlyKeyedDefaultDict,
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithAction,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ TypedList,
+ TypedNamedTuple,
+)
+
+from ..testing import (
+ all_test_flavors,
+ read_manifestparser_manifest,
+ read_reftest_manifest,
+ read_wpt_manifest,
+)
+
+import mozpack.path as mozpath
+from types import FunctionType
+
+import itertools
+
+
+class ContextDerivedValue(object):
+ """Classes deriving from this one receive a special treatment in a
+ Context. See Context documentation.
+ """
+ __slots__ = ()
+
+
+class Context(KeyedDefaultDict):
+ """Represents a moz.build configuration context.
+
+ Instances of this class are filled by the execution of sandboxes.
+ At the core, a Context is a dict, with a defined set of possible keys we'll
+ call variables. Each variable is associated with a type.
+
+ When reading a value for a given key, we first try to read the existing
+ value. If a value is not found and it is defined in the allowed variables
+ set, we return a new instance of the class for that variable. We don't
+ assign default instances until they are accessed because this makes
+ debugging the end-result much simpler. Instead of a data structure with
+ lots of empty/default values, you have a data structure with only the
+ values that were read or touched.
+
+ Instances of variables classes are created by invoking ``class_name()``,
+ except when class_name derives from ``ContextDerivedValue`` or
+ ``SubContext``, in which case ``class_name(instance_of_the_context)`` or
+ ``class_name(self)`` is invoked. A value is added to those calls when
+ instances are created during assignment (setitem).
+
+ allowed_variables is a dict of the variables that can be set and read in
+ this context instance. Keys in this dict are the strings representing keys
+ in this context which are valid. Values are tuples of the stored type,
+ the input type, and a docstring describing the purpose of the variable
+ (see the comment above the VARIABLES declaration in this module).
+
+ config is the ConfigEnvironment for this context.
+ """
+ def __init__(self, allowed_variables={}, config=None, finder=None):
+ self._allowed_variables = allowed_variables
+ self.main_path = None
+ self.current_path = None
+ # There aren't going to be enough paths for the performance of scanning
+ # a list to be a problem.
+ self._all_paths = []
+ self.config = config
+ self._sandbox = None
+ self._finder = finder
+ KeyedDefaultDict.__init__(self, self._factory)
+
+ def push_source(self, path):
+ """Adds the given path as source of the data from this context and make
+ it the current path for the context."""
+ assert os.path.isabs(path)
+ if not self.main_path:
+ self.main_path = path
+ else:
+ # Callers shouldn't push after main_path has been popped.
+ assert self.current_path
+ self.current_path = path
+ # The same file can be pushed twice, so don't remove any previous
+ # occurrence.
+ self._all_paths.append(path)
+
+ def pop_source(self):
+ """Get back to the previous current path for the context."""
+ assert self.main_path
+ assert self.current_path
+ last = self._all_paths.pop()
+ # Keep the popped path in the list of all paths, but before the main
+ # path so that it's not popped again.
+ self._all_paths.insert(0, last)
+ if last == self.main_path:
+ self.current_path = None
+ else:
+ self.current_path = self._all_paths[-1]
+ return last
+
+ def add_source(self, path):
+ """Adds the given path as source of the data from this context."""
+ assert os.path.isabs(path)
+ if not self.main_path:
+ self.main_path = self.current_path = path
+ # Insert at the beginning of the list so that it's always before the
+ # main path.
+ if path not in self._all_paths:
+ self._all_paths.insert(0, path)
+
+ @property
+ def error_is_fatal(self):
+ """Returns True if the error function should be fatal."""
+ return self.config and getattr(self.config, 'error_is_fatal', True)
+
+ @property
+ def all_paths(self):
+ """Returns all paths ever added to the context."""
+ return set(self._all_paths)
+
+ @property
+ def source_stack(self):
+ """Returns the current stack of pushed sources."""
+ if not self.current_path:
+ return []
+ return self._all_paths[self._all_paths.index(self.main_path):]
+
+ @memoized_property
+ def objdir(self):
+ return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip('/')
+
+ @memoize
+ def _srcdir(self, path):
+ return mozpath.join(self.config.topsrcdir,
+ self._relsrcdir(path)).rstrip('/')
+
+ @property
+ def srcdir(self):
+ return self._srcdir(self.current_path or self.main_path)
+
+ @memoize
+ def _relsrcdir(self, path):
+ return mozpath.relpath(mozpath.dirname(path), self.config.topsrcdir)
+
+ @property
+ def relsrcdir(self):
+ assert self.main_path
+ return self._relsrcdir(self.current_path or self.main_path)
+
+ @memoized_property
+ def relobjdir(self):
+ assert self.main_path
+ return mozpath.relpath(mozpath.dirname(self.main_path),
+ self.config.topsrcdir)
+
+ def _factory(self, key):
+ """Function called when requesting a missing key."""
+ defaults = self._allowed_variables.get(key)
+ if not defaults:
+ raise KeyError('global_ns', 'get_unknown', key)
+
+ # If the default is specifically a lambda (or, rather, any function
+ # -- but not a class that can be called), then it is actually a rule to
+ # generate the default that should be used.
+ default = defaults[0]
+ if issubclass(default, ContextDerivedValue):
+ return default(self)
+ else:
+ return default()
+
+ def _validate(self, key, value, is_template=False):
+ """Validates whether the key is allowed and if the value's type
+ matches.
+ """
+ stored_type, input_type, docs = \
+ self._allowed_variables.get(key, (None, None, None))
+
+ if stored_type is None or (not is_template and key in TEMPLATE_VARIABLES):
+ raise KeyError('global_ns', 'set_unknown', key, value)
+
+ # If the incoming value is not the type we store, we try to convert
+ # it to that type. This relies on proper coercion rules existing. This
+ # is the responsibility of whoever defined the symbols: a type should
+ # not be in the allowed set if the constructor function for the stored
+ # type does not accept an instance of that type.
+ if not isinstance(value, (stored_type, input_type)):
+ raise ValueError('global_ns', 'set_type', key, value, input_type)
+
+ return stored_type
+
+ def __setitem__(self, key, value):
+ stored_type = self._validate(key, value)
+
+ if not isinstance(value, stored_type):
+ if issubclass(stored_type, ContextDerivedValue):
+ value = stored_type(self, value)
+ else:
+ value = stored_type(value)
+
+ return KeyedDefaultDict.__setitem__(self, key, value)
+
+ def update(self, iterable={}, **kwargs):
+ """Like dict.update(), but using the context's setitem.
+
+ This function is transactional: if setitem fails for one of the values,
+ the context is not updated at all."""
+ if isinstance(iterable, dict):
+ iterable = iterable.items()
+
+ update = {}
+ for key, value in itertools.chain(iterable, kwargs.items()):
+ stored_type = self._validate(key, value)
+ # Don't create an instance of stored_type if coercion is needed,
+ # until all values are validated.
+ update[key] = (value, stored_type)
+ for key, (value, stored_type) in update.items():
+ if not isinstance(value, stored_type):
+ update[key] = stored_type(value)
+ else:
+ update[key] = value
+ KeyedDefaultDict.update(self, update)
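+
+ # Illustrative sketch (hypothetical variable): assuming VARIABLES
+ # defines 'PROGRAM' with unicode storage, ctx.update(PROGRAM='foo')
+ # validates every key first and only then coerces 'foo' to the stored
+ # type; if any key fails validation, no value is written at all.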
+
+
+class TemplateContext(Context):
+ def __init__(self, template=None, allowed_variables={}, config=None):
+ self.template = template
+ super(TemplateContext, self).__init__(allowed_variables, config)
+
+ def _validate(self, key, value):
+ return Context._validate(self, key, value, True)
+
+
+class SubContext(Context, ContextDerivedValue):
+ """A Context derived from another Context.
+
+ Sub-contexts are intended to be used as context managers.
+
+ Sub-contexts inherit paths and other relevant state from the parent
+ context.
+ """
+ def __init__(self, parent):
+ assert isinstance(parent, Context)
+
+ Context.__init__(self, allowed_variables=self.VARIABLES,
+ config=parent.config)
+
+ # Copy state from parent.
+ for p in parent.source_stack:
+ self.push_source(p)
+ self._sandbox = parent._sandbox
+
+ def __enter__(self):
+ if not self._sandbox or self._sandbox() is None:
+ raise Exception('a sandbox is required')
+
+ self._sandbox().push_subcontext(self)
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._sandbox().pop_subcontext(self)
+
+
+class InitializedDefines(ContextDerivedValue, OrderedDict):
+ def __init__(self, context, value=None):
+ OrderedDict.__init__(self)
+ for define in context.config.substs.get('MOZ_DEBUG_DEFINES', ()):
+ self[define] = 1
+ if value:
+ self.update(value)
+
+
+class FinalTargetValue(ContextDerivedValue, unicode):
+ def __new__(cls, context, value=""):
+ if not value:
+ value = 'dist/'
+ if context['XPI_NAME']:
+ value += 'xpi-stage/' + context['XPI_NAME']
+ else:
+ value += 'bin'
+ if context['DIST_SUBDIR']:
+ value += '/' + context['DIST_SUBDIR']
+ return unicode.__new__(cls, value)
+
+
+def Enum(*values):
+ assert len(values)
+ default = values[0]
+
+ class EnumClass(object):
+ def __new__(cls, value=None):
+ if value is None:
+ return default
+ if value in values:
+ return value
+ raise ValueError('Invalid value. Allowed values are: %s'
+ % ', '.join(repr(v) for v in values))
+ return EnumClass
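+
+# Illustrative sketch: Enum(None, False, True)() returns None (the first
+# value is the default), Enum(None, False, True)(True) returns True, and
+# any input outside the allowed values raises ValueError.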
+
+
+class PathMeta(type):
+ """Meta class for the Path family of classes.
+
+ It handles calling __new__ and __init__ with the right arguments
+ in cases where a Path is instantiated with another instance of
+ Path instead of having received a context.
+
+ It also makes Path(context, value) instantiate one of the
+ subclasses depending on the value, allowing callers to do
+ standard type checking (isinstance(path, ObjDirPath)) instead
+ of checking the value itself (path.startswith('!')).
+ """
+ def __call__(cls, context, value=None):
+ if isinstance(context, Path):
+ assert value is None
+ value = context
+ context = context.context
+ else:
+ assert isinstance(context, Context)
+ if isinstance(value, Path):
+ context = value.context
+ if not issubclass(cls, (SourcePath, ObjDirPath, AbsolutePath)):
+ if value.startswith('!'):
+ cls = ObjDirPath
+ elif value.startswith('%'):
+ cls = AbsolutePath
+ else:
+ cls = SourcePath
+ return super(PathMeta, cls).__call__(context, value)
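+
+# Illustrative sketch of the dispatch (names hypothetical): given a Context
+# ctx, Path(ctx, '!foo.h') yields an ObjDirPath, Path(ctx, '%/usr/lib/libc.so')
+# yields an AbsolutePath, and Path(ctx, 'foo.h') yields a SourcePath.
+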
+
+class Path(ContextDerivedValue, unicode):
+ """Stores and resolves a source path relative to a given context
+
+ This class is used as a backing type for some of the sandbox variables.
+ It expresses paths relative to a context. Supported paths are:
+ - '/topsrcdir/relative/paths'
+ - 'srcdir/relative/paths'
+ - '!/topobjdir/relative/paths'
+ - '!objdir/relative/paths'
+ - '%/filesystem/absolute/paths'
+ """
+ __metaclass__ = PathMeta
+
+ def __new__(cls, context, value=None):
+ return super(Path, cls).__new__(cls, value)
+
+ def __init__(self, context, value=None):
+ # Only subclasses should be instantiated.
+ assert self.__class__ != Path
+ self.context = context
+ self.srcdir = context.srcdir
+
+ def join(self, *p):
+ """ContextDerived equivalent of mozpath.join(self, *p), returning a
+ new Path instance.
+ """
+ return Path(self.context, mozpath.join(self, *p))
+
+ def __cmp__(self, other):
+ if isinstance(other, Path) and self.srcdir != other.srcdir:
+ return cmp(self.full_path, other.full_path)
+ return cmp(unicode(self), other)
+
+ # __cmp__ is not enough because unicode has __eq__, __ne__, etc. defined
+ # and __cmp__ is only used for those when they don't exist.
+ def __eq__(self, other):
+ return self.__cmp__(other) == 0
+
+ def __ne__(self, other):
+ return self.__cmp__(other) != 0
+
+ def __lt__(self, other):
+ return self.__cmp__(other) < 0
+
+ def __gt__(self, other):
+ return self.__cmp__(other) > 0
+
+ def __le__(self, other):
+ return self.__cmp__(other) <= 0
+
+ def __ge__(self, other):
+ return self.__cmp__(other) >= 0
+
+ def __repr__(self):
+ return '<%s (%s)%s>' % (self.__class__.__name__, self.srcdir, self)
+
+ def __hash__(self):
+ return hash(self.full_path)
+
+ @memoized_property
+ def target_basename(self):
+ return mozpath.basename(self.full_path)
+
+
+class SourcePath(Path):
+ """Like Path, but limited to paths in the source directory."""
+ def __init__(self, context, value):
+ if value.startswith('!'):
+ raise ValueError('Object directory paths are not allowed')
+ if value.startswith('%'):
+ raise ValueError('Filesystem absolute paths are not allowed')
+ super(SourcePath, self).__init__(context, value)
+
+ if value.startswith('/'):
+ path = None
+ # If the path starts with a '/' and is actually relative to an
+ # external source dir, use that as base instead of topsrcdir.
+ if context.config.external_source_dir:
+ path = mozpath.join(context.config.external_source_dir,
+ value[1:])
+ if not path or not os.path.exists(path):
+ path = mozpath.join(context.config.topsrcdir,
+ value[1:])
+ else:
+ path = mozpath.join(self.srcdir, value)
+ self.full_path = mozpath.normpath(path)
+
+ @memoized_property
+ def translated(self):
+ """Returns the corresponding path in the objdir.
+
+ Ideally, we wouldn't need this function, but because source paths
+ under topsrcdir and under the external source dir both end up mixed
+ in the objdir (aka pseudo-rework), it is needed.
+ """
+ return ObjDirPath(self.context, '!%s' % self).full_path
+
+
+class RenamedSourcePath(SourcePath):
+ """Like SourcePath, but with a different base name when installed.
+
+ The constructor takes a tuple of (source, target_basename).
+
+ This class is not meant to be exposed to moz.build sandboxes as of now,
+ and is not supported by the RecursiveMake backend.
+ """
+ def __init__(self, context, value):
+ assert isinstance(value, tuple)
+ source, self._target_basename = value
+ super(RenamedSourcePath, self).__init__(context, source)
+
+ @property
+ def target_basename(self):
+ return self._target_basename
+
+
+class ObjDirPath(Path):
+ """Like Path, but limited to paths in the object directory."""
+ def __init__(self, context, value=None):
+ if not value.startswith('!'):
+ raise ValueError('Object directory paths must start with ! prefix')
+ super(ObjDirPath, self).__init__(context, value)
+
+ if value.startswith('!/'):
+ path = mozpath.join(context.config.topobjdir, value[2:])
+ else:
+ path = mozpath.join(context.objdir, value[1:])
+ self.full_path = mozpath.normpath(path)
+
+
+class AbsolutePath(Path):
+ """Like Path, but allows arbitrary paths outside the source and object directories."""
+ def __init__(self, context, value=None):
+ if not value.startswith('%'):
+ raise ValueError('Absolute paths must start with % prefix')
+ if not os.path.isabs(value[1:]):
+ raise ValueError('Path \'%s\' is not absolute' % value[1:])
+ super(AbsolutePath, self).__init__(context, value)
+
+ self.full_path = mozpath.normpath(value[1:])
+
+
+@memoize
+def ContextDerivedTypedList(klass, base_class=List):
+ """Specialized TypedList for use with ContextDerivedValue types.
+ """
+ assert issubclass(klass, ContextDerivedValue)
+ class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
+ def __init__(self, context, iterable=[]):
+ self.context = context
+ super(_TypedList, self).__init__(iterable)
+
+ def normalize(self, e):
+ if not isinstance(e, klass):
+ e = klass(self.context, e)
+ return e
+
+ return _TypedList
+
+
+@memoize
+def ContextDerivedTypedListWithItems(type, base_class=List):
+ """Specialized TypedList for use with ContextDerivedValue types.
+ """
+ class _TypedListWithItems(ContextDerivedTypedList(type, base_class)):
+ def __getitem__(self, name):
+ name = self.normalize(name)
+ return super(_TypedListWithItems, self).__getitem__(name)
+
+ return _TypedListWithItems
+
+
+@memoize
+def ContextDerivedTypedRecord(*fields):
+ """Factory for objects with certain properties and dynamic
+ type checks.
+
+ This API is extremely similar to the TypedNamedTuple API,
+ except that properties may be mutated. This supports syntax like:
+
+ VARIABLE_NAME.property += [
+ 'item1',
+ 'item2',
+ ]
+ """
+
+ class _TypedRecord(ContextDerivedValue):
+ __slots__ = tuple([name for name, _ in fields])
+
+ def __init__(self, context):
+ for fname, ftype in self._fields.items():
+ if issubclass(ftype, ContextDerivedValue):
+ setattr(self, fname, self._fields[fname](context))
+ else:
+ setattr(self, fname, self._fields[fname]())
+
+ def __setattr__(self, name, value):
+ if name in self._fields and not isinstance(value, self._fields[name]):
+ value = self._fields[name](value)
+ object.__setattr__(self, name, value)
+
+ _TypedRecord._fields = dict(fields)
+ return _TypedRecord
+
+
+@memoize
+def ContextDerivedTypedHierarchicalStringList(type):
+ """Specialized HierarchicalStringList for use with ContextDerivedValue
+ types."""
+ class _TypedListWithItems(ContextDerivedValue, HierarchicalStringList):
+ __slots__ = ('_strings', '_children', '_context')
+
+ def __init__(self, context):
+ self._strings = ContextDerivedTypedList(
+ type, StrictOrderingOnAppendList)(context)
+ self._children = {}
+ self._context = context
+
+ def _get_exportvariable(self, name):
+ child = self._children.get(name)
+ if not child:
+ child = self._children[name] = _TypedListWithItems(
+ self._context)
+ return child
+
+ return _TypedListWithItems
+
+
+def OrderedListWithAction(action):
+ """Returns a class which behaves as a StrictOrderingOnAppendList, but
+ invokes the given callable with each input and a context as it is
+ read, storing a tuple including the result and the original item.
+
+ This is used to extend moz.build reading to make more data available in
+ filesystem-reading mode.
+ """
+ class _OrderedListWithAction(ContextDerivedValue,
+ StrictOrderingOnAppendListWithAction):
+ def __init__(self, context, *args):
+ def _action(item):
+ return item, action(context, item)
+ super(_OrderedListWithAction, self).__init__(action=_action, *args)
+
+ return _OrderedListWithAction
+
+
+def TypedListWithAction(typ, action):
+ """Returns a class which behaves as a TypedList with the provided type, but
+ invokes the given callable with each input and a context as it is
+ read, storing a tuple including the result and the original item.
+
+ This is used to extend moz.build reading to make more data available in
+ filesystem-reading mode.
+ """
+ class _TypedListWithAction(ContextDerivedValue, TypedList(typ), ListWithAction):
+ def __init__(self, context, *args):
+ def _action(item):
+ return item, action(context, item)
+ super(_TypedListWithAction, self).__init__(action=_action, *args)
+ return _TypedListWithAction
+
+WebPlatformTestManifest = TypedNamedTuple("WebPlatformTestManifest",
+ [("manifest_path", unicode),
+ ("test_root", unicode)])
+ManifestparserManifestList = OrderedListWithAction(read_manifestparser_manifest)
+ReftestManifestList = OrderedListWithAction(read_reftest_manifest)
+WptManifestList = TypedListWithAction(WebPlatformTestManifest, read_wpt_manifest)
+
+OrderedSourceList = ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList)
+OrderedTestFlavorList = TypedList(Enum(*all_test_flavors()),
+ StrictOrderingOnAppendList)
+OrderedStringList = TypedList(unicode, StrictOrderingOnAppendList)
+DependentTestsEntry = ContextDerivedTypedRecord(('files', OrderedSourceList),
+ ('tags', OrderedStringList),
+ ('flavors', OrderedTestFlavorList))
+BugzillaComponent = TypedNamedTuple('BugzillaComponent',
+ [('product', unicode), ('component', unicode)])
+
+
+class Files(SubContext):
+ """Metadata attached to files.
+
+ It is common to want to annotate files with metadata, such as which
+ Bugzilla component tracks issues with certain files. This sub-context is
+ where we stick that metadata.
+
+ The argument to this sub-context is a file matching pattern that is applied
+ against the host file's directory. If the pattern matches a file whose info
+ is currently being sought, the metadata attached to this instance will be
+ applied to that file.
+
+ Patterns are collections of filename characters with ``/`` used as the
+ directory separator (UNIX-style paths) and ``*`` and ``**`` used to denote
+ wildcard matching.
+
+ Patterns without the ``*`` character are literal matches and will match at
+ most one entity.
+
+ Patterns with ``*`` or ``**`` are wildcard matches. ``*`` matches files
+ at least within a single directory. ``**`` matches files across several
+ directories.
+
+ ``foo.html``
+ Will match only the ``foo.html`` file in the current directory.
+ ``*.jsm``
+ Will match all ``.jsm`` files in the current directory.
+ ``**/*.cpp``
+ Will match all ``.cpp`` files in this and all child directories.
+ ``foo/*.css``
+ Will match all ``.css`` files in the ``foo/`` directory.
+ ``bar/*``
+ Will match all files in the ``bar/`` directory and all of its
+ child directories.
+ ``bar/**``
+ This is equivalent to ``bar/*`` above.
+ ``bar/**/foo``
+ Will match all ``foo`` files in the ``bar/`` directory and all of its
+ child directories.
+
+ The difference in behavior between ``*`` and ``**`` is only evident if
+ a pattern follows the ``*`` or ``**``. A pattern ending with ``*`` is
+ greedy. ``**`` is needed when you need an additional pattern after the
+ wildcard. e.g. ``**/foo``.
+ """
+
+ VARIABLES = {
+ 'BUG_COMPONENT': (BugzillaComponent, tuple,
+ """The bug component that tracks changes to these files.
+
+ Values are a 2-tuple of unicode describing the Bugzilla product and
+ component. e.g. ``('Core', 'Build Config')``.
+ """),
+
+ 'FINAL': (bool, bool,
+ """Mark variable assignments as finalized.
+
+ During normal processing, values from newer Files contexts
+ overwrite previously set values. Last write wins. This behavior is
+ not always desired. ``FINAL`` provides a mechanism to prevent
+ further updates to a variable.
+
+ When ``FINAL`` is set, the value of all variables defined in this
+ context are marked as frozen and all subsequent writes to them
+ are ignored during metadata reading.
+
+ See :ref:`mozbuild_files_metadata_finalizing` for more info.
+ """),
+ 'IMPACTED_TESTS': (DependentTestsEntry, list,
+ """File patterns, tags, and flavors for tests relevant to these files.
+
+ Maps source files to the tests potentially impacted by those files.
+ Tests can be specified by file pattern, tag, or flavor.
+
+ For example::
+
+ with Files('runtests.py'):
+ IMPACTED_TESTS.files += [
+ '**',
+ ]
+
+ in testing/mochitest/moz.build will suggest that any of the tests
+ under testing/mochitest may be impacted by a change to runtests.py.
+
+ File patterns may be made relative to the topsrcdir with a leading
+ '/', so::
+
+ with Files('httpd.js'):
+ IMPACTED_TESTS.files += [
+ '/testing/mochitest/tests/Harness_sanity/**',
+ ]
+
+ in netwerk/test/httpserver/moz.build will suggest that any change to httpd.js
+ will be relevant to the mochitest sanity tests.
+
+ Tags and flavors are sorted string lists (flavors are limited to valid
+ values).
+
+ For example::
+
+ with Files('toolkit/devtools/*'):
+ IMPACTED_TESTS.tags += [
+ 'devtools',
+ ]
+
+ in the root moz.build would suggest that any test tagged 'devtools' would
+ potentially be impacted by a change to a file under toolkit/devtools, and
+
+ with Files('dom/base/nsGlobalWindow.cpp'):
+ IMPACTED_TESTS.flavors += [
+ 'mochitest',
+ ]
+
+ Would suggest that nsGlobalWindow.cpp is potentially relevant to
+ any plain mochitest.
+ """),
+ }
+
+ def __init__(self, parent, pattern=None):
+ super(Files, self).__init__(parent)
+ self.pattern = pattern
+ self.finalized = set()
+ self.test_files = set()
+ self.test_tags = set()
+ self.test_flavors = set()
+
+ def __iadd__(self, other):
+ assert isinstance(other, Files)
+
+ self.test_files |= other.test_files
+ self.test_tags |= other.test_tags
+ self.test_flavors |= other.test_flavors
+
+ for k, v in other.items():
+ if k == 'IMPACTED_TESTS':
+ self.test_files |= set(mozpath.relpath(e.full_path, e.context.config.topsrcdir)
+ for e in v.files)
+ self.test_tags |= set(v.tags)
+ self.test_flavors |= set(v.flavors)
+ continue
+
+ # Ignore updates to finalized flags.
+ if k in self.finalized:
+ continue
+
+ # Only finalize variables defined in this instance.
+ if k == 'FINAL':
+ self.finalized |= set(other) - {'FINAL'}
+ continue
+
+ self[k] = v
+
+ return self
+
+ def asdict(self):
+ """Return this instance as a dict with built-in data structures.
+
+ Call this to obtain an object suitable for serializing.
+ """
+ d = {}
+ if 'BUG_COMPONENT' in self:
+ bc = self['BUG_COMPONENT']
+ d['bug_component'] = (bc.product, bc.component)
+
+ return d
+
+ @staticmethod
+ def aggregate(files):
+ """Given a mapping of path to Files, obtain aggregate results.
+
+ Consumers may want to extract useful information from a collection of
+ Files describing paths. e.g. given the files info data for N paths,
+ recommend a single bug component based on the most frequent one. This
+ function provides logic for deriving aggregate knowledge from a
+ collection of path File metadata.
+
+ Note: the intent of this function is to operate on the result of
+ :py:func:`mozbuild.frontend.reader.BuildReader.files_info`. The
+ :py:func:`mozbuild.frontend.context.Files` instances passed in are
+ thus the "collapsed" (``__iadd__``ed) results of all ``Files`` from all
+ moz.build files relevant to a specific path, not individual ``Files``
+ instances from a single moz.build file.
+ """
+ d = {}
+
+ bug_components = Counter()
+
+ for f in files.values():
+ bug_component = f.get('BUG_COMPONENT')
+ if bug_component:
+ bug_components[bug_component] += 1
+
+ d['bug_component_counts'] = []
+ for c, count in bug_components.most_common():
+ component = (c.product, c.component)
+ d['bug_component_counts'].append((component, count))
+
+ if 'recommended_bug_component' not in d:
+ d['recommended_bug_component'] = component
+ recommended_count = count
+ elif count == recommended_count:
+ # Don't recommend a component if it doesn't have a clear lead.
+ d['recommended_bug_component'] = None
+
+ # In case no bug components.
+ d.setdefault('recommended_bug_component', None)
+
+ return d
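+
+ # Illustrative sketch (hypothetical data): given two collapsed Files
+ # instances that both carry BUG_COMPONENT ('Core', 'Build Config'),
+ # aggregate() recommends that component; if two components tie for the
+ # lead, 'recommended_bug_component' ends up None.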
+
+
+# This defines functions that create sub-contexts.
+#
+# Values are classes that are SubContexts. The class name will be turned into
+# a function that when called emits an instance of that class.
+#
+# Arbitrary arguments can be passed to the class constructor. The first
+# argument is always the parent context. It is up to each class to perform
+# argument validation.
+SUBCONTEXTS = [
+ Files,
+]
+
+for cls in SUBCONTEXTS:
+ if not issubclass(cls, SubContext):
+ raise ValueError('SUBCONTEXTS entry not a SubContext class: %s' % cls)
+
+ if not hasattr(cls, 'VARIABLES'):
+ raise ValueError('SUBCONTEXTS entry does not have VARIABLES: %s' % cls)
+
+SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS}
+
+
+# This defines the set of mutable global variables.
+#
+# Each variable is a tuple of:
+#
+# (storage_type, input_types, docs)
+
+VARIABLES = {
+ 'ALLOW_COMPILER_WARNINGS': (bool, bool,
+ """Whether to allow compiler warnings (i.e. *not* treat them as
+ errors).
+
+ This is commonplace (almost mandatory, in fact) in directories
+ containing third-party code that we regularly update from upstream and
+ thus do not control, but is otherwise discouraged.
+ """),
+
+ # Variables controlling reading of other frontend files.
+ 'ANDROID_GENERATED_RESFILES': (StrictOrderingOnAppendList, list,
+ """Android resource files generated as part of the build.
+
+ This variable contains a list of files that are expected to be
+ generated (often by preprocessing) into a 'res' directory as
+ part of the build process, and subsequently merged into an APK
+ file.
+ """),
+
+ 'ANDROID_APK_NAME': (unicode, unicode,
+ """The name of an Android APK file to generate.
+ """),
+
+ 'ANDROID_APK_PACKAGE': (unicode, unicode,
+ """The name of the Android package to generate R.java for, like org.mozilla.gecko.
+ """),
+
+ 'ANDROID_EXTRA_PACKAGES': (StrictOrderingOnAppendList, list,
+ """The name of extra Android packages to generate R.java for, like ['org.mozilla.other'].
+ """),
+
+ 'ANDROID_EXTRA_RES_DIRS': (ContextDerivedTypedListWithItems(Path, List), list,
+ """Android extra package resource directories.
+
+ This variable contains a list of directories containing static files
+ to package into a 'res' directory and merge into an APK file. These
+ directories are packaged into the APK but are assumed to be static
+ unchecked dependencies that should not be otherwise re-distributed.
+ """),
+
+ 'ANDROID_RES_DIRS': (ContextDerivedTypedListWithItems(Path, List), list,
+ """Android resource directories.
+
+ This variable contains a list of directories containing static
+ files to package into a 'res' directory and merge into an APK
+ file.
+ """),
+
+ 'ANDROID_ASSETS_DIRS': (ContextDerivedTypedListWithItems(Path, List), list,
+ """Android assets directories.
+
+ This variable contains a list of directories containing static
+ files to package into an 'assets' directory and merge into an
+ APK file.
+ """),
+
+ 'ANDROID_ECLIPSE_PROJECT_TARGETS': (dict, dict,
+ """Defines Android Eclipse project targets.
+
+ This variable should not be populated directly. Instead, it should be
+ populated by calling add_android_eclipse{_library}_project().
+ """),
+
+ 'SOURCES': (ContextDerivedTypedListWithItems(Path, StrictOrderingOnAppendListWithFlagsFactory({'no_pgo': bool, 'flags': List})), list,
+ """Source code files.
+
+ This variable contains a list of source code files to compile.
+ Accepts assembler, C, C++, Objective C/C++.
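+
+ An illustrative sketch (file and flag names are hypothetical)::
+
+ SOURCES += ['foo.cpp']
+ SOURCES['foo.cpp'].flags += ['-Wno-hypothetical']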
+ """),
+
+ 'FILES_PER_UNIFIED_FILE': (int, int,
+ """The number of source files to compile into each unified source file.
+
+ """),
+
+ 'IS_RUST_LIBRARY': (bool, bool,
+ """Whether the current library defined by this moz.build is built by Rust.
+
+ The library defined by this moz.build should have a build definition in
+ a Cargo.toml file that exists in this moz.build's directory.
+ """),
+
+ 'UNIFIED_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
+ """Source code files that can be compiled together.
+
+ This variable contains a list of source code files to compile,
+ that can be concatenated all together and built as a single source
+ file. This can help make the build faster and reduce the debug info
+ size.
+ """),
+
+ 'GENERATED_FILES': (StrictOrderingOnAppendListWithFlagsFactory({
+ 'script': unicode,
+ 'inputs': list }), list,
+ """Generic generated files.
+
+ This variable contains a list of files for the build system to
+ generate at export time. The generation method may be declared
+ with optional ``script`` and ``inputs`` flags on individual entries.
+ If the optional ``script`` flag is not present on an entry, it
+ is assumed that rules for generating the file are present in
+ the associated Makefile.in.
+
+ Example::
+
+ GENERATED_FILES += ['bar.c', 'baz.c', 'foo.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py'
+ bar.inputs = ['datafile-for-bar']
+ foo = GENERATED_FILES['foo.c']
+ foo.script = 'generate.py'
+ foo.inputs = ['datafile-for-foo']
+
+ This definition will generate bar.c by calling the main method of
+ generate.py with an open (for writing) file object for bar.c, and
+ the string ``datafile-for-bar``. In a similar fashion, the main
+ method of generate.py will also be called with an open
+ (for writing) file object for foo.c and the string
+ ``datafile-for-foo``. Please note that only string arguments are
+ supported for passing to scripts, and that all arguments provided
+ to the script should be filenames relative to the directory in which
+ the moz.build file is located.
+
+ To enable using the same script for generating multiple files with
+ slightly different non-filename parameters, alternative entry points
+ into ``script`` can be specified::
+
+ GENERATED_FILES += ['bar.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py:make_bar'
+
+ The chosen script entry point may optionally return a set of strings,
+ indicating extra files the output depends on.
+ """),
+
+ 'DEFINES': (InitializedDefines, dict,
+ """Dictionary of compiler defines to declare.
+
+ These are passed in to the compiler as ``-Dkey='value'`` for string
+ values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
+ value is True. Note that for string values, the outer-level of
+ single-quotes will be consumed by the shell. If you want to have
+ a string-literal in the program, the value needs to have
+ double-quotes.
+
+ Example::
+
+ DEFINES['NS_NO_XPCOM'] = True
+ DEFINES['MOZ_EXTENSIONS_DB_SCHEMA'] = 15
+ DEFINES['DLL_SUFFIX'] = '".so"'
+
+ This will result in the compiler flags ``-DNS_NO_XPCOM``,
+ ``-DMOZ_EXTENSIONS_DB_SCHEMA=15``, and ``-DDLL_SUFFIX='".so"'``,
+ respectively. These could also be combined into a single
+ update::
+
+ DEFINES.update({
+ 'NS_NO_XPCOM': True,
+ 'MOZ_EXTENSIONS_DB_SCHEMA': 15,
+ 'DLL_SUFFIX': '".so"',
+ })
+ """),
+
+ 'DELAYLOAD_DLLS': (List, list,
+ """Delay-loaded DLLs.
+
+ This variable contains a list of DLL files which the module being linked
+ should load lazily. This only has an effect when building with MSVC.
+ """),
+
+ 'DIRS': (ContextDerivedTypedList(SourcePath), list,
+ """Child directories to descend into looking for build frontend files.
+
+ This works similarly to the ``DIRS`` variable in make files. Each str
+ value in the list is the name of a child directory. When this file is
+ done parsing, the build reader will descend into each listed directory
+ and read the frontend file there. If there is no frontend file, an error
+ is raised.
+
+ Values are relative paths. They can be multiple directory levels
+ above or below. Use ``..`` for parent directories and ``/`` for path
+ delimiters.
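+
+ For example (directory names here are hypothetical)::
+
+ DIRS += ['base', 'tests']
+ DIRS += ['../shared']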
+ """),
+
+ 'HAS_MISC_RULE': (bool, bool,
+ """Whether this directory should be traversed in the ``misc`` tier.
+
+ Many ``libs`` rules still exist in Makefile.in files. We highly prefer
+ that these rules exist in the ``misc`` tier/target so that they can be
+ executed concurrently during tier traversal (the ``misc`` tier is
+ fully concurrent).
+
+ Presence of this variable indicates that this directory should be
+ traversed by the ``misc`` tier.
+
+ Please note that converting ``libs`` rules to the ``misc`` tier must
+ be done with care, as there are many implicit dependencies that can
+ break the build in subtle ways.
+ """),
+
+ 'FINAL_TARGET_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be installed into the application directory.
+
+ ``FINAL_TARGET_FILES`` will copy (or symlink, if the platform supports it)
+ the contents of its files to the directory specified by
+ ``FINAL_TARGET`` (typically ``dist/bin``). Files that are destined for a
+ subdirectory can be specified by accessing a field or by dict access.
+ For example, to export ``foo.png`` to the top-level directory and
+ ``bar.svg`` to the directory ``images/do-not-use``, append to
+ ``FINAL_TARGET_FILES`` like so::
+
+ FINAL_TARGET_FILES += ['foo.png']
+ FINAL_TARGET_FILES.images['do-not-use'] += ['bar.svg']
+ """),
+
+ 'DISABLE_STL_WRAPPING': (bool, bool,
+ """Disable the wrappers for STL which allow it to work with C++ exceptions
+ disabled.
+ """),
+
+ 'FINAL_TARGET_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """Like ``FINAL_TARGET_FILES``, with preprocessing.
+ """),
+
+ 'OBJDIR_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be installed anywhere in the objdir. Use sparingly.
+
+ ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying
+ anywhere in the object directory. This is intended for various one-off
+ cases, not for general use. If you wish to add entries to OBJDIR_FILES,
+ please consult a build peer.
+ """),
+
+ 'OBJDIR_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
+ """),
+
+ 'FINAL_LIBRARY': (unicode, unicode,
+ """Library in which the objects of the current directory will be linked.
+
+ This variable contains the name of a library, defined elsewhere with
+ ``LIBRARY_NAME``, in which the objects of the current directory will be
+ linked.
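+
+ Example (the library name is illustrative)::
+
+ FINAL_LIBRARY = 'xul'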
+ """),
+
+ 'CPP_UNIT_TESTS': (StrictOrderingOnAppendList, list,
+ """Compile a list of C++ unit test names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """),
+
+ 'FORCE_SHARED_LIB': (bool, bool,
+ """Whether the library in this directory is a shared library.
+ """),
+
+ 'FORCE_STATIC_LIB': (bool, bool,
+ """Whether the library in this directory is a static library.
+ """),
+
+ 'USE_STATIC_LIBS': (bool, bool,
+ """Whether the code in this directory is a built against the static
+ runtime library.
+
+ This variable only has an effect when building with MSVC.
+ """),
+
+ 'HOST_SOURCES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
+ """Source code files to compile with the host compiler.
+
+ This variable contains a list of source code files to compile
+ with the host compiler.
+ """),
+
+ 'IS_COMPONENT': (bool, bool,
+ """Whether the library contains a binary XPCOM component manifest.
+
+ Implies FORCE_SHARED_LIB.
+ """),
+
+ 'PYTHON_UNIT_TESTS': (StrictOrderingOnAppendList, list,
+ """A list of python unit tests.
+ """),
+
+ 'HOST_LIBRARY_NAME': (unicode, unicode,
+ """Name of target library generated when cross compiling.
+ """),
+
+ 'JAVA_JAR_TARGETS': (dict, dict,
+ """Defines Java JAR targets to be built.
+
+ This variable should not be populated directly. Instead, it should be
+ populated by calling add_java_jar().
+ """),
+
+ 'LIBRARY_DEFINES': (OrderedDict, dict,
+ """Dictionary of compiler defines to declare for the entire library.
+
+ This variable works like DEFINES, except that declarations apply to all
+ libraries that link into this library via FINAL_LIBRARY.
+ """),
+
+ 'LIBRARY_NAME': (unicode, unicode,
+ """The code name of the library generated for a directory.
+
+ By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
+ In ``example/components/moz.build``::
+
+ LIBRARY_NAME = 'xpcomsample'
+
+ would generate ``example/components/libxpcomsample.so`` on Linux, or
+ ``example/components/xpcomsample.lib`` on Windows.
+ """),
+
+ 'SHARED_LIBRARY_NAME': (unicode, unicode,
+ """The name of the static library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_SHARED_LIB.
+ """),
+
+ 'IS_FRAMEWORK': (bool, bool,
+ """Whether the library to build should be built as a framework on OSX.
+
+ This implies the name of the library won't be prefixed nor suffixed.
+ Implies FORCE_SHARED_LIB.
+ """),
+
+ 'STATIC_LIBRARY_NAME': (unicode, unicode,
+ """The name of the static library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_STATIC_LIB.
+ """),
+
+ 'USE_LIBS': (StrictOrderingOnAppendList, list,
+ """List of libraries to link to programs and libraries.
+ """),
+
+ 'HOST_USE_LIBS': (StrictOrderingOnAppendList, list,
+ """List of libraries to link to host programs and libraries.
+ """),
+
+ 'HOST_OS_LIBS': (List, list,
+ """List of system libraries for host programs and libraries.
+ """),
+
+ 'LOCAL_INCLUDES': (ContextDerivedTypedList(Path, StrictOrderingOnAppendList), list,
+ """Additional directories to be searched for include files by the compiler.
+ """),
+
+ 'NO_PGO': (bool, bool,
+ """Whether profile-guided optimization is disable in this directory.
+ """),
+
+ 'NO_VISIBILITY_FLAGS': (bool, bool,
+ """Build sources listed in this file without VISIBILITY_FLAGS.
+ """),
+
+ 'OS_LIBS': (List, list,
+ """System link libraries.
+
+ This variable contains a list of system libraries to link against.
+ """),
+ 'RCFILE': (unicode, unicode,
+ """The program .rc file.
+
+ This variable can only be used on Windows.
+ """),
+
+ 'RESFILE': (unicode, unicode,
+ """The program .res file.
+
+ This variable can only be used on Windows.
+ """),
+
+ 'RCINCLUDE': (unicode, unicode,
+ """The resource script file to be included in the default .res file.
+
+ This variable can only be used on Windows.
+ """),
+
+ 'DEFFILE': (unicode, unicode,
+ """The program .def (module definition) file.
+
+ This variable can only be used on Windows.
+ """),
+
+ 'LD_VERSION_SCRIPT': (unicode, unicode,
+ """The linker version script for shared libraries.
+
+ This variable can only be used on Linux.
+ """),
+
+ 'SYMBOLS_FILE': (Path, unicode,
+ """A file containing a list of symbols to export from a shared library.
+
+ The given file contains a list of symbols to be exported, and is
+ preprocessed.
+ A special marker "@DATA@" must be added after a symbol name if it
+ points to data instead of code, so that the Windows linker can treat
+ them correctly.
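+
+ Example (the file name is hypothetical)::
+
+ SYMBOLS_FILE = 'foo.symbols'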
+ """),
+
+ 'BRANDING_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be installed into the branding directory.
+
+ ``BRANDING_FILES`` will copy (or symlink, if the platform supports it)
+ the contents of its files to the ``dist/branding`` directory. Files that
+ are destined for a subdirectory can be specified by accessing a field.
+ For example, to export ``foo.png`` to the top-level directory and
+ ``bar.png`` to the directory ``images/subdir``, append to
+ ``BRANDING_FILES`` like so::
+
+ BRANDING_FILES += ['foo.png']
+ BRANDING_FILES.images.subdir += ['bar.png']
+ """),
+
+ 'SDK_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be installed into the sdk directory.
+
+ ``SDK_FILES`` will copy (or symlink, if the platform supports it)
+ the contents of its files to the ``dist/sdk`` directory. Files that
+ are destined for a subdirectory can be specified by accessing a field.
+ For example, to export ``foo.py`` to the top-level directory and
+ ``bar.py`` to the directory ``subdir``, append to
+ ``SDK_FILES`` like so::
+
+ SDK_FILES += ['foo.py']
+ SDK_FILES.subdir += ['bar.py']
+ """),
+
+ 'SDK_LIBRARY': (bool, bool,
+ """Whether the library built in the directory is part of the SDK.
+
+ The library will be copied into ``SDK_LIB_DIR`` (``$DIST/sdk/lib``).
+ """),
+
+ 'SIMPLE_PROGRAMS': (StrictOrderingOnAppendList, list,
+ """Compile a list of executable names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """),
+
+ 'SONAME': (unicode, unicode,
+ """The soname of the shared object currently being linked
+
+ soname is the "logical name" of a shared object, often used to provide
+ version backwards compatibility. This variable makes sense only for
+ shared objects, and is supported only on some unix platforms.
+ """),
+
+ 'HOST_SIMPLE_PROGRAMS': (StrictOrderingOnAppendList, list,
+ """Compile a list of host executable names.
+
+ Each name in this variable corresponds to a host executable built
+ from the corresponding source file with the same base name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will
+ be automatically appended to each name. If a name already ends with
+ ``HOST_BIN_SUFFIX``, the name will remain unchanged.
+ """),
+
+ 'CONFIGURE_SUBST_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
+ """Output files that will be generated using configure-like substitution.
+
+ This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this
+ list, we will search for a file in the srcdir having the name
+ ``{path}.in``. The contents of this file will be read and variable
+ patterns like ``@foo@`` will be substituted with the values of the
+ ``AC_SUBST`` variables declared during configure.
+ """),
+
+ 'CONFIGURE_DEFINE_FILES': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
+ """Output files generated from configure/config.status.
+
+ This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
+ similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
+ into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
+ """),
+
+ 'EXPORTS': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be exported, and in which subdirectories.
+
+ ``EXPORTS`` is generally used to list the include files to be exported to
+ ``dist/include``, but it can be used for other files as well. This variable
+ behaves as a list when appending filenames for export in the top-level
+ directory. Files can also be appended to a field to indicate which
+ subdirectory they should be exported to. For example, to export
+ ``foo.h`` to the top-level directory, and ``bar.h`` to ``mozilla/dom/``,
+ append to ``EXPORTS`` like so::
+
+ EXPORTS += ['foo.h']
+ EXPORTS.mozilla.dom += ['bar.h']
+
+ Entries in ``EXPORTS`` are paths, so objdir paths may be used, but
+ any files listed from the objdir must also be listed in
+ ``GENERATED_FILES``.
+ """),
+
+ 'PROGRAM' : (unicode, unicode,
+ """Compiled executable name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``PROGRAM``. If ``PROGRAM`` already ends with
+ ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
+ """),
+
+ 'HOST_PROGRAM' : (unicode, unicode,
+ """Compiled host executable name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``HOST_PROGRAM``. If ``HOST_PROGRAM`` already
+ ends with ``HOST_BIN_SUFFIX``, ``HOST_PROGRAM`` will remain unchanged.
+ """),
+
+ 'DIST_INSTALL': (Enum(None, False, True), bool,
+ """Whether to install certain files into the dist directory.
+
+ By default, some file types are installed in the dist directory, and
+ some aren't. Set this variable to True to force the installation of
+ files that wouldn't be installed by default. Set this variable to
+ False to prevent the installation of files that would be installed by
+ default.
+
+ This is confusing for historical reasons, but eventually, the behavior
+ will be made explicit.
+ """),
+
+ 'JAR_MANIFESTS': (ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), list,
+ """JAR manifest files that should be processed as part of the build.
+
+ JAR manifests are files in the tree that define how to package files
+ into JARs and how chrome registration is performed. For more info,
+ see :ref:`jar_manifests`.
+ """),
+
+ # IDL Generation.
+ 'XPIDL_SOURCES': (StrictOrderingOnAppendList, list,
+ """XPCOM Interface Definition Files (xpidl).
+
+ This is a list of files that define XPCOM interface definitions.
+ Entries must be files that exist. Entries are almost certainly ``.idl``
+ files.
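+
+ Example (interface file names are hypothetical)::
+
+ XPIDL_SOURCES += ['nsIBar.idl', 'nsIFoo.idl']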
+ """),
+
+ 'XPIDL_MODULE': (unicode, unicode,
+ """XPCOM Interface Definition Module Name.
+
+ This is the name of the ``.xpt`` file that is created by linking
+ ``XPIDL_SOURCES`` together. If unspecified, it defaults to be the same
+ as ``MODULE``.
+ """),
+
+ 'XPIDL_NO_MANIFEST': (bool, bool,
+ """Indicate that the XPIDL module should not be added to a manifest.
+
+ This flag exists primarily to prevent test-only XPIDL modules from being
+ added to the application's chrome manifest. Most XPIDL modules should
+ not use this flag.
+ """),
+
+ 'IPDL_SOURCES': (StrictOrderingOnAppendList, list,
+ """IPDL source files.
+
+ These are ``.ipdl`` files that will be parsed and converted to
+ ``.cpp`` files.
+ """),
+
+ 'WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """),
+
+ 'GENERATED_EVENTS_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """WebIDL source files for generated events.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """),
+
+ 'TEST_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """Test WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files
+ if tests are enabled.
+ """),
+
+ 'GENERATED_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """Generated WebIDL source files.
+
+ These will be generated from some other files.
+ """),
+
+ 'PREPROCESSED_TEST_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """Preprocessed test WebIDL source files.
+
+ These will be preprocessed, then parsed and converted to .cpp
+ and ``.h`` files if tests are enabled.
+ """),
+
+ 'PREPROCESSED_WEBIDL_FILES': (StrictOrderingOnAppendList, list,
+ """Preprocessed WebIDL source files.
+
+ These will be preprocessed before being parsed and converted.
+ """),
+
+ 'WEBIDL_EXAMPLE_INTERFACES': (StrictOrderingOnAppendList, list,
+ """Names of example WebIDL interfaces to build as part of the build.
+
+ Names in this list correspond to WebIDL interface names defined in
+ WebIDL files included in the build from one of the \*WEBIDL_FILES
+ variables.
+ """),
+
+ # Test declaration.
+ 'A11Y_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining a11y tests.
+ """),
+
+ 'BROWSER_CHROME_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining browser chrome tests.
+ """),
+
+ 'JETPACK_PACKAGE_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining jetpack package tests.
+ """),
+
+ 'JETPACK_ADDON_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining jetpack addon tests.
+ """),
+
+ 'ANDROID_INSTRUMENTATION_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining Android instrumentation tests.
+ """),
+
+ 'FIREFOX_UI_FUNCTIONAL_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining firefox-ui-functional tests.
+ """),
+
+ 'FIREFOX_UI_UPDATE_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining firefox-ui-update tests.
+ """),
+
+ 'PUPPETEER_FIREFOX_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining puppeteer unit tests for Firefox.
+ """),
+
+ 'MARIONETTE_LAYOUT_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining marionette-layout tests.
+ """),
+
+ 'MARIONETTE_UNIT_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining marionette-unit tests.
+ """),
+
+ 'MARIONETTE_WEBAPI_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining marionette-webapi tests.
+ """),
+
+ 'METRO_CHROME_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining metro browser chrome tests.
+ """),
+
+ 'MOCHITEST_CHROME_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining mochitest chrome tests.
+ """),
+
+ 'MOCHITEST_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining mochitest tests.
+ """),
+
+ 'REFTEST_MANIFESTS': (ReftestManifestList, list,
+ """List of manifest files defining reftests.
+
+ These are commonly named reftest.list.
+ """),
+
+ 'CRASHTEST_MANIFESTS': (ReftestManifestList, list,
+ """List of manifest files defining crashtests.
+
+ These are commonly named crashtests.list.
+ """),
+
+ 'WEB_PLATFORM_TESTS_MANIFESTS': (WptManifestList, list,
+ """List of (manifest_path, test_path) defining web-platform-tests.
+ """),
+
+ 'WEBRTC_SIGNALLING_TEST_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining WebRTC signalling tests.
+ """),
+
+ 'XPCSHELL_TESTS_MANIFESTS': (ManifestparserManifestList, list,
+ """List of manifest files defining xpcshell tests.
+ """),
+
+ # The following variables are used to control the target of installed files.
+ 'XPI_NAME': (unicode, unicode,
+ """The name of an extension XPI to generate.
+
+ When this variable is present, the results of this directory will end up
+ being packaged into an extension instead of the main dist/bin results.
+ """),
+
+ 'DIST_SUBDIR': (unicode, unicode,
+ """The name of an alternate directory to install files to.
+
+ When this variable is present, the results of this directory will end up
+ being placed in the $(DIST_SUBDIR) subdirectory of where it would
+ otherwise be placed.
+ """),
+
+ 'FINAL_TARGET': (FinalTargetValue, unicode,
+ """The name of the directory to install targets to.
+
+ The directory is relative to the top of the object directory. The
+ default value is dependent on the values of XPI_NAME and DIST_SUBDIR. If
+ neither are present, the result is dist/bin. If XPI_NAME is present, the
+ result is dist/xpi-stage/$(XPI_NAME). If DIST_SUBDIR is present, then
+ the $(DIST_SUBDIR) subdirectory of the otherwise-default value is used.
+ """),
+
+ 'USE_EXTENSION_MANIFEST': (bool, bool,
+ """Controls the name of the manifest for JAR files.
+
+ By default, the name of the manifest is ${JAR_MANIFEST}.manifest.
+ Setting this variable to ``True`` changes the name of the manifest to
+ chrome.manifest.
+ """),
+
+ 'NO_JS_MANIFEST': (bool, bool,
+ """Explicitly disclaims responsibility for manifest listing in EXTRA_COMPONENTS.
+
+ Normally, if you have .js files listed in ``EXTRA_COMPONENTS`` or
+ ``EXTRA_PP_COMPONENTS``, you are expected to have a corresponding
+ .manifest file to go with those .js files. Setting ``NO_JS_MANIFEST``
+ indicates that the relevant .manifest file and entries for those .js
+ files are elsewhere (jar.mn, for instance) and this state of affairs
+ is OK.
+ """),
+
+ 'GYP_DIRS': (StrictOrderingOnAppendListWithFlagsFactory({
+ 'variables': dict,
+ 'input': unicode,
+ 'sandbox_vars': dict,
+ 'non_unified_sources': StrictOrderingOnAppendList,
+ }), list,
+ """Defines a list of object directories handled by gyp configurations.
+
+ Elements of this list give the relative object directory. For each
+ element of the list, GYP_DIRS may be accessed as a dictionary
+ (GYP_DIRS['foo']). The object this returns has attributes that need to be
+ set to further specify gyp processing:
+ - input, gives the path to the root gyp configuration file for that
+ object directory.
+ - variables, a dictionary containing variables and values to pass
+ to the gyp processor.
+ - sandbox_vars, a dictionary containing variables and values to
+ pass to the mozbuild processor on top of those derived from gyp
+ configuration.
+ - non_unified_sources, a list containing sources files, relative to
+ the current moz.build, that should be excluded from source file
+ unification.
+
+ Typical use looks like::
+ GYP_DIRS += ['foo', 'bar']
+ GYP_DIRS['foo'].input = 'foo/foo.gyp'
+ GYP_DIRS['foo'].variables = {
+ 'foo': 'bar',
+ (...)
+ }
+ (...)
+ """),
+
+ 'SPHINX_TREES': (dict, dict,
+ """Describes what the Sphinx documentation tree will look like.
+
+ Keys are relative directories inside the final Sphinx documentation
+ tree to install files into. Values are directories (relative to this
+ file) whose content to copy into the Sphinx documentation tree.
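+
+ Example (paths are hypothetical)::
+
+ SPHINX_TREES['mozbuild'] = 'docs'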
+ """),
+
+ 'SPHINX_PYTHON_PACKAGE_DIRS': (StrictOrderingOnAppendList, list,
+ """Directories containing Python packages that Sphinx documents.
+ """),
+
+ 'CFLAGS': (List, list,
+ """Flags passed to the C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'CXXFLAGS': (List, list,
+ """Flags passed to the C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'HOST_DEFINES': (InitializedDefines, dict,
+ """Dictionary of compiler defines to declare for host compilation.
+ See ``DEFINES`` for specifics.
+ """),
+
+ 'CMFLAGS': (List, list,
+ """Flags passed to the Objective-C compiler for all of the Objective-C
+ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'CMMFLAGS': (List, list,
+ """Flags passed to the Objective-C++ compiler for all of the
+ Objective-C++ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'ASFLAGS': (List, list,
+ """Flags passed to the assembler for all of the assembly source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the assembler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'HOST_CFLAGS': (List, list,
+ """Flags passed to the host C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'HOST_CXXFLAGS': (List, list,
+ """Flags passed to the host C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'LDFLAGS': (List, list,
+ """Flags passed to the linker when linking all of the libraries and
+ executables declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'EXTRA_DSO_LDOPTS': (List, list,
+ """Flags passed to the linker when linking a shared library.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """),
+
+ 'WIN32_EXE_LDFLAGS': (List, list,
+ """Flags passed to the linker when linking a Windows .exe executable
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+
+ This variable only has an effect on Windows.
+ """),
+
+ 'TEST_HARNESS_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+ """List of files to be installed for test harnesses.
+
+ ``TEST_HARNESS_FILES`` can be used to install files to any directory
+ under $objdir/_tests. Files can be appended to a field to indicate
+ which subdirectory they should be exported to. For example,
+ to export ``foo.py`` to ``_tests/foo``, append to
+ ``TEST_HARNESS_FILES`` like so::
+
+ TEST_HARNESS_FILES.foo += ['foo.py']
+
+ Files from topsrcdir and the objdir can also be installed by prefixing
+ the path(s) with a '/' character and a '!' character, respectively::
+
+ TEST_HARNESS_FILES.path += ['/build/bar.py', '!quux.py']
+ """),
+
+ 'NO_EXPAND_LIBS': (bool, bool,
+ """Forces to build a real static library, and no corresponding fake
+ library.
+ """),
+
+ 'NO_COMPONENTS_MANIFEST': (bool, bool,
+ """Do not create a binary-component manifest entry for the
+ corresponding XPCOMBinaryComponent.
+ """),
+
+ 'USE_YASM': (bool, bool,
+ """Use the yasm assembler to assemble assembly files from SOURCES.
+
+ By default, the build will use the toolchain assembler, $(AS), to
+ assemble source files in assembly language (.s or .asm files). Setting
+ this value to ``True`` will cause it to use yasm instead.
+
+ If yasm is not available on this system, or does not support the
+ current target architecture, an error will be raised.
+ """),
+}
+
+# Sanity check: we don't want any variable above to have a list as storage type.
+for name, (storage_type, input_types, docs) in VARIABLES.items():
+ if storage_type == list:
+ raise RuntimeError('%s has a "list" storage type. Use "List" instead.'
+ % name)
+
+# Set of variables that are only allowed in templates:
+TEMPLATE_VARIABLES = {
+ 'CPP_UNIT_TESTS',
+ 'FORCE_SHARED_LIB',
+ 'HOST_PROGRAM',
+ 'HOST_LIBRARY_NAME',
+ 'HOST_SIMPLE_PROGRAMS',
+ 'IS_COMPONENT',
+ 'IS_FRAMEWORK',
+ 'LIBRARY_NAME',
+ 'PROGRAM',
+ 'SIMPLE_PROGRAMS',
+}
+
+# Add a note to template variable documentation.
+for name in TEMPLATE_VARIABLES:
+ if name not in VARIABLES:
+ raise RuntimeError('%s is in TEMPLATE_VARIABLES but not in VARIABLES.'
+ % name)
+ storage_type, input_types, docs = VARIABLES[name]
+ docs += 'This variable is only available in templates.\n'
+ VARIABLES[name] = (storage_type, input_types, docs)
+
+
+# The set of functions exposed to the sandbox.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding function from a given sandbox,
+# (argument types), docs)
+#
+# The first element is an attribute on Sandbox that should be a function type.
+#
+FUNCTIONS = {
+ 'include': (lambda self: self._include, (SourcePath,),
+ """Include another mozbuild file in the context of this one.
+
+ This is similar to a ``#include`` in C languages. The filename passed to
+ the function will be read and its contents will be evaluated within the
+ context of the calling file.
+
+ If a relative path is given, it is evaluated as relative to the file
+ currently being processed. If there is a chain of multiple include()
+ calls, the relative path is computed from the most recently included
+ (active) file.
+
+ If an absolute path is given, it is evaluated from ``TOPSRCDIR``. In
+ other words, ``include('/foo')`` references the path
+ ``TOPSRCDIR + '/foo'``.
+
+ Example usage
+ ^^^^^^^^^^^^^
+
+ Include ``sibling.build`` from the current directory::
+
+ include('sibling.build')
+
+ Include ``foo.build`` from a path within the top source directory::
+
+ include('/elsewhere/foo.build')
+ """),
+
+ 'add_java_jar': (lambda self: self._add_java_jar, (str,),
+ """Declare a Java JAR target to be built.
+
+ This is the supported way to populate the JAVA_JAR_TARGETS
+ variable.
+
+ The parameters are:
+ * dest - target name, without the trailing .jar. (required)
+
+ This returns a rich Java JAR type, described at
+ :py:class:`mozbuild.frontend.data.JavaJarData`.
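+
+ Example usage, a sketch assuming hypothetical target and source
+ names::
+
+ jar = add_java_jar('geckoview')
+ jar.sources += ['GeckoView.java']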
+ """),
+
+ 'add_android_eclipse_project': (
+ lambda self: self._add_android_eclipse_project, (str, str),
+ """Declare an Android Eclipse project.
+
+ This is one of the supported ways to populate the
+ ANDROID_ECLIPSE_PROJECT_TARGETS variable.
+
+ The parameters are:
+ * name - project name.
+ * manifest - path to AndroidManifest.xml.
+
+ This returns a rich Android Eclipse project type, described at
+ :py:class:`mozbuild.frontend.data.AndroidEclipseProjectData`.
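+
+ Example usage, with a hypothetical project name and manifest path::
+
+ p = add_android_eclipse_project('MyApp', 'AndroidManifest.xml')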
+ """),
+
+ 'add_android_eclipse_library_project': (
+ lambda self: self._add_android_eclipse_library_project, (str,),
+ """Declare an Android Eclipse library project.
+
+ This is one of the supported ways to populate the
+ ANDROID_ECLIPSE_PROJECT_TARGETS variable.
+
+ The parameters are:
+ * name - project name.
+
+ This returns a rich Android Eclipse project type, described at
+ :py:class:`mozbuild.frontend.data.AndroidEclipseProjectData`.
+ """),
+
+ 'export': (lambda self: self._export, (str,),
+ """Make the specified variable available to all child directories.
+
+ The variable specified by the argument string is added to the
+ environment of all directories specified in the DIRS and TEST_DIRS
+ variables. If those directories themselves have child directories,
+ the variable will be exported to all of them.
+
+ The value used for the variable is the final value at the end of the
+ moz.build file, so it is possible (but not recommended) to place
+ the export before the definition of the variable.
+
+ This function is limited to the upper-case variables that have special
+ meaning in moz.build files.
+
+ NOTE: Please consult with a build peer before adding a new use of this
+ function.
+
+ Example usage
+ ^^^^^^^^^^^^^
+
+ To make all child directories install as the given extension::
+
+ XPI_NAME = 'cool-extension'
+ export('XPI_NAME')
+ """),
+
+ 'warning': (lambda self: self._warning, (str,),
+ """Issue a warning.
+
+ Warnings are string messages that are printed during execution.
+
+ Warnings are otherwise ignored during execution; unlike errors, they
+ do not abort processing.
+ """),
+
+ 'error': (lambda self: self._error, (str,),
+ """Issue a fatal error.
+
+ If this function is called, processing is aborted immediately.
+ """),
+
+ 'template': (lambda self: self._template_decorator, (FunctionType,),
+ """Decorator for template declarations.
+
+ Templates are a special kind of function that can be declared in
+ mozbuild files. Uppercase variables assigned in the function scope
+ are considered to be the result of the template.
+
+ Unlike traditional Python functions:
+ - return values from template functions are ignored,
+ - template functions don't have access to the global scope.
+
+ Example template
+ ^^^^^^^^^^^^^^^^
+
+ The following ``Program`` template sets two variables ``PROGRAM`` and
+ ``USE_LIBS``. ``PROGRAM`` is set to the argument given on the template
+ invocation, and ``USE_LIBS`` is extended to contain "mozglue"::
+
+ @template
+ def Program(name):
+ PROGRAM = name
+ USE_LIBS += ['mozglue']
+
+ Template invocation
+ ^^^^^^^^^^^^^^^^^^^
+
+ A template is invoked in the form of a function call::
+
+ Program('myprog')
+
+ The result of the template, i.e. all the uppercase variables it sets,
+ is merged into the existing set of variables defined in the mozbuild
+ file invoking the template::
+
+ FINAL_TARGET = 'dist/other'
+ USE_LIBS += ['mylib']
+ Program('myprog')
+ USE_LIBS += ['otherlib']
+
+ The above mozbuild file results in the following variables being set:
+
+ - ``FINAL_TARGET`` is 'dist/other'
+ - ``USE_LIBS`` is ['mylib', 'mozglue', 'otherlib']
+ - ``PROGRAM`` is 'myprog'
+
+ """),
+}
+
+
+TestDirsPlaceHolder = List()
+
+
+# Special variables. These complement VARIABLES.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding value from a given context, type, docs)
+#
+SPECIAL_VARIABLES = {
+ 'TOPSRCDIR': (lambda context: context.config.topsrcdir, str,
+ """Constant defining the top source directory.
+
+ The top source directory is the parent directory containing the source
+ code and all build files. It is typically the root directory of a
+ cloned repository.
+ """),
+
+ 'TOPOBJDIR': (lambda context: context.config.topobjdir, str,
+ """Constant defining the top object directory.
+
+ The top object directory is the parent directory which will contain
+ the output of the build. This is commonly referred to as "the object
+ directory."
+ """),
+
+ 'RELATIVEDIR': (lambda context: context.relsrcdir, str,
+ """Constant defining the relative path of this file.
+
+ The relative path is from ``TOPSRCDIR``. This is defined as relative
+ to the main file being executed, regardless of whether additional
+ files have been included using ``include()``.
+ """),
+
+ 'SRCDIR': (lambda context: context.srcdir, str,
+ """Constant defining the source directory of this file.
+
+ This is the path inside ``TOPSRCDIR`` where this file is located. It
+ is the same as ``TOPSRCDIR + RELATIVEDIR``.
+ """),
+
+ 'OBJDIR': (lambda context: context.objdir, str,
+ """The path to the object directory for this file.
+
+ It is the same as ``TOPOBJDIR + RELATIVEDIR``.
+ """),
+
+ 'CONFIG': (lambda context: ReadOnlyKeyedDefaultDict(
+ lambda key: context.config.substs_unicode.get(key)), dict,
+ """Dictionary containing the current configuration variables.
+
+ All the variables defined by the configuration system are available
+ through this object. e.g. ``ENABLE_TESTS``, ``CFLAGS``, etc.
+
+ Values in this container are read-only. Attempts at changing values
+ will result in a run-time error.
+
+ Access to an unknown variable will return None.
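+
+ For example, a moz.build file might guard test directories on a
+ configure substitution (a sketch)::
+
+ if CONFIG['ENABLE_TESTS']:
+ DIRS += ['tests']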
+ """),
+
+ 'EXTRA_COMPONENTS': (lambda context: context['FINAL_TARGET_FILES'].components._strings, list,
+ """Additional component files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/components/``.
+ """),
+
+ 'EXTRA_PP_COMPONENTS': (lambda context: context['FINAL_TARGET_PP_FILES'].components._strings, list,
+ """Javascript XPCOM files.
+
+ This variable contains a list of files to preprocess. Generated
+ files will be installed in the ``/components`` directory of the distribution.
+ """),
+
+ 'JS_PREFERENCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].defaults.pref._strings, list,
+ """Exported javascript files.
+
+ A list of files copied into the dist directory for packaging and installation.
+ Path will be defined for gre or application prefs dir based on what is building.
+ """),
+
+ 'JS_PREFERENCE_PP_FILES': (lambda context: context['FINAL_TARGET_PP_FILES'].defaults.pref._strings, list,
+ """Like JS_PREFERENCE_FILES, preprocessed..
+ """),
+
+ 'RESOURCE_FILES': (lambda context: context['FINAL_TARGET_FILES'].res, list,
+ """List of resources to be exported, and in which subdirectories.
+
+ ``RESOURCE_FILES`` is used to list the resource files to be exported to
+ ``dist/bin/res``, but it can be used for other files as well. This variable
+ behaves as a list when appending filenames for resources in the top-level
+ directory. Files can also be appended to a field to indicate which
+ subdirectory they should be exported to. For example, to export
+ ``foo.res`` to the top-level directory, and ``bar.res`` to ``fonts/``,
+ append to ``RESOURCE_FILES`` like so::
+
+ RESOURCE_FILES += ['foo.res']
+ RESOURCE_FILES.fonts += ['bar.res']
+ """),
+
+ 'EXTRA_JS_MODULES': (lambda context: context['FINAL_TARGET_FILES'].modules, list,
+ """Additional JavaScript files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/modules``.
+ """),
+
+ 'EXTRA_PP_JS_MODULES': (lambda context: context['FINAL_TARGET_PP_FILES'].modules, list,
+ """Additional JavaScript files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/modules``, after preprocessing.
+ """),
+
+ 'TESTING_JS_MODULES': (lambda context: context['TEST_HARNESS_FILES'].modules, list,
+ """JavaScript modules to install in the test-only destination.
+
+ Some JavaScript modules (JSMs) are test-only and not distributed
+ with Firefox. This variable defines them.
+
+ To install modules in a subdirectory, use properties of this
+ variable to control the final destination, e.g.
+
+ ``TESTING_JS_MODULES.foo += ['module.jsm']``.
+ """),
+
+ 'TEST_DIRS': (lambda context: context['DIRS'] if context.config.substs.get('ENABLE_TESTS')
+ else TestDirsPlaceHolder, list,
+ """Like DIRS but only for directories that contain test-only code.
+
+ If tests are not enabled, this variable will be ignored.
+
+ This variable may go away once the transition away from Makefiles is
+ complete.
+ """),
+}
+
+# Deprecation hints.
+DEPRECATION_HINTS = {
+ 'CPP_UNIT_TESTS': '''
+ Please use
+
+ CppUnitTests(['foo', 'bar'])
+
+ instead of
+
+ CPP_UNIT_TESTS += ['foo', 'bar']
+ ''',
+
+ 'HOST_PROGRAM': '''
+ Please use
+
+ HostProgram('foo')
+
+ instead of
+
+ HOST_PROGRAM = 'foo'
+ ''',
+
+ 'HOST_LIBRARY_NAME': '''
+ Please use
+
+ HostLibrary('foo')
+
+ instead of
+
+ HOST_LIBRARY_NAME = 'foo'
+ ''',
+
+ 'HOST_SIMPLE_PROGRAMS': '''
+ Please use
+
+ HostSimplePrograms(['foo', 'bar'])
+
+ instead of
+
+ HOST_SIMPLE_PROGRAMS += ['foo', 'bar']
+ ''',
+
+ 'LIBRARY_NAME': '''
+ Please use
+
+ Library('foo')
+
+ instead of
+
+ LIBRARY_NAME = 'foo'
+ ''',
+
+ 'PROGRAM': '''
+ Please use
+
+ Program('foo')
+
+ instead of
+
+ PROGRAM = 'foo'
+ ''',
+
+ 'SIMPLE_PROGRAMS': '''
+ Please use
+
+ SimplePrograms(['foo', 'bar'])
+
+ instead of
+
+ SIMPLE_PROGRAMS += ['foo', 'bar']
+ ''',
+
+ 'FORCE_SHARED_LIB': '''
+ Please use
+
+ SharedLibrary('foo')
+
+ instead of
+
+ Library('foo') [ or LIBRARY_NAME = 'foo' ]
+ FORCE_SHARED_LIB = True
+ ''',
+
+ 'IS_COMPONENT': '''
+ Please use
+
+ XPCOMBinaryComponent('foo')
+
+ instead of
+
+ Library('foo') [ or LIBRARY_NAME = 'foo' ]
+ IS_COMPONENT = True
+ ''',
+
+ 'IS_FRAMEWORK': '''
+ Please use
+
+ Framework('foo')
+
+ instead of
+
+ Library('foo') [ or LIBRARY_NAME = 'foo' ]
+ IS_FRAMEWORK = True
+ ''',
+
+ 'TOOL_DIRS': 'Please use the DIRS variable instead.',
+
+ 'TEST_TOOL_DIRS': 'Please use the TEST_DIRS variable instead.',
+
+ 'PARALLEL_DIRS': 'Please use the DIRS variable instead.',
+
+ 'NO_DIST_INSTALL': '''
+ Please use
+
+ DIST_INSTALL = False
+
+ instead of
+
+ NO_DIST_INSTALL = True
+ ''',
+
+ 'GENERATED_SOURCES': '''
+ Please use
+
+ SOURCES += [ '!foo.cpp' ]
+
+ instead of
+
+ GENERATED_SOURCES += [ 'foo.cpp' ]
+ ''',
+
+ 'GENERATED_INCLUDES': '''
+ Please use
+
+ LOCAL_INCLUDES += [ '!foo' ]
+
+ instead of
+
+ GENERATED_INCLUDES += [ 'foo' ]
+ ''',
+
+ 'DIST_FILES': '''
+ Please use
+
+ FINAL_TARGET_PP_FILES += [ 'foo' ]
+
+ instead of
+
+ DIST_FILES += [ 'foo' ]
+ ''',
+}
+
+# Make sure that all template variables have a deprecation hint.
+for name in TEMPLATE_VARIABLES:
+ if name not in DEPRECATION_HINTS:
+ raise RuntimeError('Missing deprecation hint for %s' % name)
diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py
new file mode 100644
index 000000000..fdf8cca17
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -0,0 +1,1113 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Data structures representing Mozilla's source tree.
+
+The frontend files are parsed into static data structures. These data
+structures are defined in this module.
+
+All data structures of interest are children of the TreeMetadata class.
+
+Logic for populating these data structures is not defined in this class.
+Instead, what we have here are dumb container classes. The emitter module
+contains the code for converting executed mozbuild files into these data
+structures.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+from mozbuild.util import StrictOrderingOnAppendList
+from mozpack.chrome.manifest import ManifestEntry
+
+import mozpack.path as mozpath
+from .context import FinalTargetValue
+
+from ..util import (
+ group_unified_files,
+)
+
+from ..testing import (
+ all_test_flavors,
+)
+
+
+class TreeMetadata(object):
+ """Base class for all data being captured."""
+ __slots__ = ()
+
+ def to_dict(self):
+ return {k.lower(): getattr(self, k) for k in self.DICT_ATTRS}
+
+
+class ContextDerived(TreeMetadata):
+ """Build object derived from a single Context instance.
+
+ It holds fields common to all context derived classes. This class is likely
+ never instantiated directly but is instead derived from.
+ """
+
+ __slots__ = (
+ 'context_main_path',
+ 'context_all_paths',
+ 'topsrcdir',
+ 'topobjdir',
+ 'relativedir',
+ 'srcdir',
+ 'objdir',
+ 'config',
+ '_context',
+ )
+
+ def __init__(self, context):
+ TreeMetadata.__init__(self)
+
+ # Capture the files that were evaluated to fill this context.
+ self.context_main_path = context.main_path
+ self.context_all_paths = context.all_paths
+
+ # Basic directory state.
+ self.topsrcdir = context.config.topsrcdir
+ self.topobjdir = context.config.topobjdir
+
+ self.relativedir = context.relsrcdir
+ self.srcdir = context.srcdir
+ self.objdir = context.objdir
+
+ self.config = context.config
+
+ self._context = context
+
+ @property
+ def install_target(self):
+ return self._context['FINAL_TARGET']
+
+ @property
+ def defines(self):
+ defines = self._context['DEFINES']
+ return Defines(self._context, defines) if defines else None
+
+ @property
+ def relobjdir(self):
+ return mozpath.relpath(self.objdir, self.topobjdir)
+
+
+class HostMixin(object):
+ @property
+ def defines(self):
+ defines = self._context['HOST_DEFINES']
+ return HostDefines(self._context, defines) if defines else None
+
+
+class DirectoryTraversal(ContextDerived):
+ """Describes how directory traversal for building should work.
+
+ This build object is likely only of interest to the recursive make backend.
+ Other build backends should (ideally) not attempt to mimic the behavior of
+ the recursive make backend. The only reason this exists is to support the
+ existing recursive make backend until the transition to mozbuild frontend
+ files is complete and we can move to a more optimal build backend.
+
+ Fields in this class correspond to similarly named variables in the
+ frontend files.
+ """
+ __slots__ = (
+ 'dirs',
+ )
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.dirs = []
+
+
+class BaseConfigSubstitution(ContextDerived):
+ """Base class describing autogenerated files as part of config.status."""
+
+ __slots__ = (
+ 'input_path',
+ 'output_path',
+ 'relpath',
+ )
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.input_path = None
+ self.output_path = None
+ self.relpath = None
+
+
+class ConfigFileSubstitution(BaseConfigSubstitution):
+ """Describes a config file that will be generated using substitutions."""
+
+
+class VariablePassthru(ContextDerived):
+ """A dict of variables to pass through to backend.mk unaltered.
+
+ The purpose of this object is to facilitate rapid transitioning of
+ variables from Makefile.in to moz.build. In the ideal world, this class
+ does not exist and every variable has a richer class representing it.
+ As long as we rely on this class, we lose the ability to have flexibility
+ in our build backends since we will continue to be tied to our rules.mk.
+ """
+ __slots__ = ('variables',)
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.variables = {}
+
+class XPIDLFile(ContextDerived):
+ """Describes an XPIDL file to be compiled."""
+
+ __slots__ = (
+ 'source_path',
+ 'basename',
+ 'module',
+ 'add_to_manifest',
+ )
+
+ def __init__(self, context, source, module, add_to_manifest):
+ ContextDerived.__init__(self, context)
+
+ self.source_path = source
+ self.basename = mozpath.basename(source)
+ self.module = module
+ self.add_to_manifest = add_to_manifest
+
+class BaseDefines(ContextDerived):
+ """Context derived container object for DEFINES/HOST_DEFINES,
+ which are OrderedDicts.
+ """
+ __slots__ = ('defines',)
+
+ def __init__(self, context, defines):
+ ContextDerived.__init__(self, context)
+ self.defines = defines
+
+ def get_defines(self):
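+ # Translate the defines dict into compiler arguments:
+ # True -> -DNAME, False -> -UNAME, anything else -> -DNAME=value.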
+ for define, value in self.defines.iteritems():
+ if value is True:
+ yield('-D%s' % define)
+ elif value is False:
+ yield('-U%s' % define)
+ else:
+ yield('-D%s=%s' % (define, value))
+
+ def update(self, more_defines):
+ if isinstance(more_defines, Defines):
+ self.defines.update(more_defines.defines)
+ else:
+ self.defines.update(more_defines)
+
+class Defines(BaseDefines):
+ pass
+
+class HostDefines(BaseDefines):
+ pass
+
+class IPDLFile(ContextDerived):
+ """Describes an individual .ipdl source file."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class WebIDLFile(ContextDerived):
+ """Describes an individual .webidl source file."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class GeneratedEventWebIDLFile(ContextDerived):
+ """Describes an individual .webidl source file."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class TestWebIDLFile(ContextDerived):
+ """Describes an individual test-only .webidl source file."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class PreprocessedTestWebIDLFile(ContextDerived):
+ """Describes an individual test-only .webidl source file that requires
+ preprocessing."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class PreprocessedWebIDLFile(ContextDerived):
+ """Describes an individual .webidl source file that requires preprocessing."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+class GeneratedWebIDLFile(ContextDerived):
+ """Describes an individual .webidl source file that is generated from
+ build rules."""
+
+ __slots__ = (
+ 'basename',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.basename = path
+
+
+class ExampleWebIDLInterface(ContextDerived):
+ """An individual WebIDL interface to generate."""
+
+ __slots__ = (
+ 'name',
+ )
+
+ def __init__(self, context, name):
+ ContextDerived.__init__(self, context)
+
+ self.name = name
+
+
+class LinkageWrongKindError(Exception):
+ """Error thrown when trying to link objects of the wrong kind"""
+
+
+class LinkageMultipleRustLibrariesError(Exception):
+ """Error thrown when trying to link multiple Rust libraries to an object"""
+
+
+class Linkable(ContextDerived):
+ """Generic context derived container object for programs and libraries"""
+ __slots__ = (
+ 'cxx_link',
+ 'lib_defines',
+ 'linked_libraries',
+ 'linked_system_libs',
+ )
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.cxx_link = False
+ self.linked_libraries = []
+ self.linked_system_libs = []
+ self.lib_defines = Defines(context, {})
+
+ def link_library(self, obj):
+ assert isinstance(obj, BaseLibrary)
+ if isinstance(obj, SharedLibrary) and obj.variant == obj.COMPONENT:
+ raise LinkageWrongKindError(
+ 'Linkable.link_library() does not take components.')
+ if obj.KIND != self.KIND:
+ raise LinkageWrongKindError('%s != %s' % (obj.KIND, self.KIND))
+ # Linking multiple Rust libraries into an object would result in
+ # multiple copies of the Rust standard library, as well as linking
+ # errors from duplicate symbols.
+ if isinstance(obj, RustLibrary) and any(isinstance(l, RustLibrary)
+ for l in self.linked_libraries):
+ raise LinkageMultipleRustLibrariesError(
+ "Cannot link multiple Rust libraries into %s" % self)
+ self.linked_libraries.append(obj)
+ if obj.cxx_link:
+ self.cxx_link = True
+ obj.refs.append(self)
+
+ def link_system_library(self, lib):
+ # The '$' check is here as a special temporary rule, allowing the
+ # inherited use of make variables, most notably in TK_LIBS.
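+ # For example, 'z' is turned into '-lz' with a GNU-style toolchain,
+ # and into something like 'z.lib' otherwise (import_prefix + name +
+ # import_suffix, both taken from the build configuration).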
+ if not lib.startswith('$') and not lib.startswith('-'):
+ if self.config.substs.get('GNU_CC'):
+ lib = '-l%s' % lib
+ else:
+ lib = '%s%s%s' % (
+ self.config.import_prefix,
+ lib,
+ self.config.import_suffix,
+ )
+ self.linked_system_libs.append(lib)
+
+class BaseProgram(Linkable):
+ """Context derived container object for programs, which is a unicode
+ string.
+
+ This class handles automatically appending a binary suffix to the program
+ name.
+ If the suffix is not defined, the program name is unchanged.
+ Otherwise, if the program name already ends with the given suffix, it
+ is unchanged; otherwise, the suffix is appended to the program name.
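+
+ For example, if the configured suffix is '.exe', 'foo' becomes
+ 'foo.exe' while 'foo.exe' is left unchanged.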
+ """
+ __slots__ = ('program',)
+
+ DICT_ATTRS = {
+ 'install_target',
+ 'KIND',
+ 'program',
+ 'relobjdir',
+ }
+
+ def __init__(self, context, program, is_unit_test=False):
+ Linkable.__init__(self, context)
+
+ bin_suffix = context.config.substs.get(self.SUFFIX_VAR, '')
+ if not program.endswith(bin_suffix):
+ program += bin_suffix
+ self.program = program
+ self.is_unit_test = is_unit_test
+
+ def __repr__(self):
+ return '<%s: %s/%s>' % (type(self).__name__, self.relobjdir, self.program)
+
+
+class Program(BaseProgram):
+ """Context derived container object for PROGRAM"""
+ SUFFIX_VAR = 'BIN_SUFFIX'
+ KIND = 'target'
+
+
+class HostProgram(HostMixin, BaseProgram):
+ """Context derived container object for HOST_PROGRAM"""
+ SUFFIX_VAR = 'HOST_BIN_SUFFIX'
+ KIND = 'host'
+
+
+class SimpleProgram(BaseProgram):
+ """Context derived container object for each program in SIMPLE_PROGRAMS"""
+ SUFFIX_VAR = 'BIN_SUFFIX'
+ KIND = 'target'
+
+
+class HostSimpleProgram(HostMixin, BaseProgram):
+ """Context derived container object for each program in
+ HOST_SIMPLE_PROGRAMS"""
+ SUFFIX_VAR = 'HOST_BIN_SUFFIX'
+ KIND = 'host'
+
+
+class BaseLibrary(Linkable):
+ """Generic context derived container object for libraries."""
+ __slots__ = (
+ 'basename',
+ 'lib_name',
+ 'import_name',
+ 'refs',
+ )
+
+ def __init__(self, context, basename):
+ Linkable.__init__(self, context)
+
+ self.basename = self.lib_name = basename
+ if self.lib_name:
+ self.lib_name = '%s%s%s' % (
+ context.config.lib_prefix,
+ self.lib_name,
+ context.config.lib_suffix
+ )
+ self.import_name = self.lib_name
+
+ self.refs = []
+
+ def __repr__(self):
+ return '<%s: %s/%s>' % (type(self).__name__, self.relobjdir, self.lib_name)
+
+
+class Library(BaseLibrary):
+ """Context derived container object for a library"""
+ KIND = 'target'
+ __slots__ = (
+ 'is_sdk',
+ )
+
+ def __init__(self, context, basename, real_name=None, is_sdk=False):
+ BaseLibrary.__init__(self, context, real_name or basename)
+ self.basename = basename
+ self.is_sdk = is_sdk
+
+
+class StaticLibrary(Library):
+ """Context derived container object for a static library"""
+ __slots__ = (
+ 'link_into',
+ 'no_expand_lib',
+ )
+
+ def __init__(self, context, basename, real_name=None, is_sdk=False,
+ link_into=None, no_expand_lib=False):
+ Library.__init__(self, context, basename, real_name, is_sdk)
+ self.link_into = link_into
+ self.no_expand_lib = no_expand_lib
+
+
+class RustLibrary(StaticLibrary):
+ """Context derived container object for a static library"""
+ __slots__ = (
+ 'cargo_file',
+ 'crate_type',
+ 'dependencies',
+ 'deps_path',
+ )
+
+ def __init__(self, context, basename, cargo_file, crate_type, dependencies, **args):
+ StaticLibrary.__init__(self, context, basename, **args)
+ self.cargo_file = cargo_file
+ self.crate_type = crate_type
+ # We need to adjust our naming here because cargo replaces '-' in
+ # package names defined in Cargo.toml with underscores in actual
+ # filenames. But we need to keep the basename consistent because
+ # many other things in the build system depend on that.
+ assert self.crate_type == 'staticlib'
+ self.lib_name = '%s%s%s' % (context.config.lib_prefix,
+ basename.replace('-', '_'),
+ context.config.lib_suffix)
+ self.dependencies = dependencies
+ # cargo creates several directories and places its build artifacts
+ # in those directories. The directory structure depends not only
+ # on the target, but also what sort of build we are doing.
+ rust_build_kind = 'release'
+ if context.config.substs.get('MOZ_DEBUG'):
+ rust_build_kind = 'debug'
+ build_dir = mozpath.join(context.config.substs['RUST_TARGET'],
+ rust_build_kind)
+ self.import_name = mozpath.join(build_dir, self.lib_name)
+ self.deps_path = mozpath.join(build_dir, 'deps')
+
+
+class SharedLibrary(Library):
+ """Context derived container object for a shared library"""
+ __slots__ = (
+ 'soname',
+ 'variant',
+ 'symbols_file',
+ )
+
+ DICT_ATTRS = {
+ 'basename',
+ 'import_name',
+ 'install_target',
+ 'lib_name',
+ 'relobjdir',
+ 'soname',
+ }
+
+ FRAMEWORK = 1
+ COMPONENT = 2
+ MAX_VARIANT = 3
+
+ def __init__(self, context, basename, real_name=None, is_sdk=False,
+ soname=None, variant=None, symbols_file=False):
+ assert(variant in range(1, self.MAX_VARIANT) or variant is None)
+ Library.__init__(self, context, basename, real_name, is_sdk)
+ self.variant = variant
+ self.lib_name = real_name or basename
+ assert self.lib_name
+
+ if variant == self.FRAMEWORK:
+ self.import_name = self.lib_name
+ else:
+ self.import_name = '%s%s%s' % (
+ context.config.import_prefix,
+ self.lib_name,
+ context.config.import_suffix,
+ )
+ self.lib_name = '%s%s%s' % (
+ context.config.dll_prefix,
+ self.lib_name,
+ context.config.dll_suffix,
+ )
+ if soname:
+ self.soname = '%s%s%s' % (
+ context.config.dll_prefix,
+ soname,
+ context.config.dll_suffix,
+ )
+ else:
+ self.soname = self.lib_name
+
+ if symbols_file is False:
+ # No symbols file.
+ self.symbols_file = None
+ elif symbols_file is True:
+ # Symbols file with default name.
+ if context.config.substs['OS_TARGET'] == 'WINNT':
+ self.symbols_file = '%s.def' % self.lib_name
+ else:
+ self.symbols_file = '%s.symbols' % self.lib_name
+ else:
+ # Explicitly provided name.
+ self.symbols_file = symbols_file
+
+
+
+class ExternalLibrary(object):
+ """Empty mixin for libraries built by an external build system."""
+
+
+class ExternalStaticLibrary(StaticLibrary, ExternalLibrary):
+ """Context derived container for static libraries built by an external
+ build system."""
+
+
+class ExternalSharedLibrary(SharedLibrary, ExternalLibrary):
+ """Context derived container for shared libraries built by an external
+ build system."""
+
+
+class HostLibrary(HostMixin, BaseLibrary):
+ """Context derived container object for a host library"""
+ KIND = 'host'
+
+
+class TestManifest(ContextDerived):
+ """Represents a manifest file containing information about tests."""
+
+ __slots__ = (
+ # The type of test manifest this is.
+ 'flavor',
+
+ # Maps source filename to destination filename. The destination
+ # path is relative from the tests root directory. Values are 2-tuples
+ # of (destpath, is_test_file) where the 2nd item is True if this
+ # item represents a test file (versus a support file).
+ 'installs',
+
+ # A list of pattern matching installs to perform. Entries are
+ # (base, pattern, dest).
+ 'pattern_installs',
+
+ # Where all files for this manifest flavor are installed in the unified
+ # test package directory.
+ 'install_prefix',
+
+ # Set of files provided by an external mechanism.
+ 'external_installs',
+
+ # Set of files required by multiple test directories, whose installation
+ # will be resolved when running tests.
+ 'deferred_installs',
+
+ # The full path of this manifest file.
+ 'path',
+
+ # The directory where this manifest is defined.
+ 'directory',
+
+ # The parsed manifestparser.TestManifest instance.
+ 'manifest',
+
+ # List of tests. Each element is a dict of metadata.
+ 'tests',
+
+ # The relative path of the parsed manifest within the srcdir.
+ 'manifest_relpath',
+
+ # The relative path of the parsed manifest within the objdir.
+ 'manifest_obj_relpath',
+
+ # If this manifest is a duplicate of another one, this is the
+ # manifestparser.TestManifest of the other one.
+ 'dupe_manifest',
+ )
+
+ def __init__(self, context, path, manifest, flavor=None,
+ install_prefix=None, relpath=None, dupe_manifest=False):
+ ContextDerived.__init__(self, context)
+
+ assert flavor in all_test_flavors()
+
+ self.path = path
+ self.directory = mozpath.dirname(path)
+ self.manifest = manifest
+ self.flavor = flavor
+ self.install_prefix = install_prefix
+ self.manifest_relpath = relpath
+ self.manifest_obj_relpath = relpath
+ self.dupe_manifest = dupe_manifest
+ self.installs = {}
+ self.pattern_installs = []
+ self.tests = []
+ self.external_installs = set()
+ self.deferred_installs = set()
+
+
+class LocalInclude(ContextDerived):
+ """Describes an individual local include path."""
+
+ __slots__ = (
+ 'path',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.path = path
+
+
+class PerSourceFlag(ContextDerived):
+ """Describes compiler flags specified for individual source files."""
+
+ __slots__ = (
+ 'file_name',
+ 'flags',
+ )
+
+ def __init__(self, context, file_name, flags):
+ ContextDerived.__init__(self, context)
+
+ self.file_name = file_name
+ self.flags = flags
+
+
+class JARManifest(ContextDerived):
+ """Describes an individual JAR manifest file and how to process it.
+
+ This class isn't very useful for optimizing backends yet because we don't
+ capture defines. We can't capture defines safely until all of them are
+ defined in moz.build and not Makefile.in files.
+ """
+ __slots__ = (
+ 'path',
+ )
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.path = path
+
+
+class ContextWrapped(ContextDerived):
+ """Generic context derived container object for a wrapped rich object.
+
+ Use this wrapper class to shuttle a rich build system object
+ completely defined in moz.build files through the tree metadata
+ emitter to the build backend for processing as-is.
+ """
+
+ __slots__ = (
+ 'wrapped',
+ )
+
+ def __init__(self, context, wrapped):
+ ContextDerived.__init__(self, context)
+
+ self.wrapped = wrapped
+
+
+class JavaJarData(object):
+ """Represents a Java JAR file.
+
+ A Java JAR has the following members:
+ * sources - strictly ordered list of input java sources
+ * generated_sources - strictly ordered list of generated input
+ java sources
+ * extra_jars - list of JAR file dependencies to include on the
+ javac compiler classpath
+ * javac_flags - list containing extra flags passed to the
+ javac compiler
+ """
+
+ __slots__ = (
+ 'name',
+ 'sources',
+ 'generated_sources',
+ 'extra_jars',
+ 'javac_flags',
+ )
+
+ def __init__(self, name, sources=[], generated_sources=[],
+ extra_jars=[], javac_flags=[]):
+ self.name = name
+ self.sources = StrictOrderingOnAppendList(sources)
+ self.generated_sources = StrictOrderingOnAppendList(generated_sources)
+ self.extra_jars = list(extra_jars)
+ self.javac_flags = list(javac_flags)
+
+
+class BaseSources(ContextDerived):
+ """Base class for files to be compiled during the build."""
+
+ __slots__ = (
+ 'files',
+ 'canonical_suffix',
+ )
+
+ def __init__(self, context, files, canonical_suffix):
+ ContextDerived.__init__(self, context)
+
+ self.files = files
+ self.canonical_suffix = canonical_suffix
+
+
+class Sources(BaseSources):
+ """Represents files to be compiled during the build."""
+
+ def __init__(self, context, files, canonical_suffix):
+ BaseSources.__init__(self, context, files, canonical_suffix)
+
+
+class GeneratedSources(BaseSources):
+ """Represents generated files to be compiled during the build."""
+
+ def __init__(self, context, files, canonical_suffix):
+ BaseSources.__init__(self, context, files, canonical_suffix)
+
+
+class HostSources(HostMixin, BaseSources):
+ """Represents files to be compiled for the host during the build."""
+
+ def __init__(self, context, files, canonical_suffix):
+ BaseSources.__init__(self, context, files, canonical_suffix)
+
+
+class UnifiedSources(BaseSources):
+ """Represents files to be compiled in a unified fashion during the build."""
+
+ __slots__ = (
+ 'have_unified_mapping',
+ 'unified_source_mapping'
+ )
+
+ def __init__(self, context, files, canonical_suffix, files_per_unified_file=16):
+ BaseSources.__init__(self, context, files, canonical_suffix)
+
+ self.have_unified_mapping = files_per_unified_file > 1
+
+ if self.have_unified_mapping:
+ # Sorted so output is consistent and we don't bump mtimes.
+ source_files = list(sorted(self.files))
+
+ # On Windows, path names have a maximum length of 255 characters,
+ # so avoid creating extremely long path names.
+ unified_prefix = context.relsrcdir
+ if len(unified_prefix) > 20:
+ unified_prefix = unified_prefix[-20:].split('/', 1)[-1]
+ unified_prefix = unified_prefix.replace('/', '_')
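+ # e.g. a relsrcdir of 'dom/bindings/test' becomes
+ # 'dom_bindings_test', giving a unified_prefix of
+ # 'Unified_cpp_dom_bindings_test' below for C++ sources.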
+
+ suffix = self.canonical_suffix[1:]
+ unified_prefix = 'Unified_%s_%s' % (suffix, unified_prefix)
+ self.unified_source_mapping = list(group_unified_files(source_files,
+ unified_prefix=unified_prefix,
+ unified_suffix=suffix,
+ files_per_unified_file=files_per_unified_file))
+
+
+class InstallationTarget(ContextDerived):
+ """Describes the rules that affect where files get installed to."""
+
+ __slots__ = (
+ 'xpiname',
+ 'subdir',
+ 'target',
+ 'enabled'
+ )
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.xpiname = context.get('XPI_NAME', '')
+ self.subdir = context.get('DIST_SUBDIR', '')
+ self.target = context['FINAL_TARGET']
+ self.enabled = context['DIST_INSTALL'] is not False
+
+ def is_custom(self):
+ """Returns whether or not the target is not derived from the default
+ given xpiname and subdir."""
+
+ return FinalTargetValue(dict(
+ XPI_NAME=self.xpiname,
+ DIST_SUBDIR=self.subdir)) == self.target
+
+
+class FinalTargetFiles(ContextDerived):
+ """Sandbox container object for FINAL_TARGET_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
+ """
+ __slots__ = ('files',)
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+
+class FinalTargetPreprocessedFiles(ContextDerived):
+ """Sandbox container object for FINAL_TARGET_PP_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing
+ FINAL_TARGET_PP_FILES.
+ """
+ __slots__ = ('files',)
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+
+class ObjdirFiles(ContextDerived):
+ """Sandbox container object for OBJDIR_FILES, which is a
+ HierarchicalStringList.
+ """
+ __slots__ = ('files',)
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+ @property
+ def install_target(self):
+ return ''
+
+
+class ObjdirPreprocessedFiles(ContextDerived):
+ """Sandbox container object for OBJDIR_PP_FILES, which is a
+ HierarchicalStringList.
+ """
+ __slots__ = ('files',)
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+ @property
+ def install_target(self):
+ return ''
+
+
+class TestHarnessFiles(FinalTargetFiles):
+ """Sandbox container object for TEST_HARNESS_FILES,
+ which is a HierarchicalStringList.
+ """
+ @property
+ def install_target(self):
+ return '_tests'
+
+
+class Exports(FinalTargetFiles):
+ """Context derived container object for EXPORTS, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing EXPORTS.
+ """
+ @property
+ def install_target(self):
+ return 'dist/include'
+
+
+class BrandingFiles(FinalTargetFiles):
+ """Sandbox container object for BRANDING_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing BRANDING_FILES.
+ """
+ @property
+ def install_target(self):
+ return 'dist/branding'
+
+
+class SdkFiles(FinalTargetFiles):
+ """Sandbox container object for SDK_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing SDK_FILES.
+ """
+ @property
+ def install_target(self):
+ return 'dist/sdk'
+
+
+class GeneratedFile(ContextDerived):
+ """Represents a generated file."""
+
+ __slots__ = (
+ 'script',
+ 'method',
+ 'outputs',
+ 'inputs',
+ 'flags',
+ )
+
+ def __init__(self, context, script, method, outputs, inputs, flags=()):
+ ContextDerived.__init__(self, context)
+ self.script = script
+ self.method = method
+ self.outputs = outputs if isinstance(outputs, tuple) else (outputs,)
+ self.inputs = inputs
+ self.flags = flags
+
+
+class ClassPathEntry(object):
+ """Represents a classpathentry in an Android Eclipse project."""
+
+ __slots__ = (
+ 'dstdir',
+ 'srcdir',
+ 'path',
+ 'exclude_patterns',
+ 'ignore_warnings',
+ )
+
+ def __init__(self):
+ self.dstdir = None
+ self.srcdir = None
+ self.path = None
+ self.exclude_patterns = []
+ self.ignore_warnings = False
+
+
+class AndroidEclipseProjectData(object):
+ """Represents an Android Eclipse project."""
+
+ __slots__ = (
+ 'name',
+ 'package_name',
+ 'is_library',
+ 'res',
+ 'assets',
+ 'libs',
+ 'manifest',
+ 'recursive_make_targets',
+ 'extra_jars',
+ 'included_projects',
+ 'referenced_projects',
+ '_classpathentries',
+ 'filtered_resources',
+ )
+
+ def __init__(self, name):
+ self.name = name
+ self.is_library = False
+ self.manifest = None
+ self.res = None
+ self.assets = None
+ self.libs = []
+ self.recursive_make_targets = []
+ self.extra_jars = []
+ self.included_projects = []
+ self.referenced_projects = []
+ self._classpathentries = []
+ self.filtered_resources = []
+
+ def add_classpathentry(self, path, srcdir, dstdir, exclude_patterns=[], ignore_warnings=False):
+ cpe = ClassPathEntry()
+ cpe.srcdir = srcdir
+ cpe.dstdir = dstdir
+ cpe.path = path
+ cpe.exclude_patterns = list(exclude_patterns)
+ cpe.ignore_warnings = ignore_warnings
+ self._classpathentries.append(cpe)
+ return cpe
+
+
+class AndroidResDirs(ContextDerived):
+ """Represents Android resource directories."""
+
+ __slots__ = (
+ 'paths',
+ )
+
+ def __init__(self, context, paths):
+ ContextDerived.__init__(self, context)
+ self.paths = paths
+
+class AndroidAssetsDirs(ContextDerived):
+ """Represents Android assets directories."""
+
+ __slots__ = (
+ 'paths',
+ )
+
+ def __init__(self, context, paths):
+ ContextDerived.__init__(self, context)
+ self.paths = paths
+
+class AndroidExtraResDirs(ContextDerived):
+ """Represents Android extra resource directories.
+
+ Extra resources are resources provided by libraries and included in a
+ packaged APK, but not otherwise redistributed. In practice, this means
+ resources included in Fennec but not in GeckoView.
+ """
+
+ __slots__ = (
+ 'paths',
+ )
+
+ def __init__(self, context, paths):
+ ContextDerived.__init__(self, context)
+ self.paths = paths
+
+class AndroidExtraPackages(ContextDerived):
+ """Represents Android extra packages."""
+
+ __slots__ = (
+ 'packages',
+ )
+
+ def __init__(self, context, packages):
+ ContextDerived.__init__(self, context)
+ self.packages = packages
+
+class ChromeManifestEntry(ContextDerived):
+ """Represents a chrome.manifest entry."""
+
+ __slots__ = (
+ 'path',
+ 'entry',
+ )
+
+ def __init__(self, context, manifest_path, entry):
+ ContextDerived.__init__(self, context)
+ assert isinstance(entry, ManifestEntry)
+ self.path = mozpath.join(self.install_target, manifest_path)
+ # Ensure the entry is relative to the directory containing the
+ # manifest path.
+ entry = entry.rebase(mozpath.dirname(manifest_path))
+ # Then add the install_target to the entry base directory.
+ self.entry = entry.move(mozpath.dirname(self.path))
diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py
new file mode 100644
index 000000000..52f571867
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -0,0 +1,1416 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import itertools
+import logging
+import os
+import traceback
+import sys
+import time
+
+from collections import defaultdict, OrderedDict
+from mach.mixin.logging import LoggingMixin
+from mozbuild.util import (
+ memoize,
+ OrderedDefaultDict,
+)
+
+import mozpack.path as mozpath
+import mozinfo
+import pytoml
+
+from .data import (
+ AndroidAssetsDirs,
+ AndroidExtraPackages,
+ AndroidExtraResDirs,
+ AndroidResDirs,
+ BaseSources,
+ BrandingFiles,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ ContextWrapped,
+ Defines,
+ DirectoryTraversal,
+ Exports,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedEventWebIDLFile,
+ GeneratedFile,
+ GeneratedSources,
+ GeneratedWebIDLFile,
+ ExampleWebIDLInterface,
+ ExternalStaticLibrary,
+ ExternalSharedLibrary,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ IPDLFile,
+ JARManifest,
+ Library,
+ Linkable,
+ LocalInclude,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ PreprocessedTestWebIDLFile,
+ PreprocessedWebIDLFile,
+ Program,
+ RustLibrary,
+ SdkFiles,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestHarnessFiles,
+ TestWebIDLFile,
+ TestManifest,
+ UnifiedSources,
+ VariablePassthru,
+ WebIDLFile,
+ XPIDLFile,
+)
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ Manifest,
+)
+
+from .reader import SandboxValidationError
+
+from ..testing import (
+ TEST_MANIFESTS,
+ REFTEST_FLAVORS,
+ WEB_PLATFORM_TESTS_FLAVORS,
+ SupportFilesConverter,
+)
+
+from .context import (
+ Context,
+ SourcePath,
+ ObjDirPath,
+ Path,
+ SubContext,
+ TemplateContext,
+)
+
+from mozbuild.base import ExecutionSummary
+
+
+class TreeMetadataEmitter(LoggingMixin):
+ """Converts the executed mozbuild files into data structures.
+
+ This is a bridge between reader.py and data.py. It takes what was read by
+ reader.BuildReader and converts it into the classes defined in the data
+ module.
+ """
+
+ def __init__(self, config):
+ self.populate_logger()
+
+ self.config = config
+
+ mozinfo.find_and_update_from_json(config.topobjdir)
+
+ # Python 2.6 doesn't allow unicode keys to be used for keyword
+ # arguments. This gross hack works around the problem until we
+ # rid ourselves of 2.6.
+ self.info = {}
+ for k, v in mozinfo.info.items():
+ if isinstance(k, unicode):
+ k = k.encode('ascii')
+ self.info[k] = v
+
+ self._libs = OrderedDefaultDict(list)
+ self._binaries = OrderedDict()
+ self._linkage = []
+ self._static_linking_shared = set()
+ self._crate_verified_local = set()
+ self._crate_directories = dict()
+
+ # Keep track of external paths (third party build systems), starting
+ # from what we run a subconfigure in. We'll eliminate some directories
+ # as we traverse them with moz.build (e.g. js/src).
+ subconfigures = os.path.join(self.config.topobjdir, 'subconfigures')
+ paths = []
+ if os.path.exists(subconfigures):
+ paths = open(subconfigures).read().splitlines()
+ self._external_paths = set(mozpath.normsep(d) for d in paths)
+ # Add security/nss manually, since it doesn't have a subconfigure.
+ self._external_paths.add('security/nss')
+
+ self._emitter_time = 0.0
+ self._object_count = 0
+ self._test_files_converter = SupportFilesConverter()
+
+ def summary(self):
+ return ExecutionSummary(
+ 'Processed into {object_count:d} build config descriptors in '
+ '{execution_time:.2f}s',
+ execution_time=self._emitter_time,
+ object_count=self._object_count)
+
+ def emit(self, output):
+ """Convert the BuildReader output into data structures.
+
+ The return value from BuildReader.read_topsrcdir() (a generator) is
+ typically fed into this function.
+ """
+ contexts = {}
+
+ def emit_objs(objs):
+ for o in objs:
+ self._object_count += 1
+ yield o
+
+ for out in output:
+ # Nothing in sub-contexts is currently of interest to us. Filter
+ # them all out.
+ if isinstance(out, SubContext):
+ continue
+
+ if isinstance(out, Context):
+ # Keep all contexts around, we will need them later.
+ contexts[out.objdir] = out
+
+ start = time.time()
+ # We need to expand the generator for the timings to work.
+ objs = list(self.emit_from_context(out))
+ self._emitter_time += time.time() - start
+
+ for o in emit_objs(objs): yield o
+
+ else:
+ raise Exception('Unhandled output type: %s' % type(out))
+
+ # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
+ if self.config.substs.get('COMPILE_ENVIRONMENT'):
+ start = time.time()
+ objs = list(self._emit_libs_derived(contexts))
+ self._emitter_time += time.time() - start
+
+ for o in emit_objs(objs): yield o
+
+ def _emit_libs_derived(self, contexts):
+ # First do FINAL_LIBRARY linkage.
+ for lib in (l for libs in self._libs.values() for l in libs):
+ if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
+ continue
+ if lib.link_into not in self._libs:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
+ % lib.link_into, contexts[lib.objdir])
+ candidates = self._libs[lib.link_into]
+
+ # When there are multiple candidates, but all are in the same
+ # directory and have a different type, we want all of them to
+ # have the library linked. The typical use case is when building
+ # both a static and a shared library in a directory, and having
+ # that as a FINAL_LIBRARY.
+ if len(set(type(l) for l in candidates)) == len(candidates) and \
+ len(set(l.objdir for l in candidates)) == 1:
+ for c in candidates:
+ c.link_library(lib)
+ else:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
+ 'multiple places:\n %s' % (lib.link_into,
+ '\n '.join(l.objdir for l in candidates)),
+ contexts[lib.objdir])
+
+ # Next, USE_LIBS linkage.
+ for context, obj, variable in self._linkage:
+ self._link_libraries(context, obj, variable)
+
+ def recurse_refs(lib):
+ for o in lib.refs:
+ yield o
+ if isinstance(o, StaticLibrary):
+ for q in recurse_refs(o):
+ yield q
+
+ # Check that all static libraries referring to shared libraries in
+ # USE_LIBS are linked into a shared library or program.
+ for lib in self._static_linking_shared:
+ if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
+ shared_libs = sorted(l.basename for l in lib.linked_libraries
+ if isinstance(l, SharedLibrary))
+ raise SandboxValidationError(
+ 'The static "%s" library is not used in a shared library '
+ 'or a program, but USE_LIBS contains the following shared '
+ 'library names:\n %s\n\nMaybe you can remove the '
+ 'static "%s" library?' % (lib.basename,
+ '\n '.join(shared_libs), lib.basename),
+ contexts[lib.objdir])
+
+ # Propagate LIBRARY_DEFINES to all child libraries recursively.
+ def propagate_defines(outerlib, defines):
+ outerlib.lib_defines.update(defines)
+ for lib in outerlib.linked_libraries:
+ # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
+ # paths.
+ if (isinstance(lib, StaticLibrary) and
+ lib.link_into == outerlib.basename):
+ propagate_defines(lib, defines)
+
+ for lib in (l for libs in self._libs.values() for l in libs):
+ if isinstance(lib, Library):
+ propagate_defines(lib, lib.lib_defines)
+ yield lib
+
+ for obj in self._binaries.values():
+ yield obj
+
+ LIBRARY_NAME_VAR = {
+ 'host': 'HOST_LIBRARY_NAME',
+ 'target': 'LIBRARY_NAME',
+ }
+
+ def _link_libraries(self, context, obj, variable):
+ """Add linkage declarations to a given object."""
+ assert isinstance(obj, Linkable)
+
+ for path in context.get(variable, []):
+ force_static = path.startswith('static:') and obj.KIND == 'target'
+ if force_static:
+ path = path[7:]
+ name = mozpath.basename(path)
+ dir = mozpath.dirname(path)
+ candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
+ if dir:
+ if dir.startswith('/'):
+ dir = mozpath.normpath(
+ mozpath.join(obj.topobjdir, dir[1:]))
+ else:
+ dir = mozpath.normpath(
+ mozpath.join(obj.objdir, dir))
+ dir = mozpath.relpath(dir, obj.topobjdir)
+ candidates = [l for l in candidates if l.relobjdir == dir]
+ if not candidates:
+ # If the given directory is under one of the external
+ # (third party) paths, use a fake library reference to
+ # there.
+ for d in self._external_paths:
+ if dir.startswith('%s/' % d):
+ candidates = [self._get_external_library(dir, name,
+ force_static)]
+ break
+
+ if not candidates:
+ raise SandboxValidationError(
+ '%s contains "%s", but there is no "%s" %s in %s.'
+ % (variable, path, name,
+ self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
+
+ if len(candidates) > 1:
+ # If there's more than one remaining candidate, it could be
+ # that there are instances of the same library, in static and
+ # shared form.
+ libs = {}
+ for l in candidates:
+ key = mozpath.join(l.relobjdir, l.basename)
+ if force_static:
+ if isinstance(l, StaticLibrary):
+ libs[key] = l
+ else:
+ if key in libs and isinstance(l, SharedLibrary):
+ libs[key] = l
+ if key not in libs:
+ libs[key] = l
+ candidates = libs.values()
+ if force_static and not candidates:
+ if dir:
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is no static '
+ '"%s" %s in %s.' % (variable, path, name,
+ self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is no static "%s" '
+ '%s in the tree' % (variable, name, name,
+ self.LIBRARY_NAME_VAR[obj.KIND]), context)
+
+ if not candidates:
+ raise SandboxValidationError(
+ '%s contains "%s", which does not match any %s in the tree.'
+ % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
+ context)
+
+ elif len(candidates) > 1:
+ paths = (mozpath.join(l.relativedir, 'moz.build')
+ for l in candidates)
+ raise SandboxValidationError(
+ '%s contains "%s", which matches a %s defined in multiple '
+ 'places:\n %s' % (variable, path,
+ self.LIBRARY_NAME_VAR[obj.KIND],
+ '\n '.join(paths)), context)
+
+ elif force_static and not isinstance(candidates[0], StaticLibrary):
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is only a shared "%s" '
+ 'in %s. You may want to add FORCE_STATIC_LIB=True in '
+ '%s/moz.build, or remove "static:".' % (variable, path,
+ name, candidates[0].relobjdir, candidates[0].relobjdir),
+ context)
+
+ elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
+ SharedLibrary):
+ self._static_linking_shared.add(obj)
+ obj.link_library(candidates[0])
+
+ # Link system libraries from OS_LIBS/HOST_OS_LIBS.
+ for lib in context.get(variable.replace('USE', 'OS'), []):
+ obj.link_system_library(lib)
+
+ @memoize
+ def _get_external_library(self, dir, name, force_static):
+ # Create ExternalStaticLibrary or ExternalSharedLibrary object with a
+ # context more or less truthful about where the external library is.
+ context = Context(config=self.config)
+ context.add_source(mozpath.join(self.config.topsrcdir, dir, 'dummy'))
+ if force_static:
+ return ExternalStaticLibrary(context, name)
+ else:
+ return ExternalSharedLibrary(context, name)
+
+ def _parse_cargo_file(self, toml_file):
+ """Parse toml_file and return a Python object representation of it."""
+ with open(toml_file, 'r') as f:
+ return pytoml.load(f)
+
+ def _verify_deps(self, context, crate_dir, crate_name, dependencies, description='Dependency'):
+ """Verify that a crate's dependencies all specify local paths."""
+ for dep_crate_name, values in dependencies.iteritems():
+ # A simple version number.
+ if isinstance(values, (str, unicode)):
+ raise SandboxValidationError(
+ '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
+ context)
+
+ dep_path = values.get('path', None)
+ if not dep_path:
+ raise SandboxValidationError(
+ '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
+ context)
+
+ # Try to catch the case where somebody listed a machine-local
+ # absolute path for development.
+ if os.path.isabs(dep_path):
+ raise SandboxValidationError(
+ '%s %s of crate %s has a non-relative path' % (description, dep_crate_name, crate_name),
+ context)
+
+ if not os.path.exists(mozpath.join(context.config.topsrcdir, crate_dir, dep_path)):
+ raise SandboxValidationError(
+ '%s %s of crate %s refers to a non-existent path' % (description, dep_crate_name, crate_name),
+ context)
+
+ def _rust_library(self, context, libname, static_args):
+ # We need to note any Rust library for linking purposes.
+ cargo_file = mozpath.join(context.srcdir, 'Cargo.toml')
+ if not os.path.exists(cargo_file):
+ raise SandboxValidationError(
+ 'No Cargo.toml file found in %s' % cargo_file, context)
+
+ config = self._parse_cargo_file(cargo_file)
+ crate_name = config['package']['name']
+
+ if crate_name != libname:
+ raise SandboxValidationError(
+ 'library %s does not match Cargo.toml-defined package %s' % (libname, crate_name),
+ context)
+
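+ # Illustrative sketch of a Cargo.toml that satisfies the checks below
+ # (hypothetical names):
+ #
+ #   [package]
+ #   name = "mylib"
+ #
+ #   [lib]
+ #   crate-type = ["staticlib"]
+ #
+ #   [profile.dev]
+ #   panic = "abort"
+ #
+ #   [profile.release]
+ #   panic = "abort"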
+ # Check that the crate-type field in the [lib] section is correct.
+ lib_section = config.get('lib', None)
+ if not lib_section:
+ raise SandboxValidationError(
+ 'Cargo.toml for %s has no [lib] section' % libname,
+ context)
+
+ crate_type = lib_section.get('crate-type', None)
+ if not crate_type:
+ raise SandboxValidationError(
+ 'Can\'t determine a crate-type for %s from Cargo.toml' % libname,
+ context)
+
+ crate_type = crate_type[0]
+ if crate_type != 'staticlib':
+ raise SandboxValidationError(
+ 'crate-type %s is not permitted for %s' % (crate_type, libname),
+ context)
+
+ # Check that panic = "abort" is set in both the [profile.dev] and
+ # [profile.release] sections.
+ profile_section = config.get('profile', None)
+ if not profile_section:
+ raise SandboxValidationError(
+ 'Cargo.toml for %s has no [profile] section' % libname,
+ context)
+
+ for profile_name in ['dev', 'release']:
+ profile = profile_section.get(profile_name, None)
+ if not profile:
+ raise SandboxValidationError(
+ 'Cargo.toml for %s has no [profile.%s] section' % (libname, profile_name),
+ context)
+
+ panic = profile.get('panic', None)
+ if panic != 'abort':
+ raise SandboxValidationError(
+ ('Cargo.toml for %s does not specify `panic = "abort"`'
+ ' in [profile.%s] section') % (libname, profile_name),
+ context)
+
+ dependencies = set(config.get('dependencies', {}).iterkeys())
+
+ return RustLibrary(context, libname, cargo_file, crate_type,
+ dependencies, **static_args)
+
+ def _handle_linkables(self, context, passthru, generated_files):
+ linkables = []
+ host_linkables = []
+ def add_program(prog, var):
+ if var.startswith('HOST_'):
+ host_linkables.append(prog)
+ else:
+ linkables.append(prog)
+
+ for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
+ program = context.get(kind)
+ if program:
+ if program in self._binaries:
+ raise SandboxValidationError(
+ 'Cannot use "%s" as %s name, '
+ 'because it is already used in %s' % (program, kind,
+ self._binaries[program].relativedir), context)
+ self._binaries[program] = cls(context, program)
+ self._linkage.append((context, self._binaries[program],
+ kind.replace('PROGRAM', 'USE_LIBS')))
+ add_program(self._binaries[program], kind)
+
+ for kind, cls in [
+ ('SIMPLE_PROGRAMS', SimpleProgram),
+ ('CPP_UNIT_TESTS', SimpleProgram),
+ ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
+ for program in context[kind]:
+ if program in self._binaries:
+ raise SandboxValidationError(
+ 'Cannot use "%s" in %s, '
+ 'because it is already used in %s' % (program, kind,
+ self._binaries[program].relativedir), context)
+ self._binaries[program] = cls(context, program,
+ is_unit_test=kind == 'CPP_UNIT_TESTS')
+ self._linkage.append((context, self._binaries[program],
+ 'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
+ else 'USE_LIBS'))
+ add_program(self._binaries[program], kind)
+
+ host_libname = context.get('HOST_LIBRARY_NAME')
+ libname = context.get('LIBRARY_NAME')
+
+ if host_libname:
+ if host_libname == libname:
+ raise SandboxValidationError('LIBRARY_NAME and '
+ 'HOST_LIBRARY_NAME must have different values', context)
+ lib = HostLibrary(context, host_libname)
+ self._libs[host_libname].append(lib)
+ self._linkage.append((context, lib, 'HOST_USE_LIBS'))
+ host_linkables.append(lib)
+
+ final_lib = context.get('FINAL_LIBRARY')
+ if not libname and final_lib:
+ # If no LIBRARY_NAME is given, create one.
+ libname = context.relsrcdir.replace('/', '_')
+
+ static_lib = context.get('FORCE_STATIC_LIB')
+ shared_lib = context.get('FORCE_SHARED_LIB')
+
+ static_name = context.get('STATIC_LIBRARY_NAME')
+ shared_name = context.get('SHARED_LIBRARY_NAME')
+
+ is_framework = context.get('IS_FRAMEWORK')
+ is_component = context.get('IS_COMPONENT')
+
+ soname = context.get('SONAME')
+
+ lib_defines = context.get('LIBRARY_DEFINES')
+
+ shared_args = {}
+ static_args = {}
+
+ if final_lib:
+ if static_lib:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
+ 'Please remove the latter.', context)
+ if shared_lib:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
+ 'Please remove one.', context)
+ if is_framework:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
+ 'Please remove one.', context)
+ if is_component:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY conflicts with IS_COMPONENT. '
+ 'Please remove one.', context)
+ static_args['link_into'] = final_lib
+ static_lib = True
+
+ if libname:
+ if is_component:
+ if static_lib:
+ raise SandboxValidationError(
+ 'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
+ 'Please remove one.', context)
+ shared_lib = True
+ shared_args['variant'] = SharedLibrary.COMPONENT
+
+ if is_framework:
+ if soname:
+ raise SandboxValidationError(
+ 'IS_FRAMEWORK conflicts with SONAME. '
+ 'Please remove one.', context)
+ shared_lib = True
+ shared_args['variant'] = SharedLibrary.FRAMEWORK
+
+ if not static_lib and not shared_lib:
+ static_lib = True
+
+ if static_name:
+ if not static_lib:
+ raise SandboxValidationError(
+ 'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
+ context)
+ static_args['real_name'] = static_name
+
+ if shared_name:
+ if not shared_lib:
+ raise SandboxValidationError(
+ 'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
+ context)
+ shared_args['real_name'] = shared_name
+
+ if soname:
+ if not shared_lib:
+ raise SandboxValidationError(
+ 'SONAME requires FORCE_SHARED_LIB', context)
+ shared_args['soname'] = soname
+
+ # If both a shared and a static library are created, only the
+ # shared library is meant to be an SDK library.
+ if context.get('SDK_LIBRARY'):
+ if shared_lib:
+ shared_args['is_sdk'] = True
+ elif static_lib:
+ static_args['is_sdk'] = True
+
+ if context.get('NO_EXPAND_LIBS'):
+ if not static_lib:
+ raise SandboxValidationError(
+ 'NO_EXPAND_LIBS can only be set for static libraries.',
+ context)
+ static_args['no_expand_lib'] = True
+
+ if shared_lib and static_lib:
+ if not static_name and not shared_name:
+ raise SandboxValidationError(
+ 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
+ 'but neither STATIC_LIBRARY_NAME nor '
+ 'SHARED_LIBRARY_NAME is set. At least one is required.',
+ context)
+ if static_name and not shared_name and static_name == libname:
+ raise SandboxValidationError(
+ 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
+ 'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
+ 'and SHARED_LIBRARY_NAME is unset. Please either '
+ 'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
+ 'SHARED_LIBRARY_NAME.', context)
+ if shared_name and not static_name and shared_name == libname:
+ raise SandboxValidationError(
+ 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
+ 'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
+ 'and STATIC_LIBRARY_NAME is unset. Please either '
+ 'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
+ 'STATIC_LIBRARY_NAME.', context)
+ if shared_name and static_name and shared_name == static_name:
+ raise SandboxValidationError(
+ 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
+ 'but SHARED_LIBRARY_NAME is the same as '
+ 'STATIC_LIBRARY_NAME. Please change one of them.',
+ context)
+
+ symbols_file = context.get('SYMBOLS_FILE')
+ if symbols_file:
+ if not shared_lib:
+ raise SandboxValidationError(
+ 'SYMBOLS_FILE can only be used with a SHARED_LIBRARY.',
+ context)
+ if context.get('DEFFILE') or context.get('LD_VERSION_SCRIPT'):
+ raise SandboxValidationError(
+ 'SYMBOLS_FILE cannot be used along with DEFFILE or '
+ 'LD_VERSION_SCRIPT.', context)
+ if isinstance(symbols_file, SourcePath):
+ if not os.path.exists(symbols_file.full_path):
+ raise SandboxValidationError(
+ 'Path specified in SYMBOLS_FILE does not exist: %s '
+ '(resolved to %s)' % (symbols_file,
+ symbols_file.full_path), context)
+ shared_args['symbols_file'] = True
+ else:
+ if symbols_file.target_basename not in generated_files:
+ raise SandboxValidationError(
+ ('Objdir file specified in SYMBOLS_FILE not in ' +
+ 'GENERATED_FILES: %s') % (symbols_file,), context)
+ shared_args['symbols_file'] = symbols_file.target_basename
+
+ if shared_lib:
+ lib = SharedLibrary(context, libname, **shared_args)
+ self._libs[libname].append(lib)
+ self._linkage.append((context, lib, 'USE_LIBS'))
+ linkables.append(lib)
+ generated_files.add(lib.lib_name)
+ if is_component and not context['NO_COMPONENTS_MANIFEST']:
+ yield ChromeManifestEntry(context,
+ 'components/components.manifest',
+ ManifestBinaryComponent('components', lib.lib_name))
+ if symbols_file and isinstance(symbols_file, SourcePath):
+ script = mozpath.join(
+ mozpath.dirname(mozpath.dirname(__file__)),
+ 'action', 'generate_symbols_file.py')
+ defines = ()
+ if lib.defines:
+ defines = lib.defines.get_defines()
+ yield GeneratedFile(context, script,
+ 'generate_symbols_file', lib.symbols_file,
+ [symbols_file], defines)
+ if static_lib:
+ is_rust_library = context.get('IS_RUST_LIBRARY')
+ if is_rust_library:
+ lib = self._rust_library(context, libname, static_args)
+ else:
+ lib = StaticLibrary(context, libname, **static_args)
+ self._libs[libname].append(lib)
+ self._linkage.append((context, lib, 'USE_LIBS'))
+ linkables.append(lib)
+
+ if lib_defines:
+ if not libname:
+ raise SandboxValidationError('LIBRARY_DEFINES needs a '
+ 'LIBRARY_NAME to take effect', context)
+ lib.lib_defines.update(lib_defines)
+
+ # Only emit sources if we have linkables defined in the same context.
+ # Note the linkables are not emitted in this function, but much later,
+ # after aggregation (because of e.g. USE_LIBS processing).
+ if not (linkables or host_linkables):
+ return
+
+ sources = defaultdict(list)
+ gen_sources = defaultdict(list)
+ all_flags = {}
+ for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
+ srcs = sources[symbol]
+ gen_srcs = gen_sources[symbol]
+ context_srcs = context.get(symbol, [])
+ for f in context_srcs:
+ full_path = f.full_path
+ if isinstance(f, SourcePath):
+ srcs.append(full_path)
+ else:
+ assert isinstance(f, Path)
+ gen_srcs.append(full_path)
+ if symbol == 'SOURCES':
+ flags = context_srcs[f]
+ if flags:
+ all_flags[full_path] = flags
+
+ if isinstance(f, SourcePath) and not os.path.exists(full_path):
+ raise SandboxValidationError('File listed in %s does not '
+ 'exist: \'%s\'' % (symbol, full_path), context)
+
+ # HOST_SOURCES and UNIFIED_SOURCES only take SourcePaths, so
+ # there should be no generated sources in here.
+ assert not gen_sources['HOST_SOURCES']
+ assert not gen_sources['UNIFIED_SOURCES']
+
+ no_pgo = context.get('NO_PGO')
+ no_pgo_sources = [f for f, flags in all_flags.iteritems()
+ if flags.no_pgo]
+ if no_pgo:
+ if no_pgo_sources:
+ raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
+ 'cannot be set at the same time', context)
+ passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
+ if no_pgo_sources:
+ passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources
+
+ # A map from "canonical suffixes" for a particular source file
+ # language to the range of suffixes associated with that language.
+ #
+ # We deliberately don't list the canonical suffix in the suffix list
+ # in the definition; we'll add it in programmatically after defining
+ # things.
+ suffix_map = {
+ '.s': set(['.asm']),
+ '.c': set(),
+ '.m': set(),
+ '.mm': set(),
+ '.cpp': set(['.cc', '.cxx']),
+ '.S': set(),
+ }
+
+ # The inverse of the above, mapping suffixes to their canonical suffix.
+ canonicalized_suffix_map = {}
+ for suffix, alternatives in suffix_map.iteritems():
+ alternatives.add(suffix)
+ for a in alternatives:
+ canonicalized_suffix_map[a] = suffix
+
+ def canonical_suffix_for_file(f):
+ return canonicalized_suffix_map[mozpath.splitext(f)[1]]
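+ # For example, both 'a.cc' and 'b.cxx' canonicalize to '.cpp', and
+ # 'c.asm' canonicalizes to '.s'.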
+
+ # A map from moz.build variables to the canonical suffixes of file
+ # kinds that can be listed therein.
+ all_suffixes = list(suffix_map.keys())
+ varmap = dict(
+ SOURCES=(Sources, GeneratedSources, all_suffixes),
+ HOST_SOURCES=(HostSources, None, ['.c', '.mm', '.cpp']),
+ UNIFIED_SOURCES=(UnifiedSources, None, ['.c', '.mm', '.cpp']),
+ )
+ # Track whether there are any C++ source files.
+ # Technically this won't do the right thing for SIMPLE_PROGRAMS in
+ # a directory with mixed C and C++ source, but it's not that important.
+ cxx_sources = defaultdict(bool)
+
+ for variable, (klass, gen_klass, suffixes) in varmap.items():
+ allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])
+
+ # First ensure that we haven't been given filetypes that we don't
+ # recognize.
+ for f in itertools.chain(sources[variable], gen_sources[variable]):
+ ext = mozpath.splitext(f)[1]
+ if ext not in allowed_suffixes:
+ raise SandboxValidationError(
+ '%s has an unknown file type.' % f, context)
+
+ for srcs, cls in ((sources[variable], klass),
+ (gen_sources[variable], gen_klass)):
+ # Now sort the files to let groupby work.
+ sorted_files = sorted(srcs, key=canonical_suffix_for_file)
+ for canonical_suffix, files in itertools.groupby(
+ sorted_files, canonical_suffix_for_file):
+ if canonical_suffix in ('.cpp', '.mm'):
+ cxx_sources[variable] = True
+ arglist = [context, list(files), canonical_suffix]
+ if (variable.startswith('UNIFIED_') and
+ 'FILES_PER_UNIFIED_FILE' in context):
+ arglist.append(context['FILES_PER_UNIFIED_FILE'])
+ obj = cls(*arglist)
+ yield obj
+
+ for f, flags in all_flags.iteritems():
+ if flags.flags:
+ yield PerSourceFlag(context, f, flags.flags)
+
+ # If there are any C++ sources, set all the linkables defined here
+ # to require the C++ linker.
+ for vars, linkable_items in ((('SOURCES', 'UNIFIED_SOURCES'), linkables),
+ (('HOST_SOURCES',), host_linkables)):
+ for var in vars:
+ if cxx_sources[var]:
+ for l in linkable_items:
+ l.cxx_link = True
+ break
+
+
+ def emit_from_context(self, context):
+ """Convert a Context to tree metadata objects.
+
+ This is a generator of mozbuild.frontend.data.ContextDerived instances.
+ """
+
+ # We only want to emit an InstallationTarget if one of the consulted
+ # variables is defined. Later on, we look up FINAL_TARGET, which has
+ # the side-effect of populating it. So, we need to do this lookup
+ # early.
+ if any(k in context for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')):
+ yield InstallationTarget(context)
+
+ # We always emit a directory traversal descriptor. This is needed by
+ # the recursive make backend.
+ for o in self._emit_directory_traversal_from_context(context):
+ yield o
+
+ for obj in self._process_xpidl(context):
+ yield obj
+
+ # Proxy some variables as-is until we have richer classes to represent
+ # them. We should aim to keep this set small because it violates the
+ # desired abstraction of the build definition away from makefiles.
+ passthru = VariablePassthru(context)
+ varlist = [
+ 'ALLOW_COMPILER_WARNINGS',
+ 'ANDROID_APK_NAME',
+ 'ANDROID_APK_PACKAGE',
+ 'ANDROID_GENERATED_RESFILES',
+ 'DISABLE_STL_WRAPPING',
+ 'EXTRA_DSO_LDOPTS',
+ 'PYTHON_UNIT_TESTS',
+ 'RCFILE',
+ 'RESFILE',
+ 'RCINCLUDE',
+ 'DEFFILE',
+ 'WIN32_EXE_LDFLAGS',
+ 'LD_VERSION_SCRIPT',
+ 'USE_EXTENSION_MANIFEST',
+ 'NO_JS_MANIFEST',
+ 'HAS_MISC_RULE',
+ ]
+ for v in varlist:
+ if v in context and context[v]:
+ passthru.variables[v] = context[v]
+
+ if context.config.substs.get('OS_TARGET') == 'WINNT' and \
+ context['DELAYLOAD_DLLS']:
+ context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
+ for dll in context['DELAYLOAD_DLLS']])
+ context['OS_LIBS'].append('delayimp')
+
+ for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'ASFLAGS',
+ 'LDFLAGS', 'HOST_CFLAGS', 'HOST_CXXFLAGS']:
+ if v in context and context[v]:
+ passthru.variables['MOZBUILD_' + v] = context[v]
+
+ # NO_VISIBILITY_FLAGS is slightly different
+ if context['NO_VISIBILITY_FLAGS']:
+ passthru.variables['VISIBILITY_FLAGS'] = ''
+
+ if isinstance(context, TemplateContext) and context.template == 'Gyp':
+ passthru.variables['IS_GYP_DIR'] = True
+
+ dist_install = context['DIST_INSTALL']
+ if dist_install is True:
+ passthru.variables['DIST_INSTALL'] = True
+ elif dist_install is False:
+ passthru.variables['NO_DIST_INSTALL'] = True
+
+ # Ideally, this should be done in templates, but this is difficult at
+ # the moment because USE_STATIC_LIBS can be set after a template
+ # returns. Eventually, with context-based templates, it will be
+ # possible.
+ if (context.config.substs.get('OS_ARCH') == 'WINNT' and
+ not context.config.substs.get('GNU_CC')):
+ use_static_lib = (context.get('USE_STATIC_LIBS') and
+ not context.config.substs.get('MOZ_ASAN'))
+ rtl_flag = '-MT' if use_static_lib else '-MD'
+ if (context.config.substs.get('MOZ_DEBUG') and
+ not context.config.substs.get('MOZ_NO_DEBUG_RTL')):
+ rtl_flag += 'd'
+ # Use a list, like MOZBUILD_*FLAGS variables
+ passthru.variables['RTL_FLAGS'] = [rtl_flag]
+
+ generated_files = set()
+ for obj in self._process_generated_files(context):
+ for f in obj.outputs:
+ generated_files.add(f)
+ yield obj
+
+ for path in context['CONFIGURE_SUBST_FILES']:
+ sub = self._create_substitution(ConfigFileSubstitution, context,
+ path)
+ generated_files.add(str(sub.relpath))
+ yield sub
+
+ defines = context.get('DEFINES')
+ if defines:
+ yield Defines(context, defines)
+
+ host_defines = context.get('HOST_DEFINES')
+ if host_defines:
+ yield HostDefines(context, host_defines)
+
+ simple_lists = [
+ ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
+ ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
+ ('IPDL_SOURCES', IPDLFile),
+ ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
+ ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
+ ('TEST_WEBIDL_FILES', TestWebIDLFile),
+ ('WEBIDL_FILES', WebIDLFile),
+ ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
+ ]
+ for context_var, klass in simple_lists:
+ for name in context.get(context_var, []):
+ yield klass(context, name)
+
+ for local_include in context.get('LOCAL_INCLUDES', []):
+ if (not isinstance(local_include, ObjDirPath) and
+ not os.path.exists(local_include.full_path)):
+ raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
+ 'does not exist: %s (resolved to %s)' % (local_include,
+ local_include.full_path), context)
+ yield LocalInclude(context, local_include)
+
+ for obj in self._handle_linkables(context, passthru, generated_files):
+ yield obj
+
+ generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', ''))
+ for k in self._binaries.keys()])
+
+ components = []
+ for var, cls in (
+ ('BRANDING_FILES', BrandingFiles),
+ ('EXPORTS', Exports),
+ ('FINAL_TARGET_FILES', FinalTargetFiles),
+ ('FINAL_TARGET_PP_FILES', FinalTargetPreprocessedFiles),
+ ('OBJDIR_FILES', ObjdirFiles),
+ ('OBJDIR_PP_FILES', ObjdirPreprocessedFiles),
+ ('SDK_FILES', SdkFiles),
+ ('TEST_HARNESS_FILES', TestHarnessFiles),
+ ):
+ all_files = context.get(var)
+ if not all_files:
+ continue
+ if dist_install is False and var != 'TEST_HARNESS_FILES':
+ raise SandboxValidationError(
+ '%s cannot be used with DIST_INSTALL = False' % var,
+ context)
+ has_prefs = False
+ has_resources = False
+ for base, files in all_files.walk():
+ if var == 'TEST_HARNESS_FILES' and not base:
+ raise SandboxValidationError(
+ 'Cannot install files to the root of TEST_HARNESS_FILES', context)
+ if base == 'components':
+ components.extend(files)
+ if base == 'defaults/pref':
+ has_prefs = True
+ if mozpath.split(base)[0] == 'res':
+ has_resources = True
+ for f in files:
+ if ((var == 'FINAL_TARGET_PP_FILES' or
+ var == 'OBJDIR_PP_FILES') and
+ not isinstance(f, SourcePath)):
+ raise SandboxValidationError(
+ ('Only source directory paths allowed in ' +
+ '%s: %s')
+ % (var, f,), context)
+ if not isinstance(f, ObjDirPath):
+ path = f.full_path
+ if '*' not in path and not os.path.exists(path):
+ raise SandboxValidationError(
+ 'File listed in %s does not exist: %s'
+ % (var, path), context)
+ else:
+ # TODO: Bug 1254682 - The '/' check is to allow
+ # installing files generated from other directories,
+ # which is done occasionally for tests. However, it
+ # means we don't fail early if the file isn't actually
+ # created by the other moz.build file.
+ if f.target_basename not in generated_files and '/' not in f:
+ raise SandboxValidationError(
+ ('Objdir file listed in %s not in ' +
+ 'GENERATED_FILES: %s') % (var, f), context)
+
+ # Addons (when XPI_NAME is defined) and Applications (when
+ # DIST_SUBDIR is defined) use a different preferences directory
+ # (default/preferences) from the one the GRE uses (defaults/pref).
+ # Hence, we move the files from the latter to the former in that
+ # case.
+ if has_prefs and (context.get('XPI_NAME') or
+ context.get('DIST_SUBDIR')):
+ all_files.defaults.preferences += all_files.defaults.pref
+ del all_files.defaults._children['pref']
+
+ if has_resources and (context.get('DIST_SUBDIR') or
+ context.get('XPI_NAME')):
+ raise SandboxValidationError(
+ 'RESOURCES_FILES cannot be used with DIST_SUBDIR or '
+ 'XPI_NAME.', context)
+
+ yield cls(context, all_files)
+
+ # Check for manifest declarations in EXTRA_{PP_,}COMPONENTS.
+ if any(e.endswith('.js') for e in components) and \
+ not any(e.endswith('.manifest') for e in components) and \
+ not context.get('NO_JS_MANIFEST', False):
+ raise SandboxValidationError('A .js component was specified in EXTRA_COMPONENTS '
+ 'or EXTRA_PP_COMPONENTS without a matching '
+ '.manifest file. See '
+ 'https://developer.mozilla.org/en/XPCOM/XPCOM_changes_in_Gecko_2.0 .',
+ context)
+
+ for c in components:
+ if c.endswith('.manifest'):
+ yield ChromeManifestEntry(context, 'chrome.manifest',
+ Manifest('components',
+ mozpath.basename(c)))
+
+ for obj in self._process_test_manifests(context):
+ yield obj
+
+ for obj in self._process_jar_manifests(context):
+ yield obj
+
+ for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
+ yield ContextWrapped(context, jar)
+
+ for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items():
+ yield ContextWrapped(context, data)
+
+ if context.get('USE_YASM') is True:
+ yasm = context.config.substs.get('YASM')
+ if not yasm:
+ raise SandboxValidationError('yasm is not available', context)
+ passthru.variables['AS'] = yasm
+ passthru.variables['ASFLAGS'] = context.config.substs.get('YASM_ASFLAGS')
+ passthru.variables['AS_DASH_C_FLAG'] = ''
+
+ for (symbol, cls) in [
+ ('ANDROID_RES_DIRS', AndroidResDirs),
+ ('ANDROID_EXTRA_RES_DIRS', AndroidExtraResDirs),
+ ('ANDROID_ASSETS_DIRS', AndroidAssetsDirs)]:
+ paths = context.get(symbol)
+ if not paths:
+ continue
+ for p in paths:
+ if isinstance(p, SourcePath) and not os.path.isdir(p.full_path):
+ raise SandboxValidationError('Directory listed in '
+ '%s is not a directory: \'%s\'' %
+ (symbol, p.full_path), context)
+ yield cls(context, paths)
+
+ android_extra_packages = context.get('ANDROID_EXTRA_PACKAGES')
+ if android_extra_packages:
+ yield AndroidExtraPackages(context, android_extra_packages)
+
+ if passthru.variables:
+ yield passthru
+
+ def _create_substitution(self, cls, context, path):
+ sub = cls(context)
+ sub.input_path = '%s.in' % path.full_path
+ sub.output_path = path.translated
+ sub.relpath = path
+
+ return sub
+
+ def _process_xpidl(self, context):
+ # XPIDL source files get processed and turned into .h and .xpt files.
+ # If there are multiple XPIDL files in a directory, they get linked
+ # together into a final .xpt, which has the name defined by
+ # XPIDL_MODULE.
+ xpidl_module = context['XPIDL_MODULE']
+
+ if context['XPIDL_SOURCES'] and not xpidl_module:
+ raise SandboxValidationError('XPIDL_MODULE must be defined if '
+ 'XPIDL_SOURCES is defined.', context)
+
+ if xpidl_module and not context['XPIDL_SOURCES']:
+ raise SandboxValidationError('XPIDL_MODULE cannot be defined '
+ 'unless there are XPIDL_SOURCES', context)
+
+ if context['XPIDL_SOURCES'] and context['DIST_INSTALL'] is False:
+ self.log(logging.WARN, 'mozbuild_warning', dict(
+ path=context.main_path),
+ '{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.')
+
+ for idl in context['XPIDL_SOURCES']:
+ yield XPIDLFile(context, mozpath.join(context.srcdir, idl),
+ xpidl_module, add_to_manifest=not context['XPIDL_NO_MANIFEST'])
+
+ def _process_generated_files(self, context):
+ for path in context['CONFIGURE_DEFINE_FILES']:
+ script = mozpath.join(mozpath.dirname(mozpath.dirname(__file__)),
+ 'action', 'process_define_files.py')
+ yield GeneratedFile(context, script, 'process_define_file',
+ unicode(path),
+ [Path(context, path + '.in')])
+
+ generated_files = context.get('GENERATED_FILES')
+ if not generated_files:
+ return
+
+ for f in generated_files:
+ flags = generated_files[f]
+ outputs = f
+ inputs = []
+ if flags.script:
+ method = "main"
+ script = SourcePath(context, flags.script).full_path
+
+ # Deal with cases like "C:\\path\\to\\script.py:function".
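+ # For example, "C:\\path\\to\\script.py:main" splits into the script
+ # "C:\\path\\to\\script.py" and the method "main".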
+ if '.py:' in script:
+ script, method = script.rsplit('.py:', 1)
+ script += '.py'
+
+ if not os.path.exists(script):
+ raise SandboxValidationError(
+ 'Script for generating %s does not exist: %s'
+ % (f, script), context)
+ if os.path.splitext(script)[1] != '.py':
+ raise SandboxValidationError(
+ 'Script for generating %s does not end in .py: %s'
+ % (f, script), context)
+
+ for i in flags.inputs:
+ p = Path(context, i)
+ if (isinstance(p, SourcePath) and
+ not os.path.exists(p.full_path)):
+ raise SandboxValidationError(
+ 'Input for generating %s does not exist: %s'
+ % (f, p.full_path), context)
+ inputs.append(p)
+ else:
+ script = None
+ method = None
+ yield GeneratedFile(context, script, method, outputs, inputs)
+
+ def _process_test_manifests(self, context):
+ for prefix, info in TEST_MANIFESTS.items():
+ for path, manifest in context.get('%s_MANIFESTS' % prefix, []):
+ for obj in self._process_test_manifest(context, info, path, manifest):
+ yield obj
+
+ for flavor in REFTEST_FLAVORS:
+ for path, manifest in context.get('%s_MANIFESTS' % flavor.upper(), []):
+ for obj in self._process_reftest_manifest(context, flavor, path, manifest):
+ yield obj
+
+ for flavor in WEB_PLATFORM_TESTS_FLAVORS:
+ for path, manifest in context.get("%s_MANIFESTS" % flavor.upper().replace('-', '_'), []):
+ for obj in self._process_web_platform_tests_manifest(context, path, manifest):
+ yield obj
+
+ python_tests = context.get('PYTHON_UNIT_TESTS')
+ if python_tests:
+ for obj in self._process_python_tests(context, python_tests):
+ yield obj
+
+ def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
+ flavor, install_root, install_subdir, package_tests = info
+
+ path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
+ manifest_dir = mozpath.dirname(path)
+ manifest_reldir = mozpath.dirname(mozpath.relpath(path,
+ context.config.topsrcdir))
+ install_prefix = mozpath.join(install_root, install_subdir)
+
+ try:
+ if not mpmanifest.tests:
+ raise SandboxValidationError('Empty test manifest: %s'
+ % path, context)
+
+ defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
+ obj = TestManifest(context, path, mpmanifest, flavor=flavor,
+ install_prefix=install_prefix,
+ relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
+ dupe_manifest='dupe-manifest' in defaults)
+
+ filtered = mpmanifest.tests
+
+ # Jetpack add-on tests are expected to be generated during the
+ # build process so they won't exist here.
+ if flavor != 'jetpack-addon':
+ missing = [t['name'] for t in filtered if not os.path.exists(t['path'])]
+ if missing:
+ raise SandboxValidationError('Test manifest (%s) lists '
+ 'tests that do not exist: %s' % (
+ path, ', '.join(missing)), context)
+
+ out_dir = mozpath.join(install_prefix, manifest_reldir)
+ if 'install-to-subdir' in defaults:
+ # This is terrible, but what are you going to do?
+ out_dir = mozpath.join(out_dir, defaults['install-to-subdir'])
+ obj.manifest_obj_relpath = mozpath.join(manifest_reldir,
+ defaults['install-to-subdir'],
+ mozpath.basename(path))
+
+ def process_support_files(test):
+ install_info = self._test_files_converter.convert_support_files(
+ test, install_root, manifest_dir, out_dir)
+
+ obj.pattern_installs.extend(install_info.pattern_installs)
+ for source, dest in install_info.installs:
+ obj.installs[source] = (dest, False)
+ obj.external_installs |= install_info.external_installs
+ for install_path in install_info.deferred_installs:
+ if all(['*' not in install_path,
+ not os.path.isfile(mozpath.join(context.config.topsrcdir,
+ install_path[2:])),
+ install_path not in install_info.external_installs]):
+ raise SandboxValidationError('Error processing test '
+ 'manifest %s: entry in support-files not present '
+ 'in the srcdir: %s' % (path, install_path), context)
+
+ obj.deferred_installs |= install_info.deferred_installs
+
+ for test in filtered:
+ obj.tests.append(test)
+
+ # Some test files are compiled and should not be copied into the
+ # test package. They function as identifiers rather than files.
+ if package_tests:
+ manifest_relpath = mozpath.relpath(test['path'],
+ mozpath.dirname(test['manifest']))
+ obj.installs[mozpath.normpath(test['path'])] = \
+ ((mozpath.join(out_dir, manifest_relpath)), True)
+
+ process_support_files(test)
+
+ for path, m_defaults in mpmanifest.manifest_defaults.items():
+ process_support_files(m_defaults)
+
+ # We also copy manifests into the output directory,
+ # including manifests from [include:foo] directives.
+ for mpath in mpmanifest.manifests():
+ mpath = mozpath.normpath(mpath)
+ out_path = mozpath.join(out_dir, mozpath.basename(mpath))
+ obj.installs[mpath] = (out_path, False)
+
+ # Some manifests reference files that are auto generated as
+ # part of the build or shouldn't be installed for some
+ # reason. Here, we prune those files from the install set.
+ # FUTURE we should be able to detect autogenerated files from
+ # other build metadata. Once we do that, we can get rid of this.
+ for f in defaults.get('generated-files', '').split():
+ # We re-raise as a SandboxValidationError, since otherwise the
+ # stack trace isn't informative.
+ try:
+ del obj.installs[mozpath.join(manifest_dir, f)]
+ except KeyError:
+ raise SandboxValidationError('Error processing test '
+ 'manifest %s: entry in generated-files not present '
+ 'elsewhere in manifest: %s' % (path, f), context)
+
+ yield obj
+ except Exception:
+ raise SandboxValidationError('Error processing test '
+ 'manifest file %s: %s' % (path,
+ '\n'.join(traceback.format_exception(*sys.exc_info()))),
+ context)
+
+ def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
+ manifest_full_path = mozpath.normpath(mozpath.join(
+ context.srcdir, manifest_path))
+ manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
+ context.config.topsrcdir))
+
+ # reftest manifests don't come from the manifest parser. But they are
+ # similar enough that we can use the same emitted objects. Note
+ # that we don't perform any installs for reftests.
+ obj = TestManifest(context, manifest_full_path, manifest,
+ flavor=flavor, install_prefix='%s/' % flavor,
+ relpath=mozpath.join(manifest_reldir,
+ mozpath.basename(manifest_path)))
+
+ for test, source_manifest in sorted(manifest.tests):
+ obj.tests.append({
+ 'path': test,
+ 'here': mozpath.dirname(test),
+ 'manifest': source_manifest,
+ 'name': mozpath.basename(test),
+ 'head': '',
+ 'tail': '',
+ 'support-files': '',
+ 'subsuite': '',
+ })
+
+ yield obj
+
+ def _process_web_platform_tests_manifest(self, context, paths, manifest):
+ manifest_path, tests_root = paths
+ manifest_full_path = mozpath.normpath(mozpath.join(
+ context.srcdir, manifest_path))
+ manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
+ context.config.topsrcdir))
+ tests_root = mozpath.normpath(mozpath.join(context.srcdir, tests_root))
+
+ # Create an equivalent TestManifest object.
+ obj = TestManifest(context, manifest_full_path, manifest,
+ flavor="web-platform-tests",
+ relpath=mozpath.join(manifest_reldir,
+ mozpath.basename(manifest_path)),
+ install_prefix="web-platform/")
+
+ for path, tests in manifest:
+ path = mozpath.join(tests_root, path)
+ for test in tests:
+ if test.item_type not in ["testharness", "reftest"]:
+ continue
+
+ obj.tests.append({
+ 'path': path,
+ 'here': mozpath.dirname(path),
+ 'manifest': manifest_path,
+ 'name': test.id,
+ 'head': '',
+ 'tail': '',
+ 'support-files': '',
+ 'subsuite': '',
+ })
+
+ yield obj
+
+ def _process_python_tests(self, context, python_tests):
+ manifest_full_path = context.main_path
+ manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
+ context.config.topsrcdir))
+
+ obj = TestManifest(context, manifest_full_path,
+ mozpath.basename(manifest_full_path),
+ flavor='python', install_prefix='python/',
+ relpath=mozpath.join(manifest_reldir,
+ mozpath.basename(manifest_full_path)))
+
+ for test in python_tests:
+ test = mozpath.normpath(mozpath.join(context.srcdir, test))
+ if not os.path.isfile(test):
+ raise SandboxValidationError('Path specified in '
+ 'PYTHON_UNIT_TESTS does not exist: %s' % test,
+ context)
+ obj.tests.append({
+ 'path': test,
+ 'here': mozpath.dirname(test),
+ 'manifest': manifest_full_path,
+ 'name': mozpath.basename(test),
+ 'head': '',
+ 'tail': '',
+ 'support-files': '',
+ 'subsuite': '',
+ })
+
+ yield obj
+
+ def _process_jar_manifests(self, context):
+ jar_manifests = context.get('JAR_MANIFESTS', [])
+ if len(jar_manifests) > 1:
+ raise SandboxValidationError('While JAR_MANIFESTS is a list, '
+ 'it is currently limited to one value.', context)
+
+ for path in jar_manifests:
+ yield JARManifest(context, path)
+
+ # Temporary test to look for jar.mn files that creep in without using
+ # the new declaration. Before, we didn't require jar.mn files to be
+ # declared anywhere (they were discovered). This will detect people
+ # relying on the old behavior.
+ if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
+ if 'jar.mn' not in jar_manifests:
+ raise SandboxValidationError('A jar.mn exists but it '
+ 'is not referenced in the moz.build file. '
+ 'Please define JAR_MANIFESTS.', context)
+
+ def _emit_directory_traversal_from_context(self, context):
+ o = DirectoryTraversal(context)
+ o.dirs = context.get('DIRS', [])
+
+ # Some paths have a subconfigure, yet also have a moz.build. Those
+ # shouldn't end up in self._external_paths.
+ if o.objdir:
+ self._external_paths -= { o.relobjdir }
+
+ yield o
diff --git a/python/mozbuild/mozbuild/frontend/gyp_reader.py b/python/mozbuild/mozbuild/frontend/gyp_reader.py
new file mode 100644
index 000000000..459c553c3
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -0,0 +1,248 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import gyp
+import sys
+import os
+import types
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+from .sandbox import alphabetical_sorted
+from .context import (
+ SourcePath,
+ TemplateContext,
+ VARIABLES,
+)
+from mozbuild.util import (
+ expand_variables,
+ List,
+ memoize,
+)
+from .reader import SandboxValidationError
+
+# Define this module as gyp.generator.mozbuild so that gyp can use it
+# as a generator under the name "mozbuild".
+sys.modules['gyp.generator.mozbuild'] = sys.modules[__name__]
+
+# build/gyp_chromium does this:
+# script_dir = os.path.dirname(os.path.realpath(__file__))
+# chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+# sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+# We're not importing gyp_chromium, but we want both script_dir and
+# chrome_src for the default includes, so go backwards from the pylib
+ # directory, which is the parent directory of the gyp module.
+chrome_src = mozpath.abspath(mozpath.join(mozpath.dirname(gyp.__file__),
+ '../../../..'))
+script_dir = mozpath.join(chrome_src, 'build')
+
+# Default variables gyp uses when evaluating gyp files.
+generator_default_variables = {
+}
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ # Some gyp steps fail if these are empty(!).
+ generator_default_variables[dirname] = b'dir'
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'LINKER_SUPPORTS_ICF']:
+ generator_default_variables[unused] = b''
+
+
+class GypContext(TemplateContext):
+ """Specialized Context for use with data extracted from Gyp.
+
+ config is the ConfigEnvironment for this context.
+ relobjdir is the object directory that will be used for this context,
+ relative to the topobjdir defined in the ConfigEnvironment.
+ """
+ def __init__(self, config, relobjdir):
+ self._relobjdir = relobjdir
+ TemplateContext.__init__(self, template='Gyp',
+ allowed_variables=VARIABLES, config=config)
+
+
+def encode(value):
+ if isinstance(value, unicode):
+ return value.encode('utf-8')
+ return value
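+
+# For example, encode(u'foo') returns the byte string 'foo', while
+# non-unicode values such as 123 pass through unchanged.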
+
+
+def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
+ """Read a gyp configuration and emit GypContexts for the backend to
+ process.
+
+ config is a ConfigEnvironment, path is the path to a root gyp configuration
+ file, output is the base path under which the objdir for the various gyp
+ dependencies will be, and vars is a dict of variables to pass to the gyp
+ processor.
+ """
+
+ # gyp expects plain str instead of unicode. The frontend code gives us
+ # unicode strings, so convert them.
+ path = encode(path)
+ str_vars = dict((name, encode(value)) for name, value in vars.items())
+
+ params = {
+ b'parallel': False,
+ b'generator_flags': {},
+ b'build_files': [path],
+ b'root_targets': None,
+ }
+
+ # Files that gyp_chromium always includes
+ includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
+ finder = FileFinder(chrome_src, find_executables=False)
+ includes.extend(encode(mozpath.join(chrome_src, name))
+ for name, _ in finder.find('*/supplement.gypi'))
+
+ # Read the given gyp file and its dependencies.
+ generator, flat_list, targets, data = \
+ gyp.Load([path], format=b'mozbuild',
+ default_variables=str_vars,
+ includes=includes,
+ depth=encode(chrome_src),
+ params=params)
+
+ # Process all targets from the given gyp files and its dependencies.
+ # The path given to AllTargets needs to use os.sep, while the frontend code
+ # gives us paths normalized with forward slash separator.
+ for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
+ build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
+
+ # Each target is given its own objdir. The base of that objdir
+ # is derived from the relative path from the root gyp file path
+ # to the current build_file, placed under the given output
+ # directory. Since several targets can be in a given build_file,
+ # separate them in subdirectories using the build_file basename
+ # and the target_name.
+ reldir = mozpath.relpath(mozpath.dirname(build_file),
+ mozpath.dirname(path))
+ subdir = '%s_%s' % (
+ mozpath.splitext(mozpath.basename(build_file))[0],
+ target_name,
+ )
+ # Emit a context for each target.
+ context = GypContext(config, mozpath.relpath(
+ mozpath.join(output, reldir, subdir), config.topobjdir))
+ context.add_source(mozpath.abspath(build_file))
+ # The list of included files returned by gyp are relative to build_file
+ for f in data[build_file]['included_files']:
+ context.add_source(mozpath.abspath(mozpath.join(
+ mozpath.dirname(build_file), f)))
+
+ spec = targets[target]
+
+ # Derive which gyp configuration to use based on MOZ_DEBUG.
+ c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
+ if c not in spec['configurations']:
+ raise RuntimeError('Missing %s gyp configuration for target %s '
+ 'in %s' % (c, target_name, build_file))
+ target_conf = spec['configurations'][c]
+
+ if spec['type'] == 'none':
+ continue
+ elif spec['type'] == 'static_library':
+ # Remove leading 'lib' from the target_name if any, and use as
+ # library name.
+ name = spec['target_name']
+ if name.startswith('lib'):
+ name = name[3:]
+ # The context expects a unicode string.
+ context['LIBRARY_NAME'] = name.decode('utf-8')
+ # gyp files contain headers and asm sources in sources lists.
+ sources = []
+ unified_sources = []
+ extensions = set()
+ for f in spec.get('sources', []):
+ ext = mozpath.splitext(f)[-1]
+ extensions.add(ext)
+ s = SourcePath(context, f)
+ if ext == '.h':
+ continue
+ if ext != '.S' and s not in non_unified_sources:
+ unified_sources.append(s)
+ else:
+ sources.append(s)
+
+ # The context expects alphabetical order when adding sources
+ context['SOURCES'] = alphabetical_sorted(sources)
+ context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)
+
+ for define in target_conf.get('defines', []):
+ if '=' in define:
+ name, value = define.split('=', 1)
+ context['DEFINES'][name] = value
+ else:
+ context['DEFINES'][define] = True
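+ # For example, a gyp define 'FOO=1' becomes DEFINES['FOO'] = '1',
+ # while a bare 'BAR' becomes DEFINES['BAR'] = True.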
+
+ for include in target_conf.get('include_dirs', []):
+ # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
+ #
+ # NB: gyp files sometimes have actual absolute paths (e.g.
+ # /usr/include32) and sometimes paths that moz.build considers
+ # absolute, i.e. starting from topsrcdir. There's no good way
+ # to tell them apart here, and the actual absolute paths are
+ # likely bogus. In any event, actual absolute paths will be
+ # filtered out by trying to find them in topsrcdir.
+ if include.startswith('/'):
+ resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
+ else:
+ resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
+ if not os.path.exists(resolved):
+ continue
+ context['LOCAL_INCLUDES'] += [include]
+
+ context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
+ flags = target_conf.get('cflags_mozilla', [])
+ if flags:
+ suffix_map = {
+ '.c': 'CFLAGS',
+ '.cpp': 'CXXFLAGS',
+ '.cc': 'CXXFLAGS',
+ '.m': 'CMFLAGS',
+ '.mm': 'CMMFLAGS',
+ }
+ variables = (
+ suffix_map[e]
+ for e in extensions if e in suffix_map
+ )
+ for var in variables:
+ for f in flags:
+ # We may be getting make variable references out of the
+ # gyp data, and we don't want those in emitted data, so
+ # substitute them with their actual value.
+ f = expand_variables(f, config.substs).split()
+ if not f:
+ continue
+ # The result may be a string or a list.
+ if isinstance(f, types.StringTypes):
+ context[var].append(f)
+ else:
+ context[var].extend(f)
+ else:
+ # Types other than static_library aren't supported because we don't
+ # have anything using them, and we're not testing them. They can be
+ # added when that becomes necessary.
+ raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])
+
+ # Add some features to all contexts. Put here in case LOCAL_INCLUDES
+ # order matters.
+ context['LOCAL_INCLUDES'] += [
+ '!/ipc/ipdl/_ipdlheaders',
+ '/ipc/chromium/src',
+ '/ipc/glue',
+ ]
+ # These get set via VC project file settings for normal GYP builds.
+ if config.substs['OS_TARGET'] == 'WINNT':
+ context['DEFINES']['UNICODE'] = True
+ context['DEFINES']['_UNICODE'] = True
+ context['DISABLE_STL_WRAPPING'] = True
+
+ yield context
diff --git a/python/mozbuild/mozbuild/frontend/mach_commands.py b/python/mozbuild/mozbuild/frontend/mach_commands.py
new file mode 100644
index 000000000..cbecc1137
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/mach_commands.py
@@ -0,0 +1,218 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from collections import defaultdict
+import os
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+ SubCommand,
+)
+
+from mozbuild.base import MachCommandBase
+import mozpack.path as mozpath
+
+
+class InvalidPathException(Exception):
+ """Represents an error due to an invalid path."""
+
+
+@CommandProvider
+class MozbuildFileCommands(MachCommandBase):
+ @Command('mozbuild-reference', category='build-dev',
+ description='View reference documentation on mozbuild files.')
+ @CommandArgument('symbol', default=None, nargs='*',
+ help='Symbol to view help on. If not specified, all will be shown.')
+ @CommandArgument('--name-only', '-n', default=False, action='store_true',
+ help='Print symbol names only.')
+ def reference(self, symbol, name_only=False):
+ # mozbuild.sphinx imports some Sphinx modules, so we need to be sure
+ # the optional Sphinx package is installed.
+ self._activate_virtualenv()
+ self.virtualenv_manager.install_pip_package('Sphinx==1.1.3')
+
+ from mozbuild.sphinx import (
+ format_module,
+ function_reference,
+ special_reference,
+ variable_reference,
+ )
+
+ import mozbuild.frontend.context as m
+
+ if name_only:
+ for s in sorted(m.VARIABLES.keys()):
+ print(s)
+
+ for s in sorted(m.FUNCTIONS.keys()):
+ print(s)
+
+ for s in sorted(m.SPECIAL_VARIABLES.keys()):
+ print(s)
+
+ return 0
+
+ if symbol:
+ for s in symbol:
+ if s in m.VARIABLES:
+ for line in variable_reference(s, *m.VARIABLES[s]):
+ print(line)
+ continue
+ elif s in m.FUNCTIONS:
+ for line in function_reference(s, *m.FUNCTIONS[s]):
+ print(line)
+ continue
+ elif s in m.SPECIAL_VARIABLES:
+ for line in special_reference(s, *m.SPECIAL_VARIABLES[s]):
+ print(line)
+ continue
+
+ print('Could not find symbol: %s' % s)
+ return 1
+
+ return 0
+
+ for line in format_module(m):
+ print(line)
+
+ return 0
+
+ @Command('file-info', category='build-dev',
+ description='Query for metadata about files.')
+ def file_info(self):
+ """Show files metadata derived from moz.build files.
+
+ moz.build files contain "Files" sub-contexts for declaring metadata
+ against file patterns. This command suite is used to query that data.
+ """
+
+ @SubCommand('file-info', 'bugzilla-component',
+ 'Show Bugzilla component info for files listed.')
+ @CommandArgument('-r', '--rev',
+ help='Version control revision to look up info from')
+ @CommandArgument('paths', nargs='+',
+ help='Paths whose data to query')
+ def file_info_bugzilla(self, paths, rev=None):
+ """Show Bugzilla component for a set of files.
+
+ Given a requested set of files (which can be specified using
+ wildcards), print the Bugzilla component for each file.
+ """
+ components = defaultdict(set)
+ try:
+ for p, m in self._get_files_info(paths, rev=rev).items():
+ components[m.get('BUG_COMPONENT')].add(p)
+ except InvalidPathException as e:
+ print(e.message)
+ return 1
+
+ # x is a (component, files) tuple; sort unknown (None) components last.
+ for component, files in sorted(components.items(), key=lambda x: (x[0] is None, x)):
+ print(('%s :: %s' % (component.product, component.component)) if component else 'UNKNOWN')
+ for f in sorted(files):
+ print(' %s' % f)
+
+ @SubCommand('file-info', 'missing-bugzilla',
+ 'Show files missing Bugzilla component info')
+ @CommandArgument('-r', '--rev',
+ help='Version control revision to look up info from')
+ @CommandArgument('paths', nargs='+',
+ help='Paths whose data to query')
+ def file_info_missing_bugzilla(self, paths, rev=None):
+ try:
+ for p, m in sorted(self._get_files_info(paths, rev=rev).items()):
+ if 'BUG_COMPONENT' not in m:
+ print(p)
+ except InvalidPathException as e:
+ print(e.message)
+ return 1
+
+ @SubCommand('file-info', 'dep-tests',
+ 'Show test files marked as dependencies of these source files.')
+ @CommandArgument('-r', '--rev',
+ help='Version control revision to look up info from')
+ @CommandArgument('paths', nargs='+',
+ help='Paths whose data to query')
+ def file_info_test_deps(self, paths, rev=None):
+ try:
+ for p, m in self._get_files_info(paths, rev=rev).items():
+ print('%s:' % mozpath.relpath(p, self.topsrcdir))
+ if m.test_files:
+ print('\tTest file patterns:')
+ for p in m.test_files:
+ print('\t\t%s' % p)
+ if m.test_tags:
+ print('\tRelevant tags:')
+ for p in m.test_tags:
+ print('\t\t%s' % p)
+ if m.test_flavors:
+ print('\tRelevant flavors:')
+ for p in m.test_flavors:
+ print('\t\t%s' % p)
+
+ except InvalidPathException as e:
+ print(e.message)
+ return 1
+
+ def _get_reader(self, finder):
+ from mozbuild.frontend.reader import (
+ BuildReader,
+ EmptyConfig,
+ )
+
+ config = EmptyConfig(self.topsrcdir)
+ return BuildReader(config, finder=finder)
+
+ def _get_files_info(self, paths, rev=None):
+ from mozbuild.frontend.reader import default_finder
+ from mozpack.files import FileFinder, MercurialRevisionFinder
+
+ # Normalize to relative from topsrcdir.
+ relpaths = []
+ for p in paths:
+ a = mozpath.abspath(p)
+ if not mozpath.basedir(a, [self.topsrcdir]):
+ raise InvalidPathException('path is outside topsrcdir: %s' % p)
+
+ relpaths.append(mozpath.relpath(a, self.topsrcdir))
+
+ repo = None
+ if rev:
+ hg_path = os.path.join(self.topsrcdir, '.hg')
+ if not os.path.exists(hg_path):
+ raise InvalidPathException('a Mercurial repo is required '
+ 'when specifying a revision')
+
+ repo = self.topsrcdir
+
+ # We need two finders because the reader's finder operates on
+ # absolute paths.
+ finder = FileFinder(self.topsrcdir, find_executables=False)
+ if repo:
+ reader_finder = MercurialRevisionFinder(repo, rev=rev,
+ recognize_repo_paths=True)
+ else:
+ reader_finder = default_finder
+
+ # Expand wildcards.
+ allpaths = []
+ for p in relpaths:
+ if '*' not in p:
+ if p not in allpaths:
+ allpaths.append(p)
+ continue
+
+ if repo:
+ raise InvalidPathException('cannot use wildcard in version control mode')
+
+ for path, f in finder.find(p):
+ if path not in allpaths:
+ allpaths.append(path)
+
+ reader = self._get_reader(finder=reader_finder)
+ return reader.files_info(allpaths)
diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py
new file mode 100644
index 000000000..8192b1ec6
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -0,0 +1,1408 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains code for reading metadata from the build system into
+# data structures.
+
+r"""Read build frontend files into data structures.
+
+In terms of code architecture, the main interface is BuildReader. BuildReader
+starts with a root mozbuild file. It creates a new execution environment for
+this file, which is represented by the Sandbox class. The Sandbox class is used
+to fill a Context, representing the output of an individual mozbuild file. The
+
+The BuildReader contains basic logic for traversing a tree of mozbuild files.
+It does this by examining specific variables populated during execution.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import ast
+import inspect
+import logging
+import os
+import sys
+import textwrap
+import time
+import traceback
+import types
+
+from collections import (
+ defaultdict,
+ OrderedDict,
+)
+from io import StringIO
+
+from mozbuild.util import (
+ EmptyValue,
+ HierarchicalStringList,
+ memoize,
+ ReadOnlyDefaultDict,
+)
+
+from mozbuild.testing import (
+ TEST_MANIFESTS,
+ REFTEST_FLAVORS,
+ WEB_PLATFORM_TESTS_FLAVORS,
+)
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+
+from mozpack.files import FileFinder
+import mozpack.path as mozpath
+
+from .data import (
+ AndroidEclipseProjectData,
+ JavaJarData,
+)
+
+from .sandbox import (
+ default_finder,
+ SandboxError,
+ SandboxExecutionError,
+ SandboxLoadError,
+ Sandbox,
+)
+
+from .context import (
+ Context,
+ ContextDerivedValue,
+ Files,
+ FUNCTIONS,
+ VARIABLES,
+ DEPRECATION_HINTS,
+ SourcePath,
+ SPECIAL_VARIABLES,
+ SUBCONTEXTS,
+ SubContext,
+ TemplateContext,
+)
+
+from mozbuild.base import ExecutionSummary
+
+
+if sys.version_info.major == 2:
+ text_type = unicode
+ type_type = types.TypeType
+else:
+ text_type = str
+ type_type = type
+
+
+def log(logger, level, action, params, formatter):
+ logger.log(level, formatter, extra={'action': action, 'params': params})
+
+
+class EmptyConfig(object):
+ """A config object that is empty.
+
+ This config object is suitable for use with a BuildReader on a vanilla
+ checkout, without any existing configuration. The config is simply
+ bootstrapped from a top source directory path.
+ """
+ class PopulateOnGetDict(ReadOnlyDefaultDict):
+ """A variation on ReadOnlyDefaultDict that populates during .get().
+
+ This variation is needed because CONFIG uses .get() to access members.
+ Without it, None (instead of our EmptyValue types) would be returned.
+ """
+ def get(self, key, default=None):
+ return self[key]
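+
+ # For example, .get('MOZ_FOO') on one of these dicts returns an
+ # EmptyValue instance rather than None ('MOZ_FOO' is an illustrative key).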
+
+ def __init__(self, topsrcdir):
+ self.topsrcdir = topsrcdir
+ self.topobjdir = ''
+
+ self.substs = self.PopulateOnGetDict(EmptyValue, {
+ # These 2 variables are used semi-frequently and it isn't worth
+ # changing all the instances.
+ b'MOZ_APP_NAME': b'empty',
+ b'MOZ_CHILD_PROCESS_NAME': b'empty',
+ # Set manipulations are performed within the moz.build files. But
+ # set() is not an exposed symbol, so we can't create an empty set.
+ b'NECKO_PROTOCOLS': set(),
+ # Needed to prevent js/src's config.status from loading.
+ b'JS_STANDALONE': b'1',
+ })
+ udict = {}
+ for k, v in self.substs.items():
+ if isinstance(v, str):
+ udict[k.decode('utf-8')] = v.decode('utf-8')
+ else:
+ udict[k] = v
+ self.substs_unicode = self.PopulateOnGetDict(EmptyValue, udict)
+ self.defines = self.substs
+ self.external_source_dir = None
+ self.error_is_fatal = False
+
+
+def is_read_allowed(path, config):
+ """Whether we are allowed to load a mozbuild file at the specified path.
+
+ This is used as cheap security to ensure the build is isolated to known
+ source directories.
+
+ We are allowed to read from the main source directory and any defined
+ external source directories. The latter is to allow 3rd party applications
+ to hook into our build system.
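+
+ For instance, with topsrcdir /src and no external source directory,
+ /src/dom/moz.build may be read while /tmp/moz.build may not
+ (illustrative paths).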
+ """
+ assert os.path.isabs(path)
+ assert os.path.isabs(config.topsrcdir)
+
+ path = mozpath.normpath(path)
+ topsrcdir = mozpath.normpath(config.topsrcdir)
+
+ if mozpath.basedir(path, [topsrcdir]):
+ return True
+
+ if config.external_source_dir:
+ external_dir = os.path.normcase(config.external_source_dir)
+ norm_path = os.path.normcase(path)
+ if mozpath.basedir(norm_path, [external_dir]):
+ return True
+
+ return False
+
+
+class SandboxCalledError(SandboxError):
+ """Represents an error resulting from calling the error() function."""
+
+ def __init__(self, file_stack, message):
+ SandboxError.__init__(self, file_stack)
+ self.message = message
+
+
+class MozbuildSandbox(Sandbox):
+ """Implementation of a Sandbox tailored for mozbuild files.
+
+ We expose a few useful functions, as well as the set of variables
+ defining Mozilla's build system.
+
+ context is a Context instance.
+
+ metadata is a dict of metadata that can be used during the sandbox
+ evaluation.
+ """
+ def __init__(self, context, metadata={}, finder=default_finder):
+ assert isinstance(context, Context)
+
+ Sandbox.__init__(self, context, finder=finder)
+
+ self._log = logging.getLogger(__name__)
+
+ self.metadata = dict(metadata)
+ exports = self.metadata.get('exports', {})
+ self.exports = set(exports.keys())
+ context.update(exports)
+ self.templates = self.metadata.setdefault('templates', {})
+ self.special_variables = self.metadata.setdefault('special_variables',
+ SPECIAL_VARIABLES)
+ self.functions = self.metadata.setdefault('functions', FUNCTIONS)
+ self.subcontext_types = self.metadata.setdefault('subcontexts',
+ SUBCONTEXTS)
+
+ def __getitem__(self, key):
+ if key in self.special_variables:
+ return self.special_variables[key][0](self._context)
+ if key in self.functions:
+ return self._create_function(self.functions[key])
+ if key in self.subcontext_types:
+ return self._create_subcontext(self.subcontext_types[key])
+ if key in self.templates:
+ return self._create_template_wrapper(self.templates[key])
+ return Sandbox.__getitem__(self, key)
+
+ def __contains__(self, key):
+ if any(key in d for d in (self.special_variables, self.functions,
+ self.subcontext_types, self.templates)):
+ return True
+
+ return Sandbox.__contains__(self, key)
+
+ def __setitem__(self, key, value):
+ if key in self.special_variables and value is self[key]:
+ return
+ if key in self.special_variables or key in self.functions or key in self.subcontext_types:
+ raise KeyError('Cannot set "%s" because it is a reserved keyword'
+ % key)
+ if key in self.exports:
+ self._context[key] = value
+ self.exports.remove(key)
+ return
+ Sandbox.__setitem__(self, key, value)
+
+ def exec_file(self, path):
+ """Override exec_file to normalize paths and restrict file loading.
+
+ Paths will be rejected if they do not fall under topsrcdir or one of
+ the external roots.
+ """
+
+ # realpath() would be needed for true security. But this check isn't
+ # meant as hard security protection, so it is omitted.
+ if not is_read_allowed(path, self._context.config):
+ raise SandboxLoadError(self._context.source_stack,
+ sys.exc_info()[2], illegal_path=path)
+
+ Sandbox.exec_file(self, path)
+
+ def _add_java_jar(self, name):
+ """Add a Java JAR build target."""
+ if not name:
+ raise Exception('Java JAR cannot be registered without a name')
+
+ if '/' in name or '\\' in name or '.jar' in name:
+ raise Exception('Java JAR names must not include slashes or'
+ ' .jar: %s' % name)
+
+ if name in self['JAVA_JAR_TARGETS']:
+ raise Exception('Java JAR has already been registered: %s' % name)
+
+ jar = JavaJarData(name)
+ self['JAVA_JAR_TARGETS'][name] = jar
+ return jar
+
+ # Not exposed to the sandbox.
+ def add_android_eclipse_project_helper(self, name):
+ """Add an Android Eclipse project target."""
+ if not name:
+ raise Exception('Android Eclipse project cannot be registered without a name')
+
+ if name in self['ANDROID_ECLIPSE_PROJECT_TARGETS']:
+ raise Exception('Android Eclipse project has already been registered: %s' % name)
+
+ data = AndroidEclipseProjectData(name)
+ self['ANDROID_ECLIPSE_PROJECT_TARGETS'][name] = data
+ return data
+
+ def _add_android_eclipse_project(self, name, manifest):
+ if not manifest:
+ raise Exception('Android Eclipse project must specify a manifest')
+
+ data = self.add_android_eclipse_project_helper(name)
+ data.manifest = manifest
+ data.is_library = False
+ return data
+
+ def _add_android_eclipse_library_project(self, name):
+ data = self.add_android_eclipse_project_helper(name)
+ data.manifest = None
+ data.is_library = True
+ return data
+
+ def _export(self, varname):
+ """Export the variable to all subdirectories of the current path."""
+
+ exports = self.metadata.setdefault('exports', dict())
+ if varname in exports:
+ raise Exception('Variable has already been exported: %s' % varname)
+
+ try:
+ # Doing a regular self._context[varname] causes a set as a side
+ # effect. By calling the dict method instead, we don't have any
+ # side effects.
+ exports[varname] = dict.__getitem__(self._context, varname)
+ except KeyError:
+ self.last_name_error = KeyError('global_ns', 'get_unknown', varname)
+ raise self.last_name_error
+
+ def recompute_exports(self):
+ """Recompute the variables to export to subdirectories with the current
+ values in the subdirectory."""
+
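+ # For example (illustrative): if a parent moz.build ran export('FOO')
+ # and this file later assigned FOO a new value, the exported value is
+ # refreshed here so deeper subdirectories observe the new value.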
+ if 'exports' in self.metadata:
+ for key in self.metadata['exports']:
+ self.metadata['exports'][key] = self[key]
+
+ def _include(self, path):
+ """Include and exec another file within the context of this one."""
+
+ # path is a SourcePath
+ self.exec_file(path.full_path)
+
+ def _warning(self, message):
+ # FUTURE consider capturing warnings in a variable instead of printing.
+ print('WARNING: %s' % message, file=sys.stderr)
+
+ def _error(self, message):
+ if self._context.error_is_fatal:
+ raise SandboxCalledError(self._context.source_stack, message)
+ else:
+ self._warning(message)
+
+ def _template_decorator(self, func):
+ """Registers a template function."""
+
+ if not inspect.isfunction(func):
+ raise Exception('`template` is a function decorator. You must '
+ 'use it as `@template` preceding a function declaration.')
+
+ name = func.func_name
+
+ if name in self.templates:
+ raise KeyError(
+ 'A template named "%s" was already declared in %s.' % (name,
+ self.templates[name].path))
+
+ if name.islower() or name.isupper() or name[0].islower():
+ raise NameError('Template function names must be CamelCase.')
+
+ self.templates[name] = TemplateFunction(func, self)
+
+ @memoize
+ def _create_subcontext(self, cls):
+ """Return a function object that creates SubContext instances."""
+ def fn(*args, **kwargs):
+ return cls(self._context, *args, **kwargs)
+
+ return fn
+
+ @memoize
+ def _create_function(self, function_def):
+ """Returns a function object for use within the sandbox for the given
+ function definition.
+
+ The wrapper function does type coercion on the function arguments.
+ """
+ func, args_def, doc = function_def
+ def function(*args):
+ def coerce(arg, type):
+ if not isinstance(arg, type):
+ if issubclass(type, ContextDerivedValue):
+ arg = type(self._context, arg)
+ else:
+ arg = type(arg)
+ return arg
+ args = [coerce(arg, type) for arg, type in zip(args, args_def)]
+ return func(self)(*args)
+
+ return function
+
+ @memoize
+ def _create_template_wrapper(self, template):
+ """Returns a function object for use within the sandbox for the given
+ TemplateFunction instance.
+
+ When a moz.build file contains a reference to a template call, the
+ sandbox needs a function to execute. This is what this method returns.
+ That function creates a new sandbox for execution of the template.
+ After the template is executed, the data from its execution is merged
+ with the context of the calling sandbox.
+ """
+ def template_wrapper(*args, **kwargs):
+ context = TemplateContext(
+ template=template.name,
+ allowed_variables=self._context._allowed_variables,
+ config=self._context.config)
+ context.add_source(self._context.current_path)
+ for p in self._context.all_paths:
+ context.add_source(p)
+
+ sandbox = MozbuildSandbox(context, metadata={
+ # We should arguably set these defaults to something else.
+ # Templates, for example, should arguably come from the state
+ # of the sandbox from when the template was declared, not when
+ # it was instantiated. Bug 1137319.
+ 'functions': self.metadata.get('functions', {}),
+ 'special_variables': self.metadata.get('special_variables', {}),
+ 'subcontexts': self.metadata.get('subcontexts', {}),
+ 'templates': self.metadata.get('templates', {})
+ }, finder=self._finder)
+
+ template.exec_in_sandbox(sandbox, *args, **kwargs)
+
+ # This is gross, but allows the merge to happen. Eventually, the
+ # merging will go away and template contexts will be emitted independently.
+ klass = self._context.__class__
+ self._context.__class__ = TemplateContext
+ # The sandbox will do all the necessary checks for these merges.
+ for key, value in context.items():
+ if isinstance(value, dict):
+ self[key].update(value)
+ elif isinstance(value, (list, HierarchicalStringList)):
+ self[key] += value
+ else:
+ self[key] = value
+ self._context.__class__ = klass
+
+ for p in context.all_paths:
+ self._context.add_source(p)
+
+ return template_wrapper
+
+
+class TemplateFunction(object):
+ def __init__(self, func, sandbox):
+ self.path = func.func_code.co_filename
+ self.name = func.func_name
+
+ code = func.func_code
+ firstlineno = code.co_firstlineno
+ lines = sandbox._current_source.splitlines(True)
+ lines = inspect.getblock(lines[firstlineno - 1:])
+
+ # The code lines we get out of inspect.getsourcelines look like
+ # @template
+ # def Template(*args, **kwargs):
+ # VAR = 'value'
+ # ...
+ func_ast = ast.parse(''.join(lines), self.path)
+ # Remove decorators
+ func_ast.body[0].decorator_list = []
+ # Adjust line numbers accordingly
+ ast.increment_lineno(func_ast, firstlineno - 1)
+
+ # When using a custom dictionary for function globals/locals, CPython
+ # actually never calls __getitem__ and __setitem__, so we need to
+ # modify the AST so that accesses to globals are properly directed
+ # to a dict.
+ self._global_name = b'_data' # AST wants str for this, not unicode
+ # In case '_data' is a name used for a variable in the function code,
+ # prepend more underscores until we find an unused name.
+ while (self._global_name in code.co_names or
+ self._global_name in code.co_varnames):
+ self._global_name += '_'
+ func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast)
+
+ # Execute the rewritten code. That code now looks like:
+ # def Template(*args, **kwargs):
+ # _data['VAR'] = 'value'
+ # ...
+ # The result of executing this code is the creation of a 'Template'
+ # function object in the global namespace.
+ glob = {'__builtins__': sandbox._builtins}
+ func = types.FunctionType(
+ compile(func_ast, self.path, 'exec'),
+ glob,
+ self.name,
+ func.func_defaults,
+ func.func_closure,
+ )
+ func()
+
+ self._func = glob[self.name]
+
+ def exec_in_sandbox(self, sandbox, *args, **kwargs):
+ """Executes the template function in the given sandbox."""
+ # Create a new function object associated with the execution sandbox
+ glob = {
+ self._global_name: sandbox,
+ '__builtins__': sandbox._builtins
+ }
+ func = types.FunctionType(
+ self._func.func_code,
+ glob,
+ self.name,
+ self._func.func_defaults,
+ self._func.func_closure
+ )
+ sandbox.exec_function(func, args, kwargs, self.path,
+ becomes_current_path=False)
+
+ class RewriteName(ast.NodeTransformer):
+ """AST Node Transformer to rewrite variable accesses to go through
+ a dict.
+ """
+ def __init__(self, sandbox, global_name):
+ self._sandbox = sandbox
+ self._global_name = global_name
+
+ def visit_Str(self, node):
+ # String nodes we got from the AST parser are str, but we want
+ # unicode literals everywhere, so transform them.
+ node.s = unicode(node.s)
+ return node
+
+ def visit_Name(self, node):
+ # Modify uppercase variable references and names known to the
+ # sandbox as if they were retrieved from a dict instead.
+ if not node.id.isupper() and node.id not in self._sandbox:
+ return node
+
+ def c(new_node):
+ return ast.copy_location(new_node, node)
+
+ return c(ast.Subscript(
+ value=c(ast.Name(id=self._global_name, ctx=ast.Load())),
+ slice=c(ast.Index(value=c(ast.Str(s=node.id)))),
+ ctx=node.ctx
+ ))
+
+
+class SandboxValidationError(Exception):
+ """Represents an error encountered when validating sandbox results."""
+ def __init__(self, message, context):
+ Exception.__init__(self, message)
+ self.context = context
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = '=' * 30
+ s.write('\n%s\nERROR PROCESSING MOZBUILD FILE\n%s\n\n' % (delim, delim))
+
+ s.write('The error occurred while processing the following file or ')
+ s.write('one of the files it includes:\n')
+ s.write('\n')
+ s.write(' %s/moz.build\n' % self.context.srcdir)
+ s.write('\n')
+
+ s.write('The error occurred when validating the result of ')
+ s.write('the execution. The reported error is:\n')
+ s.write('\n')
+ s.write(''.join(' %s\n' % l
+ for l in self.message.splitlines()))
+ s.write('\n')
+
+ return s.getvalue()
+
+
+class BuildReaderError(Exception):
+ """Represents errors encountered during BuildReader execution.
+
+ The main purpose of this class is to facilitate user-actionable error
+ messages. Execution errors should say:
+
+ - Why they failed
+ - Where they failed
+ - What can be done to prevent the error
+
+ A lot of the code in this class should arguably be inside sandbox.py.
+ However, extraction is somewhat difficult given the additions
+ MozbuildSandbox has over Sandbox (e.g. the concept of included files -
+ which affect error messages, of course).
+ """
+ def __init__(self, file_stack, trace, sandbox_exec_error=None,
+ sandbox_load_error=None, validation_error=None, other_error=None,
+ sandbox_called_error=None):
+
+ self.file_stack = file_stack
+ self.trace = trace
+ self.sandbox_called_error = sandbox_called_error
+ self.sandbox_exec = sandbox_exec_error
+ self.sandbox_load = sandbox_load_error
+ self.validation_error = validation_error
+ self.other = other_error
+
+ @property
+ def main_file(self):
+ return self.file_stack[-1]
+
+ @property
+ def actual_file(self):
+ # We report the file that called out to the file that couldn't load.
+ if self.sandbox_load is not None:
+ if len(self.sandbox_load.file_stack) > 1:
+ return self.sandbox_load.file_stack[-2]
+
+ if len(self.file_stack) > 1:
+ return self.file_stack[-2]
+
+ if self.sandbox_error is not None and \
+ len(self.sandbox_error.file_stack):
+ return self.sandbox_error.file_stack[-1]
+
+ return self.file_stack[-1]
+
+ @property
+ def sandbox_error(self):
+ return self.sandbox_exec or self.sandbox_load or \
+ self.sandbox_called_error
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = '=' * 30
+ s.write('\n%s\nERROR PROCESSING MOZBUILD FILE\n%s\n\n' % (delim, delim))
+
+ s.write('The error occurred while processing the following file:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.actual_file)
+ s.write('\n')
+
+ if self.actual_file != self.main_file and not self.sandbox_load:
+ s.write('This file was included as part of processing:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.main_file)
+ s.write('\n')
+
+ if self.sandbox_error is not None:
+ self._print_sandbox_error(s)
+ elif self.validation_error is not None:
+ s.write('The error occurred when validating the result of ')
+ s.write('the execution. The reported error is:\n')
+ s.write('\n')
+ s.write(''.join(' %s\n' % l
+ for l in self.validation_error.message.splitlines()))
+ s.write('\n')
+ else:
+ s.write('The error appears to be part of the %s ' % __name__)
+ s.write('Python module itself! It is possible you have stumbled ')
+ s.write('across a legitimate bug.\n')
+ s.write('\n')
+
+ for l in traceback.format_exception(type(self.other), self.other,
+ self.trace):
+ s.write(unicode(l))
+
+ return s.getvalue()
+
+ def _print_sandbox_error(self, s):
+ # Try to find the frame of the executed code.
+ script_frame = None
+
+ # We don't currently capture the trace for SandboxCalledError.
+ # Therefore, we don't get line numbers from the moz.build file.
+ # FUTURE capture this.
+ trace = getattr(self.sandbox_error, 'trace', None)
+ frames = []
+ if trace:
+ frames = traceback.extract_tb(trace)
+ for frame in frames:
+ if frame[0] == self.actual_file:
+ script_frame = frame
+
+ # Reset if we enter a new execution context. This prevents errors
+ # in this module from being attributed to a script.
+ elif frame[0] == __file__ and frame[2] == 'exec_function':
+ script_frame = None
+
+ if script_frame is not None:
+ s.write('The error was triggered on line %d ' % script_frame[1])
+ s.write('of this file:\n')
+ s.write('\n')
+ s.write(' %s\n' % script_frame[3])
+ s.write('\n')
+
+ if self.sandbox_called_error is not None:
+ self._print_sandbox_called_error(s)
+ return
+
+ if self.sandbox_load is not None:
+ self._print_sandbox_load_error(s)
+ return
+
+ self._print_sandbox_exec_error(s)
+
+ def _print_sandbox_called_error(self, s):
+ assert self.sandbox_called_error is not None
+
+ s.write('A moz.build file called the error() function.\n')
+ s.write('\n')
+ s.write('The error it encountered is:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.sandbox_called_error.message)
+ s.write('\n')
+ s.write('Correct the error condition and try again.\n')
+
+ def _print_sandbox_load_error(self, s):
+ assert self.sandbox_load is not None
+
+ if self.sandbox_load.illegal_path is not None:
+ s.write('The underlying problem is an illegal file access. ')
+ s.write('This is likely due to trying to access a file ')
+ s.write('outside of the top source directory.\n')
+ s.write('\n')
+ s.write('The path whose access was denied is:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.sandbox_load.illegal_path)
+ s.write('\n')
+ s.write('Modify the script to not access this file and ')
+ s.write('try again.\n')
+ return
+
+ if self.sandbox_load.read_error is not None:
+ if not os.path.exists(self.sandbox_load.read_error):
+ s.write('The underlying problem is we referenced a path ')
+ s.write('that does not exist. That path is:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.sandbox_load.read_error)
+ s.write('\n')
+ s.write('Either create the file if it needs to exist or ')
+ s.write('do not reference it.\n')
+ else:
+ s.write('The underlying problem is a referenced path could ')
+ s.write('not be read. The troublesome path is:\n')
+ s.write('\n')
+ s.write(' %s\n' % self.sandbox_load.read_error)
+ s.write('\n')
+ s.write('It is possible the path is not correct. Is it ')
+ s.write('pointing to a directory? It could also be a file ')
+ s.write('permissions issue. Ensure that the file is ')
+ s.write('readable.\n')
+
+ return
+
+ # This module is buggy if you see this.
+ raise AssertionError('SandboxLoadError with unhandled properties!')
+
+ def _print_sandbox_exec_error(self, s):
+ assert self.sandbox_exec is not None
+
+ inner = self.sandbox_exec.exc_value
+
+ if isinstance(inner, SyntaxError):
+ s.write('The underlying problem is a Python syntax error ')
+ s.write('on line %d:\n' % inner.lineno)
+ s.write('\n')
+ s.write(' %s\n' % inner.text)
+ if inner.offset:
+ s.write((' ' * (inner.offset + 4)) + '^\n')
+ s.write('\n')
+ s.write('Fix the syntax error and try again.\n')
+ return
+
+ if isinstance(inner, KeyError):
+ self._print_keyerror(inner, s)
+ elif isinstance(inner, ValueError):
+ self._print_valueerror(inner, s)
+ else:
+ self._print_exception(inner, s)
+
+ def _print_keyerror(self, inner, s):
+ if not inner.args or inner.args[0] not in ('global_ns', 'local_ns'):
+ self._print_exception(inner, s)
+ return
+
+ if inner.args[0] == 'global_ns':
+ import difflib
+
+ verb = None
+ if inner.args[1] == 'get_unknown':
+ verb = 'read'
+ elif inner.args[1] == 'set_unknown':
+ verb = 'write'
+ elif inner.args[1] == 'reassign':
+ s.write('The underlying problem is an attempt to reassign ')
+ s.write('a reserved UPPERCASE variable.\n')
+ s.write('\n')
+ s.write('The reassigned variable causing the error is:\n')
+ s.write('\n')
+ s.write(' %s\n' % inner.args[2])
+ s.write('\n')
+ s.write('Maybe you meant "+=" instead of "="?\n')
+ return
+ else:
+ raise AssertionError('Unhandled global_ns: %s' % inner.args[1])
+
+ s.write('The underlying problem is an attempt to %s ' % verb)
+ s.write('a reserved UPPERCASE variable that does not exist.\n')
+ s.write('\n')
+ s.write('The variable %s causing the error is:\n' % verb)
+ s.write('\n')
+ s.write(' %s\n' % inner.args[2])
+ s.write('\n')
+ close_matches = difflib.get_close_matches(inner.args[2],
+ VARIABLES.keys(), 2)
+ if close_matches:
+ s.write('Maybe you meant %s?\n' % ' or '.join(close_matches))
+ s.write('\n')
+
+ if inner.args[2] in DEPRECATION_HINTS:
+ s.write('%s\n' %
+ textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip())
+ return
+
+ s.write('Please change the file to not use this variable.\n')
+ s.write('\n')
+ s.write('For reference, the set of valid variables is:\n')
+ s.write('\n')
+ s.write(', '.join(sorted(VARIABLES.keys())) + '\n')
+ return
+
+ s.write('The underlying problem is a reference to an undefined ')
+ s.write('local variable:\n')
+ s.write('\n')
+ s.write(' %s\n' % inner.args[2])
+ s.write('\n')
+ s.write('Please change the file to not reference undefined ')
+ s.write('variables and try again.\n')
+
+ def _print_valueerror(self, inner, s):
+ if not inner.args or inner.args[0] not in ('global_ns', 'local_ns'):
+ self._print_exception(inner, s)
+ return
+
+ assert inner.args[1] == 'set_type'
+
+ s.write('The underlying problem is an attempt to write an illegal ')
+ s.write('value to a special variable.\n')
+ s.write('\n')
+ s.write('The variable whose value was rejected is:\n')
+ s.write('\n')
+ s.write(' %s\n' % inner.args[2])
+ s.write('\n')
+ s.write('The value being written to it was of the following type:\n')
+ s.write('\n')
+ s.write(' %s\n' % type(inner.args[3]).__name__)
+ s.write('\n')
+ s.write('This variable expects the following type(s):\n')
+ s.write('\n')
+ if type(inner.args[4]) == type_type:
+ s.write(' %s\n' % inner.args[4].__name__)
+ else:
+ for t in inner.args[4]:
+ s.write(' %s\n' % t.__name__)
+ s.write('\n')
+ s.write('Change the file to write a value of the appropriate type ')
+ s.write('and try again.\n')
+
+ def _print_exception(self, e, s):
+ s.write('An error was encountered as part of executing the file ')
+ s.write('itself. The error appears to be the fault of the script.\n')
+ s.write('\n')
+ s.write('The error as reported by Python is:\n')
+ s.write('\n')
+ for l in traceback.format_exception_only(type(e), e):
+ s.write(' %s' % l)
+
+
+class BuildReader(object):
+ """Read a tree of mozbuild files into data structures.
+
+ This is where the build system starts. You give it a tree configuration
+ (the output of configuration) and it executes the moz.build files and
+ collects the data they define.
+
+ The reader can optionally call a callable after each sandbox is evaluated
+ but before its evaluated content is processed. This gives callers the
+ opportunity to modify contexts before side-effects occur from their
+ content. This callback receives the ``Context`` containing the result of
+ each sandbox evaluation. Its return value is ignored.
+ """
+
+ def __init__(self, config, finder=default_finder):
+ self.config = config
+
+ self._log = logging.getLogger(__name__)
+ self._read_files = set()
+ self._execution_stack = []
+ self._finder = finder
+
+ self._execution_time = 0.0
+ self._file_count = 0
+
+ def summary(self):
+ return ExecutionSummary(
+ 'Finished reading {file_count:d} moz.build files in '
+ '{execution_time:.2f}s',
+ file_count=self._file_count,
+ execution_time=self._execution_time)
+
+ def read_topsrcdir(self):
+ """Read the tree of linked moz.build files.
+
+ This starts with the tree's top-most moz.build file and descends into
+ all linked moz.build files until all relevant files have been evaluated.
+
+ This is a generator of Context instances. As each moz.build file is
+ read, a new Context is created and emitted.
+ """
+ path = mozpath.join(self.config.topsrcdir, 'moz.build')
+ return self.read_mozbuild(path, self.config)
+
+ def all_mozbuild_paths(self):
+ """Iterator over all available moz.build files.
+
+ This method has little to do with the reader. It should arguably belong
+ elsewhere.
+ """
+ # In the future, we may traverse moz.build files by looking
+ # for DIRS references in the AST, even if a directory is added behind
+ # a conditional. For now, just walk the filesystem.
+ ignore = {
+ # Ignore fake moz.build files used for testing moz.build.
+ 'python/mozbuild/mozbuild/test',
+
+ # Ignore object directories.
+ 'obj*',
+ }
+
+ finder = FileFinder(self.config.topsrcdir, find_executables=False,
+ ignore=ignore)
+
+ # The root doesn't get picked up by FileFinder.
+ yield 'moz.build'
+
+ for path, f in finder.find('**/moz.build'):
+ yield path
+
+ def find_sphinx_variables(self):
+ """This function finds all assignments of Sphinx documentation variables.
+
+ This is a generator of tuples of (moz.build path, var, key, value). For
+ variables that assign to keys in objects, key will be defined.
+
+ With a little work, this function could be made more generic. But if we
+ end up writing a lot of ast code, it might be best to import a
+ high-level AST manipulation library into the tree.
+ """
+ # This function looks for assignments to SPHINX_TREES and
+ # SPHINX_PYTHON_PACKAGE_DIRS variables.
+ #
+ # SPHINX_TREES is a dict. Keys and values should both be strings. The
+ # target of the assignment should be a Subscript node. The value
+ # assigned should be a Str node. e.g.
+ #
+ # SPHINX_TREES['foo'] = 'bar'
+ #
+ # This is an Assign node with a Subscript target. The Subscript's value
+ # is a Name node with id "SPHINX_TREES." The slice of this target
+ # is an Index node and its value is a Str with value "foo."
+ #
+ # SPHINX_PYTHON_PACKAGE_DIRS is a simple list. The target of the
+ # assignment should be a Name node. Values should be a List node, whose
+ # elements are Str nodes. e.g.
+ #
+ # SPHINX_PYTHON_PACKAGE_DIRS += ['foo']
+ #
+ # This is an AugAssign node with a Name target with id
+ # "SPHINX_PYTHON_PACKAGE_DIRS." The value is a List node containing 1
+ # Str elt whose value is "foo."
+ relevant = [
+ 'SPHINX_TREES',
+ 'SPHINX_PYTHON_PACKAGE_DIRS',
+ ]
+
+ def assigned_variable(node):
+ # This is not correct, but we don't care yet.
+ if hasattr(node, 'targets'):
+ # Nothing in moz.build does multi-assignment (yet). So error if
+ # we see it.
+ assert len(node.targets) == 1
+
+ target = node.targets[0]
+ else:
+ target = node.target
+
+ if isinstance(target, ast.Subscript):
+ if not isinstance(target.value, ast.Name):
+ return None, None
+ name = target.value.id
+ elif isinstance(target, ast.Name):
+ name = target.id
+ else:
+ return None, None
+
+ if name not in relevant:
+ return None, None
+
+ key = None
+ if isinstance(target, ast.Subscript):
+ assert isinstance(target.slice, ast.Index)
+ assert isinstance(target.slice.value, ast.Str)
+ key = target.slice.value.s
+
+ return name, key
+
+ def assigned_values(node):
+ value = node.value
+ if isinstance(value, ast.List):
+ for v in value.elts:
+ assert isinstance(v, ast.Str)
+ yield v.s
+ else:
+ assert isinstance(value, ast.Str)
+ yield value.s
+
+ assignments = []
+
+ class Visitor(ast.NodeVisitor):
+ def helper(self, node):
+ name, key = assigned_variable(node)
+ if not name:
+ return
+
+ for v in assigned_values(node):
+ assignments.append((name, key, v))
+
+ def visit_Assign(self, node):
+ self.helper(node)
+
+ def visit_AugAssign(self, node):
+ self.helper(node)
+
+ for p in self.all_mozbuild_paths():
+ assignments[:] = []
+ full = os.path.join(self.config.topsrcdir, p)
+
+ with open(full, 'rb') as fh:
+ source = fh.read()
+
+ tree = ast.parse(source, full)
+ Visitor().visit(tree)
+
+ for name, key, value in assignments:
+ yield p, name, key, value
+
+ def read_mozbuild(self, path, config, descend=True, metadata={}):
+ """Read and process a mozbuild file, descending into children.
+
+ This starts with a single mozbuild file, executes it, and descends into
+ other referenced files per our traversal logic.
+
+ The traversal logic is to iterate over the *DIRS variables, treating
+ each element as a relative directory path. For each encountered
+ directory, we will open the moz.build file located in that
+ directory in a new Sandbox and process it.
+
+ If descend is True (the default), we will descend into child
+ directories and files per variable values.
+
+ Arbitrary metadata in the form of a dict can be passed into this
+ function. This feature is intended to facilitate the build reader
+ injecting state and annotations into moz.build files that is
+ independent of the sandbox's execution context.
+
+ Traversal is performed depth first (for no particular reason).
+ """
+ self._execution_stack.append(path)
+ try:
+ for s in self._read_mozbuild(path, config, descend=descend,
+ metadata=metadata):
+ yield s
+
+ except BuildReaderError as bre:
+ raise bre
+
+ except SandboxCalledError as sce:
+ raise BuildReaderError(list(self._execution_stack),
+ sys.exc_info()[2], sandbox_called_error=sce)
+
+ except SandboxExecutionError as se:
+ raise BuildReaderError(list(self._execution_stack),
+ sys.exc_info()[2], sandbox_exec_error=se)
+
+ except SandboxLoadError as sle:
+ raise BuildReaderError(list(self._execution_stack),
+ sys.exc_info()[2], sandbox_load_error=sle)
+
+ except SandboxValidationError as ve:
+ raise BuildReaderError(list(self._execution_stack),
+ sys.exc_info()[2], validation_error=ve)
+
+ except Exception as e:
+ raise BuildReaderError(list(self._execution_stack),
+ sys.exc_info()[2], other_error=e)
+
+ def _read_mozbuild(self, path, config, descend, metadata):
+ path = mozpath.normpath(path)
+ log(self._log, logging.DEBUG, 'read_mozbuild', {'path': path},
+ 'Reading file: {path}')
+
+ if path in self._read_files:
+ log(self._log, logging.WARNING, 'read_already', {'path': path},
+ 'File already read. Skipping: {path}')
+ return
+
+ self._read_files.add(path)
+
+ time_start = time.time()
+
+ topobjdir = config.topobjdir
+
+ if not mozpath.basedir(path, [config.topsrcdir]):
+ external = config.external_source_dir
+ if external and mozpath.basedir(path, [external]):
+ config = ConfigEnvironment.from_config_status(
+ mozpath.join(topobjdir, 'config.status'))
+ config.topsrcdir = external
+ config.external_source_dir = None
+
+ relpath = mozpath.relpath(path, config.topsrcdir)
+ reldir = mozpath.dirname(relpath)
+
+ if mozpath.dirname(relpath) == 'js/src' and \
+ not config.substs.get('JS_STANDALONE'):
+ config = ConfigEnvironment.from_config_status(
+ mozpath.join(topobjdir, reldir, 'config.status'))
+ config.topobjdir = topobjdir
+ config.external_source_dir = None
+
+ context = Context(VARIABLES, config, self._finder)
+ sandbox = MozbuildSandbox(context, metadata=metadata,
+ finder=self._finder)
+ sandbox.exec_file(path)
+ self._execution_time += time.time() - time_start
+ self._file_count += len(context.all_paths)
+
+ # Yield main context before doing any processing. This gives immediate
+ # consumers an opportunity to change state before our remaining
+ # processing is performed.
+ yield context
+
+ # We need the list of directories pre-gyp processing for later.
+ dirs = list(context.get('DIRS', []))
+
+ curdir = mozpath.dirname(path)
+
+ gyp_contexts = []
+ for target_dir in context.get('GYP_DIRS', []):
+ gyp_dir = context['GYP_DIRS'][target_dir]
+ for v in ('input', 'variables'):
+ if not getattr(gyp_dir, v):
+ raise SandboxValidationError('Missing value for '
+ 'GYP_DIRS["%s"].%s' % (target_dir, v), context)
+
+ # The make backend assumes contexts for sub-directories are
+ # emitted after their parent, so accumulate the gyp contexts.
+ # We could emit the parent context before processing gyp
+ # configuration, but we need to add the gyp objdirs to that context
+ # first.
+ from .gyp_reader import read_from_gyp
+ non_unified_sources = set()
+ for s in gyp_dir.non_unified_sources:
+ source = SourcePath(context, s)
+ if not self._finder.get(source.full_path):
+ raise SandboxValidationError('Cannot find %s.' % source,
+ context)
+ non_unified_sources.add(source)
+ time_start = time.time()
+ for gyp_context in read_from_gyp(context.config,
+ mozpath.join(curdir, gyp_dir.input),
+ mozpath.join(context.objdir,
+ target_dir),
+ gyp_dir.variables,
+ non_unified_sources=non_unified_sources):
+ gyp_context.update(gyp_dir.sandbox_vars)
+ gyp_contexts.append(gyp_context)
+ self._file_count += len(gyp_context.all_paths)
+ self._execution_time += time.time() - time_start
+
+ for gyp_context in gyp_contexts:
+ context['DIRS'].append(mozpath.relpath(gyp_context.objdir, context.objdir))
+ sandbox.subcontexts.append(gyp_context)
+
+ for subcontext in sandbox.subcontexts:
+ yield subcontext
+
+ # Traverse into referenced files.
+
+ # It's very tempting to use a set here. Unfortunately, the recursive
+ # make backend needs order preserved. Once we autogenerate all backend
+ # files, we should be able to convert this to a set.
+ recurse_info = OrderedDict()
+ for d in dirs:
+ if d in recurse_info:
+ raise SandboxValidationError(
+ 'Directory (%s) registered multiple times' % (
+ mozpath.relpath(d.full_path, context.srcdir)),
+ context)
+
+ recurse_info[d] = {}
+ for key in sandbox.metadata:
+ if key == 'exports':
+ sandbox.recompute_exports()
+
+ recurse_info[d][key] = dict(sandbox.metadata[key])
+
+ for path, child_metadata in recurse_info.items():
+ child_path = path.join('moz.build').full_path
+
+ # Ensure we don't break out of the topsrcdir. We don't do realpath
+ # because it isn't necessary. If there are symlinks in the srcdir,
+ # that's not our problem. We're not a hosted application: we don't
+ # need to worry about security too much.
+ if not is_read_allowed(child_path, context.config):
+ raise SandboxValidationError(
+ 'Attempting to process file outside of allowed paths: %s' %
+ child_path, context)
+
+ if not descend:
+ continue
+
+ for res in self.read_mozbuild(child_path, context.config,
+ metadata=child_metadata):
+ yield res
+
+ self._execution_stack.pop()
+
+ def _find_relevant_mozbuilds(self, paths):
+ """Given a set of filesystem paths, find all relevant moz.build files.
+
+ We assume that a moz.build file in the directory ancestry of a given path
+ is relevant to that path. Let's say we have the following files on disk::
+
+ moz.build
+ foo/moz.build
+ foo/baz/moz.build
+ foo/baz/file1
+ other/moz.build
+ other/file2
+
+ If ``foo/baz/file1`` is passed in, the relevant moz.build files are
+ ``moz.build``, ``foo/moz.build``, and ``foo/baz/moz.build``. For
+ ``other/file2``, the relevant moz.build files are ``moz.build`` and
+ ``other/moz.build``.
+
+ Returns a dict of input paths to a list of relevant moz.build files.
+ The root moz.build file is first and the leaf-most moz.build is last.
+ """
+ root = self.config.topsrcdir
+ result = {}
+
+ @memoize
+ def exists(path):
+ return self._finder.get(path) is not None
+
+ def itermozbuild(path):
+ subpath = ''
+ yield 'moz.build'
+ for part in mozpath.split(path):
+ subpath = mozpath.join(subpath, part)
+ yield mozpath.join(subpath, 'moz.build')
+
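+ # e.g. itermozbuild('foo/baz/file1') yields 'moz.build',
+ # 'foo/moz.build', 'foo/baz/moz.build' and 'foo/baz/file1/moz.build';
+ # candidates that do not exist are filtered out in the loop below.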
+ for path in sorted(paths):
+ path = mozpath.normpath(path)
+ if os.path.isabs(path):
+ if not mozpath.basedir(path, [root]):
+ raise Exception('Path outside topsrcdir: %s' % path)
+ path = mozpath.relpath(path, root)
+
+ result[path] = [p for p in itermozbuild(path)
+ if exists(mozpath.join(root, p))]
+
+ return result
+
+ def read_relevant_mozbuilds(self, paths):
+ """Read and process moz.build files relevant for a set of paths.
+
+ For an iterable of relative-to-root filesystem paths ``paths``,
+ find all moz.build files that may apply to them based on filesystem
+ hierarchy and read those moz.build files.
+
+ The return value is a 2-tuple. The first item is a dict mapping each
+ input filesystem path to a list of Context instances that are relevant
+ to that path. The second item is a list of all Context instances. Each
+ Context instance is in both data structures.
+ """
+ relevants = self._find_relevant_mozbuilds(paths)
+
+ topsrcdir = self.config.topsrcdir
+
+ # Source moz.build file to directories to traverse.
+ dirs = defaultdict(set)
+ # Relevant path to absolute paths of relevant contexts.
+ path_mozbuilds = {}
+
+ # There is room to improve this code (and the code in
+ # _find_relevant_mozbuilds) to better handle multiple files in the same
+ # directory. Bug 1136966 tracks.
+ for path, mbpaths in relevants.items():
+ path_mozbuilds[path] = [mozpath.join(topsrcdir, p) for p in mbpaths]
+
+ for i, mbpath in enumerate(mbpaths[0:-1]):
+ source_dir = mozpath.dirname(mbpath)
+ target_dir = mozpath.dirname(mbpaths[i + 1])
+
+ d = mozpath.normpath(mozpath.join(topsrcdir, mbpath))
+ dirs[d].add(mozpath.relpath(target_dir, source_dir))
+
+ # Exporting doesn't work reliably in tree traversal mode. Override
+ # the function to no-op.
+ functions = dict(FUNCTIONS)
+ def export(sandbox):
+ return lambda varname: None
+ functions['export'] = tuple([export] + list(FUNCTIONS['export'][1:]))
+
+ metadata = {
+ 'functions': functions,
+ }
+
+ contexts = defaultdict(list)
+ all_contexts = []
+ for context in self.read_mozbuild(mozpath.join(topsrcdir, 'moz.build'),
+ self.config, metadata=metadata):
+ # Explicitly set directory traversal variables to override default
+ # traversal rules.
+ if not isinstance(context, SubContext):
+ for v in ('DIRS', 'GYP_DIRS'):
+ context[v][:] = []
+
+ context['DIRS'] = sorted(dirs[context.main_path])
+
+ contexts[context.main_path].append(context)
+ all_contexts.append(context)
+
+ result = {}
+ for path, paths in path_mozbuilds.items():
+ result[path] = reduce(lambda x, y: x + y, (contexts[p] for p in paths), [])
+
+ return result, all_contexts
+
+ def files_info(self, paths):
+ """Obtain aggregate data from Files for a set of files.
+
+ Given a set of input paths, determine which moz.build files may
+ define metadata for them, evaluate those moz.build files, and
+ apply file metadata rules defined within to determine metadata
+ values for each file requested.
+
+ Essentially, for each input path:
+
+ 1. Determine the set of moz.build files relevant to that file by
+ looking for moz.build files in ancestor directories.
+ 2. Evaluate moz.build files starting with the most distant.
+ 3. Iterate over Files sub-contexts.
+ 4. If the file pattern matches the file we're seeking info on,
+ apply attribute updates.
+ 5. Return the most recent value of attributes.
+ """
+ paths, _ = self.read_relevant_mozbuilds(paths)
+
+ r = {}
+
+ for path, ctxs in paths.items():
+ flags = Files(Context())
+
+ for ctx in ctxs:
+ if not isinstance(ctx, Files):
+ continue
+
+ relpath = mozpath.relpath(path, ctx.relsrcdir)
+ pattern = ctx.pattern
+
+ # Only do wildcard matching if the '*' character is present.
+ # Otherwise, mozpath.match will match directories, which we've
+ # arbitrarily chosen to not allow.
+ if pattern == relpath or \
+ ('*' in pattern and mozpath.match(relpath, pattern)):
+ flags += ctx
+
+ if not any([flags.test_tags, flags.test_files, flags.test_flavors]):
+ flags += self.test_defaults_for_path(ctxs)
+
+ r[path] = flags
+
+ return r
+
+ def test_defaults_for_path(self, ctxs):
+ # This names the context keys that will end up emitting a test
+ # manifest.
+ test_manifest_contexts = set(
+ ['%s_MANIFESTS' % key for key in TEST_MANIFESTS] +
+ ['%s_MANIFESTS' % flavor.upper() for flavor in REFTEST_FLAVORS] +
+ ['%s_MANIFESTS' % flavor.upper().replace('-', '_') for flavor in WEB_PLATFORM_TESTS_FLAVORS]
+ )
+
+ result_context = Files(Context())
+ for ctx in ctxs:
+ for key in ctx:
+ if key not in test_manifest_contexts:
+ continue
+ for paths, obj in ctx[key]:
+ if isinstance(paths, tuple):
+ path, tests_root = paths
+ tests_root = mozpath.join(ctx.relsrcdir, tests_root)
+ for t in (mozpath.join(tests_root, path) for path, _ in obj):
+ result_context.test_files.add(mozpath.dirname(t) + '/**')
+ else:
+ for t in obj.tests:
+ if isinstance(t, tuple):
+ path, _ = t
+ relpath = mozpath.relpath(path,
+ self.config.topsrcdir)
+ else:
+ relpath = t['relpath']
+ result_context.test_files.add(mozpath.dirname(relpath) + '/**')
+ return result_context
diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py
new file mode 100644
index 000000000..0bf1599f2
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/sandbox.py
@@ -0,0 +1,308 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Python sandbox implementation for build files.
+
+This module contains classes for Python sandboxes that execute in a
+highly-controlled environment.
+
+The main class is `Sandbox`. This provides an execution environment for Python
+code and is used to fill a Context instance for the takeaway information from
+the execution.
+
+Code in this module takes a different approach to exception handling compared
+to what you'd see elsewhere in Python. Arguments to built-in exceptions like
+KeyError are machine parseable. This machine-friendly data is used to present
+user-friendly error messages in the case of errors.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+import weakref
+
+from mozbuild.util import (
+ exec_,
+ ReadOnlyDict,
+)
+from .context import Context
+from mozpack.files import FileFinder
+
+
+default_finder = FileFinder('/', find_executables=False)
+
+
+def alphabetical_sorted(iterable, cmp=None, key=lambda x: x.lower(),
+ reverse=False):
+ """sorted() replacement for the sandbox, ordering alphabetically by
+ default.
+ """
+ return sorted(iterable, cmp, key, reverse)
+
+
+class SandboxError(Exception):
+ def __init__(self, file_stack):
+ self.file_stack = file_stack
+
+
+class SandboxExecutionError(SandboxError):
+ """Represents errors encountered during execution of a Sandbox.
+
+ This is a simple container exception. Its purpose is to capture state
+ so something else can report on it.
+ """
+ def __init__(self, file_stack, exc_type, exc_value, trace):
+ SandboxError.__init__(self, file_stack)
+
+ self.exc_type = exc_type
+ self.exc_value = exc_value
+ self.trace = trace
+
+
+class SandboxLoadError(SandboxError):
+ """Represents errors encountered when loading a file for execution.
+
+ This exception represents errors in a Sandbox that occurred as part of
+ loading a file. The error could have occurred in the course of executing
+ a file. If so, the file_stack will be non-empty and the file that caused
+ the load will be on top of the stack.
+ """
+ def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
+ SandboxError.__init__(self, file_stack)
+
+ self.trace = trace
+ self.illegal_path = illegal_path
+ self.read_error = read_error
+
+
+class Sandbox(dict):
+ """Represents a sandbox for executing Python code.
+
+ This class provides a sandbox for execution of a single mozbuild frontend
+ file. The results of that execution are stored in the Context instance given
+ as the ``context`` argument.
+
+ Sandbox is effectively a glorified wrapper around compile() + exec(). You
+ point it at some Python code and it executes it. The main difference from
+ executing Python code like normal is that the executed code is very limited
+ in what it can do: the sandbox only exposes a very limited set of Python
+ functionality. Only specific types and functions are available. This
+ prevents executed code from doing things like importing modules, opening
+ files, etc.
+
+ Sandbox instances act as the global namespace for the sandboxed execution
+ itself. They shall not be used to access the results of the execution.
+ Those results are available in the given Context instance after execution.
+
+ The Sandbox itself is responsible for enforcing rules such as forbidding
+ reassignment of variables.
+
+ Implementation note: Sandbox derives from dict because exec() insists that
+ what it is given for namespaces is a dict.
+ """
+ # The default set of builtins.
+ BUILTINS = ReadOnlyDict({
+ # Only real Python built-ins should go here.
+ 'None': None,
+ 'False': False,
+ 'True': True,
+ 'sorted': alphabetical_sorted,
+ 'int': int,
+ })
+
+ def __init__(self, context, builtins=None, finder=default_finder):
+ """Initialize a Sandbox ready for execution.
+ """
+ self._builtins = builtins or self.BUILTINS
+ dict.__setitem__(self, '__builtins__', self._builtins)
+
+ assert isinstance(self._builtins, ReadOnlyDict)
+ assert isinstance(context, Context)
+
+ # Contexts are modeled as a stack because multiple context managers
+ # may be active.
+ self._active_contexts = [context]
+
+ # Seen sub-contexts. Will be populated with other Context instances
+ # that were related to execution of this instance.
+ self.subcontexts = []
+
+ # We need to record this because it gets swallowed as part of
+ # evaluation.
+ self._last_name_error = None
+
+ # Current literal source being executed.
+ self._current_source = None
+
+ self._finder = finder
+
+ @property
+ def _context(self):
+ return self._active_contexts[-1]
+
+ def exec_file(self, path):
+ """Execute code at a path in the sandbox.
+
+ The path must be absolute.
+ """
+ assert os.path.isabs(path)
+
+ try:
+ source = self._finder.get(path).read()
+ except Exception as e:
+ raise SandboxLoadError(self._context.source_stack,
+ sys.exc_info()[2], read_error=path)
+
+ self.exec_source(source, path)
+
+ def exec_source(self, source, path=''):
+ """Execute Python code within a string.
+
+ The passed string should contain Python code to be executed. The string
+ will be compiled and executed.
+
+ You should almost always go through exec_file() because exec_source()
+ does not perform extra path normalization. This can cause relative
+ paths to behave weirdly.
+ """
+ def execute():
+ # compile() inherits the __future__ from the module by default. We
+ # do want Unicode literals.
+ code = compile(source, path, 'exec')
+ # We use ourself as the global namespace for the execution. There
+ # is no need for a separate local namespace as moz.build execution
+ # is flat, namespace-wise.
+ old_source = self._current_source
+ self._current_source = source
+ try:
+ exec_(code, self)
+ finally:
+ self._current_source = old_source
+
+ self.exec_function(execute, path=path)
+
+ def exec_function(self, func, args=(), kwargs={}, path='',
+ becomes_current_path=True):
+ """Execute function with the given arguments in the sandbox.
+ """
+ if path and becomes_current_path:
+ self._context.push_source(path)
+
+ old_sandbox = self._context._sandbox
+ self._context._sandbox = weakref.ref(self)
+
+ # We don't have to worry about bytecode generation here because we are
+ # too low-level for that. However, we could add bytecode generation via
+ # the marshal module if parsing performance were ever an issue.
+
+ old_source = self._current_source
+ self._current_source = None
+ try:
+ func(*args, **kwargs)
+ except SandboxError as e:
+ raise e
+ except NameError as e:
+ # A NameError is raised when a variable could not be found.
+ # The original KeyError has been dropped by the interpreter.
+ # However, we should have it cached in our instance!
+
+ # Unless a script is doing something wonky like catching NameError
+ # itself (that would be silly), if there is an exception on the
+ # global namespace, that's our error.
+ actual = e
+
+ if self._last_name_error is not None:
+ actual = self._last_name_error
+ source_stack = self._context.source_stack
+ if not becomes_current_path:
+ # Add current file to the stack because it wasn't added before
+ # sandbox execution.
+ source_stack.append(path)
+ raise SandboxExecutionError(source_stack, type(actual), actual,
+ sys.exc_info()[2])
+
+ except Exception as e:
+ # Need to copy the stack otherwise we get a reference and that is
+ # mutated during the finally.
+ exc = sys.exc_info()
+ source_stack = self._context.source_stack
+ if not becomes_current_path:
+ # Add current file to the stack because it wasn't added before
+ # sandbox execution.
+ source_stack.append(path)
+ raise SandboxExecutionError(source_stack, exc[0], exc[1], exc[2])
+ finally:
+ self._current_source = old_source
+ self._context._sandbox = old_sandbox
+ if path and becomes_current_path:
+ self._context.pop_source()
+
+ def push_subcontext(self, context):
+ """Push a SubContext onto the execution stack.
+
+ When called, the active context will be set to the specified context,
+ meaning all variable accesses will go through it. We also record this
+ SubContext as having been executed as part of this sandbox.
+ """
+ self._active_contexts.append(context)
+ if context not in self.subcontexts:
+ self.subcontexts.append(context)
+
+ def pop_subcontext(self, context):
+ """Pop a SubContext off the execution stack.
+
+ SubContexts must be pushed and popped in opposite order. This is
+ validated as part of the function call to ensure proper consumer API
+ use.
+ """
+ popped = self._active_contexts.pop()
+ assert popped == context
+
+ def __getitem__(self, key):
+ if key.isupper():
+ try:
+ return self._context[key]
+ except Exception as e:
+ self._last_name_error = e
+ raise
+
+ return dict.__getitem__(self, key)
+
+ def __setitem__(self, key, value):
+ if key in self._builtins or key == '__builtins__':
+ raise KeyError('Cannot reassign builtins')
+
+ if key.isupper():
+ # Forbid assigning over a previously set value. Interestingly, when
+ # doing FOO += ['bar'], python actually does something like:
+ # foo = namespace.__getitem__('FOO')
+ # foo.__iadd__(['bar'])
+ # namespace.__setitem__('FOO', foo)
+ # This means __setitem__ is called with the value that is already
+ # in the dict, when doing +=, which is permitted.
+ if key in self._context and self._context[key] is not value:
+ raise KeyError('global_ns', 'reassign', key)
+
+ if (key not in self._context and isinstance(value, (list, dict))
+ and not value):
+ raise KeyError('Variable %s assigned an empty value.' % key)
+
+ self._context[key] = value
+ else:
+ dict.__setitem__(self, key, value)
+
+ def get(self, key, default=None):
+ raise NotImplementedError('Not supported')
+
+ def __len__(self):
+ raise NotImplementedError('Not supported')
+
+ def __iter__(self):
+ raise NotImplementedError('Not supported')
+
+ def __contains__(self, key):
+ if key.isupper():
+ return key in self._context
+ return dict.__contains__(self, key)
diff --git a/python/mozbuild/mozbuild/html_build_viewer.py b/python/mozbuild/mozbuild/html_build_viewer.py
new file mode 100644
index 000000000..5151f04a4
--- /dev/null
+++ b/python/mozbuild/mozbuild/html_build_viewer.py
@@ -0,0 +1,120 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module contains code for running an HTTP server to view build info.
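+#
+# The handler below exposes a tiny HTTP API: GET /list enumerates the
+# registered JSON resources, GET /resources/<key> returns one of them,
+# POST /shutdown stops the server, and any other GET is served from the
+# static HTML document root.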
+
+from __future__ import absolute_import, unicode_literals
+
+import BaseHTTPServer
+import json
+import os
+
+import requests
+
+
+class HTTPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ def do_GET(self):
+ s = self.server.wrapper
+ p = self.path
+
+ if p == '/list':
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json; charset=utf-8')
+ self.end_headers()
+
+ keys = sorted(s.json_files.keys())
+ json.dump({'files': keys}, self.wfile)
+ return
+
+ if p.startswith('/resources/'):
+ key = p[len('/resources/'):]
+
+ if key not in s.json_files:
+ self.send_error(404)
+ return
+
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json; charset=utf-8')
+ self.end_headers()
+
+ self.wfile.write(s.json_files[key])
+ return
+
+ if p == '/':
+ p = '/index.html'
+
+ self.serve_docroot(s.doc_root, p[1:])
+
+ def do_POST(self):
+ if self.path == '/shutdown':
+ self.server.wrapper.do_shutdown = True
+ self.send_response(200)
+ return
+
+ self.send_error(404)
+
+ def serve_docroot(self, root, path):
+ local_path = os.path.normpath(os.path.join(root, path))
+
+ # Cheap security. This doesn't resolve symlinks, etc., but it should
+ # be acceptable since this server only runs locally.
+ if not local_path.startswith(root):
+ self.send_error(404)
+ return
+
+ if not os.path.exists(local_path):
+ self.send_error(404)
+ return
+
+ if os.path.isdir(local_path):
+ self.send_error(500)
+ return
+
+ self.send_response(200)
+ ct = 'text/plain'
+ if path.endswith('.html'):
+ ct = 'text/html'
+
+ self.send_header('Content-Type', ct)
+ self.end_headers()
+
+ with open(local_path, 'rb') as fh:
+ self.wfile.write(fh.read())
+
+
+class BuildViewerServer(object):
+ def __init__(self, address='localhost', port=0):
+ # TODO use pkg_resources to obtain HTML resources.
+ pkg_dir = os.path.dirname(os.path.abspath(__file__))
+ doc_root = os.path.join(pkg_dir, 'resources', 'html-build-viewer')
+ assert os.path.isdir(doc_root)
+
+ self.doc_root = doc_root
+ self.json_files = {}
+
+ self.server = BaseHTTPServer.HTTPServer((address, port), HTTPHandler)
+ self.server.wrapper = self
+ self.do_shutdown = False
+
+ @property
+ def url(self):
+ hostname, port = self.server.server_address
+ return 'http://%s:%d/' % (hostname, port)
+
+ def add_resource_json_file(self, key, path):
+ """Register a resource JSON file with the server.
+
+ The file will be made available under the name/key specified."""
+ with open(path, 'rb') as fh:
+ self.json_files[key] = fh.read()
+
+ def add_resource_json_url(self, key, url):
+ """Register a resource JSON file at a URL."""
+ r = requests.get(url)
+ if r.status_code != 200:
+ raise Exception('Non-200 HTTP response code')
+ self.json_files[key] = r.text
+
+ def run(self):
+ while not self.do_shutdown:
+ self.server.handle_request()
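+
+
+# Minimal usage sketch (key and path are hypothetical):
+#
+# server = BuildViewerServer()
+# server.add_resource_json_file('build_resources', '/tmp/resources.json')
+# print('Viewer running at %s' % server.url)
+# server.run() # blocks until a POST to /shutdown is received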
diff --git a/python/mozbuild/mozbuild/jar.py b/python/mozbuild/mozbuild/jar.py
new file mode 100644
index 000000000..d40751b69
--- /dev/null
+++ b/python/mozbuild/mozbuild/jar.py
@@ -0,0 +1,597 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+ '''jarmaker.py provides a Python class to package up chrome content by
+processing jar.mn files.
+
+See the documentation for jar.mn on MDC for further details on the format.
+'''
+
+from __future__ import absolute_import
+
+import sys
+import os
+import errno
+import re
+import logging
+from time import localtime
+from MozZipFile import ZipFile
+from cStringIO import StringIO
+from collections import defaultdict
+
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.action.buildlist import addEntriesToListFile
+from mozpack.files import FileFinder
+import mozpack.path as mozpath
+if sys.platform == 'win32':
+ from ctypes import windll, WinError
+ CreateHardLink = windll.kernel32.CreateHardLinkA
+
+__all__ = ['JarMaker']
+
+
+class ZipEntry(object):
+ '''Helper class for jar output.
+
+ This class defines a simple file-like object for a zipfile.ZipEntry
+ so that we can consecutively write to it and then close it.
+ The close() method hooks into ZipFile.writestr.
+ '''
+
+ def __init__(self, name, zipfile):
+ self._zipfile = zipfile
+ self._name = name
+ self._inner = StringIO()
+
+ def write(self, content):
+ '''Append the given content to this zip entry'''
+
+ self._inner.write(content)
+ return
+
+ def close(self):
+ '''The close method writes the content back to the zip file.'''
+
+ self._zipfile.writestr(self._name, self._inner.getvalue())
+
+
+def getModTime(aPath):
+ if not os.path.isfile(aPath):
+ return 0
+ mtime = os.stat(aPath).st_mtime
+ return localtime(mtime)
+
+
+class JarManifestEntry(object):
+ def __init__(self, output, source, is_locale=False, preprocess=False):
+ self.output = output
+ self.source = source
+ self.is_locale = is_locale
+ self.preprocess = preprocess
+
+
+class JarInfo(object):
+ def __init__(self, base_or_jarinfo, name=None):
+ if name is None:
+ assert isinstance(base_or_jarinfo, JarInfo)
+ self.base = base_or_jarinfo.base
+ self.name = base_or_jarinfo.name
+ else:
+ assert not isinstance(base_or_jarinfo, JarInfo)
+ self.base = base_or_jarinfo or ''
+ self.name = name
+ # For compatibility with existing jar.mn files, if there is no
+ # base, the jar name is under chrome/
+ if not self.base:
+ self.name = mozpath.join('chrome', self.name)
+ self.relativesrcdir = None
+ self.chrome_manifests = []
+ self.entries = []
+
+
+class DeprecatedJarManifest(Exception): pass
+
+
+class JarManifestParser(object):
+
+ ignore = re.compile('\s*(\#.*)?$')
+ jarline = re.compile('''
+ (?:
+ (?:\[(?P<base>[\w\d.\-\_\\\/{}@]+)\]\s*)? # optional [base/path]
+ (?P<jarfile>[\w\d.\-\_\\\/{}]+)\.jar\: # filename.jar:
+ |
+ (?:\s*(\#.*)?) # comment
+ )\s*$ # whitespaces
+ ''', re.VERBOSE)
+ relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
+ regline = re.compile('\%\s+(.*)$')
+ entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
+ entryline = re.compile(entryre
+ + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$'
+ )
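+
+ # Illustrative jar.mn lines and the patterns above that match them:
+ # browser.jar: --> jarline (jar placed under chrome/)
+ # [base/path] app.jar: --> jarline (with explicit base)
+ # % content global content/global/ --> regline (chrome manifest entry)
+ # * content/foo.js (source/foo.js) --> entryline (preprocessed entry)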
+
+ def __init__(self):
+ self._current_jar = None
+ self._jars = []
+
+ def write(self, line):
+ # A Preprocessor instance feeds the parser through calls to this method.
+
+ # Ignore comments and empty lines
+ if self.ignore.match(line):
+ return
+
+ # A jar manifest file can declare several different sections, each of
+ # which applies to a given "jar file". Each of those sections starts
+ # with "<name>.jar:", in which case the path is assumed relative to
+ # a "chrome" directory, or "[<base/path>] <subpath/name>.jar:", where
+ # a base directory is given (usually pointing at the root of the
+ # application or addon) and the jar path is given relative to the base
+ # directory.
+ if self._current_jar is None:
+ m = self.jarline.match(line)
+ if not m:
+ raise RuntimeError(line)
+ if m.group('jarfile'):
+ self._current_jar = JarInfo(m.group('base'),
+ m.group('jarfile'))
+ self._jars.append(self._current_jar)
+ return
+
+ # Within each section, there can be three different types of entries:
+
+ # - indications of the relative source directory we pretend to be in
+ # when considering localization files, in the following form;
+ # "relativesrcdir <path>:"
+ m = self.relsrcline.match(line)
+ if m:
+ if self._current_jar.chrome_manifests or self._current_jar.entries:
+ self._current_jar = JarInfo(self._current_jar)
+ self._jars.append(self._current_jar)
+ self._current_jar.relativesrcdir = m.group('relativesrcdir')
+ return
+
+ # - chrome manifest entries, prefixed with "%".
+ m = self.regline.match(line)
+ if m:
+ rline = ' '.join(m.group(1).split())
+ if rline not in self._current_jar.chrome_manifests:
+ self._current_jar.chrome_manifests.append(rline)
+ return
+
+ # - entries indicating files to be part of the given jar. They take
+ # one of the following forms:
+ # "<dest_path>"
+ # or
+ # "<dest_path> (<source_path>)"
+ # The <dest_path> is where the file(s) will be put in the chrome jar.
+ # The <source_path> is where the file(s) can be found in the source
+ # directory. The <source_path> may start with a "%" for files that
+ # are part of a localization directory, in which case the "%" stands
+ # for the locale.
+ # Each entry can be prefixed with "*" for preprocessing.
+ m = self.entryline.match(line)
+ if m:
+ if m.group('optOverwrite'):
+ raise DeprecatedJarManifest(
+ 'The "+" prefix is not supported anymore')
+ self._current_jar.entries.append(JarManifestEntry(
+ m.group('output'),
+ m.group('source') or mozpath.basename(m.group('output')),
+ is_locale=bool(m.group('locale')),
+ preprocess=bool(m.group('optPreprocess')),
+ ))
+ return
+
+ self._current_jar = None
+ self.write(line)
+
+ def __iter__(self):
+ return iter(self._jars)
+
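+# A minimal usage sketch (illustrative):
+#
+#   parser = JarManifestParser()
+#   parser.write('browser.jar:')
+#   parser.write('% content browser %content/browser/')
+#   parser.write('  content/browser/about.xul (content/about.xul)')
+#   jars = list(parser)
+#   # jars[0].name == 'chrome/browser'
+#   # jars[0].chrome_manifests == ['content browser %content/browser/']
+#   # jars[0].entries[0].source == 'content/about.xul'
+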
+
+class JarMaker(object):
+ '''JarMaker reads jar.mn files and processes them into jar files or
+ flat directories, along with chrome.manifest files.
+ '''
+
+ def __init__(self, outputFormat='flat', useJarfileManifest=True,
+ useChromeManifest=False):
+
+ self.outputFormat = outputFormat
+ self.useJarfileManifest = useJarfileManifest
+ self.useChromeManifest = useChromeManifest
+ self.pp = Preprocessor()
+ self.topsourcedir = None
+ self.sourcedirs = []
+ self.localedirs = None
+ self.l10nbase = None
+ self.l10nmerge = None
+ self.relativesrcdir = None
+ self.rootManifestAppId = None
+ self._seen_output = set()
+
+ def getCommandLineParser(self):
+ '''Get an optparse.OptionParser for jarmaker.
+
+ This OptionParser has the options for jarmaker as well as
+ the options for the inner Preprocessor.
+ '''
+
+ # HACK: we need to unescape the string variables we get; the Perl
+ # versions didn't handle strings correctly.
+
+ p = self.pp.getCommandLineParser(unescapeDefines=True)
+ p.add_option('-f', type='choice', default='jar',
+ choices=('jar', 'flat', 'symlink'),
+ help='file format used for output',
+ metavar='[jar, flat, symlink]',
+ )
+ p.add_option('-v', action='store_true', dest='verbose',
+ help='verbose output')
+ p.add_option('-q', action='store_false', dest='verbose',
+ help='quiet output')
+ p.add_option('-e', action='store_true',
+ help='create chrome.manifest instead of jarfile.manifest'
+ )
+ p.add_option('-s', type='string', action='append', default=[],
+ help='source directory')
+ p.add_option('-t', type='string', help='top source directory')
+ p.add_option('-c', '--l10n-src', type='string', action='append',
+ help='localization directory')
+ p.add_option('--l10n-base', type='string', action='store',
+ help='base directory to be used for localization (requires relativesrcdir)'
+ )
+ p.add_option('--locale-mergedir', type='string', action='store',
+ help='base directory to be used for l10n-merge '
+ '(requires l10n-base and relativesrcdir)'
+ )
+ p.add_option('--relativesrcdir', type='string',
+ help='relativesrcdir to be used for localization')
+ p.add_option('-d', type='string', help='base directory')
+ p.add_option('--root-manifest-entry-appid', type='string',
+ help='add an app id specific root chrome manifest entry.'
+ )
+ return p
+
+ def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True):
+ '''Helper method to write out the chrome registration entries to
+ jarfile.manifest or chrome.manifest, or both.
+
+ The actual file processing is done in updateManifest.
+ '''
+
+ # rewrite the manifest, if entries given
+ if not register:
+ return
+
+ chromeManifest = os.path.join(jardir, jarbase, 'chrome.manifest')
+
+ if self.useJarfileManifest:
+ self.updateManifest(os.path.join(jardir, jarbase,
+ jarname + '.manifest'),
+ chromebasepath.format(''), register)
+ if jarname != 'chrome':
+ addEntriesToListFile(chromeManifest,
+ ['manifest {0}.manifest'.format(jarname)])
+ if self.useChromeManifest:
+ chromebase = os.path.dirname(jarname) + '/'
+ self.updateManifest(chromeManifest,
+ chromebasepath.format(chromebase), register)
+
+ # If requested, add a root chrome manifest entry (assumed to be in the parent directory
+ # of chromeManifest) with the application-specific id. In cases where we're building
+ # lang packs, the root manifest must know about application subdirectories.
+
+ if self.rootManifestAppId:
+ rootChromeManifest = \
+ os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
+ '..', 'chrome.manifest')
+ rootChromeManifest = os.path.normpath(rootChromeManifest)
+ chromeDir = \
+ os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
+ logging.info("adding '%s' entry to root chrome manifest appid=%s"
+ % (chromeDir, self.rootManifestAppId))
+ addEntriesToListFile(rootChromeManifest,
+ ['manifest %s/chrome.manifest application=%s'
+ % (chromeDir,
+ self.rootManifestAppId)])
+
+ def updateManifest(self, manifestPath, chromebasepath, register):
+ '''updateManifest replaces the % in the chrome registration entries
+ with the given chrome base path, and updates the given manifest file.
+ '''
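+ # For illustration: with chromebasepath 'jar:chrome/browser.jar!/',
+ # the entry 'content browser %content/browser/' is rewritten to
+ # 'content browser jar:chrome/browser.jar!/content/browser/'.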
+ myregister = dict.fromkeys(map(lambda s: s.replace('%',
+ chromebasepath), register))
+ addEntriesToListFile(manifestPath, myregister.iterkeys())
+
+ def makeJar(self, infile, jardir):
+ '''makeJar is the main entry point to JarMaker.
+
+ It takes the input file and the output directory as arguments; the
+ source dirs, the top source dir and optionally the l10n dirs are
+ taken from attributes set on the instance.
+ '''
+
+ # Make paths absolute; if the input is a file, guess its srcdir and
+ # add it to sourcedirs.
+ _normpath = lambda p: os.path.normpath(os.path.abspath(p))
+ self.topsourcedir = _normpath(self.topsourcedir)
+ self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
+ if self.localedirs:
+ self.localedirs = [_normpath(p) for p in self.localedirs]
+ elif self.relativesrcdir:
+ self.localedirs = \
+ self.generateLocaleDirs(self.relativesrcdir)
+ if isinstance(infile, basestring):
+ logging.info('processing ' + infile)
+ self.sourcedirs.append(_normpath(os.path.dirname(infile)))
+ pp = self.pp.clone()
+ pp.out = JarManifestParser()
+ pp.do_include(infile)
+
+ for info in pp.out:
+ self.processJarSection(info, jardir)
+
+ def generateLocaleDirs(self, relativesrcdir):
+ if os.path.basename(relativesrcdir) == 'locales':
+ # strip locales
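+ # e.g. 'browser/locales' becomes 'browser'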
+ l10nrelsrcdir = os.path.dirname(relativesrcdir)
+ else:
+ l10nrelsrcdir = relativesrcdir
+ locdirs = []
+
+ # generate locales dirs, merge, l10nbase, en-US
+ if self.l10nmerge:
+ locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir))
+ if self.l10nbase:
+ locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
+ if self.l10nmerge or not self.l10nbase:
+ # add en-US if we merge, or if it's not l10n
+ locdirs.append(os.path.join(self.topsourcedir,
+ relativesrcdir, 'en-US'))
+ return locdirs
+
+ def processJarSection(self, jarinfo, jardir):
+ '''Internal method called by makeJar to actually process a section
+ of a jar.mn file.
+ '''
+
+ # chromebasepath is used for chrome registration manifests
+ # {0} is getting replaced with chrome/ for chrome.manifest, and with
+ # an empty string for jarfile.manifest
+
+ chromebasepath = '{0}' + os.path.basename(jarinfo.name)
+ if self.outputFormat == 'jar':
+ chromebasepath = 'jar:' + chromebasepath + '.jar!'
+ chromebasepath += '/'
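+ # For illustration, with jarinfo.name 'chrome/browser' this is
+ # '{0}browser/' for flat output and 'jar:{0}browser.jar!/' for jar
+ # output; finalizeJar then fills {0} with 'chrome/' or ''.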
+
+ jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name)
+ jf = None
+ if self.outputFormat == 'jar':
+ # jar
+ jarfilepath = jarfile + '.jar'
+ try:
+ os.makedirs(os.path.dirname(jarfilepath))
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ jf = ZipFile(jarfilepath, 'a', lock=True)
+ outHelper = self.OutputHelper_jar(jf)
+ else:
+ outHelper = getattr(self, 'OutputHelper_'
+ + self.outputFormat)(jarfile)
+
+ if jarinfo.relativesrcdir:
+ self.localedirs = self.generateLocaleDirs(jarinfo.relativesrcdir)
+
+ for e in jarinfo.entries:
+ self._processEntryLine(e, outHelper, jf)
+
+ self.finalizeJar(jardir, jarinfo.base, jarinfo.name, chromebasepath,
+ jarinfo.chrome_manifests)
+ if jf is not None:
+ jf.close()
+
+ def _processEntryLine(self, e, outHelper, jf):
+ out = e.output
+ src = e.source
+
+ # pick the right sourcedir -- l10n, topsrc or src
+
+ if e.is_locale:
+ src_base = self.localedirs
+ elif src.startswith('/'):
+ # path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul)
+ # refers to a path relative to topsourcedir, use that as base
+ # and strip the leading '/'
+ src_base = [self.topsourcedir]
+ src = src[1:]
+ else:
+ # use srcdirs and the objdir (current working dir) for relative paths
+ src_base = self.sourcedirs + [os.getcwd()]
+
+ if '*' in src:
+ def _prefix(s):
+ for p in s.split('/'):
+ if '*' not in p:
+ yield p + '/'
+ prefix = ''.join(_prefix(src))
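+ # For illustration: src 'skin/*.png' gives prefix 'skin/', so a
+ # match 'skin/button.png' reduces to 'button.png' and lands at
+ # mozpath.join(out, 'button.png').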
+ emitted = set()
+ for _srcdir in src_base:
+ finder = FileFinder(_srcdir, find_executables=False)
+ for path, _ in finder.find(src):
+ # If the path was already seen in one of the other source
+ # directories, skip it. That matches the non-wildcard case
+ # below, where we pick the first existing file.
+ reduced_path = path[len(prefix):]
+ if reduced_path in emitted:
+ continue
+ emitted.add(reduced_path)
+ e = JarManifestEntry(
+ mozpath.join(out, reduced_path),
+ path,
+ is_locale=e.is_locale,
+ preprocess=e.preprocess,
+ )
+ self._processEntryLine(e, outHelper, jf)
+ return
+
+ # check if the source file exists
+ realsrc = None
+ for _srcdir in src_base:
+ if os.path.isfile(os.path.join(_srcdir, src)):
+ realsrc = os.path.join(_srcdir, src)
+ break
+ if realsrc is None:
+ if jf is not None:
+ jf.close()
+ raise RuntimeError('File "{0}" not found in {1}'.format(src,
+ ', '.join(src_base)))
+
+ if out in self._seen_output:
+ raise RuntimeError('%s already added' % out)
+ self._seen_output.add(out)
+
+ if e.preprocess:
+ outf = outHelper.getOutput(out)
+ inf = open(realsrc)
+ pp = self.pp.clone()
+ if src.endswith('.css'):
+ pp.setMarker('%')
+ pp.out = outf
+ pp.do_include(inf)
+ pp.failUnused(realsrc)
+ outf.close()
+ inf.close()
+ return
+
+ # copy or symlink if newer
+
+ if getModTime(realsrc) > outHelper.getDestModTime(e.output):
+ if self.outputFormat == 'symlink':
+ outHelper.symlink(realsrc, out)
+ return
+ outf = outHelper.getOutput(out)
+
+ # Open in binary mode; the content may be binary data such as images.
+
+ inf = open(realsrc, 'rb')
+ outf.write(inf.read())
+ outf.close()
+ inf.close()
+
+ class OutputHelper_jar(object):
+ '''Provide getDestModTime and getOutput for a given jarfile.'''
+
+ def __init__(self, jarfile):
+ self.jarfile = jarfile
+
+ def getDestModTime(self, aPath):
+ try:
+ info = self.jarfile.getinfo(aPath)
+ return info.date_time
+ except Exception:
+ return 0
+
+ def getOutput(self, name):
+ return ZipEntry(name, self.jarfile)
+
+ class OutputHelper_flat(object):
+ '''Provide getDestModTime and getOutput for a given flat
+ output directory. The helper method ensureDirFor is used by
+ the symlink subclass.
+ '''
+
+ def __init__(self, basepath):
+ self.basepath = basepath
+
+ def getDestModTime(self, aPath):
+ return getModTime(os.path.join(self.basepath, aPath))
+
+ def getOutput(self, name):
+ out = self.ensureDirFor(name)
+
+ # remove previous link or file
+ try:
+ os.remove(out)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ return open(out, 'wb')
+
+ def ensureDirFor(self, name):
+ out = os.path.join(self.basepath, name)
+ outdir = os.path.dirname(out)
+ if not os.path.isdir(outdir):
+ try:
+ os.makedirs(outdir)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ return out
+
+ class OutputHelper_symlink(OutputHelper_flat):
+ '''Subclass of OutputHelper_flat that provides a helper for
+ creating a symlink including creating the parent directories.
+ '''
+
+ def symlink(self, src, dest):
+ out = self.ensureDirFor(dest)
+
+ # remove previous link or file
+ try:
+ os.remove(out)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if sys.platform != 'win32':
+ os.symlink(src, out)
+ else:
+ # On Win32, use ctypes to create a hardlink
+ rv = CreateHardLink(out, src, None)
+ if rv == 0:
+ raise WinError()
+
+
+def main(args=None):
+ args = args or sys.argv[1:]
+ jm = JarMaker()
+ p = jm.getCommandLineParser()
+ (options, args) = p.parse_args(args)
+ jm.outputFormat = options.f
+ jm.sourcedirs = options.s
+ jm.topsourcedir = options.t
+ if options.e:
+ jm.useChromeManifest = True
+ jm.useJarfileManifest = False
+ if options.l10n_base:
+ if not options.relativesrcdir:
+ p.error('relativesrcdir required when using l10n-base')
+ if options.l10n_src:
+ p.error('both l10n-src and l10n-base are not supported')
+ jm.l10nbase = options.l10n_base
+ jm.relativesrcdir = options.relativesrcdir
+ jm.l10nmerge = options.locale_mergedir
+ if jm.l10nmerge and not os.path.isdir(jm.l10nmerge):
+ logging.warning("WARNING: --locale-mergedir passed, but '%s' does not exist. "
+ "Ignore this message if the locale is complete." % jm.l10nmerge)
+ elif options.locale_mergedir:
+ p.error('l10n-base required when using locale-mergedir')
+ jm.localedirs = options.l10n_src
+ if options.root_manifest_entry_appid:
+ jm.rootManifestAppId = options.root_manifest_entry_appid
+ noise = logging.INFO
+ if options.verbose is not None:
+ noise = options.verbose and logging.DEBUG or logging.WARN
+ if sys.version_info[:2] > (2, 3):
+ logging.basicConfig(format='%(message)s')
+ else:
+ logging.basicConfig()
+ logging.getLogger().setLevel(noise)
+ topsrc = options.t
+ topsrc = os.path.normpath(os.path.abspath(topsrc))
+ if not args:
+ infile = sys.stdin
+ else:
+ (infile, ) = args
+ jm.makeJar(infile, options.d)
diff --git a/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo b/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo
new file mode 100644
index 000000000..be7711cb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo
Binary files differ
diff --git a/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po b/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po
new file mode 100644
index 000000000..fbdfabd83
--- /dev/null
+++ b/python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po
@@ -0,0 +1,8 @@
+msgid "build.threads.short"
+msgstr "Thread Count"
+
+msgid "build.threads.full"
+msgstr "The number of threads to use when performing CPU intensive tasks. "
+ "This constrols the level of parallelization. The default value is "
+ "the number of cores in your machine."
+
diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
new file mode 100644
index 000000000..b6802a47c
--- /dev/null
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -0,0 +1,1603 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import errno
+import itertools
+import json
+import logging
+import operator
+import os
+import subprocess
+import sys
+
+import mozpack.path as mozpath
+
+from mach.decorators import (
+ CommandArgument,
+ CommandArgumentGroup,
+ CommandProvider,
+ Command,
+ SubCommand,
+)
+
+from mach.mixin.logging import LoggingMixin
+
+from mozbuild.base import (
+ BuildEnvironmentNotFoundException,
+ MachCommandBase,
+ MachCommandConditions as conditions,
+ MozbuildObject,
+ MozconfigFindException,
+ MozconfigLoadException,
+ ObjdirMismatchException,
+)
+
+from mozbuild.backend import (
+ backends,
+ get_backend_class,
+)
+from mozbuild.shellutil import quote as shell_quote
+
+
+BUILD_WHAT_HELP = '''
+What to build. Can be a top-level make target or a relative directory. If
+multiple options are provided, they will be built serially. Takes dependency
+information from `topsrcdir/build/dumbmake-dependencies` to build additional
+targets as needed. BUILDING ONLY PARTS OF THE TREE CAN RESULT IN BAD TREE
+STATE. USE AT YOUR OWN RISK.
+'''.strip()
+
+FINDER_SLOW_MESSAGE = '''
+===================
+PERFORMANCE WARNING
+
+The OS X Finder application (file indexing used by Spotlight) used a lot of CPU
+during the build - an average of %f%% (100%% is 1 core). This made your build
+slower.
+
+Consider adding ".noindex" to the end of your object directory name to have
+Finder ignore it. Or, add an indexing exclusion through the Spotlight System
+Preferences.
+===================
+'''.strip()
+
+EXCESSIVE_SWAP_MESSAGE = '''
+===================
+PERFORMANCE WARNING
+
+Your machine experienced a lot of swap activity during the build. This is
+possibly a sign that your machine doesn't have enough physical memory or
+not enough available memory to perform the build. It's also possible some
+other system activity during the build is to blame.
+
+If you feel this message is not appropriate for your machine configuration,
+please file a Core :: Build Config bug at
+https://bugzilla.mozilla.org/enter_bug.cgi?product=Core&component=Build%20Config
+and tell us about your machine and build configuration so we can adjust the
+warning heuristic.
+===================
+'''
+
+
+class TerminalLoggingHandler(logging.Handler):
+ """Custom logging handler that works with terminal window dressing.
+
+ This class should probably live elsewhere, like the mach core. Consider
+ this a proving ground for its usefulness.
+ """
+ def __init__(self):
+ logging.Handler.__init__(self)
+
+ self.fh = sys.stdout
+ self.footer = None
+
+ def flush(self):
+ self.acquire()
+
+ try:
+ self.fh.flush()
+ finally:
+ self.release()
+
+ def emit(self, record):
+ msg = self.format(record)
+
+ self.acquire()
+
+ try:
+ if self.footer:
+ self.footer.clear()
+
+ self.fh.write(msg)
+ self.fh.write('\n')
+
+ if self.footer:
+ self.footer.draw()
+
+ # If we don't flush, the footer may not get drawn.
+ self.fh.flush()
+ finally:
+ self.release()
+
+
+class BuildProgressFooter(object):
+ """Handles display of a build progress indicator in a terminal.
+
+ When mach builds inside a blessings-supported terminal, it will render
+ progress information collected from a BuildMonitor. This class converts the
+ state of BuildMonitor into terminal output.
+ """
+
+ def __init__(self, terminal, monitor):
+ # terminal is a blessings.Terminal.
+ self._t = terminal
+ self._fh = sys.stdout
+ self.tiers = monitor.tiers.tier_status.viewitems()
+
+ def clear(self):
+ """Removes the footer from the current terminal."""
+ self._fh.write(self._t.move_x(0))
+ self._fh.write(self._t.clear_eos())
+
+ def draw(self):
+ """Draws this footer in the terminal."""
+
+ if not self.tiers:
+ return
+
+ # The drawn terminal looks something like:
+ # TIER: base nspr nss js platform app SUBTIER: static export libs tools DIRECTORIES: 06/09 (memory)
+
+ # This is a list whose items are either plain strings (no terminal
+ # encoding applied) or 2-tuples of (terminal attribute name, text).
+ # For the full reason we do things this way, read the big comment
+ # below.
+ parts = [('bold', 'TIER:')]
+ append = parts.append
+ for tier, status in self.tiers:
+ if status is None:
+ append(tier)
+ elif status == 'finished':
+ append(('green', tier))
+ else:
+ append(('underline_yellow', tier))
+
+ # We don't want to write more characters than the current width of the
+ # terminal otherwise wrapping may result in weird behavior. We can't
+ # simply truncate the line at terminal width characters because a)
+ # non-viewable escape characters count towards the limit and b) we
+ # don't want to truncate in the middle of an escape sequence because
+ # subsequent output would inherit the escape sequence.
+ max_width = self._t.width
+ written = 0
+ write_pieces = []
+ for part in parts:
+ try:
+ func, part = part
+ encoded = getattr(self._t, func)(part)
+ except ValueError:
+ encoded = part
+
+ len_part = len(part)
+ len_spaces = len(write_pieces)
+ if written + len_part + len_spaces > max_width:
+ write_pieces.append(part[0:max_width - written - len_spaces])
+ written += len_part
+ break
+
+ write_pieces.append(encoded)
+ written += len_part
+
+ with self._t.location():
+ # move() returns an escape sequence; it must be written out to
+ # actually position the cursor on the last line.
+ self._fh.write(self._t.move(self._t.height - 1, 0))
+ self._fh.write(' '.join(write_pieces))
+
+
+class BuildOutputManager(LoggingMixin):
+ """Handles writing build output to a terminal, to logs, etc."""
+
+ def __init__(self, log_manager, monitor):
+ self.populate_logger()
+
+ self.monitor = monitor
+ self.footer = None
+
+ terminal = log_manager.terminal
+
+ # TODO convert terminal footer to config file setting.
+ if not terminal or os.environ.get('MACH_NO_TERMINAL_FOOTER', None):
+ return
+
+ self.t = terminal
+ self.footer = BuildProgressFooter(terminal, monitor)
+
+ self._handler = TerminalLoggingHandler()
+ self._handler.setFormatter(log_manager.terminal_formatter)
+ self._handler.footer = self.footer
+
+ old = log_manager.replace_terminal_handler(self._handler)
+ self._handler.level = old.level
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.footer:
+ self.footer.clear()
+ # Prevents the footer from being redrawn if logging occurs.
+ self._handler.footer = None
+
+ # Ensure the resource monitor is stopped because leaving it running
+ # could result in the process hanging on exit because the resource
+ # collection child process hasn't been told to stop.
+ self.monitor.stop_resource_recording()
+
+ def write_line(self, line):
+ if self.footer:
+ self.footer.clear()
+
+ print(line)
+
+ if self.footer:
+ self.footer.draw()
+
+ def refresh(self):
+ if not self.footer:
+ return
+
+ self.footer.clear()
+ self.footer.draw()
+
+ def on_line(self, line):
+ warning, state_changed, relevant = self.monitor.on_line(line)
+
+ if warning:
+ self.log(logging.INFO, 'compiler_warning', warning,
+ 'Warning: {flag} in {filename}: {message}')
+
+ if relevant:
+ self.log(logging.INFO, 'build_output', {'line': line}, '{line}')
+ elif state_changed:
+ have_handler = hasattr(self, 'handler')
+ if have_handler:
+ self.handler.acquire()
+ try:
+ self.refresh()
+ finally:
+ if have_handler:
+ self.handler.release()
+
+
+@CommandProvider
+class Build(MachCommandBase):
+ """Interface to build the tree."""
+
+ @Command('build', category='build', description='Build the tree.')
+ @CommandArgument('--jobs', '-j', default='0', metavar='jobs', type=int,
+ help='Number of concurrent jobs to run. Default is the number of CPUs.')
+ @CommandArgument('-C', '--directory', default=None,
+ help='Change to a subdirectory of the build directory first.')
+ @CommandArgument('what', default=None, nargs='*', help=BUILD_WHAT_HELP)
+ @CommandArgument('-X', '--disable-extra-make-dependencies',
+ default=False, action='store_true',
+ help='Do not add extra make dependencies.')
+ @CommandArgument('-v', '--verbose', action='store_true',
+ help='Verbose output for what commands the build is running.')
+ def build(self, what=None, disable_extra_make_dependencies=None, jobs=0,
+ directory=None, verbose=False):
+ """Build the source tree.
+
+ With no arguments, this will perform a full build.
+
+ Positional arguments define targets to build. These can be make targets
+ or patterns like "<dir>/<target>" to indicate a make target within a
+ directory.
+
+ There are a few special targets that can be used to perform a partial
+ build faster than what `mach build` would perform:
+
+ * binaries - compiles and links all C/C++ sources and produces shared
+ libraries and executables (binaries).
+
+ * faster - builds JavaScript, XUL, CSS, etc files.
+
+ "binaries" and "faster" almost fully complement each other. However,
+ there are build actions not captured by either. If things don't appear to
+ be rebuilding, perform a vanilla `mach build` to rebuild the world.
+ """
+ import which
+ from mozbuild.controller.building import BuildMonitor
+ from mozbuild.util import (
+ mkdir,
+ resolve_target_to_make,
+ )
+
+ self.log_manager.register_structured_logger(logging.getLogger('mozbuild'))
+
+ warnings_path = self._get_state_filename('warnings.json')
+ monitor = self._spawn(BuildMonitor)
+ monitor.init(warnings_path)
+ ccache_start = monitor.ccache_stats()
+
+ # Disable indexing in objdir because it is not necessary and can slow
+ # down builds.
+ mkdir(self.topobjdir, not_indexed=True)
+
+ with BuildOutputManager(self.log_manager, monitor) as output:
+ monitor.start()
+
+ if directory is not None and not what:
+ print('Can only use -C/--directory with an explicit target '
+ 'name.')
+ return 1
+
+ if directory is not None:
+ disable_extra_make_dependencies = True
+ directory = mozpath.normsep(directory)
+ if directory.startswith('/'):
+ directory = directory[1:]
+
+ status = None
+ monitor.start_resource_recording()
+ if what:
+ top_make = os.path.join(self.topobjdir, 'Makefile')
+ if not os.path.exists(top_make):
+ print('Your tree has not been configured yet. Please run '
+ '|mach build| with no arguments.')
+ return 1
+
+ # Collect target pairs.
+ target_pairs = []
+ for target in what:
+ path_arg = self._wrap_path_argument(target)
+
+ if directory is not None:
+ make_dir = os.path.join(self.topobjdir, directory)
+ make_target = target
+ else:
+ make_dir, make_target = \
+ resolve_target_to_make(self.topobjdir,
+ path_arg.relpath())
+
+ if make_dir is None and make_target is None:
+ return 1
+
+ # See bug 886162 - we don't want to "accidentally" build
+ # the entire tree (if that's really the intent, it's
+ # unlikely they would have specified a directory.)
+ if not make_dir and not make_target:
+ print("The specified directory doesn't contain a "
+ "Makefile and the first parent with one is the "
+ "root of the tree. Please specify a directory "
+ "with a Makefile or run |mach build| if you "
+ "want to build the entire tree.")
+ return 1
+
+ target_pairs.append((make_dir, make_target))
+
+ # Possibly add extra make dependencies using dumbmake.
+ if not disable_extra_make_dependencies:
+ from dumbmake.dumbmake import (dependency_map,
+ add_extra_dependencies)
+ depfile = os.path.join(self.topsrcdir, 'build',
+ 'dumbmake-dependencies')
+ with open(depfile) as f:
+ dm = dependency_map(f.readlines())
+ new_pairs = list(add_extra_dependencies(target_pairs, dm))
+ self.log(logging.DEBUG, 'dumbmake',
+ {'target_pairs': target_pairs,
+ 'new_pairs': new_pairs},
+ 'Added extra dependencies: will build {new_pairs} ' +
+ 'instead of {target_pairs}.')
+ target_pairs = new_pairs
+
+ # Ensure build backend is up to date. The alternative is to
+ # have rules in the invoked Makefile to rebuild the build
+ # backend. But that involves make reinvoking itself and there
+ # are undesired side-effects of this. See bug 877308 for a
+ # comprehensive history lesson.
+ self._run_make(directory=self.topobjdir, target='backend',
+ line_handler=output.on_line, log=False,
+ print_directory=False)
+
+ # Build target pairs.
+ for make_dir, make_target in target_pairs:
+ # We don't display build status messages during partial
+ # tree builds because they aren't reliable there. This
+ # could potentially be fixed if the build monitor were more
+ # intelligent about encountering undefined state.
+ status = self._run_make(directory=make_dir, target=make_target,
+ line_handler=output.on_line, log=False, print_directory=False,
+ ensure_exit_code=False, num_jobs=jobs, silent=not verbose,
+ append_env={b'NO_BUILDSTATUS_MESSAGES': b'1'})
+
+ if status != 0:
+ break
+ else:
+ # Try to call the default backend's build() method. This will
+ # run configure to determine BUILD_BACKENDS if it hasn't run
+ # yet.
+ config = None
+ try:
+ config = self.config_environment
+ except Exception:
+ config_rc = self.configure(buildstatus_messages=True,
+ line_handler=output.on_line)
+ if config_rc != 0:
+ return config_rc
+
+ # Even if configure runs successfully, we may have trouble
+ # getting the config_environment for some builds, such as
+ # OSX Universal builds. These have to go through client.mk
+ # regardless.
+ try:
+ config = self.config_environment
+ except Exception:
+ pass
+
+ if config:
+ active_backend = config.substs.get('BUILD_BACKENDS', [None])[0]
+ if active_backend:
+ backend_cls = get_backend_class(active_backend)(config)
+ status = backend_cls.build(self, output, jobs, verbose)
+
+ # If the backend doesn't specify a build() method, then just
+ # call client.mk directly.
+ if status is None:
+ status = self._run_make(srcdir=True, filename='client.mk',
+ line_handler=output.on_line, log=False, print_directory=False,
+ allow_parallel=False, ensure_exit_code=False, num_jobs=jobs,
+ silent=not verbose)
+
+ self.log(logging.WARNING, 'warning_summary',
+ {'count': len(monitor.warnings_database)},
+ '{count} compiler warnings present.')
+
+ monitor.finish(record_usage=status==0)
+
+ high_finder, finder_percent = monitor.have_high_finder_usage()
+ if high_finder:
+ print(FINDER_SLOW_MESSAGE % finder_percent)
+
+ ccache_end = monitor.ccache_stats()
+
+ ccache_diff = None
+ if ccache_start and ccache_end:
+ ccache_diff = ccache_end - ccache_start
+ if ccache_diff:
+ self.log(logging.INFO, 'ccache',
+ {'msg': ccache_diff.hit_rate_message()}, "{msg}")
+
+ notify_minimum_time = 300
+ try:
+ notify_minimum_time = int(os.environ.get('MACH_NOTIFY_MINTIME', '300'))
+ except ValueError:
+ # Just stick with the default
+ pass
+
+ if monitor.elapsed > notify_minimum_time:
+ # Display a notification when the build completes.
+ self.notify('Build complete' if not status else 'Build failed')
+
+ if status:
+ return status
+
+ long_build = monitor.elapsed > 600
+
+ if long_build:
+ output.on_line('We know it took a while, but your build finally finished successfully!')
+ else:
+ output.on_line('Your build was successful!')
+
+ if monitor.have_resource_usage:
+ excessive, swap_in, swap_out = monitor.have_excessive_swapping()
+ # if excessive:
+ # print(EXCESSIVE_SWAP_MESSAGE)
+
+ print('To view resource usage of the build, run |mach '
+ 'resource-usage|.')
+
+ telemetry_handler = getattr(self._mach_context,
+ 'telemetry_handler', None)
+ telemetry_data = monitor.get_resource_usage()
+
+ # Record build configuration data. For now, we cherry pick
+ # items we need rather than grabbing everything, in order
+ # to avoid accidentally disclosing PII.
+ telemetry_data['substs'] = {}
+ try:
+ for key in ['MOZ_ARTIFACT_BUILDS', 'MOZ_USING_CCACHE']:
+ value = self.substs.get(key, False)
+ telemetry_data['substs'][key] = value
+ except BuildEnvironmentNotFoundException:
+ pass
+
+ # Grab ccache stats if available. We need to be careful not
+ # to capture information that can potentially identify the
+ # user (such as the cache location)
+ if ccache_diff:
+ telemetry_data['ccache'] = {}
+ for key in [key[0] for key in ccache_diff.STATS_KEYS]:
+ try:
+ telemetry_data['ccache'][key] = ccache_diff._values[key]
+ except KeyError:
+ pass
+
+ telemetry_handler(self._mach_context, telemetry_data)
+
+ # Only for full builds because incremental builders likely don't
+ # need to be burdened with this.
+ if not what:
+ try:
+ # Fennec doesn't have useful output from just building. We should
+ # arguably make the build action useful for Fennec. Another day...
+ if self.substs['MOZ_BUILD_APP'] != 'mobile/android':
+ print('To take your build for a test drive, run: |mach run|')
+ app = self.substs['MOZ_BUILD_APP']
+ if app in ('browser', 'mobile/android'):
+ print('For more information on what to do now, see '
+ 'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')
+ except Exception:
+ # Ignore Exceptions in case we can't find config.status (such
+ # as when doing OSX Universal builds)
+ pass
+
+ return status
+
+ @Command('configure', category='build',
+ description='Configure the tree (run configure and config.status).')
+ @CommandArgument('options', default=None, nargs=argparse.REMAINDER,
+ help='Configure options')
+ def configure(self, options=None, buildstatus_messages=False, line_handler=None):
+ def on_line(line):
+ self.log(logging.INFO, 'build_output', {'line': line}, '{line}')
+
+ line_handler = line_handler or on_line
+
+ options = ' '.join(shell_quote(o) for o in options or ())
+ append_env = {b'CONFIGURE_ARGS': options.encode('utf-8')}
+
+ # Only print build status messages when we have an active
+ # monitor.
+ if not buildstatus_messages:
+ append_env[b'NO_BUILDSTATUS_MESSAGES'] = b'1'
+ status = self._run_make(srcdir=True, filename='client.mk',
+ target='configure', line_handler=line_handler, log=False,
+ print_directory=False, allow_parallel=False, ensure_exit_code=False,
+ append_env=append_env)
+
+ if not status:
+ print('Configure complete!')
+ print('Be sure to run |mach build| to pick up any changes')
+
+ return status
+
+ @Command('resource-usage', category='post-build',
+ description='Show information about system resource usage for a build.')
+ @CommandArgument('--address', default='localhost',
+ help='Address the HTTP server should listen on.')
+ @CommandArgument('--port', type=int, default=0,
+ help='Port number the HTTP server should listen on.')
+ @CommandArgument('--browser', default='firefox',
+ help='Web browser to automatically open. See webbrowser Python module.')
+ @CommandArgument('--url',
+ help='URL of JSON document to display')
+ def resource_usage(self, address=None, port=None, browser=None, url=None):
+ import webbrowser
+ from mozbuild.html_build_viewer import BuildViewerServer
+
+ server = BuildViewerServer(address, port)
+
+ if url:
+ server.add_resource_json_url('url', url)
+ else:
+ last = self._get_state_filename('build_resources.json')
+ if not os.path.exists(last):
+ print('Build resources not available. If you have performed a '
+ 'build and receive this message, the psutil Python package '
+ 'likely failed to initialize properly.')
+ return 1
+
+ server.add_resource_json_file('last', last)
+ try:
+ webbrowser.get(browser).open_new_tab(server.url)
+ except Exception:
+ print('Could not get the specified browser; trying the default instead.')
+ try:
+ browser = webbrowser.get().open_new_tab(server.url)
+ except Exception:
+ print('Please open %s in a browser.' % server.url)
+
+ print('Hit CTRL+c to stop server.')
+ server.run()
+
+ @Command('build-backend', category='build',
+ description='Generate a backend used to build the tree.')
+ @CommandArgument('-d', '--diff', action='store_true',
+ help='Show a diff of changes.')
+ # It would be nice to filter the choices below based on
+ # conditions, but that is for another day.
+ @CommandArgument('-b', '--backend', nargs='+', choices=sorted(backends),
+ help='Which backend to build.')
+ @CommandArgument('-v', '--verbose', action='store_true',
+ help='Verbose output.')
+ @CommandArgument('-n', '--dry-run', action='store_true',
+ help='Do everything except writing files out.')
+ def build_backend(self, backend, diff=False, verbose=False, dry_run=False):
+ python = self.virtualenv_manager.python_path
+ config_status = os.path.join(self.topobjdir, 'config.status')
+
+ if not os.path.exists(config_status):
+ print('config.status not found. Please run |mach configure| '
+ 'or |mach build| prior to building the %s build backend.'
+ % backend)
+ return 1
+
+ args = [python, config_status]
+ if backend:
+ args.append('--backend')
+ args.extend(backend)
+ if diff:
+ args.append('--diff')
+ if verbose:
+ args.append('--verbose')
+ if dry_run:
+ args.append('--dry-run')
+
+ return self._run_command_in_objdir(args=args, pass_thru=True,
+ ensure_exit_code=False)
+
+@CommandProvider
+class Doctor(MachCommandBase):
+ """Provide commands for diagnosing common build environment problems"""
+ @Command('doctor', category='devenv',
+ description='')
+ @CommandArgument('--fix', default=None, action='store_true',
+ help='Attempt to fix found problems.')
+ def doctor(self, fix=None):
+ self._activate_virtualenv()
+ from mozbuild.doctor import Doctor
+ doctor = Doctor(self.topsrcdir, self.topobjdir, fix)
+ return doctor.check_all()
+
+@CommandProvider
+class Clobber(MachCommandBase):
+ NO_AUTO_LOG = True
+ CLOBBER_CHOICES = ['objdir', 'python']
+ @Command('clobber', category='build',
+ description='Clobber the tree (delete the object directory).')
+ @CommandArgument('what', default=['objdir'], nargs='*',
+ help='Target to clobber, must be one of {{{}}} (default objdir).'.format(
+ ', '.join(CLOBBER_CHOICES)))
+ @CommandArgument('--full', action='store_true',
+ help='Perform a full clobber')
+ def clobber(self, what, full=False):
+ invalid = set(what) - set(self.CLOBBER_CHOICES)
+ if invalid:
+ print('Unknown clobber target(s): {}'.format(', '.join(invalid)))
+ return 1
+
+ ret = 0
+ if 'objdir' in what:
+ from mozbuild.controller.clobber import Clobberer
+ try:
+ Clobberer(self.topsrcdir, self.topobjdir).remove_objdir(full)
+ except OSError as e:
+ if sys.platform.startswith('win'):
+ # winerror 5 is "access denied"; 32 is "sharing violation".
+ if isinstance(e, WindowsError) and e.winerror in (5, 32):
+ self.log(logging.ERROR, 'file_access_error', {'error': e},
+ "Could not clobber because a file was in use. If the "
+ "application is running, try closing it. {error}")
+ return 1
+ raise
+
+ if 'python' in what:
+ if os.path.isdir(mozpath.join(self.topsrcdir, '.hg')):
+ cmd = ['hg', 'purge', '--all', '-I', 'glob:**.py[co]']
+ elif os.path.isdir(mozpath.join(self.topsrcdir, '.git')):
+ cmd = ['git', 'clean', '-f', '-x', '*.py[co]']
+ else:
+ cmd = ['find', '.', '-type', 'f', '-name', '*.py[co]', '-delete']
+ ret = subprocess.call(cmd, cwd=self.topsrcdir)
+ return ret
+
+@CommandProvider
+class Logs(MachCommandBase):
+ """Provide commands to read mach logs."""
+ NO_AUTO_LOG = True
+
+ @Command('show-log', category='post-build',
+ description='Display mach logs')
+ @CommandArgument('log_file', nargs='?', type=argparse.FileType('rb'),
+ help='Filename to read log data from. Defaults to the log of the last '
+ 'mach command.')
+ def show_log(self, log_file=None):
+ if not log_file:
+ path = self._get_state_filename('last_log.json')
+ log_file = open(path, 'rb')
+
+ if os.isatty(sys.stdout.fileno()):
+ env = dict(os.environ)
+ if 'LESS' not in env:
+ # Sensible default flags if none have been set in the user
+ # environment.
+ env[b'LESS'] = b'FRX'
+ less = subprocess.Popen(['less'], stdin=subprocess.PIPE, env=env)
+ # Various objects already have a reference to sys.stdout, so we
+ # can't just change it, we need to change the file descriptor under
+ # it to redirect to less's input.
+ # First keep a copy of the sys.stdout file descriptor.
+ output_fd = os.dup(sys.stdout.fileno())
+ os.dup2(less.stdin.fileno(), sys.stdout.fileno())
+
+ startTime = 0
+ for line in log_file:
+ created, action, params = json.loads(line)
+ if not startTime:
+ startTime = created
+ self.log_manager.terminal_handler.formatter.start_time = \
+ created
+ if 'line' in params:
+ record = logging.makeLogRecord({
+ 'created': created,
+ 'name': self._logger.name,
+ 'levelno': logging.INFO,
+ 'msg': '{line}',
+ 'params': params,
+ 'action': action,
+ })
+ self._logger.handle(record)
+
+ if self.log_manager.terminal:
+ # Close less's input so that it knows that we're done sending data.
+ less.stdin.close()
+ # Since less's input file descriptor is now also the stdout file
+ # descriptor, we still have a non-closed system file descriptor
+ # for less's input. Restoring sys.stdout's file descriptor to what
+ # it was before we replaced it properly closes less's input.
+ os.dup2(output_fd, sys.stdout.fileno())
+ less.wait()
+
+
+@CommandProvider
+class Warnings(MachCommandBase):
+ """Provide commands for inspecting warnings."""
+
+ @property
+ def database_path(self):
+ return self._get_state_filename('warnings.json')
+
+ @property
+ def database(self):
+ from mozbuild.compilation.warnings import WarningsDatabase
+
+ path = self.database_path
+
+ database = WarningsDatabase()
+
+ if os.path.exists(path):
+ database.load_from_file(path)
+
+ return database
+
+ @Command('warnings-summary', category='post-build',
+ description='Show a summary of compiler warnings.')
+ @CommandArgument('-C', '--directory', default=None,
+ help='Change to a subdirectory of the build directory first.')
+ @CommandArgument('report', default=None, nargs='?',
+ help='Warnings report to display. If not defined, show the most '
+ 'recent report.')
+ def summary(self, directory=None, report=None):
+ database = self.database
+
+ if directory:
+ dirpath = self.join_ensure_dir(self.topsrcdir, directory)
+ if not dirpath:
+ return 1
+ else:
+ dirpath = None
+
+ type_counts = database.type_counts(dirpath)
+ sorted_counts = sorted(type_counts.iteritems(),
+ key=operator.itemgetter(1))
+
+ total = 0
+ for k, v in sorted_counts:
+ print('%d\t%s' % (v, k))
+ total += v
+
+ print('%d\tTotal' % total)
+
+ @Command('warnings-list', category='post-build',
+ description='Show a list of compiler warnings.')
+ @CommandArgument('-C', '--directory', default=None,
+ help='Change to a subdirectory of the build directory first.')
+ @CommandArgument('--flags', default=None, nargs='+',
+ help='Which warnings flags to match.')
+ @CommandArgument('report', default=None, nargs='?',
+ help='Warnings report to display. If not defined, show the most '
+ 'recent report.')
+ def list(self, directory=None, flags=None, report=None):
+ database = self.database
+
+ by_name = sorted(database.warnings)
+
+ topsrcdir = mozpath.normpath(self.topsrcdir)
+
+ if directory:
+ directory = mozpath.normsep(directory)
+ dirpath = self.join_ensure_dir(topsrcdir, directory)
+ if not dirpath:
+ return 1
+
+ if flags:
+ # Flatten lists of flags.
+ flags = set(itertools.chain(*[flaglist.split(',') for flaglist in flags]))
+
+ for warning in by_name:
+ filename = mozpath.normsep(warning['filename'])
+
+ if filename.startswith(topsrcdir):
+ filename = filename[len(topsrcdir) + 1:]
+
+ if directory and not filename.startswith(directory):
+ continue
+
+ if flags and warning['flag'] not in flags:
+ continue
+
+ if warning['column'] is not None:
+ print('%s:%d:%d [%s] %s' % (filename, warning['line'],
+ warning['column'], warning['flag'], warning['message']))
+ else:
+ print('%s:%d [%s] %s' % (filename, warning['line'],
+ warning['flag'], warning['message']))
+
+ def join_ensure_dir(self, dir1, dir2):
+ dir1 = mozpath.normpath(dir1)
+ dir2 = mozpath.normsep(dir2)
+ joined_path = mozpath.join(dir1, dir2)
+ if os.path.isdir(joined_path):
+ return joined_path
+ else:
+ print('Specified directory not found.')
+ return None
+
+@CommandProvider
+class GTestCommands(MachCommandBase):
+ @Command('gtest', category='testing',
+ description='Run GTest unit tests (C++ tests).')
+ @CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
+ help="test_filter is a ':'-separated list of wildcard patterns (called the positive patterns),"
+ "optionally followed by a '-' and another ':'-separated pattern list (called the negative patterns).")
+ @CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
+ help='Run the tests in parallel using multiple processes.')
+ @CommandArgument('--tbpl-parser', '-t', action='store_true',
+ help='Output test results in a format that can be parsed by TBPL.')
+ @CommandArgument('--shuffle', '-s', action='store_true',
+ help='Randomize the execution order of tests.')
+
+ @CommandArgumentGroup('debugging')
+ @CommandArgument('--debug', action='store_true', group='debugging',
+ help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
+ @CommandArgument('--debugger', default=None, type=str, group='debugging',
+ help='Name of debugger to use.')
+ @CommandArgument('--debugger-args', default=None, metavar='params', type=str,
+ group='debugging',
+ help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
+
+ def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser, debug, debugger,
+ debugger_args):
+
+ # We build gtest lazily because it's slow to link.
+ self._run_make(directory="testing/gtest", target='gtest',
+ print_directory=False, ensure_exit_code=True)
+
+ app_path = self.get_binary_path('app')
+ args = [app_path, '-unittest']
+
+ if debug or debugger or debugger_args:
+ args = self.prepend_debugger_args(args, debugger, debugger_args)
+
+ cwd = os.path.join(self.topobjdir, '_tests', 'gtest')
+
+ if not os.path.isdir(cwd):
+ os.makedirs(cwd)
+
+ # Use GTest environment variables to control test execution.
+ # For details see:
+ # https://code.google.com/p/googletest/wiki/AdvancedGuide#Running_Test_Programs:_Advanced_Options
+ gtest_env = {b'GTEST_FILTER': gtest_filter}
+
+ # Note: we must normalize the path here so that gtest on Windows sees
+ # a MOZ_GMP_PATH which has only Windows dir separators, because
+ # nsILocalFile cannot open paths with non-Windows dir separators.
+ xre_path = os.path.join(os.path.normpath(self.topobjdir), "dist", "bin")
+ gtest_env["MOZ_XRE_DIR"] = xre_path
+ gtest_env["MOZ_GMP_PATH"] = os.pathsep.join(
+ os.path.join(xre_path, p, "1.0")
+ for p in ('gmp-fake', 'gmp-fakeopenh264')
+ )
+
+ gtest_env[b"MOZ_RUN_GTEST"] = b"True"
+
+ if shuffle:
+ gtest_env[b"GTEST_SHUFFLE"] = b"True"
+
+ if tbpl_parser:
+ gtest_env[b"MOZ_TBPL_PARSER"] = b"True"
+
+ if jobs == 1:
+ return self.run_process(args=args,
+ append_env=gtest_env,
+ cwd=cwd,
+ ensure_exit_code=False,
+ pass_thru=True)
+
+ from mozprocess import ProcessHandlerMixin
+ import functools
+ def handle_line(job_id, line):
+ # Prepend the jobId
+ line = '[%d] %s' % (job_id + 1, line.strip())
+ self.log(logging.INFO, "GTest", {'line': line}, '{line}')
+
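+ # GTest natively supports sharding through the GTEST_TOTAL_SHARDS
+ # and GTEST_SHARD_INDEX environment variables: each shard runs a
+ # disjoint subset of the tests, so the processes started below
+ # together cover the whole suite.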
+ gtest_env["GTEST_TOTAL_SHARDS"] = str(jobs)
+ processes = {}
+ for i in range(0, jobs):
+ gtest_env["GTEST_SHARD_INDEX"] = str(i)
+ processes[i] = ProcessHandlerMixin([app_path, "-unittest"],
+ cwd=cwd,
+ env=gtest_env,
+ processOutputLine=[functools.partial(handle_line, i)],
+ universal_newlines=True)
+ processes[i].run()
+
+ exit_code = 0
+ for process in processes.values():
+ status = process.wait()
+ if status:
+ exit_code = status
+
+ # Clamp the exit code to 255: exit statuses are truncated modulo
+ # 256, so a multiple of 256 would otherwise wrap around to 0.
+ if exit_code > 255:
+ exit_code = 255
+
+ return exit_code
+
+ def prepend_debugger_args(self, args, debugger, debugger_args):
+ '''
+ Given an array with program arguments, prepend arguments to run it under a
+ debugger.
+
+ :param args: The executable and arguments used to run the process normally.
+ :param debugger: The debugger to use, or empty to use the default debugger.
+ :param debugger_args: Any additional parameters to pass to the debugger.
+ '''
+
+ import mozdebug
+
+ if not debugger:
+ # No debugger name was provided. Look for the default one on the
+ # current OS.
+ debugger = mozdebug.get_default_debugger_name(mozdebug.DebuggerSearch.KeepLooking)
+
+ if debugger:
+ debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
+ if not debuggerInfo:
+ print("Could not find a suitable debugger in your PATH.")
+ return 1
+
+ # Parameters come from the CLI. We need to convert them before
+ # their use.
+ if debugger_args:
+ from mozbuild import shellutil
+ try:
+ debugger_args = shellutil.split(debugger_args)
+ except shellutil.MetaCharacterException as e:
+ print("The --debugger_args you passed require a real shell to parse them.")
+ print("(We can't handle the %r character.)" % e.char)
+ return 1
+
+ # Prepend the debugger args.
+ args = [debuggerInfo.path] + debuggerInfo.args + args
+ return args
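+
+ # For illustration (paths and extra args are supplied by mozdebug):
+ #   prepend_debugger_args(['firefox', '-unittest'], 'gdb', None)
+ #   -> ['/usr/bin/gdb', <gdb default args>..., 'firefox', '-unittest']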
+
+@CommandProvider
+class ClangCommands(MachCommandBase):
+ @Command('clang-complete', category='devenv',
+ description='Generate a .clang_complete file.')
+ def clang_complete(self):
+ import shlex
+
+ build_vars = {}
+
+ def on_line(line):
+ elements = [s.strip() for s in line.split('=', 1)]
+
+ if len(elements) != 2:
+ return
+
+ build_vars[elements[0]] = elements[1]
+
+ try:
+ old_logger = self.log_manager.replace_terminal_handler(None)
+ self._run_make(target='showbuild', log=False, line_handler=on_line)
+ finally:
+ self.log_manager.replace_terminal_handler(old_logger)
+
+ def print_from_variable(name):
+ if name not in build_vars:
+ return
+
+ value = build_vars[name]
+
+ value = value.replace('-I.', '-I%s' % self.topobjdir)
+ value = value.replace(' .', ' %s' % self.topobjdir)
+ value = value.replace('-I..', '-I%s/..' % self.topobjdir)
+ value = value.replace(' ..', ' %s/..' % self.topobjdir)
+
+ args = shlex.split(value)
+ for i in range(0, len(args) - 1):
+ arg = args[i]
+
+ if arg.startswith(('-I', '-D')):
+ print(arg)
+ continue
+
+ if arg.startswith('-include'):
+ print(arg + ' ' + args[i + 1])
+ continue
+
+ print_from_variable('COMPILE_CXXFLAGS')
+
+ print('-I%s/ipc/chromium/src' % self.topsrcdir)
+ print('-I%s/ipc/glue' % self.topsrcdir)
+ print('-I%s/ipc/ipdl/_ipdlheaders' % self.topobjdir)
+
+
+@CommandProvider
+class Package(MachCommandBase):
+ """Package the built product for distribution."""
+
+ @Command('package', category='post-build',
+ description='Package the built product for distribution as an APK, DMG, etc.')
+ @CommandArgument('-v', '--verbose', action='store_true',
+ help='Verbose output for what commands the packaging process is running.')
+ def package(self, verbose=False):
+ ret = self._run_make(directory=".", target='package',
+ silent=not verbose, ensure_exit_code=False)
+ if ret == 0:
+ self.notify('Packaging complete')
+ return ret
+
+@CommandProvider
+class Install(MachCommandBase):
+ """Install a package."""
+
+ @Command('install', category='post-build',
+ description='Install the package on the machine, or on a device.')
+ @CommandArgument('--verbose', '-v', action='store_true',
+ help='Print verbose output when installing to an Android emulator.')
+ def install(self, verbose=False):
+ if conditions.is_android(self):
+ from mozrunner.devices.android_device import verify_android_device
+ verify_android_device(self, verbose=verbose)
+ ret = self._run_make(directory=".", target='install', ensure_exit_code=False)
+ if ret == 0:
+ self.notify('Install complete')
+ return ret
+
+@CommandProvider
+class RunProgram(MachCommandBase):
+ """Run the compiled program."""
+
+ prog_group = 'the compiled program'
+
+ @Command('run', category='post-build',
+ description='Run the compiled program, possibly under a debugger or DMD.')
+ @CommandArgument('params', nargs='...', group=prog_group,
+ help='Command-line arguments to be passed through to the program. Not specifying a --profile or -P option will result in a temporary profile being used.')
+ @CommandArgumentGroup(prog_group)
+ @CommandArgument('--remote', '-r', action='store_true', group=prog_group,
+ help='Do not pass the --no-remote argument by default.')
+ @CommandArgument('--background', '-b', action='store_true', group=prog_group,
+ help='Do not pass the --foreground argument by default on Mac.')
+ @CommandArgument('--noprofile', '-n', action='store_true', group=prog_group,
+ help='Do not pass the --profile argument by default.')
+ @CommandArgument('--disable-e10s', action='store_true', group=prog_group,
+ help='Run the program with electrolysis disabled.')
+
+ @CommandArgumentGroup('debugging')
+ @CommandArgument('--debug', action='store_true', group='debugging',
+ help='Enable the debugger. Not specifying a --debugger option will result in the default debugger being used.')
+ @CommandArgument('--debugger', default=None, type=str, group='debugging',
+ help='Name of debugger to use.')
+ @CommandArgument('--debugparams', default=None, metavar='params', type=str,
+ group='debugging',
+ help='Command-line arguments to pass to the debugger itself; split as the Bourne shell would.')
+ # Bug 933807 introduced JS_DISABLE_SLOW_SCRIPT_SIGNALS to avoid clever
+ # segfaults induced by the slow-script-detecting logic for Ion/Odin JITted
+ # code. If we don't pass this, the user will need to periodically type
+ # "continue" to (safely) resume execution. There are ways to implement
+ # automatic resuming; see the bug.
+ @CommandArgument('--slowscript', action='store_true', group='debugging',
+ help='Do not set the JS_DISABLE_SLOW_SCRIPT_SIGNALS env variable; when not set, recoverable but misleading SIGSEGV instances may occur in Ion/Odin JIT code.')
+
+ @CommandArgumentGroup('DMD')
+ @CommandArgument('--dmd', action='store_true', group='DMD',
+ help='Enable DMD. The following arguments have no effect without this.')
+ @CommandArgument('--mode', choices=['live', 'dark-matter', 'cumulative', 'scan'], group='DMD',
+ help='Profiling mode. The default is \'dark-matter\'.')
+ @CommandArgument('--stacks', choices=['partial', 'full'], group='DMD',
+ help='Allocation stack trace coverage. The default is \'partial\'.')
+ @CommandArgument('--show-dump-stats', action='store_true', group='DMD',
+ help='Show stats when doing dumps.')
+ def run(self, params, remote, background, noprofile, disable_e10s, debug,
+ debugger, debugparams, slowscript, dmd, mode, stacks, show_dump_stats):
+
+ if conditions.is_android(self):
+ # Running Firefox for Android is completely different
+ if dmd:
+ print("DMD is not supported for Firefox for Android")
+ return 1
+ from mozrunner.devices.android_device import verify_android_device, run_firefox_for_android
+ if not (debug or debugger or debugparams):
+ verify_android_device(self, install=True)
+ return run_firefox_for_android(self, params)
+ verify_android_device(self, install=True, debugger=True)
+ args = ['']
+
+ else:
+
+ try:
+ binpath = self.get_binary_path('app')
+ except Exception as e:
+ print("It looks like your program isn't built.",
+ "You can run |mach build| to build it.")
+ print(e)
+ return 1
+
+ args = [binpath]
+
+ if params:
+ args.extend(params)
+
+ if not remote:
+ args.append('-no-remote')
+
+ if not background and sys.platform == 'darwin':
+ args.append('-foreground')
+
+ no_profile_option_given = \
+ all(p not in params for p in ['-profile', '--profile', '-P'])
+ if no_profile_option_given and not noprofile:
+ path = os.path.join(self.topobjdir, 'tmp', 'scratch_user')
+ if not os.path.isdir(path):
+ os.makedirs(path)
+ args.append('-profile')
+ args.append(path)
+
+ extra_env = {'MOZ_CRASHREPORTER_DISABLE': '1'}
+ if disable_e10s:
+ extra_env['MOZ_FORCE_DISABLE_E10S'] = '1'
+
+ if debug or debugger or debugparams:
+ if 'INSIDE_EMACS' in os.environ:
+ self.log_manager.terminal_handler.setLevel(logging.WARNING)
+
+ import mozdebug
+ if not debugger:
+ # No debugger name was provided. Look for the default one on the
+ # current OS.
+ debugger = mozdebug.get_default_debugger_name(mozdebug.DebuggerSearch.KeepLooking)
+
+ if debugger:
+ self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugparams)
+ if not self.debuggerInfo:
+ print("Could not find a suitable debugger in your PATH.")
+ return 1
+
+ # Parameters come from the CLI. We need to convert them before
+ # their use.
+ if debugparams:
+ from mozbuild import shellutil
+ try:
+ debugparams = shellutil.split(debugparams)
+ except shellutil.MetaCharacterException as e:
+ print("The --debugparams you passed require a real shell to parse them.")
+ print("(We can't handle the %r character.)" % e.char)
+ return 1
+
+ if not slowscript:
+ extra_env['JS_DISABLE_SLOW_SCRIPT_SIGNALS'] = '1'
+
+ # Prepend the debugger args.
+ args = [self.debuggerInfo.path] + self.debuggerInfo.args + args
+
+ if dmd:
+ dmd_params = []
+
+ if mode:
+ dmd_params.append('--mode=' + mode)
+ if stacks:
+ dmd_params.append('--stacks=' + stacks)
+ if show_dump_stats:
+ dmd_params.append('--show-dump-stats=yes')
+
+ bin_dir = os.path.dirname(binpath)
+ lib_name = self.substs['DLL_PREFIX'] + 'dmd' + self.substs['DLL_SUFFIX']
+ dmd_lib = os.path.join(bin_dir, lib_name)
+ if not os.path.exists(dmd_lib):
+ print("Please build with |--enable-dmd| to use DMD.")
+ return 1
+
+ env_vars = {
+ "Darwin": {
+ "DYLD_INSERT_LIBRARIES": dmd_lib,
+ "LD_LIBRARY_PATH": bin_dir,
+ },
+ "Linux": {
+ "LD_PRELOAD": dmd_lib,
+ "LD_LIBRARY_PATH": bin_dir,
+ },
+ "WINNT": {
+ "MOZ_REPLACE_MALLOC_LIB": dmd_lib,
+ },
+ }
+
+ arch = self.substs['OS_ARCH']
+
+ if dmd_params:
+ env_vars[arch]["DMD"] = " ".join(dmd_params)
+
+ extra_env.update(env_vars.get(arch, {}))
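+
+ # For illustration, on Linux with --dmd --mode=live this injects
+ # LD_PRELOAD=<bin_dir>/libdmd.so, LD_LIBRARY_PATH=<bin_dir> and
+ # DMD='--mode=live' into the environment of the spawned process.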
+
+ return self.run_process(args=args, ensure_exit_code=False,
+ pass_thru=True, append_env=extra_env)
+
+@CommandProvider
+class Buildsymbols(MachCommandBase):
+ """Produce a package of debug symbols suitable for use with Breakpad."""
+
+ @Command('buildsymbols', category='post-build',
+ description='Produce a package of Breakpad-format symbols.')
+ def buildsymbols(self):
+ return self._run_make(directory=".", target='buildsymbols', ensure_exit_code=False)
+
+@CommandProvider
+class Makefiles(MachCommandBase):
+ @Command('empty-makefiles', category='build-dev',
+ description='Find empty Makefile.in in the tree.')
+ def empty(self):
+ import pymake.parser
+ import pymake.parserdata
+
+ IGNORE_VARIABLES = {
+ 'DEPTH': ('@DEPTH@',),
+ 'topsrcdir': ('@top_srcdir@',),
+ 'srcdir': ('@srcdir@',),
+ 'relativesrcdir': ('@relativesrcdir@',),
+ 'VPATH': ('@srcdir@',),
+ }
+
+ IGNORE_INCLUDES = [
+ 'include $(DEPTH)/config/autoconf.mk',
+ 'include $(topsrcdir)/config/config.mk',
+ 'include $(topsrcdir)/config/rules.mk',
+ ]
+
+ def is_statement_relevant(s):
+ if isinstance(s, pymake.parserdata.SetVariable):
+ exp = s.vnameexp
+ if not exp.is_static_string:
+ return True
+
+ if exp.s not in IGNORE_VARIABLES:
+ return True
+
+ return s.value not in IGNORE_VARIABLES[exp.s]
+
+ if isinstance(s, pymake.parserdata.Include):
+ if s.to_source() in IGNORE_INCLUDES:
+ return False
+
+ return True
+
+ for path in self._makefile_ins():
+ relpath = os.path.relpath(path, self.topsrcdir)
+ try:
+ statements = [s for s in pymake.parser.parsefile(path)
+ if is_statement_relevant(s)]
+
+ if not statements:
+ print(relpath)
+ except pymake.parser.SyntaxError:
+ print('Warning: Could not parse %s' % relpath, file=sys.stderr)
+
+ def _makefile_ins(self):
+ for root, dirs, files in os.walk(self.topsrcdir):
+ for f in files:
+ if f == 'Makefile.in':
+ yield os.path.join(root, f)
+
+@CommandProvider
+class MachDebug(MachCommandBase):
+ @Command('environment', category='build-dev',
+ description='Show info about the mach and build environment.')
+ @CommandArgument('--format', default='pretty',
+ choices=['pretty', 'client.mk', 'configure', 'json'],
+ help='Print data in the given format.')
+ @CommandArgument('--output', '-o', type=str,
+ help='Output to the given file.')
+ @CommandArgument('--verbose', '-v', action='store_true',
+ help='Print verbose output.')
+ def environment(self, format, output=None, verbose=False):
+ func = getattr(self, '_environment_%s' % format.replace('.', '_'))
+
+ if output:
+ # We want to preserve mtimes if the output file already exists
+ # and the content hasn't changed.
+ from mozbuild.util import FileAvoidWrite
+ with FileAvoidWrite(output) as out:
+ return func(out, verbose)
+ return func(sys.stdout, verbose)
+
+ def _environment_pretty(self, out, verbose):
+ state_dir = self._mach_context.state_dir
+ import platform
+ print('platform:\n\t%s' % platform.platform(), file=out)
+ print('python version:\n\t%s' % sys.version, file=out)
+ print('python prefix:\n\t%s' % sys.prefix, file=out)
+ print('mach cwd:\n\t%s' % self._mach_context.cwd, file=out)
+ print('os cwd:\n\t%s' % os.getcwd(), file=out)
+ print('mach directory:\n\t%s' % self._mach_context.topdir, file=out)
+ print('state directory:\n\t%s' % state_dir, file=out)
+
+ print('object directory:\n\t%s' % self.topobjdir, file=out)
+
+ if self.mozconfig['path']:
+ print('mozconfig path:\n\t%s' % self.mozconfig['path'], file=out)
+ if self.mozconfig['configure_args']:
+ print('mozconfig configure args:', file=out)
+ for arg in self.mozconfig['configure_args']:
+ print('\t%s' % arg, file=out)
+
+ if self.mozconfig['make_extra']:
+ print('mozconfig extra make args:', file=out)
+ for arg in self.mozconfig['make_extra']:
+ print('\t%s' % arg, file=out)
+
+ if self.mozconfig['make_flags']:
+ print('mozconfig make flags:', file=out)
+ for arg in self.mozconfig['make_flags']:
+ print('\t%s' % arg, file=out)
+
+ config = None
+
+ try:
+ config = self.config_environment
+
+ except Exception:
+ pass
+
+ if config:
+ print('config topsrcdir:\n\t%s' % config.topsrcdir, file=out)
+ print('config topobjdir:\n\t%s' % config.topobjdir, file=out)
+
+ if verbose:
+ print('config substitutions:', file=out)
+ for k in sorted(config.substs):
+ print('\t%s: %s' % (k, config.substs[k]), file=out)
+
+ print('config defines:', file=out)
+ for k in sorted(config.defines):
+ print('\t%s' % k, file=out)
+
+ def _environment_client_mk(self, out, verbose):
+ if self.mozconfig['make_extra']:
+ for arg in self.mozconfig['make_extra']:
+ print(arg, file=out)
+ if self.mozconfig['make_flags']:
+            print('MOZ_MAKE_FLAGS=%s' % ' '.join(self.mozconfig['make_flags']),
+                  file=out)
+ objdir = mozpath.normsep(self.topobjdir)
+ print('MOZ_OBJDIR=%s' % objdir, file=out)
+ if 'MOZ_CURRENT_PROJECT' in os.environ:
+ objdir = mozpath.join(objdir, os.environ['MOZ_CURRENT_PROJECT'])
+ print('OBJDIR=%s' % objdir, file=out)
+ if self.mozconfig['path']:
+ print('FOUND_MOZCONFIG=%s' % mozpath.normsep(self.mozconfig['path']),
+ file=out)
+
+ def _environment_json(self, out, verbose):
+ import json
+ class EnvironmentEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, MozbuildObject):
+ result = {
+ 'topsrcdir': obj.topsrcdir,
+ 'topobjdir': obj.topobjdir,
+ 'mozconfig': obj.mozconfig,
+ }
+ if verbose:
+ result['substs'] = obj.substs
+ result['defines'] = obj.defines
+ return result
+ elif isinstance(obj, set):
+ return list(obj)
+ return json.JSONEncoder.default(self, obj)
+ json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
+
+class ArtifactSubCommand(SubCommand):
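+    # Decorator combining SubCommand with the --tree/--job/--verbose
+    # arguments shared by all |mach artifact| subcommands: stacking
+    # @ArtifactSubCommand(...) on a handler is equivalent to applying
+    # SubCommand plus those CommandArgument decorators by hand.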
+ def __call__(self, func):
+ after = SubCommand.__call__(self, func)
+ jobchoices = {
+ 'android-api-15',
+ 'android-x86',
+ 'linux',
+ 'linux64',
+ 'macosx64',
+ 'win32',
+ 'win64'
+ }
+ args = [
+ CommandArgument('--tree', metavar='TREE', type=str,
+ help='Firefox tree.'),
+ CommandArgument('--job', metavar='JOB', choices=jobchoices,
+ help='Build job.'),
+ CommandArgument('--verbose', '-v', action='store_true',
+ help='Print verbose output.'),
+ ]
+ for arg in args:
+ after = arg(after)
+ return after
+
+
+@CommandProvider
+class PackageFrontend(MachCommandBase):
+ """Fetch and install binary artifacts from Mozilla automation."""
+
+ @Command('artifact', category='post-build',
+ description='Use pre-built artifacts to build Firefox.')
+ def artifact(self):
+ '''Download, cache, and install pre-built binary artifacts to build Firefox.
+
+ Use |mach build| as normal to freshen your installed binary libraries:
+ artifact builds automatically download, cache, and install binary
+ artifacts from Mozilla automation, replacing whatever may be in your
+ object directory. Use |mach artifact last| to see what binary artifacts
+ were last used.
+
+ Never build libxul again!
+
+ '''
+ pass
+
+ def _set_log_level(self, verbose):
+ self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
+
+ def _install_pip_package(self, package):
+ if os.environ.get('MOZ_AUTOMATION'):
+ self.virtualenv_manager._run_pip([
+ 'install',
+ package,
+ '--no-index',
+ '--find-links',
+ 'http://pypi.pub.build.mozilla.org/pub',
+ '--trusted-host',
+ 'pypi.pub.build.mozilla.org',
+ ])
+ return
+ self.virtualenv_manager.install_pip_package(package)
+
+ def _make_artifacts(self, tree=None, job=None, skip_cache=False):
+        # Undo PATH munging that will be done by activating the virtualenv,
+        # so that invoked subprocesses expecting to find system python
+        # (git cinnabar, in particular) will not find virtualenv python.
+ original_path = os.environ.get('PATH', '')
+ self._activate_virtualenv()
+ os.environ['PATH'] = original_path
+
+ for package in ('taskcluster==0.0.32',
+ 'mozregression==1.0.2'):
+ self._install_pip_package(package)
+
+ state_dir = self._mach_context.state_dir
+ cache_dir = os.path.join(state_dir, 'package-frontend')
+
+ try:
+ os.makedirs(cache_dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ import which
+
+ here = os.path.abspath(os.path.dirname(__file__))
+ build_obj = MozbuildObject.from_environment(cwd=here)
+
+ hg = None
+ if conditions.is_hg(build_obj):
+ if self._is_windows():
+ hg = which.which('hg.exe')
+ else:
+ hg = which.which('hg')
+
+ git = None
+ if conditions.is_git(build_obj):
+ if self._is_windows():
+ git = which.which('git.exe')
+ else:
+ git = which.which('git')
+
+ # Absolutely must come after the virtualenv is populated!
+ from mozbuild.artifacts import Artifacts
+ artifacts = Artifacts(tree, self.substs, self.defines, job,
+ log=self.log, cache_dir=cache_dir,
+ skip_cache=skip_cache, hg=hg, git=git,
+ topsrcdir=self.topsrcdir)
+ return artifacts
+
+ @ArtifactSubCommand('artifact', 'install',
+ 'Install a good pre-built artifact.')
+ @CommandArgument('source', metavar='SRC', nargs='?', type=str,
+        help='Where to fetch and install artifacts from. Can be an hg '
+        'revision, a remote URL, or a local file; if omitted, the current '
+        'hg repository is inspected.',
+ default=None)
+ @CommandArgument('--skip-cache', action='store_true',
+ help='Skip all local caches to force re-fetching remote artifacts.',
+ default=False)
+ def artifact_install(self, source=None, skip_cache=False, tree=None, job=None, verbose=False):
+ self._set_log_level(verbose)
+ artifacts = self._make_artifacts(tree=tree, job=job, skip_cache=skip_cache)
+
+ return artifacts.install_from(source, self.distdir)
+
+ @ArtifactSubCommand('artifact', 'last',
+ 'Print the last pre-built artifact installed.')
+ def artifact_print_last(self, tree=None, job=None, verbose=False):
+ self._set_log_level(verbose)
+ artifacts = self._make_artifacts(tree=tree, job=job)
+ artifacts.print_last()
+ return 0
+
+ @ArtifactSubCommand('artifact', 'print-cache',
+ 'Print local artifact cache for debugging.')
+ def artifact_print_cache(self, tree=None, job=None, verbose=False):
+ self._set_log_level(verbose)
+ artifacts = self._make_artifacts(tree=tree, job=job)
+ artifacts.print_cache()
+ return 0
+
+ @ArtifactSubCommand('artifact', 'clear-cache',
+ 'Delete local artifacts and reset local artifact cache.')
+ def artifact_clear_cache(self, tree=None, job=None, verbose=False):
+ self._set_log_level(verbose)
+ artifacts = self._make_artifacts(tree=tree, job=job)
+ artifacts.clear_cache()
+ return 0
+
+@CommandProvider
+class Vendor(MachCommandBase):
+ """Vendor third-party dependencies into the source repository."""
+
+ @Command('vendor', category='misc',
+ description='Vendor third-party dependencies into the source repository.')
+ def vendor(self):
+ self.parser.print_usage()
+ sys.exit(1)
+
+ @SubCommand('vendor', 'rust',
+ description='Vendor rust crates from crates.io into third_party/rust')
+ @CommandArgument('--ignore-modified', action='store_true',
+ help='Ignore modified files in current checkout',
+ default=False)
+ def vendor_rust(self, **kwargs):
+ from mozbuild.vendor_rust import VendorRust
+ vendor_command = self._spawn(VendorRust)
+ vendor_command.vendor(**kwargs)
diff --git a/python/mozbuild/mozbuild/makeutil.py b/python/mozbuild/mozbuild/makeutil.py
new file mode 100644
index 000000000..fcd45bed2
--- /dev/null
+++ b/python/mozbuild/mozbuild/makeutil.py
@@ -0,0 +1,186 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import re
+from types import StringTypes
+from collections import Iterable
+
+
+class Makefile(object):
+ '''Provides an interface for writing simple makefiles
+
+ Instances of this class are created, populated with rules, then
+ written.
+ '''
+
+ def __init__(self):
+ self._statements = []
+
+ def create_rule(self, targets=[]):
+ '''
+ Create a new rule in the makefile for the given targets.
+ Returns the corresponding Rule instance.
+ '''
+ rule = Rule(targets)
+ self._statements.append(rule)
+ return rule
+
+ def add_statement(self, statement):
+ '''
+        Add a raw statement to the makefile. Meant to be used for
+ simple variable assignments.
+ '''
+ self._statements.append(statement)
+
+ def dump(self, fh, removal_guard=True):
+ '''
+ Dump all the rules to the given file handle. Optionally (and by
+ default), add guard rules for file removals (empty rules for other
+        rules' dependencies).
+ '''
+ all_deps = set()
+ all_targets = set()
+ for statement in self._statements:
+ if isinstance(statement, Rule):
+ statement.dump(fh)
+ all_deps.update(statement.dependencies())
+ all_targets.update(statement.targets())
+ else:
+ fh.write('%s\n' % statement)
+ if removal_guard:
+ guard = Rule(sorted(all_deps - all_targets))
+ guard.dump(fh)
+
+
+class _SimpleOrderedSet(object):
+ '''
+    Simple ordered set, specialized for use in the Rule class below only.
+ It doesn't expose a complete API, and normalizes path separators
+ at insertion.
+ '''
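+    # Illustratively: on Windows (os.sep == '\\'), updating with
+    # ['a\\b', 'a/b', 'c'] stores ['a/b', 'c']; separators are
+    # normalized to '/' and duplicates dropped, insertion order kept.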
+ def __init__(self):
+ self._list = []
+ self._set = set()
+
+ def __nonzero__(self):
+ return bool(self._set)
+
+ def __iter__(self):
+ return iter(self._list)
+
+ def __contains__(self, key):
+ return key in self._set
+
+ def update(self, iterable):
+ def _add(iterable):
+ emitted = set()
+ for i in iterable:
+ i = i.replace(os.sep, '/')
+ if i not in self._set and i not in emitted:
+ yield i
+ emitted.add(i)
+ added = list(_add(iterable))
+ self._set.update(added)
+ self._list.extend(added)
+
+
+class Rule(object):
+ '''Class handling simple rules in the form:
+ target1 target2 ... : dep1 dep2 ...
+ command1
+ command2
+ ...
+ '''
+ def __init__(self, targets=[]):
+ self._targets = _SimpleOrderedSet()
+ self._dependencies = _SimpleOrderedSet()
+ self._commands = []
+ self.add_targets(targets)
+
+ def add_targets(self, targets):
+ '''Add additional targets to the rule.'''
+ assert isinstance(targets, Iterable) and not isinstance(targets, StringTypes)
+ self._targets.update(targets)
+ return self
+
+ def add_dependencies(self, deps):
+ '''Add dependencies to the rule.'''
+ assert isinstance(deps, Iterable) and not isinstance(deps, StringTypes)
+ self._dependencies.update(deps)
+ return self
+
+ def add_commands(self, commands):
+ '''Add commands to the rule.'''
+ assert isinstance(commands, Iterable) and not isinstance(commands, StringTypes)
+ self._commands.extend(commands)
+ return self
+
+ def targets(self):
+ '''Return an iterator on the rule targets.'''
+ # Ensure the returned iterator is actually just that, an iterator.
+ # Avoids caller fiddling with the set itself.
+ return iter(self._targets)
+
+ def dependencies(self):
+ '''Return an iterator on the rule dependencies.'''
+        return iter(d for d in self._dependencies if d not in self._targets)
+
+ def commands(self):
+ '''Return an iterator on the rule commands.'''
+ return iter(self._commands)
+
+ def dump(self, fh):
+ '''
+ Dump the rule to the given file handle.
+ '''
+ if not self._targets:
+ return
+ fh.write('%s:' % ' '.join(self._targets))
+ if self._dependencies:
+ fh.write(' %s' % ' '.join(self.dependencies()))
+ fh.write('\n')
+ for cmd in self._commands:
+ fh.write('\t%s\n' % cmd)
+
+
+# colon followed by anything except a slash (Windows path detection)
+_depfilesplitter = re.compile(r':(?![\\/])')
+
+
+def read_dep_makefile(fh):
+ """
+    Read the given file handle containing a dep makefile (a simple makefile
+    only containing dependencies) and return an iterator over the
+    corresponding Rules it contains. Removal guard rules are ignored.
+ """
+
+ rule = ''
+ for line in fh.readlines():
+ assert not line.startswith('\t')
+ line = line.strip()
+ if line.endswith('\\'):
+ rule += line[:-1]
+ else:
+ rule += line
+ split_rule = _depfilesplitter.split(rule, 1)
+ if len(split_rule) > 1 and split_rule[1].strip():
+ yield Rule(split_rule[0].strip().split()) \
+ .add_dependencies(split_rule[1].strip().split())
+ rule = ''
+
+ if rule:
+ raise Exception('Makefile finishes with a backslash. Expected more input.')
+
+def write_dep_makefile(fh, target, deps):
+ '''
+ Write a Makefile containing only target's dependencies to the file handle
+ specified.
+ '''
+ mk = Makefile()
+ rule = mk.create_rule(targets=[target])
+ rule.add_dependencies(deps)
+ mk.dump(fh, removal_guard=True)
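+
+
+def _example_dep_roundtrip():
+    # Minimal sketch tying the helpers above together: write a
+    # dependency-only makefile for one target, then read it back.
+    # The function and file names are illustrative only.
+    from StringIO import StringIO
+    fh = StringIO()
+    write_dep_makefile(fh, 'foo.o', ['foo.c', 'foo.h'])
+    fh.seek(0)
+    # The removal guard rule ('foo.c foo.h:') is skipped by the reader.
+    rules = list(read_dep_makefile(fh))
+    assert list(rules[0].targets()) == ['foo.o']
+    assert list(rules[0].dependencies()) == ['foo.c', 'foo.h']
+    return rules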
diff --git a/python/mozbuild/mozbuild/milestone.py b/python/mozbuild/mozbuild/milestone.py
new file mode 100644
index 000000000..c2aa78fcd
--- /dev/null
+++ b/python/mozbuild/mozbuild/milestone.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import os
+import re
+import sys
+
+
+def get_milestone_ab_with_num(milestone):
+ """
+    Returns the alpha or beta tag with its number (a1, a2, b3, ...).
+ """
+
+ match = re.search(r"([ab]\d+)", milestone)
+ if match:
+ return match.group(1)
+
+ return ""
+
+
+def get_official_milestone(path):
+ """
+ Returns the contents of the first line in `path` that starts with a digit.
+ """
+
+ with open(path) as fp:
+ for line in fp:
+ line = line.strip()
+ if line[:1].isdigit():
+ return line
+
+ raise Exception("Didn't find a line that starts with a digit.")
+
+
+def get_milestone_major(milestone):
+ """
+ Returns the major (first) part of the milestone.
+ """
+
+ return milestone.split('.')[0]
+
+
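+def _example_milestone_tags():
+    # Minimal sketch of the helpers above on sample milestone strings;
+    # the values are illustrative only.
+    assert get_milestone_major('52.0a1') == '52'
+    assert get_milestone_ab_with_num('52.0a1') == 'a1'
+    # Release milestones carry no alpha/beta tag.
+    assert get_milestone_ab_with_num('52.0.1') == ''
+
+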
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--uaversion', default=False, action='store_true')
+ parser.add_argument('--symbolversion', default=False, action='store_true')
+ parser.add_argument('--topsrcdir', metavar='TOPSRCDIR', required=True)
+ options = parser.parse_args(args)
+
+ milestone_file = os.path.join(options.topsrcdir, 'config', 'milestone.txt')
+
+ milestone = get_official_milestone(milestone_file)
+
+ if options.uaversion:
+ # Only expose the major milestone in the UA string, hide the patch
+ # level (bugs 572659 and 870868).
+ uaversion = "%s.0" % (get_milestone_major(milestone),)
+ print(uaversion)
+
+ elif options.symbolversion:
+ # Only expose major milestone and alpha version. Used for symbol
+ # versioning on Linux.
+ symbolversion = "%s%s" % (get_milestone_major(milestone),
+ get_milestone_ab_with_num(milestone))
+ print(symbolversion)
+ else:
+ print(milestone)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/mozconfig.py b/python/mozbuild/mozbuild/mozconfig.py
new file mode 100644
index 000000000..71267c1be
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozconfig.py
@@ -0,0 +1,485 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import filecmp
+import os
+import re
+import sys
+import subprocess
+import traceback
+
+from collections import defaultdict
+from mozpack import path as mozpath
+
+
+MOZ_MYCONFIG_ERROR = '''
+Defining the location of mozconfigs via the MOZ_MYCONFIG environment
+variable is deprecated. If you wish to define the mozconfig path via an
+environment variable, use MOZCONFIG instead.
+'''.strip()
+
+MOZCONFIG_LEGACY_PATH = '''
+You currently have a mozconfig at %s. This implicit location is no longer
+supported. Please move it to %s/.mozconfig or set an explicit path
+via the $MOZCONFIG environment variable.
+'''.strip()
+
+MOZCONFIG_BAD_EXIT_CODE = '''
+Evaluation of your mozconfig exited with an error. This could be triggered
+by a command inside your mozconfig failing. Please change your mozconfig
+to not error and/or to catch errors in executed commands.
+'''.strip()
+
+MOZCONFIG_BAD_OUTPUT = '''
+Evaluation of your mozconfig produced unexpected output. This could be
+triggered by a command inside your mozconfig failing or producing some warnings
+or error messages. Please change your mozconfig to not error and/or to catch
+errors in executed commands.
+'''.strip()
+
+
+class MozconfigFindException(Exception):
+ """Raised when a mozconfig location is not defined properly."""
+
+
+class MozconfigLoadException(Exception):
+ """Raised when a mozconfig could not be loaded properly.
+
+ This typically indicates a malformed or misbehaving mozconfig file.
+ """
+
+ def __init__(self, path, message, output=None):
+ self.path = path
+ self.output = output
+ Exception.__init__(self, message)
+
+
+class MozconfigLoader(object):
+ """Handles loading and parsing of mozconfig files."""
+
+ RE_MAKE_VARIABLE = re.compile('''
+ ^\s* # Leading whitespace
+ (?P<var>[a-zA-Z_0-9]+) # Variable name
+ \s* [?:]?= \s* # Assignment operator surrounded by optional
+ # spaces
+ (?P<value>.*$)''', # Everything else (likely the value)
+ re.VERBOSE)
+
+ # Default mozconfig files in the topsrcdir.
+ DEFAULT_TOPSRCDIR_PATHS = ('.mozconfig', 'mozconfig')
+
+ DEPRECATED_TOPSRCDIR_PATHS = ('mozconfig.sh', 'myconfig.sh')
+ DEPRECATED_HOME_PATHS = ('.mozconfig', '.mozconfig.sh', '.mozmyconfig.sh')
+
+ IGNORE_SHELL_VARIABLES = {'_'}
+
+ ENVIRONMENT_VARIABLES = {
+ 'CC', 'CXX', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'MOZ_OBJDIR',
+ }
+
+ AUTODETECT = object()
+
+ def __init__(self, topsrcdir):
+ self.topsrcdir = topsrcdir
+
+ @property
+ def _loader_script(self):
+ our_dir = os.path.abspath(os.path.dirname(__file__))
+
+ return os.path.join(our_dir, 'mozconfig_loader')
+
+ def find_mozconfig(self, env=os.environ):
+ """Find the active mozconfig file for the current environment.
+
+ This emulates the logic in mozconfig-find.
+
+        1) If ENV[MOZCONFIG] is set, use that.
+ 2) If $TOPSRCDIR/mozconfig or $TOPSRCDIR/.mozconfig exists, use it.
+ 3) If both exist or if there are legacy locations detected, error out.
+
+ The absolute path to the found mozconfig will be returned on success.
+ None will be returned if no mozconfig could be found. A
+ MozconfigFindException will be raised if there is a bad state,
+ including conditions from #3 above.
+ """
+ # Check for legacy methods first.
+
+ if 'MOZ_MYCONFIG' in env:
+ raise MozconfigFindException(MOZ_MYCONFIG_ERROR)
+
+ env_path = env.get('MOZCONFIG', None) or None
+ if env_path is not None:
+ if not os.path.isabs(env_path):
+ potential_roots = [self.topsrcdir, os.getcwd()]
+ # Attempt to eliminate duplicates for e.g.
+ # self.topsrcdir == os.curdir.
+ potential_roots = set(os.path.abspath(p) for p in potential_roots)
+ existing = [root for root in potential_roots
+ if os.path.exists(os.path.join(root, env_path))]
+ if len(existing) > 1:
+ # There are multiple files, but we might have a setup like:
+ #
+ # somedirectory/
+ # srcdir/
+ # objdir/
+ #
+ # MOZCONFIG=../srcdir/some/path/to/mozconfig
+ #
+ # and be configuring from the objdir. So even though we
+ # have multiple existing files, they are actually the same
+ # file.
+ mozconfigs = [os.path.join(root, env_path)
+ for root in existing]
+ if not all(map(lambda p1, p2: filecmp.cmp(p1, p2, shallow=False),
+ mozconfigs[:-1], mozconfigs[1:])):
+ raise MozconfigFindException(
+ 'MOZCONFIG environment variable refers to a path that ' +
+ 'exists in more than one of ' + ', '.join(potential_roots) +
+ '. Remove all but one.')
+ elif not existing:
+ raise MozconfigFindException(
+ 'MOZCONFIG environment variable refers to a path that ' +
+ 'does not exist in any of ' + ', '.join(potential_roots))
+
+ env_path = os.path.join(existing[0], env_path)
+ elif not os.path.exists(env_path): # non-relative path
+ raise MozconfigFindException(
+ 'MOZCONFIG environment variable refers to a path that '
+ 'does not exist: ' + env_path)
+
+ if not os.path.isfile(env_path):
+ raise MozconfigFindException(
+ 'MOZCONFIG environment variable refers to a '
+ 'non-file: ' + env_path)
+
+ srcdir_paths = [os.path.join(self.topsrcdir, p) for p in
+ self.DEFAULT_TOPSRCDIR_PATHS]
+ existing = [p for p in srcdir_paths if os.path.isfile(p)]
+
+ if env_path is None and len(existing) > 1:
+ raise MozconfigFindException('Multiple default mozconfig files '
+ 'present. Remove all but one. ' + ', '.join(existing))
+
+ path = None
+
+ if env_path is not None:
+ path = env_path
+ elif len(existing):
+ assert len(existing) == 1
+ path = existing[0]
+
+ if path is not None:
+ return os.path.abspath(path)
+
+ deprecated_paths = [os.path.join(self.topsrcdir, s) for s in
+ self.DEPRECATED_TOPSRCDIR_PATHS]
+
+ home = env.get('HOME', None)
+ if home is not None:
+ deprecated_paths.extend([os.path.join(home, s) for s in
+ self.DEPRECATED_HOME_PATHS])
+
+ for path in deprecated_paths:
+ if os.path.exists(path):
+ raise MozconfigFindException(
+ MOZCONFIG_LEGACY_PATH % (path, self.topsrcdir))
+
+ return None
+
+ def read_mozconfig(self, path=None, moz_build_app=None):
+ """Read the contents of a mozconfig into a data structure.
+
+ This takes the path to a mozconfig to load. If the given path is
+ AUTODETECT, will try to find a mozconfig from the environment using
+ find_mozconfig().
+
+        mozconfig files are shell scripts, so they can't simply be parsed.
+        Instead, the script is run in a wrapper which allows us to record
+        state from its execution. Thus, the output from a mozconfig is a
+        friendly static data structure.
+ """
+ if path is self.AUTODETECT:
+ path = self.find_mozconfig()
+
+ result = {
+ 'path': path,
+ 'topobjdir': None,
+ 'configure_args': None,
+ 'make_flags': None,
+ 'make_extra': None,
+ 'env': None,
+ 'vars': None,
+ }
+
+ if path is None:
+ return result
+
+ path = mozpath.normsep(path)
+
+ result['configure_args'] = []
+ result['make_extra'] = []
+ result['make_flags'] = []
+
+ env = dict(os.environ)
+
+ # Since mozconfig_loader is a shell script, running it "normally"
+ # actually leads to two shell executions on Windows. Avoid this by
+ # directly calling sh mozconfig_loader.
+ shell = 'sh'
+ if 'MOZILLABUILD' in os.environ:
+ shell = os.environ['MOZILLABUILD'] + '/msys/bin/sh'
+ if sys.platform == 'win32':
+ shell = shell + '.exe'
+
+ command = [shell, mozpath.normsep(self._loader_script),
+ mozpath.normsep(self.topsrcdir), path, sys.executable,
+ mozpath.join(mozpath.dirname(self._loader_script),
+ 'action', 'dump_env.py')]
+
+ try:
+ # We need to capture stderr because that's where the shell sends
+ # errors if execution fails.
+ output = subprocess.check_output(command, stderr=subprocess.STDOUT,
+ cwd=self.topsrcdir, env=env)
+ except subprocess.CalledProcessError as e:
+ lines = e.output.splitlines()
+
+ # Output before actual execution shouldn't be relevant.
+ try:
+ index = lines.index('------END_BEFORE_SOURCE')
+ lines = lines[index + 1:]
+ except ValueError:
+ pass
+
+ raise MozconfigLoadException(path, MOZCONFIG_BAD_EXIT_CODE, lines)
+
+ try:
+ parsed = self._parse_loader_output(output)
+ except AssertionError:
+ # _parse_loader_output uses assertions to verify the
+ # well-formedness of the shell output; when these fail, it
+ # generally means there was a problem with the output, but we
+ # include the assertion traceback just to be sure.
+ print('Assertion failed in _parse_loader_output:')
+ traceback.print_exc()
+ raise MozconfigLoadException(path, MOZCONFIG_BAD_OUTPUT,
+ output.splitlines())
+
+ def diff_vars(vars_before, vars_after):
+ set1 = set(vars_before.keys()) - self.IGNORE_SHELL_VARIABLES
+ set2 = set(vars_after.keys()) - self.IGNORE_SHELL_VARIABLES
+ added = set2 - set1
+ removed = set1 - set2
+ maybe_modified = set1 & set2
+ changed = {
+ 'added': {},
+ 'removed': {},
+ 'modified': {},
+ 'unmodified': {},
+ }
+
+ for key in added:
+ changed['added'][key] = vars_after[key]
+
+ for key in removed:
+ changed['removed'][key] = vars_before[key]
+
+ for key in maybe_modified:
+ if vars_before[key] != vars_after[key]:
+ changed['modified'][key] = (
+ vars_before[key], vars_after[key])
+ elif key in self.ENVIRONMENT_VARIABLES:
+                    # So that irrelevant environment variable changes don't
+                    # trigger a re-run of configure, only a known set of
+                    # environment variables is stored when unmodified.
+                    # Otherwise, changes such as using a different terminal
+                    # window, or even rebooting, would trigger reconfigures.
+ changed['unmodified'][key] = vars_after[key]
+
+ return changed
+
+ result['env'] = diff_vars(parsed['env_before'], parsed['env_after'])
+
+ # Environment variables also appear as shell variables, but that's
+ # uninteresting duplication of information. Filter them out.
+ filt = lambda x, y: {k: v for k, v in x.items() if k not in y}
+ result['vars'] = diff_vars(
+ filt(parsed['vars_before'], parsed['env_before']),
+ filt(parsed['vars_after'], parsed['env_after'])
+ )
+
+ result['configure_args'] = [self._expand(o) for o in parsed['ac']]
+
+ if moz_build_app is not None:
+ result['configure_args'].extend(self._expand(o) for o in
+ parsed['ac_app'][moz_build_app])
+
+ if 'MOZ_OBJDIR' in parsed['env_before']:
+ result['topobjdir'] = parsed['env_before']['MOZ_OBJDIR']
+
+ mk = [self._expand(o) for o in parsed['mk']]
+
+ for o in mk:
+ match = self.RE_MAKE_VARIABLE.match(o)
+
+ if match is None:
+ result['make_extra'].append(o)
+ continue
+
+ name, value = match.group('var'), match.group('value')
+
+ if name == 'MOZ_MAKE_FLAGS':
+ result['make_flags'] = value.split()
+ continue
+
+ if name == 'MOZ_OBJDIR':
+ result['topobjdir'] = value
+ continue
+
+ result['make_extra'].append(o)
+
+ return result
+
+ def _parse_loader_output(self, output):
+ mk_options = []
+ ac_options = []
+ ac_app_options = defaultdict(list)
+ before_source = {}
+ after_source = {}
+ env_before_source = {}
+ env_after_source = {}
+
+ current = None
+ current_type = None
+ in_variable = None
+
+ for line in output.splitlines():
+
+ # XXX This is an ugly hack. Data may be lost from things
+ # like environment variable values.
+ # See https://bugzilla.mozilla.org/show_bug.cgi?id=831381
+ line = line.decode('mbcs' if sys.platform == 'win32' else 'utf-8',
+ 'ignore')
+
+ if not line:
+ continue
+
+ if line.startswith('------BEGIN_'):
+ assert current_type is None
+ assert current is None
+ assert not in_variable
+ current_type = line[len('------BEGIN_'):]
+ current = []
+ continue
+
+ if line.startswith('------END_'):
+ assert not in_variable
+ section = line[len('------END_'):]
+ assert current_type == section
+
+ if current_type == 'AC_OPTION':
+ ac_options.append('\n'.join(current))
+ elif current_type == 'MK_OPTION':
+ mk_options.append('\n'.join(current))
+ elif current_type == 'AC_APP_OPTION':
+ app = current.pop(0)
+ ac_app_options[app].append('\n'.join(current))
+
+ current = None
+ current_type = None
+ continue
+
+ assert current_type is not None
+
+ vars_mapping = {
+ 'BEFORE_SOURCE': before_source,
+ 'AFTER_SOURCE': after_source,
+ 'ENV_BEFORE_SOURCE': env_before_source,
+ 'ENV_AFTER_SOURCE': env_after_source,
+ }
+
+ if current_type in vars_mapping:
+ # mozconfigs are sourced using the Bourne shell (or at least
+ # in Bourne shell mode). This means |set| simply lists
+ # variables from the current shell (not functions). (Note that
+ # if Bash is installed in /bin/sh it acts like regular Bourne
+ # and doesn't print functions.) So, lines should have the
+ # form:
+ #
+ # key='value'
+ # key=value
+ #
+ # The only complication is multi-line variables. Those have the
+ # form:
+ #
+ # key='first
+ # second'
+
+ # TODO Bug 818377 Properly handle multi-line variables of form:
+ # $ foo="a='b'
+ # c='d'"
+ # $ set
+ # foo='a='"'"'b'"'"'
+ # c='"'"'d'"'"
+
+ name = in_variable
+ value = None
+ if in_variable:
+ # Reached the end of a multi-line variable.
+ if line.endswith("'") and not line.endswith("\\'"):
+ current.append(line[:-1])
+ value = '\n'.join(current)
+ in_variable = None
+ else:
+ current.append(line)
+ continue
+ else:
+ equal_pos = line.find('=')
+
+ if equal_pos < 1:
+ # TODO log warning?
+ continue
+
+ name = line[0:equal_pos]
+ value = line[equal_pos + 1:]
+
+ if len(value):
+ has_quote = value[0] == "'"
+
+ if has_quote:
+ value = value[1:]
+
+ # Lines with a quote not ending in a quote are multi-line.
+ if has_quote and not value.endswith("'"):
+ in_variable = name
+ current.append(value)
+ continue
+ else:
+ value = value[:-1] if has_quote else value
+
+ assert name is not None
+
+ vars_mapping[current_type][name] = value
+
+ current = []
+
+ continue
+
+ current.append(line)
+
+ return {
+ 'mk': mk_options,
+ 'ac': ac_options,
+ 'ac_app': ac_app_options,
+ 'vars_before': before_source,
+ 'vars_after': after_source,
+ 'env_before': env_before_source,
+ 'env_after': env_after_source,
+ }
+
+ def _expand(self, s):
+ return s.replace('@TOPSRCDIR@', self.topsrcdir)
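+
+
+def _example_find_and_read(topsrcdir):
+    # Minimal usage sketch for MozconfigLoader; `topsrcdir` is any
+    # source checkout path and is illustrative only.
+    loader = MozconfigLoader(topsrcdir)
+    path = loader.find_mozconfig()
+    # read_mozconfig() runs the mozconfig through the wrapper script
+    # and returns a dict of configure args, make flags and env changes.
+    result = loader.read_mozconfig(path)
+    return result['configure_args'], result['make_flags']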
diff --git a/python/mozbuild/mozbuild/mozconfig_loader b/python/mozbuild/mozbuild/mozconfig_loader
new file mode 100755
index 000000000..6b1e05dce
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozconfig_loader
@@ -0,0 +1,80 @@
+#!/bin/sh
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script provides an execution environment for mozconfig scripts.
+# This script is not meant to be called by users. Instead, some
+# higher-level driver invokes it and parses the machine-tailored output.
+
+set -e
+
+ac_add_options() {
+ local opt
+ for opt; do
+ case "$opt" in
+ --target=*)
+ echo "------BEGIN_MK_OPTION"
+ echo $opt | sed s/--target/CONFIG_GUESS/
+ echo "------END_MK_OPTION"
+ ;;
+ esac
+ echo "------BEGIN_AC_OPTION"
+ echo $opt
+ echo "------END_AC_OPTION"
+ done
+}
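+# Illustratively, a mozconfig line `ac_add_options --enable-debug` is
+# echoed back between sentinels as:
+#   ------BEGIN_AC_OPTION
+#   --enable-debug
+#   ------END_AC_OPTION
+# which _parse_loader_output() in mozconfig.py then consumes.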
+
+ac_add_app_options() {
+ local app
+ app=$1
+ shift
+ echo "------BEGIN_AC_APP_OPTION"
+ echo $app
+ echo "$*"
+ echo "------END_AC_APP_OPTION"
+}
+
+mk_add_options() {
+ local opt name op value
+ for opt; do
+ echo "------BEGIN_MK_OPTION"
+ echo $opt
+ # Remove any leading "export"
+ opt=${opt#export}
+ case "$opt" in
+ *\?=*) op="?=" ;;
+ *:=*) op=":=" ;;
+ *+=*) op="+=" ;;
+ *=*) op="=" ;;
+ esac
+ # Remove the operator and the value that follows
+ name=${opt%%${op}*}
+    # Note: $(echo ${name}) strips any leading and trailing whitespace
+    # from the variable name.
+ eval "$(echo ${name})_IS_SET=1"
+ echo "------END_MK_OPTION"
+ done
+}
+
+echo "------BEGIN_ENV_BEFORE_SOURCE"
+$3 $4
+echo "------END_ENV_BEFORE_SOURCE"
+
+echo "------BEGIN_BEFORE_SOURCE"
+set
+echo "------END_BEFORE_SOURCE"
+
+topsrcdir=$1
+
+. $2
+
+unset topsrcdir
+
+echo "------BEGIN_AFTER_SOURCE"
+set
+echo "------END_AFTER_SOURCE"
+
+echo "------BEGIN_ENV_AFTER_SOURCE"
+$3 $4
+echo "------END_ENV_AFTER_SOURCE"
diff --git a/python/mozbuild/mozbuild/mozinfo.py b/python/mozbuild/mozbuild/mozinfo.py
new file mode 100755
index 000000000..f0b0df9bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozinfo.py
@@ -0,0 +1,160 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module produces a JSON file that provides basic build info and
+# configuration metadata.
+
+from __future__ import absolute_import
+
+import os
+import re
+import json
+
+
+def build_dict(config, env=os.environ):
+ """
+ Build a dict containing data about the build configuration from
+ the environment.
+ """
+ substs = config.substs
+
+ # Check that all required variables are present first.
+ required = ["TARGET_CPU", "OS_TARGET"]
+ missing = [r for r in required if r not in substs]
+ if missing:
+ raise Exception("Missing required environment variables: %s" %
+ ', '.join(missing))
+
+ d = {}
+ d['topsrcdir'] = config.topsrcdir
+
+ if config.mozconfig:
+ d['mozconfig'] = config.mozconfig
+
+ # os
+ o = substs["OS_TARGET"]
+ known_os = {"Linux": "linux",
+ "WINNT": "win",
+ "Darwin": "mac",
+ "Android": "b2g" if substs.get("MOZ_WIDGET_TOOLKIT") == "gonk" else "android"}
+ if o in known_os:
+ d["os"] = known_os[o]
+ else:
+ # Allow unknown values, just lowercase them.
+ d["os"] = o.lower()
+
+ # Widget toolkit, just pass the value directly through.
+ d["toolkit"] = substs.get("MOZ_WIDGET_TOOLKIT")
+
+ # Application name
+ if 'MOZ_APP_NAME' in substs:
+ d["appname"] = substs["MOZ_APP_NAME"]
+
+ # Build app name
+ if 'MOZ_MULET' in substs and substs.get('MOZ_MULET') == "1":
+ d["buildapp"] = "mulet"
+ elif 'MOZ_BUILD_APP' in substs:
+ d["buildapp"] = substs["MOZ_BUILD_APP"]
+
+ # processor
+ p = substs["TARGET_CPU"]
+ # for universal mac builds, put in a special value
+ if d["os"] == "mac" and "UNIVERSAL_BINARY" in substs and substs["UNIVERSAL_BINARY"] == "1":
+ p = "universal-x86-x86_64"
+ else:
+ # do some slight massaging for some values
+ #TODO: retain specific values in case someone wants them?
+ if p.startswith("arm"):
+ p = "arm"
+ elif re.match("i[3-9]86", p):
+ p = "x86"
+ d["processor"] = p
+ # hardcoded list of 64-bit CPUs
+ if p in ["x86_64", "ppc64"]:
+ d["bits"] = 64
+ # hardcoded list of known 32-bit CPUs
+ elif p in ["x86", "arm", "ppc"]:
+ d["bits"] = 32
+ # other CPUs will wind up with unknown bits
+
+ d['debug'] = substs.get('MOZ_DEBUG') == '1'
+ d['nightly_build'] = substs.get('NIGHTLY_BUILD') == '1'
+ d['release_or_beta'] = substs.get('RELEASE_OR_BETA') == '1'
+ d['pgo'] = substs.get('MOZ_PGO') == '1'
+ d['crashreporter'] = bool(substs.get('MOZ_CRASHREPORTER'))
+ d['datareporting'] = bool(substs.get('MOZ_DATA_REPORTING'))
+ d['healthreport'] = substs.get('MOZ_SERVICES_HEALTHREPORT') == '1'
+ d['sync'] = substs.get('MOZ_SERVICES_SYNC') == '1'
+ d['asan'] = substs.get('MOZ_ASAN') == '1'
+ d['tsan'] = substs.get('MOZ_TSAN') == '1'
+ d['telemetry'] = substs.get('MOZ_TELEMETRY_REPORTING') == '1'
+ d['tests_enabled'] = substs.get('ENABLE_TESTS') == "1"
+ d['bin_suffix'] = substs.get('BIN_SUFFIX', '')
+ d['addon_signing'] = substs.get('MOZ_ADDON_SIGNING') == '1'
+ d['require_signing'] = substs.get('MOZ_REQUIRE_SIGNING') == '1'
+ d['official'] = bool(substs.get('MOZILLA_OFFICIAL'))
+ d['sm_promise'] = bool(substs.get('SPIDERMONKEY_PROMISE'))
+
+ def guess_platform():
+ if d['buildapp'] in ('browser', 'mulet'):
+ p = d['os']
+ if p == 'mac':
+ p = 'macosx64'
+ elif d['bits'] == 64:
+ p = '{}64'.format(p)
+ elif p in ('win',):
+ p = '{}32'.format(p)
+
+ if d['buildapp'] == 'mulet':
+ p = '{}-mulet'.format(p)
+
+ if d['asan']:
+ p = '{}-asan'.format(p)
+
+ return p
+
+ if d['buildapp'] == 'b2g':
+ if d['toolkit'] == 'gonk':
+ return 'emulator'
+
+ if d['bits'] == 64:
+ return 'linux64_gecko'
+ return 'linux32_gecko'
+
+ if d['buildapp'] == 'mobile/android':
+ if d['processor'] == 'x86':
+ return 'android-x86'
+ return 'android-arm'
+
+ def guess_buildtype():
+ if d['debug']:
+ return 'debug'
+ if d['pgo']:
+ return 'pgo'
+ return 'opt'
+
+ # if buildapp or bits are unknown, we don't have a configuration similar to
+ # any in automation and the guesses are useless.
+ if 'buildapp' in d and (d['os'] == 'mac' or 'bits' in d):
+ d['platform_guess'] = guess_platform()
+ d['buildtype_guess'] = guess_buildtype()
+
+ if 'buildapp' in d and d['buildapp'] == 'mobile/android' and 'MOZ_ANDROID_MIN_SDK_VERSION' in substs:
+ d['android_min_sdk'] = substs['MOZ_ANDROID_MIN_SDK_VERSION']
+
+ return d
+
+
+def write_mozinfo(file, config, env=os.environ):
+ """Write JSON data about the configuration specified in config and an
+ environment variable dict to |file|, which may be a filename or file-like
+ object.
+ See build_dict for information about what environment variables are used,
+ and what keys are produced.
+ """
+ build_conf = build_dict(config, env)
+ if isinstance(file, basestring):
+ file = open(file, 'wb')
+
+ json.dump(build_conf, file, sort_keys=True, indent=4)
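+
+
+def _example_build_dict():
+    # Minimal sketch of build_dict() on a hand-rolled config object;
+    # FakeConfig and its values are illustrative only.
+    class FakeConfig(object):
+        topsrcdir = '/src'
+        mozconfig = None
+        substs = {
+            'OS_TARGET': 'Linux',
+            'TARGET_CPU': 'x86_64',
+            'MOZ_WIDGET_TOOLKIT': 'gtk3',
+            'MOZ_BUILD_APP': 'browser',
+        }
+    d = build_dict(FakeConfig())
+    # Expect d['os'] == 'linux', d['bits'] == 64,
+    # d['platform_guess'] == 'linux64'.
+    return d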
diff --git a/python/mozbuild/mozbuild/preprocessor.py b/python/mozbuild/mozbuild/preprocessor.py
new file mode 100644
index 000000000..e8aac7057
--- /dev/null
+++ b/python/mozbuild/mozbuild/preprocessor.py
@@ -0,0 +1,805 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This is a very primitive line-based preprocessor, for times when using
+a C preprocessor isn't an option.
+
+It currently supports the following grammar for expressions; whitespace is
+ignored:
+
+expression :
+ and_cond ( '||' expression ) ? ;
+and_cond:
+ test ( '&&' and_cond ) ? ;
+test:
+ unary ( ( '==' | '!=' ) unary ) ? ;
+unary :
+ '!'? value ;
+value :
+ [0-9]+ # integer
+ | 'defined(' \w+ ')'
+ | \w+ # string identifier or value;
+"""
+
+import sys
+import os
+import re
+from optparse import OptionParser
+import errno
+from makeutil import Makefile
+
+# hack around win32 mangling our line endings
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
+if sys.platform == "win32":
+ import msvcrt
+ msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+ os.linesep = '\n'
+
+
+__all__ = [
+ 'Context',
+ 'Expression',
+ 'Preprocessor',
+ 'preprocess'
+]
+
+
+class Expression:
+ def __init__(self, expression_string):
+ """
+        Create a new expression from the given string.
+        The expression is parsed into an Abstract Syntax Tree immediately.
+ """
+ self.content = expression_string
+ self.offset = 0
+ self.__ignore_whitespace()
+ self.e = self.__get_logical_or()
+ if self.content:
+ raise Expression.ParseError, self
+
+ def __get_logical_or(self):
+ """
+ Production: and_cond ( '||' expression ) ?
+ """
+ if not len(self.content):
+ return None
+ rv = Expression.__AST("logical_op")
+ # test
+ rv.append(self.__get_logical_and())
+ self.__ignore_whitespace()
+ if self.content[:2] != '||':
+ # no logical op needed, short cut to our prime element
+ return rv[0]
+ # append operator
+ rv.append(Expression.__ASTLeaf('op', self.content[:2]))
+ self.__strip(2)
+ self.__ignore_whitespace()
+ rv.append(self.__get_logical_or())
+ self.__ignore_whitespace()
+ return rv
+
+ def __get_logical_and(self):
+ """
+ Production: test ( '&&' and_cond ) ?
+ """
+ if not len(self.content):
+ return None
+ rv = Expression.__AST("logical_op")
+ # test
+ rv.append(self.__get_equality())
+ self.__ignore_whitespace()
+ if self.content[:2] != '&&':
+ # no logical op needed, short cut to our prime element
+ return rv[0]
+ # append operator
+ rv.append(Expression.__ASTLeaf('op', self.content[:2]))
+ self.__strip(2)
+ self.__ignore_whitespace()
+ rv.append(self.__get_logical_and())
+ self.__ignore_whitespace()
+ return rv
+
+ def __get_equality(self):
+ """
+ Production: unary ( ( '==' | '!=' ) unary ) ?
+ """
+ if not len(self.content):
+ return None
+ rv = Expression.__AST("equality")
+ # unary
+ rv.append(self.__get_unary())
+ self.__ignore_whitespace()
+ if not re.match('[=!]=', self.content):
+ # no equality needed, short cut to our prime unary
+ return rv[0]
+ # append operator
+ rv.append(Expression.__ASTLeaf('op', self.content[:2]))
+ self.__strip(2)
+ self.__ignore_whitespace()
+ rv.append(self.__get_unary())
+ self.__ignore_whitespace()
+ return rv
+
+ def __get_unary(self):
+ """
+ Production: '!'? value
+ """
+ # eat whitespace right away, too
+ not_ws = re.match('!\s*', self.content)
+ if not not_ws:
+ return self.__get_value()
+ rv = Expression.__AST('not')
+ self.__strip(not_ws.end())
+ rv.append(self.__get_value())
+ self.__ignore_whitespace()
+ return rv
+
+ def __get_value(self):
+ """
+ Production: ( [0-9]+ | 'defined(' \w+ ')' | \w+ )
+ Note that the order is important, and the expression is kind-of
+ ambiguous as \w includes 0-9. One could make it unambiguous by
+ removing 0-9 from the first char of a string literal.
+ """
+ rv = None
+ m = re.match('defined\s*\(\s*(\w+)\s*\)', self.content)
+ if m:
+ word_len = m.end()
+ rv = Expression.__ASTLeaf('defined', m.group(1))
+ else:
+ word_len = re.match('[0-9]*', self.content).end()
+ if word_len:
+ value = int(self.content[:word_len])
+ rv = Expression.__ASTLeaf('int', value)
+ else:
+ word_len = re.match('\w*', self.content).end()
+ if word_len:
+ rv = Expression.__ASTLeaf('string', self.content[:word_len])
+ else:
+ raise Expression.ParseError, self
+ self.__strip(word_len)
+ self.__ignore_whitespace()
+ return rv
+
+ def __ignore_whitespace(self):
+ ws_len = re.match('\s*', self.content).end()
+ self.__strip(ws_len)
+ return
+
+ def __strip(self, length):
+ """
+ Remove a given amount of chars from the input and update
+ the offset.
+ """
+ self.content = self.content[length:]
+ self.offset += length
+
+ def evaluate(self, context):
+ """
+ Evaluate the expression with the given context
+ """
+
+ # Helper function to evaluate __get_equality results
+ def eval_equality(tok):
+ left = opmap[tok[0].type](tok[0])
+ right = opmap[tok[2].type](tok[2])
+ rv = left == right
+ if tok[1].value == '!=':
+ rv = not rv
+ return rv
+ # Helper function to evaluate __get_logical_and and __get_logical_or results
+ def eval_logical_op(tok):
+ left = opmap[tok[0].type](tok[0])
+ right = opmap[tok[2].type](tok[2])
+ if tok[1].value == '&&':
+ return left and right
+ elif tok[1].value == '||':
+ return left or right
+ raise Expression.ParseError, self
+
+ # Mapping from token types to evaluator functions
+ # Apart from (non-)equality, all these can be simple lambda forms.
+ opmap = {
+ 'logical_op': eval_logical_op,
+ 'equality': eval_equality,
+ 'not': lambda tok: not opmap[tok[0].type](tok[0]),
+ 'string': lambda tok: context[tok.value],
+ 'defined': lambda tok: tok.value in context,
+ 'int': lambda tok: tok.value}
+
+        return opmap[self.e.type](self.e)
+
+ class __AST(list):
+ """
+ Internal class implementing Abstract Syntax Tree nodes
+ """
+ def __init__(self, type):
+ self.type = type
+            super(self.__class__, self).__init__()
+
+ class __ASTLeaf:
+ """
+        Internal class implementing Abstract Syntax Tree leaves
+ """
+ def __init__(self, type, value):
+ self.value = value
+ self.type = type
+ def __str__(self):
+ return self.value.__str__()
+ def __repr__(self):
+ return self.value.__repr__()
+
+ class ParseError(StandardError):
+ """
+ Error raised when parsing fails.
+ It has two members, offset and content, which give the offset of the
+ error and the offending content.
+ """
+ def __init__(self, expression):
+ self.offset = expression.offset
+ self.content = expression.content[:3]
+ def __str__(self):
+ return 'Unexpected content at offset {0}, "{1}"'.format(self.offset,
+ self.content)
+
+class Context(dict):
+ """
+ This class holds variable values by subclassing dict, and while it
+ truthfully reports True and False on
+
+ name in context
+
+ it returns the variable name itself on
+
+ context["name"]
+
+ to reflect the ambiguity between string literals and preprocessor
+ variables.
+ """
+ def __getitem__(self, key):
+ if key in self:
+ return super(self.__class__, self).__getitem__(key)
+ return key
+
+
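+def _example_expression_eval():
+    # Minimal sketch of Expression/Context evaluation; variable names
+    # and values are illustrative only.
+    ctx = Context()
+    ctx['FOO'] = 'baz'
+    # 'baz' is not a defined variable, so Context returns the name
+    # itself and the comparison below holds.
+    e = Expression('defined(FOO) && FOO == baz')
+    return e.evaluate(ctx)  # True
+
+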
+class Preprocessor:
+ """
+ Class for preprocessing text files.
+ """
+ class Error(RuntimeError):
+ def __init__(self, cpp, MSG, context):
+ self.file = cpp.context['FILE']
+ self.line = cpp.context['LINE']
+ self.key = MSG
+ RuntimeError.__init__(self, (self.file, self.line, self.key, context))
+
+ def __init__(self, defines=None, marker='#'):
+ self.context = Context()
+ for k,v in {'FILE': '',
+ 'LINE': 0,
+ 'DIRECTORY': os.path.abspath('.')}.iteritems():
+ self.context[k] = v
+ self.actionLevel = 0
+ self.disableLevel = 0
+ # ifStates can be
+ # 0: hadTrue
+ # 1: wantsTrue
+ # 2: #else found
+ self.ifStates = []
+ self.checkLineNumbers = False
+ self.filters = []
+ self.cmds = {}
+ for cmd, level in {'define': 0,
+ 'undef': 0,
+ 'if': sys.maxint,
+ 'ifdef': sys.maxint,
+ 'ifndef': sys.maxint,
+ 'else': 1,
+ 'elif': 1,
+ 'elifdef': 1,
+ 'elifndef': 1,
+ 'endif': sys.maxint,
+ 'expand': 0,
+ 'literal': 0,
+ 'filter': 0,
+ 'unfilter': 0,
+ 'include': 0,
+ 'includesubst': 0,
+ 'error': 0}.iteritems():
+ self.cmds[cmd] = (level, getattr(self, 'do_' + cmd))
+ self.out = sys.stdout
+ self.setMarker(marker)
+ self.varsubst = re.compile('@(?P<VAR>\w+)@', re.U)
+ self.includes = set()
+ self.silenceMissingDirectiveWarnings = False
+ if defines:
+ self.context.update(defines)
+
+ def failUnused(self, file):
+ msg = None
+ if self.actionLevel == 0 and not self.silenceMissingDirectiveWarnings:
+ msg = 'no preprocessor directives found'
+ elif self.actionLevel == 1:
+ msg = 'no useful preprocessor directives found'
+ if msg:
+ class Fake(object): pass
+ fake = Fake()
+ fake.context = {
+ 'FILE': file,
+ 'LINE': None,
+ }
+ raise Preprocessor.Error(fake, msg, None)
+
+ def setMarker(self, aMarker):
+ """
+ Set the marker to be used for processing directives.
+ Used for handling CSS files, with pp.setMarker('%'), for example.
+ The given marker may be None, in which case no markers are processed.
+ """
+ self.marker = aMarker
+ if aMarker:
+ self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
+ .format(aMarker),
+ re.U)
+ self.comment = re.compile(aMarker, re.U)
+ else:
+ class NoMatch(object):
+ def match(self, *args):
+ return False
+ self.instruction = self.comment = NoMatch()
+
+ def setSilenceDirectiveWarnings(self, value):
+ """
+ Sets whether missing directive warnings are silenced, according to
+ ``value``. The default behavior of the preprocessor is to emit
+ such warnings.
+ """
+ self.silenceMissingDirectiveWarnings = value
+
+ def addDefines(self, defines):
+ """
+ Adds the specified defines to the preprocessor.
+ ``defines`` may be a dictionary object or an iterable of key/value pairs
+ (as tuples or other iterables of length two)
+ """
+ self.context.update(defines)
+
+ def clone(self):
+ """
+ Create a clone of the current processor, including line ending
+ settings, marker, variable definitions, output stream.
+ """
+ rv = Preprocessor()
+ rv.context.update(self.context)
+ rv.setMarker(self.marker)
+ rv.out = self.out
+ return rv
+
+ def processFile(self, input, output, depfile=None):
+ """
+ Preprocesses the contents of the ``input`` stream and writes the result
+ to the ``output`` stream. If ``depfile`` is set, the dependencies of
+ ``output`` file are written to ``depfile`` in Makefile format.
+ """
+ self.out = output
+
+ self.do_include(input, False)
+ self.failUnused(input.name)
+
+ if depfile:
+ mk = Makefile()
+ mk.create_rule([output.name]).add_dependencies(self.includes)
+ mk.dump(depfile)
+
+ def computeDependencies(self, input):
+ """
+ Reads the ``input`` stream, and computes the dependencies for that input.
+ """
+ try:
+ old_out = self.out
+ self.out = None
+ self.do_include(input, False)
+
+ return self.includes
+ finally:
+ self.out = old_out
+
+ def applyFilters(self, aLine):
+ for f in self.filters:
+ aLine = f[1](aLine)
+ return aLine
+
+ def noteLineInfo(self):
+ # Record the current line and file. Called once before transitioning
+ # into or out of an included file and after writing each line.
+ self.line_info = self.context['FILE'], self.context['LINE']
+
+ def write(self, aLine):
+ """
+ Internal method for handling output.
+ """
+ if not self.out:
+ return
+
+ next_line, next_file = self.context['LINE'], self.context['FILE']
+ if self.checkLineNumbers:
+ expected_file, expected_line = self.line_info
+ expected_line += 1
+ if (expected_line != next_line or
+ expected_file and expected_file != next_file):
+ self.out.write('//@line {line} "{file}"\n'.format(line=next_line,
+ file=next_file))
+ self.noteLineInfo()
+
+ filteredLine = self.applyFilters(aLine)
+ if filteredLine != aLine:
+ self.actionLevel = 2
+ self.out.write(filteredLine)
+
+ def handleCommandLine(self, args, defaultToStdin = False):
+ """
+        Parse a command line into this parser.
+        Uses OptionParser internally; no args means sys.argv[1:].
+ """
+ def get_output_file(path):
+ dir = os.path.dirname(path)
+ if dir:
+ try:
+ os.makedirs(dir)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ return open(path, 'wb')
+
+ p = self.getCommandLineParser()
+ options, args = p.parse_args(args=args)
+ out = self.out
+ depfile = None
+
+ if options.output:
+ out = get_output_file(options.output)
+ if defaultToStdin and len(args) == 0:
+ args = [sys.stdin]
+ if options.depend:
+ raise Preprocessor.Error(self, "--depend doesn't work with stdin",
+ None)
+ if options.depend:
+ if not options.output:
+ raise Preprocessor.Error(self, "--depend doesn't work with stdout",
+ None)
+ try:
+ from makeutil import Makefile
+            except ImportError:
+ raise Preprocessor.Error(self, "--depend requires the "
+ "mozbuild.makeutil module", None)
+ depfile = get_output_file(options.depend)
+
+ if args:
+ for f in args:
+ with open(f, 'rU') as input:
+ self.processFile(input=input, output=out)
+ if depfile:
+ mk = Makefile()
+ mk.create_rule([options.output]).add_dependencies(self.includes)
+ mk.dump(depfile)
+ depfile.close()
+
+ if options.output:
+ out.close()
+
+ def getCommandLineParser(self, unescapeDefines = False):
+ escapedValue = re.compile('".*"$')
+ numberValue = re.compile('\d+$')
+ def handleD(option, opt, value, parser):
+ vals = value.split('=', 1)
+ if len(vals) == 1:
+ vals.append(1)
+ elif unescapeDefines and escapedValue.match(vals[1]):
+ # strip escaped string values
+ vals[1] = vals[1][1:-1]
+ elif numberValue.match(vals[1]):
+ vals[1] = int(vals[1])
+ self.context[vals[0]] = vals[1]
+ def handleU(option, opt, value, parser):
+ del self.context[value]
+ def handleF(option, opt, value, parser):
+ self.do_filter(value)
+ def handleMarker(option, opt, value, parser):
+ self.setMarker(value)
+ def handleSilenceDirectiveWarnings(option, opt, value, parse):
+ self.setSilenceDirectiveWarnings(True)
+ p = OptionParser()
+ p.add_option('-D', action='callback', callback=handleD, type="string",
+ metavar="VAR[=VAL]", help='Define a variable')
+ p.add_option('-U', action='callback', callback=handleU, type="string",
+ metavar="VAR", help='Undefine a variable')
+ p.add_option('-F', action='callback', callback=handleF, type="string",
+ metavar="FILTER", help='Enable the specified filter')
+ p.add_option('-o', '--output', type="string", default=None,
+ metavar="FILENAME", help='Output to the specified file '+
+ 'instead of stdout')
+ p.add_option('--depend', type="string", default=None, metavar="FILENAME",
+ help='Generate dependencies in the given file')
+ p.add_option('--marker', action='callback', callback=handleMarker,
+ type="string",
+ help='Use the specified marker instead of #')
+ p.add_option('--silence-missing-directive-warnings', action='callback',
+ callback=handleSilenceDirectiveWarnings,
+ help='Don\'t emit warnings about missing directives')
+ return p
+
+ def handleLine(self, aLine):
+ """
+ Handle a single line of input (internal).
+ """
+ if self.actionLevel == 0 and self.comment.match(aLine):
+ self.actionLevel = 1
+ m = self.instruction.match(aLine)
+ if m:
+ args = None
+ cmd = m.group('cmd')
+ try:
+ args = m.group('args')
+ except IndexError:
+ pass
+ if cmd not in self.cmds:
+ raise Preprocessor.Error(self, 'INVALID_CMD', aLine)
+ level, cmd = self.cmds[cmd]
+ if (level >= self.disableLevel):
+ cmd(args)
+ if cmd != 'literal':
+ self.actionLevel = 2
+ elif self.disableLevel == 0 and not self.comment.match(aLine):
+ self.write(aLine)
+
+ # Instruction handlers
+ # These are named do_'instruction name' and take one argument
+
+ # Variables
+ def do_define(self, args):
+ m = re.match('(?P<name>\w+)(?:\s(?P<value>.*))?', args, re.U)
+ if not m:
+ raise Preprocessor.Error(self, 'SYNTAX_DEF', args)
+ val = ''
+ if m.group('value'):
+ val = self.applyFilters(m.group('value'))
+ try:
+ val = int(val)
+            except ValueError:
+ pass
+ self.context[m.group('name')] = val
+ def do_undef(self, args):
+ m = re.match('(?P<name>\w+)$', args, re.U)
+ if not m:
+ raise Preprocessor.Error(self, 'SYNTAX_DEF', args)
+ if args in self.context:
+ del self.context[args]
+ # Logic
+ def ensure_not_else(self):
+ if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
+ sys.stderr.write('WARNING: bad nesting of #else\n')
+ def do_if(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ val = None
+ try:
+ e = Expression(args)
+ val = e.evaluate(self.context)
+ except Exception:
+ # XXX do real error reporting
+ raise Preprocessor.Error(self, 'SYNTAX_ERR', args)
+ if type(val) == str:
+ # we're looking for a number value, strings are false
+ val = False
+ if not val:
+ self.disableLevel = 1
+ if replace:
+ if val:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+ pass
+ def do_ifdef(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ if re.match('\W', args, re.U):
+ raise Preprocessor.Error(self, 'INVALID_VAR', args)
+ if args not in self.context:
+ self.disableLevel = 1
+ if replace:
+ if args in self.context:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+ pass
+ def do_ifndef(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ if re.match('\W', args, re.U):
+ raise Preprocessor.Error(self, 'INVALID_VAR', args)
+ if args in self.context:
+ self.disableLevel = 1
+ if replace:
+ if args not in self.context:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+ pass
+ def do_else(self, args, ifState = 2):
+ self.ensure_not_else()
+ hadTrue = self.ifStates[-1] == 0
+ self.ifStates[-1] = ifState # in-else
+ if hadTrue:
+ self.disableLevel = 1
+ return
+ self.disableLevel = 0
+ def do_elif(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_if(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+ def do_elifdef(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_ifdef(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+ def do_elifndef(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_ifndef(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+ def do_endif(self, args):
+ if self.disableLevel > 0:
+ self.disableLevel -= 1
+ if self.disableLevel == 0:
+ self.ifStates.pop()
+ # output processing
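+    # Illustratively, with FOO defined as 'bar', the line
+    # '#expand s=__FOO__' writes 's=bar\n'; names not in the context
+    # expand to the empty string.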
+ def do_expand(self, args):
+        # re.split's third positional argument is maxsplit, so pass the
+        # Unicode flag explicitly as the fourth argument.
+        lst = re.split('__(\w+)__', args, 0, re.U)
+ do_replace = False
+ def vsubst(v):
+ if v in self.context:
+ return str(self.context[v])
+ return ''
+ for i in range(1, len(lst), 2):
+ lst[i] = vsubst(lst[i])
+ lst.append('\n') # add back the newline
+ self.write(reduce(lambda x, y: x+y, lst, ''))
+ def do_literal(self, args):
+ self.write(args + '\n')
+ def do_filter(self, args):
+ filters = [f for f in args.split(' ') if hasattr(self, 'filter_' + f)]
+ if len(filters) == 0:
+ return
+ current = dict(self.filters)
+ for f in filters:
+ current[f] = getattr(self, 'filter_' + f)
+        self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+ def do_unfilter(self, args):
+ filters = args.split(' ')
+ current = dict(self.filters)
+ for f in filters:
+ if f in current:
+ del current[f]
+        self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+ # Filters
+ #
+ # emptyLines
+ # Strips blank lines from the output.
+ def filter_emptyLines(self, aLine):
+ if aLine == '\n':
+ return ''
+ return aLine
+ # slashslash
+ # Strips everything after //
+ def filter_slashslash(self, aLine):
+        if '//' not in aLine:
+ return aLine
+ [aLine, rest] = aLine.split('//', 1)
+ if rest:
+ aLine += '\n'
+ return aLine
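+    # e.g. 'width: 5px; // set width\n' becomes 'width: 5px; \n'.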
+ # spaces
+ # Collapses sequences of spaces into a single space
+ def filter_spaces(self, aLine):
+ return re.sub(' +', ' ', aLine).strip(' ')
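+    # e.g. '  a   b \n' becomes 'a b \n': runs of spaces collapse to one
+    # and leading spaces are stripped, while the newline survives.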
+    # substitution
+    # helper to be used by both substitution and attemptSubstitution
+ def filter_substitution(self, aLine, fatal=True):
+ def repl(matchobj):
+ varname = matchobj.group('VAR')
+ if varname in self.context:
+ return str(self.context[varname])
+ if fatal:
+ raise Preprocessor.Error(self, 'UNDEFINED_VAR', varname)
+ return matchobj.group(0)
+ return self.varsubst.sub(repl, aLine)
+ def filter_attemptSubstitution(self, aLine):
+ return self.filter_substitution(aLine, fatal=False)
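+    # Assuming the usual '@VAR@' form for self.varsubst, a line like
+    #   url = @SERVER@/index.html
+    # gets @SERVER@ replaced from the context; with fatal=True an undefined
+    # variable raises UNDEFINED_VAR, otherwise it is left in place.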
+ # File ops
+ def do_include(self, args, filters=True):
+ """
+ Preprocess a given file.
+ args can either be a file name, or a file-like object.
+ Files should be opened, and will be closed after processing.
+ """
+        isName = isinstance(args, basestring)
+ oldCheckLineNumbers = self.checkLineNumbers
+ self.checkLineNumbers = False
+ if isName:
+ try:
+ args = str(args)
+ if filters:
+ args = self.applyFilters(args)
+ if not os.path.isabs(args):
+ args = os.path.join(self.context['DIRECTORY'], args)
+ args = open(args, 'rU')
+ except Preprocessor.Error:
+ raise
+ except:
+ raise Preprocessor.Error(self, 'FILE_NOT_FOUND', str(args))
+ self.checkLineNumbers = bool(re.search('\.(js|jsm|java)(?:\.in)?$', args.name))
+ oldFile = self.context['FILE']
+ oldLine = self.context['LINE']
+ oldDir = self.context['DIRECTORY']
+ self.noteLineInfo()
+
+ if args.isatty():
+ # we're stdin, use '-' and '' for file and dir
+ self.context['FILE'] = '-'
+ self.context['DIRECTORY'] = ''
+ else:
+ abspath = os.path.abspath(args.name)
+ self.includes.add(abspath)
+ self.context['FILE'] = abspath
+ self.context['DIRECTORY'] = os.path.dirname(abspath)
+ self.context['LINE'] = 0
+
+ for l in args:
+ self.context['LINE'] += 1
+ self.handleLine(l)
+ if isName:
+ args.close()
+
+ self.context['FILE'] = oldFile
+ self.checkLineNumbers = oldCheckLineNumbers
+ self.context['LINE'] = oldLine
+ self.context['DIRECTORY'] = oldDir
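+    # e.g. '#include other.inc' (default marker) preprocesses other.inc,
+    # resolved relative to the current DIRECTORY, then restores FILE,
+    # LINE and DIRECTORY when it returns.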
+ def do_includesubst(self, args):
+ args = self.filter_substitution(args)
+ self.do_include(args)
+ def do_error(self, args):
+ raise Preprocessor.Error(self, 'Error: ', str(args))
+
+
+def preprocess(includes=[sys.stdin], defines={},
+               output=sys.stdout,
+ marker='#'):
+ pp = Preprocessor(defines=defines,
+ marker=marker)
+ for f in includes:
+ with open(f, 'rU') as input:
+ pp.processFile(input=input, output=output)
+ return pp.includes
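+
+# A minimal usage sketch (file names hypothetical):
+#
+#   included = preprocess(includes=['rules.mk.in'],
+#                         defines={'DEBUG': 1},
+#                         output=open('rules.mk', 'w'))
+#
+# 'included' is the set of absolute paths pulled in during processing.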
+
+
+# Keep this module independently executable.
+if __name__ == "__main__":
+ pp = Preprocessor()
+ pp.handleCommandLine(None, True)
diff --git a/python/mozbuild/mozbuild/pythonutil.py b/python/mozbuild/mozbuild/pythonutil.py
new file mode 100644
index 000000000..3dba25691
--- /dev/null
+++ b/python/mozbuild/mozbuild/pythonutil.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+
+def iter_modules_in_path(*paths):
+    """Iterate over the source files of all loaded modules that live under
+    any of the given paths."""
+ paths = [os.path.abspath(os.path.normcase(p)) + os.sep
+ for p in paths]
+ for name, module in sys.modules.items():
+ if not hasattr(module, '__file__'):
+ continue
+
+ path = module.__file__
+
+ if path.endswith('.pyc'):
+ path = path[:-1]
+ path = os.path.abspath(os.path.normcase(path))
+
+ if any(path.startswith(p) for p in paths):
+ yield path
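+
+# A minimal usage sketch (path hypothetical):
+#
+#   for path in iter_modules_in_path('/src/python'):
+#       print(path)
+#
+# Prints the source file of every already-imported module located under
+# /src/python, with compiled '.pyc' paths mapped back to their '.py' files.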
diff --git a/python/mozbuild/mozbuild/resources/html-build-viewer/index.html b/python/mozbuild/mozbuild/resources/html-build-viewer/index.html
new file mode 100644
index 000000000..fe7512188
--- /dev/null
+++ b/python/mozbuild/mozbuild/resources/html-build-viewer/index.html
@@ -0,0 +1,475 @@
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>Build System Resource Usage</title>
+
+ <meta charset='utf-8'>
+ <script src="http://d3js.org/d3.v3.min.js" charset="utf-8"></script>
+ <style>
+
+svg {
+ overflow: visible;
+}
+
+.axis path,
+.axis line {
+ fill: none;
+ stroke: #000;
+ shape-rendering: crispEdges;
+}
+
+.area {
+ fill: steelblue;
+}
+
+.graphs {
+ text-anchor: end;
+}
+
+.timeline {
+ fill: steelblue;
+ stroke: gray;
+ stroke-width: 3;
+}
+
+.short {
+ fill: gray;
+ stroke: gray;
+ stroke-width: 3;
+}
+
+#tooltip {
+ z-index: 10;
+ position: fixed;
+ background: #efefef;
+}
+ </style>
+ </head>
+ <body>
+ <script>
+var currentResources;
+
+/**
+ * Interface for a build resources JSON file.
+ */
+function BuildResources(data) {
+ if (data.version < 1 || data.version > 3) {
+ throw new Error("Unsupported version of the JSON format: " + data.version);
+ }
+
+ this.resources = [];
+
+ var cpu_fields = data.cpu_times_fields;
+ var io_fields = data.io_fields;
+ var virt_fields = data.virt_fields;
+ var swap_fields = data.swap_fields;
+
+ function convert(dest, source, sourceKey, destKey, fields) {
+ var i = 0;
+ fields.forEach(function (field) {
+ dest[destKey][field] = source[sourceKey][i];
+ i++;
+ });
+ }
+
+ var offset = data.start;
+ var cpu_times_totals = {};
+
+ cpu_fields.forEach(function (field) {
+ cpu_times_totals[field] = 0;
+ });
+
+ this.ioTotal = {};
+ var i = 0;
+ io_fields.forEach(function (field) {
+ this.ioTotal[field] = data.overall.io[i];
+ i++;
+ }.bind(this));
+
+ data.samples.forEach(function (sample) {
+ var entry = {
+ start: sample.start - offset,
+ end: sample.end - offset,
+ duration: sample.duration,
+ cpu_percent: sample.cpu_percent_mean,
+ cpu_times: {},
+ cpu_times_percents: {},
+ io: {},
+ virt: {},
+ swap: {},
+ };
+
+ convert(entry, sample, "cpu_times_sum", "cpu_times", cpu_fields);
+ convert(entry, sample, "io", "io", io_fields);
+ convert(entry, sample, "virt", "virt", virt_fields);
+ convert(entry, sample, "swap", "swap", swap_fields);
+
+ var total = 0;
+ for (var k in entry.cpu_times) {
+ cpu_times_totals[k] += entry.cpu_times[k];
+ total += entry.cpu_times[k];
+ }
+
+ for (var k in entry.cpu_times) {
+ if (total == 0) {
+ if (k == "idle") {
+ entry.cpu_times_percents[k] = 100;
+ } else {
+ entry.cpu_times_percents[k] = 0;
+ }
+ } else {
+ entry.cpu_times_percents[k] = entry.cpu_times[k] / total * 100;
+ }
+ }
+
+ this.resources.push(entry);
+ }.bind(this));
+
+ this.cpu_times_fields = [];
+
+ // Filter out CPU fields that have no values.
+ for (var k in cpu_times_totals) {
+ var v = cpu_times_totals[k];
+ if (v) {
+ this.cpu_times_fields.push(k);
+ continue;
+ }
+
+ this.resources.forEach(function (entry) {
+ delete entry.cpu_times[k];
+ delete entry.cpu_times_percents[k];
+ });
+ }
+
+ this.offset = offset;
+ this.data = data;
+}
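+
+// A trimmed sketch of the JSON shape this constructor expects (values
+// illustrative):
+//   {
+//     "version": 2, "start": 0, "end": 60, "duration": 60,
+//     "cpu_times_fields": ["user", "system", "idle"],
+//     "io_fields": ["read_bytes", "write_bytes", ...],
+//     "virt_fields": [...], "swap_fields": [...],
+//     "samples": [{"start": 0, "end": 1, "duration": 1,
+//                  "cpu_percent_mean": 50, "cpu_times_sum": [...],
+//                  "io": [...], "virt": [...], "swap": [...]}, ...],
+//     "overall": {"cpu_percent_mean": 50, "io": [...]},
+//     "phases": [{"name": "export", "start": 0, "end": 30}, ...]
+//   }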
+
+BuildResources.prototype = Object.freeze({
+ get start() {
+ return this.data.start;
+ },
+
+ get startDate() {
+ return new Date(this.start * 1000);
+ },
+
+ get end() {
+ return this.data.end;
+ },
+
+ get endDate() {
+ return new Date(this.end * 1000);
+ },
+
+ get duration() {
+ return this.data.duration;
+ },
+
+ get sample_times() {
+ var times = [];
+ this.resources.forEach(function (sample) {
+ times.push(sample.start);
+ });
+
+ return times;
+ },
+
+ get cpuPercent() {
+ return this.data.overall.cpu_percent_mean;
+ },
+
+ get tiers() {
+ var t = [];
+
+ this.data.phases.forEach(function (e) {
+ t.push(e.name);
+ });
+
+ return t;
+ },
+
+ getTier: function (tier) {
+ for (var i = 0; i < this.data.phases.length; i++) {
+ var t = this.data.phases[i];
+
+ if (t.name == tier) {
+ return t;
+ }
+ }
+ },
+});
+
+function updateResourcesGraph() {
+ //var selected = document.getElementById("resourceType");
+ //var what = selected[selected.selectedIndex].value;
+ var what = "cpu";
+
+ renderResources("resource_graph", currentResources, what);
+ document.getElementById("wall_time").innerHTML = Math.round(currentResources.duration * 100) / 100;
+ document.getElementById("start_date").innerHTML = currentResources.startDate.toISOString();
+ document.getElementById("end_date").innerHTML = currentResources.endDate.toISOString();
+ document.getElementById("cpu_percent").innerHTML = Math.round(currentResources.cpuPercent * 100) / 100;
+ document.getElementById("write_bytes").innerHTML = currentResources.ioTotal["write_bytes"];
+ document.getElementById("read_bytes").innerHTML = currentResources.ioTotal["read_bytes"];
+ document.getElementById("write_time").innerHTML = currentResources.ioTotal["write_time"];
+ document.getElementById("read_time").innerHTML = currentResources.ioTotal["read_time"];
+}
+
+function renderKey(key) {
+ d3.json("/resources/" + key, function onResource(error, response) {
+ if (error) {
+ alert("Data not available. Is the server still running?");
+ return;
+ }
+
+ currentResources = new BuildResources(response);
+ updateResourcesGraph();
+ });
+}
+
+function renderResources(id, resources, what) {
+ document.getElementById(id).innerHTML = "";
+
+ var margin = {top: 20, right: 20, bottom: 20, left: 50};
+ var width = window.innerWidth - 50 - margin.left - margin.right;
+ var height = 400 - margin.top - margin.bottom;
+
+ var x = d3.scale.linear()
+ .range([0, width])
+ .domain(d3.extent(resources.resources, function (d) { return d.start; }))
+ ;
+ var y = d3.scale.linear()
+ .range([height, 0])
+ .domain([0, 1])
+ ;
+
+ var xAxis = d3.svg.axis()
+ .scale(x)
+ .orient("bottom")
+ ;
+ var yAxis = d3.svg.axis()
+ .scale(y)
+ .orient("left")
+ .tickFormat(d3.format(".0%"))
+ ;
+
+ var area = d3.svg.area()
+ .x(function (d) { return x(d.start); })
+ .y0(function(d) { return y(d.y0); })
+ .y1(function(d) { return y(d.y0 + d.y); })
+ ;
+
+ var stack = d3.layout.stack()
+ .values(function (d) { return d.values; })
+ ;
+
+ // Manually control the layer order because we want it consistent and want
+ // to inject some sanity.
+ var layers = [
+ ["nice", "#0d9fff"],
+ ["irq", "#ff0d9f"],
+ ["softirq", "#ff0d9f"],
+ ["steal", "#000000"],
+ ["guest", "#000000"],
+ ["guest_nice", "#000000"],
+ ["system", "#f69a5c"],
+ ["iowait", "#ff0d25"],
+ ["user", "#5cb9f6"],
+ ["idle", "#e1e1e1"],
+ ].filter(function (l) {
+ return resources.cpu_times_fields.indexOf(l[0]) != -1;
+ });
+
+ // Draw a legend.
+ var legend = d3.select("#" + id)
+ .append("svg")
+ .attr("width", width + margin.left + margin.right)
+ .attr("height", 15)
+ .append("g")
+ .attr("class", "legend")
+ ;
+
+ legend.selectAll("g")
+ .data(layers)
+ .enter()
+ .append("g")
+ .each(function (d, i) {
+ var g = d3.select(this);
+ g.append("rect")
+ .attr("x", i * 100 + 20)
+ .attr("y", 0)
+ .attr("width", 10)
+ .attr("height", 10)
+ .style("fill", d[1])
+ ;
+ g.append("text")
+ .attr("x", i * 100 + 40)
+ .attr("y", 10)
+ .attr("height", 10)
+ .attr("width", 70)
+ .text(d[0])
+ ;
+ })
+ ;
+
+ var svg = d3.select("#" + id).append("svg")
+ .attr("width", width + margin.left + margin.right)
+ .attr("height", height + margin.top + margin.bottom)
+ .append("g")
+ .attr("transform", "translate(" + margin.left + "," + margin.top + ")")
+ ;
+
+ var data = stack(layers.map(function (layer) {
+ return {
+ name: layer[0],
+ color: layer[1],
+ values: resources.resources.map(function (d) {
+ return {
+ start: d.start,
+ y: d.cpu_times_percents[layer[0]] / 100,
+ };
+ }),
+ };
+ }));
+
+ var graphs = svg.selectAll(".graphs")
+ .data(data)
+ .enter().append("g")
+ .attr("class", "graphs")
+ ;
+
+ graphs.append("path")
+ .attr("class", "area")
+ .attr("d", function (d) { return area(d.values); })
+ .style("fill", function (d) { return d.color; })
+ ;
+
+ svg.append("g")
+ .attr("class", "x axis")
+ .attr("transform", "translate(0," + height + ")")
+ .call(xAxis)
+ ;
+
+ svg.append("g")
+ .attr("class", "y axis")
+ .call(yAxis)
+ ;
+
+ // Now we render a timeline of sorts of the tiers
+ // There is a row of rectangles that visualize divisions between the
+ // different items. We use the same x scale as the resource graph so times
+ // line up properly.
+ svg = d3.select("#" + id).append("svg")
+ .attr("width", width + margin.left + margin.right)
+ .attr("height", 100 + margin.top + margin.bottom)
+ .append("g")
+ .attr("transform", "translate(" + margin.left + "," + margin.top + ")")
+ ;
+
+ var y = d3.scale.linear().range([10, 0]).domain([0, 1]);
+
+ resources.tiers.forEach(function (t, i) {
+ var tier = resources.getTier(t);
+
+ var x_start = x(tier.start - resources.offset);
+ var x_end = x(tier.end - resources.offset);
+
+ svg.append("rect")
+ .attr("x", x_start)
+ .attr("y", 20)
+ .attr("height", 30)
+ .attr("width", x_end - x_start)
+ .attr("class", "timeline tier")
+ .attr("tier", t)
+ ;
+ });
+
+ function getEntry(element) {
+ var tier = element.getAttribute("tier");
+
+ var entry = resources.getTier(tier);
+ entry.tier = tier;
+
+ return entry;
+ }
+
+ d3.selectAll(".timeline")
+ .on("mouseenter", function () {
+ var entry = getEntry(this);
+
+ d3.select("#tt_tier").html(entry.tier);
+ d3.select("#tt_duration").html(entry.duration || "n/a");
+ d3.select("#tt_cpu_percent").html(entry.cpu_percent_mean || "n/a");
+
+ d3.select("#tooltip").style("display", "");
+ })
+ .on("mouseleave", function () {
+ var tooltip = d3.select("#tooltip");
+ tooltip.style("display", "none");
+ })
+ .on("mousemove", function () {
+ var e = d3.event;
+      var x_offset = 10;
+
+ if (e.pageX > window.innerWidth / 2) {
+ x_offset = -150;
+ }
+
+ d3.select("#tooltip")
+ .style("left", (e.pageX + x_offset) + "px")
+ .style("top", (e.pageY + 10) + "px")
+ ;
+ })
+ ;
+}
+
+document.addEventListener("DOMContentLoaded", function() {
+ d3.json("list", function onList(error, response) {
+ if (!response || !("files" in response)) {
+ return;
+ }
+
+ renderKey(response.files[0]);
+ });
+}, false);
+
+ </script>
+ <h3>Build Resource Usage Report</h3>
+
+ <div id="tooltip" style="display: none;">
+ <table border="0">
+ <tr><td>Tier</td><td id="tt_tier"></td></tr>
+ <tr><td>Duration</td><td id="tt_duration"></td></tr>
+ <tr><td>CPU %</td><td id="tt_cpu_percent"></td></tr>
+ </table>
+ </div>
+
+ <!--
+ <select id="resourceType" onchange="updateResourcesGraph();">
+ <option value="cpu">CPU</option>
+ <option value="io_count">Disk I/O Count</option>
+ <option value="io_bytes">Disk I/O Bytes</option>
+ <option value="io_time">Disk I/O Time</option>
+ <option value="virt">Memory</option>
+ </select>
+ -->
+
+ <div id="resource_graph"></div>
+ <div id="summary" style="padding-top: 20px">
+ <table border="0">
+ <tr><td>Wall Time (s)</td><td id="wall_time"></td></tr>
+ <tr><td>Start Date</td><td id="start_date"></td></tr>
+ <tr><td>End Date</td><td id="end_date"></td></tr>
+ <tr><td>CPU %</td><td id="cpu_percent"></td></tr>
+ <tr><td>Write Bytes</td><td id="write_bytes"></td></tr>
+ <tr><td>Read Bytes</td><td id="read_bytes"></td></tr>
+ <tr><td>Write Time</td><td id="write_time"></td></tr>
+ <tr><td>Read Time</td><td id="read_time"></td></tr>
+ </table>
+ </div>
+ </body>
+</html>
diff --git a/python/mozbuild/mozbuild/shellutil.py b/python/mozbuild/mozbuild/shellutil.py
new file mode 100644
index 000000000..185a970ee
--- /dev/null
+++ b/python/mozbuild/mozbuild/shellutil.py
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+
+
+def _tokens2re(**tokens):
+    # Create a pattern for non-escaped tokens, in the form:
+    #   (?<!\\)(?:a|b|c...)
+    # This matches any of the patterns a, b, c, ... as long as it is not
+    # preceded by a backslash, where each of a, b, c... has the form
+    #   (?P<name>pattern)
+    # i.e. it matches its pattern and captures it in a named match group.
+    # The group names and patterns are given as keyword arguments.
+ all_tokens = '|'.join('(?P<%s>%s)' % (name, value)
+ for name, value in tokens.iteritems())
+ nonescaped = r'(?<!\\)(?:%s)' % all_tokens
+
+ # The final pattern matches either the above pattern, or an escaped
+ # backslash, captured in the "escape" match group.
+ return re.compile('(?:%s|%s)' % (nonescaped, r'(?P<escape>\\\\)'))
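+
+# For instance (illustrative), _tokens2re(quote='"') compiles a pattern
+# equivalent to:
+#   (?:(?<!\\)(?:(?P<quote>"))|(?P<escape>\\\\))
+# i.e. an unescaped double quote captured as 'quote', or an escaped
+# backslash captured as 'escape'.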
+
+UNQUOTED_TOKENS_RE = _tokens2re(
+ whitespace=r'[\t\r\n ]+',
+ quote=r'[\'"]',
+ comment='#',
+ special=r'[<>&|`~(){}$;\*\?]',
+ backslashed=r'\\[^\\]',
+)
+
+DOUBLY_QUOTED_TOKENS_RE = _tokens2re(
+ quote='"',
+ backslashedquote=r'\\"',
+    special=r'\$',
+ backslashed=r'\\[^\\"]',
+)
+
+ESCAPED_NEWLINES_RE = re.compile(r'\\\n')
+
+# This regexp contains the same characters as all those listed in
+# UNQUOTED_TOKENS_RE. Please keep in sync.
+SHELL_QUOTE_RE = re.compile(r'[\\\t\r\n \'\"#<>&|`~(){}$;\*\?]')
+
+
+class MetaCharacterException(Exception):
+    '''Raised when an unquoted, non-escaped shell metacharacter is found.'''
+    def __init__(self, char):
+ self.char = char
+
+
+class _ClineSplitter(object):
+ '''
+ Parses a given command line string and creates a list of command
+ and arguments, with wildcard expansion.
+ '''
+ def __init__(self, cline):
+ self.arg = None
+ self.cline = cline
+ self.result = []
+ self._parse_unquoted()
+
+    def _push(self, s):
+        '''
+        Push the given string as part of the current argument.
+        '''
+        if self.arg is None:
+            self.arg = ''
+        self.arg += s
+
+ def _next(self):
+ '''
+ Finalize current argument, effectively adding it to the list.
+ '''
+ if self.arg is None:
+ return
+ self.result.append(self.arg)
+ self.arg = None
+
+ def _parse_unquoted(self):
+ '''
+ Parse command line remainder in the context of an unquoted string.
+ '''
+ while self.cline:
+ # Find the next token
+ m = UNQUOTED_TOKENS_RE.search(self.cline)
+ # If we find none, the remainder of the string can be pushed to
+ # the current argument and the argument finalized
+ if not m:
+ self._push(self.cline)
+ break
+ # The beginning of the string, up to the found token, is part of
+ # the current argument
+ if m.start():
+ self._push(self.cline[:m.start()])
+ self.cline = self.cline[m.end():]
+
+ match = {name: value
+ for name, value in m.groupdict().items() if value}
+ if 'quote' in match:
+ # " or ' start a quoted string
+ if match['quote'] == '"':
+ self._parse_doubly_quoted()
+ else:
+ self._parse_quoted()
+ elif 'comment' in match:
+ # Comments are ignored. The current argument can be finalized,
+ # and parsing stopped.
+ break
+ elif 'special' in match:
+ # Unquoted, non-escaped special characters need to be sent to a
+ # shell.
+ raise MetaCharacterException(match['special'])
+ elif 'whitespace' in match:
+                # Whitespace terminates the current argument.
+ self._next()
+ elif 'escape' in match:
+ # Escaped backslashes turn into a single backslash
+ self._push('\\')
+ elif 'backslashed' in match:
+ # Backslashed characters are unbackslashed
+ # e.g. echo \a -> a
+ self._push(match['backslashed'][1])
+ else:
+ raise Exception("Shouldn't reach here")
+ if self.arg:
+ self._next()
+
+ def _parse_quoted(self):
+ # Single quoted strings are preserved, except for the final quote
+ index = self.cline.find("'")
+ if index == -1:
+ raise Exception('Unterminated quoted string in command')
+ self._push(self.cline[:index])
+ self.cline = self.cline[index+1:]
+
+ def _parse_doubly_quoted(self):
+ if not self.cline:
+ raise Exception('Unterminated quoted string in command')
+ while self.cline:
+ m = DOUBLY_QUOTED_TOKENS_RE.search(self.cline)
+ if not m:
+ raise Exception('Unterminated quoted string in command')
+ self._push(self.cline[:m.start()])
+ self.cline = self.cline[m.end():]
+ match = {name: value
+ for name, value in m.groupdict().items() if value}
+ if 'quote' in match:
+ # a double quote ends the quoted string, so go back to
+ # unquoted parsing
+ return
+ elif 'special' in match:
+ # Unquoted, non-escaped special characters in a doubly quoted
+ # string still have a special meaning and need to be sent to a
+ # shell.
+ raise MetaCharacterException(match['special'])
+ elif 'escape' in match:
+ # Escaped backslashes turn into a single backslash
+ self._push('\\')
+ elif 'backslashedquote' in match:
+ # Backslashed double quotes are un-backslashed
+ self._push('"')
+ elif 'backslashed' in match:
+ # Backslashed characters are kept backslashed
+ self._push(match['backslashed'])
+
+
+def split(cline):
+ '''
+ Split the given command line string.
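+
+    A sketch of the intended behaviour:
+
+    >>> split("echo 'hello world'")
+    ['echo', 'hello world']
+
+    Unquoted shell metacharacters such as $ or | raise
+    MetaCharacterException instead of being interpreted.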
+ '''
+ s = ESCAPED_NEWLINES_RE.sub('', cline)
+ return _ClineSplitter(s).result
+
+
+def _quote(s):
+ '''Given a string, returns a version that can be used literally on a shell
+ command line, enclosing it with single quotes if necessary.
+
+ As a special case, if given an int, returns a string containing the int,
+ not enclosed in quotes.
+ '''
+ if type(s) == int:
+ return '%d' % s
+
+ # Empty strings need to be quoted to have any significance
+ if s and not SHELL_QUOTE_RE.search(s):
+ return s
+
+ # Single quoted strings can contain any characters unescaped except the
+ # single quote itself, which can't even be escaped, so the string needs to
+ # be closed, an escaped single quote added, and reopened.
+ t = type(s)
+ return t("'%s'") % s.replace(t("'"), t("'\\''"))
+
+
+def quote(*strings):
+ '''Given one or more strings, returns a quoted string that can be used
+ literally on a shell command line.
+
+    >>> quote('a', 'b')
+    'a b'
+ >>> quote('a b', 'c')
+ "'a b' c"
+ '''
+ return ' '.join(_quote(s) for s in strings)
+
+
+__all__ = ['MetaCharacterException', 'split', 'quote']
diff --git a/python/mozbuild/mozbuild/sphinx.py b/python/mozbuild/mozbuild/sphinx.py
new file mode 100644
index 000000000..0f8e22ca1
--- /dev/null
+++ b/python/mozbuild/mozbuild/sphinx.py
@@ -0,0 +1,200 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import importlib
+import os
+import sys
+
+from sphinx.util.compat import Directive
+from sphinx.util.docstrings import prepare_docstring
+
+
+def function_reference(f, attr, args, doc):
+ lines = []
+
+ lines.extend([
+ f,
+ '-' * len(f),
+ '',
+ ])
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend([
+ docstring[0],
+ '',
+ ])
+
+ arg_types = []
+
+ for t in args:
+ if isinstance(t, list):
+ inner_types = [t2.__name__ for t2 in t]
+ arg_types.append(' | ' .join(inner_types))
+ continue
+
+ arg_types.append(t.__name__)
+
+ arg_s = '(%s)' % ', '.join(arg_types)
+
+ lines.extend([
+ ':Arguments: %s' % arg_s,
+ '',
+ ])
+
+ lines.extend(docstring[1:])
+ lines.append('')
+
+ return lines
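+
+# Illustrative output for a function 'Log' taking (str,) whose docstring is
+# 'Log a message.' (a sketch; exact blank lines come from prepare_docstring):
+#
+#   Log
+#   ---
+#
+#   Log a message.
+#
+#   :Arguments: (str)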
+
+
+def variable_reference(v, st_type, in_type, doc):
+ lines = [
+ v,
+ '-' * len(v),
+ '',
+ ]
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend([
+ docstring[0],
+ '',
+ ])
+
+ lines.extend([
+ ':Storage Type: ``%s``' % st_type.__name__,
+ ':Input Type: ``%s``' % in_type.__name__,
+ '',
+ ])
+
+ lines.extend(docstring[1:])
+ lines.append('')
+
+ return lines
+
+
+def special_reference(v, func, typ, doc):
+ lines = [
+ v,
+ '-' * len(v),
+ '',
+ ]
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend([
+ docstring[0],
+ '',
+ ':Type: ``%s``' % typ.__name__,
+ '',
+ ])
+
+ lines.extend(docstring[1:])
+ lines.append('')
+
+ return lines
+
+
+def format_module(m):
+ lines = []
+
+ for subcontext, cls in sorted(m.SUBCONTEXTS.items()):
+ lines.extend([
+ '.. _mozbuild_subcontext_%s:' % subcontext,
+ '',
+ 'Sub-Context: %s' % subcontext,
+ '=============' + '=' * len(subcontext),
+ '',
+ ])
+ lines.extend(prepare_docstring(cls.__doc__))
+ if lines[-1]:
+ lines.append('')
+
+ for k, v in sorted(cls.VARIABLES.items()):
+ lines.extend(variable_reference(k, *v))
+
+ lines.extend([
+ 'Variables',
+ '=========',
+ '',
+ ])
+
+ for v in sorted(m.VARIABLES):
+ lines.extend(variable_reference(v, *m.VARIABLES[v]))
+
+ lines.extend([
+ 'Functions',
+ '=========',
+ '',
+ ])
+
+ for func in sorted(m.FUNCTIONS):
+ lines.extend(function_reference(func, *m.FUNCTIONS[func]))
+
+ lines.extend([
+ 'Special Variables',
+ '=================',
+ '',
+ ])
+
+ for v in sorted(m.SPECIAL_VARIABLES):
+ lines.extend(special_reference(v, *m.SPECIAL_VARIABLES[v]))
+
+ return lines
+
+
+class MozbuildSymbols(Directive):
+ """Directive to insert mozbuild sandbox symbol information."""
+
+ required_arguments = 1
+
+ def run(self):
+ module = importlib.import_module(self.arguments[0])
+ fname = module.__file__
+ if fname.endswith('.pyc'):
+ fname = fname[0:-1]
+
+ self.state.document.settings.record_dependencies.add(fname)
+
+        # We format the documentation as reST and feed it back into the
+        # parser for conversion. We don't emit any nodes ourselves, so
+        # there's no record of this directive in the output.
+ self.state_machine.insert_input(format_module(module), fname)
+
+ return []
+
+
+def setup(app):
+ app.add_directive('mozbuildsymbols', MozbuildSymbols)
+
+ # Unlike typical Sphinx installs, our documentation is assembled from
+ # many sources and staged in a common location. This arguably isn't a best
+ # practice, but it was the easiest to implement at the time.
+ #
+ # Here, we invoke our custom code for staging/generating all our
+ # documentation.
+ from moztreedocs import SphinxManager
+
+ topsrcdir = app.config._raw_config['topsrcdir']
+ manager = SphinxManager(topsrcdir,
+ os.path.join(topsrcdir, 'tools', 'docs'),
+ app.outdir)
+ manager.generate_docs(app)
+
+ app.srcdir = os.path.join(app.outdir, '_staging')
+
+ # We need to adjust sys.path in order for Python API docs to get generated
+ # properly. We leverage the in-tree virtualenv for this.
+ from mozbuild.virtualenv import VirtualenvManager
+
+ ve = VirtualenvManager(topsrcdir,
+ os.path.join(topsrcdir, 'dummy-objdir'),
+ os.path.join(app.outdir, '_venv'),
+ sys.stderr,
+ os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
+ ve.ensure()
+ ve.activate()
diff --git a/python/mozbuild/mozbuild/test/__init__.py b/python/mozbuild/mozbuild/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/__init__.py
diff --git a/python/mozbuild/mozbuild/test/action/data/invalid/region.properties b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties
new file mode 100644
index 000000000..d4d8109b6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties
@@ -0,0 +1,12 @@
+# A region.properties file with invalid unicode byte sequences. The
+# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability
+# and stress test", available at
+# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
+
+# 3.5 Impossible bytes |
+# |
+# The following two bytes cannot appear in a correct UTF-8 string |
+# |
+# 3.5.1 fe = "þ" |
+# 3.5.2 ff = "ÿ" |
+# 3.5.3 fe fe ff ff = "þþÿÿ" |
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/assets/asset.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/assets/asset.txt
new file mode 100644
index 000000000..b01830602
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/assets/asset.txt
@@ -0,0 +1 @@
+assets/asset.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/classes.dex b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/classes.dex
new file mode 100644
index 000000000..dfc99f9c2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/classes.dex
@@ -0,0 +1 @@
+classes.dex \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1.ap_ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1.ap_
new file mode 100644
index 000000000..915be683b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1.ap_
Binary files differ
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/res/res.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/res/res.txt
new file mode 100644
index 000000000..01d2fb0a1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/res/res.txt
@@ -0,0 +1 @@
+input1/res/res.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/resources.arsc b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/resources.arsc
new file mode 100644
index 000000000..6274a181a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/resources.arsc
@@ -0,0 +1 @@
+input1/resources.arsc \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2.apk b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2.apk
new file mode 100644
index 000000000..3003f5ae9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2.apk
Binary files differ
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/asset.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/asset.txt
new file mode 100644
index 000000000..31a0e5129
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/asset.txt
@@ -0,0 +1 @@
+input2/assets/asset.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/omni.ja b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/omni.ja
new file mode 100644
index 000000000..36deb6725
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/omni.ja
@@ -0,0 +1 @@
+input2/assets/omni.ja \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/classes.dex b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/classes.dex
new file mode 100644
index 000000000..99779eb45
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/classes.dex
@@ -0,0 +1 @@
+input2/classes.dex \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/lib/lib.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/lib/lib.txt
new file mode 100644
index 000000000..7a2594a02
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/lib/lib.txt
@@ -0,0 +1 @@
+input2/lib/lib.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/res/res.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/res/res.txt
new file mode 100644
index 000000000..2a52ab524
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/res/res.txt
@@ -0,0 +1 @@
+input2/res/res.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/resources.arsc b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/resources.arsc
new file mode 100644
index 000000000..64f4b77ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/resources.arsc
@@ -0,0 +1 @@
+input/resources.arsc \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/root_file.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/root_file.txt
new file mode 100644
index 000000000..9f2f53518
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/root_file.txt
@@ -0,0 +1 @@
+input2/root_file.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/lib/lib.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/lib/lib.txt
new file mode 100644
index 000000000..acbcebb3d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/lib/lib.txt
@@ -0,0 +1 @@
+lib/lib.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/omni.ja b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/omni.ja
new file mode 100644
index 000000000..48c422a3a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/omni.ja
@@ -0,0 +1 @@
+omni.ja \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/root_file.txt b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/root_file.txt
new file mode 100644
index 000000000..89b006da4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/package_fennec_apk/root_file.txt
@@ -0,0 +1 @@
+root_file.txt
diff --git a/python/mozbuild/mozbuild/test/action/data/valid-zh-CN/region.properties b/python/mozbuild/mozbuild/test/action/data/valid-zh-CN/region.properties
new file mode 100644
index 000000000..d4d7fcfee
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/valid-zh-CN/region.properties
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Default search engine
+browser.search.defaultenginename=百度
+
+# Search engine order (order displayed in the search bar dropdown)
+browser.search.order.1=百度
+browser.search.order.2=Google
+
+# This is the default set of web based feed handlers shown in the reader
+# selection UI
+browser.contentHandlers.types.0.title=Bloglines
+browser.contentHandlers.types.0.uri=http://www.bloglines.com/login?r=/sub/%s
+
+# increment this number when anything gets changed in the list below. This will
+# cause Firefox to re-read these prefs and inject any new handlers into the
+# profile database. Note that "new" is defined as "has a different URL"; this
+# means that it's not possible to update the name of an existing handler, so
+# don't make any spelling errors here.
+gecko.handlerService.defaultHandlersVersion=3
+
+# The default set of protocol handlers for webcal:
+gecko.handlerService.schemes.webcal.0.name=30 Boxes
+gecko.handlerService.schemes.webcal.0.uriTemplate=https://30boxes.com/external/widget?refer=ff&url=%s
+
+# The default set of protocol handlers for mailto:
+gecko.handlerService.schemes.mailto.0.name=Yahoo! 邮件
+gecko.handlerService.schemes.mailto.0.uriTemplate=https://compose.mail.yahoo.com/?To=%s
+gecko.handlerService.schemes.mailto.1.name=Gmail
+gecko.handlerService.schemes.mailto.1.uriTemplate=https://mail.google.com/mail/?extsrc=mailto&url=%s
+
+# This is the default set of web based feed handlers shown in the reader
+# selection UI
+browser.contentHandlers.types.0.title=My Yahoo!
+browser.contentHandlers.types.0.uri=http://www.bloglines.com/login?r=/sub/%s
diff --git a/python/mozbuild/mozbuild/test/action/test_buildlist.py b/python/mozbuild/mozbuild/test/action/test_buildlist.py
new file mode 100644
index 000000000..9c2631812
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_buildlist.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import os, sys, os.path, time
+from tempfile import mkdtemp
+from shutil import rmtree
+import mozunit
+
+from mozbuild.action.buildlist import addEntriesToListFile
+
+
+class TestBuildList(unittest.TestCase):
+ """
+ Unit tests for buildlist.py
+ """
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ def tearDown(self):
+ rmtree(self.tmpdir)
+
+ # utility methods for tests
+ def touch(self, file, dir=None):
+ if dir is None:
+ dir = self.tmpdir
+ f = os.path.join(dir, file)
+ open(f, 'w').close()
+ return f
+
+ def assertFileContains(self, filename, l):
+ """Assert that the lines in the file |filename| are equal
+ to the contents of the list |l|, in order."""
+ l = l[:]
+ f = open(filename, 'r')
+ lines = [line.rstrip() for line in f.readlines()]
+ f.close()
+        for line in lines:
+            self.assertTrue(len(l) > 0,
+                            "ran out of expected lines! (expected '{0}', got '{1}')"
+                            .format(l, lines))
+            self.assertEqual(line, l.pop(0))
+        self.assertTrue(len(l) == 0,
+                        "not enough lines in file! (expected '{0}',"
+                        " got '{1}')".format(l, lines))
+
+ def test_basic(self):
+ "Test that addEntriesToListFile works when file doesn't exist."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ # ensure that attempting to add the same entries again doesn't change it
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+
+ def test_append(self):
+ "Test adding new entries."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ l2 = ["x","y","z"]
+ addEntriesToListFile(testfile, l2)
+ l.extend(l2)
+ self.assertFileContains(testfile, l)
+
+ def test_append_some(self):
+ "Test adding new entries mixed with existing entries."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ addEntriesToListFile(testfile, ["a", "x", "c", "z"])
+ self.assertFileContains(testfile, ["a", "b", "c", "x", "z"])
+
+ def test_add_multiple(self):
+ """Test that attempting to add the same entry multiple times results in
+ only one entry being added."""
+ testfile = os.path.join(self.tmpdir, "test.list")
+ addEntriesToListFile(testfile, ["a","b","a","a","b"])
+ self.assertFileContains(testfile, ["a","b"])
+ addEntriesToListFile(testfile, ["c","a","c","b","c"])
+ self.assertFileContains(testfile, ["a","b","c"])
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_generate_browsersearch.py b/python/mozbuild/mozbuild/test/action/test_generate_browsersearch.py
new file mode 100644
index 000000000..4c7f5635e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_generate_browsersearch.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from __future__ import unicode_literals
+
+import json
+import os
+import unittest
+
+import mozunit
+
+import mozbuild.action.generate_browsersearch as generate_browsersearch
+
+from mozfile.mozfile import (
+ NamedTemporaryFile,
+ TemporaryDirectory,
+)
+
+import mozpack.path as mozpath
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestGenerateBrowserSearch(unittest.TestCase):
+ """
+ Unit tests for generate_browsersearch.py.
+ """
+
+ def _test_one(self, name):
+ with TemporaryDirectory() as tmpdir:
+ with NamedTemporaryFile(mode='r+') as temp:
+ srcdir = os.path.join(test_data_path, name)
+
+ generate_browsersearch.main([
+ '--silent',
+ '--srcdir', srcdir,
+ temp.name])
+ return json.load(temp)
+
+ def test_valid_unicode(self):
+ o = self._test_one('valid-zh-CN')
+        self.assertEqual(o['default'], '百度')
+        self.assertEqual(o['engines'], ['百度', 'Google'])
+
+ def test_invalid_unicode(self):
+ with self.assertRaises(UnicodeDecodeError):
+ self._test_one('invalid')
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_package_fennec_apk.py b/python/mozbuild/mozbuild/test/action/test_package_fennec_apk.py
new file mode 100644
index 000000000..5b7760836
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_package_fennec_apk.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from __future__ import unicode_literals
+
+import os
+import unittest
+
+import mozunit
+
+from mozbuild.action.package_fennec_apk import (
+ package_fennec_apk as package,
+)
+from mozpack.mozjar import JarReader
+import mozpack.path as mozpath
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data', 'package_fennec_apk')
+
+
+def data(name):
+ return os.path.join(test_data_path, name)
+
+
+class TestPackageFennecAPK(unittest.TestCase):
+ """
+ Unit tests for package_fennec_apk.py.
+ """
+
+ def test_arguments(self):
+        # Package a set of files supplied via explicit arguments and make
+        # sure each one ends up in its expected location in the apk.
+ jarrer = package(inputs=[],
+ omni_ja=data('omni.ja'),
+ classes_dex=data('classes.dex'),
+ assets_dirs=[data('assets')],
+ lib_dirs=[data('lib')],
+ root_files=[data('root_file.txt')])
+
+ # omni.ja ends up in assets/omni.ja.
+        self.assertEqual(jarrer['assets/omni.ja'].open().read().strip(), 'omni.ja')
+
+ # Everything else is in place.
+ for name in ('classes.dex',
+ 'assets/asset.txt',
+ 'lib/lib.txt',
+ 'root_file.txt'):
+            self.assertEqual(jarrer[name].open().read().strip(), name)
+
+ def test_inputs(self):
+ # Language repacks take updated resources from an ap_ and pack them
+ # into an apk. In this case, the first input is the original package,
+ # the second input the update ap_. Make sure the second input
+ # overrides the first.
+ jarrer = package(inputs=[data('input2.apk'), data('input1.ap_')])
+
+ files1 = JarReader(data('input1.ap_')).entries.keys()
+ files2 = JarReader(data('input2.apk')).entries.keys()
+ for name in files2:
+ self.assertTrue(name in files1 or
+ jarrer[name].open().read().startswith('input2/'))
+ for name in files1:
+ self.assertTrue(jarrer[name].open().read().startswith('input1/'))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/backend/__init__.py b/python/mozbuild/mozbuild/test/backend/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/__init__.py
diff --git a/python/mozbuild/mozbuild/test/backend/common.py b/python/mozbuild/mozbuild/test/backend/common.py
new file mode 100644
index 000000000..85ccb1037
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/common.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import unittest
+
+from collections import defaultdict
+from shutil import rmtree
+from tempfile import mkdtemp
+
+from mach.logging import LoggingManager
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+
+import mozpack.path as mozpath
+
+
+log_manager = LoggingManager()
+log_manager.add_terminal_logging()
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+CONFIGS = defaultdict(lambda: {
+ 'defines': {},
+ 'non_global_defines': [],
+ 'substs': {'OS_TARGET': 'WINNT'},
+}, {
+ 'android_eclipse': {
+ 'defines': {
+ 'MOZ_ANDROID_MIN_SDK_VERSION': '15',
+ },
+ 'non_global_defines': [],
+ 'substs': {
+ 'ANDROID_TARGET_SDK': '16',
+ 'MOZ_WIDGET_TOOLKIT': 'android',
+ },
+ },
+ 'binary-components': {
+ 'defines': {},
+ 'non_global_defines': [],
+ 'substs': {
+ 'LIB_PREFIX': 'lib',
+ 'LIB_SUFFIX': 'a',
+ 'COMPILE_ENVIRONMENT': '1',
+ },
+ },
+ 'sources': {
+ 'defines': {},
+ 'non_global_defines': [],
+ 'substs': {
+ 'LIB_PREFIX': 'lib',
+ 'LIB_SUFFIX': 'a',
+ },
+ },
+ 'stub0': {
+ 'defines': {
+ 'MOZ_TRUE_1': '1',
+ 'MOZ_TRUE_2': '1',
+ },
+ 'non_global_defines': [
+ 'MOZ_NONGLOBAL_1',
+ 'MOZ_NONGLOBAL_2',
+ ],
+ 'substs': {
+ 'MOZ_FOO': 'foo',
+ 'MOZ_BAR': 'bar',
+ },
+ },
+ 'substitute_config_files': {
+ 'defines': {},
+ 'non_global_defines': [],
+ 'substs': {
+ 'MOZ_FOO': 'foo',
+ 'MOZ_BAR': 'bar',
+ },
+ },
+ 'test_config': {
+ 'defines': {
+ 'foo': 'baz qux',
+ 'baz': 1,
+ },
+ 'non_global_defines': [],
+ 'substs': {
+ 'foo': 'bar baz',
+ },
+ },
+ 'visual-studio': {
+ 'defines': {},
+ 'non_global_defines': [],
+ 'substs': {
+ 'MOZ_APP_NAME': 'my_app',
+ },
+ },
+})
+
+
+class BackendTester(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZ_OBJDIR', None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def _get_environment(self, name):
+ """Obtain a new instance of a ConfigEnvironment for a known profile.
+
+ A new temporary object directory is created for the environment. The
+ environment is cleaned up automatically when the test finishes.
+ """
+ config = CONFIGS[name]
+
+ objdir = mkdtemp()
+ self.addCleanup(rmtree, objdir)
+
+ srcdir = mozpath.join(test_data_path, name)
+ config['substs']['top_srcdir'] = srcdir
+ return ConfigEnvironment(srcdir, objdir, **config)
+
+ def _emit(self, name, env=None):
+ env = env or self._get_environment(name)
+ reader = BuildReader(env)
+ emitter = TreeMetadataEmitter(env)
+
+ return env, emitter.emit(reader.read_topsrcdir())
+
+ def _consume(self, name, cls, env=None):
+ env, objs = self._emit(name, env=env)
+ backend = cls(env)
+ backend.consume(objs)
+
+ return env
+
+ def _tree_paths(self, topdir, filename):
+ for dirpath, dirnames, filenames in os.walk(topdir):
+ for f in filenames:
+ if f == filename:
+ yield mozpath.relpath(mozpath.join(dirpath, f), topdir)
+
+ def _mozbuild_paths(self, env):
+ return self._tree_paths(env.topsrcdir, 'moz.build')
+
+ def _makefile_in_paths(self, env):
+ return self._tree_paths(env.topsrcdir, 'Makefile.in')
+
+
+__all__ = ['BackendTester']
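+
+# A minimal sketch of a concrete test built on this helper (MyBackend is
+# hypothetical):
+#
+#   class TestMyBackend(BackendTester):
+#       def test_basic(self):
+#           env = self._consume('stub0', MyBackend)
+#           self.assertTrue(os.path.exists(env.topobjdir))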
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/library1/resources/values/strings.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/library1/resources/values/strings.xml
new file mode 100644
index 000000000..a7337c554
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/library1/resources/values/strings.xml
@@ -0,0 +1 @@
+<string name="label">library1</string>
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main1/AndroidManifest.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main1/AndroidManifest.xml
new file mode 100644
index 000000000..7a906454d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main1/AndroidManifest.xml
@@ -0,0 +1 @@
+<!-- Placeholder. -->
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/AndroidManifest.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/AndroidManifest.xml
new file mode 100644
index 000000000..7a906454d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/AndroidManifest.xml
@@ -0,0 +1 @@
+<!-- Placeholder. -->
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/assets/dummy.txt b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/assets/dummy.txt
new file mode 100644
index 000000000..c32a95993
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/assets/dummy.txt
@@ -0,0 +1 @@
+# Placeholder. \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/extra.jar b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/extra.jar
new file mode 100644
index 000000000..c32a95993
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/extra.jar
@@ -0,0 +1 @@
+# Placeholder. \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/res/values/strings.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/res/values/strings.xml
new file mode 100644
index 000000000..0b28bf41e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/res/values/strings.xml
@@ -0,0 +1 @@
+<string name="label">main1</string>
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/AndroidManifest.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/AndroidManifest.xml
new file mode 100644
index 000000000..7a906454d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/AndroidManifest.xml
@@ -0,0 +1 @@
+<!-- Placeholder. -->
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/a/A.java b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/a/A.java
new file mode 100644
index 000000000..0ab867d3d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/a/A.java
@@ -0,0 +1 @@
+package a.a;
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/b/B.java b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/b/B.java
new file mode 100644
index 000000000..66eb44c15
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/b/B.java
@@ -0,0 +1 @@
+package b;
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/c/C.java b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/c/C.java
new file mode 100644
index 000000000..ca474ff33
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/c/C.java
@@ -0,0 +1 @@
+package d.e;
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main4 b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main4
new file mode 100644
index 000000000..7a906454d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/main4
@@ -0,0 +1 @@
+<!-- Placeholder. -->
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/moz.build b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/moz.build
new file mode 100644
index 000000000..327284c88
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/moz.build
@@ -0,0 +1,37 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+p = add_android_eclipse_library_project('library1')
+p.package_name = 'org.mozilla.test.library1'
+p.res = 'library1/resources'
+
+p = add_android_eclipse_library_project('library2')
+p.package_name = 'org.mozilla.test.library2'
+
+p = add_android_eclipse_project('main1', 'main1/AndroidManifest.xml')
+p.package_name = 'org.mozilla.test.main1'
+p.recursive_make_targets += ['target1', 'target2']
+
+p = add_android_eclipse_project('main2', 'main2/AndroidManifest.xml')
+p.package_name = 'org.mozilla.test.main2'
+p.res = 'main2/res'
+p.assets = 'main2/assets'
+p.extra_jars = ['main2/extra.jar']
+
+p = add_android_eclipse_project('main3', 'main3/AndroidManifest.xml')
+p.package_name = 'org.mozilla.test.main3'
+cpe = p.add_classpathentry('a', 'main3/a', dstdir='a/a')
+cpe = p.add_classpathentry('b', 'main3/b', dstdir='b')
+cpe.exclude_patterns += ['b/Excludes.java', 'b/Excludes2.java']
+cpe = p.add_classpathentry('c', 'main3/c', dstdir='d/e')
+cpe.ignore_warnings = True
+
+p = add_android_eclipse_project('main4', 'main3/AndroidManifest.xml')
+p.package_name = 'org.mozilla.test.main3'
+p.referenced_projects += ['library1']
+p.included_projects += ['library2']
+p.recursive_make_targets += ['target3', 'target4']
+
+DIRS += ['subdir']
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/moz.build
new file mode 100644
index 000000000..c75aec456
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/moz.build
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DEFINES['FOO'] = 'FOO'
+
+p = add_android_eclipse_library_project('sublibrary')
+p.package_name = 'org.mozilla.test.sublibrary'
+p.is_library = True
+
+p = add_android_eclipse_project('submain', 'submain/AndroidManifest.xml')
+p.package_name = 'org.mozilla.test.submain'
+p.recursive_make_targets += ['subtarget1', 'subtarget2']
diff --git a/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/submain/AndroidManifest.xml b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/submain/AndroidManifest.xml
new file mode 100644
index 000000000..7a906454d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/submain/AndroidManifest.xml
@@ -0,0 +1 @@
+<!-- Placeholder. -->
diff --git a/python/mozbuild/mozbuild/test/backend/data/binary-components/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/binary-components/bar/moz.build
new file mode 100644
index 000000000..2946e42aa
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/binary-components/bar/moz.build
@@ -0,0 +1,2 @@
+Component('bar')
+NO_COMPONENTS_MANIFEST = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/binary-components/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/binary-components/foo/moz.build
new file mode 100644
index 000000000..8611a74be
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/binary-components/foo/moz.build
@@ -0,0 +1 @@
+Component('foo')
diff --git a/python/mozbuild/mozbuild/test/backend/data/binary-components/moz.build b/python/mozbuild/mozbuild/test/backend/data/binary-components/moz.build
new file mode 100644
index 000000000..1776d0514
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/binary-components/moz.build
@@ -0,0 +1,10 @@
+@template
+def Component(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+ IS_COMPONENT = True
+
+DIRS += [
+ 'foo',
+ 'bar',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/branding-files/bar.ico b/python/mozbuild/mozbuild/test/backend/data/branding-files/bar.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/branding-files/bar.ico
diff --git a/python/mozbuild/mozbuild/test/backend/data/branding-files/foo.ico b/python/mozbuild/mozbuild/test/backend/data/branding-files/foo.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/branding-files/foo.ico
diff --git a/python/mozbuild/mozbuild/test/backend/data/branding-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/branding-files/moz.build
new file mode 100644
index 000000000..083f0f82d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/branding-files/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+BRANDING_FILES += [
+ 'bar.ico',
+ 'sub/quux.png',
+]
+
+BRANDING_FILES.icons += [
+ 'foo.ico',
+]
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/branding-files/sub/quux.png b/python/mozbuild/mozbuild/test/backend/data/branding-files/sub/quux.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/branding-files/sub/quux.png
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build
new file mode 100644
index 000000000..8d6218ea9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build
@@ -0,0 +1,54 @@
+DIST_SUBDIR = 'app'
+
+EXTRA_JS_MODULES += [
+ '../foo.jsm',
+]
+
+EXTRA_JS_MODULES.child += [
+ '../bar.jsm',
+]
+
+EXTRA_PP_JS_MODULES += [
+ '../baz.jsm',
+]
+
+EXTRA_PP_JS_MODULES.child2 += [
+ '../qux.jsm',
+]
+
+FINAL_TARGET_FILES += [
+ '../foo.ini',
+]
+
+FINAL_TARGET_FILES.child += [
+ '../bar.ini',
+]
+
+FINAL_TARGET_PP_FILES += [
+ '../baz.ini',
+ '../foo.css',
+]
+
+FINAL_TARGET_PP_FILES.child2 += [
+ '../qux.ini',
+]
+
+EXTRA_COMPONENTS += [
+ '../components.manifest',
+ '../foo.js',
+]
+
+EXTRA_PP_COMPONENTS += [
+ '../bar.js',
+]
+
+JS_PREFERENCE_FILES += [
+ '../prefs.js',
+]
+
+JAR_MANIFESTS += [
+ '../jar.mn',
+]
+
+DEFINES['FOO'] = 'bar'
+DEFINES['BAR'] = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.ini b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini
new file mode 100644
index 000000000..91dcbe153
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini
@@ -0,0 +1 @@
+bar.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/bar.js
new file mode 100644
index 000000000..1a608e8a5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.js
@@ -0,0 +1,2 @@
+#filter substitution
+bar.js: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm
new file mode 100644
index 000000000..05db2e2f6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm
@@ -0,0 +1 @@
+bar.jsm
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.ini b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini
new file mode 100644
index 000000000..975a1e437
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini
@@ -0,0 +1,2 @@
+#filter substitution
+baz.ini: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm
new file mode 100644
index 000000000..f39ed0208
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm
@@ -0,0 +1,2 @@
+#filter substitution
+baz.jsm: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/components.manifest b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest
new file mode 100644
index 000000000..b5bb87254
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest
@@ -0,0 +1,2 @@
+component {foo} foo.js
+component {bar} bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.css b/python/mozbuild/mozbuild/test/backend/data/build/foo.css
new file mode 100644
index 000000000..1803d6c57
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.css
@@ -0,0 +1,2 @@
+%filter substitution
+foo.css: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.ini b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini
new file mode 100644
index 000000000..c93c9d765
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini
@@ -0,0 +1 @@
+foo.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.js b/python/mozbuild/mozbuild/test/backend/data/build/foo.js
new file mode 100644
index 000000000..4fa71e2d2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.js
@@ -0,0 +1 @@
+foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm
new file mode 100644
index 000000000..d58fd61c1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm
@@ -0,0 +1 @@
+foo.jsm
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/jar.mn b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn
new file mode 100644
index 000000000..393055c4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn
@@ -0,0 +1,11 @@
+foo.jar:
+% content bar %child/
+% content foo %
+  foo.js
+* foo.css
+  bar.js (subdir/bar.js)
+  qux.js (subdir/bar.js)
+* child/hoge.js (bar.js)
+* child/baz.jsm
+
+% override chrome://foo/bar.svg#hello chrome://bar/bar.svg#hello
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/moz.build
new file mode 100644
index 000000000..b0b0cabd1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/moz.build
@@ -0,0 +1,68 @@
+CONFIGURE_SUBST_FILES += [
+    '/config/autoconf.mk',
+    '/config/emptyvars.mk',
+]
+
+EXTRA_JS_MODULES += [
+    'foo.jsm',
+]
+
+EXTRA_JS_MODULES.child += [
+    'bar.jsm',
+]
+
+EXTRA_PP_JS_MODULES += [
+    'baz.jsm',
+]
+
+EXTRA_PP_JS_MODULES.child2 += [
+    'qux.jsm',
+]
+
+FINAL_TARGET_FILES += [
+    'foo.ini',
+]
+
+FINAL_TARGET_FILES.child += [
+    'bar.ini',
+]
+
+FINAL_TARGET_PP_FILES += [
+    'baz.ini',
+]
+
+FINAL_TARGET_PP_FILES.child2 += [
+    'foo.css',
+    'qux.ini',
+]
+
+EXTRA_COMPONENTS += [
+    'components.manifest',
+    'foo.js',
+]
+
+EXTRA_PP_COMPONENTS += [
+    'bar.js',
+]
+
+JS_PREFERENCE_FILES += [
+    'prefs.js',
+]
+
+RESOURCE_FILES += [
+    'resource',
+]
+
+RESOURCE_FILES.child += [
+    'resource2',
+]
+
+DEFINES['FOO'] = 'foo'
+
+JAR_MANIFESTS += [
+    'jar.mn',
+]
+
+DIRS += [
+    'app',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/prefs.js b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js
new file mode 100644
index 000000000..a030da9fd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js
@@ -0,0 +1 @@
+prefs.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.ini b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini
new file mode 100644
index 000000000..3ce157eb6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini
@@ -0,0 +1,5 @@
+#ifdef BAR
+qux.ini: BAR is defined
+#else
+qux.ini: BAR is not defined
+#endif
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm
new file mode 100644
index 000000000..9c5fe28d5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm
@@ -0,0 +1,5 @@
+#ifdef BAR
+qux.jsm: BAR is defined
+#else
+qux.jsm: BAR is not defined
+#endif
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource b/python/mozbuild/mozbuild/test/backend/data/build/resource
new file mode 100644
index 000000000..91e75c679
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/resource
@@ -0,0 +1 @@
+resource
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource2 b/python/mozbuild/mozbuild/test/backend/data/build/resource2
new file mode 100644
index 000000000..b7c270096
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/resource2
@@ -0,0 +1 @@
+resource2
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js
new file mode 100644
index 000000000..80c887a84
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js
@@ -0,0 +1 @@
+bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build
new file mode 100644
index 000000000..be4b31143
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = 'xyz'
+DEFINES = {
+    'FOO': True,
+}
+
+DEFINES['BAZ'] = '"ab\'cd"'
+DEFINES.update({
+    'BAR': 7,
+    'VALUE': value,
+    'QUX': False,
+})
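For readers of the fixture above: moz.build DEFINES values serialize to compiler flags, where True becomes a bare -DNAME, False suppresses the define entirely, and any other value becomes -DNAME=value. A minimal sketch of that mapping, assuming the standard semantics rather than quoting the actual backend code:

    # Illustrative only: mirrors how a backend might turn DEFINES into flags.
    # True -> bare flag, False -> omitted, other values -> -DNAME=value.
    def defines_to_flags(defines):
        flags = []
        for name, value in sorted(defines.items()):
            if value is False:
                continue  # a False define is simply not emitted
            if value is True:
                flags.append('-D%s' % name)
            else:
                flags.append('-D%s=%s' % (name, value))
        return flags

    # defines_to_flags({'FOO': True, 'BAR': 7, 'QUX': False})
    # => ['-DBAR=7', '-DFOO']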
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build
new file mode 100644
index 000000000..cbd2c942b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+    'install.rdf',
+    'main.js',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build
new file mode 100644
index 000000000..b604ef1a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['!bar.h', 'foo.h']
+EXPORTS.mozilla += ['!mozilla2.h', 'mozilla1.h']
+EXPORTS.mozilla.dom += ['!dom2.h', '!dom3.h', 'dom1.h']
+EXPORTS.gfx += ['gfx.h']
+
+GENERATED_FILES += ['bar.h']
+GENERATED_FILES += ['mozilla2.h']
+GENERATED_FILES += ['dom2.h']
+GENERATED_FILES += ['dom3.h']
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/foo.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build
new file mode 100644
index 000000000..725fa1fd4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS.mozilla += ['mozilla1.h', 'mozilla2.h']
+EXPORTS.mozilla.dom += ['dom1.h', 'dom2.h']
+EXPORTS.mozilla.gfx += ['gfx.h']
+EXPORTS.nspr.private += ['pprio.h']
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build
new file mode 100644
index 000000000..c926e3788
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPI_NAME = 'mycrazyxpi'
+DIST_SUBDIR = 'asubdir'
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build
new file mode 100644
index 000000000..8dcf066a4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_SUBDIR = 'asubdir'
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build
new file mode 100644
index 000000000..1d746eea5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = 'random-final-target'
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build
new file mode 100644
index 000000000..280299475
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ['xpi-name', 'dist-subdir', 'both', 'final-target']
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build
new file mode 100644
index 000000000..54bc30fec
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPI_NAME = 'mycrazyxpi'
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data b/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build
new file mode 100644
index 000000000..1fa389f51
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build
@@ -0,0 +1,12 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [ 'bar.c', 'foo.c', 'quux.c' ]
+
+bar = GENERATED_FILES['bar.c']
+bar.script = 'generate-bar.py:baz'
+
+foo = GENERATED_FILES['foo.c']
+foo.script = 'generate-foo.py'
+foo.inputs = ['foo-data']
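The `script` and `inputs` attributes above follow the GENERATED_FILES convention: the build system imports the script and invokes the named function (the default is main; `generate-bar.py:baz` selects baz instead) with a writable handle for the output file followed by the declared input paths. The checked-in generate-foo.py is empty because only the wiring is under test; a hypothetical script of the expected shape might look like:

    # Hypothetical generate-foo.py body (the fixture's real file is empty).
    # mozbuild calls main(output, *inputs); 'foo-data' arrives as a path.
    def main(output, foo_data):
        with open(foo_data) as fh:
            payload = fh.read().strip()
        # Emit the generated C source through the provided handle.
        output.write('/* generated from: %s */\n' % payload)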
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build
new file mode 100644
index 000000000..14deaf8cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ['!/bar/baz', '!foo']
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build
new file mode 100644
index 000000000..30f8c160f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = 'xyz'
+HOST_DEFINES = {
+    'FOO': True,
+}
+
+HOST_DEFINES['BAZ'] = '"ab\'cd"'
+HOST_DEFINES.update({
+    'BAR': 7,
+    'VALUE': value,
+    'QUX': False,
+})
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build
new file mode 100644
index 000000000..dbadef914
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+# We want to test recursion into the subdir, so do the real work in 'sub'
+DIRS += ['sub']
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in
new file mode 100644
index 000000000..da287dfca
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in
@@ -0,0 +1 @@
+#define MOZ_FOO @MOZ_FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build
new file mode 100644
index 000000000..c2ef44079
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES = ['foo.h']
+
+EXPORTS.out += ['!foo.h']
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build
new file mode 100644
index 000000000..f189212fd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+IPDL_SOURCES += [
+    'bar.ipdl',
+    'bar2.ipdlh',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build
new file mode 100644
index 000000000..4e1554559
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+IPDL_SOURCES += [
+    'foo.ipdl',
+    'foo2.ipdlh',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build
new file mode 100644
index 000000000..03cf5e236
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += [
+    'bar',
+    'foo',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build
new file mode 100644
index 000000000..7daa419f1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ['jar.mn']
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build
new file mode 100644
index 000000000..565c2bee6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ['/bar/baz', 'foo']
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in b/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur b/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/foo.res b/python/mozbuild/mozbuild/test/backend/data/resources/foo.res
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/foo.res
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/moz.build b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build
new file mode 100644
index 000000000..a5771c808
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RESOURCE_FILES += ['bar.res.in', 'foo.res']
+RESOURCE_FILES.cursors += ['cursor.cur']
+RESOURCE_FILES.fonts += ['font1.ttf', 'font2.ttf']
+RESOURCE_FILES.fonts.desktop += ['desktop1.ttf', 'desktop2.ttf']
+RESOURCE_FILES.fonts.mobile += ['mobile.ttf']
+RESOURCE_FILES.tests += ['extra.manifest', 'test.manifest']
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest
diff --git a/python/mozbuild/mozbuild/test/backend/data/sdk-files/bar.ico b/python/mozbuild/mozbuild/test/backend/data/sdk-files/bar.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sdk-files/bar.ico
diff --git a/python/mozbuild/mozbuild/test/backend/data/sdk-files/foo.ico b/python/mozbuild/mozbuild/test/backend/data/sdk-files/foo.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sdk-files/foo.ico
diff --git a/python/mozbuild/mozbuild/test/backend/data/sdk-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/sdk-files/moz.build
new file mode 100644
index 000000000..342987741
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sdk-files/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SDK_FILES += [
+    'bar.ico',
+    'sub/quux.png',
+]
+
+SDK_FILES.icons += [
+    'foo.ico',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/sdk-files/sub/quux.png b/python/mozbuild/mozbuild/test/backend/data/sdk-files/sub/quux.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sdk-files/sub/quux.png
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.c b/python/mozbuild/mozbuild/test/backend/data/sources/bar.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.mm b/python/mozbuild/mozbuild/test/backend/data/sources/bar.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.s b/python/mozbuild/mozbuild/test/backend/data/sources/bar.s
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.s
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/baz.S b/python/mozbuild/mozbuild/test/backend/data/sources/baz.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/baz.S
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.S b/python/mozbuild/mozbuild/test/backend/data/sources/foo.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.S
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm b/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.c b/python/mozbuild/mozbuild/test/backend/data/sources/foo.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.mm b/python/mozbuild/mozbuild/test/backend/data/sources/foo.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build
new file mode 100644
index 000000000..d31acae3d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build
@@ -0,0 +1,21 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+    '''Template for libraries.'''
+    LIBRARY_NAME = name
+
+Library('dummy')
+
+SOURCES += ['bar.s', 'foo.asm']
+
+HOST_SOURCES += ['bar.cpp', 'foo.cpp']
+HOST_SOURCES += ['bar.c', 'foo.c']
+
+SOURCES += ['bar.c', 'foo.c']
+
+SOURCES += ['bar.mm', 'foo.mm']
+
+SOURCES += ['baz.S', 'foo.S']
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in
new file mode 100644
index 000000000..02ff0a3f9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FOO := foo
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in
new file mode 100644
index 000000000..17c147d97
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include $(DEPTH)/config/autoconf.mk
+
+include $(topsrcdir)/config/rules.mk
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build
new file mode 100644
index 000000000..041381548
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build
new file mode 100644
index 000000000..32a37fe46
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build
@@ -0,0 +1,4 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in
new file mode 100644
index 000000000..17c147d97
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include $(DEPTH)/config/autoconf.mk
+
+include $(topsrcdir)/config/rules.mk
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build
new file mode 100644
index 000000000..32a37fe46
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build
@@ -0,0 +1,4 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build
new file mode 100644
index 000000000..0d92bb7c3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ['dir1']
+DIRS += ['dir2']
+TEST_DIRS += ['dir3']
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in
new file mode 100644
index 000000000..5331f1f05
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in
@@ -0,0 +1 @@
+TEST = @MOZ_FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build
new file mode 100644
index 000000000..01545c250
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES = ['foo']
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini
new file mode 100644
index 000000000..4f1335d6b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+  another-file.sjs
+  data/**
+
+[test_sub.js]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini
new file mode 100644
index 000000000..a9860f3de
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini
@@ -0,0 +1,8 @@
+[DEFAULT]
+support-files =
+  support-file.txt
+  !/child/test_sub.js
+  !/child/another-file.sjs
+  !/child/data/**
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build
new file mode 100644
index 000000000..1c1d064ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
+BROWSER_CHROME_MANIFESTS += ['child/browser.ini']
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini
new file mode 100644
index 000000000..1f9816a89
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support-file.txt
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini
new file mode 100644
index 000000000..e2a2fc96a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support-file.txt
+
+[test_bar.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build
new file mode 100644
index 000000000..d10500f8d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += [
+    'mochitest1.ini',
+    'mochitest2.ini',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini
new file mode 100644
index 000000000..03d4f794e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini
@@ -0,0 +1 @@
+[not_packaged.java]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini
new file mode 100644
index 000000000..009b2b223
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini
@@ -0,0 +1 @@
+[mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build
new file mode 100644
index 000000000..82dba29dc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build
@@ -0,0 +1,10 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += [
+    'mochitest.ini',
+]
+
+ANDROID_INSTRUMENTATION_MANIFESTS += [
+    'instrumentation.ini',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini
new file mode 100644
index 000000000..0cddad8ba
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[test_bar.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini
new file mode 100644
index 000000000..81869e1fa
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build
new file mode 100644
index 000000000..d004cdd0f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += [
+    'dir1/xpcshell.ini',
+    'xpcshell.ini',
+]
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini
new file mode 100644
index 000000000..f6a5351e9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support/**
+
+[xpcshell.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/file.in b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in
new file mode 100644
index 000000000..07aa30deb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in
@@ -0,0 +1,3 @@
+#ifdef foo
+@foo@
+@bar@
diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build
new file mode 100644
index 000000000..f0c357aaf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build
@@ -0,0 +1,3 @@
+CONFIGURE_SUBST_FILES = [
+    'file',
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build
new file mode 100644
index 000000000..36a2603b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build
@@ -0,0 +1,23 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+NO_VISIBILITY_FLAGS = True
+
+DELAYLOAD_DLLS = ['foo.dll', 'bar.dll']
+
+RCFILE = 'foo.rc'
+RESFILE = 'bar.res'
+RCINCLUDE = 'bar.rc'
+DEFFILE = 'baz.def'
+
+CFLAGS += ['-fno-exceptions', '-w']
+CXXFLAGS += ['-fcxx-exceptions', '-option with spaces']
+LDFLAGS += ['-ld flag with spaces', '-x']
+HOST_CFLAGS += ['-funroll-loops', '-wall']
+HOST_CXXFLAGS += ['-funroll-loops-harder', '-wall-day-everyday']
+WIN32_EXE_LDFLAGS += ['-subsystem:console']
+
+DISABLE_STL_WRAPPING = True
+
+ALLOW_COMPILER_WARNINGS = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build
new file mode 100644
index 000000000..b77e67ade
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_LIBRARY = 'test'
+SOURCES += ['bar.cpp', 'foo.cpp']
+LOCAL_INCLUDES += ['/includeA/foo']
+DEFINES['DEFINEFOO'] = True
+DEFINES['DEFINEBAR'] = 'bar'
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build
new file mode 100644
index 000000000..d339b48c4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ['dir1']
+
+Library('test')
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build
new file mode 100644
index 000000000..d49efde26
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = 'my_module'
+XPIDL_SOURCES = ['bar.idl', 'foo.idl']
diff --git a/python/mozbuild/mozbuild/test/backend/test_android_eclipse.py b/python/mozbuild/mozbuild/test/backend/test_android_eclipse.py
new file mode 100644
index 000000000..c4e9221c9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_android_eclipse.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import json
+import os
+import unittest
+
+from mozbuild.backend.android_eclipse import AndroidEclipseBackend
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.test.backend.common import BackendTester
+from mozpack.manifests import InstallManifest
+from mozunit import main
+
+import mozpack.path as mozpath
+
+class TestAndroidEclipseBackend(BackendTester):
+ def __init__(self, *args, **kwargs):
+ BackendTester.__init__(self, *args, **kwargs)
+ self.env = None
+
+ def assertExists(self, *args):
+ p = mozpath.join(self.env.topobjdir, 'android_eclipse', *args)
+ self.assertTrue(os.path.exists(p), "Path %s exists" % p)
+
+ def assertNotExists(self, *args):
+ p = mozpath.join(self.env.topobjdir, 'android_eclipse', *args)
+ self.assertFalse(os.path.exists(p), "Path %s does not exist" % p)
+
+ def test_library_project_files(self):
+ """Ensure we generate reasonable files for library projects."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ for f in ['.classpath',
+ '.project',
+ '.settings',
+ 'AndroidManifest.xml',
+ 'project.properties']:
+ self.assertExists('library1', f)
+
+ def test_main_project_files(self):
+ """Ensure we generate reasonable files for main (non-library) projects."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ for f in ['.classpath',
+ '.project',
+ '.settings',
+ 'gen',
+ 'lint.xml',
+ 'project.properties']:
+ self.assertExists('main1', f)
+
+ def test_library_manifest(self):
+ """Ensure we generate manifest for library projects."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertExists('library1', 'AndroidManifest.xml')
+
+ def test_classpathentries(self):
+ """Ensure we produce reasonable classpathentries."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertExists('main3', '.classpath')
+ # This is brittle but simple.
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'main3', '.classpath'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertIn('<classpathentry including="**/*.java" kind="src" path="a" />', lines)
+ self.assertIn('<classpathentry excluding="b/Excludes.java|b/Excludes2.java" including="**/*.java" kind="src" path="b" />', lines)
+ self.assertIn('<classpathentry including="**/*.java" kind="src" path="c"><attributes><attribute name="ignore_optional_problems" value="true" /></attributes></classpathentry>', lines)
+
+ def test_library_project_setting(self):
+ """Ensure we declare a library project correctly."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+
+ self.assertExists('library1', 'project.properties')
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'library1', 'project.properties'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertIn('android.library=true', lines)
+
+ self.assertExists('main1', 'project.properties')
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'main1', 'project.properties'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertNotIn('android.library=true', lines)
+
+ def test_referenced_projects(self):
+ """Ensure we reference another project correctly."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertExists('main4', '.classpath')
+ # This is brittle but simple.
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'main4', '.classpath'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertIn('<classpathentry combineaccessrules="false" kind="src" path="/library1" />', lines)
+
+ def test_extra_jars(self):
+ """Ensure we add class path entries to extra jars iff asked to."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertExists('main2', '.classpath')
+ # This is brittle but simple.
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'main2', '.classpath'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertIn('<classpathentry exported="true" kind="lib" path="%s/main2/extra.jar" />' % self.env.topsrcdir, lines)
+
+ def test_included_projects(self):
+ """Ensure we include another project correctly."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertExists('main4', 'project.properties')
+ # This is brittle but simple.
+ with open(mozpath.join(self.env.topobjdir, 'android_eclipse', 'main4', 'project.properties'), 'rt') as fh:
+ lines = fh.readlines()
+ lines = [line.strip() for line in lines]
+ self.assertIn('android.library.reference.1=library2', lines)
+
+ def assertInManifest(self, project_name, *args):
+ manifest_path = mozpath.join(self.env.topobjdir, 'android_eclipse', '%s.manifest' % project_name)
+ manifest = InstallManifest(manifest_path)
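+        # InstallManifest supports membership tests against destination
+        # paths, so each expected install entry can be asserted directly.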
+ for arg in args:
+            self.assertIn(arg, manifest, '%s should be in manifest for project %s' % (arg, project_name))
+
+ def assertNotInManifest(self, project_name, *args):
+ manifest_path = mozpath.join(self.env.topobjdir, 'android_eclipse', '%s.manifest' % project_name)
+ manifest = InstallManifest(manifest_path)
+ for arg in args:
+            self.assertNotIn(arg, manifest, '%s should not be in manifest for project %s' % (arg, project_name))
+
+ def test_manifest_main_manifest(self):
+ """Ensure we symlink manifest if asked to for main projects."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertInManifest('main1', 'AndroidManifest.xml')
+
+ def test_manifest_res(self):
+ """Ensure we symlink res/ iff asked to."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertInManifest('library1', 'res')
+ self.assertNotInManifest('library2', 'res')
+
+ def test_manifest_classpathentries(self):
+ """Ensure we symlink classpathentries correctly."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertInManifest('main3', 'a/a', 'b', 'd/e')
+
+ def test_manifest_assets(self):
+ """Ensure we symlink assets/ iff asked to."""
+ self.env = self._consume('android_eclipse', AndroidEclipseBackend)
+ self.assertNotInManifest('main1', 'assets')
+ self.assertInManifest('main2', 'assets')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_build.py b/python/mozbuild/mozbuild/test/backend/test_build.py
new file mode 100644
index 000000000..d3f5fb6a9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_build.py
@@ -0,0 +1,233 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals, print_function
+
+import buildconfig
+import os
+import shutil
+import sys
+import unittest
+import mozpack.path as mozpath
+from contextlib import contextmanager
+from mozunit import main
+from mozbuild.backend import get_backend_class
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.backend.recursivemake import RecursiveMakeBackend
+from mozbuild.backend.fastermake import FasterMakeBackend
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.util import ensureParentDir
+from mozpack.files import FileFinder
+from tempfile import mkdtemp
+
+
+BASE_SUBSTS = [
+ ('PYTHON', mozpath.normsep(sys.executable)),
+]
+
+
+class TestBuild(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZCONFIG', None)
+ os.environ.pop('MOZ_OBJDIR', None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ @contextmanager
+ def do_test_backend(self, *backends, **kwargs):
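+        # Build a scratch objdir, run the given backend(s) over the test
+        # moz.build, and yield the resulting config; the objdir is removed
+        # on exit unless MOZ_NO_CLEANUP is set in the environment.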
+ topobjdir = mkdtemp()
+ try:
+ config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir,
+ **kwargs)
+ reader = BuildReader(config)
+ emitter = TreeMetadataEmitter(config)
+ moz_build = mozpath.join(config.topsrcdir, 'test.mozbuild')
+ definitions = list(emitter.emit(
+ reader.read_mozbuild(moz_build, config)))
+ for backend in backends:
+ backend(config).consume(definitions)
+
+ yield config
+ finally:
+ if not os.environ.get('MOZ_NO_CLEANUP'):
+ shutil.rmtree(topobjdir)
+
+ @contextmanager
+ def line_handler(self):
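+        # Accumulate make output lines; replay them on failure, or when
+        # MOZ_VERBOSE_MAKE is set.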
+ lines = []
+
+ def handle_make_line(line):
+ lines.append(line)
+
+ try:
+ yield handle_make_line
+ except:
+ print('\n'.join(lines))
+ raise
+
+ if os.environ.get('MOZ_VERBOSE_MAKE'):
+ print('\n'.join(lines))
+
+ def test_recursive_make(self):
+ substs = list(BASE_SUBSTS)
+ with self.do_test_backend(RecursiveMakeBackend,
+ substs=substs) as config:
+ build = MozbuildObject(config.topsrcdir, None, None,
+ config.topobjdir)
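+            # Variable overrides are passed on the make command line (via
+            # target=), taking precedence over values in the makefiles.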
+ overrides = [
+ 'install_manifest_depends=',
+ 'MOZ_JAR_MAKER_FILE_FORMAT=flat',
+ 'TEST_MOZBUILD=1',
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(directory=config.topobjdir, target=overrides,
+ silent=False, line_handler=handle_make_line)
+
+ self.validate(config)
+
+ def test_faster_recursive_make(self):
+ substs = list(BASE_SUBSTS) + [
+ ('BUILD_BACKENDS', 'FasterMake+RecursiveMake'),
+ ]
+ with self.do_test_backend(get_backend_class(
+ 'FasterMake+RecursiveMake'), substs=substs) as config:
+ buildid = mozpath.join(config.topobjdir, 'config', 'buildid')
+ ensureParentDir(buildid)
+ with open(buildid, 'w') as fh:
+ fh.write('20100101012345\n')
+
+ build = MozbuildObject(config.topsrcdir, None, None,
+ config.topobjdir)
+ overrides = [
+ 'install_manifest_depends=',
+ 'MOZ_JAR_MAKER_FILE_FORMAT=flat',
+ 'TEST_MOZBUILD=1',
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(directory=config.topobjdir, target=overrides,
+ silent=False, line_handler=handle_make_line)
+
+ self.validate(config)
+
+ def test_faster_make(self):
+ substs = list(BASE_SUBSTS) + [
+ ('MOZ_BUILD_APP', 'dummy_app'),
+ ('MOZ_WIDGET_TOOLKIT', 'dummy_widget'),
+ ]
+ with self.do_test_backend(RecursiveMakeBackend, FasterMakeBackend,
+ substs=substs) as config:
+ buildid = mozpath.join(config.topobjdir, 'config', 'buildid')
+ ensureParentDir(buildid)
+ with open(buildid, 'w') as fh:
+ fh.write('20100101012345\n')
+
+ build = MozbuildObject(config.topsrcdir, None, None,
+ config.topobjdir)
+ overrides = [
+ 'TEST_MOZBUILD=1',
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(directory=mozpath.join(config.topobjdir,
+ 'faster'),
+ target=overrides, silent=False,
+ line_handler=handle_make_line)
+
+ self.validate(config)
+
+ def validate(self, config):
+ self.maxDiff = None
+ test_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'data', 'build') + os.sep
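+        # test_path is embedded in the //@line markers the preprocessor
+        # writes into the expected file contents below.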
+
+        # We want unicode instances out of the files; plain str values make
+        # assertEqual's diff output extra verbose on failure because of the
+        # difference in type.
+ result = {
+ p: f.open().read().decode('utf-8')
+ for p, f in FileFinder(mozpath.join(config.topobjdir, 'dist'))
+ }
+ self.assertTrue(len(result))
+ self.assertEqual(result, {
+ 'bin/baz.ini': 'baz.ini: FOO is foo\n',
+ 'bin/child/bar.ini': 'bar.ini\n',
+ 'bin/child2/foo.css': 'foo.css: FOO is foo\n',
+ 'bin/child2/qux.ini': 'qux.ini: BAR is not defined\n',
+ 'bin/chrome.manifest':
+ 'manifest chrome/foo.manifest\n'
+ 'manifest components/components.manifest\n',
+ 'bin/chrome/foo.manifest':
+ 'content bar foo/child/\n'
+ 'content foo foo/\n'
+ 'override chrome://foo/bar.svg#hello '
+ 'chrome://bar/bar.svg#hello\n',
+ 'bin/chrome/foo/bar.js': 'bar.js\n',
+ 'bin/chrome/foo/child/baz.jsm':
+ '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
+ 'bin/chrome/foo/child/hoge.js':
+ '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
+ 'bin/chrome/foo/foo.css': 'foo.css: FOO is foo\n',
+ 'bin/chrome/foo/foo.js': 'foo.js\n',
+ 'bin/chrome/foo/qux.js': 'bar.js\n',
+ 'bin/components/bar.js':
+ '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
+ 'bin/components/components.manifest':
+ 'component {foo} foo.js\ncomponent {bar} bar.js\n',
+ 'bin/components/foo.js': 'foo.js\n',
+ 'bin/defaults/pref/prefs.js': 'prefs.js\n',
+ 'bin/foo.ini': 'foo.ini\n',
+ 'bin/modules/baz.jsm':
+ '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
+ 'bin/modules/child/bar.jsm': 'bar.jsm\n',
+ 'bin/modules/child2/qux.jsm':
+ '//@line 4 "%squx.jsm"\nqux.jsm: BAR is not defined\n'
+ % (test_path),
+ 'bin/modules/foo.jsm': 'foo.jsm\n',
+ 'bin/res/resource': 'resource\n',
+ 'bin/res/child/resource2': 'resource2\n',
+
+ 'bin/app/baz.ini': 'baz.ini: FOO is bar\n',
+ 'bin/app/child/bar.ini': 'bar.ini\n',
+ 'bin/app/child2/qux.ini': 'qux.ini: BAR is defined\n',
+ 'bin/app/chrome.manifest':
+ 'manifest chrome/foo.manifest\n'
+ 'manifest components/components.manifest\n',
+ 'bin/app/chrome/foo.manifest':
+ 'content bar foo/child/\n'
+ 'content foo foo/\n'
+ 'override chrome://foo/bar.svg#hello '
+ 'chrome://bar/bar.svg#hello\n',
+ 'bin/app/chrome/foo/bar.js': 'bar.js\n',
+ 'bin/app/chrome/foo/child/baz.jsm':
+ '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
+ 'bin/app/chrome/foo/child/hoge.js':
+ '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
+ 'bin/app/chrome/foo/foo.css': 'foo.css: FOO is bar\n',
+ 'bin/app/chrome/foo/foo.js': 'foo.js\n',
+ 'bin/app/chrome/foo/qux.js': 'bar.js\n',
+ 'bin/app/components/bar.js':
+ '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
+ 'bin/app/components/components.manifest':
+ 'component {foo} foo.js\ncomponent {bar} bar.js\n',
+ 'bin/app/components/foo.js': 'foo.js\n',
+ 'bin/app/defaults/preferences/prefs.js': 'prefs.js\n',
+ 'bin/app/foo.css': 'foo.css: FOO is bar\n',
+ 'bin/app/foo.ini': 'foo.ini\n',
+ 'bin/app/modules/baz.jsm':
+ '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
+ 'bin/app/modules/child/bar.jsm': 'bar.jsm\n',
+ 'bin/app/modules/child2/qux.jsm':
+ '//@line 2 "%squx.jsm"\nqux.jsm: BAR is defined\n'
+ % (test_path),
+ 'bin/app/modules/foo.jsm': 'foo.jsm\n',
+ })
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_configenvironment.py b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py
new file mode 100644
index 000000000..95593e186
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os, posixpath
+from StringIO import StringIO
+import unittest
+from mozunit import main, MockedOpen
+
+import mozbuild.backend.configenvironment as ConfigStatus
+
+from mozbuild.util import ReadOnlyDict
+
+import mozpack.path as mozpath
+
+
+class ConfigEnvironment(ConfigStatus.ConfigEnvironment):
+ def __init__(self, *args, **kwargs):
+ ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
+ # Be helpful to unit tests
+        if 'top_srcdir' not in self.substs:
+ if os.path.isabs(self.topsrcdir):
+ top_srcdir = self.topsrcdir.replace(os.sep, '/')
+ else:
+ top_srcdir = mozpath.relpath(self.topsrcdir, self.topobjdir).replace(os.sep, '/')
+
+ d = dict(self.substs)
+ d['top_srcdir'] = top_srcdir
+ self.substs = ReadOnlyDict(d)
+
+ d = dict(self.substs_unicode)
+ d[u'top_srcdir'] = top_srcdir.decode('utf-8')
+ self.substs_unicode = ReadOnlyDict(d)
+
+
+class TestEnvironment(unittest.TestCase):
+ def test_auto_substs(self):
+ '''Test the automatically set values of ACDEFINES, ALLSUBSTS
+ and ALLEMPTYSUBSTS.
+ '''
+ env = ConfigEnvironment('.', '.',
+ defines = { 'foo': 'bar', 'baz': 'qux 42',
+ 'abc': "d'e'f", 'extra': 'foobar' },
+ non_global_defines = ['extra', 'ignore'],
+ substs = { 'FOO': 'bar', 'FOOBAR': '', 'ABC': 'def',
+ 'bar': 'baz qux', 'zzz': '"abc def"',
+ 'qux': '' })
+ # non_global_defines should be filtered out in ACDEFINES.
+ # Original order of the defines need to be respected in ACDEFINES
+ self.assertEqual(env.substs['ACDEFINES'], """-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""")
+ # Likewise for ALLSUBSTS, which also must contain ACDEFINES
+ self.assertEqual(env.substs['ALLSUBSTS'], '''ABC = def
+ACDEFINES = -Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar
+FOO = bar
+bar = baz qux
+zzz = "abc def"''')
+ # ALLEMPTYSUBSTS contains all substs with no value.
+ self.assertEqual(env.substs['ALLEMPTYSUBSTS'], '''FOOBAR =
+qux =''')
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
new file mode 100644
index 000000000..87f50f497
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
@@ -0,0 +1,942 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import cPickle as pickle
+import json
+import os
+import unittest
+
+from mozpack.manifests import (
+ InstallManifest,
+)
+from mozunit import main
+
+from mozbuild.backend.recursivemake import (
+ RecursiveMakeBackend,
+ RecursiveMakeTraversal,
+)
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+
+from mozbuild.test.backend.common import BackendTester
+
+import mozpack.path as mozpath
+
+
+class TestRecursiveMakeTraversal(unittest.TestCase):
+ def test_traversal(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add('', dirs=['A', 'B', 'C'])
+ traversal.add('', dirs=['D'])
+ traversal.add('A')
+ traversal.add('B', dirs=['E', 'F'])
+ traversal.add('C', dirs=['G', 'H'])
+ traversal.add('D', dirs=['I', 'K'])
+ traversal.add('D', dirs=['J', 'L'])
+ traversal.add('E')
+ traversal.add('F')
+ traversal.add('G')
+ traversal.add('H')
+ traversal.add('I', dirs=['M', 'N'])
+ traversal.add('J', dirs=['O', 'P'])
+ traversal.add('K', dirs=['Q', 'R'])
+ traversal.add('L', dirs=['S'])
+ traversal.add('M')
+ traversal.add('N', dirs=['T'])
+ traversal.add('O')
+ traversal.add('P', dirs=['U'])
+ traversal.add('Q')
+ traversal.add('R', dirs=['V'])
+ traversal.add('S', dirs=['W'])
+ traversal.add('T')
+ traversal.add('U')
+ traversal.add('V')
+ traversal.add('W', dirs=['X'])
+ traversal.add('X')
+
+ parallels = set(('G', 'H', 'I', 'J', 'O', 'P', 'Q', 'R', 'U'))
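+        # The filter splits each directory's subdirs into parallel dirs
+        # (which only depend on their parent) and sequential dirs (which
+        # are chained one after another).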
+ def filter(current, subdirs):
+ return (current, [d for d in subdirs.dirs if d in parallels],
+ [d for d in subdirs.dirs if d not in parallels])
+
+ start, deps = traversal.compute_dependencies(filter)
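+        # deps maps each directory to the directories that must complete
+        # before it; start is where the overall traversal ends.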
+ self.assertEqual(start, ('X',))
+ self.maxDiff = None
+ self.assertEqual(deps, {
+ 'A': ('',),
+ 'B': ('A',),
+ 'C': ('F',),
+ 'D': ('G', 'H'),
+ 'E': ('B',),
+ 'F': ('E',),
+ 'G': ('C',),
+ 'H': ('C',),
+ 'I': ('D',),
+ 'J': ('D',),
+ 'K': ('T', 'O', 'U'),
+ 'L': ('Q', 'V'),
+ 'M': ('I',),
+ 'N': ('M',),
+ 'O': ('J',),
+ 'P': ('J',),
+ 'Q': ('K',),
+ 'R': ('K',),
+ 'S': ('L',),
+ 'T': ('N',),
+ 'U': ('P',),
+ 'V': ('R',),
+ 'W': ('S',),
+ 'X': ('W',),
+ })
+
+ self.assertEqual(list(traversal.traverse('', filter)),
+ ['', 'A', 'B', 'E', 'F', 'C', 'G', 'H', 'D', 'I',
+ 'M', 'N', 'T', 'J', 'O', 'P', 'U', 'K', 'Q', 'R',
+ 'V', 'L', 'S', 'W', 'X'])
+
+ self.assertEqual(list(traversal.traverse('C', filter)),
+ ['C', 'G', 'H'])
+
+ def test_traversal_2(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add('', dirs=['A', 'B', 'C'])
+ traversal.add('A')
+ traversal.add('B', dirs=['D', 'E', 'F'])
+ traversal.add('C', dirs=['G', 'H', 'I'])
+ traversal.add('D')
+ traversal.add('E')
+ traversal.add('F')
+ traversal.add('G')
+ traversal.add('H')
+ traversal.add('I')
+
+ start, deps = traversal.compute_dependencies()
+ self.assertEqual(start, ('I',))
+ self.assertEqual(deps, {
+ 'A': ('',),
+ 'B': ('A',),
+ 'C': ('F',),
+ 'D': ('B',),
+ 'E': ('D',),
+ 'F': ('E',),
+ 'G': ('C',),
+ 'H': ('G',),
+ 'I': ('H',),
+ })
+
+ def test_traversal_filter(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add('', dirs=['A', 'B', 'C'])
+ traversal.add('A')
+ traversal.add('B', dirs=['D', 'E', 'F'])
+ traversal.add('C', dirs=['G', 'H', 'I'])
+ traversal.add('D')
+ traversal.add('E')
+ traversal.add('F')
+ traversal.add('G')
+ traversal.add('H')
+ traversal.add('I')
+
+ def filter(current, subdirs):
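+            # Returning None for current drops 'B' from the traversal;
+            # its subdirs get re-parented onto B's predecessor chain.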
+ if current == 'B':
+ current = None
+ return current, [], subdirs.dirs
+
+ start, deps = traversal.compute_dependencies(filter)
+ self.assertEqual(start, ('I',))
+ self.assertEqual(deps, {
+ 'A': ('',),
+ 'C': ('F',),
+ 'D': ('A',),
+ 'E': ('D',),
+ 'F': ('E',),
+ 'G': ('C',),
+ 'H': ('G',),
+ 'I': ('H',),
+ })
+
+class TestRecursiveMakeBackend(BackendTester):
+ def test_basic(self):
+ """Ensure the RecursiveMakeBackend works without error."""
+ env = self._consume('stub0', RecursiveMakeBackend)
+ self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
+ 'backend.RecursiveMakeBackend')))
+ self.assertTrue(os.path.exists(mozpath.join(env.topobjdir,
+ 'backend.RecursiveMakeBackend.in')))
+
+ def test_output_files(self):
+ """Ensure proper files are generated."""
+ env = self._consume('stub0', RecursiveMakeBackend)
+
+ expected = ['', 'dir1', 'dir2']
+
+ for d in expected:
+ out_makefile = mozpath.join(env.topobjdir, d, 'Makefile')
+ out_backend = mozpath.join(env.topobjdir, d, 'backend.mk')
+
+ self.assertTrue(os.path.exists(out_makefile))
+ self.assertTrue(os.path.exists(out_backend))
+
+ def test_makefile_conversion(self):
+ """Ensure Makefile.in is converted properly."""
+ env = self._consume('stub0', RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, 'Makefile')
+
+ lines = [l.strip() for l in open(p, 'rt').readlines()[1:] if not l.startswith('#')]
+ self.assertEqual(lines, [
+ 'DEPTH := .',
+ 'topobjdir := %s' % env.topobjdir,
+ 'topsrcdir := %s' % env.topsrcdir,
+ 'srcdir := %s' % env.topsrcdir,
+ 'VPATH := %s' % env.topsrcdir,
+ 'relativesrcdir := .',
+ 'include $(DEPTH)/config/autoconf.mk',
+ '',
+ 'FOO := foo',
+ '',
+ 'include $(topsrcdir)/config/recurse.mk',
+ ])
+
+ def test_missing_makefile_in(self):
+ """Ensure missing Makefile.in results in Makefile creation."""
+ env = self._consume('stub0', RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, 'dir2', 'Makefile')
+ self.assertTrue(os.path.exists(p))
+
+ lines = [l.strip() for l in open(p, 'rt').readlines()]
+ self.assertEqual(len(lines), 10)
+
+ self.assertTrue(lines[0].startswith('# THIS FILE WAS AUTOMATICALLY'))
+
+ def test_backend_mk(self):
+ """Ensure backend.mk file is written out properly."""
+ env = self._consume('stub0', RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, 'backend.mk')
+
+ lines = [l.strip() for l in open(p, 'rt').readlines()[2:]]
+ self.assertEqual(lines, [
+ 'DIRS := dir1 dir2',
+ ])
+
+ # Make env.substs writable to add ENABLE_TESTS
+ env.substs = dict(env.substs)
+ env.substs['ENABLE_TESTS'] = '1'
+ self._consume('stub0', RecursiveMakeBackend, env=env)
+ p = mozpath.join(env.topobjdir, 'backend.mk')
+
+ lines = [l.strip() for l in open(p, 'rt').readlines()[2:]]
+ self.assertEqual(lines, [
+ 'DIRS := dir1 dir2 dir3',
+ ])
+
+ def test_mtime_no_change(self):
+ """Ensure mtime is not updated if file content does not change."""
+
+ env = self._consume('stub0', RecursiveMakeBackend)
+
+ makefile_path = mozpath.join(env.topobjdir, 'Makefile')
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ makefile_mtime = os.path.getmtime(makefile_path)
+ backend_mtime = os.path.getmtime(backend_path)
+
+ reader = BuildReader(env)
+ emitter = TreeMetadataEmitter(env)
+ backend = RecursiveMakeBackend(env)
+ backend.consume(emitter.emit(reader.read_topsrcdir()))
+
+ self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime)
+ self.assertEqual(os.path.getmtime(backend_path), backend_mtime)
+
+ def test_substitute_config_files(self):
+ """Ensure substituted config files are produced."""
+ env = self._consume('substitute_config_files', RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, 'foo')
+ self.assertTrue(os.path.exists(p))
+ lines = [l.strip() for l in open(p, 'rt').readlines()]
+ self.assertEqual(lines, [
+ 'TEST = foo',
+ ])
+
+ def test_install_substitute_config_files(self):
+ """Ensure we recurse into the dirs that install substituted config files."""
+ env = self._consume('install_substitute_config_files', RecursiveMakeBackend)
+
+ root_deps_path = mozpath.join(env.topobjdir, 'root-deps.mk')
+ lines = [l.strip() for l in open(root_deps_path, 'rt').readlines()]
+
+ # Make sure we actually recurse into the sub directory during export to
+ # install the subst file.
+        self.assertIn('recurse_export: sub/export', lines)
+
+ def test_variable_passthru(self):
+ """Ensure variable passthru is written out correctly."""
+ env = self._consume('variable_passthru', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = {
+ 'ALLOW_COMPILER_WARNINGS': [
+ 'ALLOW_COMPILER_WARNINGS := 1',
+ ],
+ 'DISABLE_STL_WRAPPING': [
+ 'DISABLE_STL_WRAPPING := 1',
+ ],
+ 'VISIBILITY_FLAGS': [
+ 'VISIBILITY_FLAGS :=',
+ ],
+ 'RCFILE': [
+ 'RCFILE := foo.rc',
+ ],
+ 'RESFILE': [
+ 'RESFILE := bar.res',
+ ],
+ 'RCINCLUDE': [
+ 'RCINCLUDE := bar.rc',
+ ],
+ 'DEFFILE': [
+ 'DEFFILE := baz.def',
+ ],
+ 'MOZBUILD_CFLAGS': [
+ 'MOZBUILD_CFLAGS += -fno-exceptions',
+ 'MOZBUILD_CFLAGS += -w',
+ ],
+ 'MOZBUILD_CXXFLAGS': [
+ 'MOZBUILD_CXXFLAGS += -fcxx-exceptions',
+ "MOZBUILD_CXXFLAGS += '-option with spaces'",
+ ],
+ 'MOZBUILD_LDFLAGS': [
+ "MOZBUILD_LDFLAGS += '-ld flag with spaces'",
+ 'MOZBUILD_LDFLAGS += -x',
+ 'MOZBUILD_LDFLAGS += -DELAYLOAD:foo.dll',
+ 'MOZBUILD_LDFLAGS += -DELAYLOAD:bar.dll',
+ ],
+ 'MOZBUILD_HOST_CFLAGS': [
+ 'MOZBUILD_HOST_CFLAGS += -funroll-loops',
+ 'MOZBUILD_HOST_CFLAGS += -wall',
+ ],
+ 'MOZBUILD_HOST_CXXFLAGS': [
+ 'MOZBUILD_HOST_CXXFLAGS += -funroll-loops-harder',
+ 'MOZBUILD_HOST_CXXFLAGS += -wall-day-everyday',
+ ],
+ 'WIN32_EXE_LDFLAGS': [
+ 'WIN32_EXE_LDFLAGS += -subsystem:console',
+ ],
+ }
+
+ for var, val in expected.items():
+ # print("test_variable_passthru[%s]" % (var))
+            found = [line for line in lines if line.startswith(var)]
+ self.assertEqual(found, val)
+
+ def test_sources(self):
+ """Ensure SOURCES and HOST_SOURCES are handled properly."""
+ env = self._consume('sources', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = {
+ 'ASFILES': [
+ 'ASFILES += bar.s',
+ 'ASFILES += foo.asm',
+ ],
+ 'CMMSRCS': [
+ 'CMMSRCS += bar.mm',
+ 'CMMSRCS += foo.mm',
+ ],
+ 'CSRCS': [
+ 'CSRCS += bar.c',
+ 'CSRCS += foo.c',
+ ],
+ 'HOST_CPPSRCS': [
+ 'HOST_CPPSRCS += bar.cpp',
+ 'HOST_CPPSRCS += foo.cpp',
+ ],
+ 'HOST_CSRCS': [
+ 'HOST_CSRCS += bar.c',
+ 'HOST_CSRCS += foo.c',
+ ],
+ 'SSRCS': [
+ 'SSRCS += baz.S',
+ 'SSRCS += foo.S',
+ ],
+ }
+
+ for var, val in expected.items():
+            found = [line for line in lines if line.startswith(var)]
+ self.assertEqual(found, val)
+
+ def test_exports(self):
+ """Ensure EXPORTS is handled properly."""
+ env = self._consume('exports', RecursiveMakeBackend)
+
+ # EXPORTS files should appear in the dist_include install manifest.
+ m = InstallManifest(path=mozpath.join(env.topobjdir,
+ '_build_manifests', 'install', 'dist_include'))
+ self.assertEqual(len(m), 7)
+ self.assertIn('foo.h', m)
+ self.assertIn('mozilla/mozilla1.h', m)
+ self.assertIn('mozilla/dom/dom2.h', m)
+
+ def test_generated_files(self):
+ """Ensure GENERATED_FILES is handled properly."""
+ env = self._consume('generated-files', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = [
+ 'export:: bar.c',
+ 'GARBAGE += bar.c',
+ 'EXTRA_MDDEPEND_FILES += bar.c.pp',
+ 'bar.c: %s/generate-bar.py' % env.topsrcdir,
+ '$(REPORT_BUILD)',
+ '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp)' % env.topsrcdir,
+ '',
+ 'export:: foo.c',
+ 'GARBAGE += foo.c',
+ 'EXTRA_MDDEPEND_FILES += foo.c.pp',
+ 'foo.c: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
+ '$(REPORT_BUILD)',
+ '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(srcdir)/foo-data)' % (env.topsrcdir),
+ '',
+ 'export:: quux.c',
+ 'GARBAGE += quux.c',
+ 'EXTRA_MDDEPEND_FILES += quux.c.pp',
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_exports_generated(self):
+ """Ensure EXPORTS that are listed in GENERATED_FILES
+ are handled properly."""
+ env = self._consume('exports-generated', RecursiveMakeBackend)
+
+ # EXPORTS files should appear in the dist_include install manifest.
+ m = InstallManifest(path=mozpath.join(env.topobjdir,
+ '_build_manifests', 'install', 'dist_include'))
+ self.assertEqual(len(m), 8)
+ self.assertIn('foo.h', m)
+ self.assertIn('mozilla/mozilla1.h', m)
+ self.assertIn('mozilla/dom/dom1.h', m)
+ self.assertIn('gfx/gfx.h', m)
+ self.assertIn('bar.h', m)
+ self.assertIn('mozilla/mozilla2.h', m)
+ self.assertIn('mozilla/dom/dom2.h', m)
+ self.assertIn('mozilla/dom/dom3.h', m)
+ # EXPORTS files that are also GENERATED_FILES should be handled as
+ # INSTALL_TARGETS.
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+ expected = [
+ 'export:: bar.h',
+ 'GARBAGE += bar.h',
+ 'EXTRA_MDDEPEND_FILES += bar.h.pp',
+ 'export:: mozilla2.h',
+ 'GARBAGE += mozilla2.h',
+ 'EXTRA_MDDEPEND_FILES += mozilla2.h.pp',
+ 'export:: dom2.h',
+ 'GARBAGE += dom2.h',
+ 'EXTRA_MDDEPEND_FILES += dom2.h.pp',
+ 'export:: dom3.h',
+ 'GARBAGE += dom3.h',
+ 'EXTRA_MDDEPEND_FILES += dom3.h.pp',
+ 'dist_include_FILES += bar.h',
+ 'dist_include_DEST := $(DEPTH)/dist/include/',
+ 'dist_include_TARGET := export',
+ 'INSTALL_TARGETS += dist_include',
+ 'dist_include_mozilla_FILES += mozilla2.h',
+ 'dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla',
+ 'dist_include_mozilla_TARGET := export',
+ 'INSTALL_TARGETS += dist_include_mozilla',
+ 'dist_include_mozilla_dom_FILES += dom2.h',
+ 'dist_include_mozilla_dom_FILES += dom3.h',
+ 'dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom',
+ 'dist_include_mozilla_dom_TARGET := export',
+ 'INSTALL_TARGETS += dist_include_mozilla_dom',
+ ]
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_resources(self):
+ """Ensure RESOURCE_FILES is handled properly."""
+ env = self._consume('resources', RecursiveMakeBackend)
+
+ # RESOURCE_FILES should appear in the dist_bin install manifest.
+ m = InstallManifest(path=os.path.join(env.topobjdir,
+ '_build_manifests', 'install', 'dist_bin'))
+ self.assertEqual(len(m), 10)
+ self.assertIn('res/foo.res', m)
+ self.assertIn('res/fonts/font1.ttf', m)
+ self.assertIn('res/fonts/desktop/desktop2.ttf', m)
+
+ self.assertIn('res/bar.res.in', m)
+ self.assertIn('res/tests/test.manifest', m)
+ self.assertIn('res/tests/extra.manifest', m)
+
+ def test_branding_files(self):
+ """Ensure BRANDING_FILES is handled properly."""
+ env = self._consume('branding-files', RecursiveMakeBackend)
+
+        # BRANDING_FILES should appear in the dist_branding install manifest.
+ m = InstallManifest(path=os.path.join(env.topobjdir,
+ '_build_manifests', 'install', 'dist_branding'))
+ self.assertEqual(len(m), 3)
+ self.assertIn('bar.ico', m)
+ self.assertIn('quux.png', m)
+ self.assertIn('icons/foo.ico', m)
+
+ def test_sdk_files(self):
+ """Ensure SDK_FILES is handled properly."""
+ env = self._consume('sdk-files', RecursiveMakeBackend)
+
+        # SDK_FILES should appear in the dist_sdk install manifest.
+ m = InstallManifest(path=os.path.join(env.topobjdir,
+ '_build_manifests', 'install', 'dist_sdk'))
+ self.assertEqual(len(m), 3)
+ self.assertIn('bar.ico', m)
+ self.assertIn('quux.png', m)
+ self.assertIn('icons/foo.ico', m)
+
+ def test_test_manifests_files_written(self):
+ """Ensure test manifests get turned into files."""
+ env = self._consume('test-manifests-written', RecursiveMakeBackend)
+
+ tests_dir = mozpath.join(env.topobjdir, '_tests')
+ m_master = mozpath.join(tests_dir, 'testing', 'mochitest', 'tests', 'mochitest.ini')
+ x_master = mozpath.join(tests_dir, 'xpcshell', 'xpcshell.ini')
+ self.assertTrue(os.path.exists(m_master))
+ self.assertTrue(os.path.exists(x_master))
+
+ lines = [l.strip() for l in open(x_master, 'rt').readlines()]
+ self.assertEqual(lines, [
+ '; THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.',
+ '',
+ '[include:dir1/xpcshell.ini]',
+ '[include:xpcshell.ini]',
+ ])
+
+ all_tests_path = mozpath.join(env.topobjdir, 'all-tests.pkl')
+ self.assertTrue(os.path.exists(all_tests_path))
+
+ with open(all_tests_path, 'rb') as fh:
+ o = pickle.load(fh)
+
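+        # all-tests.pkl maps test paths to lists of per-test metadata.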
+ self.assertIn('xpcshell.js', o)
+ self.assertIn('dir1/test_bar.js', o)
+
+ self.assertEqual(len(o['xpcshell.js']), 1)
+
+ def test_test_manifest_pattern_matches_recorded(self):
+ """Pattern matches in test manifests' support-files should be recorded."""
+ env = self._consume('test-manifests-written', RecursiveMakeBackend)
+ m = InstallManifest(path=mozpath.join(env.topobjdir,
+ '_build_manifests', 'install', '_test_files'))
+
+ # This is not the most robust test in the world, but it gets the job
+ # done.
+ entries = [e for e in m._dests.keys() if '**' in e]
+ self.assertEqual(len(entries), 1)
+ self.assertIn('support/**', entries[0])
+
+    def test_test_manifest_deferred_installs_written(self):
+ """Shared support files are written to their own data file by the backend."""
+ env = self._consume('test-manifest-shared-support', RecursiveMakeBackend)
+ all_tests_path = mozpath.join(env.topobjdir, 'all-tests.pkl')
+ self.assertTrue(os.path.exists(all_tests_path))
+ test_installs_path = mozpath.join(env.topobjdir, 'test-installs.pkl')
+
+        with open(test_installs_path, 'rb') as fh:
+ test_installs = pickle.load(fh)
+
+ self.assertEqual(set(test_installs.keys()),
+ set(['child/test_sub.js',
+ 'child/data/**',
+ 'child/another-file.sjs']))
+ for key in test_installs.keys():
+ self.assertIn(key, test_installs)
+
+ test_files_manifest = mozpath.join(env.topobjdir,
+ '_build_manifests',
+ 'install',
+ '_test_files')
+
+        # First, read the install manifest generated from the ini manifest.
+ m = InstallManifest(path=test_files_manifest)
+
+ # Then, synthesize one from the test-installs.pkl file. This should
+ # allow us to re-create a subset of the above.
+ synthesized_manifest = InstallManifest()
+ for item, installs in test_installs.items():
+ for install_info in installs:
+ if len(install_info) == 3:
+ synthesized_manifest.add_pattern_symlink(*install_info)
+ if len(install_info) == 2:
+ synthesized_manifest.add_symlink(*install_info)
+
+ self.assertEqual(len(synthesized_manifest), 3)
+ for item, info in synthesized_manifest._dests.items():
+ self.assertIn(item, m)
+ self.assertEqual(info, m._dests[item])
+
+ def test_xpidl_generation(self):
+ """Ensure xpidl files and directories are written out."""
+ env = self._consume('xpidl', RecursiveMakeBackend)
+
+ # Install manifests should contain entries.
+ install_dir = mozpath.join(env.topobjdir, '_build_manifests',
+ 'install')
+ self.assertTrue(os.path.isfile(mozpath.join(install_dir, 'dist_idl')))
+ self.assertTrue(os.path.isfile(mozpath.join(install_dir, 'xpidl')))
+
+ m = InstallManifest(path=mozpath.join(install_dir, 'dist_idl'))
+ self.assertEqual(len(m), 2)
+ self.assertIn('bar.idl', m)
+ self.assertIn('foo.idl', m)
+
+ m = InstallManifest(path=mozpath.join(install_dir, 'xpidl'))
+ self.assertIn('.deps/my_module.pp', m)
+
+ m = InstallManifest(path=os.path.join(install_dir, 'dist_bin'))
+ self.assertIn('components/my_module.xpt', m)
+ self.assertIn('components/interfaces.manifest', m)
+
+ m = InstallManifest(path=mozpath.join(install_dir, 'dist_include'))
+ self.assertIn('foo.h', m)
+
+ p = mozpath.join(env.topobjdir, 'config/makefiles/xpidl')
+ self.assertTrue(os.path.isdir(p))
+
+ self.assertTrue(os.path.isfile(mozpath.join(p, 'Makefile')))
+
+ def test_old_install_manifest_deleted(self):
+ # Simulate an install manifest from a previous backend version. Ensure
+ # it is deleted.
+ env = self._get_environment('stub0')
+ purge_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install')
+ manifest_path = mozpath.join(purge_dir, 'old_manifest')
+ os.makedirs(purge_dir)
+ m = InstallManifest()
+ m.write(path=manifest_path)
+ with open(mozpath.join(
+ env.topobjdir, 'backend.RecursiveMakeBackend'), 'w') as f:
+ f.write('%s\n' % manifest_path)
+
+ self.assertTrue(os.path.exists(manifest_path))
+ self._consume('stub0', RecursiveMakeBackend, env)
+ self.assertFalse(os.path.exists(manifest_path))
+
+ def test_install_manifests_written(self):
+ env, objs = self._emit('stub0')
+ backend = RecursiveMakeBackend(env)
+
+ m = InstallManifest()
+ backend._install_manifests['testing'] = m
+ m.add_symlink(__file__, 'self')
+ backend.consume(objs)
+
+ man_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install')
+ self.assertTrue(os.path.isdir(man_dir))
+
+ expected = ['testing']
+ for e in expected:
+ full = mozpath.join(man_dir, e)
+ self.assertTrue(os.path.exists(full))
+
+ m2 = InstallManifest(path=full)
+ self.assertEqual(m, m2)
+
+ def test_ipdl_sources(self):
+ """Test that IPDL_SOURCES are written to ipdlsrcs.mk correctly."""
+ env = self._consume('ipdl_sources', RecursiveMakeBackend)
+
+ manifest_path = mozpath.join(env.topobjdir,
+ 'ipc', 'ipdl', 'ipdlsrcs.mk')
+ lines = [l.strip() for l in open(manifest_path, 'rt').readlines()]
+
+ # Handle Windows paths correctly
+ topsrcdir = env.topsrcdir.replace(os.sep, '/')
+
+ expected = [
+ "ALL_IPDLSRCS := %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([topsrcdir] * 4),
+ "CPPSRCS := UnifiedProtocols0.cpp",
+ "IPDLDIRS := %s/bar %s/foo" % (topsrcdir, topsrcdir),
+ ]
+
+        found = [line for line in lines if line.startswith(('ALL_IPDLSRCS',
+                                                            'CPPSRCS',
+                                                            'IPDLDIRS'))]
+ self.assertEqual(found, expected)
+
+ def test_defines(self):
+ """Test that DEFINES are written to backend.mk correctly."""
+ env = self._consume('defines', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ var = 'DEFINES'
+ defines = [val for val in lines if val.startswith(var)]
+
+ expected = ['DEFINES += -DFOO \'-DBAZ="ab\'\\\'\'cd"\' -UQUX -DBAR=7 -DVALUE=xyz']
+ self.assertEqual(defines, expected)
+
+ def test_host_defines(self):
+ """Test that HOST_DEFINES are written to backend.mk correctly."""
+ env = self._consume('host-defines', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ var = 'HOST_DEFINES'
+ defines = [val for val in lines if val.startswith(var)]
+
+ expected = ['HOST_DEFINES += -DFOO \'-DBAZ="ab\'\\\'\'cd"\' -UQUX -DBAR=7 -DVALUE=xyz']
+ self.assertEqual(defines, expected)
+
+ def test_local_includes(self):
+ """Test that LOCAL_INCLUDES are written to backend.mk correctly."""
+ env = self._consume('local_includes', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = [
+ 'LOCAL_INCLUDES += -I$(srcdir)/bar/baz',
+ 'LOCAL_INCLUDES += -I$(srcdir)/foo',
+ ]
+
+        found = [line for line in lines if line.startswith('LOCAL_INCLUDES')]
+ self.assertEqual(found, expected)
+
+ def test_generated_includes(self):
+ """Test that GENERATED_INCLUDES are written to backend.mk correctly."""
+ env = self._consume('generated_includes', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = [
+ 'LOCAL_INCLUDES += -I$(CURDIR)/bar/baz',
+ 'LOCAL_INCLUDES += -I$(CURDIR)/foo',
+ ]
+
+        found = [line for line in lines if line.startswith('LOCAL_INCLUDES')]
+ self.assertEqual(found, expected)
+
+ def test_final_target(self):
+ """Test that FINAL_TARGET is written to backend.mk correctly."""
+ env = self._consume('final_target', RecursiveMakeBackend)
+
+ final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"
+ expected = dict()
+ expected[env.topobjdir] = []
+ expected[mozpath.join(env.topobjdir, 'both')] = [
+ 'XPI_NAME = mycrazyxpi',
+ 'DIST_SUBDIR = asubdir',
+ final_target_rule
+ ]
+ expected[mozpath.join(env.topobjdir, 'dist-subdir')] = [
+ 'DIST_SUBDIR = asubdir',
+ final_target_rule
+ ]
+ expected[mozpath.join(env.topobjdir, 'xpi-name')] = [
+ 'XPI_NAME = mycrazyxpi',
+ final_target_rule
+ ]
+ expected[mozpath.join(env.topobjdir, 'final-target')] = [
+ 'FINAL_TARGET = $(DEPTH)/random-final-target'
+ ]
+ for key, expected_rules in expected.iteritems():
+ backend_path = mozpath.join(key, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+            found = [line for line in lines if
+                     line.startswith('FINAL_TARGET') or
+                     line.startswith('XPI_NAME') or
+                     line.startswith('DIST_SUBDIR')]
+ self.assertEqual(found, expected_rules)
+
+ def test_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES is written to backend.mk correctly."""
+ env = self._consume('dist-files', RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, 'backend.mk')
+ lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+
+ expected = [
+ 'DIST_FILES_0 += $(srcdir)/install.rdf',
+ 'DIST_FILES_0 += $(srcdir)/main.js',
+ 'DIST_FILES_0_PATH := $(DEPTH)/dist/bin/',
+ 'DIST_FILES_0_TARGET := misc',
+ 'PP_TARGETS += DIST_FILES_0',
+ ]
+
+        found = [line for line in lines if 'DIST_FILES' in line]
+ self.assertEqual(found, expected)
+
+ def test_config(self):
+ """Test that CONFIGURE_SUBST_FILES are properly handled."""
+ env = self._consume('test_config', RecursiveMakeBackend)
+
+ self.assertEqual(
+ open(os.path.join(env.topobjdir, 'file'), 'r').readlines(), [
+ '#ifdef foo\n',
+ 'bar baz\n',
+ '@bar@\n',
+ ])
+
+ def test_jar_manifests(self):
+ env = self._consume('jar-manifests', RecursiveMakeBackend)
+
+ with open(os.path.join(env.topobjdir, 'backend.mk'), 'rb') as fh:
+ lines = fh.readlines()
+
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn('JAR_MANIFEST := %s/jar.mn' % env.topsrcdir, lines)
+
+ def test_test_manifests_duplicate_support_files(self):
+ """Ensure duplicate support-files in test manifests work."""
+ env = self._consume('test-manifests-duplicate-support-files',
+ RecursiveMakeBackend)
+
+ p = os.path.join(env.topobjdir, '_build_manifests', 'install', '_test_files')
+ m = InstallManifest(p)
+ self.assertIn('testing/mochitest/tests/support-file.txt', m)
+
+ def test_android_eclipse(self):
+ env = self._consume('android_eclipse', RecursiveMakeBackend)
+
+ with open(mozpath.join(env.topobjdir, 'backend.mk'), 'rb') as fh:
+ lines = fh.readlines()
+
+ lines = [line.rstrip() for line in lines]
+
+ # Dependencies first.
+ self.assertIn('ANDROID_ECLIPSE_PROJECT_main1: target1 target2', lines)
+ self.assertIn('ANDROID_ECLIPSE_PROJECT_main4: target3 target4', lines)
+
+ command_template = '\t$(call py_action,process_install_manifest,' + \
+ '--no-remove --no-remove-all-directory-symlinks ' + \
+ '--no-remove-empty-directories %s %s.manifest)'
+ # Commands second.
+ for project_name in ['main1', 'main2', 'library1', 'library2']:
+ stem = '%s/android_eclipse/%s' % (env.topobjdir, project_name)
+ self.assertIn(command_template % (stem, stem), lines)
+
+ # Projects declared in subdirectories.
+ with open(mozpath.join(env.topobjdir, 'subdir', 'backend.mk'), 'rb') as fh:
+ lines = fh.readlines()
+
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn('ANDROID_ECLIPSE_PROJECT_submain: subtarget1 subtarget2', lines)
+
+ for project_name in ['submain', 'sublibrary']:
+ # Destination and install manifest are relative to topobjdir.
+ stem = '%s/android_eclipse/%s' % (env.topobjdir, project_name)
+ self.assertIn(command_template % (stem, stem), lines)
+
+ def test_install_manifests_package_tests(self):
+ """Ensure test suites honor package_tests=False."""
+ env = self._consume('test-manifests-package-tests', RecursiveMakeBackend)
+
+ all_tests_path = mozpath.join(env.topobjdir, 'all-tests.pkl')
+ self.assertTrue(os.path.exists(all_tests_path))
+
+ with open(all_tests_path, 'rb') as fh:
+ o = pickle.load(fh)
+ self.assertIn('mochitest.js', o)
+ self.assertIn('not_packaged.java', o)
+
+ man_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install')
+ self.assertTrue(os.path.isdir(man_dir))
+
+ full = mozpath.join(man_dir, '_test_files')
+ self.assertTrue(os.path.exists(full))
+
+ m = InstallManifest(path=full)
+
+ # Only mochitest.js should be in the install manifest.
+        self.assertIn('testing/mochitest/tests/mochitest.js', m)
+
+ # The path is odd here because we do not normalize at test manifest
+ # processing time. This is a fragile test because there's currently no
+ # way to iterate the manifest.
+        self.assertNotIn('instrumentation/./not_packaged.java', m)
+
+ def test_binary_components(self):
+ """Ensure binary components are correctly handled."""
+ env = self._consume('binary-components', RecursiveMakeBackend)
+
+ with open(mozpath.join(env.topobjdir, 'foo', 'backend.mk')) as fh:
+ lines = fh.readlines()[2:]
+
+ self.assertEqual(lines, [
+ 'misc::\n',
+ '\t$(call py_action,buildlist,$(DEPTH)/dist/bin/chrome.manifest '
+ + "'manifest components/components.manifest')\n",
+ '\t$(call py_action,buildlist,'
+ + '$(DEPTH)/dist/bin/components/components.manifest '
+ + "'binary-component foo')\n",
+ 'LIBRARY_NAME := foo\n',
+ 'FORCE_SHARED_LIB := 1\n',
+ 'IMPORT_LIBRARY := foo\n',
+ 'SHARED_LIBRARY := foo\n',
+ 'IS_COMPONENT := 1\n',
+ 'DSO_SONAME := foo\n',
+ 'LIB_IS_C_ONLY := 1\n',
+ ])
+
+ with open(mozpath.join(env.topobjdir, 'bar', 'backend.mk')) as fh:
+ lines = fh.readlines()[2:]
+
+ self.assertEqual(lines, [
+ 'LIBRARY_NAME := bar\n',
+ 'FORCE_SHARED_LIB := 1\n',
+ 'IMPORT_LIBRARY := bar\n',
+ 'SHARED_LIBRARY := bar\n',
+ 'IS_COMPONENT := 1\n',
+ 'DSO_SONAME := bar\n',
+ 'LIB_IS_C_ONLY := 1\n',
+ ])
+
+ self.assertTrue(os.path.exists(mozpath.join(env.topobjdir, 'binaries.json')))
+ with open(mozpath.join(env.topobjdir, 'binaries.json'), 'rb') as fh:
+ binaries = json.load(fh)
+
+ self.assertEqual(binaries, {
+ 'programs': [],
+ 'shared_libraries': [
+ {
+ 'basename': 'foo',
+ 'import_name': 'foo',
+ 'install_target': 'dist/bin',
+ 'lib_name': 'foo',
+ 'relobjdir': 'foo',
+ 'soname': 'foo',
+ },
+ {
+ 'basename': 'bar',
+ 'import_name': 'bar',
+ 'install_target': 'dist/bin',
+ 'lib_name': 'bar',
+ 'relobjdir': 'bar',
+ 'soname': 'bar',
+ }
+ ],
+ })
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
new file mode 100644
index 000000000..bfc95e552
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
@@ -0,0 +1,64 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from xml.dom.minidom import parse
+import os
+import unittest
+
+from mozbuild.backend.visualstudio import VisualStudioBackend
+from mozbuild.test.backend.common import BackendTester
+
+from mozunit import main
+
+
+class TestVisualStudioBackend(BackendTester):
+ @unittest.skip('Failing inconsistently in automation.')
+ def test_basic(self):
+ """Ensure we can consume our stub project."""
+
+ env = self._consume('visual-studio', VisualStudioBackend)
+
+ msvc = os.path.join(env.topobjdir, 'msvc')
+ self.assertTrue(os.path.isdir(msvc))
+
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.sln')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.props')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'mach.bat')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'binary_my_app.vcxproj')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'target_full.vcxproj')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj')))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj.user')))
+
+ d = parse(os.path.join(msvc, 'library_dir1.vcxproj'))
+ self.assertEqual(d.documentElement.tagName, 'Project')
+ els = d.getElementsByTagName('ClCompile')
+ self.assertEqual(len(els), 2)
+
+ # mozilla-config.h should be explicitly listed as an include.
+ els = d.getElementsByTagName('NMakeForcedIncludes')
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].firstChild.nodeValue,
+ '$(TopObjDir)\\dist\\include\\mozilla-config.h')
+
+ # LOCAL_INCLUDES get added to the include search path.
+ els = d.getElementsByTagName('NMakeIncludeSearchPath')
+ self.assertEqual(len(els), 1)
+ includes = els[0].firstChild.nodeValue.split(';')
+ self.assertIn(os.path.normpath('$(TopSrcDir)/includeA/foo'), includes)
+ self.assertIn(os.path.normpath('$(TopSrcDir)/dir1'), includes)
+ self.assertIn(os.path.normpath('$(TopObjDir)/dir1'), includes)
+ self.assertIn(os.path.normpath('$(TopObjDir)\\dist\\include'), includes)
+
+ # DEFINES get added to the project.
+ els = d.getElementsByTagName('NMakePreprocessorDefinitions')
+ self.assertEqual(len(els), 1)
+ defines = els[0].firstChild.nodeValue.split(';')
+ self.assertIn('DEFINEFOO', defines)
+ self.assertIn('DEFINEBAR=bar', defines)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/common.py b/python/mozbuild/mozbuild/test/common.py
new file mode 100644
index 000000000..76a39b313
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/common.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from mach.logging import LoggingManager
+
+from mozbuild.util import ReadOnlyDict
+
+import mozpack.path as mozpath
+
+
+# By including this module, tests get structured logging.
+log_manager = LoggingManager()
+log_manager.add_terminal_logging()
+
+# mozconfig is not a reusable type (it's actually a module), so we
+# have to mock it.
+class MockConfig(object):
+ def __init__(self,
+ topsrcdir='/path/to/topsrcdir',
+ extra_substs={},
+ error_is_fatal=True,
+ ):
+ self.topsrcdir = mozpath.abspath(topsrcdir)
+ self.topobjdir = mozpath.abspath('/path/to/topobjdir')
+
+ self.substs = ReadOnlyDict({
+ 'MOZ_FOO': 'foo',
+ 'MOZ_BAR': 'bar',
+ 'MOZ_TRUE': '1',
+ 'MOZ_FALSE': '',
+ 'DLL_PREFIX': 'lib',
+ 'DLL_SUFFIX': '.so'
+ }, **extra_substs)
+
+ self.substs_unicode = ReadOnlyDict({k.decode('utf-8'): v.decode('utf-8',
+ 'replace') for k, v in self.substs.items()})
+
+ self.defines = self.substs
+
+ self.external_source_dir = None
+ self.lib_prefix = 'lib'
+ self.lib_suffix = '.a'
+ self.import_prefix = 'lib'
+ self.import_suffix = '.so'
+ self.dll_prefix = 'lib'
+ self.dll_suffix = '.so'
+ self.error_is_fatal = error_is_fatal
diff --git a/python/mozbuild/mozbuild/test/compilation/__init__.py b/python/mozbuild/mozbuild/test/compilation/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/compilation/__init__.py
diff --git a/python/mozbuild/mozbuild/test/compilation/test_warnings.py b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
new file mode 100644
index 000000000..cd2406dfc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
@@ -0,0 +1,241 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+from mozfile.mozfile import NamedTemporaryFile
+
+from mozbuild.compilation.warnings import CompilerWarning
+from mozbuild.compilation.warnings import WarningsCollector
+from mozbuild.compilation.warnings import WarningsDatabase
+
+from mozunit import main
+
+CLANG_TESTS = [
+ ('foobar.cpp:123:10: warning: you messed up [-Wfoo]',
+ 'foobar.cpp', 123, 10, 'you messed up', '-Wfoo'),
+ ("c_locale_dummy.c:457:1: warning: (near initialization for "
+ "'full_wmonthname[0]') [-Wpointer-sign]",
+ 'c_locale_dummy.c', 457, 1,
+ "(near initialization for 'full_wmonthname[0]')", '-Wpointer-sign')
+]
+
+MSVC_TESTS = [
+ ("C:/mozilla-central/test/foo.cpp(793) : warning C4244: 'return' : "
+ "conversion from 'double' to 'uint32_t', possible loss of data",
+ 'C:/mozilla-central/test/foo.cpp', 793, 'C4244',
+ "'return' : conversion from 'double' to 'uint32_t', possible loss of "
+ 'data')
+]
+
+CURRENT_LINE = 1
+
+def get_warning():
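+    # Each call bumps CURRENT_LINE so successive fake warnings are
+    # distinct for hashing and comparison.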
+ global CURRENT_LINE
+
+ w = CompilerWarning()
+ w['filename'] = '/foo/bar/baz.cpp'
+ w['line'] = CURRENT_LINE
+ w['column'] = 12
+ w['message'] = 'This is irrelevant'
+
+ CURRENT_LINE += 1
+
+ return w
+
+class TestCompilerWarning(unittest.TestCase):
+ def test_equivalence(self):
+ w1 = CompilerWarning()
+ w2 = CompilerWarning()
+
+ s = set()
+
+ # Empty warnings should be equal.
+ self.assertEqual(w1, w2)
+
+ s.add(w1)
+ s.add(w2)
+
+ self.assertEqual(len(s), 1)
+
+ w1['filename'] = '/foo.c'
+ w2['filename'] = '/bar.c'
+
+ self.assertNotEqual(w1, w2)
+
+ s = set()
+ s.add(w1)
+ s.add(w2)
+
+ self.assertEqual(len(s), 2)
+
+ w1['filename'] = '/foo.c'
+ w1['line'] = 5
+ w2['line'] = 5
+
+ w2['filename'] = '/foo.c'
+ w1['column'] = 3
+ w2['column'] = 3
+
+ self.assertEqual(w1, w2)
+
+ def test_comparison(self):
+ w1 = CompilerWarning()
+ w2 = CompilerWarning()
+
+ w1['filename'] = '/aaa.c'
+ w1['line'] = 5
+ w1['column'] = 5
+
+ w2['filename'] = '/bbb.c'
+ w2['line'] = 5
+ w2['column'] = 5
+
+ self.assertLess(w1, w2)
+ self.assertGreater(w2, w1)
+ self.assertGreaterEqual(w2, w1)
+
+ w2['filename'] = '/aaa.c'
+ w2['line'] = 4
+ w2['column'] = 6
+
+ self.assertLess(w2, w1)
+ self.assertGreater(w1, w2)
+ self.assertGreaterEqual(w1, w2)
+
+ w2['filename'] = '/aaa.c'
+ w2['line'] = 5
+ w2['column'] = 10
+
+ self.assertLess(w1, w2)
+ self.assertGreater(w2, w1)
+ self.assertGreaterEqual(w2, w1)
+
+ w2['filename'] = '/aaa.c'
+ w2['line'] = 5
+ w2['column'] = 5
+
+ self.assertLessEqual(w1, w2)
+ self.assertLessEqual(w2, w1)
+ self.assertGreaterEqual(w2, w1)
+ self.assertGreaterEqual(w1, w2)
+
+class TestWarningsParsing(unittest.TestCase):
+ def test_clang_parsing(self):
+ for source, filename, line, column, message, flag in CLANG_TESTS:
+ collector = WarningsCollector(resolve_files=False)
+ warning = collector.process_line(source)
+
+ self.assertIsNotNone(warning)
+
+ self.assertEqual(warning['filename'], filename)
+ self.assertEqual(warning['line'], line)
+ self.assertEqual(warning['column'], column)
+ self.assertEqual(warning['message'], message)
+ self.assertEqual(warning['flag'], flag)
+
+ def test_msvc_parsing(self):
+ for source, filename, line, flag, message in MSVC_TESTS:
+ collector = WarningsCollector(resolve_files=False)
+ warning = collector.process_line(source)
+
+ self.assertIsNotNone(warning)
+
+ self.assertEqual(warning['filename'], os.path.normpath(filename))
+ self.assertEqual(warning['line'], line)
+ self.assertEqual(warning['flag'], flag)
+ self.assertEqual(warning['message'], message)
+
+class TestWarningsDatabase(unittest.TestCase):
+ def test_basic(self):
+ db = WarningsDatabase()
+
+ self.assertEqual(len(db), 0)
+
+ for i in range(10):
+ db.insert(get_warning(), compute_hash=False)
+
+ self.assertEqual(len(db), 10)
+
+ warnings = list(db)
+ self.assertEqual(len(warnings), 10)
+
+ def test_hashing(self):
+ """Ensure that hashing files on insert works."""
+ db = WarningsDatabase()
+
+ temp = NamedTemporaryFile(mode='wt')
+ temp.write('x' * 100)
+ temp.flush()
+
+ w = CompilerWarning()
+ w['filename'] = temp.name
+ w['line'] = 1
+ w['column'] = 4
+ w['message'] = 'foo bar'
+
+ # Should not throw.
+ db.insert(w)
+
+ w['filename'] = 'DOES_NOT_EXIST'
+
+ with self.assertRaises(Exception):
+ db.insert(w)
+
+ def test_pruning(self):
+ """Ensure old warnings are removed from database appropriately."""
+ db = WarningsDatabase()
+
+ source_files = []
+ for i in range(1, 21):
+ temp = NamedTemporaryFile(mode='wt')
+ temp.write('x' * (100 * i))
+ temp.flush()
+
+ # Keep reference so it doesn't get GC'd and deleted.
+ source_files.append(temp)
+
+ w = CompilerWarning()
+ w['filename'] = temp.name
+ w['line'] = 1
+ w['column'] = i * 10
+ w['message'] = 'irrelevant'
+
+ db.insert(w)
+
+ self.assertEqual(len(db), 20)
+
+ # If we change a source file, inserting a new warning should nuke the
+ # old one.
+ source_files[0].write('extra')
+ source_files[0].flush()
+
+ w = CompilerWarning()
+ w['filename'] = source_files[0].name
+ w['line'] = 1
+ w['column'] = 50
+ w['message'] = 'replaced'
+
+ db.insert(w)
+
+ self.assertEqual(len(db), 20)
+
+ warnings = list(db.warnings_for_file(source_files[0].name))
+ self.assertEqual(len(warnings), 1)
+ self.assertEqual(warnings[0]['column'], w['column'])
+
+ # If we delete the source file, calling prune should cause the warnings
+ # to go away.
+ old_filename = source_files[0].name
+ del source_files[0]
+
+ self.assertFalse(os.path.exists(old_filename))
+
+ db.prune()
+ self.assertEqual(len(db), 19)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/common.py b/python/mozbuild/mozbuild/test/configure/common.py
new file mode 100644
index 000000000..089d61a0d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/common.py
@@ -0,0 +1,279 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import errno
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+from mozbuild.configure import ConfigureSandbox
+from mozbuild.util import ReadOnlyNamespace
+from mozpack import path as mozpath
+
+from StringIO import StringIO
+from which import WhichError
+
+from buildconfig import (
+ topobjdir,
+ topsrcdir,
+)
+
+
+def fake_short_path(path):
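+    # Crude emulation of Windows 8.3 short paths: any path component
+    # containing a space is truncated at the space and suffixed '~1'.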
+ if sys.platform.startswith('win'):
+        return '/'.join(p.split(' ', 1)[0] + '~1' if ' ' in p else p
+ for p in mozpath.split(path))
+ return path
+
+def ensure_exe_extension(path):
+ if sys.platform.startswith('win'):
+ return path + '.exe'
+ return path
+
+
+class ConfigureTestVFS(object):
+ def __init__(self, paths):
+ self._paths = set(mozpath.abspath(p) for p in paths)
+
+ def exists(self, path):
+ path = mozpath.abspath(path)
+ if path in self._paths:
+ return True
+ if mozpath.basedir(path, [topsrcdir, topobjdir]):
+ return os.path.exists(path)
+ return False
+
+ def isfile(self, path):
+ path = mozpath.abspath(path)
+ if path in self._paths:
+ return True
+ if mozpath.basedir(path, [topsrcdir, topobjdir]):
+ return os.path.isfile(path)
+ return False
+
+
+class ConfigureTestSandbox(ConfigureSandbox):
+ '''Wrapper around the ConfigureSandbox for testing purposes.
+
+ Its arguments are the same as ConfigureSandbox, except for the additional
+ `paths` argument, which is a dict where the keys are file paths and the
+ values are either None or a function that will be called when the sandbox
+ calls an implemented function from subprocess with the key as command.
+ When the command is CONFIG_SHELL, the function for the path of the script
+ that follows will be called.
+
+ The API for those functions is:
+ retcode, stdout, stderr = func(stdin, args)
+
+ This class is only meant to implement the minimal things to make
+ moz.configure testing possible. As such, it takes shortcuts.
+ '''
+ def __init__(self, paths, config, environ, *args, **kwargs):
+ self._search_path = environ.get('PATH', '').split(os.pathsep)
+
+ self._subprocess_paths = {
+ mozpath.abspath(k): v for k, v in paths.iteritems() if v
+ }
+
+ paths = paths.keys()
+
+ environ = dict(environ)
+ if 'CONFIG_SHELL' not in environ:
+ environ['CONFIG_SHELL'] = mozpath.abspath('/bin/sh')
+ self._subprocess_paths[environ['CONFIG_SHELL']] = self.shell
+ paths.append(environ['CONFIG_SHELL'])
+ self._environ = copy.copy(environ)
+
+ vfs = ConfigureTestVFS(paths)
+
+ os_path = {
+ k: getattr(vfs, k) for k in dir(vfs) if not k.startswith('_')
+ }
+
+ os_path.update(self.OS.path.__dict__)
+
+ self.imported_os = ReadOnlyNamespace(path=ReadOnlyNamespace(**os_path))
+
+ super(ConfigureTestSandbox, self).__init__(config, environ, *args,
+ **kwargs)
+
+ def _get_one_import(self, what):
+ if what == 'which.which':
+ return self.which
+
+ if what == 'which':
+ return ReadOnlyNamespace(
+ which=self.which,
+ WhichError=WhichError,
+ )
+
+ if what == 'subprocess.Popen':
+ return self.Popen
+
+ if what == 'subprocess':
+ return ReadOnlyNamespace(
+ CalledProcessError=subprocess.CalledProcessError,
+ check_output=self.check_output,
+ PIPE=subprocess.PIPE,
+ STDOUT=subprocess.STDOUT,
+ Popen=self.Popen,
+ )
+
+ if what == 'os.environ':
+ return self._environ
+
+ if what == 'ctypes.wintypes':
+ return ReadOnlyNamespace(
+ LPCWSTR=0,
+ LPWSTR=1,
+ DWORD=2,
+ )
+
+ if what == 'ctypes':
+ class CTypesFunc(object):
+ def __init__(self, func):
+ self._func = func
+
+ def __call__(self, *args, **kwargs):
+ return self._func(*args, **kwargs)
+
+
+ return ReadOnlyNamespace(
+ create_unicode_buffer=self.create_unicode_buffer,
+ windll=ReadOnlyNamespace(
+ kernel32=ReadOnlyNamespace(
+ GetShortPathNameW=CTypesFunc(self.GetShortPathNameW),
+ )
+ ),
+ )
+
+ if what == '_winreg':
+ def OpenKey(*args, **kwargs):
+ raise WindowsError()
+
+ return ReadOnlyNamespace(
+ HKEY_LOCAL_MACHINE=0,
+ OpenKey=OpenKey,
+ )
+
+ return super(ConfigureTestSandbox, self)._get_one_import(what)
+
+ def create_unicode_buffer(self, *args, **kwargs):
+ class Buffer(object):
+ def __init__(self):
+ self.value = ''
+
+ return Buffer()
+
+ def GetShortPathNameW(self, path_in, path_out, length):
+ path_out.value = fake_short_path(path_in)
+ return length
+
+ def which(self, command, path=None):
+ for parent in (path or self._search_path):
+ c = mozpath.abspath(mozpath.join(parent, command))
+ for candidate in (c, ensure_exe_extension(c)):
+ if self.imported_os.path.exists(candidate):
+ return candidate
+ raise WhichError()
+
+ def Popen(self, args, stdin=None, stdout=None, stderr=None, **kargs):
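+        # Resolve the program through the fake which() (raising ENOENT when
+        # not found), then run the registered handler for it instead of
+        # spawning a real process.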
+ try:
+ program = self.which(args[0])
+ except WhichError:
+ raise OSError(errno.ENOENT, 'File not found')
+
+ func = self._subprocess_paths.get(program)
+ retcode, stdout, stderr = func(stdin, args[1:])
+
+ class Process(object):
+ def communicate(self, stdin=None):
+ return stdout, stderr
+
+ def wait(self):
+ return retcode
+
+ return Process()
+
+ def check_output(self, args, **kwargs):
+ proc = self.Popen(args, **kwargs)
+ stdout, stderr = proc.communicate()
+ retcode = proc.wait()
+ if retcode:
+ raise subprocess.CalledProcessError(retcode, args, stdout)
+ return stdout
+
+ def shell(self, stdin, args):
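+        # Minimal CONFIG_SHELL emulation: dispatch to the handler registered
+        # for the script path, or fail like a shell would, with exit code 127.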
+ script = mozpath.abspath(args[0])
+ if script in self._subprocess_paths:
+ return self._subprocess_paths[script](stdin, args[1:])
+ return 127, '', 'File not found'
+
+
+class BaseConfigureTest(unittest.TestCase):
+ HOST = 'x86_64-pc-linux-gnu'
+
+ def setUp(self):
+ self._cwd = os.getcwd()
+ os.chdir(topobjdir)
+
+ def tearDown(self):
+ os.chdir(self._cwd)
+
+ def config_guess(self, stdin, args):
+ return 0, self.HOST, ''
+
+ def config_sub(self, stdin, args):
+ return 0, args[0], ''
+
+ def get_sandbox(self, paths, config, args=[], environ={}, mozconfig='',
+ out=None, logger=None):
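+        # Convenience wrapper: build a ConfigureTestSandbox with fake
+        # config.guess/config.sub handlers, an (optionally temporary)
+        # mozconfig and captured output, then evaluate the top-level
+        # moz.configure in it.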
+ kwargs = {}
+ if logger:
+ kwargs['logger'] = logger
+ else:
+ if not out:
+ out = StringIO()
+ kwargs['stdout'] = out
+ kwargs['stderr'] = out
+
+ if hasattr(self, 'TARGET'):
+ target = ['--target=%s' % self.TARGET]
+ else:
+ target = []
+
+ if mozconfig:
+ fh, mozconfig_path = tempfile.mkstemp()
+ os.write(fh, mozconfig)
+ os.close(fh)
+ else:
+ mozconfig_path = os.path.join(os.path.dirname(__file__), 'data',
+ 'empty_mozconfig')
+
+ try:
+ environ = dict(
+ environ,
+ OLD_CONFIGURE=os.path.join(topsrcdir, 'old-configure'),
+ MOZCONFIG=mozconfig_path)
+
+ paths = dict(paths)
+ autoconf_dir = mozpath.join(topsrcdir, 'build', 'autoconf')
+ paths[mozpath.join(autoconf_dir,
+ 'config.guess')] = self.config_guess
+ paths[mozpath.join(autoconf_dir, 'config.sub')] = self.config_sub
+
+ sandbox = ConfigureTestSandbox(paths, config, environ,
+ ['configure'] + target + args,
+ **kwargs)
+ sandbox.include_file(os.path.join(topsrcdir, 'moz.configure'))
+
+ return sandbox
+ finally:
+ if mozconfig:
+ os.remove(mozconfig_path)
diff --git a/python/mozbuild/mozbuild/test/configure/data/decorators.configure b/python/mozbuild/mozbuild/test/configure/data/decorators.configure
new file mode 100644
index 000000000..e5e41c68a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/decorators.configure
@@ -0,0 +1,44 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+@template
+def simple_decorator(func):
+ return func
+
+@template
+def wrapper_decorator(func):
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return wrapper
+
+@template
+def function_decorator(*args, **kwargs):
+ # We could return wrapper_decorator from above here, but then we wouldn't
+ # know if this works as expected because wrapper_decorator itself was
+ # modified or because the right thing happened here.
+ def wrapper_decorator(func):
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return wrapper
+ return wrapper_decorator
+
+@depends('--help')
+@simple_decorator
+def foo(help):
+ global FOO
+ FOO = 1
+
+@depends('--help')
+@wrapper_decorator
+def bar(help):
+ global BAR
+ BAR = 1
+
+@depends('--help')
+@function_decorator('a', 'b', 'c')
+def qux(help):
+ global QUX
+ QUX = 1
diff --git a/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig b/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig
diff --git a/python/mozbuild/mozbuild/test/configure/data/extra.configure b/python/mozbuild/mozbuild/test/configure/data/extra.configure
new file mode 100644
index 000000000..43fbf7c5d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/extra.configure
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--extra', help='Extra')
+
+@depends('--extra')
+def extra(extra):
+ return extra
+
+set_config('EXTRA', extra)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
new file mode 100644
index 000000000..ad05e383c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
@@ -0,0 +1,32 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+imply_option('--enable-foo', True)
+
+option('--enable-foo', help='enable foo')
+
+@depends('--enable-foo', '--help')
+def foo(value, help):
+ if value:
+ return True
+
+imply_option('--enable-bar', ('foo', 'bar'))
+
+option('--enable-bar', nargs='*', help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if value:
+ return value
+
+imply_option('--enable-baz', 'BAZ')
+
+option('--enable-baz', nargs=1, help='enable baz')
+
+@depends('--enable-baz')
+def baz(value):
+ if value:
+ return value
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
new file mode 100644
index 000000000..2ad1506ef
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
@@ -0,0 +1,24 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-foo', help='enable foo')
+
+@depends('--enable-foo', '--help')
+def foo(value, help):
+ if value:
+ return True
+
+imply_option('--enable-bar', foo)
+
+
+option('--enable-bar', help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if value:
+ return value
+
+set_config('BAR', bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure
new file mode 100644
index 000000000..72b88d7b5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure
@@ -0,0 +1,31 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-hoge', help='enable hoge')
+
+@depends('--enable-hoge')
+def hoge(value):
+ return value
+
+
+option('--enable-foo', help='enable foo')
+
+@depends('--enable-foo', hoge)
+def foo(value, hoge):
+ if value:
+ return True
+
+imply_option('--enable-bar', foo)
+
+
+option('--enable-bar', help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if value:
+ return value
+
+set_config('BAR', bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure
new file mode 100644
index 000000000..ca8e9df3a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure
@@ -0,0 +1,34 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-foo', help='enable foo')
+
+@depends('--enable-foo')
+def foo(value):
+ if value:
+ return False
+
+imply_option('--enable-bar', foo)
+
+
+option('--disable-hoge', help='disable hoge')
+
+@depends('--disable-hoge')
+def hoge(value):
+ if not value:
+ return False
+
+imply_option('--enable-bar', hoge)
+
+
+option('--enable-bar', default=True, help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if not value:
+ return value
+
+set_config('BAR', bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure
new file mode 100644
index 000000000..6d905ebbb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure
@@ -0,0 +1,24 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-foo', help='enable foo')
+
+@depends('--enable-foo')
+def foo(value):
+ if value:
+ return True
+
+imply_option('--enable-bar', foo)
+
+
+option('--enable-bar', help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if value:
+ return value
+
+set_config('BAR', bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure
new file mode 100644
index 000000000..6af4b1eda
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure
@@ -0,0 +1,24 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-foo', nargs='*', help='enable foo')
+
+@depends('--enable-foo')
+def foo(value):
+ if value:
+ return value
+
+imply_option('--enable-bar', foo)
+
+
+option('--enable-bar', nargs='*', help='enable bar')
+
+@depends('--enable-bar')
+def bar(value):
+ if value:
+ return value
+
+set_config('BAR', bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/included.configure b/python/mozbuild/mozbuild/test/configure/data/included.configure
new file mode 100644
index 000000000..5c056764d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/included.configure
@@ -0,0 +1,53 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# For more complex and repetitive things, we can create templates
+@template
+def check_compiler_flag(flag):
+ @depends(is_gcc)
+ def check(value):
+ if value:
+ return [flag]
+ set_config('CFLAGS', check)
+ return check
+
+
+check_compiler_flag('-Werror=foobar')
+
+# Normal functions can be used in @depends functions.
+def fortytwo():
+ return 42
+
+def twentyone():
+ yield 21
+
+@depends(is_gcc)
+def check(value):
+ if value:
+ return fortytwo()
+
+set_config('TEMPLATE_VALUE', check)
+
+@depends(is_gcc)
+def check(value):
+ if value:
+ for val in twentyone():
+ return val
+
+set_config('TEMPLATE_VALUE_2', check)
+
+# Normal functions can use @imports too to import modules.
+@imports('sys')
+def platform():
+ return sys.platform
+
+option('--enable-imports-in-template', help='Imports in template')
+@depends('--enable-imports-in-template')
+def check(value):
+ if value:
+ return platform()
+
+set_config('PLATFORM', check)
diff --git a/python/mozbuild/mozbuild/test/configure/data/moz.configure b/python/mozbuild/mozbuild/test/configure/data/moz.configure
new file mode 100644
index 000000000..32c4b8535
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/moz.configure
@@ -0,0 +1,174 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--enable-simple', help='Enable simple')
+
+# Setting MOZ_WITH_ENV in the environment has the same effect as passing
+# --enable-with-env.
+option('--enable-with-env', env='MOZ_WITH_ENV', help='Enable with env')
+
+# Optional values
+option('--enable-values', nargs='*', help='Enable values')
+
+# Everything supported by the Option class is supported by option(). Assume
+# the tests for the Option class cover this extensively.
+
+# Alternatively to --enable/--disable, there also is --with/--without. The
+# difference is semantic only. Behavior is the same as --enable/--disable.
+
+# When the option name starts with --disable/--without, the default is for
+# the option to be enabled.
+option('--without-thing', help='Build without thing')
+
+# A --enable/--with option with a default of False is equivalent to a
+# --disable/--without option. This can be used to change the defaults
+# depending on e.g. the target or the built application.
+option('--with-stuff', default=False, help='Build with stuff')
+
+# Other kinds of arbitrary options are also allowed. This is effectively
+# equivalent to --enable/--with, with no possibility of --disable/--without.
+option('--option', env='MOZ_OPTION', help='Option')
+
+# It is also possible to pass options through the environment only.
+option(env='CC', nargs=1, help='C Compiler')
+
+# Call the function when the --enable-simple option is processed, with its
+# OptionValue as argument.
+@depends('--enable-simple')
+def simple(simple):
+ if simple:
+ return simple
+
+set_config('ENABLED_SIMPLE', simple)
+
+# There can be multiple functions depending on the same option.
+@depends('--enable-simple')
+def simple(simple):
+ return simple
+
+set_config('SIMPLE', simple)
+
+@depends('--enable-with-env')
+def with_env(with_env):
+ return with_env
+
+set_config('WITH_ENV', with_env)
+
+# It doesn't matter if the dependency is on --enable or --disable.
+@depends('--disable-values')
+def with_env2(values):
+ return values
+
+set_config('VALUES', with_env2)
+
+# It is possible to @depends on environment-only options.
+@depends('CC')
+def is_gcc(cc):
+ return cc and 'gcc' in cc[0]
+
+set_config('IS_GCC', is_gcc)
+
+# It is possible to depend on the result from another function.
+@depends(with_env2)
+def with_env3(values):
+ return values
+
+set_config('VALUES2', with_env3)
+
+# @depends functions can also return results for use as input to another
+# @depends.
+@depends(with_env3)
+def with_env4(values):
+ return values
+
+@depends(with_env4)
+def with_env5(values):
+ return values
+
+set_config('VALUES3', with_env5)
+
+# The result from @depends functions can also be used as input to options.
+# The result must be returned, not implied. The function must also depend
+# on --help.
+@depends('--enable-simple', '--help')
+def simple(simple, help):
+ return 'simple' if simple else 'not-simple'
+
+option('--with-returned-default', default=simple, help='Returned default')
+
+@depends('--with-returned-default')
+def default(value):
+ return value
+
+set_config('DEFAULTED', default)
+
+@depends('--enable-values', '--help')
+def choices(values, help):
+ if len(values):
+ return {
+ 'alpha': ('a', 'b', 'c'),
+ 'numeric': ('0', '1', '2'),
+ }.get(values[0])
+
+option('--returned-choices', choices=choices, help='Choices')
+
+@depends('--returned-choices')
+def returned_choices(values):
+ return values
+
+set_config('CHOICES', returned_choices)
+
+# All options must be referenced by some @depends function.
+# It is possible to depend on multiple options/functions.
+@depends('--without-thing', '--with-stuff', with_env4, '--option')
+def remainder(*args):
+ return args
+
+set_config('REMAINDER', remainder)
+
+# It is possible to include other files to extend the configuration script.
+include('included.configure')
+
+# It is also possible for the include file path to come from the result of a
+# @depends function. That function needs to depend on '--help' like for option
+# defaults and choices.
+option('--enable-include', nargs=1, help='Include')
+@depends('--enable-include', '--help')
+def include_path(path, help):
+ return path[0] if path else None
+
+include(include_path)
+
+# Sandboxed functions can import from modules through the use of the @imports
+# decorator.
+# The order of the decorators matters: @imports needs to appear after other
+# decorators.
+option('--with-imports', nargs='?', help='Imports')
+
+# A limited set of functions from os.path is exposed by default.
+@depends('--with-imports')
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, 'abspath')
+
+set_config('HAS_ABSPATH', with_imports)
+
+# It is still possible to import the full set from os.path.
+# It is also possible to cherry-pick builtins.
+@depends('--with-imports')
+@imports('os.path')
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, 'getatime')
+
+set_config('HAS_GETATIME', with_imports)
+
+@depends('--with-imports')
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, 'getatime')
+
+set_config('HAS_GETATIME2', with_imports)
diff --git a/python/mozbuild/mozbuild/test/configure/data/set_config.configure b/python/mozbuild/mozbuild/test/configure/data/set_config.configure
new file mode 100644
index 000000000..cf5743963
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/set_config.configure
@@ -0,0 +1,43 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--set-foo', help='set foo')
+
+@depends('--set-foo')
+def foo(value):
+ if value:
+ return True
+
+set_config('FOO', foo)
+
+
+option('--set-bar', help='set bar')
+
+@depends('--set-bar')
+def bar(value):
+ return bool(value)
+
+set_config('BAR', bar)
+
+
+option('--set-value', nargs=1, help='set value')
+
+@depends('--set-value')
+def set_value(value):
+ if value:
+ return value[0]
+
+set_config('VALUE', set_value)
+
+
+option('--set-name', nargs=1, help='set name')
+
+@depends('--set-name')
+def set_name(value):
+ if value:
+ return value[0]
+
+set_config(set_name, True)
diff --git a/python/mozbuild/mozbuild/test/configure/data/set_define.configure b/python/mozbuild/mozbuild/test/configure/data/set_define.configure
new file mode 100644
index 000000000..422263427
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/set_define.configure
@@ -0,0 +1,43 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option('--set-foo', help='set foo')
+
+@depends('--set-foo')
+def foo(value):
+ if value:
+ return True
+
+set_define('FOO', foo)
+
+
+option('--set-bar', help='set bar')
+
+@depends('--set-bar')
+def bar(value):
+ return bool(value)
+
+set_define('BAR', bar)
+
+
+option('--set-value', nargs=1, help='set value')
+
+@depends('--set-value')
+def set_value(value):
+ if value:
+ return value[0]
+
+set_define('VALUE', set_value)
+
+
+option('--set-name', nargs=1, help='set name')
+
+@depends('--set-name')
+def set_name(value):
+ if value:
+ return value[0]
+
+set_define(set_name, True)
diff --git a/python/mozbuild/mozbuild/test/configure/data/subprocess.configure b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure
new file mode 100644
index 000000000..de6be9cec
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure
@@ -0,0 +1,23 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+@depends('--help')
+@imports('codecs')
+@imports(_from='mozbuild.configure.util', _import='getpreferredencoding')
+@imports('os')
+@imports(_from='__builtin__', _import='open')
+def dies_when_logging(_):
+ test_file = 'test.txt'
+ quote_char = "'"
+ if getpreferredencoding().lower() == 'utf-8':
+ quote_char = '\u00B4'.encode('utf-8')
+ try:
+ with open(test_file, 'w+') as fh:
+ fh.write(quote_char)
+ out = check_cmd_output('cat', 'test.txt')
+ log.info(out)
+ finally:
+ os.remove(test_file)
diff --git a/python/mozbuild/mozbuild/test/configure/lint.py b/python/mozbuild/mozbuild/test/configure/lint.py
new file mode 100644
index 000000000..9965a60e9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/lint.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import unittest
+from StringIO import StringIO
+from mozunit import main
+from buildconfig import (
+ topobjdir,
+ topsrcdir,
+)
+
+from mozbuild.configure.lint import LintSandbox
+
+
+test_path = os.path.abspath(__file__)
+
+
+class LintMeta(type):
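+    # Generates one test_<project> method per project listed below, each
+    # delegating to the `lint` method defined on the class being created.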
+ def __new__(mcs, name, bases, attrs):
+ def create_test(project, func):
+ def test(self):
+ return func(self, project)
+ return test
+
+ for project in (
+ 'b2g',
+ 'b2g/dev',
+ 'b2g/graphene',
+ 'browser',
+ 'embedding/ios',
+ 'extensions',
+ 'js',
+ 'mobile/android',
+ ):
+ attrs['test_%s' % project.replace('/', '_')] = create_test(
+ project, attrs['lint'])
+
+ return type.__new__(mcs, name, bases, attrs)
+
+
+class Lint(unittest.TestCase):
+ __metaclass__ = LintMeta
+
+ def setUp(self):
+ self._curdir = os.getcwd()
+ os.chdir(topobjdir)
+
+ def tearDown(self):
+ os.chdir(self._curdir)
+
+ def lint(self, project):
+ sandbox = LintSandbox({
+ 'OLD_CONFIGURE': os.path.join(topsrcdir, 'old-configure'),
+ 'MOZCONFIG': os.path.join(os.path.dirname(test_path), 'data',
+ 'empty_mozconfig'),
+ }, ['--enable-project=%s' % project])
+ sandbox.run(os.path.join(topsrcdir, 'moz.configure'))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_checks_configure.py b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
new file mode 100644
index 000000000..181c7acbd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
@@ -0,0 +1,940 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from StringIO import StringIO
+import os
+import sys
+import textwrap
+import unittest
+
+from mozunit import (
+ main,
+ MockedOpen,
+)
+
+from mozbuild.configure import (
+ ConfigureError,
+ ConfigureSandbox,
+)
+from mozbuild.util import exec_
+from mozpack import path as mozpath
+
+from buildconfig import topsrcdir
+from common import (
+ ConfigureTestSandbox,
+ ensure_exe_extension,
+ fake_short_path,
+)
+
+
+class TestChecksConfigure(unittest.TestCase):
+ def test_checking(self):
+ out = StringIO()
+ sandbox = ConfigureSandbox({}, stdout=out, stderr=out)
+ base_dir = os.path.join(topsrcdir, 'build', 'moz.configure')
+ sandbox.include_file(os.path.join(base_dir, 'checks.configure'))
+
+ exec_(textwrap.dedent('''
+ @checking('for a thing')
+ def foo(value):
+ return value
+ '''), sandbox)
+
+ foo = sandbox['foo']
+
+ foo(True)
+ self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')
+
+ out.truncate(0)
+ foo(False)
+ self.assertEqual(out.getvalue(), 'checking for a thing... no\n')
+
+ out.truncate(0)
+ foo(42)
+ self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')
+
+ out.truncate(0)
+ foo('foo')
+ self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')
+
+ out.truncate(0)
+ data = ['foo', 'bar']
+ foo(data)
+ self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data)
+
+ # When the function given to checking does nothing interesting, the
+        # behavior is not altered.
+ exec_(textwrap.dedent('''
+ @checking('for a thing', lambda x: x)
+ def foo(value):
+ return value
+ '''), sandbox)
+
+ foo = sandbox['foo']
+
+ out.truncate(0)
+ foo(True)
+ self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')
+
+ out.truncate(0)
+ foo(False)
+ self.assertEqual(out.getvalue(), 'checking for a thing... no\n')
+
+ out.truncate(0)
+ foo(42)
+ self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')
+
+ out.truncate(0)
+ foo('foo')
+ self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')
+
+ out.truncate(0)
+ data = ['foo', 'bar']
+ foo(data)
+ self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data)
+
+ exec_(textwrap.dedent('''
+ def munge(x):
+ if not x:
+ return 'not found'
+ if isinstance(x, (str, bool, int)):
+ return x
+ return ' '.join(x)
+
+ @checking('for a thing', munge)
+ def foo(value):
+ return value
+ '''), sandbox)
+
+ foo = sandbox['foo']
+
+ out.truncate(0)
+ foo(True)
+ self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')
+
+ out.truncate(0)
+ foo(False)
+ self.assertEqual(out.getvalue(), 'checking for a thing... not found\n')
+
+ out.truncate(0)
+ foo(42)
+ self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')
+
+ out.truncate(0)
+ foo('foo')
+ self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')
+
+ out.truncate(0)
+ foo(['foo', 'bar'])
+ self.assertEqual(out.getvalue(), 'checking for a thing... foo bar\n')
+
+ KNOWN_A = ensure_exe_extension(mozpath.abspath('/usr/bin/known-a'))
+ KNOWN_B = ensure_exe_extension(mozpath.abspath('/usr/local/bin/known-b'))
+ KNOWN_C = ensure_exe_extension(mozpath.abspath('/home/user/bin/known c'))
+ OTHER_A = ensure_exe_extension(mozpath.abspath('/lib/other/known-a'))
+
+ def get_result(self, command='', args=[], environ={},
+ prog='/bin/configure', extra_paths=None,
+ includes=('util.configure', 'checks.configure')):
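+        # Run `command` in a fresh test sandbox where only the KNOWN_*
+        # programs (plus any extra_paths) exist, returning the resulting
+        # config dict, the captured output and the exit status.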
+ config = {}
+ out = StringIO()
+ paths = {
+ self.KNOWN_A: None,
+ self.KNOWN_B: None,
+ self.KNOWN_C: None,
+ }
+ if extra_paths:
+ paths.update(extra_paths)
+ environ = dict(environ)
+ if 'PATH' not in environ:
+ environ['PATH'] = os.pathsep.join(os.path.dirname(p) for p in paths)
+ paths[self.OTHER_A] = None
+ sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args,
+ out, out)
+ base_dir = os.path.join(topsrcdir, 'build', 'moz.configure')
+ for f in includes:
+ sandbox.include_file(os.path.join(base_dir, f))
+
+ status = 0
+ try:
+ exec_(command, sandbox)
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+
+ return config, out.getvalue(), status
+
+ def test_check_prog(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("known-a",))')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_B})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_B)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "known c"))')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': fake_short_path(self.KNOWN_C)})
+ self.assertEqual(out, "checking for foo... '%s'\n"
+ % fake_short_path(self.KNOWN_C))
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown",))')
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo... not found
+ DEBUG: foo: Trying unknown
+ ERROR: Cannot find foo
+ '''))
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))')
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo... not found
+ DEBUG: foo: Trying unknown
+ DEBUG: foo: Trying unknown-2
+ DEBUG: foo: Trying 'unknown 3'
+ ERROR: Cannot find foo
+ '''))
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
+ 'allow_missing=True)')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': ':'})
+ self.assertEqual(out, 'checking for foo... not found\n')
+
+ @unittest.skipIf(not sys.platform.startswith('win'), 'Windows-only test')
+ def test_check_prog_exe(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ['FOO=known-a.exe'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ['FOO=%s' % os.path.splitext(self.KNOWN_A)[0]])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+
+ def test_check_prog_with_args(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ['FOO=known-a'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ['FOO=%s' % self.KNOWN_A])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+ path = self.KNOWN_B.replace('known-b', 'known-a')
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ['FOO=%s' % path])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo... not found
+ DEBUG: foo: Trying %s
+ ERROR: Cannot find foo
+ ''') % path)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown",))',
+ ['FOO=known c'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': fake_short_path(self.KNOWN_C)})
+ self.assertEqual(out, "checking for foo... '%s'\n"
+ % fake_short_path(self.KNOWN_C))
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
+ 'allow_missing=True)', ['FOO=unknown'])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo... not found
+ DEBUG: foo: Trying unknown
+ ERROR: Cannot find foo
+ '''))
+
+ def test_check_prog_what(self):
+ config, out, status = self.get_result(
+ 'check_prog("CC", ("known-a",), what="the target C compiler")')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_A})
+ self.assertEqual(
+ out, 'checking for the target C compiler... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),'
+ ' what="the target C compiler")')
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for the target C compiler... not found
+ DEBUG: cc: Trying unknown
+ DEBUG: cc: Trying unknown-2
+ DEBUG: cc: Trying 'unknown 3'
+ ERROR: Cannot find the target C compiler
+ '''))
+
+ def test_check_prog_input(self):
+ config, out, status = self.get_result(textwrap.dedent('''
+ option("--with-ccache", nargs=1, help="ccache")
+ check_prog("CCACHE", ("known-a",), input="--with-ccache")
+ '''), ['--with-ccache=known-b'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CCACHE': self.KNOWN_B})
+ self.assertEqual(out, 'checking for ccache... %s\n' % self.KNOWN_B)
+
+ script = textwrap.dedent('''
+ option(env="CC", nargs=1, help="compiler")
+ @depends("CC")
+ def compiler(value):
+ return value[0].split()[0] if value else None
+ check_prog("CC", ("known-a",), input=compiler)
+ ''')
+ config, out, status = self.get_result(script)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_A})
+ self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(script, ['CC=known-b'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_B})
+ self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B)
+
+ config, out, status = self.get_result(script, ['CC=known-b -m32'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_B})
+ self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B)
+
+ def test_check_prog_progs(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ())')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+ self.assertEqual(out, '')
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ())', ['FOO=known-a'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'FOO': self.KNOWN_A})
+ self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)
+
+ script = textwrap.dedent('''
+ option(env="TARGET", nargs=1, default="linux", help="target")
+ @depends("TARGET")
+ def compiler(value):
+ if value:
+ if value[0] == "linux":
+ return ("gcc", "clang")
+ if value[0] == "winnt":
+ return ("cl", "clang-cl")
+ check_prog("CC", compiler)
+ ''')
+ config, out, status = self.get_result(script)
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for cc... not found
+ DEBUG: cc: Trying gcc
+ DEBUG: cc: Trying clang
+ ERROR: Cannot find cc
+ '''))
+
+ config, out, status = self.get_result(script, ['TARGET=linux'])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for cc... not found
+ DEBUG: cc: Trying gcc
+ DEBUG: cc: Trying clang
+ ERROR: Cannot find cc
+ '''))
+
+ config, out, status = self.get_result(script, ['TARGET=winnt'])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for cc... not found
+ DEBUG: cc: Trying cl
+ DEBUG: cc: Trying clang-cl
+ ERROR: Cannot find cc
+ '''))
+
+ config, out, status = self.get_result(script, ['TARGET=none'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+ self.assertEqual(out, '')
+
+ config, out, status = self.get_result(script, ['TARGET=winnt',
+ 'CC=known-a'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_A})
+ self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)
+
+ config, out, status = self.get_result(script, ['TARGET=none',
+ 'CC=known-a'])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'CC': self.KNOWN_A})
+ self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)
+
+ def test_check_prog_configure_error(self):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result('check_prog("FOO", "foo")')
+
+ self.assertEqual(e.exception.message,
+ 'progs must resolve to a list or tuple!')
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result(
+ 'foo = depends(when=True)(lambda: ("a", "b"))\n'
+ 'check_prog("FOO", ("known-a",), input=foo)'
+ )
+
+ self.assertEqual(e.exception.message,
+ 'input must resolve to a tuple or a list with a '
+ 'single element, or a string')
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result(
+ 'foo = depends(when=True)(lambda: {"a": "b"})\n'
+ 'check_prog("FOO", ("known-a",), input=foo)'
+ )
+
+ self.assertEqual(e.exception.message,
+ 'input must resolve to a tuple or a list with a '
+ 'single element, or a string')
+
+ def test_check_prog_with_path(self):
+ config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["/some/path"])')
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for a... not found
+ DEBUG: a: Trying known-a
+ ERROR: Cannot find a
+ '''))
+
+ config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["%s"])' %
+ os.path.dirname(self.OTHER_A))
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'A': self.OTHER_A})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for a... %s
+ ''' % self.OTHER_A))
+
+ dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A))
+ config, out, status = self.get_result(textwrap.dedent('''\
+ check_prog("A", ("known-a",), paths=["%s"])
+ ''' % os.pathsep.join(dirs)))
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'A': self.OTHER_A})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for a... %s
+ ''' % self.OTHER_A))
+
+ dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B))
+ config, out, status = self.get_result(textwrap.dedent('''\
+ check_prog("A", ("known-a",), paths=["%s", "%s"])
+ ''' % (os.pathsep.join(dirs), self.OTHER_A)))
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'A': self.KNOWN_A})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for a... %s
+ ''' % self.KNOWN_A))
+
+ config, out, status = self.get_result('check_prog("A", ("known-a",), paths="%s")' %
+ os.path.dirname(self.OTHER_A))
+
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for a...
+ DEBUG: a: Trying known-a
+ ERROR: Paths provided to find_program must be a list of strings, not %r
+ ''' % mozpath.dirname(self.OTHER_A)))
+
+ def test_java_tool_checks(self):
+ includes = ('util.configure', 'checks.configure', 'java.configure')
+
+ def mock_valid_javac(_, args):
+ if len(args) == 1 and args[0] == '-version':
+ return 0, '1.7', ''
+ self.fail("Unexpected arguments to mock_valid_javac: %s" % args)
+
+ # A valid set of tools in a standard location.
+ java = mozpath.abspath('/usr/bin/java')
+ javah = mozpath.abspath('/usr/bin/javah')
+ javac = mozpath.abspath('/usr/bin/javac')
+ jar = mozpath.abspath('/usr/bin/jar')
+ jarsigner = mozpath.abspath('/usr/bin/jarsigner')
+ keytool = mozpath.abspath('/usr/bin/keytool')
+
+ paths = {
+ java: None,
+ javah: None,
+ javac: mock_valid_javac,
+ jar: None,
+ jarsigner: None,
+ keytool: None,
+ }
+
+ config, out, status = self.get_result(includes=includes, extra_paths=paths)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'JAVA': java,
+ 'JAVAH': javah,
+ 'JAVAC': javac,
+ 'JAR': jar,
+ 'JARSIGNER': jarsigner,
+ 'KEYTOOL': keytool,
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... %s
+ checking for keytool... %s
+ checking for javac... %s
+ checking for javac version... 1.7
+ ''' % (java, javah, jar, jarsigner, keytool, javac)))
+
+ # An alternative valid set of tools referred to by JAVA_HOME.
+ alt_java = mozpath.abspath('/usr/local/bin/java')
+ alt_javah = mozpath.abspath('/usr/local/bin/javah')
+ alt_javac = mozpath.abspath('/usr/local/bin/javac')
+ alt_jar = mozpath.abspath('/usr/local/bin/jar')
+ alt_jarsigner = mozpath.abspath('/usr/local/bin/jarsigner')
+ alt_keytool = mozpath.abspath('/usr/local/bin/keytool')
+ alt_java_home = mozpath.dirname(mozpath.dirname(alt_java))
+
+ paths.update({
+ alt_java: None,
+ alt_javah: None,
+ alt_javac: mock_valid_javac,
+ alt_jar: None,
+ alt_jarsigner: None,
+ alt_keytool: None,
+ })
+
+ config, out, status = self.get_result(includes=includes,
+ extra_paths=paths,
+ environ={
+ 'JAVA_HOME': alt_java_home,
+ 'PATH': mozpath.dirname(java)
+ })
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'JAVA': alt_java,
+ 'JAVAH': alt_javah,
+ 'JAVAC': alt_javac,
+ 'JAR': alt_jar,
+ 'JARSIGNER': alt_jarsigner,
+ 'KEYTOOL': alt_keytool,
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... %s
+ checking for keytool... %s
+ checking for javac... %s
+ checking for javac version... 1.7
+ ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner,
+ alt_keytool, alt_javac)))
+
+ # We can use --with-java-bin-path instead of JAVA_HOME to similar
+ # effect.
+ config, out, status = self.get_result(
+ args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)],
+ includes=includes,
+ extra_paths=paths,
+ environ={
+ 'PATH': mozpath.dirname(java)
+ })
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'JAVA': alt_java,
+ 'JAVAH': alt_javah,
+ 'JAVAC': alt_javac,
+ 'JAR': alt_jar,
+ 'JARSIGNER': alt_jarsigner,
+ 'KEYTOOL': alt_keytool,
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... %s
+ checking for keytool... %s
+ checking for javac... %s
+ checking for javac version... 1.7
+ ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner,
+ alt_keytool, alt_javac)))
+
+ # If --with-java-bin-path and JAVA_HOME are both set,
+ # --with-java-bin-path takes precedence.
+ config, out, status = self.get_result(
+ args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)],
+ includes=includes,
+ extra_paths=paths,
+ environ={
+ 'PATH': mozpath.dirname(java),
+ 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)),
+ })
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'JAVA': alt_java,
+ 'JAVAH': alt_javah,
+ 'JAVAC': alt_javac,
+ 'JAR': alt_jar,
+ 'JARSIGNER': alt_jarsigner,
+ 'KEYTOOL': alt_keytool,
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... %s
+ checking for keytool... %s
+ checking for javac... %s
+ checking for javac version... 1.7
+ ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner,
+ alt_keytool, alt_javac)))
+
+ def mock_old_javac(_, args):
+ if len(args) == 1 and args[0] == '-version':
+ return 0, '1.6.9', ''
+ self.fail("Unexpected arguments to mock_old_javac: %s" % args)
+
+ # An old javac is fatal.
+ paths[javac] = mock_old_javac
+ config, out, status = self.get_result(includes=includes,
+ extra_paths=paths,
+ environ={
+ 'PATH': mozpath.dirname(java)
+ })
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {
+ 'JAVA': java,
+ 'JAVAH': javah,
+ 'JAVAC': javac,
+ 'JAR': jar,
+ 'JARSIGNER': jarsigner,
+ 'KEYTOOL': keytool,
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... %s
+ checking for keytool... %s
+ checking for javac... %s
+ checking for javac version...
+ ERROR: javac 1.7 or higher is required (found 1.6.9)
+ ''' % (java, javah, jar, jarsigner, keytool, javac)))
+
+ # Any missing tool is fatal when these checks run.
+ del paths[jarsigner]
+ config, out, status = self.get_result(includes=includes,
+ extra_paths=paths,
+ environ={
+ 'PATH': mozpath.dirname(java)
+ })
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {
+ 'JAVA': java,
+ 'JAVAH': javah,
+ 'JAR': jar,
+ 'JARSIGNER': ':',
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for java... %s
+ checking for javah... %s
+ checking for jar... %s
+ checking for jarsigner... not found
+ ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}'
+ ''' % (java, javah, jar)))
+
+ def test_pkg_check_modules(self):
+ mock_pkg_config_version = '0.10.0'
+ mock_pkg_config_path = mozpath.abspath('/usr/bin/pkg-config')
+
+ def mock_pkg_config(_, args):
+ if args[0:2] == ['--errors-to-stdout', '--print-errors']:
+ assert len(args) == 3
+ package = args[2]
+ if package == 'unknown':
+ return (1, "Package unknown was not found in the pkg-config search path.\n"
+ "Perhaps you should add the directory containing `unknown.pc'\n"
+ "to the PKG_CONFIG_PATH environment variable\n"
+ "No package 'unknown' found", '')
+ if package == 'valid':
+ return 0, '', ''
+ if package == 'new > 1.1':
+ return 1, "Requested 'new > 1.1' but version of new is 1.1", ''
+ if args[0] == '--cflags':
+ assert len(args) == 2
+ return 0, '-I/usr/include/%s' % args[1], ''
+ if args[0] == '--libs':
+ assert len(args) == 2
+ return 0, '-l%s' % args[1], ''
+ if args[0] == '--version':
+ return 0, mock_pkg_config_version, ''
+ self.fail("Unexpected arguments to mock_pkg_config: %s" % args)
+
+ def get_result(cmd, args=[], extra_paths=None):
+ return self.get_result(textwrap.dedent('''\
+ option('--disable-compile-environment', help='compile env')
+ include('%(topsrcdir)s/build/moz.configure/util.configure')
+ include('%(topsrcdir)s/build/moz.configure/checks.configure')
+ include('%(topsrcdir)s/build/moz.configure/pkg.configure')
+ ''' % {'topsrcdir': topsrcdir}) + cmd, args=args, extra_paths=extra_paths,
+ includes=())
+
+ extra_paths = {
+ mock_pkg_config_path: mock_pkg_config,
+ }
+ includes = ('util.configure', 'checks.configure', 'pkg.configure')
+
+ config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')")
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... not found
+ ERROR: *** The pkg-config script could not be found. Make sure it is
+ *** in your path, or set the PKG_CONFIG environment variable
+ *** to the full path to pkg-config.
+ '''))
+
+
+ config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')",
+ extra_paths=extra_paths)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking for valid... yes
+ checking MOZ_VALID_CFLAGS... -I/usr/include/valid
+ checking MOZ_VALID_LIBS... -lvalid
+ ''' % (mock_pkg_config_path, mock_pkg_config_version)))
+ self.assertEqual(config, {
+ 'PKG_CONFIG': mock_pkg_config_path,
+ 'MOZ_VALID_CFLAGS': ('-I/usr/include/valid',),
+ 'MOZ_VALID_LIBS': ('-lvalid',),
+ })
+
+ config, output, status = get_result("pkg_check_modules('MOZ_UKNOWN', 'unknown')",
+ extra_paths=extra_paths)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking for unknown... no
+ ERROR: Package unknown was not found in the pkg-config search path.
+ ERROR: Perhaps you should add the directory containing `unknown.pc'
+ ERROR: to the PKG_CONFIG_PATH environment variable
+ ERROR: No package 'unknown' found
+ ''' % (mock_pkg_config_path, mock_pkg_config_version)))
+ self.assertEqual(config, {
+ 'PKG_CONFIG': mock_pkg_config_path,
+ })
+
+ config, output, status = get_result("pkg_check_modules('MOZ_NEW', 'new > 1.1')",
+ extra_paths=extra_paths)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking for new > 1.1... no
+ ERROR: Requested 'new > 1.1' but version of new is 1.1
+ ''' % (mock_pkg_config_path, mock_pkg_config_version)))
+ self.assertEqual(config, {
+ 'PKG_CONFIG': mock_pkg_config_path,
+ })
+
+ # allow_missing makes missing packages non-fatal.
+ cmd = textwrap.dedent('''\
+ have_new_module = pkg_check_modules('MOZ_NEW', 'new > 1.1', allow_missing=True)
+ @depends(have_new_module)
+ def log_new_module_error(mod):
+ if mod is not True:
+ log.info('Module not found.')
+ ''')
+
+ config, output, status = get_result(cmd, extra_paths=extra_paths)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking for new > 1.1... no
+ WARNING: Requested 'new > 1.1' but version of new is 1.1
+ Module not found.
+ ''' % (mock_pkg_config_path, mock_pkg_config_version)))
+ self.assertEqual(config, {
+ 'PKG_CONFIG': mock_pkg_config_path,
+ })
+
+ config, output, status = get_result(cmd,
+ args=['--disable-compile-environment'],
+ extra_paths=extra_paths)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, 'Module not found.\n')
+ self.assertEqual(config, {})
+
+ def mock_old_pkg_config(_, args):
+ if args[0] == '--version':
+ return 0, '0.8.10', ''
+ self.fail("Unexpected arguments to mock_old_pkg_config: %s" % args)
+
+ extra_paths = {
+ mock_pkg_config_path: mock_old_pkg_config,
+ }
+
+ config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')",
+ extra_paths=extra_paths)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for pkg_config... %s
+ checking for pkg-config version... 0.8.10
+ ERROR: *** Your version of pkg-config is too old. You need version 0.9.0 or newer.
+ ''' % mock_pkg_config_path))
+
+ def test_simple_keyfile(self):
+ includes = ('util.configure', 'checks.configure', 'keyfiles.configure')
+
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')", includes=includes)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Mozilla API key... no
+ '''))
+ self.assertEqual(config, {
+ 'MOZ_MOZILLA_API_KEY': 'no-mozilla-api-key',
+ })
+
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=['--with-mozilla-api-keyfile=/foo/bar/does/not/exist'],
+ includes=includes)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Mozilla API key... no
+ ERROR: '/foo/bar/does/not/exist': No such file or directory.
+ '''))
+ self.assertEqual(config, {})
+
+ with MockedOpen({'key': ''}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=['--with-mozilla-api-keyfile=key'],
+ includes=includes)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Mozilla API key... no
+ ERROR: 'key' is empty.
+ '''))
+ self.assertEqual(config, {})
+
+ with MockedOpen({'key': 'fake-key\n'}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=['--with-mozilla-api-keyfile=key'],
+ includes=includes)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Mozilla API key... yes
+ '''))
+ self.assertEqual(config, {
+ 'MOZ_MOZILLA_API_KEY': 'fake-key',
+ })
+
+ def test_id_and_secret_keyfile(self):
+ includes = ('util.configure', 'checks.configure', 'keyfiles.configure')
+
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')", includes=includes)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Bing API key... no
+ '''))
+ self.assertEqual(config, {
+ 'MOZ_BING_API_CLIENTID': 'no-bing-api-clientid',
+ 'MOZ_BING_API_KEY': 'no-bing-api-key',
+ })
+
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=['--with-bing-api-keyfile=/foo/bar/does/not/exist'],
+ includes=includes)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Bing API key... no
+ ERROR: '/foo/bar/does/not/exist': No such file or directory.
+ '''))
+ self.assertEqual(config, {})
+
+ with MockedOpen({'key': ''}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=['--with-bing-api-keyfile=key'],
+ includes=includes)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Bing API key... no
+ ERROR: 'key' is empty.
+ '''))
+ self.assertEqual(config, {})
+
+ with MockedOpen({'key': 'fake-id fake-key\n'}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=['--with-bing-api-keyfile=key'],
+ includes=includes)
+ self.assertEqual(status, 0)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Bing API key... yes
+ '''))
+ self.assertEqual(config, {
+ 'MOZ_BING_API_CLIENTID': 'fake-id',
+ 'MOZ_BING_API_KEY': 'fake-key',
+ })
+
+ with MockedOpen({'key': 'fake-key\n'}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=['--with-bing-api-keyfile=key'],
+ includes=includes)
+ self.assertEqual(status, 1)
+ self.assertEqual(output, textwrap.dedent('''\
+ checking for the Bing API key... no
+ ERROR: Bing API key file has an invalid format.
+ '''))
+ self.assertEqual(config, {})
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_compile_checks.py b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
new file mode 100644
index 000000000..5913dbe3d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
@@ -0,0 +1,403 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import textwrap
+import unittest
+import mozpack.path as mozpath
+
+from StringIO import StringIO
+
+from buildconfig import topsrcdir
+from common import ConfigureTestSandbox
+from mozbuild.util import exec_
+from mozunit import main
+from test_toolchain_helpers import FakeCompiler
+
+
+class BaseCompileChecks(unittest.TestCase):
+ def get_mock_compiler(self, expected_test_content=None, expected_flags=None):
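+        # Returns a fake compiler that asserts on the flags and generated
+        # test source it is invoked with, then defers to FakeCompiler for
+        # the mocked compilation result.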
+ expected_flags = expected_flags or []
+ def mock_compiler(stdin, args):
+ args, test_file = args[:-1], args[-1]
+ self.assertIn('-c', args)
+ for flag in expected_flags:
+ self.assertIn(flag, args)
+
+ if expected_test_content:
+ with open(test_file) as fh:
+ test_content = fh.read()
+ self.assertEqual(test_content, expected_test_content)
+
+ return FakeCompiler()(None, args)
+ return mock_compiler
+
+ def do_compile_test(self, command, expected_test_content=None,
+ expected_flags=None):
+
+ paths = {
+ os.path.abspath('/usr/bin/mockcc'): self.get_mock_compiler(
+ expected_test_content=expected_test_content,
+ expected_flags=expected_flags),
+ }
+
+ base_dir = os.path.join(topsrcdir, 'build', 'moz.configure')
+
+ mock_compiler_defs = textwrap.dedent('''\
+ @depends(when=True)
+ def extra_toolchain_flags():
+ return []
+
+ include('%s/compilers-util.configure')
+
+ @compiler_class
+ @depends(when=True)
+ def c_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C',
+ )
+
+ @compiler_class
+ @depends(when=True)
+ def cxx_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C++',
+ )
+ ''' % mozpath.normsep(base_dir))
+
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureTestSandbox(paths, config, {}, ['/bin/configure'],
+ out, out)
+ sandbox.include_file(os.path.join(base_dir, 'util.configure'))
+ sandbox.include_file(os.path.join(base_dir, 'checks.configure'))
+ exec_(mock_compiler_defs, sandbox)
+ sandbox.include_file(os.path.join(base_dir, 'compile-checks.configure'))
+
+ status = 0
+ try:
+ exec_(command, sandbox)
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+
+ return config, out.getvalue(), status
+
+
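+# A hedged sketch of the compiler-callable contract the class above relies
+# on: `paths` maps an absolute compiler path to a callable that takes
+# (stdin, args) and returns an (exit_code, stdout, stderr) tuple. The
+# helper below is hypothetical and for illustration only:
+def _example_mock_compiler(stdin, args):
+    # Mimic a compiler that rejects a flag it does not recognize.
+    if '-funknown-flag' in args:
+        return 1, '', 'error: unrecognized command line option'
+    return 0, '', ''
+
+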
+class TestHeaderChecks(BaseCompileChecks):
+ def test_try_compile_include(self):
+ expected_test_content = textwrap.dedent('''\
+ #include <foo.h>
+ #include <bar.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ ''')
+
+ cmd = textwrap.dedent('''\
+ try_compile(['foo.h', 'bar.h'], language='C')
+ ''')
+
+ config, out, status = self.do_compile_test(cmd, expected_test_content)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+
+ def test_try_compile_flags(self):
+ expected_flags = ['--extra', '--flags']
+
+ cmd = textwrap.dedent('''\
+ try_compile(language='C++', flags=['--flags', '--extra'])
+ ''')
+
+ config, out, status = self.do_compile_test(cmd, expected_flags=expected_flags)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+
+ def test_try_compile_failure(self):
+ cmd = textwrap.dedent('''\
+ have_fn = try_compile(body='somefn();', flags=['-funknown-flag'])
+ set_config('HAVE_SOMEFN', have_fn)
+
+ have_another = try_compile(body='anotherfn();', language='C')
+ set_config('HAVE_ANOTHERFN', have_another)
+ ''')
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'HAVE_ANOTHERFN': True,
+ })
+
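+    # In the failure case above, try_compile() evaluates to None, and
+    # set_config() drops None values -- hence HAVE_SOMEFN is absent from
+    # the config while HAVE_ANOTHERFN is recorded as True.
+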
+ def test_try_compile_msg(self):
+ cmd = textwrap.dedent('''\
+ known_flag = try_compile(language='C++', flags=['-fknown-flag'],
+ check_msg='whether -fknown-flag works')
+ set_config('HAVE_KNOWN_FLAG', known_flag)
+ ''')
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'HAVE_KNOWN_FLAG': True})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking whether -fknown-flag works... yes
+ '''))
+
+ def test_check_header(self):
+ expected_test_content = textwrap.dedent('''\
+ #include <foo.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ ''')
+
+ cmd = textwrap.dedent('''\
+ check_header('foo.h')
+ ''')
+
+ config, out, status = self.do_compile_test(cmd,
+ expected_test_content=expected_test_content)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'DEFINES': {'HAVE_FOO_H': True}})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo.h... yes
+ '''))
+
+ def test_check_header_conditional(self):
+ cmd = textwrap.dedent('''\
+ check_headers('foo.h', 'bar.h', when=never)
+ ''')
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(out, '')
+        self.assertEqual(config, {'DEFINES': {}})
+
+ def test_check_header_include(self):
+ expected_test_content = textwrap.dedent('''\
+ #include <std.h>
+ #include <bar.h>
+ #include <foo.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ ''')
+
+ cmd = textwrap.dedent('''\
+ have_foo = check_header('foo.h', includes=['std.h', 'bar.h'])
+ set_config('HAVE_FOO_H', have_foo)
+ ''')
+
+ config, out, status = self.do_compile_test(cmd,
+ expected_test_content=expected_test_content)
+
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'HAVE_FOO_H': True,
+ 'DEFINES': {
+ 'HAVE_FOO_H': True,
+ }
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for foo.h... yes
+ '''))
+
+ def test_check_headers_multiple(self):
+ cmd = textwrap.dedent('''\
+ baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h')
+ set_config('HAVE_BAZ_BAR', baz_bar)
+ set_config('HAVE_QUUX_BAR', quux_bar)
+ ''')
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ 'HAVE_BAZ_BAR': True,
+ 'HAVE_QUUX_BAR': True,
+ 'DEFINES': {
+ 'HAVE_BAZ_FOO_BAR_H': True,
+ 'HAVE_BAZ_QUUX_FOO_BAR_H': True,
+ }
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for baz/foo-bar.h... yes
+ checking for baz-quux/foo-bar.h... yes
+ '''))
+
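+    # The define names asserted above follow autoconf-style mangling:
+    # uppercase the header path, map each non-alphanumeric character to
+    # '_', and prefix the result with HAVE_.
+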
+ def test_check_headers_not_found(self):
+
+ cmd = textwrap.dedent('''\
+ baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h',
+ flags=['-funknown-flag'])
+ set_config('HAVE_BAZ_BAR', baz_bar)
+ set_config('HAVE_QUUX_BAR', quux_bar)
+ ''')
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {'DEFINES': {}})
+ self.assertEqual(out, textwrap.dedent('''\
+ checking for baz/foo-bar.h... no
+ checking for baz-quux/foo-bar.h... no
+ '''))
+
+
+class TestWarningChecks(BaseCompileChecks):
+ def get_warnings(self):
+ return textwrap.dedent('''\
+ set_config('_WARNINGS_CFLAGS', warnings_cflags)
+ set_config('_WARNINGS_CXXFLAGS', warnings_cxxflags)
+ ''')
+
+ def test_check_and_add_gcc_warning(self):
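+        # Note the probe flags asserted below: '-Wno-foo' is checked by
+        # compiling with '-Wfoo' instead, since gcc silently accepts
+        # unknown '-Wno-*' flags, and '-Werror' is added so that an
+        # unknown warning actually fails the probe.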
+ for flag, expected_flags in (
+ ('-Wfoo', ['-Werror', '-Wfoo']),
+ ('-Wno-foo', ['-Werror', '-Wfoo']),
+ ('-Werror=foo', ['-Werror=foo']),
+ ('-Wno-error=foo', ['-Wno-error=foo']),
+ ):
+ cmd = textwrap.dedent('''\
+ check_and_add_gcc_warning('%s')
+ ''' % flag) + self.get_warnings()
+
+ config, out, status = self.do_compile_test(
+ cmd, expected_flags=expected_flags)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': [flag],
+ '_WARNINGS_CXXFLAGS': [flag],
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking whether the C compiler supports {flag}... yes
+ checking whether the C++ compiler supports {flag}... yes
+ '''.format(flag=flag)))
+
+ def test_check_and_add_gcc_warning_one(self):
+ cmd = textwrap.dedent('''\
+ check_and_add_gcc_warning('-Wfoo', cxx_compiler)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': [],
+ '_WARNINGS_CXXFLAGS': ['-Wfoo'],
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking whether the C++ compiler supports -Wfoo... yes
+ '''))
+
+ def test_check_and_add_gcc_warning_when(self):
+ cmd = textwrap.dedent('''\
+ @depends(when=True)
+ def never():
+ return False
+ check_and_add_gcc_warning('-Wfoo', cxx_compiler, when=never)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': [],
+ '_WARNINGS_CXXFLAGS': [],
+ })
+ self.assertEqual(out, '')
+
+ cmd = textwrap.dedent('''\
+ @depends(when=True)
+ def always():
+ return True
+ check_and_add_gcc_warning('-Wfoo', cxx_compiler, when=always)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': [],
+ '_WARNINGS_CXXFLAGS': ['-Wfoo'],
+ })
+ self.assertEqual(out, textwrap.dedent('''\
+ checking whether the C++ compiler supports -Wfoo... yes
+ '''))
+
+ def test_add_gcc_warning(self):
+ cmd = textwrap.dedent('''\
+ add_gcc_warning('-Wfoo')
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': ['-Wfoo'],
+ '_WARNINGS_CXXFLAGS': ['-Wfoo'],
+ })
+ self.assertEqual(out, '')
+
+ def test_add_gcc_warning_one(self):
+ cmd = textwrap.dedent('''\
+ add_gcc_warning('-Wfoo', c_compiler)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': ['-Wfoo'],
+ '_WARNINGS_CXXFLAGS': [],
+ })
+ self.assertEqual(out, '')
+
+ def test_add_gcc_warning_when(self):
+ cmd = textwrap.dedent('''\
+ @depends(when=True)
+ def never():
+ return False
+ add_gcc_warning('-Wfoo', c_compiler, when=never)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': [],
+ '_WARNINGS_CXXFLAGS': [],
+ })
+ self.assertEqual(out, '')
+
+ cmd = textwrap.dedent('''\
+ @depends(when=True)
+ def always():
+ return True
+ add_gcc_warning('-Wfoo', c_compiler, when=always)
+ ''') + self.get_warnings()
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {
+ '_WARNINGS_CFLAGS': ['-Wfoo'],
+ '_WARNINGS_CXXFLAGS': [],
+ })
+ self.assertEqual(out, '')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_configure.py b/python/mozbuild/mozbuild/test/configure/test_configure.py
new file mode 100644
index 000000000..df97ba70d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_configure.py
@@ -0,0 +1,1273 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from StringIO import StringIO
+import os
+import sys
+import textwrap
+import unittest
+
+from mozunit import (
+ main,
+ MockedOpen,
+)
+
+from mozbuild.configure.options import (
+ InvalidOptionError,
+ NegativeOptionValue,
+ PositiveOptionValue,
+)
+from mozbuild.configure import (
+ ConfigureError,
+ ConfigureSandbox,
+)
+from mozbuild.util import exec_
+
+import mozpack.path as mozpath
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestConfigure(unittest.TestCase):
+ def get_config(self, options=[], env={}, configure='moz.configure',
+ prog='/bin/configure'):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, env, [prog] + options, out, out)
+
+ sandbox.run(mozpath.join(test_data_path, configure))
+
+ if '--help' in options:
+ return out.getvalue(), config
+ self.assertEquals('', out.getvalue())
+ return config
+
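+    # Note on the helper above: with --help it returns both the rendered
+    # help text and the config; otherwise it asserts that nothing was
+    # printed and returns only the config dict.
+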
+ def moz_configure(self, source):
+ return MockedOpen({
+ os.path.join(test_data_path,
+ 'moz.configure'): textwrap.dedent(source)
+ })
+
+ def test_defaults(self):
+ config = self.get_config()
+ self.maxDiff = None
+ self.assertEquals({
+ 'CHOICES': NegativeOptionValue(),
+ 'DEFAULTED': PositiveOptionValue(('not-simple',)),
+ 'IS_GCC': NegativeOptionValue(),
+ 'REMAINDER': (PositiveOptionValue(), NegativeOptionValue(),
+ NegativeOptionValue(), NegativeOptionValue()),
+ 'SIMPLE': NegativeOptionValue(),
+ 'VALUES': NegativeOptionValue(),
+ 'VALUES2': NegativeOptionValue(),
+ 'VALUES3': NegativeOptionValue(),
+ 'WITH_ENV': NegativeOptionValue(),
+ }, config)
+
+ def test_help(self):
+ help, config = self.get_config(['--help'], prog='configure')
+
+ self.assertEquals({}, config)
+ self.maxDiff = None
+ self.assertEquals(
+ 'Usage: configure [options]\n'
+ '\n'
+ 'Options: [defaults in brackets after descriptions]\n'
+ ' --help print this message\n'
+ ' --enable-simple Enable simple\n'
+ ' --enable-with-env Enable with env\n'
+ ' --enable-values Enable values\n'
+ ' --without-thing Build without thing\n'
+ ' --with-stuff Build with stuff\n'
+ ' --option Option\n'
+ ' --with-returned-default Returned default [not-simple]\n'
+ ' --returned-choices Choices\n'
+ ' --enable-imports-in-template\n'
+ ' Imports in template\n'
+ ' --enable-include Include\n'
+ ' --with-imports Imports\n'
+ '\n'
+ 'Environment variables:\n'
+ ' CC C Compiler\n',
+ help
+ )
+
+ def test_unknown(self):
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--unknown'])
+
+ def test_simple(self):
+ for config in (
+ self.get_config(),
+ self.get_config(['--disable-simple']),
+ # Last option wins.
+ self.get_config(['--enable-simple', '--disable-simple']),
+ ):
+ self.assertNotIn('ENABLED_SIMPLE', config)
+ self.assertIn('SIMPLE', config)
+ self.assertEquals(NegativeOptionValue(), config['SIMPLE'])
+
+ for config in (
+ self.get_config(['--enable-simple']),
+ self.get_config(['--disable-simple', '--enable-simple']),
+ ):
+ self.assertIn('ENABLED_SIMPLE', config)
+ self.assertIn('SIMPLE', config)
+ self.assertEquals(PositiveOptionValue(), config['SIMPLE'])
+ self.assertIs(config['SIMPLE'], config['ENABLED_SIMPLE'])
+
+ # --enable-simple doesn't take values.
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--enable-simple=value'])
+
+ def test_with_env(self):
+ for config in (
+ self.get_config(),
+ self.get_config(['--disable-with-env']),
+ self.get_config(['--enable-with-env', '--disable-with-env']),
+ self.get_config(env={'MOZ_WITH_ENV': ''}),
+            # Command-line options win over environment variables.
+ self.get_config(['--disable-with-env'],
+ env={'MOZ_WITH_ENV': '1'}),
+ ):
+ self.assertIn('WITH_ENV', config)
+ self.assertEquals(NegativeOptionValue(), config['WITH_ENV'])
+
+ for config in (
+ self.get_config(['--enable-with-env']),
+ self.get_config(['--disable-with-env', '--enable-with-env']),
+ self.get_config(env={'MOZ_WITH_ENV': '1'}),
+ self.get_config(['--enable-with-env'],
+ env={'MOZ_WITH_ENV': ''}),
+ ):
+ self.assertIn('WITH_ENV', config)
+ self.assertEquals(PositiveOptionValue(), config['WITH_ENV'])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--enable-with-env=value'])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(env={'MOZ_WITH_ENV': 'value'})
+
+ def test_values(self, name='VALUES'):
+ for config in (
+ self.get_config(),
+ self.get_config(['--disable-values']),
+ self.get_config(['--enable-values', '--disable-values']),
+ ):
+ self.assertIn(name, config)
+ self.assertEquals(NegativeOptionValue(), config[name])
+
+ for config in (
+ self.get_config(['--enable-values']),
+ self.get_config(['--disable-values', '--enable-values']),
+ ):
+ self.assertIn(name, config)
+ self.assertEquals(PositiveOptionValue(), config[name])
+
+ config = self.get_config(['--enable-values=foo'])
+ self.assertIn(name, config)
+ self.assertEquals(PositiveOptionValue(('foo',)), config[name])
+
+ config = self.get_config(['--enable-values=foo,bar'])
+ self.assertIn(name, config)
+ self.assertTrue(config[name])
+ self.assertEquals(PositiveOptionValue(('foo', 'bar')), config[name])
+
+ def test_values2(self):
+ self.test_values('VALUES2')
+
+ def test_values3(self):
+ self.test_values('VALUES3')
+
+ def test_returned_default(self):
+ config = self.get_config(['--enable-simple'])
+ self.assertIn('DEFAULTED', config)
+ self.assertEquals(
+ PositiveOptionValue(('simple',)), config['DEFAULTED'])
+
+ config = self.get_config(['--disable-simple'])
+ self.assertIn('DEFAULTED', config)
+ self.assertEquals(
+ PositiveOptionValue(('not-simple',)), config['DEFAULTED'])
+
+ def test_returned_choices(self):
+ for val in ('a', 'b', 'c'):
+ config = self.get_config(
+ ['--enable-values=alpha', '--returned-choices=%s' % val])
+ self.assertIn('CHOICES', config)
+ self.assertEquals(PositiveOptionValue((val,)), config['CHOICES'])
+
+ for val in ('0', '1', '2'):
+ config = self.get_config(
+ ['--enable-values=numeric', '--returned-choices=%s' % val])
+ self.assertIn('CHOICES', config)
+ self.assertEquals(PositiveOptionValue((val,)), config['CHOICES'])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--enable-values=numeric',
+ '--returned-choices=a'])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--enable-values=alpha', '--returned-choices=0'])
+
+ def test_included(self):
+ config = self.get_config(env={'CC': 'gcc'})
+ self.assertIn('IS_GCC', config)
+ self.assertEquals(config['IS_GCC'], True)
+
+ config = self.get_config(
+ ['--enable-include=extra.configure', '--extra'])
+ self.assertIn('EXTRA', config)
+ self.assertEquals(PositiveOptionValue(), config['EXTRA'])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(['--extra'])
+
+ def test_template(self):
+ config = self.get_config(env={'CC': 'gcc'})
+ self.assertIn('CFLAGS', config)
+ self.assertEquals(config['CFLAGS'], ['-Werror=foobar'])
+
+ config = self.get_config(env={'CC': 'clang'})
+ self.assertNotIn('CFLAGS', config)
+
+ def test_imports(self):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, {}, [], out, out)
+
+ with self.assertRaises(ImportError):
+ exec_(textwrap.dedent('''
+ @template
+ def foo():
+ import sys
+ foo()'''),
+ sandbox
+ )
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports('sys')
+ def foo():
+ return sys'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), sys)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports(_from='os', _import='path')
+ def foo():
+ return path'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), os.path)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports(_from='os', _import='path', _as='os_path')
+ def foo():
+ return os_path'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), os.path)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports('__builtin__')
+ def foo():
+ return __builtin__'''),
+ sandbox
+ )
+
+ import __builtin__
+ self.assertIs(sandbox['foo'](), __builtin__)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports(_from='__builtin__', _import='open')
+ def foo():
+ return open('%s')''' % os.devnull),
+ sandbox
+ )
+
+ f = sandbox['foo']()
+ self.assertEquals(f.name, os.devnull)
+ f.close()
+
+        # Importing __builtin__ as __builtins__ unlocks the sandbox: the
+        # function regains the real builtins, so a plain `import` statement
+        # works again inside it.
+ exec_(textwrap.dedent('''
+ @template
+ @imports(_import='__builtin__', _as='__builtins__')
+ def foo():
+ import sys
+ return sys'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), sys)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports('__sandbox__')
+ def foo():
+ return __sandbox__'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), sandbox)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports(_import='__sandbox__', _as='s')
+ def foo():
+ return s'''),
+ sandbox
+ )
+
+ self.assertIs(sandbox['foo'](), sandbox)
+
+        # Nothing leaked from the functions executed above: the sandbox
+        # holds only the builtins and the last `foo` template.
+ self.assertEquals(sandbox.keys(), ['__builtins__', 'foo'])
+ self.assertEquals(sandbox['__builtins__'], ConfigureSandbox.BUILTINS)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports('sys')
+ def foo():
+ @depends(when=True)
+ def bar():
+ return sys
+ return bar
+ bar = foo()'''),
+ sandbox
+ )
+
+ with self.assertRaises(NameError) as e:
+ sandbox._depends[sandbox['bar']].result
+
+ self.assertEquals(e.exception.message,
+ "global name 'sys' is not defined")
+
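+    # The pattern exercised above: plain `import` is blocked inside the
+    # sandbox, and @imports injects the named module into the decorated
+    # function instead. A sketch of the shapes used (commented, for
+    # illustration only):
+    #
+    #   @template
+    #   @imports(_from='os', _import='path', _as='os_path')
+    #   def helper():
+    #       return os_path
+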
+ def test_apply_imports(self):
+ imports = []
+
+ class CountApplyImportsSandbox(ConfigureSandbox):
+ def _apply_imports(self, *args, **kwargs):
+ imports.append((args, kwargs))
+ super(CountApplyImportsSandbox, self)._apply_imports(
+ *args, **kwargs)
+
+ config = {}
+ out = StringIO()
+ sandbox = CountApplyImportsSandbox(config, {}, [], out, out)
+
+ exec_(textwrap.dedent('''
+ @template
+ @imports('sys')
+ def foo():
+ return sys
+ foo()
+ foo()'''),
+ sandbox
+ )
+
+ self.assertEquals(len(imports), 1)
+
+ def test_os_path(self):
+ config = self.get_config(['--with-imports=%s' % __file__])
+ self.assertIn('HAS_ABSPATH', config)
+ self.assertEquals(config['HAS_ABSPATH'], True)
+ self.assertIn('HAS_GETATIME', config)
+ self.assertEquals(config['HAS_GETATIME'], True)
+ self.assertIn('HAS_GETATIME2', config)
+ self.assertEquals(config['HAS_GETATIME2'], False)
+
+ def test_template_call(self):
+ config = self.get_config(env={'CC': 'gcc'})
+ self.assertIn('TEMPLATE_VALUE', config)
+ self.assertEquals(config['TEMPLATE_VALUE'], 42)
+ self.assertIn('TEMPLATE_VALUE_2', config)
+ self.assertEquals(config['TEMPLATE_VALUE_2'], 21)
+
+ def test_template_imports(self):
+ config = self.get_config(['--enable-imports-in-template'])
+ self.assertIn('PLATFORM', config)
+ self.assertEquals(config['PLATFORM'], sys.platform)
+
+ def test_decorators(self):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, {}, [], out, out)
+
+ sandbox.include_file(mozpath.join(test_data_path, 'decorators.configure'))
+
+ self.assertNotIn('FOO', sandbox)
+ self.assertNotIn('BAR', sandbox)
+ self.assertNotIn('QUX', sandbox)
+
+ def test_set_config(self):
+ def get_config(*args):
+ return self.get_config(*args, configure='set_config.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config(['--set-foo'])
+ self.assertIn('FOO', config)
+ self.assertEquals(config['FOO'], True)
+
+ config = get_config(['--set-bar'])
+ self.assertNotIn('FOO', config)
+ self.assertIn('BAR', config)
+ self.assertEquals(config['BAR'], True)
+
+ config = get_config(['--set-value=qux'])
+ self.assertIn('VALUE', config)
+ self.assertEquals(config['VALUE'], 'qux')
+
+ config = get_config(['--set-name=hoge'])
+ self.assertIn('hoge', config)
+ self.assertEquals(config['hoge'], True)
+
+ config = get_config([])
+ self.assertEquals(config, {'BAR': False})
+
+ with self.assertRaises(ConfigureError):
+ # Both --set-foo and --set-name=FOO are going to try to
+ # set_config('FOO'...)
+ get_config(['--set-foo', '--set-name=FOO'])
+
+ def test_set_config_when(self):
+ with self.moz_configure('''
+ option('--with-qux', help='qux')
+ set_config('FOO', 'foo', when=True)
+ set_config('BAR', 'bar', when=False)
+ set_config('QUX', 'qux', when='--with-qux')
+ '''):
+ config = self.get_config()
+ self.assertEquals(config, {
+ 'FOO': 'foo',
+ })
+ config = self.get_config(['--with-qux'])
+ self.assertEquals(config, {
+ 'FOO': 'foo',
+ 'QUX': 'qux',
+ })
+
+ def test_set_define(self):
+ def get_config(*args):
+ return self.get_config(*args, configure='set_define.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {'DEFINES': {}})
+
+ config = get_config(['--set-foo'])
+ self.assertIn('FOO', config['DEFINES'])
+ self.assertEquals(config['DEFINES']['FOO'], True)
+
+ config = get_config(['--set-bar'])
+ self.assertNotIn('FOO', config['DEFINES'])
+ self.assertIn('BAR', config['DEFINES'])
+ self.assertEquals(config['DEFINES']['BAR'], True)
+
+ config = get_config(['--set-value=qux'])
+ self.assertIn('VALUE', config['DEFINES'])
+ self.assertEquals(config['DEFINES']['VALUE'], 'qux')
+
+ config = get_config(['--set-name=hoge'])
+ self.assertIn('hoge', config['DEFINES'])
+ self.assertEquals(config['DEFINES']['hoge'], True)
+
+ config = get_config([])
+ self.assertEquals(config['DEFINES'], {'BAR': False})
+
+ with self.assertRaises(ConfigureError):
+ # Both --set-foo and --set-name=FOO are going to try to
+ # set_define('FOO'...)
+ get_config(['--set-foo', '--set-name=FOO'])
+
+ def test_set_define_when(self):
+ with self.moz_configure('''
+ option('--with-qux', help='qux')
+ set_define('FOO', 'foo', when=True)
+ set_define('BAR', 'bar', when=False)
+ set_define('QUX', 'qux', when='--with-qux')
+ '''):
+ config = self.get_config()
+ self.assertEquals(config['DEFINES'], {
+ 'FOO': 'foo',
+ })
+ config = self.get_config(['--with-qux'])
+ self.assertEquals(config['DEFINES'], {
+ 'FOO': 'foo',
+ 'QUX': 'qux',
+ })
+
+ def test_imply_option_simple(self):
+ def get_config(*args):
+ return self.get_config(
+ *args, configure='imply_option/simple.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config([])
+ self.assertEquals(config, {})
+
+ config = get_config(['--enable-foo'])
+ self.assertIn('BAR', config)
+ self.assertEquals(config['BAR'], PositiveOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(['--enable-foo', '--disable-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ "'--enable-bar' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line")
+
+ def test_imply_option_negative(self):
+ def get_config(*args):
+ return self.get_config(
+ *args, configure='imply_option/negative.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config([])
+ self.assertEquals(config, {})
+
+ config = get_config(['--enable-foo'])
+ self.assertIn('BAR', config)
+ self.assertEquals(config['BAR'], NegativeOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(['--enable-foo', '--enable-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ "'--disable-bar' implied by '--enable-foo' conflicts with "
+ "'--enable-bar' from the command-line")
+
+ config = get_config(['--disable-hoge'])
+ self.assertIn('BAR', config)
+ self.assertEquals(config['BAR'], NegativeOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(['--disable-hoge', '--enable-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ "'--disable-bar' implied by '--disable-hoge' conflicts with "
+ "'--enable-bar' from the command-line")
+
+ def test_imply_option_values(self):
+ def get_config(*args):
+ return self.get_config(
+ *args, configure='imply_option/values.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config([])
+ self.assertEquals(config, {})
+
+ config = get_config(['--enable-foo=a'])
+ self.assertIn('BAR', config)
+ self.assertEquals(config['BAR'], PositiveOptionValue(('a',)))
+
+ config = get_config(['--enable-foo=a,b'])
+ self.assertIn('BAR', config)
+        self.assertEquals(config['BAR'], PositiveOptionValue(('a', 'b')))
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(['--enable-foo=a,b', '--disable-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ "'--enable-bar=a,b' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line")
+
+ def test_imply_option_infer(self):
+ def get_config(*args):
+ return self.get_config(
+ *args, configure='imply_option/infer.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config([])
+ self.assertEquals(config, {})
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(['--enable-foo', '--disable-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ "'--enable-bar' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line")
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config([], configure='imply_option/infer_ko.configure')
+
+ self.assertEquals(
+ e.exception.message,
+ "Cannot infer what implies '--enable-bar'. Please add a `reason` "
+ "to the `imply_option` call.")
+
+ def test_imply_option_immediate_value(self):
+ def get_config(*args):
+ return self.get_config(
+ *args, configure='imply_option/imm.configure')
+
+ help, config = get_config(['--help'])
+ self.assertEquals(config, {})
+
+ config = get_config([])
+ self.assertEquals(config, {})
+
+ config_path = mozpath.abspath(
+ mozpath.join(test_data_path, 'imply_option', 'imm.configure'))
+
+ with self.assertRaisesRegexp(InvalidOptionError,
+ "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
+ "'--disable-foo' from the command-line" % config_path):
+ get_config(['--disable-foo'])
+
+ with self.assertRaisesRegexp(InvalidOptionError,
+ "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
+ " with '--enable-bar=a,b,c' from the command-line" % config_path):
+ get_config(['--enable-bar=a,b,c'])
+
+ with self.assertRaisesRegexp(InvalidOptionError,
+ "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
+ " with '--enable-baz=QUUX' from the command-line" % config_path):
+ get_config(['--enable-baz=QUUX'])
+
+ def test_imply_option_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ imply_option('--with-foo', ('a',), 'bar')
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "`--with-foo`, emitted from `%s` line 2, is unknown."
+ % mozpath.join(test_data_path, 'moz.configure'))
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ imply_option('--with-foo', 42, 'bar')
+
+ option('--with-foo', help='foo')
+ @depends('--with-foo')
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Unexpected type: 'int'")
+
+ def test_imply_option_when(self):
+ with self.moz_configure('''
+ option('--with-foo', help='foo')
+ imply_option('--with-qux', True, when='--with-foo')
+ option('--with-qux', help='qux')
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ '''):
+ config = self.get_config()
+ self.assertEquals(config, {
+ 'QUX': NegativeOptionValue(),
+ })
+
+ config = self.get_config(['--with-foo'])
+ self.assertEquals(config, {
+ 'QUX': PositiveOptionValue(),
+ })
+
+ def test_option_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('option("--with-foo", help="foo")'):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `--with-foo` is not handled ; reference it with a @depends'
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option("--with-foo", help="foo")
+ option("--with-foo", help="foo")
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `--with-foo` already defined'
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option(env="MOZ_FOO", help="foo")
+ option(env="MOZ_FOO", help="foo")
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `MOZ_FOO` already defined'
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ option(env="MOZ_FOO", help="foo")
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `MOZ_FOO` already defined'
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option(env="MOZ_FOO", help="foo")
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `MOZ_FOO` already defined'
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ option('--with-foo', help="foo")
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Option `--with-foo` already defined'
+ )
+
+ def test_option_when(self):
+ with self.moz_configure('''
+ option('--with-foo', help='foo', when=True)
+ option('--with-bar', help='bar', when=False)
+ option('--with-qux', env="QUX", help='qux', when='--with-foo')
+
+ set_config('FOO', depends('--with-foo', when=True)(lambda x: x))
+ set_config('BAR', depends('--with-bar', when=False)(lambda x: x))
+ set_config('QUX', depends('--with-qux', when='--with-foo')(lambda x: x))
+ '''):
+ config = self.get_config()
+ self.assertEquals(config, {
+ 'FOO': NegativeOptionValue(),
+ })
+
+ config = self.get_config(['--with-foo'])
+ self.assertEquals(config, {
+ 'FOO': PositiveOptionValue(),
+ 'QUX': NegativeOptionValue(),
+ })
+
+ config = self.get_config(['--with-foo', '--with-qux'])
+ self.assertEquals(config, {
+ 'FOO': PositiveOptionValue(),
+ 'QUX': PositiveOptionValue(),
+ })
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(['--with-bar'])
+
+ self.assertEquals(
+ e.exception.message,
+ '--with-bar is not available in this configuration'
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(['--with-qux'])
+
+ self.assertEquals(
+ e.exception.message,
+ '--with-qux is not available in this configuration'
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(['QUX=1'])
+
+ self.assertEquals(
+ e.exception.message,
+ 'QUX is not available in this configuration'
+ )
+
+ config = self.get_config(env={'QUX': '1'})
+ self.assertEquals(config, {
+ 'FOO': NegativeOptionValue(),
+ })
+
+ help, config = self.get_config(['--help'])
+ self.assertEquals(help, textwrap.dedent('''\
+ Usage: configure [options]
+
+ Options: [defaults in brackets after descriptions]
+ --help print this message
+ --with-foo foo
+
+ Environment variables:
+ '''))
+
+ help, config = self.get_config(['--help', '--with-foo'])
+ self.assertEquals(help, textwrap.dedent('''\
+ Usage: configure [options]
+
+ Options: [defaults in brackets after descriptions]
+ --help print this message
+ --with-foo foo
+ --with-qux qux
+
+ Environment variables:
+ '''))
+
+ with self.moz_configure('''
+ option('--with-foo', help='foo', when=True)
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ '''):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '@depends function needs the same `when` as '
+ 'options it depends on')
+
+ with self.moz_configure('''
+ @depends(when=True)
+ def always():
+ return True
+ @depends(when=True)
+ def always2():
+ return True
+ option('--with-foo', help='foo', when=always)
+ set_config('FOO', depends('--with-foo', when=always2)(lambda x: x))
+ '''):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '@depends function needs the same `when` as '
+ 'options it depends on')
+
+ def test_include_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('include("../foo.configure")'):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Cannot include `%s` because it is not in a subdirectory of `%s`'
+ % (mozpath.normpath(mozpath.join(test_data_path, '..',
+ 'foo.configure')),
+ mozpath.normsep(test_data_path))
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ include('extra.configure')
+ include('extra.configure')
+ '''):
+ self.get_config()
+
+ self.assertEquals(
+ e.exception.message,
+ 'Cannot include `%s` because it was included already.'
+ % mozpath.normpath(mozpath.join(test_data_path,
+ 'extra.configure'))
+ )
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ include(42)
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message, "Unexpected type: 'int'")
+
+ def test_include_when(self):
+ with MockedOpen({
+ os.path.join(test_data_path, 'moz.configure'): textwrap.dedent('''
+ option('--with-foo', help='foo')
+
+ include('always.configure', when=True)
+ include('never.configure', when=False)
+ include('foo.configure', when='--with-foo')
+
+ set_config('FOO', foo)
+ set_config('BAR', bar)
+ set_config('QUX', qux)
+ '''),
+ os.path.join(test_data_path, 'always.configure'): textwrap.dedent('''
+ option('--with-bar', help='bar')
+ @depends('--with-bar')
+ def bar(x):
+ if x:
+ return 'bar'
+ '''),
+ os.path.join(test_data_path, 'never.configure'): textwrap.dedent('''
+ option('--with-qux', help='qux')
+ @depends('--with-qux')
+ def qux(x):
+ if x:
+ return 'qux'
+ '''),
+ os.path.join(test_data_path, 'foo.configure'): textwrap.dedent('''
+ option('--with-foo-really', help='really foo')
+ @depends('--with-foo-really')
+ def foo(x):
+ if x:
+ return 'foo'
+
+ include('foo2.configure', when='--with-foo-really')
+ '''),
+ os.path.join(test_data_path, 'foo2.configure'): textwrap.dedent('''
+ set_config('FOO2', True)
+ '''),
+ }):
+ config = self.get_config()
+ self.assertEquals(config, {})
+
+ config = self.get_config(['--with-foo'])
+ self.assertEquals(config, {})
+
+ config = self.get_config(['--with-bar'])
+ self.assertEquals(config, {
+ 'BAR': 'bar',
+ })
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(['--with-qux'])
+
+ self.assertEquals(
+ e.exception.message,
+ '--with-qux is not available in this configuration'
+ )
+
+ config = self.get_config(['--with-foo', '--with-foo-really'])
+ self.assertEquals(config, {
+ 'FOO': 'foo',
+ 'FOO2': True,
+ })
+
+ def test_sandbox_failures(self):
+ with self.assertRaises(KeyError) as e:
+ with self.moz_configure('''
+ include = 42
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message, 'Cannot reassign builtins')
+
+ with self.assertRaises(KeyError) as e:
+ with self.moz_configure('''
+ foo = 42
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ 'Cannot assign `foo` because it is neither a '
+ '@depends nor a @template')
+
+ def test_depends_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @depends()
+ def foo():
+ return
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "@depends needs at least one argument")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @depends('--with-foo')
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "'--with-foo' is not a known option. Maybe it's "
+ "declared too late?")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @depends('--with-foo=42')
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Option must not contain an '='")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ @depends(42)
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Cannot use object of type 'int' as argument "
+ "to @depends")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @depends('--help')
+ def foo(value):
+ yield
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Cannot decorate generator functions with @depends")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ depends('--help')(42)
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Unexpected type: 'int'")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ foo()
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "The `foo` function may not be called")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ @depends('--help', foo=42)
+ def foo(_):
+ return
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "depends_impl() got an unexpected keyword argument 'foo'")
+
+ def test_depends_when(self):
+ with self.moz_configure('''
+ @depends(when=True)
+ def foo():
+ return 'foo'
+
+ set_config('FOO', foo)
+
+ @depends(when=False)
+ def bar():
+ return 'bar'
+
+ set_config('BAR', bar)
+
+ option('--with-qux', help='qux')
+ @depends(when='--with-qux')
+ def qux():
+ return 'qux'
+
+ set_config('QUX', qux)
+ '''):
+ config = self.get_config()
+ self.assertEquals(config, {
+ 'FOO': 'foo',
+ })
+
+ config = self.get_config(['--with-qux'])
+ self.assertEquals(config, {
+ 'FOO': 'foo',
+ 'QUX': 'qux',
+ })
+
+ def test_imports_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @imports('os')
+ @template
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '@imports must appear after @template')
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @imports('os')
+ @depends('--foo')
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '@imports must appear after @depends')
+
+ for import_ in (
+ "42",
+ "_from=42, _import='os'",
+ "_from='os', _import='path', _as=42",
+ ):
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ @imports(%s)
+ @template
+ def foo(value):
+ return value
+ ''' % import_):
+ self.get_config()
+
+ self.assertEquals(e.exception.message, "Unexpected type: 'int'")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure('''
+ @imports('os', 42)
+ @template
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message, "Unexpected type: 'int'")
+
+ with self.assertRaises(ValueError) as e:
+ with self.moz_configure('''
+ @imports('os*')
+ def foo(value):
+ return value
+ '''):
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ "Invalid argument to @imports: 'os*'")
+
+ def test_only_when(self):
+ moz_configure = '''
+ option('--enable-when', help='when')
+ @depends('--enable-when', '--help')
+ def when(value, _):
+ return bool(value)
+
+ with only_when(when):
+ option('--foo', nargs='*', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ set_config('FOO', foo)
+ set_define('FOO', foo)
+
+            # It is possible to depend on a function defined in an only_when
+            # block; it then resolves to `None` when the condition is false.
+ set_config('BAR', depends(foo)(lambda x: x))
+ set_define('BAR', depends(foo)(lambda x: x))
+ '''
+
+ with self.moz_configure(moz_configure):
+ config = self.get_config()
+ self.assertEqual(config, {
+ 'DEFINES': {},
+ })
+
+ config = self.get_config(['--enable-when'])
+ self.assertEqual(config, {
+ 'BAR': NegativeOptionValue(),
+ 'FOO': NegativeOptionValue(),
+ 'DEFINES': {
+ 'BAR': NegativeOptionValue(),
+ 'FOO': NegativeOptionValue(),
+ },
+ })
+
+ config = self.get_config(['--enable-when', '--foo=bar'])
+ self.assertEqual(config, {
+ 'BAR': PositiveOptionValue(['bar']),
+ 'FOO': PositiveOptionValue(['bar']),
+ 'DEFINES': {
+ 'BAR': PositiveOptionValue(['bar']),
+ 'FOO': PositiveOptionValue(['bar']),
+ },
+ })
+
+ # The --foo option doesn't exist when --enable-when is not given.
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(['--foo'])
+
+ self.assertEquals(e.exception.message,
+ '--foo is not available in this configuration')
+
+        # Cannot depend on an option defined in an only_when block, because we
+ # don't know what OptionValue would make sense.
+ with self.moz_configure(moz_configure + '''
+ set_config('QUX', depends('--foo')(lambda x: x))
+ '''):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '@depends function needs the same `when` as '
+ 'options it depends on')
+
+ with self.moz_configure(moz_configure + '''
+ set_config('QUX', depends('--foo', when=when)(lambda x: x))
+ '''):
+ self.get_config(['--enable-when'])
+
+        # Using imply_option for an option defined in an only_when block fails
+ # similarly if the imply_option happens outside the block.
+ with self.moz_configure('''
+ imply_option('--foo', True)
+ ''' + moz_configure):
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config()
+
+ self.assertEquals(e.exception.message,
+ '--foo is not available in this configuration')
+
+ # And similarly doesn't fail when the condition is true.
+ with self.moz_configure('''
+ imply_option('--foo', True)
+ ''' + moz_configure):
+ self.get_config(['--enable-when'])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_lint.py b/python/mozbuild/mozbuild/test/configure/test_lint.py
new file mode 100644
index 000000000..6ac2bb356
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_lint.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from StringIO import StringIO
+import os
+import textwrap
+import unittest
+
+from mozunit import (
+ main,
+ MockedOpen,
+)
+
+from mozbuild.configure import ConfigureError
+from mozbuild.configure.lint import LintSandbox
+
+import mozpack.path as mozpath
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestLint(unittest.TestCase):
+ def lint_test(self, options=[], env={}):
+ sandbox = LintSandbox(env, ['configure'] + options)
+
+ sandbox.run(mozpath.join(test_data_path, 'moz.configure'))
+
+ def moz_configure(self, source):
+ return MockedOpen({
+ os.path.join(test_data_path,
+ 'moz.configure'): textwrap.dedent(source)
+ })
+
+ def test_depends_failures(self):
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ @depends('--help', foo)
+ def bar(help, foo):
+ return
+ '''):
+ self.lint_test()
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ @depends('--help', foo)
+ def bar(help, foo):
+ return
+ '''):
+ self.lint_test()
+
+ self.assertEquals(e.exception.message,
+ "`bar` depends on '--help' and `foo`. "
+ "`foo` must depend on '--help'")
+
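+        # The linted rule, as the error messages below spell out: if `bar`
+        # depends on '--help' and `foo`, then `foo` must itself depend on
+        # '--help' whenever it uses @imports, since it may run during
+        # --help processing.
+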
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ @template
+ def tmpl():
+ qux = 42
+
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ qux
+ return value
+
+ @depends('--help', foo)
+ def bar(help, foo):
+ return
+ tmpl()
+ '''):
+ self.lint_test()
+
+ self.assertEquals(e.exception.message,
+ "`bar` depends on '--help' and `foo`. "
+ "`foo` must depend on '--help'")
+
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ include(foo)
+ '''):
+ self.lint_test()
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ include(foo)
+ '''):
+ self.lint_test()
+
+ self.assertEquals(e.exception.message,
+ "Missing @depends for `foo`: '--help'")
+
+ # There is a default restricted `os` module when there is no explicit
+ # @imports, and it's fine to use it without a dependency on --help.
+ with self.moz_configure('''
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ os
+ return value
+
+ include(foo)
+ '''):
+ self.lint_test()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
new file mode 100644
index 000000000..7c318adef
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from mozunit import main
+from mozpack import path as mozpath
+
+from common import BaseConfigureTest
+
+
+class TestMozConfigure(BaseConfigureTest):
+ def test_moz_configure_options(self):
+ def get_value_for(args=[], environ={}, mozconfig=''):
+ sandbox = self.get_sandbox({}, {}, args, environ, mozconfig)
+
+ # Add a fake old-configure option
+ sandbox.option_impl('--with-foo', nargs='*',
+ help='Help missing for old configure options')
+
+ result = sandbox._value_for(sandbox['all_configure_options'])
+ shell = mozpath.abspath('/bin/sh')
+ return result.replace('CONFIG_SHELL=%s ' % shell, '')
+
+ self.assertEquals('--enable-application=browser',
+ get_value_for(['--enable-application=browser']))
+
+ self.assertEquals('--enable-application=browser '
+ 'MOZ_PROFILING=1',
+ get_value_for(['--enable-application=browser',
+ 'MOZ_PROFILING=1']))
+
+ value = get_value_for(
+ environ={'MOZ_PROFILING': '1'},
+ mozconfig='ac_add_options --enable-project=js')
+
+ self.assertEquals('--enable-project=js MOZ_PROFILING=1',
+ value)
+
+ # --disable-js-shell is the default, so it's filtered out.
+ self.assertEquals('--enable-application=browser',
+ get_value_for(['--enable-application=browser',
+ '--disable-js-shell']))
+
+ # Normally, --without-foo would be filtered out because that's the
+ # default, but since it is a (fake) old-configure option, it always
+ # appears.
+ self.assertEquals('--enable-application=browser --without-foo',
+ get_value_for(['--enable-application=browser',
+ '--without-foo']))
+ self.assertEquals('--enable-application=browser --with-foo',
+ get_value_for(['--enable-application=browser',
+ '--with-foo']))
+
+ self.assertEquals("--enable-application=browser '--with-foo=foo bar'",
+ get_value_for(['--enable-application=browser',
+ '--with-foo=foo bar']))
+
+ def test_nsis_version(self):
+        this = self  # captured for use inside the FakeNSIS class below
+
+ class FakeNSIS(object):
+ def __init__(self, version):
+ self.version = version
+
+ def __call__(self, stdin, args):
+ this.assertEquals(args, ('-version',))
+ return 0, self.version, ''
+
+ def check_nsis_version(version):
+ sandbox = self.get_sandbox(
+ {'/usr/bin/makensis': FakeNSIS(version)}, {}, [],
+ {'PATH': '/usr/bin', 'MAKENSISU': '/usr/bin/makensis'})
+ return sandbox._value_for(sandbox['nsis_version'])
+
+ with self.assertRaises(SystemExit) as e:
+ check_nsis_version('v2.5')
+
+ with self.assertRaises(SystemExit) as e:
+ check_nsis_version('v3.0a2')
+
+ self.assertEquals(check_nsis_version('v3.0b1'), '3.0b1')
+ self.assertEquals(check_nsis_version('v3.0b2'), '3.0b2')
+ self.assertEquals(check_nsis_version('v3.0rc1'), '3.0rc1')
+ self.assertEquals(check_nsis_version('v3.0'), '3.0')
+ self.assertEquals(check_nsis_version('v3.0-2'), '3.0')
+ self.assertEquals(check_nsis_version('v3.0.1'), '3.0')
+ self.assertEquals(check_nsis_version('v3.1'), '3.1')
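+
+        # As the expectations above show, only the major.minor component
+        # (plus a b/rc pre-release tag) is kept: micro versions and '-N'
+        # package suffixes are dropped, and anything older than 3.0b1
+        # raises an error.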
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_options.py b/python/mozbuild/mozbuild/test/configure/test_options.py
new file mode 100644
index 000000000..e504f9e05
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_options.py
@@ -0,0 +1,852 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.configure.options import (
+ CommandLineHelper,
+ ConflictingOptionError,
+ InvalidOptionError,
+ NegativeOptionValue,
+ Option,
+ PositiveOptionValue,
+)
+
+
+class Option(Option):
+ def __init__(self, *args, **kwargs):
+ kwargs['help'] = 'Dummy help'
+ super(Option, self).__init__(*args, **kwargs)
+
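+# The subclass above intentionally shadows the imported Option so every
+# Option constructed in these tests carries dummy help text without
+# repeating it at each call site.
+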
+
+class TestOption(unittest.TestCase):
+ def test_option(self):
+ option = Option('--option')
+ self.assertEquals(option.prefix, '')
+ self.assertEquals(option.name, 'option')
+ self.assertEquals(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option('--enable-option')
+ self.assertEquals(option.prefix, 'enable')
+ self.assertEquals(option.name, 'option')
+ self.assertEquals(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option('--disable-option')
+ self.assertEquals(option.prefix, 'disable')
+ self.assertEquals(option.name, 'option')
+ self.assertEquals(option.env, None)
+ self.assertTrue(option.default)
+
+ option = Option('--with-option')
+ self.assertEquals(option.prefix, 'with')
+ self.assertEquals(option.name, 'option')
+ self.assertEquals(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option('--without-option')
+ self.assertEquals(option.prefix, 'without')
+ self.assertEquals(option.name, 'option')
+ self.assertEquals(option.env, None)
+ self.assertTrue(option.default)
+
+ option = Option('--without-option-foo', env='MOZ_OPTION')
+ self.assertEquals(option.env, 'MOZ_OPTION')
+
+ option = Option(env='MOZ_OPTION')
+ self.assertEquals(option.prefix, '')
+ self.assertEquals(option.name, None)
+ self.assertEquals(option.env, 'MOZ_OPTION')
+ self.assertFalse(option.default)
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=0, default=('a',))
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=1, default=())
+ self.assertEquals(
+ e.exception.message,
+ 'default must be a bool, a string or a tuple of strings')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=1, default=True)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=1, default=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=2, default=())
+ self.assertEquals(
+ e.exception.message,
+ 'default must be a bool, a string or a tuple of strings')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=2, default=True)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=2, default=('a',))
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs='?', default=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs='+', default=())
+ self.assertEquals(
+ e.exception.message,
+ 'default must be a bool, a string or a tuple of strings')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs='+', default=True)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ # --disable options with a nargs value that requires at least one
+ # argument need to be given a default.
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--disable-option', nargs=1)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--disable-option', nargs='+')
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ # Test nargs inference from default value
+ option = Option('--with-foo', default=True)
+ self.assertEquals(option.nargs, 0)
+
+ option = Option('--with-foo', default=False)
+ self.assertEquals(option.nargs, 0)
+
+ option = Option('--with-foo', default='a')
+ self.assertEquals(option.nargs, '?')
+
+ option = Option('--with-foo', default=('a',))
+ self.assertEquals(option.nargs, '?')
+
+ option = Option('--with-foo', default=('a', 'b'))
+ self.assertEquals(option.nargs, '*')
+
+ option = Option(env='FOO', default=True)
+ self.assertEquals(option.nargs, 0)
+
+ option = Option(env='FOO', default=False)
+ self.assertEquals(option.nargs, 0)
+
+ option = Option(env='FOO', default='a')
+ self.assertEquals(option.nargs, '?')
+
+ option = Option(env='FOO', default=('a',))
+ self.assertEquals(option.nargs, '?')
+
+ option = Option(env='FOO', default=('a', 'b'))
+ self.assertEquals(option.nargs, '*')
+
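+    # Summarizing the inference checked above: a bool default implies
+    # nargs=0, a single string or 1-tuple implies '?', and a longer tuple
+    # implies '*'.
+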
+ def test_option_option(self):
+ for option in (
+ '--option',
+ '--enable-option',
+ '--disable-option',
+ '--with-option',
+ '--without-option',
+ ):
+ self.assertEquals(Option(option).option, option)
+ self.assertEquals(Option(option, env='FOO').option, option)
+
+ opt = Option(option, default=False)
+ self.assertEquals(opt.option,
+ option.replace('-disable-', '-enable-')
+ .replace('-without-', '-with-'))
+
+ opt = Option(option, default=True)
+ self.assertEquals(opt.option,
+ option.replace('-enable-', '-disable-')
+ .replace('-with-', '-without-'))
+
+ self.assertEquals(Option(env='FOO').option, 'FOO')
+
+ def test_option_choices(self):
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=3, choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ 'Not enough `choices` for `nargs`')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--without-option', nargs=1, choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ 'A `default` must be given along with `choices`')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--without-option', nargs='+', choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ 'A `default` must be given along with `choices`')
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--without-option', default='c', choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ "The `default` value must be one of 'a', 'b'")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--without-option', default=('a', 'c',), choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ "The `default` value must be one of 'a', 'b'")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--without-option', default=('c',), choices=('a', 'b'))
+ self.assertEquals(e.exception.message,
+ "The `default` value must be one of 'a', 'b'")
+
+ option = Option('--with-option', nargs='+', choices=('a', 'b'))
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--with-option=c')
+ self.assertEquals(e.exception.message, "'c' is not one of 'a', 'b'")
+
+ value = option.get_value('--with-option=b,a')
+ self.assertTrue(value)
+ self.assertEquals(PositiveOptionValue(('b', 'a')), value)
+
+ option = Option('--without-option', nargs='*', default='a',
+ choices=('a', 'b'))
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--with-option=c')
+ self.assertEquals(e.exception.message, "'c' is not one of 'a', 'b'")
+
+ value = option.get_value('--with-option=b,a')
+ self.assertTrue(value)
+ self.assertEquals(PositiveOptionValue(('b', 'a')), value)
+
+ # Test nargs inference from choices: with choices given and no explicit
+ # nargs, nargs defaults to 1.
+ option = Option('--with-option', choices=('a', 'b'))
+ self.assertEqual(option.nargs, 1)
+
+ # Test "relative" values
+ option = Option('--with-option', nargs='*', default=('b', 'c'),
+ choices=('a', 'b', 'c', 'd'))
+
+ value = option.get_value('--with-option=+d')
+ self.assertEquals(PositiveOptionValue(('b', 'c', 'd')), value)
+
+ value = option.get_value('--with-option=-b')
+ self.assertEquals(PositiveOptionValue(('c',)), value)
+
+ value = option.get_value('--with-option=-b,+d')
+ self.assertEquals(PositiveOptionValue(('c', 'd')), value)
+
+ # Adding something that is in the default is fine
+ value = option.get_value('--with-option=+b')
+ self.assertEquals(PositiveOptionValue(('b', 'c')), value)
+
+ # Removing something that is not in the default is fine, as long as it
+ # is one of the choices
+ value = option.get_value('--with-option=-a')
+ self.assertEquals(PositiveOptionValue(('b', 'c')), value)
+
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--with-option=-e')
+ self.assertEquals(e.exception.message,
+ "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ # Other "not a choice" errors.
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--with-option=+e')
+ self.assertEquals(e.exception.message,
+ "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--with-option=e')
+ self.assertEquals(e.exception.message,
+ "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ def test_option_value_format(self):
+ val = PositiveOptionValue()
+ self.assertEquals('--with-value', val.format('--with-value'))
+ self.assertEquals('--with-value', val.format('--without-value'))
+ self.assertEquals('--enable-value', val.format('--enable-value'))
+ self.assertEquals('--enable-value', val.format('--disable-value'))
+ self.assertEquals('--value', val.format('--value'))
+ self.assertEquals('VALUE=1', val.format('VALUE'))
+
+ val = PositiveOptionValue(('a',))
+ self.assertEquals('--with-value=a', val.format('--with-value'))
+ self.assertEquals('--with-value=a', val.format('--without-value'))
+ self.assertEquals('--enable-value=a', val.format('--enable-value'))
+ self.assertEquals('--enable-value=a', val.format('--disable-value'))
+ self.assertEquals('--value=a', val.format('--value'))
+ self.assertEquals('VALUE=a', val.format('VALUE'))
+
+ val = PositiveOptionValue(('a', 'b'))
+ self.assertEquals('--with-value=a,b', val.format('--with-value'))
+ self.assertEquals('--with-value=a,b', val.format('--without-value'))
+ self.assertEquals('--enable-value=a,b', val.format('--enable-value'))
+ self.assertEquals('--enable-value=a,b', val.format('--disable-value'))
+ self.assertEquals('--value=a,b', val.format('--value'))
+ self.assertEquals('VALUE=a,b', val.format('VALUE'))
+
+ val = NegativeOptionValue()
+ self.assertEquals('--without-value', val.format('--with-value'))
+ self.assertEquals('--without-value', val.format('--without-value'))
+ self.assertEquals('--disable-value', val.format('--enable-value'))
+ self.assertEquals('--disable-value', val.format('--disable-value'))
+ self.assertEquals('', val.format('--value'))
+ self.assertEquals('VALUE=', val.format('VALUE'))
+
+ def test_option_value(self, name='option', nargs=0, default=None):
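+ # This test doubles as a parameterized helper: the nargs-specific tests
+ # below call it with different arguments and reuse the returned Option.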
+ disabled = name.startswith(('disable-', 'without-'))
+ if disabled:
+ negOptionValue = PositiveOptionValue
+ posOptionValue = NegativeOptionValue
+ else:
+ posOptionValue = PositiveOptionValue
+ negOptionValue = NegativeOptionValue
+ defaultValue = (PositiveOptionValue(default)
+ if default else negOptionValue())
+
+ option = Option('--%s' % name, nargs=nargs, default=default)
+
+ if nargs in (0, '?', '*') or disabled:
+ value = option.get_value('--%s' % name, 'option')
+ self.assertEquals(value, posOptionValue())
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s' % name)
+ if nargs == 1:
+ self.assertEquals(e.exception.message,
+ '--%s takes 1 value' % name)
+ elif nargs == '+':
+ self.assertEquals(e.exception.message,
+ '--%s takes 1 or more values' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes 2 values' % name)
+
+ value = option.get_value('')
+ self.assertEquals(value, defaultValue)
+ self.assertEquals(value.origin, 'default')
+
+ value = option.get_value(None)
+ self.assertEquals(value, defaultValue)
+ self.assertEquals(value.origin, 'default')
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value('MOZ_OPTION=', 'environment')
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value('MOZ_OPTION=1', 'environment')
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value('--foo')
+
+ if nargs in (1, '?', '*', '+') and not disabled:
+ value = option.get_value('--%s=' % name, 'option')
+ self.assertEquals(value, PositiveOptionValue(('',)))
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s=' % name)
+ if disabled:
+ self.assertEquals(e.exception.message,
+ 'Cannot pass a value to --%s' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes %d values' % (name, nargs))
+
+ if nargs in (1, '?', '*', '+') and not disabled:
+ value = option.get_value('--%s=foo' % name, 'option')
+ self.assertEquals(value, PositiveOptionValue(('foo',)))
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s=foo' % name)
+ if disabled:
+ self.assertEquals(e.exception.message,
+ 'Cannot pass a value to --%s' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes %d values' % (name, nargs))
+
+ if nargs in (2, '*', '+') and not disabled:
+ value = option.get_value('--%s=foo,bar' % name, 'option')
+ self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s=foo,bar' % name, 'option')
+ if disabled:
+ self.assertEquals(e.exception.message,
+ 'Cannot pass a value to --%s' % name)
+ elif nargs == '?':
+ self.assertEquals(e.exception.message,
+ '--%s takes 0 or 1 values' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes %d value%s'
+ % (name, nargs, 's' if nargs != 1 else ''))
+
+ option = Option('--%s' % name, env='MOZ_OPTION', nargs=nargs,
+ default=default)
+ if nargs in (0, '?', '*') or disabled:
+ value = option.get_value('--%s' % name, 'option')
+ self.assertEquals(value, posOptionValue())
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s' % name)
+ if disabled:
+ self.assertEquals(e.exception.message,
+ 'Cannot pass a value to --%s' % name)
+ elif nargs == '+':
+ self.assertEquals(e.exception.message,
+ '--%s takes 1 or more values' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes %d value%s'
+ % (name, nargs, 's' if nargs != 1 else ''))
+
+ value = option.get_value('')
+ self.assertEquals(value, defaultValue)
+ self.assertEquals(value.origin, 'default')
+
+ value = option.get_value(None)
+ self.assertEquals(value, defaultValue)
+ self.assertEquals(value.origin, 'default')
+
+ value = option.get_value('MOZ_OPTION=', 'environment')
+ self.assertEquals(value, NegativeOptionValue())
+ self.assertEquals(value.origin, 'environment')
+
+ if nargs in (0, '?', '*'):
+ value = option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(value, PositiveOptionValue())
+ self.assertEquals(value.origin, 'environment')
+ elif nargs in (1, '+'):
+ value = option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('1',)))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(e.exception.message, 'MOZ_OPTION takes 2 values')
+
+ if nargs in (1, '?', '*', '+') and not disabled:
+ value = option.get_value('--%s=' % name, 'option')
+ self.assertEquals(value, PositiveOptionValue(('',)))
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s=' % name, 'option')
+ if disabled:
+ self.assertEquals(e.exception.message,
+ 'Cannot pass a value to --%s' % name)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s takes %d values' % (name, nargs))
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value('--foo', 'option')
+
+ if nargs in (1, '?', '*', '+'):
+ value = option.get_value('MOZ_OPTION=foo', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('foo',)))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('MOZ_OPTION=foo', 'environment')
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes %d values' % nargs)
+
+ if nargs in (2, '*', '+'):
+ value = option.get_value('MOZ_OPTION=foo,bar', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('MOZ_OPTION=foo,bar', 'environment')
+ if nargs == '?':
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes 0 or 1 values')
+ else:
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes %d value%s'
+ % (nargs, 's' if nargs != 1 else ''))
+
+ if disabled:
+ return option
+
+ env_option = Option(env='MOZ_OPTION', nargs=nargs, default=default)
+ with self.assertRaises(AssertionError):
+ env_option.get_value('--%s' % name)
+
+ value = env_option.get_value('')
+ self.assertEquals(value, defaultValue)
+ self.assertEquals(value.origin, 'default')
+
+ value = env_option.get_value('MOZ_OPTION=', 'environment')
+ self.assertEquals(value, negOptionValue())
+ self.assertEquals(value.origin, 'environment')
+
+ if nargs in (0, '?', '*'):
+ value = env_option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(value, posOptionValue())
+ self.assertTrue(value)
+ self.assertEquals(value.origin, 'environment')
+ elif nargs in (1, '+'):
+ value = env_option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('1',)))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value('MOZ_OPTION=1', 'environment')
+ self.assertEquals(e.exception.message, 'MOZ_OPTION takes 2 values')
+
+ with self.assertRaises(AssertionError) as e:
+ env_option.get_value('--%s' % name)
+
+ with self.assertRaises(AssertionError) as e:
+ env_option.get_value('--foo')
+
+ if nargs in (1, '?', '*', '+'):
+ value = env_option.get_value('MOZ_OPTION=foo', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('foo',)))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value('MOZ_OPTION=foo', 'environment')
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes %d values' % nargs)
+
+ if nargs in (2, '*', '+'):
+ value = env_option.get_value('MOZ_OPTION=foo,bar', 'environment')
+ self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
+ self.assertEquals(value.origin, 'environment')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value('MOZ_OPTION=foo,bar', 'environment')
+ if nargs == '?':
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes 0 or 1 values')
+ else:
+ self.assertEquals(e.exception.message,
+ 'MOZ_OPTION takes %d value%s'
+ % (nargs, 's' if nargs != 1 else ''))
+
+ return option
+
+ def test_option_value_enable(self, enable='enable', disable='disable',
+ nargs=0, default=None):
+ option = self.test_option_value('%s-option' % enable, nargs=nargs,
+ default=default)
+
+ value = option.get_value('--%s-option' % disable, 'option')
+ self.assertEquals(value, NegativeOptionValue())
+ self.assertEquals(value.origin, 'option')
+
+ option = self.test_option_value('%s-option' % disable, nargs=nargs,
+ default=default)
+
+ if nargs in (0, '?', '*'):
+ value = option.get_value('--%s-option' % enable, 'option')
+ self.assertEquals(value, PositiveOptionValue())
+ self.assertEquals(value.origin, 'option')
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value('--%s-option' % enable, 'option')
+ if nargs == 1:
+ self.assertEquals(e.exception.message,
+ '--%s-option takes 1 value' % enable)
+ elif nargs == '+':
+ self.assertEquals(e.exception.message,
+ '--%s-option takes 1 or more values'
+ % enable)
+ else:
+ self.assertEquals(e.exception.message,
+ '--%s-option takes 2 values' % enable)
+
+ def test_option_value_with(self):
+ self.test_option_value_enable('with', 'without')
+
+ def test_option_value_invalid_nargs(self):
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs='foo')
+ self.assertEquals(e.exception.message,
+ "nargs must be a positive integer, '?', '*' or '+'")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--option', nargs=-2)
+ self.assertEquals(e.exception.message,
+ "nargs must be a positive integer, '?', '*' or '+'")
+
+ def test_option_value_nargs_1(self):
+ self.test_option_value(nargs=1)
+ self.test_option_value(nargs=1, default=('a',))
+ self.test_option_value_enable(nargs=1, default=('a',))
+
+ # A default satisfying nargs is required for a --disable- option
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--disable-option', nargs=1)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ def test_option_value_nargs_2(self):
+ self.test_option_value(nargs=2)
+ self.test_option_value(nargs=2, default=('a', 'b'))
+ self.test_option_value_enable(nargs=2, default=('a', 'b'))
+
+ # A default satisfying nargs is required for a --disable- option
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--disable-option', nargs=2)
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+ def test_option_value_nargs_0_or_1(self):
+ self.test_option_value(nargs='?')
+ self.test_option_value(nargs='?', default=('a',))
+ self.test_option_value_enable(nargs='?')
+ self.test_option_value_enable(nargs='?', default=('a',))
+
+ def test_option_value_nargs_0_or_more(self):
+ self.test_option_value(nargs='*')
+ self.test_option_value(nargs='*', default=('a',))
+ self.test_option_value(nargs='*', default=('a', 'b'))
+ self.test_option_value_enable(nargs='*')
+ self.test_option_value_enable(nargs='*', default=('a',))
+ self.test_option_value_enable(nargs='*', default=('a', 'b'))
+
+ def test_option_value_nargs_1_or_more(self):
+ self.test_option_value(nargs='+')
+ self.test_option_value(nargs='+', default=('a',))
+ self.test_option_value(nargs='+', default=('a', 'b'))
+ self.test_option_value_enable(nargs='+', default=('a',))
+ self.test_option_value_enable(nargs='+', default=('a', 'b'))
+
+ # A default satisfying nargs is required for a --disable- option
+ with self.assertRaises(InvalidOptionError) as e:
+ Option('--disable-option', nargs='+')
+ self.assertEquals(e.exception.message,
+ "The given `default` doesn't satisfy `nargs`")
+
+
+class TestCommandLineHelper(unittest.TestCase):
+ def test_basic(self):
+ helper = CommandLineHelper({}, ['cmd', '--foo', '--bar'])
+
+ self.assertEquals(['--foo', '--bar'], list(helper))
+
+ helper.add('--enable-qux')
+
+ self.assertEquals(['--foo', '--bar', '--enable-qux'], list(helper))
+
+ value, option = helper.handle(Option('--bar'))
+ self.assertEquals(['--foo', '--enable-qux'], list(helper))
+ self.assertEquals(PositiveOptionValue(), value)
+ self.assertEquals('--bar', option)
+
+ value, option = helper.handle(Option('--baz'))
+ self.assertEquals(['--foo', '--enable-qux'], list(helper))
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals(None, option)
+
+ def test_precedence(self):
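+ # As exercised below: the command line beats the environment, a later
+ # argument beats an earlier one, and a FOO=... argument given on the
+ # command line counts as command-line origin.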
+ foo = Option('--with-foo', nargs='*')
+ helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--with-foo=a,b', option)
+
+ helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b',
+ '--without-foo'])
+ value, option = helper.handle(foo)
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--without-foo', option)
+
+ helper = CommandLineHelper({}, ['cmd', '--without-foo',
+ '--with-foo=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--with-foo=a,b', option)
+
+ foo = Option('--with-foo', env='FOO', nargs='*')
+ helper = CommandLineHelper({'FOO': ''}, ['cmd', '--with-foo=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--with-foo=a,b', option)
+
+ helper = CommandLineHelper({'FOO': 'a,b'}, ['cmd', '--without-foo'])
+ value, option = helper.handle(foo)
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--without-foo', option)
+
+ helper = CommandLineHelper({'FOO': ''}, ['cmd', '--with-bar=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals('environment', value.origin)
+ self.assertEquals('FOO=', option)
+
+ helper = CommandLineHelper({'FOO': 'a,b'}, ['cmd', '--without-bar'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('environment', value.origin)
+ self.assertEquals('FOO=a,b', option)
+
+ helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b', 'FOO='])
+ value, option = helper.handle(foo)
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('FOO=', option)
+
+ helper = CommandLineHelper({}, ['cmd', '--without-foo', 'FOO=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('FOO=a,b', option)
+
+ helper = CommandLineHelper({}, ['cmd', 'FOO=', '--with-foo=a,b'])
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b')), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--with-foo=a,b', option)
+
+ helper = CommandLineHelper({}, ['cmd', 'FOO=a,b', '--without-foo'])
+ value, option = helper.handle(foo)
+ self.assertEquals(NegativeOptionValue(), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--without-foo', option)
+
+ def test_extra_args(self):
+ foo = Option('--with-foo', env='FOO', nargs='*')
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
+ self.assertEquals('other-origin', value.origin)
+ self.assertEquals('FOO=a,b,c', option)
+
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ helper.add('--with-foo=a,b,c', 'other-origin')
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
+ self.assertEquals('other-origin', value.origin)
+ self.assertEquals('--with-foo=a,b,c', option)
+
+ # Adding conflicting options is not allowed.
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.add('FOO=', 'other-origin')
+ self.assertEqual('FOO=', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('FOO=a,b,c', cm.exception.old_arg)
+ self.assertEqual('other-origin', cm.exception.old_origin)
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.add('FOO=a,b', 'other-origin')
+ self.assertEqual('FOO=a,b', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('FOO=a,b,c', cm.exception.old_arg)
+ self.assertEqual('other-origin', cm.exception.old_origin)
+ # But adding the same value again is allowed.
+ helper.add('FOO=a,b,c', 'other-origin')
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
+ self.assertEquals('other-origin', value.origin)
+ self.assertEquals('FOO=a,b,c', option)
+
+ # The same rule as above applies when mixing the option form and the
+ # variable form, but the conflict can only be detected when handle() is
+ # called, not when .add() is.
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ helper.add('--without-foo', 'other-origin')
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.handle(foo)
+ self.assertEqual('--without-foo', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('FOO=a,b,c', cm.exception.old_arg)
+ self.assertEqual('other-origin', cm.exception.old_origin)
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ helper.add('--with-foo=a,b', 'other-origin')
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.handle(foo)
+ self.assertEqual('--with-foo=a,b', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('FOO=a,b,c', cm.exception.old_arg)
+ self.assertEqual('other-origin', cm.exception.old_origin)
+ helper = CommandLineHelper({}, ['cmd'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ helper.add('--with-foo=a,b,c', 'other-origin')
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
+ self.assertEquals('other-origin', value.origin)
+ self.assertEquals('--with-foo=a,b,c', option)
+
+ # Conflicts are also not allowed against what is in the
+ # environment/on the command line.
+ helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b'])
+ helper.add('FOO=a,b,c', 'other-origin')
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.handle(foo)
+ self.assertEqual('FOO=a,b,c', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('--with-foo=a,b', cm.exception.old_arg)
+ self.assertEqual('command-line', cm.exception.old_origin)
+
+ helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b'])
+ helper.add('--without-foo', 'other-origin')
+ with self.assertRaises(ConflictingOptionError) as cm:
+ helper.handle(foo)
+ self.assertEqual('--without-foo', cm.exception.arg)
+ self.assertEqual('other-origin', cm.exception.origin)
+ self.assertEqual('--with-foo=a,b', cm.exception.old_arg)
+ self.assertEqual('command-line', cm.exception.old_origin)
+
+ def test_possible_origins(self):
+ with self.assertRaises(InvalidOptionError):
+ Option('--foo', possible_origins='command-line')
+
+ helper = CommandLineHelper({'BAZ': '1'}, ['cmd', '--foo', '--bar'])
+ foo = Option('--foo',
+ possible_origins=('command-line',))
+ value, option = helper.handle(foo)
+ self.assertEquals(PositiveOptionValue(), value)
+ self.assertEquals('command-line', value.origin)
+ self.assertEquals('--foo', option)
+
+ bar = Option('--bar',
+ possible_origins=('mozconfig',))
+ with self.assertRaisesRegexp(InvalidOptionError,
+ "--bar can not be set by command-line. Values are accepted from: mozconfig"):
+ helper.handle(bar)
+
+ baz = Option(env='BAZ',
+ possible_origins=('implied',))
+ with self.assertRaisesRegexp(InvalidOptionError,
+ "BAZ=1 can not be set by environment. Values are accepted from: implied"):
+ helper.handle(baz)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
new file mode 100644
index 000000000..2ef93792b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
@@ -0,0 +1,1271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+
+from StringIO import StringIO
+
+from mozunit import main
+
+from common import BaseConfigureTest
+from mozbuild.configure.util import Version
+from mozbuild.util import memoize
+from mozpack import path as mozpath
+from test_toolchain_helpers import (
+ FakeCompiler,
+ CompilerResult,
+)
+
+
+DEFAULT_C99 = {
+ '__STDC_VERSION__': '199901L',
+}
+
+DEFAULT_C11 = {
+ '__STDC_VERSION__': '201112L',
+}
+
+DEFAULT_CXX_97 = {
+ '__cplusplus': '199711L',
+}
+
+DEFAULT_CXX_11 = {
+ '__cplusplus': '201103L',
+}
+
+DEFAULT_CXX_14 = {
+ '__cplusplus': '201402L',
+}
+
+SUPPORTS_GNU99 = {
+ '-std=gnu99': DEFAULT_C99,
+}
+
+SUPPORTS_GNUXX11 = {
+ '-std=gnu++11': DEFAULT_CXX_11,
+}
+
+SUPPORTS_CXX14 = {
+ '-std=c++14': DEFAULT_CXX_14,
+}
+
+
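+# Roughly speaking, each FakeCompiler definition maps command-line flags to
+# the preprocessor macros the fake compiler should report: a bare macro dict
+# applies unconditionally, a None key means "no flags", and patterns like
+# '*.cpp' apply per input file. Definitions compose with `+`, later entries
+# overriding earlier ones, which is how the variants below are built.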
+@memoize
+def GCC_BASE(version):
+ version = Version(version)
+ return FakeCompiler({
+ '__GNUC__': version.major,
+ '__GNUC_MINOR__': version.minor,
+ '__GNUC_PATCHLEVEL__': version.patch,
+ '__STDC__': 1,
+ '__ORDER_LITTLE_ENDIAN__': 1234,
+ '__ORDER_BIG_ENDIAN__': 4321,
+ })
+
+
+@memoize
+def GCC(version):
+ return GCC_BASE(version) + SUPPORTS_GNU99
+
+
+@memoize
+def GXX(version):
+ return GCC_BASE(version) + DEFAULT_CXX_97 + SUPPORTS_GNUXX11
+
+
+GCC_4_7 = GCC('4.7.3')
+GXX_4_7 = GXX('4.7.3')
+GCC_4_9 = GCC('4.9.3')
+GXX_4_9 = GXX('4.9.3')
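+# GCC 5 defaults to the C11 standard, hence DEFAULT_C11 with no extra flag.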
+GCC_5 = GCC('5.2.1') + DEFAULT_C11
+GXX_5 = GXX('5.2.1')
+
+GCC_PLATFORM_LITTLE_ENDIAN = {
+ '__BYTE_ORDER__': 1234,
+}
+
+GCC_PLATFORM_BIG_ENDIAN = {
+ '__BYTE_ORDER__': 4321,
+}
+
+GCC_PLATFORM_X86 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {
+ None: {
+ '__i386__': 1,
+ },
+ '-m64': {
+ '__i386__': False,
+ '__x86_64__': 1,
+ },
+}
+
+GCC_PLATFORM_X86_64 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {
+ None: {
+ '__x86_64__': 1,
+ },
+ '-m32': {
+ '__x86_64__': False,
+ '__i386__': 1,
+ },
+}
+
+GCC_PLATFORM_ARM = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {
+ '__arm__': 1,
+}
+
+GCC_PLATFORM_LINUX = {
+ '__linux__': 1,
+}
+
+GCC_PLATFORM_DARWIN = {
+ '__APPLE__': 1,
+}
+
+GCC_PLATFORM_WIN = {
+ '_WIN32': 1,
+ 'WINNT': 1,
+}
+
+GCC_PLATFORM_X86_LINUX = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_LINUX)
+GCC_PLATFORM_X86_64_LINUX = FakeCompiler(GCC_PLATFORM_X86_64,
+ GCC_PLATFORM_LINUX)
+GCC_PLATFORM_ARM_LINUX = FakeCompiler(GCC_PLATFORM_ARM, GCC_PLATFORM_LINUX)
+GCC_PLATFORM_X86_OSX = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_DARWIN)
+GCC_PLATFORM_X86_64_OSX = FakeCompiler(GCC_PLATFORM_X86_64,
+ GCC_PLATFORM_DARWIN)
+GCC_PLATFORM_X86_WIN = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_WIN)
+GCC_PLATFORM_X86_64_WIN = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_WIN)
+
+
+@memoize
+def CLANG_BASE(version):
+ version = Version(version)
+ return FakeCompiler({
+ '__clang__': 1,
+ '__clang_major__': version.major,
+ '__clang_minor__': version.minor,
+ '__clang_patchlevel__': version.patch,
+ })
+
+
+@memoize
+def CLANG(version):
+ return GCC_BASE('4.2.1') + CLANG_BASE(version) + SUPPORTS_GNU99
+
+
+@memoize
+def CLANGXX(version):
+ return (GCC_BASE('4.2.1') + CLANG_BASE(version) + DEFAULT_CXX_97 +
+ SUPPORTS_GNUXX11)
+
+
+CLANG_3_3 = CLANG('3.3.0') + DEFAULT_C99
+CLANGXX_3_3 = CLANGXX('3.3.0')
+CLANG_3_6 = CLANG('3.6.2') + DEFAULT_C11
+CLANGXX_3_6 = CLANGXX('3.6.2') + {
+ '-std=gnu++11': {
+ '__has_feature(cxx_alignof)': '1',
+ },
+}
+
+
+def CLANG_PLATFORM(gcc_platform):
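+ # clang selects its target from --target: `base` maps each --target flag
+ # to the no-flag macros of the matching GCC platform, while
+ # `undo_gcc_platform` masks the host platform's own macros (setting them
+ # to False) so that only the target's macros remain in effect.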
+ base = {
+ '--target=x86_64-linux-gnu': GCC_PLATFORM_X86_64_LINUX[None],
+ '--target=x86_64-darwin11.2.0': GCC_PLATFORM_X86_64_OSX[None],
+ '--target=i686-linux-gnu': GCC_PLATFORM_X86_LINUX[None],
+ '--target=i686-darwin11.2.0': GCC_PLATFORM_X86_OSX[None],
+ '--target=arm-linux-gnu': GCC_PLATFORM_ARM_LINUX[None],
+ }
+ undo_gcc_platform = {
+ k: {symbol: False for symbol in gcc_platform[None]}
+ for k in base
+ }
+ return FakeCompiler(gcc_platform, undo_gcc_platform, base)
+
+
+CLANG_PLATFORM_X86_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_LINUX)
+CLANG_PLATFORM_X86_64_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_LINUX)
+CLANG_PLATFORM_X86_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_OSX)
+CLANG_PLATFORM_X86_64_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_OSX)
+CLANG_PLATFORM_X86_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_WIN)
+CLANG_PLATFORM_X86_64_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_64_WIN)
+
+
+@memoize
+def VS(version):
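+ # e.g. VS('19.00.24213') fakes _MSC_VER=1900 and _MSC_FULL_VER=190024213.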
+ version = Version(version)
+ return FakeCompiler({
+ None: {
+ '_MSC_VER': '%02d%02d' % (version.major, version.minor),
+ '_MSC_FULL_VER': '%02d%02d%05d' % (version.major, version.minor,
+ version.patch),
+ },
+ '*.cpp': DEFAULT_CXX_97,
+ })
+
+
+VS_2013u2 = VS('18.00.30501')
+VS_2013u3 = VS('18.00.30723')
+VS_2015 = VS('19.00.23026')
+VS_2015u1 = VS('19.00.23506')
+VS_2015u2 = VS('19.00.23918')
+VS_2015u3 = VS('19.00.24213')
+
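+# These mirror MSVC's real predefined macros (_M_IX86 is 600 on x86,
+# _M_X64 is 100 on x64).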
+VS_PLATFORM_X86 = {
+ '_M_IX86': 600,
+ '_WIN32': 1,
+}
+
+VS_PLATFORM_X86_64 = {
+ '_M_X64': 100,
+ '_WIN32': 1,
+ '_WIN64': 1,
+}
+
+# Note: In reality, the -std=gnu* options are only supported when preceded by
+# -Xclang.
+CLANG_CL_3_9 = (CLANG_BASE('3.9.0') + VS('18.00.00000') + DEFAULT_C11 +
+ SUPPORTS_GNU99 + SUPPORTS_GNUXX11 + SUPPORTS_CXX14) + {
+ '*.cpp': {
+ '__STDC_VERSION__': False,
+ '__cplusplus': '201103L',
+ },
+ '-fms-compatibility-version=19.00.24213': VS('19.00.24213')[None],
+}
+
+CLANG_CL_PLATFORM_X86 = FakeCompiler(VS_PLATFORM_X86, GCC_PLATFORM_X86[None])
+CLANG_CL_PLATFORM_X86_64 = FakeCompiler(VS_PLATFORM_X86_64, GCC_PLATFORM_X86_64[None])
+
+
+class BaseToolchainTest(BaseConfigureTest):
+ def setUp(self):
+ super(BaseToolchainTest, self).setUp()
+ self.out = StringIO()
+ self.logger = logging.getLogger('BaseToolchainTest')
+ self.logger.setLevel(logging.ERROR)
+ self.handler = logging.StreamHandler(self.out)
+ self.logger.addHandler(self.handler)
+
+ def tearDown(self):
+ self.logger.removeHandler(self.handler)
+ del self.handler
+ del self.out
+ super(BaseToolchainTest, self).tearDown()
+
+ def do_toolchain_test(self, paths, results, args=[], environ={}):
+ '''Helper to test the toolchain checks from toolchain.configure.
+
+ - `paths` is a dict associating compiler paths to FakeCompiler
+ definitions from above.
+ - `results` is a dict associating result variable names from
+ toolchain.configure (c_compiler, cxx_compiler, host_c_compiler,
+ host_cxx_compiler) with a result.
+ The result can either be an error string, or a CompilerResult
+ corresponding to the object returned by toolchain.configure checks.
+ When the results for host_c_compiler are identical to c_compiler,
+ they can be omitted. Likewise for host_cxx_compiler vs.
+ cxx_compiler.
+ '''
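+ # A minimal call, mirroring LinuxToolchainTest.test_gcc below (sketch):
+ # self.do_toolchain_test(self.PATHS,
+ # {'c_compiler': self.GCC_4_9_RESULT,
+ # 'cxx_compiler': self.GXX_4_9_RESULT})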
+ environ = dict(environ)
+ if 'PATH' not in environ:
+ environ['PATH'] = os.pathsep.join(
+ mozpath.abspath(p) for p in ('/bin', '/usr/bin'))
+
+ sandbox = self.get_sandbox(paths, {}, args, environ,
+ logger=self.logger)
+
+ for var in ('c_compiler', 'cxx_compiler', 'host_c_compiler',
+ 'host_cxx_compiler'):
+ if var in results:
+ result = results[var]
+ elif var.startswith('host_'):
+ result = results.get(var[5:], {})
+ else:
+ result = {}
+ try:
+ self.out.truncate(0)
+ compiler = sandbox._value_for(sandbox[var])
+ # Include var on both sides of the comparison so that a failing
+ # assertion message makes clear which variable mismatched.
+ self.assertEquals((var, compiler), (var, result))
+ except SystemExit:
+ self.assertEquals((var, result),
+ (var, self.out.getvalue().strip()))
+ return
+
+
+class LinuxToolchainTest(BaseToolchainTest):
+ PATHS = {
+ '/usr/bin/gcc': GCC_4_9 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/g++': GXX_4_9 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/gcc-4.7': GCC_4_7 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/g++-4.7': GXX_4_7 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/gcc-5': GCC_5 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/g++-5': GXX_5 + GCC_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang': CLANG_3_6 + CLANG_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang++': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang-3.6': CLANG_3_6 + CLANG_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang++-3.6': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang-3.3': CLANG_3_3 + CLANG_PLATFORM_X86_64_LINUX,
+ '/usr/bin/clang++-3.3': CLANGXX_3_3 + CLANG_PLATFORM_X86_64_LINUX,
+ }
+ GCC_4_7_RESULT = ('Only GCC 4.8 or newer is supported '
+ '(found version 4.7.3).')
+ GXX_4_7_RESULT = GCC_4_7_RESULT
+ GCC_4_9_RESULT = CompilerResult(
+ flags=['-std=gnu99'],
+ version='4.9.3',
+ type='gcc',
+ compiler='/usr/bin/gcc',
+ language='C',
+ )
+ GXX_4_9_RESULT = CompilerResult(
+ flags=['-std=gnu++11'],
+ version='4.9.3',
+ type='gcc',
+ compiler='/usr/bin/g++',
+ language='C++',
+ )
+ GCC_5_RESULT = CompilerResult(
+ flags=['-std=gnu99'],
+ version='5.2.1',
+ type='gcc',
+ compiler='/usr/bin/gcc-5',
+ language='C',
+ )
+ GXX_5_RESULT = CompilerResult(
+ flags=['-std=gnu++11'],
+ version='5.2.1',
+ type='gcc',
+ compiler='/usr/bin/g++-5',
+ language='C++',
+ )
+ CLANG_3_3_RESULT = CompilerResult(
+ flags=[],
+ version='3.3.0',
+ type='clang',
+ compiler='/usr/bin/clang-3.3',
+ language='C',
+ )
+ CLANGXX_3_3_RESULT = 'Only clang/llvm 3.6 or newer is supported.'
+ CLANG_3_6_RESULT = CompilerResult(
+ flags=['-std=gnu99'],
+ version='3.6.2',
+ type='clang',
+ compiler='/usr/bin/clang',
+ language='C',
+ )
+ CLANGXX_3_6_RESULT = CompilerResult(
+ flags=['-std=gnu++11'],
+ version='3.6.2',
+ type='clang',
+ compiler='/usr/bin/clang++',
+ language='C++',
+ )
+
+ def test_gcc(self):
+ # We'll try gcc and clang, and find gcc first.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': self.GXX_4_9_RESULT,
+ })
+
+ def test_unsupported_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_7_RESULT,
+ }, environ={
+ 'CC': 'gcc-4.7',
+ 'CXX': 'g++-4.7',
+ })
+
+ # Maybe this should be reporting the mismatched version instead.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': self.GXX_4_7_RESULT,
+ }, environ={
+ 'CXX': 'g++-4.7',
+ })
+
+ def test_overridden_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_5_RESULT,
+ 'cxx_compiler': self.GXX_5_RESULT,
+ }, environ={
+ 'CC': 'gcc-5',
+ 'CXX': 'g++-5',
+ })
+
+ def test_guess_cxx(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_5_RESULT,
+ 'cxx_compiler': self.GXX_5_RESULT,
+ }, environ={
+ 'CC': 'gcc-5',
+ })
+
+ def test_mismatched_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': (
+ 'The target C compiler is version 4.9.3, while the target '
+ 'C++ compiler is version 5.2.1. Need to use the same compiler '
+ 'version.'),
+ }, environ={
+ 'CXX': 'g++-5',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': self.GXX_4_9_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': (
+ 'The host C compiler is version 4.9.3, while the host '
+ 'C++ compiler is version 5.2.1. Need to use the same compiler '
+ 'version.'),
+ }, environ={
+ 'HOST_CXX': 'g++-5',
+ })
+
+ def test_mismatched_compiler(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': (
+ 'The target C compiler is gcc, while the target C++ compiler '
+ 'is clang. Need to use the same compiler suite.'),
+ }, environ={
+ 'CXX': 'clang++',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': self.GXX_4_9_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': (
+ 'The host C compiler is gcc, while the host C++ compiler '
+ 'is clang. Need to use the same compiler suite.'),
+ }, environ={
+ 'HOST_CXX': 'clang++',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': '`%s` is not a C compiler.'
+ % mozpath.abspath('/usr/bin/g++'),
+ }, environ={
+ 'CC': 'g++',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': '`%s` is not a C++ compiler.'
+ % mozpath.abspath('/usr/bin/gcc'),
+ }, environ={
+ 'CXX': 'gcc',
+ })
+
+ def test_clang(self):
+ # We'll try gcc and clang; since there is no plain gcc (gcc-x.y doesn't
+ # count), clang is found.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) not in ('gcc', 'g++')
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.CLANG_3_6_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT,
+ })
+
+ def test_guess_cxx_clang(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'compiler': '/usr/bin/clang-3.6',
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'compiler': '/usr/bin/clang++-3.6',
+ },
+ }, environ={
+ 'CC': 'clang-3.6',
+ })
+
+ def test_unsupported_clang(self):
+ # clang 3.3 C compiler is perfectly fine, but we need more for C++.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_3_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_3_RESULT,
+ }, environ={
+ 'CC': 'clang-3.3',
+ 'CXX': 'clang++-3.3',
+ })
+
+ def test_no_supported_compiler(self):
+ # Even if there are gcc-x.y or clang-x.y compilers available, we
+ # don't try them. This could be considered something to improve.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) not in ('gcc', 'g++', 'clang', 'clang++')
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': 'Cannot find the target C compiler',
+ })
+
+ def test_absolute_path(self):
+ paths = dict(self.PATHS)
+ paths.update({
+ '/opt/clang/bin/clang': paths['/usr/bin/clang'],
+ '/opt/clang/bin/clang++': paths['/usr/bin/clang++'],
+ })
+ result = {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'compiler': '/opt/clang/bin/clang',
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'compiler': '/opt/clang/bin/clang++'
+ },
+ }
+ self.do_toolchain_test(paths, result, environ={
+ 'CC': '/opt/clang/bin/clang',
+ 'CXX': '/opt/clang/bin/clang++',
+ })
+ # With CXX guess too.
+ self.do_toolchain_test(paths, result, environ={
+ 'CC': '/opt/clang/bin/clang',
+ })
+
+ def test_atypical_name(self):
+ paths = dict(self.PATHS)
+ paths.update({
+ '/usr/bin/afl-clang-fast': paths['/usr/bin/clang'],
+ '/usr/bin/afl-clang-fast++': paths['/usr/bin/clang++'],
+ })
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'compiler': '/usr/bin/afl-clang-fast',
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'compiler': '/usr/bin/afl-clang-fast++',
+ },
+ }, environ={
+ 'CC': 'afl-clang-fast',
+ 'CXX': 'afl-clang-fast++',
+ })
+
+ def test_mixed_compilers(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ 'HOST_CC': 'gcc',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ 'CXX': 'clang++',
+ 'HOST_CC': 'gcc',
+ })
+
+
+class LinuxSimpleCrossToolchainTest(BaseToolchainTest):
+ TARGET = 'i686-pc-linux-gnu'
+ PATHS = LinuxToolchainTest.PATHS
+ GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT
+ GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+
+ def test_cross_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT + {
+ 'flags': ['-m32']
+ },
+ 'cxx_compiler': self.GXX_4_9_RESULT + {
+ 'flags': ['-m32']
+ },
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ })
+
+ def test_cross_clang(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'flags': ['--target=i686-linux-gnu'],
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'flags': ['--target=i686-linux-gnu'],
+ },
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ })
+
+
+class LinuxX86_64CrossToolchainTest(BaseToolchainTest):
+ HOST = 'i686-pc-linux-gnu'
+ TARGET = 'x86_64-pc-linux-gnu'
+ PATHS = {
+ '/usr/bin/gcc': GCC_4_9 + GCC_PLATFORM_X86_LINUX,
+ '/usr/bin/g++': GXX_4_9 + GCC_PLATFORM_X86_LINUX,
+ '/usr/bin/clang': CLANG_3_6 + CLANG_PLATFORM_X86_LINUX,
+ '/usr/bin/clang++': CLANGXX_3_6 + CLANG_PLATFORM_X86_LINUX,
+ }
+ GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT
+ GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+
+ def test_cross_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_9_RESULT + {
+ 'flags': ['-m64']
+ },
+ 'cxx_compiler': self.GXX_4_9_RESULT + {
+ 'flags': ['-m64']
+ },
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ })
+
+ def test_cross_clang(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'flags': ['--target=x86_64-linux-gnu'],
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'flags': ['--target=x86_64-linux-gnu'],
+ },
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ })
+
+
+class OSXToolchainTest(BaseToolchainTest):
+ HOST = 'x86_64-apple-darwin11.2.0'
+ PATHS = {
+ '/usr/bin/gcc': GCC_4_9 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/g++': GXX_4_9 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/gcc-4.7': GCC_4_7 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/g++-4.7': GXX_4_7 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/gcc-5': GCC_5 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/g++-5': GXX_5 + GCC_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang': CLANG_3_6 + CLANG_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang++': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang-3.6': CLANG_3_6 + CLANG_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang++-3.6': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang-3.3': CLANG_3_3 + CLANG_PLATFORM_X86_64_OSX,
+ '/usr/bin/clang++-3.3': CLANGXX_3_3 + CLANG_PLATFORM_X86_64_OSX,
+ }
+ CLANG_3_3_RESULT = LinuxToolchainTest.CLANG_3_3_RESULT
+ CLANGXX_3_3_RESULT = LinuxToolchainTest.CLANGXX_3_3_RESULT
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+ GCC_4_7_RESULT = LinuxToolchainTest.GCC_4_7_RESULT
+ GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT
+ GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT
+
+ def test_clang(self):
+ # We only try clang because gcc is known not to work.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT,
+ })
+
+ def test_not_gcc(self):
+ # We won't pick GCC if it's the only thing available.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) not in ('clang', 'clang++')
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': 'Cannot find the target C compiler',
+ })
+
+ def test_unsupported_clang(self):
+ # clang 3.3 C compiler is perfectly fine, but we need more for C++.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_3_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_3_RESULT,
+ }, environ={
+ 'CC': 'clang-3.3',
+ 'CXX': 'clang++-3.3',
+ })
+
+ def test_forced_gcc(self):
+ # GCC can still be forced if the user really wants it.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_5_RESULT,
+ 'cxx_compiler': self.GXX_5_RESULT,
+ }, environ={
+ 'CC': 'gcc-5',
+ 'CXX': 'g++-5',
+ })
+
+ def test_forced_unsupported_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_7_RESULT,
+ }, environ={
+ 'CC': 'gcc-4.7',
+ 'CXX': 'g++-4.7',
+ })
+
+
+class WindowsToolchainTest(BaseToolchainTest):
+ HOST = 'i686-pc-mingw32'
+
+ # For the purpose of this test, it doesn't matter that the paths are not
+ # real Windows paths.
+ PATHS = {
+ '/opt/VS_2013u2/bin/cl': VS_2013u2 + VS_PLATFORM_X86,
+ '/opt/VS_2013u3/bin/cl': VS_2013u3 + VS_PLATFORM_X86,
+ '/opt/VS_2015/bin/cl': VS_2015 + VS_PLATFORM_X86,
+ '/opt/VS_2015u1/bin/cl': VS_2015u1 + VS_PLATFORM_X86,
+ '/opt/VS_2015u2/bin/cl': VS_2015u2 + VS_PLATFORM_X86,
+ '/usr/bin/cl': VS_2015u3 + VS_PLATFORM_X86,
+ '/usr/bin/clang-cl': CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86,
+ '/usr/bin/gcc': GCC_4_9 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/g++': GXX_4_9 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/gcc-4.7': GCC_4_7 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/g++-4.7': GXX_4_7 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/gcc-5': GCC_5 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/g++-5': GXX_5 + GCC_PLATFORM_X86_WIN,
+ '/usr/bin/clang': CLANG_3_6 + CLANG_PLATFORM_X86_WIN,
+ '/usr/bin/clang++': CLANGXX_3_6 + CLANG_PLATFORM_X86_WIN,
+ '/usr/bin/clang-3.6': CLANG_3_6 + CLANG_PLATFORM_X86_WIN,
+ '/usr/bin/clang++-3.6': CLANGXX_3_6 + CLANG_PLATFORM_X86_WIN,
+ '/usr/bin/clang-3.3': CLANG_3_3 + CLANG_PLATFORM_X86_WIN,
+ '/usr/bin/clang++-3.3': CLANGXX_3_3 + CLANG_PLATFORM_X86_WIN,
+ }
+
+ VS_2013u2_RESULT = (
+ 'This version (18.00.30501) of the MSVC compiler is not supported.\n'
+ 'You must install Visual C++ 2015 Update 3 or newer in order to build.\n'
+ 'See https://developer.mozilla.org/en/Windows_Build_Prerequisites')
+ VS_2013u3_RESULT = (
+ 'This version (18.00.30723) of the MSVC compiler is not supported.\n'
+ 'You must install Visual C++ 2015 Update 3 or newer in order to build.\n'
+ 'See https://developer.mozilla.org/en/Windows_Build_Prerequisites')
+ VS_2015_RESULT = (
+ 'This version (19.00.23026) of the MSVC compiler is not supported.\n'
+ 'You must install Visual C++ 2015 Update 3 or newer in order to build.\n'
+ 'See https://developer.mozilla.org/en/Windows_Build_Prerequisites')
+ VS_2015u1_RESULT = (
+ 'This version (19.00.23506) of the MSVC compiler is not supported.\n'
+ 'You must install Visual C++ 2015 Update 3 or newer in order to build.\n'
+ 'See https://developer.mozilla.org/en/Windows_Build_Prerequisites')
+ VS_2015u2_RESULT = (
+ 'This version (19.00.23918) of the MSVC compiler is not supported.\n'
+ 'You must install Visual C++ 2015 Update 3 or newer in order to build.\n'
+ 'See https://developer.mozilla.org/en/Windows_Build_Prerequisites')
+ VS_2015u3_RESULT = CompilerResult(
+ flags=[],
+ version='19.00.24213',
+ type='msvc',
+ compiler='/usr/bin/cl',
+ language='C',
+ )
+ VSXX_2015u3_RESULT = CompilerResult(
+ flags=[],
+ version='19.00.24213',
+ type='msvc',
+ compiler='/usr/bin/cl',
+ language='C++',
+ )
+ CLANG_CL_3_9_RESULT = CompilerResult(
+ flags=['-Xclang', '-std=gnu99',
+ '-fms-compatibility-version=19.00.24213', '-fallback'],
+ version='19.00.24213',
+ type='clang-cl',
+ compiler='/usr/bin/clang-cl',
+ language='C',
+ )
+ CLANGXX_CL_3_9_RESULT = CompilerResult(
+ flags=['-Xclang', '-std=c++14',
+ '-fms-compatibility-version=19.00.24213', '-fallback'],
+ version='19.00.24213',
+ type='clang-cl',
+ compiler='/usr/bin/clang-cl',
+ language='C++',
+ )
+ CLANG_3_3_RESULT = LinuxToolchainTest.CLANG_3_3_RESULT
+ CLANGXX_3_3_RESULT = LinuxToolchainTest.CLANGXX_3_3_RESULT
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+ GCC_4_7_RESULT = LinuxToolchainTest.GCC_4_7_RESULT
+ GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT
+ GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT
+ GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT
+ GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT
+
+ # VS2015u3 or greater is required.
+ def test_msvc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2015u3_RESULT,
+ 'cxx_compiler': self.VSXX_2015u3_RESULT,
+ })
+
+ def test_unsupported_msvc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2015u2_RESULT,
+ }, environ={
+ 'CC': '/opt/VS_2015u2/bin/cl',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2015u1_RESULT,
+ }, environ={
+ 'CC': '/opt/VS_2015u1/bin/cl',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2015_RESULT,
+ }, environ={
+ 'CC': '/opt/VS_2015/bin/cl',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2013u3_RESULT,
+ }, environ={
+ 'CC': '/opt/VS_2013u3/bin/cl',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.VS_2013u2_RESULT,
+ }, environ={
+ 'CC': '/opt/VS_2013u2/bin/cl',
+ })
+
+ def test_clang_cl(self):
+ # We'll pick clang-cl if msvc can't be found.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) != 'cl'
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.CLANG_CL_3_9_RESULT,
+ 'cxx_compiler': self.CLANGXX_CL_3_9_RESULT,
+ })
+
+ def test_gcc(self):
+ # We'll pick GCC if msvc and clang-cl can't be found.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) not in ('cl', 'clang-cl')
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.GCC_4_9_RESULT,
+ 'cxx_compiler': self.GXX_4_9_RESULT,
+ })
+
+ def test_overridden_unsupported_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.GCC_4_7_RESULT,
+ }, environ={
+ 'CC': 'gcc-4.7',
+ 'CXX': 'g++-4.7',
+ })
+
+ def test_clang(self):
+ # We'll pick clang if nothing else is found.
+ paths = {
+ k: v for k, v in self.PATHS.iteritems()
+ if os.path.basename(k) not in ('cl', 'clang-cl', 'gcc')
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.CLANG_3_6_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT,
+ })
+
+ def test_overridden_unsupported_clang(self):
+ # clang 3.3 C compiler is perfectly fine, but we need more for C++.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_3_RESULT,
+ 'cxx_compiler': self.CLANGXX_3_3_RESULT,
+ }, environ={
+ 'CC': 'clang-3.3',
+ 'CXX': 'clang++-3.3',
+ })
+
+ def test_cannot_cross(self):
+ paths = {
+ '/usr/bin/cl': VS_2015u3 + VS_PLATFORM_X86_64,
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': ('Target C compiler target CPU (x86_64) '
+ 'does not match --target CPU (i686)'),
+ })
+
+
+class Windows64ToolchainTest(WindowsToolchainTest):
+ HOST = 'x86_64-pc-mingw32'
+
+ # For the purpose of this test, it doesn't matter that the paths are not
+ # real Windows paths.
+ PATHS = {
+ '/opt/VS_2013u2/bin/cl': VS_2013u2 + VS_PLATFORM_X86_64,
+ '/opt/VS_2013u3/bin/cl': VS_2013u3 + VS_PLATFORM_X86_64,
+ '/opt/VS_2015/bin/cl': VS_2015 + VS_PLATFORM_X86_64,
+ '/opt/VS_2015u1/bin/cl': VS_2015u1 + VS_PLATFORM_X86_64,
+ '/opt/VS_2015u2/bin/cl': VS_2015u2 + VS_PLATFORM_X86_64,
+ '/usr/bin/cl': VS_2015u3 + VS_PLATFORM_X86_64,
+ '/usr/bin/clang-cl': CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86_64,
+ '/usr/bin/gcc': GCC_4_9 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/g++': GXX_4_9 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/gcc-4.7': GCC_4_7 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/g++-4.7': GXX_4_7 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/gcc-5': GCC_5 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/g++-5': GXX_5 + GCC_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang': CLANG_3_6 + CLANG_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang++': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang-3.6': CLANG_3_6 + CLANG_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang++-3.6': CLANGXX_3_6 + CLANG_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang-3.3': CLANG_3_3 + CLANG_PLATFORM_X86_64_WIN,
+ '/usr/bin/clang++-3.3': CLANGXX_3_3 + CLANG_PLATFORM_X86_64_WIN,
+ }
+
+ def test_cannot_cross(self):
+ paths = {
+ '/usr/bin/cl': VS_2015u3 + VS_PLATFORM_X86,
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': ('Target C compiler target CPU (x86) '
+ 'does not match --target CPU (x86_64)'),
+ })
+
+
+class LinuxCrossCompileToolchainTest(BaseToolchainTest):
+ TARGET = 'arm-unknown-linux-gnu'
+ PATHS = {
+ '/usr/bin/arm-linux-gnu-gcc': GCC_4_9 + GCC_PLATFORM_ARM_LINUX,
+ '/usr/bin/arm-linux-gnu-g++': GXX_4_9 + GCC_PLATFORM_ARM_LINUX,
+ '/usr/bin/arm-linux-gnu-gcc-4.7': GCC_4_7 + GCC_PLATFORM_ARM_LINUX,
+ '/usr/bin/arm-linux-gnu-g++-4.7': GXX_4_7 + GCC_PLATFORM_ARM_LINUX,
+ '/usr/bin/arm-linux-gnu-gcc-5': GCC_5 + GCC_PLATFORM_ARM_LINUX,
+ '/usr/bin/arm-linux-gnu-g++-5': GXX_5 + GCC_PLATFORM_ARM_LINUX,
+ }
+ PATHS.update(LinuxToolchainTest.PATHS)
+ ARM_GCC_4_7_RESULT = LinuxToolchainTest.GXX_4_7_RESULT
+ ARM_GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT + {
+ 'compiler': '/usr/bin/arm-linux-gnu-gcc-5',
+ }
+ ARM_GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT + {
+ 'compiler': '/usr/bin/arm-linux-gnu-g++-5',
+ }
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+ GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT
+ GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT
+
+ little_endian = FakeCompiler(GCC_PLATFORM_LINUX,
+ GCC_PLATFORM_LITTLE_ENDIAN)
+ big_endian = FakeCompiler(GCC_PLATFORM_LINUX, GCC_PLATFORM_BIG_ENDIAN)
+
+ PLATFORMS = {
+ 'i686-pc-linux-gnu': GCC_PLATFORM_X86_LINUX,
+ 'x86_64-pc-linux-gnu': GCC_PLATFORM_X86_64_LINUX,
+ 'arm-unknown-linux-gnu': GCC_PLATFORM_ARM_LINUX,
+ 'aarch64-unknown-linux-gnu': little_endian + {
+ '__aarch64__': 1,
+ },
+ 'ia64-unknown-linux-gnu': little_endian + {
+ '__ia64__': 1,
+ },
+ 's390x-unknown-linux-gnu': big_endian + {
+ '__s390x__': 1,
+ '__s390__': 1,
+ },
+ 's390-unknown-linux-gnu': big_endian + {
+ '__s390__': 1,
+ },
+ 'powerpc64-unknown-linux-gnu': big_endian + {
+ None: {
+ '__powerpc64__': 1,
+ '__powerpc__': 1,
+ },
+ '-m32': {
+ '__powerpc64__': False,
+ },
+ },
+ 'powerpc-unknown-linux-gnu': big_endian + {
+ None: {
+ '__powerpc__': 1,
+ },
+ '-m64': {
+ '__powerpc64__': 1,
+ },
+ },
+ 'alpha-unknown-linux-gnu': little_endian + {
+ '__alpha__': 1,
+ },
+ 'hppa-unknown-linux-gnu': big_endian + {
+ '__hppa__': 1,
+ },
+ 'sparc64-unknown-linux-gnu': big_endian + {
+ None: {
+ '__arch64__': 1,
+ '__sparc__': 1,
+ },
+ '-m32': {
+ '__arch64__': False,
+ },
+ },
+ 'sparc-unknown-linux-gnu': big_endian + {
+ None: {
+ '__sparc__': 1,
+ },
+ '-m64': {
+ '__arch64__': 1,
+ },
+ },
+ 'mips64-unknown-linux-gnuabi64': big_endian + {
+ '__mips64': 1,
+ '__mips__': 1,
+ },
+ 'mips-unknown-linux-gnu': big_endian + {
+ '__mips__': 1,
+ },
+ }
+
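+ # Little-endian variants reuse the big-endian definitions above with the
+ # byte order overridden.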
+ PLATFORMS['powerpc64le-unknown-linux-gnu'] = \
+ PLATFORMS['powerpc64-unknown-linux-gnu'] + GCC_PLATFORM_LITTLE_ENDIAN
+ PLATFORMS['mips64el-unknown-linux-gnuabi64'] = \
+ PLATFORMS['mips64-unknown-linux-gnuabi64'] + GCC_PLATFORM_LITTLE_ENDIAN
+ PLATFORMS['mipsel-unknown-linux-gnu'] = \
+ PLATFORMS['mips-unknown-linux-gnu'] + GCC_PLATFORM_LITTLE_ENDIAN
+
+ def do_test_cross_gcc_32_64(self, host, target):
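+ # Temporarily override HOST/TARGET for this combination; the class
+ # defaults are restored at the end.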
+ self.HOST = host
+ self.TARGET = target
+ paths = {
+ '/usr/bin/gcc': GCC_4_9 + self.PLATFORMS[host],
+ '/usr/bin/g++': GXX_4_9 + self.PLATFORMS[host],
+ }
+ cross_flags = {
+ 'flags': ['-m64' if '64' in target else '-m32']
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.GCC_4_9_RESULT + cross_flags,
+ 'cxx_compiler': self.GXX_4_9_RESULT + cross_flags,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ })
+ self.HOST = LinuxCrossCompileToolchainTest.HOST
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_cross_x86_x64(self):
+ self.do_test_cross_gcc_32_64(
+ 'i686-pc-linux-gnu', 'x86_64-pc-linux-gnu')
+ self.do_test_cross_gcc_32_64(
+ 'x86_64-pc-linux-gnu', 'i686-pc-linux-gnu')
+
+ def test_cross_sparc_sparc64(self):
+ self.do_test_cross_gcc_32_64(
+ 'sparc-unknown-linux-gnu', 'sparc64-unknown-linux-gnu')
+ self.do_test_cross_gcc_32_64(
+ 'sparc64-unknown-linux-gnu', 'sparc-unknown-linux-gnu')
+
+ def test_cross_ppc_ppc64(self):
+ self.do_test_cross_gcc_32_64(
+ 'powerpc-unknown-linux-gnu', 'powerpc64-unknown-linux-gnu')
+ self.do_test_cross_gcc_32_64(
+ 'powerpc64-unknown-linux-gnu', 'powerpc-unknown-linux-gnu')
+
+ def do_test_cross_gcc(self, host, target):
+ self.HOST = host
+ self.TARGET = target
+ host_cpu = host.split('-')[0]
+ cpu, manufacturer, target_os = target.split('-', 2)
+ toolchain_prefix = '/usr/bin/%s-%s' % (cpu, target_os)
+ paths = {
+ '/usr/bin/gcc': GCC_4_9 + self.PLATFORMS[host],
+ '/usr/bin/g++': GXX_4_9 + self.PLATFORMS[host],
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': ('Target C compiler target CPU (%s) '
+ 'does not match --target CPU (%s)'
+ % (host_cpu, cpu)),
+ })
+
+ paths.update({
+ '%s-gcc' % toolchain_prefix: GCC_4_9 + self.PLATFORMS[target],
+ '%s-g++' % toolchain_prefix: GXX_4_9 + self.PLATFORMS[target],
+ })
+ self.do_toolchain_test(paths, {
+ 'c_compiler': self.GCC_4_9_RESULT + {
+ 'compiler': '%s-gcc' % toolchain_prefix,
+ },
+ 'cxx_compiler': self.GXX_4_9_RESULT + {
+ 'compiler': '%s-g++' % toolchain_prefix,
+ },
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ })
+ self.HOST = LinuxCrossCompileToolchainTest.HOST
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_cross_gcc_misc(self):
+ for target in self.PLATFORMS:
+ if not target.endswith('-pc-linux-gnu'):
+ self.do_test_cross_gcc('x86_64-pc-linux-gnu', target)
+
+ def test_cannot_cross(self):
+ self.TARGET = 'mipsel-unknown-linux-gnu'
+
+ paths = {
+ '/usr/bin/gcc': GCC_4_9 + self.PLATFORMS['mips-unknown-linux-gnu'],
+ '/usr/bin/g++': GXX_4_9 + self.PLATFORMS['mips-unknown-linux-gnu'],
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': ('Target C compiler target endianness (big) '
+ 'does not match --target endianness (little)'),
+ })
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_overridden_cross_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.ARM_GCC_5_RESULT,
+ 'cxx_compiler': self.ARM_GXX_5_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ }, environ={
+ 'CC': 'arm-linux-gnu-gcc-5',
+ 'CXX': 'arm-linux-gnu-g++-5',
+ })
+
+ def test_overridden_unsupported_cross_gcc(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.ARM_GCC_4_7_RESULT,
+ }, environ={
+ 'CC': 'arm-linux-gnu-gcc-4.7',
+ 'CXX': 'arm-linux-gnu-g++-4.7',
+ })
+
+ def test_guess_cross_cxx(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.ARM_GCC_5_RESULT,
+ 'cxx_compiler': self.ARM_GXX_5_RESULT,
+ 'host_c_compiler': self.GCC_4_9_RESULT,
+ 'host_cxx_compiler': self.GXX_4_9_RESULT,
+ }, environ={
+ 'CC': 'arm-linux-gnu-gcc-5',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.ARM_GCC_5_RESULT,
+ 'cxx_compiler': self.ARM_GXX_5_RESULT,
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'arm-linux-gnu-gcc-5',
+ 'HOST_CC': 'clang',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.ARM_GCC_5_RESULT,
+ 'cxx_compiler': self.ARM_GXX_5_RESULT,
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'arm-linux-gnu-gcc-5',
+ 'CXX': 'arm-linux-gnu-g++-5',
+ 'HOST_CC': 'clang',
+ })
+
+ def test_cross_clang(self):
+ cross_clang_result = self.CLANG_3_6_RESULT + {
+ 'flags': ['--target=arm-linux-gnu'],
+ }
+ cross_clangxx_result = self.CLANGXX_3_6_RESULT + {
+ 'flags': ['--target=arm-linux-gnu'],
+ }
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': cross_clang_result,
+ 'cxx_compiler': cross_clangxx_result,
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ 'HOST_CC': 'clang',
+ })
+
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': cross_clang_result,
+ 'cxx_compiler': cross_clangxx_result,
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ })
+
+ def test_cross_atypical_clang(self):
+ paths = dict(self.PATHS)
+ paths.update({
+ '/usr/bin/afl-clang-fast': paths['/usr/bin/clang'],
+ '/usr/bin/afl-clang-fast++': paths['/usr/bin/clang++'],
+ })
+ afl_clang_result = self.CLANG_3_6_RESULT + {
+ 'compiler': '/usr/bin/afl-clang-fast',
+ }
+ afl_clangxx_result = self.CLANGXX_3_6_RESULT + {
+ 'compiler': '/usr/bin/afl-clang-fast++',
+ }
+ self.do_toolchain_test(paths, {
+ 'c_compiler': afl_clang_result + {
+ 'flags': ['--target=arm-linux-gnu'],
+ },
+ 'cxx_compiler': afl_clangxx_result + {
+ 'flags': ['--target=arm-linux-gnu'],
+ },
+ 'host_c_compiler': afl_clang_result,
+ 'host_cxx_compiler': afl_clangxx_result,
+ }, environ={
+ 'CC': 'afl-clang-fast',
+ 'CXX': 'afl-clang-fast++',
+ })
+
+
+class OSXCrossToolchainTest(BaseToolchainTest):
+ TARGET = 'i686-apple-darwin11.2.0'
+ PATHS = LinuxToolchainTest.PATHS
+ CLANG_3_6_RESULT = LinuxToolchainTest.CLANG_3_6_RESULT
+ CLANGXX_3_6_RESULT = LinuxToolchainTest.CLANGXX_3_6_RESULT
+
+ def test_osx_cross(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': self.CLANG_3_6_RESULT + {
+ 'flags': ['--target=i686-darwin11.2.0'],
+ },
+ 'cxx_compiler': self.CLANGXX_3_6_RESULT + {
+ 'flags': ['--target=i686-darwin11.2.0'],
+ },
+ 'host_c_compiler': self.CLANG_3_6_RESULT,
+ 'host_cxx_compiler': self.CLANGXX_3_6_RESULT,
+ }, environ={
+ 'CC': 'clang',
+ })
+
+ def test_cannot_osx_cross(self):
+ self.do_toolchain_test(self.PATHS, {
+ 'c_compiler': 'Target C compiler target kernel (Linux) does not '
+ 'match --target kernel (Darwin)',
+ }, environ={
+ 'CC': 'gcc',
+ })
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
new file mode 100644
index 000000000..8ec33a8b7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
@@ -0,0 +1,437 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import re
+import types
+import unittest
+
+from fnmatch import fnmatch
+from StringIO import StringIO
+from textwrap import dedent
+
+from mozunit import (
+ main,
+ MockedOpen,
+)
+
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import ReadOnlyNamespace
+from mozpack import path as mozpath
+
+
+class CompilerPreprocessor(Preprocessor):
+ # The C preprocessor only expands macros when they are not in C strings.
+ # For now, we don't look very hard for C strings because they don't matter
+ # that much for our unit tests, but we at least avoid expanding in the
+ # simple "FOO" case.
+ VARSUBST = re.compile(r'(?<!")(?P<VAR>\w+)(?!")', re.U)
+ NON_WHITESPACE = re.compile(r'\S')
+ HAS_FEATURE = re.compile(r'(__has_feature)\(([^\)]*)\)')
+
+ def __init__(self, *args, **kwargs):
+ Preprocessor.__init__(self, *args, **kwargs)
+ self.do_filter('c_substitution')
+ self.setMarker(r'#\s*')
+
+ def do_if(self, expression, **kwargs):
+ # The C preprocessor handles numbers following C rules, which differs
+ # from what our Preprocessor does out of the box. Hack around it just
+ # enough for the configure tests to work properly.
+ context = self.context
+ def normalize_numbers(value):
+ if isinstance(value, types.StringTypes):
+ if value[-1:] == 'L' and value[:-1].isdigit():
+ value = int(value[:-1])
+ return value
+ # Our Preprocessor doesn't handle macros with parameters, so we hack
+ # around that for __has_feature()-like things.
+ def normalize_has_feature(expr):
+ return self.HAS_FEATURE.sub(r'\1\2', expr)
+ self.context = self.Context(
+ (normalize_has_feature(k), normalize_numbers(v))
+ for k, v in context.iteritems()
+ )
+ try:
+ return Preprocessor.do_if(self, normalize_has_feature(expression),
+ **kwargs)
+ finally:
+ self.context = context
+
+ class Context(dict):
+ def __missing__(self, key):
+ return None
+
+ def filter_c_substitution(self, line):
+ def repl(matchobj):
+ varname = matchobj.group('VAR')
+ if varname in self.context:
+ result = str(self.context[varname])
+ # The C preprocessor inserts whitespace around expanded
+ # symbols.
+ start, end = matchobj.span('VAR')
+ if self.NON_WHITESPACE.match(line[start-1:start]):
+ result = ' ' + result
+ if self.NON_WHITESPACE.match(line[end:end+1]):
+ result = result + ' '
+ return result
+ return matchobj.group(0)
+ return self.VARSUBST.sub(repl, line)
+
+
+class TestCompilerPreprocessor(unittest.TestCase):
+ def test_expansion(self):
+ pp = CompilerPreprocessor({
+ 'A': 1,
+ 'B': '2',
+ 'C': 'c',
+ 'D': 'd'
+ })
+ pp.out = StringIO()
+ input = StringIO('A.B.C "D"')
+ input.name = 'foo'
+ pp.do_include(input)
+
+ self.assertEquals(pp.out.getvalue(), '1 . 2 . c "D"')
+
+ def test_condition(self):
+ pp = CompilerPreprocessor({
+ 'A': 1,
+ 'B': '2',
+ 'C': '0L',
+ })
+ pp.out = StringIO()
+ input = StringIO(dedent('''\
+ #ifdef A
+ IFDEF_A
+ #endif
+ #if A
+ IF_A
+ #endif
+ # if B
+ IF_B
+ # else
+ IF_NOT_B
+ # endif
+ #if !C
+ IF_NOT_C
+ #else
+ IF_C
+ #endif
+ '''))
+ input.name = 'foo'
+ pp.do_include(input)
+
+ self.assertEquals('IFDEF_A\nIF_A\nIF_B\nIF_NOT_C\n', pp.out.getvalue())
+
+
+class FakeCompiler(dict):
+ '''Defines a fake compiler for use in toolchain tests below.
+
+ The definitions given when creating an instance can have one of two
+ forms:
+ - a dict giving preprocessor symbols and their respective values, e.g.
+ { '__GNUC__': 4, '__STDC__': 1 }
+ - a dict associating flags with preprocessor symbols. An entry for `None`
+ is required in this case; it holds the baseline preprocessor symbols.
+ Additional entries describe the symbols that a given flag sets or
+ unsets (a value of `False` unsets the symbol).
+ {
+ None: { '__GNUC__': 4, '__STDC__': 1, '__STRICT_ANSI__': 1 },
+ '-std=gnu99': { '__STDC_VERSION__': '199901L',
+ '__STRICT_ANSI__': False },
+ }
+ With the dict above, invoking the preprocessor with no additional flags
+ would define __GNUC__, __STDC__ and __STRICT_ANSI__, and with -std=gnu99,
+ __GNUC__, __STDC__, and __STDC_VERSION__ (__STRICT_ANSI__ would be
+ unset).
+ It is also possible to have different symbols depending on the source
+ file extension. In that case, the key is a glob such as '*.ext', e.g.
+ {
+ '*.c': { '__STDC__': 1 },
+ '*.cpp': { '__cplusplus': '199711L' },
+ }
+
+ All the given definitions are merged together.
+
+ A FakeCompiler instance itself can be used as a definition to create
+ another FakeCompiler.
+
+ For convenience, FakeCompiler instances can be added (+) to one another.
+ '''
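+ # For instance (illustrative only, not used by the tests below):
+ #   FakeCompiler({'A': 1}) + {'-foo': {'B': 2}}
+ # is equivalent to FakeCompiler({None: {'A': 1}, '-foo': {'B': 2}}).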
+ def __init__(self, *definitions):
+ for definition in definitions:
+ if all(not isinstance(d, dict) for d in definition.itervalues()):
+ definition = {None: definition}
+ for key, value in definition.iteritems():
+ self.setdefault(key, {}).update(value)
+
+ def __call__(self, stdin, args):
+ files = [arg for arg in args if not arg.startswith('-')]
+ flags = [arg for arg in args if arg.startswith('-')]
+ if '-E' in flags:
+ assert len(files) == 1
+ file = files[0]
+ pp = CompilerPreprocessor(self[None])
+
+ def apply_defn(defn):
+ for k, v in defn.iteritems():
+ if v is False:
+ if k in pp.context:
+ del pp.context[k]
+ else:
+ pp.context[k] = v
+
+ for glob, defn in self.iteritems():
+ if glob and not glob.startswith('-') and fnmatch(file, glob):
+ apply_defn(defn)
+
+ for flag in flags:
+ apply_defn(self.get(flag, {}))
+
+ pp.out = StringIO()
+ pp.do_include(file)
+ return 0, pp.out.getvalue(), ''
+ elif '-c' in flags:
+ if '-funknown-flag' in flags:
+ return 1, '', ''
+ return 0, '', ''
+
+ return 1, '', ''
+
+ def __add__(self, other):
+ return FakeCompiler(self, other)
+
+
+class TestFakeCompiler(unittest.TestCase):
+ def test_fake_compiler(self):
+ with MockedOpen({
+ 'file': 'A B C',
+ 'file.c': 'A B C',
+ }):
+ compiler = FakeCompiler({
+ 'A': '1',
+ 'B': '2',
+ })
+ self.assertEquals(compiler(None, ['-E', 'file']),
+ (0, '1 2 C', ''))
+
+ compiler = FakeCompiler({
+ None: {
+ 'A': '1',
+ 'B': '2',
+ },
+ '-foo': {
+ 'C': 'foo',
+ },
+ '-bar': {
+ 'B': 'bar',
+ 'C': 'bar',
+ },
+ '-qux': {
+ 'B': False,
+ },
+ '*.c': {
+ 'B': '42',
+ },
+ })
+ self.assertEquals(compiler(None, ['-E', 'file']),
+ (0, '1 2 C', ''))
+ self.assertEquals(compiler(None, ['-E', '-foo', 'file']),
+ (0, '1 2 foo', ''))
+ self.assertEquals(compiler(None, ['-E', '-bar', 'file']),
+ (0, '1 bar bar', ''))
+ self.assertEquals(compiler(None, ['-E', '-qux', 'file']),
+ (0, '1 B C', ''))
+ self.assertEquals(compiler(None, ['-E', '-foo', '-bar', 'file']),
+ (0, '1 bar bar', ''))
+ self.assertEquals(compiler(None, ['-E', '-bar', '-foo', 'file']),
+ (0, '1 bar foo', ''))
+ self.assertEquals(compiler(None, ['-E', '-bar', '-qux', 'file']),
+ (0, '1 B bar', ''))
+ self.assertEquals(compiler(None, ['-E', '-qux', '-bar', 'file']),
+ (0, '1 bar bar', ''))
+ self.assertEquals(compiler(None, ['-E', 'file.c']),
+ (0, '1 42 C', ''))
+ self.assertEquals(compiler(None, ['-E', '-bar', 'file.c']),
+ (0, '1 bar bar', ''))
+
+ def test_multiple_definitions(self):
+ compiler = FakeCompiler({
+ 'A': 1,
+ 'B': 2,
+ }, {
+ 'C': 3,
+ })
+
+ self.assertEquals(compiler, {
+ None: {
+ 'A': 1,
+ 'B': 2,
+ 'C': 3,
+ },
+ })
+ compiler = FakeCompiler({
+ 'A': 1,
+ 'B': 2,
+ }, {
+ 'B': 4,
+ 'C': 3,
+ })
+
+ self.assertEquals(compiler, {
+ None: {
+ 'A': 1,
+ 'B': 4,
+ 'C': 3,
+ },
+ })
+ compiler = FakeCompiler({
+ 'A': 1,
+ 'B': 2,
+ }, {
+ None: {
+ 'B': 4,
+ 'C': 3,
+ },
+ '-foo': {
+ 'D': 5,
+ },
+ })
+
+ self.assertEquals(compiler, {
+ None: {
+ 'A': 1,
+ 'B': 4,
+ 'C': 3,
+ },
+ '-foo': {
+ 'D': 5,
+ },
+ })
+
+ compiler = FakeCompiler({
+ None: {
+ 'A': 1,
+ 'B': 2,
+ },
+ '-foo': {
+ 'D': 5,
+ },
+ }, {
+ '-foo': {
+ 'D': 5,
+ },
+ '-bar': {
+ 'E': 6,
+ },
+ })
+
+ self.assertEquals(compiler, {
+ None: {
+ 'A': 1,
+ 'B': 2,
+ },
+ '-foo': {
+ 'D': 5,
+ },
+ '-bar': {
+ 'E': 6,
+ },
+ })
+
+
+class CompilerResult(ReadOnlyNamespace):
+ '''Convenience helper for manipulating toolchain results in unit tests.
+
+ Adding a dict yields a new CompilerResult with the dict's values
+ replacing those of the CompilerResult, except for `flags`, where the
+ dict's value extends the `flags` in `self`.
+ '''
+
+ def __init__(self, wrapper=None, compiler='', version='', type='',
+ language='', flags=None):
+ if flags is None:
+ flags = []
+ if wrapper is None:
+ wrapper = []
+ super(CompilerResult, self).__init__(
+ flags=flags,
+ version=version,
+ type=type,
+ compiler=mozpath.abspath(compiler),
+ wrapper=wrapper,
+ language=language,
+ )
+
+ def __add__(self, other):
+ assert isinstance(other, dict)
+ result = copy.deepcopy(self.__dict__)
+ for k, v in other.iteritems():
+ if k == 'flags':
+ result.setdefault(k, []).extend(v)
+ else:
+ result[k] = v
+ return CompilerResult(**result)
+
+
+class TestCompilerResult(unittest.TestCase):
+ def test_compiler_result(self):
+ result = CompilerResult()
+ self.assertEquals(result.__dict__, {
+ 'wrapper': [],
+ 'compiler': mozpath.abspath(''),
+ 'version': '',
+ 'type': '',
+ 'language': '',
+ 'flags': [],
+ })
+
+ result = CompilerResult(
+ compiler='/usr/bin/gcc',
+ version='4.2.1',
+ type='gcc',
+ language='C',
+ flags=['-std=gnu99'],
+ )
+ self.assertEquals(result.__dict__, {
+ 'wrapper': [],
+ 'compiler': mozpath.abspath('/usr/bin/gcc'),
+ 'version': '4.2.1',
+ 'type': 'gcc',
+ 'language': 'C',
+ 'flags': ['-std=gnu99'],
+ })
+
+ result2 = result + {'flags': ['-m32']}
+ self.assertEquals(result2.__dict__, {
+ 'wrapper': [],
+ 'compiler': mozpath.abspath('/usr/bin/gcc'),
+ 'version': '4.2.1',
+ 'type': 'gcc',
+ 'language': 'C',
+ 'flags': ['-std=gnu99', '-m32'],
+ })
+ # Original flags are untouched.
+ self.assertEquals(result.flags, ['-std=gnu99'])
+
+ result3 = result + {
+ 'compiler': '/usr/bin/gcc-4.7',
+ 'version': '4.7.3',
+ 'flags': ['-m32'],
+ }
+ self.assertEquals(result3.__dict__, {
+ 'wrapper': [],
+ 'compiler': mozpath.abspath('/usr/bin/gcc-4.7'),
+ 'version': '4.7.3',
+ 'type': 'gcc',
+ 'language': 'C',
+ 'flags': ['-std=gnu99', '-m32'],
+ })
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
new file mode 100644
index 000000000..30dc022b7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+
+from buildconfig import topsrcdir
+from common import BaseConfigureTest
+from mozunit import main
+
+
+class TestToolkitMozConfigure(BaseConfigureTest):
+ def test_necko_protocols(self):
+ def get_value(arg):
+ sandbox = self.get_sandbox({}, {}, [arg])
+ return sandbox._value_for(sandbox['necko_protocols'])
+
+ default_protocols = get_value('')
+ self.assertNotEqual(default_protocols, ())
+
+ # Backwards compatibility
+ self.assertEqual(get_value('--enable-necko-protocols'),
+ default_protocols)
+
+ self.assertEqual(get_value('--enable-necko-protocols=yes'),
+ default_protocols)
+
+ self.assertEqual(get_value('--enable-necko-protocols=all'),
+ default_protocols)
+
+ self.assertEqual(get_value('--enable-necko-protocols=default'),
+ default_protocols)
+
+ self.assertEqual(get_value('--enable-necko-protocols='), ())
+
+ self.assertEqual(get_value('--enable-necko-protocols=no'), ())
+
+ self.assertEqual(get_value('--enable-necko-protocols=none'), ())
+
+ self.assertEqual(get_value('--disable-necko-protocols'), ())
+
+ self.assertEqual(get_value('--enable-necko-protocols=http'),
+ ('http',))
+
+ self.assertEqual(get_value('--enable-necko-protocols=http,about'),
+ ('about', 'http'))
+
+ self.assertEqual(get_value('--enable-necko-protocols=http,none'), ())
+
+ self.assertEqual(get_value('--enable-necko-protocols=-http'), ())
+
+ self.assertEqual(get_value('--enable-necko-protocols=none,http'),
+ ('http',))
+
+ self.assertEqual(
+ get_value('--enable-necko-protocols=all,-http,-about'),
+ tuple(p for p in default_protocols if p not in ('http', 'about')))
+
+ self.assertEqual(
+ get_value('--enable-necko-protocols=default,-http,-about'),
+ tuple(p for p in default_protocols if p not in ('http', 'about')))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_util.py b/python/mozbuild/mozbuild/test/configure/test_util.py
new file mode 100644
index 000000000..38b3c636e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_util.py
@@ -0,0 +1,558 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+import tempfile
+import textwrap
+import unittest
+import sys
+
+from StringIO import StringIO
+
+from mozunit import main
+from mozpack import path as mozpath
+
+from mozbuild.configure.util import (
+ ConfigureOutputHandler,
+ getpreferredencoding,
+ LineIO,
+ Version,
+)
+
+from mozbuild.configure import (
+ ConfigureSandbox,
+)
+
+from mozbuild.util import exec_
+
+from buildconfig import topsrcdir
+from common import ConfigureTestSandbox
+
+
+class TestConfigureOutputHandler(unittest.TestCase):
+ def test_separation(self):
+ out = StringIO()
+ err = StringIO()
+ name = '%s.test_separation' % self.__class__.__name__
+ logger = logging.getLogger(name)
+ logger.setLevel(logging.DEBUG)
+ logger.addHandler(ConfigureOutputHandler(out, err))
+
+ logger.error('foo')
+ logger.warning('bar')
+ logger.info('baz')
+ # DEBUG level is not printed out by this handler
+ logger.debug('qux')
+
+ self.assertEqual(out.getvalue(), 'baz\n')
+ self.assertEqual(err.getvalue(), 'foo\nbar\n')
+
+ def test_format(self):
+ out = StringIO()
+ err = StringIO()
+ name = '%s.test_format' % self.__class__.__name__
+ logger = logging.getLogger(name)
+ logger.setLevel(logging.DEBUG)
+ handler = ConfigureOutputHandler(out, err)
+ handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
+ logger.addHandler(handler)
+
+ logger.error('foo')
+ logger.warning('bar')
+ logger.info('baz')
+ # DEBUG level is not printed out by this handler
+ logger.debug('qux')
+
+ self.assertEqual(out.getvalue(), 'baz\n')
+ self.assertEqual(
+ err.getvalue(),
+ 'ERROR:foo\n'
+ 'WARNING:bar\n'
+ )
+
+ def test_continuation(self):
+ out = StringIO()
+ name = '%s.test_continuation' % self.__class__.__name__
+ logger = logging.getLogger(name)
+ logger.setLevel(logging.DEBUG)
+ handler = ConfigureOutputHandler(out, out)
+ handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
+ logger.addHandler(handler)
+
+ logger.info('foo')
+ logger.info('checking bar... ')
+ logger.info('yes')
+ logger.info('qux')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'foo\n'
+ 'checking bar... yes\n'
+ 'qux\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
+ logger.info('foo')
+ logger.info('checking bar... ')
+ logger.warning('hoge')
+ logger.info('no')
+ logger.info('qux')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'foo\n'
+ 'checking bar... \n'
+ 'WARNING:hoge\n'
+ ' ... no\n'
+ 'qux\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
+ logger.info('foo')
+ logger.info('checking bar... ')
+ logger.warning('hoge')
+ logger.warning('fuga')
+ logger.info('no')
+ logger.info('qux')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'foo\n'
+ 'checking bar... \n'
+ 'WARNING:hoge\n'
+ 'WARNING:fuga\n'
+ ' ... no\n'
+ 'qux\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+ err = StringIO()
+
+ logger.removeHandler(handler)
+ handler = ConfigureOutputHandler(out, err)
+ handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
+ logger.addHandler(handler)
+
+ logger.info('foo')
+ logger.info('checking bar... ')
+ logger.warning('hoge')
+ logger.warning('fuga')
+ logger.info('no')
+ logger.info('qux')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'foo\n'
+ 'checking bar... no\n'
+ 'qux\n'
+ )
+
+ self.assertEqual(
+ err.getvalue(),
+ 'WARNING:hoge\n'
+ 'WARNING:fuga\n'
+ )
+
+ def test_queue_debug(self):
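+ # queue_debug() holds DEBUG messages back and only emits them if an
+ # error is logged (or an exception escapes) within the block.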
+ out = StringIO()
+ name = '%s.test_queue_debug' % self.__class__.__name__
+ logger = logging.getLogger(name)
+ logger.setLevel(logging.DEBUG)
+ handler = ConfigureOutputHandler(out, out, maxlen=3)
+ handler.setFormatter(logging.Formatter('%(levelname)s:%(message)s'))
+ logger.addHandler(handler)
+
+ with handler.queue_debug():
+ logger.info('checking bar... ')
+ logger.debug('do foo')
+ logger.info('yes')
+ logger.info('qux')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'checking bar... yes\n'
+ 'qux\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
+ with handler.queue_debug():
+ logger.info('checking bar... ')
+ logger.debug('do foo')
+ logger.info('no')
+ logger.error('fail')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'checking bar... no\n'
+ 'DEBUG:do foo\n'
+ 'ERROR:fail\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
+ with handler.queue_debug():
+ logger.info('checking bar... ')
+ logger.debug('do foo')
+ logger.debug('do bar')
+ logger.debug('do baz')
+ logger.info('no')
+ logger.error('fail')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'checking bar... no\n'
+ 'DEBUG:do foo\n'
+ 'DEBUG:do bar\n'
+ 'DEBUG:do baz\n'
+ 'ERROR:fail\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
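+ # With maxlen=3, only the last three debug messages are kept; the
+ # earlier ones are collapsed into the truncation notice.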
+ with handler.queue_debug():
+ logger.info('checking bar... ')
+ logger.debug('do foo')
+ logger.debug('do bar')
+ logger.debug('do baz')
+ logger.debug('do qux')
+ logger.debug('do hoge')
+ logger.info('no')
+ logger.error('fail')
+
+ self.assertEqual(
+ out.getvalue(),
+ 'checking bar... no\n'
+ 'DEBUG:<truncated - see config.log for full output>\n'
+ 'DEBUG:do baz\n'
+ 'DEBUG:do qux\n'
+ 'DEBUG:do hoge\n'
+ 'ERROR:fail\n'
+ )
+
+ out.seek(0)
+ out.truncate()
+
+ try:
+ with handler.queue_debug():
+ logger.info('checking bar... ')
+ logger.debug('do foo')
+ logger.debug('do bar')
+ logger.info('no')
+ e = Exception('fail')
+ raise e
+ except Exception as caught:
+ self.assertIs(caught, e)
+
+ self.assertEqual(
+ out.getvalue(),
+ 'checking bar... no\n'
+ 'DEBUG:do foo\n'
+ 'DEBUG:do bar\n'
+ )
+
+ def test_queue_debug_reentrant(self):
+ out = StringIO()
+ name = '%s.test_queue_debug_reentrant' % self.__class__.__name__
+ logger = logging.getLogger(name)
+ logger.setLevel(logging.DEBUG)
+ handler = ConfigureOutputHandler(out, out, maxlen=10)
+ handler.setFormatter(logging.Formatter('%(levelname)s| %(message)s'))
+ logger.addHandler(handler)
+
+ try:
+ with handler.queue_debug():
+ logger.info('outer info')
+ logger.debug('outer debug')
+ with handler.queue_debug():
+ logger.info('inner info')
+ logger.debug('inner debug')
+ e = Exception('inner exception')
+ raise e
+ except Exception as caught:
+ self.assertIs(caught, e)
+
+ self.assertEqual(out.getvalue(),
+ 'outer info\n'
+ 'inner info\n'
+ 'DEBUG| outer debug\n'
+ 'DEBUG| inner debug\n')
+
+ out.seek(0)
+ out.truncate()
+
+ try:
+ with handler.queue_debug():
+ logger.info('outer info')
+ logger.debug('outer debug')
+ with handler.queue_debug():
+ logger.info('inner info')
+ logger.debug('inner debug')
+ e = Exception('outer exception')
+ raise e
+ except Exception as caught:
+ self.assertIs(caught, e)
+
+ self.assertEqual(out.getvalue(),
+ 'outer info\n'
+ 'inner info\n'
+ 'DEBUG| outer debug\n'
+ 'DEBUG| inner debug\n')
+
+ out.seek(0)
+ out.truncate()
+
+ with handler.queue_debug():
+ logger.info('outer info')
+ logger.debug('outer debug')
+ with handler.queue_debug():
+ logger.info('inner info')
+ logger.debug('inner debug')
+ logger.error('inner error')
+ self.assertEqual(out.getvalue(),
+ 'outer info\n'
+ 'inner info\n'
+ 'DEBUG| outer debug\n'
+ 'DEBUG| inner debug\n'
+ 'ERROR| inner error\n')
+
+ out.seek(0)
+ out.truncate()
+
+ with handler.queue_debug():
+ logger.info('outer info')
+ logger.debug('outer debug')
+ with handler.queue_debug():
+ logger.info('inner info')
+ logger.debug('inner debug')
+ logger.error('outer error')
+ self.assertEqual(out.getvalue(),
+ 'outer info\n'
+ 'inner info\n'
+ 'DEBUG| outer debug\n'
+ 'DEBUG| inner debug\n'
+ 'ERROR| outer error\n')
+
+ def test_is_same_output(self):
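+ # A dup'd file descriptor refers to the same underlying file, so it
+ # counts as the same output; a different file does not.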
+ fd1 = sys.stderr.fileno()
+ fd2 = os.dup(fd1)
+ try:
+ self.assertTrue(ConfigureOutputHandler._is_same_output(fd1, fd2))
+ finally:
+ os.close(fd2)
+
+ fd2, path = tempfile.mkstemp()
+ try:
+ self.assertFalse(ConfigureOutputHandler._is_same_output(fd1, fd2))
+
+ fd3 = os.dup(fd2)
+ try:
+ self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3))
+ finally:
+ os.close(fd3)
+
+ with open(path, 'a') as fh:
+ fd3 = fh.fileno()
+ self.assertTrue(
+ ConfigureOutputHandler._is_same_output(fd2, fd3))
+
+ finally:
+ os.close(fd2)
+ os.remove(path)
+
+
+class TestLineIO(unittest.TestCase):
+ def test_lineio(self):
+ lines = []
+ l = LineIO(lambda l: lines.append(l))
+
+ l.write('a')
+ self.assertEqual(lines, [])
+
+ l.write('b')
+ self.assertEqual(lines, [])
+
+ l.write('\n')
+ self.assertEqual(lines, ['ab'])
+
+ l.write('cdef')
+ self.assertEqual(lines, ['ab'])
+
+ l.write('\n')
+ self.assertEqual(lines, ['ab', 'cdef'])
+
+ l.write('ghi\njklm')
+ self.assertEqual(lines, ['ab', 'cdef', 'ghi'])
+
+ l.write('nop\nqrst\nuv\n')
+ self.assertEqual(lines, ['ab', 'cdef', 'ghi', 'jklmnop', 'qrst', 'uv'])
+
+ l.write('wx\nyz')
+ self.assertEqual(lines, ['ab', 'cdef', 'ghi', 'jklmnop', 'qrst', 'uv',
+ 'wx'])
+
+ l.close()
+ self.assertEqual(lines, ['ab', 'cdef', 'ghi', 'jklmnop', 'qrst', 'uv',
+ 'wx', 'yz'])
+
+ def test_lineio_contextmanager(self):
+ lines = []
+ with LineIO(lambda l: lines.append(l)) as l:
+ l.write('a\nb\nc')
+
+ self.assertEqual(lines, ['a', 'b'])
+
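+ # The trailing 'c' has no newline, so it is only flushed when the
+ # context manager closes the LineIO.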
+ self.assertEqual(lines, ['a', 'b', 'c'])
+
+
+class TestLogSubprocessOutput(unittest.TestCase):
+
+ def test_non_ascii_subprocess_output(self):
+ out = StringIO()
+ sandbox = ConfigureSandbox({}, {}, [], out, out)
+
+ sandbox.include_file(mozpath.join(topsrcdir, 'build',
+ 'moz.configure', 'util.configure'))
+ sandbox.include_file(mozpath.join(topsrcdir, 'python', 'mozbuild',
+ 'mozbuild', 'test', 'configure',
+ 'data', 'subprocess.configure'))
+ status = 0
+ try:
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+
+ self.assertEquals(status, 0)
+ quote_char = "'"
+ if getpreferredencoding().lower() == 'utf-8':
+ quote_char = '\u00B4'.encode('utf-8')
+ self.assertEquals(out.getvalue().strip(), quote_char)
+
+
+class TestVersion(unittest.TestCase):
+ def test_version_simple(self):
+ v = Version('1')
+ self.assertEqual(v, '1')
+ self.assertLess(v, '2')
+ self.assertGreater(v, '0.5')
+ self.assertEqual(v.major, 1)
+ self.assertEqual(v.minor, 0)
+ self.assertEqual(v.patch, 0)
+
+ def test_version_more(self):
+ v = Version('1.2.3b')
+ self.assertLess(v, '2')
+ self.assertEqual(v.major, 1)
+ self.assertEqual(v.minor, 2)
+ self.assertEqual(v.patch, 3)
+
+ def test_version_bad(self):
+ # A version with a letter in the middle doesn't really make sense,
+ # so everything after it should be ignored.
+ v = Version('1.2b.3')
+ self.assertLess(v, '2')
+ self.assertEqual(v.major, 1)
+ self.assertEqual(v.minor, 2)
+ self.assertEqual(v.patch, 0)
+
+ def test_version_badder(self):
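+ # The letter is in the major component this time, so the minor and
+ # patch numbers are ignored as well.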
+ v = Version('1b.2.3')
+ self.assertLess(v, '2')
+ self.assertEqual(v.major, 1)
+ self.assertEqual(v.minor, 0)
+ self.assertEqual(v.patch, 0)
+
+
+class TestCheckCmdOutput(unittest.TestCase):
+
+ def get_result(self, command='', paths=None):
+ paths = paths or {}
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureTestSandbox(paths, config, {}, ['/bin/configure'],
+ out, out)
+ sandbox.include_file(mozpath.join(topsrcdir, 'build',
+ 'moz.configure', 'util.configure'))
+ status = 0
+ try:
+ exec_(command, sandbox)
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+ return config, out.getvalue(), status
+
+ def test_simple_program(self):
+ def mock_simple_prog(_, args):
+ if len(args) == 1 and args[0] == '--help':
+ return 0, 'simple program help...', ''
+ self.fail("Unexpected arguments to mock_simple_program: %s" %
+ args)
+ prog_path = mozpath.abspath('/simple/prog')
+ cmd = "log.info(check_cmd_output('%s', '--help'))" % prog_path
+ config, out, status = self.get_result(cmd,
+ paths={prog_path: mock_simple_prog})
+ self.assertEqual(config, {})
+ self.assertEqual(status, 0)
+ self.assertEqual(out, 'simple program help...\n')
+
+ def test_failing_program(self):
+ def mock_error_prog(_, args):
+ if len(args) == 1 and args[0] == '--error':
+ return (127, 'simple program output',
+ 'simple program error output')
+ self.fail("Unexpected arguments to mock_error_program: %s" %
+ args)
+ prog_path = mozpath.abspath('/simple/prog')
+ cmd = "log.info(check_cmd_output('%s', '--error'))" % prog_path
+ config, out, status = self.get_result(cmd,
+ paths={prog_path: mock_error_prog})
+ self.assertEqual(config, {})
+ self.assertEqual(status, 1)
+ self.assertEqual(out, textwrap.dedent('''\
+ DEBUG: Executing: `%s --error`
+ DEBUG: The command returned non-zero exit status 127.
+ DEBUG: Its output was:
+ DEBUG: | simple program output
+ DEBUG: Its error output was:
+ DEBUG: | simple program error output
+ ERROR: Command `%s --error` failed with exit status 127.
+ ''' % (prog_path, prog_path)))
+
+ def test_error_callback(self):
+ def mock_error_prog(_, args):
+ if len(args) == 1 and args[0] == '--error':
+ return 127, 'simple program error...', ''
+ self.fail("Unexpected arguments to mock_error_program: %s" %
+ args)
+
+ prog_path = mozpath.abspath('/simple/prog')
+ cmd = textwrap.dedent('''\
+ check_cmd_output('%s', '--error',
+ onerror=lambda: die('`prog` produced an error'))
+ ''' % prog_path)
+ config, out, status = self.get_result(cmd,
+ paths={prog_path: mock_error_prog})
+ self.assertEqual(config, {})
+ self.assertEqual(status, 1)
+ self.assertEqual(out, textwrap.dedent('''\
+ DEBUG: Executing: `%s --error`
+ DEBUG: The command returned non-zero exit status 127.
+ DEBUG: Its output was:
+ DEBUG: | simple program error...
+ ERROR: `prog` produced an error
+ ''' % prog_path))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/controller/__init__.py b/python/mozbuild/mozbuild/test/controller/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/__init__.py
diff --git a/python/mozbuild/mozbuild/test/controller/test_ccachestats.py b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py
new file mode 100644
index 000000000..7a6608ec8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py
@@ -0,0 +1,208 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.controller.building import CCacheStats
+
+
+class TestCcacheStats(unittest.TestCase):
+ STAT_GARBAGE = """A garbage line which should fail to parse"""
+
+ STAT0 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 0
+ cache hit (preprocessed) 0
+ cache miss 0
+ files in cache 0
+ cache size 0 Kbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT1 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 100
+ cache hit (preprocessed) 200
+ cache miss 2500
+ called for link 180
+ called for preprocessing 6
+ compile failed 11
+ preprocessor error 3
+ bad compiler arguments 6
+ unsupported source language 9
+ autoconf compile/link 60
+ unsupported compiler option 2
+ no input file 21
+ files in cache 7344
+ cache size 1.9 Gbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT2 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 1900
+ cache hit (preprocessed) 300
+ cache miss 2600
+ called for link 361
+ called for preprocessing 12
+ compile failed 22
+ preprocessor error 6
+ bad compiler arguments 12
+ unsupported source language 18
+ autoconf compile/link 120
+ unsupported compiler option 4
+ no input file 48
+ files in cache 7392
+ cache size 2.0 Gbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT3 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2/etc/ccache.conf
+ cache hit (direct) 12004
+ cache hit (preprocessed) 1786
+ cache miss 26348
+ called for link 2338
+ called for preprocessing 6313
+ compile failed 399
+ preprocessor error 390
+ bad compiler arguments 86
+ unsupported source language 66
+ autoconf compile/link 2439
+ unsupported compiler option 187
+ no input file 1068
+ files in cache 18044
+ cache size 7.5 GB
+ max cache size 8.6 GB
+ """
+
+ STAT4 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf
+ cache hit (direct) 21039
+ cache hit (preprocessed) 2315
+ cache miss 39370
+ called for link 3651
+ called for preprocessing 6693
+ compile failed 723
+ ccache internal error 1
+ preprocessor error 588
+ bad compiler arguments 128
+ unsupported source language 99
+ autoconf compile/link 3669
+ unsupported compiler option 187
+ no input file 1711
+ files in cache 18313
+ cache size 6.3 GB
+ max cache size 6.0 GB
+ """
+
+ STAT5 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf
+ cache hit (direct) 21039
+ cache hit (preprocessed) 2315
+ cache miss 39372
+ called for link 3653
+ called for preprocessing 6693
+ compile failed 723
+ ccache internal error 1
+ preprocessor error 588
+ bad compiler arguments 128
+ unsupported source language 99
+ autoconf compile/link 3669
+ unsupported compiler option 187
+ no input file 1711
+ files in cache 17411
+ cache size 6.0 GB
+ max cache size 6.0 GB
+ """
+
+ STAT6 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.3.2/etc/ccache.conf
+ cache hit (direct) 319287
+ cache hit (preprocessed) 125987
+ cache miss 749959
+ cache hit rate 37.25 %
+ called for link 87978
+ called for preprocessing 418591
+ multiple source files 1861
+ compiler produced no output 122
+ compiler produced empty output 174
+ compile failed 14330
+ ccache internal error 1
+ preprocessor error 9459
+ can't use precompiled header 4
+ bad compiler arguments 2077
+ unsupported source language 18195
+ autoconf compile/link 51485
+ unsupported compiler option 322
+ no input file 309538
+ cleanups performed 1
+ files in cache 17358
+ cache size 15.4 GB
+ max cache size 17.2 GB
+ """
+
+ def test_parse_garbage_stats_message(self):
+ self.assertRaises(ValueError, CCacheStats, self.STAT_GARBAGE)
+
+ def test_parse_zero_stats_message(self):
+ stats = CCacheStats(self.STAT0)
+ self.assertEqual(stats.cache_dir, "/home/tlin/.ccache")
+ self.assertEqual(stats.hit_rates(), (0, 0, 0))
+
+ def test_hit_rate_of_diff_stats(self):
+ stats1 = CCacheStats(self.STAT1)
+ stats2 = CCacheStats(self.STAT2)
+ stats_diff = stats2 - stats1
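+ # The diff amounts to 1800 direct hits, 100 preprocessed hits and
+ # 100 misses, i.e. 2000 requests in total, hence (0.9, 0.05, 0.05).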
+ self.assertEqual(stats_diff.hit_rates(), (0.9, 0.05, 0.05))
+
+ def test_stats_contains_data(self):
+ stats0 = CCacheStats(self.STAT0)
+ stats1 = CCacheStats(self.STAT1)
+ stats2 = CCacheStats(self.STAT2)
+ stats_diff_zero = stats1 - stats1
+ stats_diff_negative1 = stats0 - stats1
+ stats_diff_negative2 = stats1 - stats2
+
+ self.assertFalse(stats0)
+ self.assertTrue(stats1)
+ self.assertTrue(stats2)
+ self.assertFalse(stats_diff_zero)
+ self.assertFalse(stats_diff_negative1)
+ self.assertFalse(stats_diff_negative2)
+
+ def test_stats_version32(self):
+ stat2 = CCacheStats(self.STAT2)
+ stat3 = CCacheStats(self.STAT3)
+ stats_diff = stat3 - stat2
+ self.assertTrue(stat3)
+ self.assertTrue(stats_diff)
+
+ def test_cache_size_shrinking(self):
+ stat4 = CCacheStats(self.STAT4)
+ stat5 = CCacheStats(self.STAT5)
+ stats_diff = stat5 - stat4
+ self.assertTrue(stat4)
+ self.assertTrue(stat5)
+ self.assertTrue(stats_diff)
+
+ def test_stats_version33(self):
+ stat3 = CCacheStats(self.STAT3)
+ stat6 = CCacheStats(self.STAT6)
+ stats_diff = stat6 - stat3
+ self.assertTrue(stat6)
+ self.assertTrue(stat3)
+ self.assertTrue(stats_diff)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/controller/test_clobber.py b/python/mozbuild/mozbuild/test/controller/test_clobber.py
new file mode 100644
index 000000000..997f467ec
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/test_clobber.py
@@ -0,0 +1,213 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import shutil
+import tempfile
+import unittest
+
+from StringIO import StringIO
+
+from mozunit import main
+
+from mozbuild.controller.clobber import Clobberer
+from mozbuild.controller.clobber import main as clobber
+
+
+class TestClobberer(unittest.TestCase):
+ def setUp(self):
+ self._temp_dirs = []
+
+ return unittest.TestCase.setUp(self)
+
+ def tearDown(self):
+ for d in self._temp_dirs:
+ shutil.rmtree(d, ignore_errors=True)
+
+ return unittest.TestCase.tearDown(self)
+
+ def get_tempdir(self):
+ t = tempfile.mkdtemp()
+ self._temp_dirs.append(t)
+ return t
+
+ def get_topsrcdir(self):
+ t = self.get_tempdir()
+ p = os.path.join(t, 'CLOBBER')
+ with open(p, 'a'):
+ pass
+
+ return t
+
+ def test_no_objdir(self):
+ """If topobjdir does not exist, no clobber is needed."""
+
+ tmp = os.path.join(self.get_tempdir(), 'topobjdir')
+ self.assertFalse(os.path.exists(tmp))
+
+ c = Clobberer(self.get_topsrcdir(), tmp)
+ self.assertFalse(c.clobber_needed())
+
+ # As a side effect, topobjdir is created and its CLOBBER file touched.
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ self.assertTrue(os.path.isdir(tmp))
+ self.assertTrue(os.path.exists(os.path.join(tmp, 'CLOBBER')))
+
+ def test_objdir_no_clobber_file(self):
+ """If CLOBBER does not exist in topobjdir, treat as empty."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ self.assertFalse(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ self.assertTrue(os.path.exists(os.path.join(c.topobjdir, 'CLOBBER')))
+
+ def test_objdir_clobber_newer(self):
+ """If CLOBBER in topobjdir is newer, do nothing."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ with open(c.obj_clobber, 'a'):
+ pass
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ def test_objdir_clobber_older(self):
+ """If CLOBBER in topobjdir is older, we clobber."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ with open(c.obj_clobber, 'a'):
+ pass
+
+ dummy_path = os.path.join(c.topobjdir, 'foo')
+ with open(dummy_path, 'a'):
+ pass
+
+ self.assertTrue(os.path.exists(dummy_path))
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ self.assertTrue(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertTrue(required)
+ self.assertTrue(performed)
+
+ self.assertFalse(os.path.exists(dummy_path))
+ self.assertTrue(os.path.exists(c.obj_clobber))
+ self.assertGreaterEqual(os.path.getmtime(c.obj_clobber),
+ os.path.getmtime(c.src_clobber))
+
+ def test_objdir_is_srcdir(self):
+ """If topobjdir is the topsrcdir, refuse to clobber."""
+
+ tmp = self.get_topsrcdir()
+ c = Clobberer(tmp, tmp)
+
+ self.assertFalse(c.clobber_needed())
+
+ def test_cwd_is_topobjdir(self):
+ """If cwd is topobjdir, we can still clobber."""
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+
+ with open(c.obj_clobber, 'a'):
+ pass
+
+ dummy_file = os.path.join(c.topobjdir, 'dummy_file')
+ with open(dummy_file, 'a'):
+ pass
+
+ dummy_dir = os.path.join(c.topobjdir, 'dummy_dir')
+ os.mkdir(dummy_dir)
+
+ self.assertTrue(os.path.exists(dummy_file))
+ self.assertTrue(os.path.isdir(dummy_dir))
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ self.assertTrue(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(c.topobjdir, True)
+ self.assertTrue(required)
+ self.assertTrue(performed)
+
+ self.assertFalse(os.path.exists(dummy_file))
+ self.assertFalse(os.path.exists(dummy_dir))
+
+ def test_cwd_under_topobjdir(self):
+ """If cwd is under topobjdir, we can't clobber."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+
+ with open(c.obj_clobber, 'a'):
+ pass
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ d = os.path.join(c.topobjdir, 'dummy_dir')
+ os.mkdir(d)
+
+ required, performed, reason = c.maybe_do_clobber(d, True)
+ self.assertTrue(required)
+ self.assertFalse(performed)
+ self.assertIn('Cannot clobber while the shell is inside', reason)
+
+ def test_mozconfig_opt_in(self):
+ """Auto clobber iff AUTOCLOBBER is in the environment."""
+
+ topsrcdir = self.get_topsrcdir()
+ topobjdir = self.get_tempdir()
+
+ obj_clobber = os.path.join(topobjdir, 'CLOBBER')
+ with open(obj_clobber, 'a'):
+ pass
+
+ dummy_file = os.path.join(topobjdir, 'dummy_file')
+ with open(dummy_file, 'a'):
+ pass
+
+ self.assertTrue(os.path.exists(dummy_file))
+
+ old_time = os.path.getmtime(os.path.join(topsrcdir, 'CLOBBER')) - 60
+ os.utime(obj_clobber, (old_time, old_time))
+
+ # Check auto clobber is off by default
+ env = dict(os.environ)
+ if env.get('AUTOCLOBBER', False):
+ del env['AUTOCLOBBER']
+
+ s = StringIO()
+ status = clobber([topsrcdir, topobjdir], env, os.getcwd(), s)
+ self.assertEqual(status, 1)
+ self.assertIn('Automatic clobbering is not enabled', s.getvalue())
+ self.assertTrue(os.path.exists(dummy_file))
+
+ # Check auto clobber opt-in works
+ env['AUTOCLOBBER'] = '1'
+
+ s = StringIO()
+ status = clobber([topsrcdir, topobjdir], env, os.getcwd(), s)
+ self.assertEqual(status, 0)
+ self.assertIn('Successfully completed auto clobber', s.getvalue())
+ self.assertFalse(os.path.exists(dummy_file))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/data/Makefile b/python/mozbuild/mozbuild/test/data/Makefile
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/bad.properties b/python/mozbuild/mozbuild/test/data/bad.properties
new file mode 100644
index 000000000..d4d8109b6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/bad.properties
@@ -0,0 +1,12 @@
+# A region.properties file with invalid unicode byte sequences. The
+# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability
+# and stress test", available at
+# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
+
+# 3.5 Impossible bytes |
+# |
+# The following two bytes cannot appear in a correct UTF-8 string |
+# |
+# 3.5.1 fe = "þ" |
+# 3.5.2 ff = "ÿ" |
+# 3.5.3 fe fe ff ff = "þþÿÿ" |
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/Makefile
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/valid.properties b/python/mozbuild/mozbuild/test/data/valid.properties
new file mode 100644
index 000000000..db64bf2ee
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/valid.properties
@@ -0,0 +1,11 @@
+# A region.properties file with unicode characters.
+
+# Danish.
+# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae
+
+# Korean.
+A.title=한메일
+
+# Russian.
+list.0 = test
+list.1 = Яндекс
diff --git a/python/mozbuild/mozbuild/test/frontend/__init__.py b/python/mozbuild/mozbuild/test/frontend/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/__init__.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/dir1/foo b/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/dir1/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/dir1/foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/moz.build
new file mode 100644
index 000000000..242a3628d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ANDROID_RES_DIRS += [
+ '/dir1',
+ '!/dir2',
+ '%/dir3',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/binary-components/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/binary-components/bar/moz.build
new file mode 100644
index 000000000..2946e42aa
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/binary-components/bar/moz.build
@@ -0,0 +1,2 @@
+Component('bar')
+NO_COMPONENTS_MANIFEST = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/binary-components/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/binary-components/foo/moz.build
new file mode 100644
index 000000000..8611a74be
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/binary-components/foo/moz.build
@@ -0,0 +1 @@
+Component('foo')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/binary-components/moz.build b/python/mozbuild/mozbuild/test/frontend/data/binary-components/moz.build
new file mode 100644
index 000000000..1776d0514
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/binary-components/moz.build
@@ -0,0 +1,10 @@
+@template
+def Component(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+ IS_COMPONENT = True
+
+DIRS += [
+ 'foo',
+ 'bar',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico b/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png b/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm b/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build
new file mode 100644
index 000000000..251bc53ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build
@@ -0,0 +1,13 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+BRANDING_FILES += [
+ 'bar.ico',
+ 'baz.png',
+ 'foo.xpm',
+]
+
+BRANDING_FILES.icons += [
+ 'quux.icns',
+]
+
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns b/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns
diff --git a/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build
new file mode 100644
index 000000000..f53dd9454
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES += ['foo']
+CONFIGURE_SUBST_FILES += ['bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml
new file mode 100644
index 000000000..99d10b1a6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[dependencies]
+deep-crate = { version = "0.1.0", path = "the/depths" }
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml
new file mode 100644
index 000000000..c347f8c08
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "shallow-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml
new file mode 100644
index 000000000..10a4ded0a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "deep-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[dependencies]
+shallow-crate = { path = "../../shallow" }
diff --git a/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build
new file mode 100644
index 000000000..ccb0d5e36
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = 'xyz'
+DEFINES = {
+ 'FOO': True,
+}
+
+DEFINES['BAZ'] = '"abcd"'
+DEFINES.update({
+ 'BAR': 7,
+ 'VALUE': value,
+ 'QUX': False,
+})
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build
new file mode 100644
index 000000000..cbd2c942b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ 'install.rdf',
+ 'main.js',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build
new file mode 100644
index 000000000..cbd2c942b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ 'install.rdf',
+ 'main.js',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build
new file mode 100644
index 000000000..259d96fcd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS.mozilla += ['mozilla1.h']
+EXPORTS.mozilla += ['!mozilla2.h']
+
+GENERATED_FILES += ['mozilla2.h']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build
new file mode 100644
index 000000000..e0dfce264
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS += ['!bar.h']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build
new file mode 100644
index 000000000..e1f93aab5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS.mozilla += ['mozilla1.h']
+EXPORTS.mozilla += ['mozilla2.h']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h b/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h b/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build
new file mode 100644
index 000000000..666fbeb81
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build
@@ -0,0 +1,13 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ['foo.h']
+EXPORTS += ['bar.h', 'baz.h']
+EXPORTS.mozilla += ['mozilla1.h']
+EXPORTS.mozilla += ['mozilla2.h']
+EXPORTS.mozilla.dom += ['dom1.h']
+EXPORTS.mozilla.dom += ['dom2.h', 'dom3.h']
+EXPORTS.mozilla.gfx += ['gfx.h']
+EXPORTS.vpx = ['mem.h']
+EXPORTS.vpx += ['mem2.h']
+EXPORTS.nspr.private = ['pprio.h', 'pprthred.h']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build
new file mode 100644
index 000000000..d6a9799b8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build
@@ -0,0 +1,2 @@
+with Files('*'):
+ BUG_COMPONENT = 'bad value'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build
new file mode 100644
index 000000000..990453f7c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build
@@ -0,0 +1,4 @@
+with Files('*.jsm'):
+ BUG_COMPONENT = ('Firefox', 'JS')
+with Files('*.cpp'):
+ BUG_COMPONENT = ('Firefox', 'C++')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build
new file mode 100644
index 000000000..cee286445
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build
@@ -0,0 +1,3 @@
+with Files('**/Makefile.in'):
+ BUG_COMPONENT = ('Core', 'Build Config')
+ FINAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build
new file mode 100644
index 000000000..206bf661b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build
@@ -0,0 +1,2 @@
+with Files('**'):
+ BUG_COMPONENT = ('Another', 'Component')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build
new file mode 100644
index 000000000..4ecb1112c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build
@@ -0,0 +1,2 @@
+with Files('**'):
+ BUG_COMPONENT = ('default_product', 'default_component')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build
new file mode 100644
index 000000000..7994d4a38
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build
@@ -0,0 +1,2 @@
+with Files('*'):
+ BUG_COMPONENT = ('Core', 'Build Config')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build
new file mode 100644
index 000000000..0a88e09e7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build
@@ -0,0 +1,5 @@
+with Files('foo'):
+ BUG_COMPONENT = ('FooProduct', 'FooComponent')
+
+with Files('bar'):
+ BUG_COMPONENT = ('BarProduct', 'BarComponent')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/module.js b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/module.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/module.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/moz.build
new file mode 100644
index 000000000..8915edc12
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/moz.build
@@ -0,0 +1,6 @@
+XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini']
+REFTEST_MANIFESTS += ['tests/reftests/reftest.list']
+
+EXTRA_JS_MODULES += [
+ 'module.js',
+]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest-stylo.list b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest-stylo.list
new file mode 100644
index 000000000..252a5b986
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest-stylo.list
@@ -0,0 +1,2 @@
+# DO NOT EDIT! This is an auto-generated temporary list for Stylo testing
+== test1.html test1.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest.list
new file mode 100644
index 000000000..504d45973
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest.list
@@ -0,0 +1 @@
+== test1.html test1-ref.html
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1-ref.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1-ref.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1-ref.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/test_default_mod.js b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/test_default_mod.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/test_default_mod.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/xpcshell.ini
new file mode 100644
index 000000000..55c18a250
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/xpcshell.ini
@@ -0,0 +1 @@
+[test_default_mod.js]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/moz.build
new file mode 100644
index 000000000..faff2a173
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/moz.build
@@ -0,0 +1,4 @@
+DIRS += [
+ 'default',
+ 'simple',
+]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/base.cpp b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/base.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/base.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/browser.ini
new file mode 100644
index 000000000..f284de043
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/browser.ini
@@ -0,0 +1 @@
+[test_mod.js]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/test_mod.js b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/test_mod.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/test_mod.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/moz.build
new file mode 100644
index 000000000..cbce16e1d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/moz.build
@@ -0,0 +1,22 @@
+with Files('src/*'):
+ IMPACTED_TESTS.files += [
+ 'tests/test_general.html',
+ ]
+
+with Files('src/module.jsm'):
+ IMPACTED_TESTS.files += [
+ 'browser/**.js',
+ ]
+
+with Files('base.cpp'):
+ IMPACTED_TESTS.files += [
+ '/default/tests/xpcshell/test_default_mod.js',
+ 'tests/*',
+ ]
+
+
+MOCHITEST_MANIFESTS += ['tests/mochitest.ini']
+BROWSER_CHROME_MANIFESTS += ['browser/browser.ini']
+
+UNIFIED_SOURCES += ['base.cpp']
+DIRS += ['src']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/module.jsm b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/module.jsm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/module.jsm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/moz.build
new file mode 100644
index 000000000..e0c49f129
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/moz.build
@@ -0,0 +1,3 @@
+EXTRA_JS_MODULES += [
+ 'module.jsm',
+]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/mochitest.ini
new file mode 100644
index 000000000..662566abd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/mochitest.ini
@@ -0,0 +1,2 @@
+[test_general.html]
+[test_specific.html]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/moz.build
new file mode 100644
index 000000000..8ef3a9fd8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/moz.build
@@ -0,0 +1 @@
+MOCHITEST_MANIFESTS += ['mochitest.ini']
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_general.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_general.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_general.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_specific.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_specific.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_specific.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/moz.build
new file mode 100644
index 000000000..0b7ca5a2b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/moz.build
@@ -0,0 +1,15 @@
+with Files('src/submodule/**'):
+ IMPACTED_TESTS.tags += [
+ 'submodule',
+ ]
+
+with Files('src/bar.jsm'):
+ IMPACTED_TESTS.flavors += [
+ 'browser-chrome',
+ ]
+ IMPACTED_TESTS.files += [
+ '**.js',
+ ]
+
+MOCHITEST_MANIFESTS += ['tests/mochitest.ini']
+XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/bar.jsm b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/bar.jsm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/bar.jsm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/submodule/foo.js b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/submodule/foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/submodule/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/mochitest.ini
new file mode 100644
index 000000000..d40ca4d06
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/mochitest.ini
@@ -0,0 +1,3 @@
+[test_simple.html]
+[test_specific.html]
+tags = submodule
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_bar.js b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_bar.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_simple.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_simple.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_simple.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_specific.html b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_specific.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_specific.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/xpcshell.ini
new file mode 100644
index 000000000..1275764c4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/xpcshell.ini
@@ -0,0 +1 @@
+[test_bar.js]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build
new file mode 100644
index 000000000..73132b0cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ '!foo.js',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build
new file mode 100644
index 000000000..0b694ed84
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ['bar.c']
+
+bar = GENERATED_FILES['bar.c']
+bar.script = '/script.py:make_bar'
+bar.inputs = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build
new file mode 100644
index 000000000..e080b47f9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [ 'bar.c', 'foo.c' ]
+
+bar = GENERATED_FILES['bar.c']
+bar.script = 'script.py:make_bar'
+bar.inputs = []
+
+foo = GENERATED_FILES['foo.c']
+foo.script = 'script.py'
+foo.inputs = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build
new file mode 100644
index 000000000..da96c5fbc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ['bar.c', 'foo.c']
+
+foo = GENERATED_FILES['foo.c']
+foo.script = 'script.py'
+foo.inputs = ['datafile']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build
new file mode 100644
index 000000000..080cb2a4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ['bar.c', 'foo.c']
+
+bar = GENERATED_FILES['bar.c']
+bar.script = 'script.rb'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build
new file mode 100644
index 000000000..90fa17666
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [ 'bar.c', 'foo.c' ]
+
+bar = GENERATED_FILES['bar.c']
+bar.script = 'nonexistent-script.py'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build
new file mode 100644
index 000000000..1c24113f3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [ 'bar.c', 'foo.c', ('xpidllex.py', 'xpidlyacc.py'), ]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build
new file mode 100644
index 000000000..12d90b15c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build
@@ -0,0 +1,37 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+Library('dummy')
+
+SOURCES += [
+ '!a.cpp',
+ '!b.cc',
+ '!c.cxx',
+]
+
+SOURCES += [
+ '!d.c',
+]
+
+SOURCES += [
+ '!e.m',
+]
+
+SOURCES += [
+ '!f.mm',
+]
+
+SOURCES += [
+ '!g.S',
+]
+
+SOURCES += [
+ '!h.s',
+ '!i.asm',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build
new file mode 100644
index 000000000..14deaf8cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ['!/bar/baz', '!foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-defines/moz.build
new file mode 100644
index 000000000..37628fede
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-defines/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = 'xyz'
+HOST_DEFINES = {
+ 'FOO': True,
+}
+
+HOST_DEFINES['BAZ'] = '"abcd"'
+HOST_DEFINES.update({
+ 'BAR': 7,
+ 'VALUE': value,
+ 'QUX': False,
+})
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
new file mode 100644
index 000000000..5a6f0acb6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
@@ -0,0 +1,25 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def HostLibrary(name):
+ '''Template for libraries.'''
+ HOST_LIBRARY_NAME = name
+
+HostLibrary('dummy')
+
+HOST_SOURCES += [
+ 'a.cpp',
+ 'b.cc',
+ 'c.cxx',
+]
+
+HOST_SOURCES += [
+ 'd.c',
+]
+
+HOST_SOURCES += [
+ 'e.mm',
+ 'f.mm',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
new file mode 100644
index 000000000..bb492a242
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ['bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
new file mode 100644
index 000000000..8e6a0f338
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
+
+include('included.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
new file mode 100644
index 000000000..a6a0fd8ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('included-2.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
new file mode 100644
index 000000000..9bfc65481
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
new file mode 100644
index 000000000..7ba111d1f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('included-1.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
new file mode 100644
index 000000000..d72d47c46
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('missing.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
new file mode 100644
index 000000000..f8084f0dd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('../moz.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
new file mode 100644
index 000000000..446207081
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('../parent.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
new file mode 100644
index 000000000..618a75ed0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('grandchild/grandchild.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
new file mode 100644
index 000000000..4d721fde4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('../../parent.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
new file mode 100644
index 000000000..a2ed3fa49
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
new file mode 100644
index 000000000..f9194c00e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('/sibling.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
new file mode 100644
index 000000000..a2ed3fa49
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
new file mode 100644
index 000000000..568f361a5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
new file mode 100644
index 000000000..a1b892e2d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = 'baz'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
new file mode 100644
index 000000000..a06f6d12d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += ['baz']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
new file mode 100644
index 000000000..2801f105d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = 'foobar'
+export("XPIDL_MODULE")
+
+DIRS += ['foo', 'bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
new file mode 100644
index 000000000..f189212fd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+IPDL_SOURCES += [
+ 'bar.ipdl',
+ 'bar2.ipdlh',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
new file mode 100644
index 000000000..4e1554559
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+IPDL_SOURCES += [
+ 'foo.ipdl',
+ 'foo2.ipdlh',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
new file mode 100644
index 000000000..03cf5e236
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += [
+ 'bar',
+ 'foo',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
new file mode 100644
index 000000000..43789914e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ['jar.mn', 'other.jar']
+
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
new file mode 100644
index 000000000..aac3a838c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ['jar.mn']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
new file mode 100644
index 000000000..5d5e78eed
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library('liba')
+LIBRARY_DEFINES['IN_LIBA'] = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
new file mode 100644
index 000000000..add45f6c1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library('libb')
+FINAL_LIBRARY = 'liba'
+LIBRARY_DEFINES['IN_LIBB'] = True
+USE_LIBS += ['libd']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
new file mode 100644
index 000000000..cf25e2c44
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library('libc')
+FINAL_LIBRARY = 'libb'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
new file mode 100644
index 000000000..dd057c3d7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library('libd')
+FORCE_STATIC_LIB = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
new file mode 100644
index 000000000..5f05fcef7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+DIRS = ['liba', 'libb', 'libc', 'libd']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
new file mode 100644
index 000000000..565c2bee6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ['/bar/baz', 'foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
new file mode 100644
index 000000000..565c2bee6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ['/bar/baz', 'foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
new file mode 100644
index 000000000..b493ec5b5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
@@ -0,0 +1,27 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+Library('test')
+
+DIRS += [
+ 'rust1',
+ 'rust2',
+]
+
+USE_LIBS += [
+ 'rust1',
+ 'rust2',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
new file mode 100644
index 000000000..9037d8f65
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust1"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
new file mode 100644
index 000000000..7418cca65
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary('rust1')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
new file mode 100644
index 000000000..f2001895e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust2"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
new file mode 100644
index 000000000..abd34e7db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary('rust2')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
new file mode 100644
index 000000000..4c19b90cd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Program(name):
+ PROGRAM = name
+
+
+@template
+def SimplePrograms(names):
+ SIMPLE_PROGRAMS += names
+
+Program('test_program')
+
+SimplePrograms([ 'test_program1', 'test_program2' ])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
new file mode 100644
index 000000000..5fac39736
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
new file mode 100644
index 000000000..0a91c4692
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
new file mode 100644
index 000000000..4dfba1c60
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
new file mode 100644
index 000000000..84b2cdea4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+error('Some error.')
+
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
new file mode 100644
index 000000000..9bfc65481
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
new file mode 100644
index 000000000..4a29cae11
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('child.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
new file mode 100644
index 000000000..d72d47c46
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('missing.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
new file mode 100644
index 000000000..149972edf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include('../include-basic/moz.build')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
new file mode 100644
index 000000000..6fc10f766
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+l = FOO
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
new file mode 100644
index 000000000..847f95167
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
+
+DIRS += ['foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
new file mode 100644
index 000000000..a91d38b41
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo = True + None
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
new file mode 100644
index 000000000..70a0d2c06
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo =
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
new file mode 100644
index 000000000..e3d0e656a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = 'dir'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
new file mode 100644
index 000000000..34579849d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['dir1', 'dir2']
+
+FOO = 'bar'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
new file mode 100644
index 000000000..fa122b7ce
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
new file mode 100644
index 000000000..26c653fde
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["dylib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
new file mode 100644
index 000000000..41a9a7c8f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "deterministic-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
new file mode 100644
index 000000000..a20b19c62
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/Cargo.toml
new file mode 100644
index 000000000..2700849db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/moz.build
new file mode 100644
index 000000000..01b3a35a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate')
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/Cargo.toml
new file mode 100644
index 000000000..ccdd06243
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "Nobody <nobody@mozilla.org>",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "unwind"
+
+[profile.release]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/moz.build
new file mode 100644
index 000000000..d3896decc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ '''Template for Rust libraries.'''
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary('random-crate') \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sdk-files/bar.ico b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/bar.ico
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/bar.ico
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sdk-files/baz.png b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/baz.png
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/baz.png
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sdk-files/foo.xpm b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/foo.xpm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/foo.xpm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sdk-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/moz.build
new file mode 100644
index 000000000..a2f8ddf9b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SDK_FILES += [
+ 'bar.ico',
+ 'baz.png',
+ 'foo.xpm',
+]
+
+SDK_FILES.icons += [
+ 'quux.icns',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sdk-files/quux.icns b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/quux.icns
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sdk-files/quux.icns
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build
new file mode 100644
index 000000000..8937fc245
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+Library('dummy')
+
+SOURCES += [
+ 'd.c',
+]
+
+SOURCES += [
+ 'e.m',
+]
+
+SOURCES += [
+ 'g.S',
+]
+
+SOURCES += [
+ 'h.s',
+ 'i.asm',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources/d.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources/e.m
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources/g.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources/h.s
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build
new file mode 100644
index 000000000..f9b453238
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build
@@ -0,0 +1,37 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+Library('dummy')
+
+SOURCES += [
+ 'a.cpp',
+ 'b.cc',
+ 'c.cxx',
+]
+
+SOURCES += [
+ 'd.c',
+]
+
+SOURCES += [
+ 'e.m',
+]
+
+SOURCES += [
+ 'f.mm',
+]
+
+SOURCES += [
+ 'g.S',
+]
+
+SOURCES += [
+ 'h.s',
+ 'i.asm',
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild
new file mode 100644
index 000000000..290104bc7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild
@@ -0,0 +1,21 @@
+@template
+def Template(foo, bar=[]):
+ SOURCES += foo
+ DIRS += bar
+
+@template
+def TemplateError(foo):
+ ILLEGAL = foo
+
+@template
+def TemplateGlobalVariable():
+ SOURCES += illegal
+
+@template
+def TemplateGlobalUPPERVariable():
+ SOURCES += DIRS
+
+@template
+def TemplateInherit(foo):
+ USE_LIBS += ['foo']
+ Template(foo)
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build
new file mode 100644
index 000000000..d7f6377d0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+TEST_HARNESS_FILES += ["foo.py"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini
new file mode 100644
index 000000000..d87114ac7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py
new file mode 100644
index 000000000..d87114ac7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build
new file mode 100644
index 000000000..ff3fed0ee
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+TEST_HARNESS_FILES.mochitest += ["runtests.py"]
+TEST_HARNESS_FILES.mochitest += ["utils.py"]
+TEST_HARNESS_FILES.testing.mochitest += ["mochitest.py"]
+TEST_HARNESS_FILES.testing.mochitest += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py
new file mode 100644
index 000000000..d87114ac7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py
new file mode 100644
index 000000000..d87114ac7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build
new file mode 100644
index 000000000..bdb209074
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+DIST_INSTALL = False
+SharedLibrary('foo')
+
+TEST_HARNESS_FILES.foo.bar += ['!%sfoo%s' % (CONFIG['DLL_PREFIX'], CONFIG['DLL_SUFFIX'])]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build
new file mode 100644
index 000000000..b153dd085
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['one','two','three']
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+SharedLibrary('cxx_shared')
+USE_LIBS += ['cxx_static']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build
new file mode 100644
index 000000000..f66270818
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+Library('cxx_static')
+SOURCES += ['foo.cpp']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build
new file mode 100644
index 000000000..7b3497be6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SharedLibrary('just_c_shared')
+USE_LIBS += ['just_c_static']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build
new file mode 100644
index 000000000..256642fea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+Library('just_c_static')
+SOURCES += ['foo.c']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini
new file mode 100644
index 000000000..900f42158
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = /.well-known/foo.txt
+
+[test_file.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt
new file mode 100644
index 000000000..ce0136250
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt
@@ -0,0 +1 @@
+hello
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build
new file mode 100644
index 000000000..87b20c6b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['absolute-support.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini
new file mode 100644
index 000000000..2f1fc406a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+[DEFAULT]
+support-files = bar.js foo.js bar.js
+
+[test_baz.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build
new file mode 100644
index 000000000..4e7e9ff4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list
new file mode 100644
index 000000000..1caf9cc39
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list
@@ -0,0 +1 @@
+!= reftest2.html reftest2-ref.html \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build
new file mode 100644
index 000000000..39ad44c28
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build
@@ -0,0 +1 @@
+REFTEST_MANIFESTS += ['reftest.list'] \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest-stylo.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest-stylo.list
new file mode 100644
index 000000000..237aea0e0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest-stylo.list
@@ -0,0 +1,3 @@
+# DO NOT EDIT! This is an auto-generated temporary list for Stylo testing
+== reftest1.html reftest1.html
+include included-reftest-stylo.list
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list
new file mode 100644
index 000000000..80caf8ffa
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list
@@ -0,0 +1,2 @@
+== reftest1.html reftest1-ref.html
+include included-reftest.list
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini
new file mode 100644
index 000000000..83a0cec0c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+foo = bar
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build
new file mode 100644
index 000000000..edfaf435f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['empty.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini
new file mode 100644
index 000000000..753cd0ec0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini
@@ -0,0 +1 @@
+[test_foo.html]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini
new file mode 100644
index 000000000..b8d4e123d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+install-to-subdir = subdir
+
+[include:common.ini]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build
new file mode 100644
index 000000000..4e7e9ff4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html
new file mode 100644
index 000000000..18ecdcb79
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html
@@ -0,0 +1 @@
+<html></html>
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/moz.build
new file mode 100644
index 000000000..9e4d7b21c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['subdir.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/subdir.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/subdir.ini
new file mode 100644
index 000000000..6b320c2d5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/subdir.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+install-to-subdir = subdir
+support-files = support.txt
+
+[test_foo.html]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/test_foo.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/test_foo.html
new file mode 100644
index 000000000..18ecdcb79
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/test_foo.html
@@ -0,0 +1 @@
+<html></html>
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt
new file mode 100644
index 000000000..ce0136250
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt
@@ -0,0 +1 @@
+hello
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini
new file mode 100644
index 000000000..efa2d4bc0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+support-files = foo.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build
new file mode 100644
index 000000000..80a038d42
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['just-support.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini
new file mode 100644
index 000000000..9cf798918
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = a11y-support/**
+
+[test_a11y.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini
new file mode 100644
index 000000000..a81ee3acb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support1 support2
+
+[test_browser.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini
new file mode 100644
index 000000000..1070c7853
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+skip-if = buildapp == 'b2g'
+
+[test_chrome.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list
new file mode 100644
index 000000000..b9d7f2685
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list
@@ -0,0 +1 @@
+== crashtest1.html crashtest1-ref.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini
new file mode 100644
index 000000000..a7eb6def4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[test_metro.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini
new file mode 100644
index 000000000..69fd71de0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+support-files = external1 external2
+generated-files = external1 external2
+
+[test_mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build
new file mode 100644
index 000000000..33839d9e3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+A11Y_MANIFESTS += ['a11y.ini']
+BROWSER_CHROME_MANIFESTS += ['browser.ini']
+METRO_CHROME_MANIFESTS += ['metro.ini']
+MOCHITEST_MANIFESTS += ['mochitest.ini']
+MOCHITEST_CHROME_MANIFESTS += ['chrome.ini']
+XPCSHELL_TESTS_MANIFESTS += ['xpcshell.ini']
+REFTEST_MANIFESTS += ['reftest.list']
+CRASHTEST_MANIFESTS += ['crashtest.list']
+PYTHON_UNIT_TESTS += ['test_foo.py']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest-stylo.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest-stylo.list
new file mode 100644
index 000000000..bd7b4f9cb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest-stylo.list
@@ -0,0 +1,2 @@
+# DO NOT EDIT! This is an auto-generated temporary list for Stylo testing
+== reftest1.html reftest1.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list
new file mode 100644
index 000000000..3fc25b296
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list
@@ -0,0 +1 @@
+== reftest1.html reftest1-ref.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini
new file mode 100644
index 000000000..fb3005434
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+head = head1 head2
+tail = tail1 tail2
+dupe-manifest =
+
+[test_xpcshell.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build
new file mode 100644
index 000000000..45edcc027
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += ['does_not_exist.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build
new file mode 100644
index 000000000..09c51cbb8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += ['xpcshell.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini
new file mode 100644
index 000000000..9ab85c0ce
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support/**
+
+[missing.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini
new file mode 100644
index 000000000..e3ef6216b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini
@@ -0,0 +1 @@
+[test_missing.html]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build
new file mode 100644
index 000000000..4e7e9ff4e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini
new file mode 100644
index 000000000..c78822429
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = ../support-file.txt
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build
new file mode 100644
index 000000000..a40e25625
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['child/mochitest.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini
new file mode 100644
index 000000000..4f1335d6b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+ another-file.sjs
+ data/**
+
+[test_sub.js] \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini
new file mode 100644
index 000000000..ada59d387
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini
@@ -0,0 +1,9 @@
+[DEFAULT]
+support-files =
+ support-file.txt
+ !/child/test_sub.js
+ !/child/another-file.sjs
+ !/child/data/**
+ !/does/not/exist.sjs
+
+[test_foo.js]
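The '!'-prefixed entries above reference support files that another manifest in the tree provides, and '!/does/not/exist.sjs' is deliberately left unmatched so the emitter tests later in this patch can reject it. A minimal sketch of that kind of cross-manifest check, assuming a plain set of known shared files rather than mozbuild's actual resolution logic:

# Illustrative sketch only: mozbuild's real shared-support resolution is
# richer (manifest-relative roots, install paths, richer glob handling).
def find_unmatched_shared(entries, known_shared):
    """Return '!'-prefixed support-files entries with no provider."""
    missing = []
    for entry in entries:
        if not entry.startswith('!'):
            continue  # plain entries resolve relative to this manifest
        pattern = entry.lstrip('!/')
        if pattern.endswith('/**'):
            prefix = pattern[:-3]
            if any(f.startswith(prefix) for f in known_shared):
                continue
        elif pattern in known_shared:
            continue
        missing.append(entry)
    return missing

entries = ['support-file.txt', '!/child/test_sub.js',
           '!/child/another-file.sjs', '!/child/data/**',
           '!/does/not/exist.sjs']
known = {'child/test_sub.js', 'child/another-file.sjs',
         'child/data/one.txt', 'child/data/two.txt'}
assert find_unmatched_shared(entries, known) == ['!/does/not/exist.sjs']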
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build
new file mode 100644
index 000000000..1c1d064ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
+BROWSER_CHROME_MANIFESTS += ['child/browser.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini
new file mode 100644
index 000000000..4f1335d6b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+ another-file.sjs
+ data/**
+
+[test_sub.js]
\ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini
new file mode 100644
index 000000000..a9860f3de
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini
@@ -0,0 +1,8 @@
+[DEFAULT]
+support-files =
+ support-file.txt
+ !/child/test_sub.js
+ !/child/another-file.sjs
+ !/child/data/**
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build
new file mode 100644
index 000000000..1c1d064ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['mochitest.ini']
+BROWSER_CHROME_MANIFESTS += ['child/browser.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build
new file mode 100644
index 000000000..281dee610
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ['test.ini']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini
new file mode 100644
index 000000000..caf391186
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+generated-files = does_not_exist
+
+[test_foo]
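The generated-files key lets a manifest name test files that are produced at build time rather than checked in; here 'does_not_exist' matches no build rule, which the emitter is expected to reject. A hedged sketch of the underlying cross-check, with invented helper names; the real emitter walks TestManifest objects:

def unmatched_generated(manifest_generated, build_outputs):
    """Entries declared generated but produced by no build rule."""
    return sorted(set(manifest_generated) - set(build_outputs))

assert unmatched_generated(['does_not_exist'], ['foo.c', 'bar.c']) == \
    ['does_not_exist']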
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-python-unit-test-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-python-unit-test-missing/moz.build
new file mode 100644
index 000000000..c9d769802
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-python-unit-test-missing/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+PYTHON_UNIT_TESTS += ['test_foo.py']
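Here 'test_foo.py' is intentionally absent, so reading this moz.build should fail validation. The check amounts to an existence test against the source directory; a minimal sketch, assuming a plain filesystem lookup (the real check raises SandboxValidationError inside mozbuild's emitter):

import os

def validate_python_unit_tests(srcdir, tests):
    # Sketch only; stands in for mozbuild's missing-file validation.
    for test in tests:
        path = os.path.join(srcdir, test)
        if not os.path.isfile(path):
            raise ValueError('Missing test file: %s' % path)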
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build
new file mode 100644
index 000000000..9d35a8ccc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build
@@ -0,0 +1,10 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+SharedLibrary('foo')
+SYMBOLS_FILE = '!foo.symbols'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build
new file mode 100644
index 000000000..fe227224d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build
@@ -0,0 +1,13 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+SharedLibrary('foo')
+SYMBOLS_FILE = '!foo.symbols'
+
+GENERATED_FILES += ['foo.symbols']
+GENERATED_FILES['foo.symbols'].script = 'foo.py'
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols
new file mode 100644
index 000000000..257cc5642
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols
@@ -0,0 +1 @@
+foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build
new file mode 100644
index 000000000..d69333ea4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build
@@ -0,0 +1,10 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+SharedLibrary('foo')
+SYMBOLS_FILE = 'foo.symbols'
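Taken with the two objdir fixtures above, this covers the SYMBOLS_FILE path convention: a bare 'foo.symbols' is read from the source directory, while '!foo.symbols' points into the objdir and must therefore be declared in GENERATED_FILES (the -missing-generated fixture omits that declaration and is expected to fail). A rough sketch of the prefix rule, assuming simplified joins; the authoritative behaviour is exercised in test_context.py later in this patch:

import posixpath

def resolve(path, srcdir, objdir):
    # Simplified model of the moz.build '!' path convention.
    if path.startswith('!'):
        return posixpath.join(objdir, path[1:])
    return posixpath.join(srcdir, path)

assert resolve('foo.symbols', '/src/a', '/obj/a') == '/src/a/foo.symbols'
assert resolve('!foo.symbols', '/src/a', '/obj/a') == '/obj/a/foo.symbols'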
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build
new file mode 100644
index 000000000..73045dd43
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ['regular']
+TEST_DIRS += ['test']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build
new file mode 100644
index 000000000..92ceb7f3b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['../../foo']
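A DIRS entry that climbs out of the top source directory, as '../../foo' does here, should be rejected during traversal. One way to express the containment check, sketched with POSIX paths and an invented helper name:

import os

def is_inside(topsrcdir, relative_dir, base):
    """True if base/relative_dir normalizes to a path under topsrcdir."""
    full = os.path.normpath(os.path.join(base, relative_dir))
    return full == topsrcdir or full.startswith(topsrcdir + os.sep)

top = os.path.normpath('/src')
assert is_inside(top, 'foo', '/src/sub')
assert not is_inside(top, '../../foo', '/src/sub')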
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build
new file mode 100644
index 000000000..ca1a429d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['../bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build
new file mode 100644
index 000000000..5fac39736
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build
new file mode 100644
index 000000000..f06edcd36
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['../foo']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build
new file mode 100644
index 000000000..ca1a429d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['../bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build
new file mode 100644
index 000000000..924f667d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo', 'bar']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build
new file mode 100644
index 000000000..182541efd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build
@@ -0,0 +1,2 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+DIRS = ['biz']
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build
new file mode 100644
index 000000000..924f667d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ['foo', 'bar']
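test_dirs_traversal_simple later in this patch expects this tree to be visited as '', 'foo', 'foo/biz', 'bar': a depth-first walk that descends into each DIRS entry before moving on to its sibling. A toy model of that ordering, assuming the tree is given as a plain dict:

def walk(tree, reldir=''):
    """Yield relative dirs depth-first, parent before children."""
    yield reldir
    for child in tree.get(reldir, []):
        sub = '%s/%s' % (reldir, child) if reldir else child
        for visited in walk(tree, sub):
            yield visited

tree = {'': ['foo', 'bar'], 'foo': ['biz']}
assert list(walk(tree)) == ['', 'foo', 'foo/biz', 'bar']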
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build
new file mode 100644
index 000000000..a3660222d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+Library('dummy')
+
+UNIFIED_SOURCES += [
+ 'bar.cxx',
+ 'foo.cpp',
+ 'quux.cc',
+]
+
+UNIFIED_SOURCES += [
+ 'objc1.mm',
+ 'objc2.mm',
+]
+
+UNIFIED_SOURCES += [
+ 'c1.c',
+ 'c2.c',
+]
+
+FILES_PER_UNIFIED_FILE = 1
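With FILES_PER_UNIFIED_FILE = 1 every source gets its own unified file, which effectively disables unification; the sibling unified-sources fixture uses 32 to get real batching. The mechanism reduces to fixed-size chunking within each suffix group; a sketch, not mozbuild's actual implementation:

def unified_chunks(sources, files_per_unified_file):
    """Split one suffix group into chunks of at most N files each."""
    for i in range(0, len(sources), files_per_unified_file):
        yield sources[i:i + files_per_unified_file]

cpp = ['bar.cxx', 'foo.cpp', 'quux.cc']
assert list(unified_chunks(cpp, 1)) == [['bar.cxx'], ['foo.cpp'], ['quux.cc']]
assert list(unified_chunks(cpp, 32)) == [cpp]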
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build
new file mode 100644
index 000000000..5d1d89fb4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ '''Template for libraries.'''
+ LIBRARY_NAME = name
+
+Library('dummy')
+
+UNIFIED_SOURCES += [
+ 'bar.cxx',
+ 'foo.cpp',
+ 'quux.cc',
+]
+
+UNIFIED_SOURCES += [
+ 'objc1.mm',
+ 'objc2.mm',
+]
+
+UNIFIED_SOURCES += [
+ 'c1.c',
+ 'c2.c',
+]
+
+FILES_PER_UNIFIED_FILE = 32
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/use-yasm/moz.build b/python/mozbuild/mozbuild/test/frontend/data/use-yasm/moz.build
new file mode 100644
index 000000000..11f45953d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/use-yasm/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+USE_YASM = True
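test_use_yasm later in this patch exercises both outcomes for this fixture: without a YASM substitution the reader raises 'yasm is not available', and with one it emits assembler passthru variables. A sketch of that branch, reusing the exact values from the test; the real logic lives in mozbuild's emitter:

def yasm_passthru(substs):
    # Mirrors the expectations in test_use_yasm further down.
    if not substs.get('YASM'):
        raise ValueError('yasm is not available')
    return {'AS': substs['YASM'],
            'ASFLAGS': substs.get('YASM_ASFLAGS', ''),
            'AS_DASH_C_FLAG': ''}

assert yasm_passthru({'YASM': 'yasm', 'YASM_ASFLAGS': '-foo'}) == \
    {'AS': 'yasm', 'ASFLAGS': '-foo', 'AS_DASH_C_FLAG': ''}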
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build
new file mode 100644
index 000000000..e85e6ff5d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build
@@ -0,0 +1,25 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+
+NO_VISIBILITY_FLAGS = True
+
+DELAYLOAD_DLLS = ['foo.dll', 'bar.dll']
+
+RCFILE = 'foo.rc'
+RESFILE = 'bar.res'
+RCINCLUDE = 'bar.rc'
+DEFFILE = 'baz.def'
+
+CFLAGS += ['-fno-exceptions', '-w']
+CXXFLAGS += ['-fcxx-exceptions', '-include foo.h']
+LDFLAGS += ['-framework Foo', '-x']
+HOST_CFLAGS += ['-funroll-loops', '-wall']
+HOST_CXXFLAGS += ['-funroll-loops-harder', '-wall-day-everyday']
+WIN32_EXE_LDFLAGS += ['-subsystem:console']
+
+DISABLE_STL_WRAPPING = True
+
+ALLOW_COMPILER_WARNINGS = True
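test_variable_passthru below pins down how these sandbox variables surface: DIST_INSTALL = False becomes NO_DIST_INSTALL, NO_VISIBILITY_FLAGS empties VISIBILITY_FLAGS, and DELAYLOAD_DLLS is folded into MOZBUILD_LDFLAGS as -DELAYLOAD: arguments. A sketch of just that folding step, taken from the values the test asserts:

def delayload_ldflags(dlls):
    """Turn DELAYLOAD_DLLS entries into linker flags (per the test below)."""
    return ['-DELAYLOAD:%s' % dll for dll in dlls]

assert delayload_ldflags(['foo.dll', 'bar.dll']) == \
    ['-DELAYLOAD:foo.dll', '-DELAYLOAD:bar.dll']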
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build
new file mode 100644
index 000000000..60f061d5c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = 'xpidl_module'
diff --git a/python/mozbuild/mozbuild/test/frontend/test_context.py b/python/mozbuild/mozbuild/test/frontend/test_context.py
new file mode 100644
index 000000000..070cfad67
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_context.py
@@ -0,0 +1,721 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ AbsolutePath,
+ Context,
+ ContextDerivedTypedHierarchicalStringList,
+ ContextDerivedTypedList,
+ ContextDerivedTypedListWithItems,
+ ContextDerivedTypedRecord,
+ Files,
+ FUNCTIONS,
+ ObjDirPath,
+ Path,
+ SourcePath,
+ SPECIAL_VARIABLES,
+ SUBCONTEXTS,
+ VARIABLES,
+)
+
+from mozbuild.util import StrictOrderingOnAppendListWithFlagsFactory
+from mozpack import path as mozpath
+
+
+class TestContext(unittest.TestCase):
+ def test_defaults(self):
+ test = Context({
+ 'foo': (int, int, ''),
+ 'bar': (bool, bool, ''),
+ 'baz': (dict, dict, ''),
+ })
+
+ self.assertEqual(test.keys(), [])
+
+ self.assertEqual(test['foo'], 0)
+
+ self.assertEqual(set(test.keys()), { 'foo' })
+
+ self.assertEqual(test['bar'], False)
+
+ self.assertEqual(set(test.keys()), { 'foo', 'bar' })
+
+ self.assertEqual(test['baz'], {})
+
+ self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' })
+
+ with self.assertRaises(KeyError):
+ test['qux']
+
+ self.assertEqual(set(test.keys()), { 'foo', 'bar', 'baz' })
+
+ def test_type_check(self):
+ test = Context({
+ 'foo': (int, int, ''),
+ 'baz': (dict, list, ''),
+ })
+
+ test['foo'] = 5
+
+ self.assertEqual(test['foo'], 5)
+
+ with self.assertRaises(ValueError):
+ test['foo'] = {}
+
+ self.assertEqual(test['foo'], 5)
+
+ with self.assertRaises(KeyError):
+ test['bar'] = True
+
+ test['baz'] = [('a', 1), ('b', 2)]
+
+ self.assertEqual(test['baz'], { 'a': 1, 'b': 2 })
+
+ def test_update(self):
+ test = Context({
+ 'foo': (int, int, ''),
+ 'bar': (bool, bool, ''),
+ 'baz': (dict, list, ''),
+ })
+
+ self.assertEqual(test.keys(), [])
+
+ with self.assertRaises(ValueError):
+ test.update(bar=True, foo={})
+
+ self.assertEqual(test.keys(), [])
+
+ test.update(bar=True, foo=1)
+
+ self.assertEqual(set(test.keys()), { 'foo', 'bar' })
+ self.assertEqual(test['foo'], 1)
+ self.assertEqual(test['bar'], True)
+
+ test.update([('bar', False), ('foo', 2)])
+ self.assertEqual(test['foo'], 2)
+ self.assertEqual(test['bar'], False)
+
+ test.update([('foo', 0), ('baz', { 'a': 1, 'b': 2 })])
+ self.assertEqual(test['foo'], 0)
+ self.assertEqual(test['baz'], { 'a': 1, 'b': 2 })
+
+ test.update([('foo', 42), ('baz', [('c', 3), ('d', 4)])])
+ self.assertEqual(test['foo'], 42)
+ self.assertEqual(test['baz'], { 'c': 3, 'd': 4 })
+
+ def test_context_paths(self):
+ test = Context()
+
+ # Newly created context has no paths.
+ self.assertIsNone(test.main_path)
+ self.assertIsNone(test.current_path)
+ self.assertEqual(test.all_paths, set())
+ self.assertEqual(test.source_stack, [])
+
+ foo = os.path.abspath('foo')
+ test.add_source(foo)
+
+ # Adding the first source makes it the main and current path.
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([foo]))
+ self.assertEqual(test.source_stack, [foo])
+
+ bar = os.path.abspath('bar')
+ test.add_source(bar)
+
+        # Adding the second source leaves the main and current paths alone.
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo]))
+ self.assertEqual(test.source_stack, [foo])
+
+ qux = os.path.abspath('qux')
+ test.push_source(qux)
+
+ # Pushing a source makes it the current path
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, qux)
+ self.assertEqual(test.all_paths, set([bar, foo, qux]))
+ self.assertEqual(test.source_stack, [foo, qux])
+
+ hoge = os.path.abspath('hoge')
+ test.push_source(hoge)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ fuga = os.path.abspath('fuga')
+
+ # Adding a source after pushing doesn't change the source stack
+ test.add_source(fuga)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ # Adding a source twice doesn't change anything
+ test.add_source(qux)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ last = test.pop_source()
+
+ # Popping a source returns the last pushed one, not the last added one.
+ self.assertEqual(last, hoge)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, qux)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux])
+
+ last = test.pop_source()
+ self.assertEqual(last, qux)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo])
+
+ # Popping the main path is allowed.
+ last = test.pop_source()
+ self.assertEqual(last, foo)
+ self.assertEqual(test.main_path, foo)
+ self.assertIsNone(test.current_path)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [])
+
+ # Popping past the main path asserts.
+ with self.assertRaises(AssertionError):
+ test.pop_source()
+
+ # Pushing after the main path was popped asserts.
+ with self.assertRaises(AssertionError):
+ test.push_source(foo)
+
+ test = Context()
+ test.push_source(foo)
+ test.push_source(bar)
+
+ # Pushing the same file twice is allowed.
+ test.push_source(bar)
+ test.push_source(foo)
+ self.assertEqual(last, foo)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo]))
+ self.assertEqual(test.source_stack, [foo, bar, bar, foo])
+
+ def test_context_dirs(self):
+ class Config(object): pass
+ config = Config()
+ config.topsrcdir = mozpath.abspath(os.curdir)
+ config.topobjdir = mozpath.abspath('obj')
+ test = Context(config=config)
+ foo = mozpath.abspath('foo')
+ test.push_source(foo)
+
+ self.assertEqual(test.srcdir, config.topsrcdir)
+ self.assertEqual(test.relsrcdir, '')
+ self.assertEqual(test.objdir, config.topobjdir)
+ self.assertEqual(test.relobjdir, '')
+
+ foobar = os.path.abspath('foo/bar')
+ test.push_source(foobar)
+ self.assertEqual(test.srcdir, mozpath.join(config.topsrcdir, 'foo'))
+ self.assertEqual(test.relsrcdir, 'foo')
+ self.assertEqual(test.objdir, config.topobjdir)
+ self.assertEqual(test.relobjdir, '')
+
+
+class TestSymbols(unittest.TestCase):
+ def _verify_doc(self, doc):
+ # Documentation should be of the format:
+ # """SUMMARY LINE
+ #
+ # EXTRA PARAGRAPHS
+ # """
+
+ self.assertNotIn('\r', doc)
+
+ lines = doc.split('\n')
+
+ # No trailing whitespace.
+ for line in lines[0:-1]:
+ self.assertEqual(line, line.rstrip())
+
+ self.assertGreater(len(lines), 0)
+ self.assertGreater(len(lines[0].strip()), 0)
+
+ # Last line should be empty.
+ self.assertEqual(lines[-1].strip(), '')
+
+ def test_documentation_formatting(self):
+ for typ, inp, doc in VARIABLES.values():
+ self._verify_doc(doc)
+
+ for attr, args, doc in FUNCTIONS.values():
+ self._verify_doc(doc)
+
+ for func, typ, doc in SPECIAL_VARIABLES.values():
+ self._verify_doc(doc)
+
+ for name, cls in SUBCONTEXTS.items():
+ self._verify_doc(cls.__doc__)
+
+ for name, v in cls.VARIABLES.items():
+ self._verify_doc(v[2])
+
+
+class TestPaths(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ class Config(object): pass
+ cls.config = config = Config()
+ config.topsrcdir = mozpath.abspath(os.curdir)
+ config.topobjdir = mozpath.abspath('obj')
+ config.external_source_dir = None
+
+ def test_path(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, 'bar', 'moz.build'))
+
+ path1 = Path(ctxt1, 'qux')
+ self.assertIsInstance(path1, SourcePath)
+ self.assertEqual(path1, 'qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topsrcdir, 'foo', 'qux'))
+
+ path2 = Path(ctxt2, '../foo/qux')
+ self.assertIsInstance(path2, SourcePath)
+ self.assertEqual(path2, '../foo/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topsrcdir, 'foo', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ self.assertEqual(path1.join('../../bar/qux').full_path,
+ mozpath.join(config.topsrcdir, 'bar', 'qux'))
+
+ path1 = Path(ctxt1, '/qux/qux')
+ self.assertIsInstance(path1, SourcePath)
+ self.assertEqual(path1, '/qux/qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topsrcdir, 'qux', 'qux'))
+
+ path2 = Path(ctxt2, '/qux/qux')
+ self.assertIsInstance(path2, SourcePath)
+ self.assertEqual(path2, '/qux/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topsrcdir, 'qux', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, '!qux')
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, '!qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topobjdir, 'foo', 'qux'))
+
+ path2 = Path(ctxt2, '!../foo/qux')
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, '!../foo/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'foo', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, '!/qux/qux')
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, '!/qux/qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ path2 = Path(ctxt2, '!/qux/qux')
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, '!/qux/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, path1)
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, '!/qux/qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ path2 = Path(ctxt2, path2)
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, '!/qux/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(path1)
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, '!/qux/qux')
+ self.assertEqual(path1.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ path2 = Path(path2)
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, '!/qux/qux')
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ self.assertEqual(path1, path2)
+
+ def test_source_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+
+ path = SourcePath(ctxt, 'qux')
+ self.assertEqual(path, 'qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topsrcdir, 'foo', 'qux'))
+ self.assertEqual(path.translated,
+ mozpath.join(config.topobjdir, 'foo', 'qux'))
+
+ path = SourcePath(ctxt, '../bar/qux')
+ self.assertEqual(path, '../bar/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topsrcdir, 'bar', 'qux'))
+ self.assertEqual(path.translated,
+ mozpath.join(config.topobjdir, 'bar', 'qux'))
+
+ path = SourcePath(ctxt, '/qux/qux')
+ self.assertEqual(path, '/qux/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topsrcdir, 'qux', 'qux'))
+ self.assertEqual(path.translated,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ with self.assertRaises(ValueError):
+ SourcePath(ctxt, '!../bar/qux')
+
+ with self.assertRaises(ValueError):
+ SourcePath(ctxt, '!/qux/qux')
+
+ path = SourcePath(path)
+ self.assertIsInstance(path, SourcePath)
+ self.assertEqual(path, '/qux/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topsrcdir, 'qux', 'qux'))
+ self.assertEqual(path.translated,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ path = Path(path)
+ self.assertIsInstance(path, SourcePath)
+
+ def test_objdir_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+
+ path = ObjDirPath(ctxt, '!qux')
+ self.assertEqual(path, '!qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topobjdir, 'foo', 'qux'))
+
+ path = ObjDirPath(ctxt, '!../bar/qux')
+ self.assertEqual(path, '!../bar/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topobjdir, 'bar', 'qux'))
+
+ path = ObjDirPath(ctxt, '!/qux/qux')
+ self.assertEqual(path, '!/qux/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ with self.assertRaises(ValueError):
+ path = ObjDirPath(ctxt, '../bar/qux')
+
+ with self.assertRaises(ValueError):
+ path = ObjDirPath(ctxt, '/qux/qux')
+
+ path = ObjDirPath(path)
+ self.assertIsInstance(path, ObjDirPath)
+ self.assertEqual(path, '!/qux/qux')
+ self.assertEqual(path.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ path = Path(path)
+ self.assertIsInstance(path, ObjDirPath)
+
+ def test_absolute_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+
+ path = AbsolutePath(ctxt, '%/qux')
+ self.assertEqual(path, '%/qux')
+ self.assertEqual(path.full_path, '/qux')
+
+ with self.assertRaises(ValueError):
+ path = AbsolutePath(ctxt, '%qux')
+
+ def test_path_with_mixed_contexts(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, 'bar', 'moz.build'))
+
+ path1 = Path(ctxt1, 'qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, 'qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topsrcdir, 'foo', 'qux'))
+
+ path1 = Path(ctxt1, '../bar/qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, '../bar/qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topsrcdir, 'bar', 'qux'))
+
+ path1 = Path(ctxt1, '/qux/qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, '/qux/qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topsrcdir, 'qux', 'qux'))
+
+ path1 = Path(ctxt1, '!qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, '!qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'foo', 'qux'))
+
+ path1 = Path(ctxt1, '!../bar/qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, '!../bar/qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'bar', 'qux'))
+
+ path1 = Path(ctxt1, '!/qux/qux')
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, '!/qux/qux')
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path,
+ mozpath.join(config.topobjdir, 'qux', 'qux'))
+
+ def test_path_typed_list(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, 'bar', 'moz.build'))
+
+ paths = [
+ '!../bar/qux',
+ '!/qux/qux',
+ '!qux',
+ '../bar/qux',
+ '/qux/qux',
+ 'qux',
+ ]
+
+ MyList = ContextDerivedTypedList(Path)
+ l = MyList(ctxt1)
+ l += paths
+
+ for p_str, p_path in zip(paths, l):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(p_path.join('foo'),
+ Path(ctxt1, mozpath.join(p_str, 'foo')))
+
+ l2 = MyList(ctxt2)
+ l2 += paths
+
+ for p_str, p_path in zip(paths, l2):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt2, p_str))
+
+ # Assigning with Paths from another context doesn't rebase them
+ l2 = MyList(ctxt2)
+ l2 += l
+
+ for p_str, p_path in zip(paths, l2):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+
+ MyListWithFlags = ContextDerivedTypedListWithItems(
+ Path, StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': bool,
+ }))
+ l = MyListWithFlags(ctxt1)
+ l += paths
+
+ for p in paths:
+ l[p].foo = True
+
+ for p_str, p_path in zip(paths, l):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(l[p_str].foo, True)
+ self.assertEqual(l[p_path].foo, True)
+
+ def test_path_typed_hierarchy_list(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, 'bar', 'moz.build'))
+
+ paths = [
+ '!../bar/qux',
+ '!/qux/qux',
+ '!qux',
+ '../bar/qux',
+ '/qux/qux',
+ 'qux',
+ ]
+
+ MyList = ContextDerivedTypedHierarchicalStringList(Path)
+ l = MyList(ctxt1)
+ l += paths
+ l.subdir += paths
+
+ for _, files in l.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(p_path.join('foo'),
+ Path(ctxt1, mozpath.join(p_str, 'foo')))
+
+ l2 = MyList(ctxt2)
+ l2 += paths
+ l2.subdir += paths
+
+ for _, files in l2.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt2, p_str))
+
+ # Assigning with Paths from another context doesn't rebase them
+ l2 = MyList(ctxt2)
+ l2 += l
+
+ for _, files in l2.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+
+
+class TestTypedRecord(unittest.TestCase):
+
+ def test_fields(self):
+ T = ContextDerivedTypedRecord(('field1', unicode),
+ ('field2', list))
+ inst = T(None)
+ self.assertEqual(inst.field1, '')
+ self.assertEqual(inst.field2, [])
+
+ inst.field1 = 'foo'
+ inst.field2 += ['bar']
+
+ self.assertEqual(inst.field1, 'foo')
+ self.assertEqual(inst.field2, ['bar'])
+
+ with self.assertRaises(AttributeError):
+ inst.field3 = []
+
+ def test_coercion(self):
+ T = ContextDerivedTypedRecord(('field1', unicode),
+ ('field2', list))
+ inst = T(None)
+ inst.field1 = 3
+ inst.field2 += ('bar',)
+ self.assertEqual(inst.field1, '3')
+ self.assertEqual(inst.field2, ['bar'])
+
+ with self.assertRaises(TypeError):
+ inst.field2 = object()
+
+
+class TestFiles(unittest.TestCase):
+ def test_aggregate_empty(self):
+ c = Context({})
+
+ files = {'moz.build': Files(c, pattern='**')}
+
+ self.assertEqual(Files.aggregate(files), {
+ 'bug_component_counts': [],
+ 'recommended_bug_component': None,
+ })
+
+ def test_single_bug_component(self):
+ c = Context({})
+ f = Files(c, pattern='**')
+ f['BUG_COMPONENT'] = (u'Product1', u'Component1')
+
+ files = {'moz.build': f}
+ self.assertEqual(Files.aggregate(files), {
+ 'bug_component_counts': [((u'Product1', u'Component1'), 1)],
+ 'recommended_bug_component': (u'Product1', u'Component1'),
+ })
+
+ def test_multiple_bug_components(self):
+ c = Context({})
+ f1 = Files(c, pattern='**')
+ f1['BUG_COMPONENT'] = (u'Product1', u'Component1')
+
+ f2 = Files(c, pattern='**')
+ f2['BUG_COMPONENT'] = (u'Product2', u'Component2')
+
+ files = {'a': f1, 'b': f2, 'c': f1}
+ self.assertEqual(Files.aggregate(files), {
+ 'bug_component_counts': [
+ ((u'Product1', u'Component1'), 2),
+ ((u'Product2', u'Component2'), 1),
+ ],
+ 'recommended_bug_component': (u'Product1', u'Component1'),
+ })
+
+ def test_no_recommended_bug_component(self):
+ """If there is no clear count winner, we don't recommend a bug component."""
+ c = Context({})
+ f1 = Files(c, pattern='**')
+ f1['BUG_COMPONENT'] = (u'Product1', u'Component1')
+
+ f2 = Files(c, pattern='**')
+ f2['BUG_COMPONENT'] = (u'Product2', u'Component2')
+
+ files = {'a': f1, 'b': f2}
+ self.assertEqual(Files.aggregate(files), {
+ 'bug_component_counts': [
+ ((u'Product1', u'Component1'), 1),
+ ((u'Product2', u'Component2'), 1),
+ ],
+ 'recommended_bug_component': None,
+ })
+
+
+if __name__ == '__main__':
+ main()
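The path tests above encode the full prefix grammar: a bare name is relative to the current moz.build directory, '/' anchors at topsrcdir, '!' switches to the objdir with the same bare-versus-'/' split, and '%' marks an absolute filesystem path. Condensed into one sketch, assuming POSIX-style joins and no real Path objects:

import posixpath

def full_path(p, topsrcdir, topobjdir, reldir):
    # Condensed restatement of TestPaths above; simplified on purpose.
    if p.startswith('%'):
        return p[1:]                                  # absolute path
    top, rest = (topobjdir, p[1:]) if p.startswith('!') else (topsrcdir, p)
    if rest.startswith('/'):
        return posixpath.join(top, rest[1:])          # top-dir relative
    return posixpath.normpath(posixpath.join(top, reldir, rest))

assert full_path('qux', '/s', '/o', 'foo') == '/s/foo/qux'
assert full_path('../bar/qux', '/s', '/o', 'foo') == '/s/bar/qux'
assert full_path('/qux/qux', '/s', '/o', 'foo') == '/s/qux/qux'
assert full_path('!qux', '/s', '/o', 'foo') == '/o/foo/qux'
assert full_path('!/qux/qux', '/s', '/o', 'foo') == '/o/qux/qux'
assert full_path('%/qux', '/s', '/o', 'foo') == '/qux'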
diff --git a/python/mozbuild/mozbuild/test/frontend/test_emitter.py b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
new file mode 100644
index 000000000..6ac4e0aac
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
@@ -0,0 +1,1172 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ ObjDirPath,
+ Path,
+)
+from mozbuild.frontend.data import (
+ AndroidResDirs,
+ BrandingFiles,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ Defines,
+ DirectoryTraversal,
+ Exports,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ GeneratedSources,
+ HostDefines,
+ HostSources,
+ IPDLFile,
+ JARManifest,
+ LinkageMultipleRustLibrariesError,
+ LocalInclude,
+ Program,
+ RustLibrary,
+ SdkFiles,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestHarnessFiles,
+ TestManifest,
+ UnifiedSources,
+ VariablePassthru,
+)
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import (
+ BuildReader,
+ BuildReaderError,
+ SandboxValidationError,
+)
+from mozpack.chrome import manifest
+
+from mozbuild.test.common import MockConfig
+
+import mozpack.path as mozpath
+
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, 'data')
+
+
+class TestEmitterBasic(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZ_OBJDIR', None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def reader(self, name, enable_tests=False, extra_substs=None):
+ substs = dict(
+ ENABLE_TESTS='1' if enable_tests else '',
+ BIN_SUFFIX='.prog',
+ OS_TARGET='WINNT',
+ COMPILE_ENVIRONMENT='1',
+ )
+ if extra_substs:
+ substs.update(extra_substs)
+ config = MockConfig(mozpath.join(data_path, name), extra_substs=substs)
+
+ return BuildReader(config)
+
+ def read_topsrcdir(self, reader, filter_common=True):
+ emitter = TreeMetadataEmitter(reader.config)
+ objs = list(emitter.emit(reader.read_topsrcdir()))
+ self.assertGreater(len(objs), 0)
+
+ filtered = []
+ for obj in objs:
+ if filter_common and isinstance(obj, DirectoryTraversal):
+ continue
+
+ filtered.append(obj)
+
+ return filtered
+
+ def test_dirs_traversal_simple(self):
+ reader = self.reader('traversal-simple')
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 4)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+ self.assertTrue(os.path.isabs(o.context_main_path))
+ self.assertEqual(len(o.context_all_paths), 1)
+
+ reldirs = [o.relativedir for o in objs]
+ self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar'])
+
+ dirs = [[d.full_path for d in o.dirs] for o in objs]
+ self.assertEqual(dirs, [
+ [
+ mozpath.join(reader.config.topsrcdir, 'foo'),
+ mozpath.join(reader.config.topsrcdir, 'bar')
+ ], [
+ mozpath.join(reader.config.topsrcdir, 'foo', 'biz')
+ ], [], []])
+
+ def test_traversal_all_vars(self):
+ reader = self.reader('traversal-all-vars')
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 2)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+
+ reldirs = set([o.relativedir for o in objs])
+ self.assertEqual(reldirs, set(['', 'regular']))
+
+ for o in objs:
+ reldir = o.relativedir
+
+ if reldir == '':
+ self.assertEqual([d.full_path for d in o.dirs], [
+ mozpath.join(reader.config.topsrcdir, 'regular')])
+
+ def test_traversal_all_vars_enable_tests(self):
+ reader = self.reader('traversal-all-vars', enable_tests=True)
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 3)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+
+ reldirs = set([o.relativedir for o in objs])
+ self.assertEqual(reldirs, set(['', 'regular', 'test']))
+
+ for o in objs:
+ reldir = o.relativedir
+
+ if reldir == '':
+ self.assertEqual([d.full_path for d in o.dirs], [
+ mozpath.join(reader.config.topsrcdir, 'regular'),
+ mozpath.join(reader.config.topsrcdir, 'test')])
+
+ def test_config_file_substitution(self):
+ reader = self.reader('config-file-substitution')
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 2)
+
+ self.assertIsInstance(objs[0], ConfigFileSubstitution)
+ self.assertIsInstance(objs[1], ConfigFileSubstitution)
+
+ topobjdir = mozpath.abspath(reader.config.topobjdir)
+ self.assertEqual(objs[0].relpath, 'foo')
+ self.assertEqual(mozpath.normpath(objs[0].output_path),
+ mozpath.normpath(mozpath.join(topobjdir, 'foo')))
+ self.assertEqual(mozpath.normpath(objs[1].output_path),
+ mozpath.normpath(mozpath.join(topobjdir, 'bar')))
+
+ def test_variable_passthru(self):
+ reader = self.reader('variable-passthru')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], VariablePassthru)
+
+ wanted = {
+ 'ALLOW_COMPILER_WARNINGS': True,
+ 'DISABLE_STL_WRAPPING': True,
+ 'NO_DIST_INSTALL': True,
+ 'VISIBILITY_FLAGS': '',
+ 'RCFILE': 'foo.rc',
+ 'RESFILE': 'bar.res',
+ 'RCINCLUDE': 'bar.rc',
+ 'DEFFILE': 'baz.def',
+ 'MOZBUILD_CFLAGS': ['-fno-exceptions', '-w'],
+ 'MOZBUILD_CXXFLAGS': ['-fcxx-exceptions', '-include foo.h'],
+ 'MOZBUILD_LDFLAGS': ['-framework Foo', '-x', '-DELAYLOAD:foo.dll',
+ '-DELAYLOAD:bar.dll'],
+ 'MOZBUILD_HOST_CFLAGS': ['-funroll-loops', '-wall'],
+ 'MOZBUILD_HOST_CXXFLAGS': ['-funroll-loops-harder',
+ '-wall-day-everyday'],
+ 'WIN32_EXE_LDFLAGS': ['-subsystem:console'],
+ }
+
+ variables = objs[0].variables
+ maxDiff = self.maxDiff
+ self.maxDiff = None
+ self.assertEqual(wanted, variables)
+ self.maxDiff = maxDiff
+
+ def test_use_yasm(self):
+ # When yasm is not available, this should raise.
+ reader = self.reader('use-yasm')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'yasm is not available'):
+ self.read_topsrcdir(reader)
+
+ # When yasm is available, this should work.
+ reader = self.reader('use-yasm',
+ extra_substs=dict(
+ YASM='yasm',
+ YASM_ASFLAGS='-foo',
+ ))
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], VariablePassthru)
+ maxDiff = self.maxDiff
+ self.maxDiff = None
+ self.assertEqual(objs[0].variables,
+ {'AS': 'yasm',
+ 'ASFLAGS': '-foo',
+ 'AS_DASH_C_FLAG': ''})
+ self.maxDiff = maxDiff
+
+
+ def test_generated_files(self):
+ reader = self.reader('generated-files')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+
+ expected = ['bar.c', 'foo.c', ('xpidllex.py', 'xpidlyacc.py'), ]
+ for o, f in zip(objs, expected):
+ expected_filename = f if isinstance(f, tuple) else (f,)
+ self.assertEqual(o.outputs, expected_filename)
+ self.assertEqual(o.script, None)
+ self.assertEqual(o.method, None)
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_method_names(self):
+ reader = self.reader('generated-files-method-names')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+
+ expected = ['bar.c', 'foo.c']
+ expected_method_names = ['make_bar', 'main']
+ for o, expected_filename, expected_method in zip(objs, expected, expected_method_names):
+ self.assertEqual(o.outputs, (expected_filename,))
+ self.assertEqual(o.method, expected_method)
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_absolute_script(self):
+ reader = self.reader('generated-files-absolute-script')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+
+ o = objs[0]
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertEqual(o.outputs, ('bar.c',))
+ self.assertRegexpMatches(o.script, 'script.py$')
+ self.assertEqual(o.method, 'make_bar')
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_no_script(self):
+ reader = self.reader('generated-files-no-script')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Script for generating bar.c does not exist'):
+ self.read_topsrcdir(reader)
+
+ def test_generated_files_no_inputs(self):
+ reader = self.reader('generated-files-no-inputs')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Input for generating foo.c does not exist'):
+ self.read_topsrcdir(reader)
+
+ def test_generated_files_no_python_script(self):
+ reader = self.reader('generated-files-no-python-script')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Script for generating bar.c does not end in .py'):
+ self.read_topsrcdir(reader)
+
+ def test_exports(self):
+ reader = self.reader('exports')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], Exports)
+
+ expected = [
+ ('', ['foo.h', 'bar.h', 'baz.h']),
+ ('mozilla', ['mozilla1.h', 'mozilla2.h']),
+ ('mozilla/dom', ['dom1.h', 'dom2.h', 'dom3.h']),
+ ('mozilla/gfx', ['gfx.h']),
+ ('nspr/private', ['pprio.h', 'pprthred.h']),
+ ('vpx', ['mem.h', 'mem2.h']),
+ ]
+ for (expect_path, expect_headers), (actual_path, actual_headers) in \
+ zip(expected, [(path, list(seq)) for path, seq in objs[0].files.walk()]):
+ self.assertEqual(expect_path, actual_path)
+ self.assertEqual(expect_headers, actual_headers)
+
+ def test_exports_missing(self):
+ '''
+        Missing files in EXPORTS are an error.
+ '''
+ reader = self.reader('exports-missing')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'File listed in EXPORTS does not exist:'):
+ self.read_topsrcdir(reader)
+
+ def test_exports_missing_generated(self):
+ '''
+ An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
+ '''
+ reader = self.reader('exports-missing-generated')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Objdir file listed in EXPORTS not in GENERATED_FILES:'):
+ self.read_topsrcdir(reader)
+
+ def test_exports_generated(self):
+ reader = self.reader('exports-generated')
+ objs = self.read_topsrcdir(reader)
+
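+        # In moz.build path syntax a leading '!' marks an objdir path, i.e. a
+        # file produced by the build (here via GENERATED_FILES) rather than a
+        # file present in the source tree.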
+ self.assertEqual(len(objs), 2)
+ self.assertIsInstance(objs[0], GeneratedFile)
+ self.assertIsInstance(objs[1], Exports)
+ exports = [(path, list(seq)) for path, seq in objs[1].files.walk()]
+ self.assertEqual(exports,
+ [('', ['foo.h']),
+ ('mozilla', ['mozilla1.h', '!mozilla2.h'])])
+ path, files = exports[1]
+ self.assertIsInstance(files[1], ObjDirPath)
+
+ def test_test_harness_files(self):
+ reader = self.reader('test-harness-files')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], TestHarnessFiles)
+
+ expected = {
+ 'mochitest': ['runtests.py', 'utils.py'],
+ 'testing/mochitest': ['mochitest.py', 'mochitest.ini'],
+ }
+
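+        # Hypothetical sketch of the fixture's declarations:
+        #
+        #   TEST_HARNESS_FILES.mochitest += ['runtests.py', 'utils.py']
+        #   TEST_HARNESS_FILES.testing.mochitest += ['mochitest.py',
+        #                                            'mochitest.ini']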
+ for path, strings in objs[0].files.walk():
+ self.assertTrue(path in expected)
+ basenames = sorted(mozpath.basename(s) for s in strings)
+ self.assertEqual(sorted(expected[path]), basenames)
+
+ def test_test_harness_files_root(self):
+ reader = self.reader('test-harness-files-root')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Cannot install files to the root of TEST_HARNESS_FILES'):
+ self.read_topsrcdir(reader)
+
+ def test_branding_files(self):
+ reader = self.reader('branding-files')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], BrandingFiles)
+
+ files = objs[0].files
+
+ self.assertEqual(files._strings, ['bar.ico', 'baz.png', 'foo.xpm'])
+
+ self.assertIn('icons', files._children)
+ icons = files._children['icons']
+
+ self.assertEqual(icons._strings, ['quux.icns'])
+
+ def test_sdk_files(self):
+ reader = self.reader('sdk-files')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], SdkFiles)
+
+ files = objs[0].files
+
+ self.assertEqual(files._strings, ['bar.ico', 'baz.png', 'foo.xpm'])
+
+ self.assertIn('icons', files._children)
+ icons = files._children['icons']
+
+ self.assertEqual(icons._strings, ['quux.icns'])
+
+ def test_program(self):
+ reader = self.reader('program')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 3)
+ self.assertIsInstance(objs[0], Program)
+ self.assertIsInstance(objs[1], SimpleProgram)
+ self.assertIsInstance(objs[2], SimpleProgram)
+
+ self.assertEqual(objs[0].program, 'test_program.prog')
+ self.assertEqual(objs[1].program, 'test_program1.prog')
+ self.assertEqual(objs[2].program, 'test_program2.prog')
+
+ def test_test_manifest_missing_manifest(self):
+ """A missing manifest file should result in an error."""
+ reader = self.reader('test-manifest-missing-manifest')
+
+ with self.assertRaisesRegexp(BuildReaderError, 'IOError: Missing files'):
+ self.read_topsrcdir(reader)
+
+ def test_empty_test_manifest_rejected(self):
+ """A test manifest without any entries is rejected."""
+ reader = self.reader('test-manifest-empty')
+
+ with self.assertRaisesRegexp(SandboxValidationError, 'Empty test manifest'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_just_support_files(self):
+ """A test manifest with no tests but support-files is not supported."""
+ reader = self.reader('test-manifest-just-support')
+
+ with self.assertRaisesRegexp(SandboxValidationError, 'Empty test manifest'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_dupe_support_files(self):
+ """A test manifest with dupe support-files in a single test is not
+ supported.
+ """
+ reader = self.reader('test-manifest-dupes')
+
+ with self.assertRaisesRegexp(SandboxValidationError, 'bar.js appears multiple times '
+ 'in a test manifest under a support-files field, please omit the duplicate entry.'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_absolute_support_files(self):
+ """Support files starting with '/' are placed relative to the install root"""
+ reader = self.reader('test-manifest-absolute-support')
+
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 1)
+ o = objs[0]
+ self.assertEqual(len(o.installs), 3)
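+        # Note the '../.well-known' entry: a support file rooted with '/' is
+        # placed relative to the install root, above this manifest's own
+        # install prefix.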
+ expected = [
+ mozpath.normpath(mozpath.join(o.install_prefix, "../.well-known/foo.txt")),
+ mozpath.join(o.install_prefix, "absolute-support.ini"),
+ mozpath.join(o.install_prefix, "test_file.js"),
+ ]
+ paths = sorted([v[0] for v in o.installs.values()])
+ self.assertEqual(paths, expected)
+
+ @unittest.skip('Bug 1304316 - Items in the second set but not the first')
+ def test_test_manifest_shared_support_files(self):
+ """Support files starting with '!' are given separate treatment, so their
+ installation can be resolved when running tests.
+ """
+ reader = self.reader('test-manifest-shared-support')
+ supported, child = self.read_topsrcdir(reader)
+
+ expected_deferred_installs = {
+ '!/child/test_sub.js',
+ '!/child/another-file.sjs',
+ '!/child/data/**',
+ }
+
+ self.assertEqual(len(supported.installs), 3)
+ self.assertEqual(set(supported.deferred_installs),
+ expected_deferred_installs)
+ self.assertEqual(len(child.installs), 3)
+ self.assertEqual(len(child.pattern_installs), 1)
+
+    def test_test_manifest_deferred_install_missing(self):
+ """A non-existent shared support file reference produces an error."""
+ reader = self.reader('test-manifest-shared-missing')
+
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'entry in support-files not present in the srcdir'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_install_to_subdir(self):
+ """ """
+ reader = self.reader('test-manifest-install-subdir')
+
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 1)
+ o = objs[0]
+ self.assertEqual(len(o.installs), 3)
+ self.assertEqual(o.manifest_relpath, "subdir.ini")
+ self.assertEqual(o.manifest_obj_relpath, "subdir/subdir.ini")
+ expected = [
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/subdir.ini")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/support.txt")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/test_foo.html")),
+ ]
+ paths = sorted([v[0] for v in o.installs.values()])
+ self.assertEqual(paths, expected)
+
+ def test_test_manifest_install_includes(self):
+ """Ensure that any [include:foo.ini] are copied to the objdir."""
+ reader = self.reader('test-manifest-install-includes')
+
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 1)
+ o = objs[0]
+ self.assertEqual(len(o.installs), 3)
+ self.assertEqual(o.manifest_relpath, "mochitest.ini")
+ self.assertEqual(o.manifest_obj_relpath, "subdir/mochitest.ini")
+ expected = [
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/common.ini")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/mochitest.ini")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "subdir/test_foo.html")),
+ ]
+ paths = sorted([v[0] for v in o.installs.values()])
+ self.assertEqual(paths, expected)
+
+ def test_test_manifest_includes(self):
+ """Ensure that manifest objects from the emitter list a correct manifest.
+ """
+ reader = self.reader('test-manifest-emitted-includes')
+ [obj] = self.read_topsrcdir(reader)
+
+        # Expected manifest leaf names for our tests.
+ expected_manifests = {
+ 'reftest1.html': 'reftest.list',
+ 'reftest1-ref.html': 'reftest.list',
+ 'reftest2.html': 'included-reftest.list',
+ 'reftest2-ref.html': 'included-reftest.list',
+ }
+
+ for t in obj.tests:
+ self.assertTrue(t['manifest'].endswith(expected_manifests[t['name']]))
+
+ def test_python_unit_test_missing(self):
+ """Missing files in PYTHON_UNIT_TESTS should raise."""
+ reader = self.reader('test-python-unit-test-missing')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Path specified in PYTHON_UNIT_TESTS does not exist:'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_keys_extracted(self):
+ """Ensure all metadata from test manifests is extracted."""
+ reader = self.reader('test-manifest-keys-extracted')
+
+ objs = [o for o in self.read_topsrcdir(reader)
+ if isinstance(o, TestManifest)]
+
+ self.assertEqual(len(objs), 9)
+
+ metadata = {
+ 'a11y.ini': {
+ 'flavor': 'a11y',
+ 'installs': {
+ 'a11y.ini': False,
+ 'test_a11y.js': True,
+ },
+ 'pattern-installs': 1,
+ },
+ 'browser.ini': {
+ 'flavor': 'browser-chrome',
+ 'installs': {
+ 'browser.ini': False,
+ 'test_browser.js': True,
+ 'support1': False,
+ 'support2': False,
+ },
+ },
+ 'metro.ini': {
+ 'flavor': 'metro-chrome',
+ 'installs': {
+ 'metro.ini': False,
+ 'test_metro.js': True,
+ },
+ },
+ 'mochitest.ini': {
+ 'flavor': 'mochitest',
+ 'installs': {
+ 'mochitest.ini': False,
+ 'test_mochitest.js': True,
+ },
+ 'external': {
+ 'external1',
+ 'external2',
+ },
+ },
+ 'chrome.ini': {
+ 'flavor': 'chrome',
+ 'installs': {
+ 'chrome.ini': False,
+ 'test_chrome.js': True,
+ },
+ },
+ 'xpcshell.ini': {
+ 'flavor': 'xpcshell',
+ 'dupe': True,
+ 'installs': {
+ 'xpcshell.ini': False,
+ 'test_xpcshell.js': True,
+ 'head1': False,
+ 'head2': False,
+ 'tail1': False,
+ 'tail2': False,
+ },
+ },
+ 'reftest.list': {
+ 'flavor': 'reftest',
+ 'installs': {},
+ },
+ 'crashtest.list': {
+ 'flavor': 'crashtest',
+ 'installs': {},
+ },
+ 'moz.build': {
+ 'flavor': 'python',
+ 'installs': {},
+ }
+ }
+
+ for o in objs:
+ m = metadata[mozpath.basename(o.manifest_relpath)]
+
+ self.assertTrue(o.path.startswith(o.directory))
+ self.assertEqual(o.flavor, m['flavor'])
+ self.assertEqual(o.dupe_manifest, m.get('dupe', False))
+
+ external_normalized = set(mozpath.basename(p) for p in
+ o.external_installs)
+ self.assertEqual(external_normalized, m.get('external', set()))
+
+ self.assertEqual(len(o.installs), len(m['installs']))
+ for path in o.installs.keys():
+ self.assertTrue(path.startswith(o.directory))
+ relpath = path[len(o.directory)+1:]
+
+ self.assertIn(relpath, m['installs'])
+ self.assertEqual(o.installs[path][1], m['installs'][relpath])
+
+ if 'pattern-installs' in m:
+ self.assertEqual(len(o.pattern_installs), m['pattern-installs'])
+
+ def test_test_manifest_unmatched_generated(self):
+ reader = self.reader('test-manifest-unmatched-generated')
+
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'entry in generated-files not present elsewhere'):
+            self.read_topsrcdir(reader)
+
+ def test_test_manifest_parent_support_files_dir(self):
+ """support-files referencing a file in a parent directory works."""
+ reader = self.reader('test-manifest-parent-support-files-dir')
+
+ objs = [o for o in self.read_topsrcdir(reader)
+ if isinstance(o, TestManifest)]
+
+ self.assertEqual(len(objs), 1)
+
+ o = objs[0]
+
+ expected = mozpath.join(o.srcdir, 'support-file.txt')
+ self.assertIn(expected, o.installs)
+ self.assertEqual(o.installs[expected],
+ ('testing/mochitest/tests/child/support-file.txt', False))
+
+ def test_test_manifest_missing_test_error(self):
+ """Missing test files should result in error."""
+ reader = self.reader('test-manifest-missing-test-file')
+
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'lists test that does not exist: test_missing.html'):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_missing_test_error_unfiltered(self):
+ """Missing test files should result in error, even when the test list is not filtered."""
+ reader = self.reader('test-manifest-missing-test-file-unfiltered')
+
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'lists test that does not exist: missing.js'):
+ self.read_topsrcdir(reader)
+
+ def test_ipdl_sources(self):
+ reader = self.reader('ipdl_sources')
+ objs = self.read_topsrcdir(reader)
+
+ ipdls = []
+ for o in objs:
+ if isinstance(o, IPDLFile):
+ ipdls.append('%s/%s' % (o.relativedir, o.basename))
+
+ expected = [
+ 'bar/bar.ipdl',
+ 'bar/bar2.ipdlh',
+ 'foo/foo.ipdl',
+ 'foo/foo2.ipdlh',
+ ]
+
+ self.assertEqual(ipdls, expected)
+
+ def test_local_includes(self):
+ """Test that LOCAL_INCLUDES is emitted correctly."""
+ reader = self.reader('local_includes')
+ objs = self.read_topsrcdir(reader)
+
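+        # Plausible sketch of the 'local_includes' fixture (hypothetical):
+        #
+        #   LOCAL_INCLUDES += ['/bar/baz', 'foo']
+        #
+        # A leading '/' makes the include topsrcdir-relative; a bare name is
+        # relative to the directory containing the moz.build.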
+ local_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+ expected = [
+ '/bar/baz',
+ 'foo',
+ ]
+
+ self.assertEqual(local_includes, expected)
+
+ local_includes = [o.path.full_path
+ for o in objs if isinstance(o, LocalInclude)]
+ expected = [
+ mozpath.join(reader.config.topsrcdir, 'bar/baz'),
+ mozpath.join(reader.config.topsrcdir, 'foo'),
+ ]
+
+ self.assertEqual(local_includes, expected)
+
+ def test_generated_includes(self):
+ """Test that GENERATED_INCLUDES is emitted correctly."""
+ reader = self.reader('generated_includes')
+ objs = self.read_topsrcdir(reader)
+
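+        # GENERATED_INCLUDES entries surface as LocalInclude objects whose
+        # paths keep a '!' prefix, so full_path resolves against topobjdir
+        # rather than topsrcdir.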
+ generated_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+ expected = [
+ '!/bar/baz',
+ '!foo',
+ ]
+
+ self.assertEqual(generated_includes, expected)
+
+ generated_includes = [o.path.full_path
+ for o in objs if isinstance(o, LocalInclude)]
+ expected = [
+ mozpath.join(reader.config.topobjdir, 'bar/baz'),
+ mozpath.join(reader.config.topobjdir, 'foo'),
+ ]
+
+ self.assertEqual(generated_includes, expected)
+
+ def test_defines(self):
+ reader = self.reader('defines')
+ objs = self.read_topsrcdir(reader)
+
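+        # Hypothetical sketch of the 'defines' fixture:
+        #
+        #   DEFINES['BAR'] = 7
+        #   DEFINES['BAZ'] = '"abcd"'
+        #   DEFINES['FOO'] = True   # emitted as a bare -DFOO
+        #   DEFINES['QUX'] = False  # retained here, suppressed in flags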
+ defines = {}
+ for o in objs:
+ if isinstance(o, Defines):
+ defines = o.defines
+
+ expected = {
+ 'BAR': 7,
+ 'BAZ': '"abcd"',
+ 'FOO': True,
+ 'VALUE': 'xyz',
+ 'QUX': False,
+ }
+
+ self.assertEqual(defines, expected)
+
+ def test_host_defines(self):
+ reader = self.reader('host-defines')
+ objs = self.read_topsrcdir(reader)
+
+ defines = {}
+ for o in objs:
+ if isinstance(o, HostDefines):
+ defines = o.defines
+
+ expected = {
+ 'BAR': 7,
+ 'BAZ': '"abcd"',
+ 'FOO': True,
+ 'VALUE': 'xyz',
+ 'QUX': False,
+ }
+
+ self.assertEqual(defines, expected)
+
+ def test_jar_manifests(self):
+ reader = self.reader('jar-manifests')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ for obj in objs:
+ self.assertIsInstance(obj, JARManifest)
+ self.assertIsInstance(obj.path, Path)
+
+ def test_jar_manifests_multiple_files(self):
+ with self.assertRaisesRegexp(SandboxValidationError, 'limited to one value'):
+ reader = self.reader('jar-manifests-multiple-files')
+ self.read_topsrcdir(reader)
+
+ def test_xpidl_module_no_sources(self):
+ """XPIDL_MODULE without XPIDL_SOURCES should be rejected."""
+ with self.assertRaisesRegexp(SandboxValidationError, 'XPIDL_MODULE '
+ 'cannot be defined'):
+ reader = self.reader('xpidl-module-no-sources')
+ self.read_topsrcdir(reader)
+
+ def test_missing_local_includes(self):
+ """LOCAL_INCLUDES containing non-existent directories should be rejected."""
+ with self.assertRaisesRegexp(SandboxValidationError, 'Path specified in '
+ 'LOCAL_INCLUDES does not exist'):
+ reader = self.reader('missing-local-includes')
+ self.read_topsrcdir(reader)
+
+ def test_library_defines(self):
+ """Test that LIBRARY_DEFINES is propagated properly."""
+ reader = self.reader('library-defines')
+ objs = self.read_topsrcdir(reader)
+
+        libraries = [o for o in objs if isinstance(o, StaticLibrary)]
+ expected = {
+ 'liba': '-DIN_LIBA',
+ 'libb': '-DIN_LIBA -DIN_LIBB',
+ 'libc': '-DIN_LIBA -DIN_LIBB',
+ 'libd': ''
+ }
+ defines = {}
+ for lib in libraries:
+ defines[lib.basename] = ' '.join(lib.lib_defines.get_defines())
+ self.assertEqual(expected, defines)
+
+ def test_sources(self):
+ """Test that SOURCES works properly."""
+ reader = self.reader('sources')
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable.
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ self.assertEqual(len(objs), 6)
+ for o in objs:
+ self.assertIsInstance(o, Sources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 6)
+
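+        # Sources are bucketed by canonical suffix: .cc and .cxx fold into
+        # .cpp, and .asm folds into .s, matching the buckets below.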
+ expected = {
+ '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
+ '.c': ['d.c'],
+ '.m': ['e.m'],
+ '.mm': ['f.mm'],
+ '.S': ['g.S'],
+ '.s': ['h.s', 'i.asm'],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files,
+ [mozpath.join(reader.config.topsrcdir, f) for f in files])
+
+ def test_sources_just_c(self):
+ """Test that a linkable with no C++ sources doesn't have cxx_link set."""
+ reader = self.reader('sources-just-c')
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable.
+ linkable = objs.pop()
+ self.assertFalse(linkable.cxx_link)
+
+ def test_linkables_cxx_link(self):
+ """Test that linkables transitively set cxx_link properly."""
+ reader = self.reader('test-linkables-cxx-link')
+ got_results = 0
+ for obj in self.read_topsrcdir(reader):
+ if isinstance(obj, SharedLibrary):
+ if obj.basename == 'cxx_shared':
+ self.assertTrue(obj.cxx_link)
+ got_results += 1
+ elif obj.basename == 'just_c_shared':
+ self.assertFalse(obj.cxx_link)
+ got_results += 1
+ self.assertEqual(got_results, 2)
+
+ def test_generated_sources(self):
+ """Test that GENERATED_SOURCES works properly."""
+ reader = self.reader('generated-sources')
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable.
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ self.assertEqual(len(objs), 6)
+
+ generated_sources = [o for o in objs if isinstance(o, GeneratedSources)]
+ self.assertEqual(len(generated_sources), 6)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in generated_sources}
+ self.assertEqual(len(suffix_map), 6)
+
+ expected = {
+ '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
+ '.c': ['d.c'],
+ '.m': ['e.m'],
+ '.mm': ['f.mm'],
+ '.S': ['g.S'],
+ '.s': ['h.s', 'i.asm'],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files,
+ [mozpath.join(reader.config.topobjdir, f) for f in files])
+
+ def test_host_sources(self):
+ """Test that HOST_SOURCES works properly."""
+ reader = self.reader('host-sources')
+ objs = self.read_topsrcdir(reader)
+
+        # The last object is a Linkable.
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, HostSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ '.cpp': ['a.cpp', 'b.cc', 'c.cxx'],
+ '.c': ['d.c'],
+ '.mm': ['e.mm', 'f.mm'],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files,
+ [mozpath.join(reader.config.topsrcdir, f) for f in files])
+
+ def test_unified_sources(self):
+ """Test that UNIFIED_SOURCES works properly."""
+ reader = self.reader('unified-sources')
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable, ignore it
+ objs = objs[:-1]
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, UnifiedSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ '.cpp': ['bar.cxx', 'foo.cpp', 'quux.cc'],
+ '.mm': ['objc1.mm', 'objc2.mm'],
+ '.c': ['c1.c', 'c2.c'],
+ }
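+        # have_unified_mapping should be True here: these sources get rolled
+        # up into generated unified compilation files.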
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files,
+ [mozpath.join(reader.config.topsrcdir, f) for f in files])
+ self.assertTrue(sources.have_unified_mapping)
+
+ def test_unified_sources_non_unified(self):
+ """Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly."""
+ reader = self.reader('unified-sources-non-unified')
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable, ignore it
+ objs = objs[:-1]
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, UnifiedSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ '.cpp': ['bar.cxx', 'foo.cpp', 'quux.cc'],
+ '.mm': ['objc1.mm', 'objc2.mm'],
+ '.c': ['c1.c', 'c2.c'],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files,
+ [mozpath.join(reader.config.topsrcdir, f) for f in files])
+ self.assertFalse(sources.have_unified_mapping)
+
+ def test_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES works properly."""
+ reader = self.reader('dist-files')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], FinalTargetPreprocessedFiles)
+
+ # Ideally we'd test hierarchies, but that would just be testing
+ # the HierarchicalStringList class, which we test separately.
+ for path, files in objs[0].files.walk():
+ self.assertEqual(path, '')
+ self.assertEqual(len(files), 2)
+
+ expected = {'install.rdf', 'main.js'}
+ for f in files:
+ self.assertTrue(unicode(f) in expected)
+
+ def test_missing_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
+ with self.assertRaisesRegexp(SandboxValidationError, 'File listed in '
+ 'FINAL_TARGET_PP_FILES does not exist'):
+ reader = self.reader('dist-files-missing')
+ self.read_topsrcdir(reader)
+
+ def test_final_target_pp_files_non_srcdir(self):
+        '''Test that non-srcdir paths in FINAL_TARGET_PP_FILES throw errors.'''
+ reader = self.reader('final-target-pp-files-non-srcdir')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Only source directory paths allowed in FINAL_TARGET_PP_FILES:'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_no_cargo_toml(self):
+ '''Test that defining a RustLibrary without a Cargo.toml fails.'''
+ reader = self.reader('rust-library-no-cargo-toml')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'No Cargo.toml file found'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_name_mismatch(self):
+ '''Test that defining a RustLibrary that doesn't match Cargo.toml fails.'''
+ reader = self.reader('rust-library-name-mismatch')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'library.*does not match Cargo.toml-defined package'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_no_lib_section(self):
+ '''Test that a RustLibrary Cargo.toml with no [lib] section fails.'''
+ reader = self.reader('rust-library-no-lib-section')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Cargo.toml for.* has no \\[lib\\] section'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_no_profile_section(self):
+ '''Test that a RustLibrary Cargo.toml with no [profile] section fails.'''
+ reader = self.reader('rust-library-no-profile-section')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Cargo.toml for.* has no \\[profile\\.dev\\] section'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_invalid_crate_type(self):
+ '''Test that a RustLibrary Cargo.toml has a permitted crate-type.'''
+ reader = self.reader('rust-library-invalid-crate-type')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'crate-type.* is not permitted'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_non_abort_panic(self):
+        '''Test that a RustLibrary Cargo.toml has `panic = "abort"` set.'''
+ reader = self.reader('rust-library-non-abort-panic')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'does not specify `panic = "abort"`'):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_dash_folding(self):
+ '''Test that on-disk names of RustLibrary objects convert dashes to underscores.'''
+ reader = self.reader('rust-library-dash-folding',
+ extra_substs=dict(RUST_TARGET='i686-pc-windows-msvc'))
+ objs = self.read_topsrcdir(reader)
+
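+        # Cargo allows dashes in crate names, but the on-disk library name
+        # uses underscores, hence lib_name/import_name 'random_crate' for
+        # basename 'random-crate'.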
+ self.assertEqual(len(objs), 1)
+ lib = objs[0]
+ self.assertIsInstance(lib, RustLibrary)
+ self.assertRegexpMatches(lib.lib_name, "random_crate")
+ self.assertRegexpMatches(lib.import_name, "random_crate")
+ self.assertRegexpMatches(lib.basename, "random-crate")
+
+ def test_multiple_rust_libraries(self):
+        '''Test that linking multiple Rust libraries throws an error.'''
+ reader = self.reader('multiple-rust-libraries',
+ extra_substs=dict(RUST_TARGET='i686-pc-windows-msvc'))
+ with self.assertRaisesRegexp(LinkageMultipleRustLibrariesError,
+ 'Cannot link multiple Rust libraries'):
+ self.read_topsrcdir(reader)
+
+ def test_crate_dependency_path_resolution(self):
+        '''Test that recursive dependencies resolve with the correct paths.'''
+ reader = self.reader('crate-dependency-path-resolution',
+ extra_substs=dict(RUST_TARGET='i686-pc-windows-msvc'))
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], RustLibrary)
+
+ def test_android_res_dirs(self):
+ """Test that ANDROID_RES_DIRS works properly."""
+ reader = self.reader('android-res-dirs')
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], AndroidResDirs)
+
+ # Android resource directories are ordered.
+ expected = [
+ mozpath.join(reader.config.topsrcdir, 'dir1'),
+ mozpath.join(reader.config.topobjdir, 'dir2'),
+ '/dir3',
+ ]
+        self.assertEqual([p.full_path for p in objs[0].paths], expected)
+
+ def test_binary_components(self):
+ """Test that IS_COMPONENT/NO_COMPONENTS_MANIFEST work properly."""
+ reader = self.reader('binary-components')
+ objs = self.read_topsrcdir(reader)
+
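+        # An IS_COMPONENT library is registered via a components.manifest
+        # entry (a 'binary-component' directive) whose relpath names the
+        # component library itself.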
+ self.assertEqual(len(objs), 3)
+ self.assertIsInstance(objs[0], ChromeManifestEntry)
+ self.assertEqual(objs[0].path,
+ 'dist/bin/components/components.manifest')
+ self.assertIsInstance(objs[0].entry, manifest.ManifestBinaryComponent)
+ self.assertEqual(objs[0].entry.base, 'dist/bin/components')
+ self.assertEqual(objs[0].entry.relpath, objs[1].lib_name)
+ self.assertIsInstance(objs[1], SharedLibrary)
+ self.assertEqual(objs[1].basename, 'foo')
+ self.assertIsInstance(objs[2], SharedLibrary)
+ self.assertEqual(objs[2].basename, 'bar')
+
+ def test_install_shared_lib(self):
+ """Test that we can install a shared library with TEST_HARNESS_FILES"""
+ reader = self.reader('test-install-shared-lib')
+ objs = self.read_topsrcdir(reader)
+ self.assertIsInstance(objs[0], TestHarnessFiles)
+ self.assertIsInstance(objs[1], VariablePassthru)
+ self.assertIsInstance(objs[2], SharedLibrary)
+ for path, files in objs[0].files.walk():
+ for f in files:
+ self.assertEqual(str(f), '!libfoo.so')
+ self.assertEqual(path, 'foo/bar')
+
+ def test_symbols_file(self):
+ """Test that SYMBOLS_FILE works"""
+ reader = self.reader('test-symbols-file')
+ genfile, shlib = self.read_topsrcdir(reader)
+ self.assertIsInstance(genfile, GeneratedFile)
+ self.assertIsInstance(shlib, SharedLibrary)
+ # This looks weird but MockConfig sets DLL_{PREFIX,SUFFIX} and
+ # the reader method in this class sets OS_TARGET=WINNT.
+ self.assertEqual(shlib.symbols_file, 'libfoo.so.def')
+
+ def test_symbols_file_objdir(self):
+ """Test that a SYMBOLS_FILE in the objdir works"""
+ reader = self.reader('test-symbols-file-objdir')
+ genfile, shlib = self.read_topsrcdir(reader)
+ self.assertIsInstance(genfile, GeneratedFile)
+ self.assertEqual(genfile.script,
+ mozpath.join(reader.config.topsrcdir, 'foo.py'))
+ self.assertIsInstance(shlib, SharedLibrary)
+ self.assertEqual(shlib.symbols_file, 'foo.symbols')
+
+ def test_symbols_file_objdir_missing_generated(self):
+ """Test that a SYMBOLS_FILE in the objdir that's missing
+ from GENERATED_FILES is an error.
+ """
+ reader = self.reader('test-symbols-file-objdir-missing-generated')
+ with self.assertRaisesRegexp(SandboxValidationError,
+ 'Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:'):
+ self.read_topsrcdir(reader)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_namespaces.py b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py
new file mode 100644
index 000000000..71cc634e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py
@@ -0,0 +1,207 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ Context,
+ ContextDerivedValue,
+ ContextDerivedTypedList,
+ ContextDerivedTypedListWithItems,
+)
+
+from mozbuild.util import (
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ UnsortedError,
+)
+
+
+class Fuga(object):
+ def __init__(self, value):
+ self.value = value
+
+
+class Piyo(ContextDerivedValue):
+ def __init__(self, context, value):
+ if not isinstance(value, unicode):
+ raise ValueError
+ self.context = context
+ self.value = value
+
+ def lower(self):
+ return self.value.lower()
+
+ def __str__(self):
+ return self.value
+
+ def __cmp__(self, other):
+ return cmp(self.value, str(other))
+
+ def __hash__(self):
+ return hash(self.value)
+
+
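+# Each VARIABLES entry maps a name to (storage type, input type, docs);
+# values assigned in a Context are accepted as the input type and coerced
+# to the storage type.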
+VARIABLES = {
+ 'HOGE': (unicode, unicode, None),
+ 'FUGA': (Fuga, unicode, None),
+ 'PIYO': (Piyo, unicode, None),
+ 'HOGERA': (ContextDerivedTypedList(Piyo, StrictOrderingOnAppendList),
+ list, None),
+ 'HOGEHOGE': (ContextDerivedTypedListWithItems(
+ Piyo,
+ StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': bool,
+ })), list, None),
+}
+
+
+class TestContext(unittest.TestCase):
+ def test_key_rejection(self):
+ # Lowercase keys should be rejected during normal operation.
+ ns = Context(allowed_variables=VARIABLES)
+
+ with self.assertRaises(KeyError) as ke:
+ ns['foo'] = True
+
+ e = ke.exception.args
+ self.assertEqual(e[0], 'global_ns')
+ self.assertEqual(e[1], 'set_unknown')
+ self.assertEqual(e[2], 'foo')
+ self.assertTrue(e[3])
+
+ # Unknown uppercase keys should be rejected.
+ with self.assertRaises(KeyError) as ke:
+ ns['FOO'] = True
+
+ e = ke.exception.args
+ self.assertEqual(e[0], 'global_ns')
+ self.assertEqual(e[1], 'set_unknown')
+ self.assertEqual(e[2], 'FOO')
+ self.assertTrue(e[3])
+
+ def test_allowed_set(self):
+ self.assertIn('HOGE', VARIABLES)
+
+ ns = Context(allowed_variables=VARIABLES)
+
+ ns['HOGE'] = 'foo'
+ self.assertEqual(ns['HOGE'], 'foo')
+
+ def test_value_checking(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a non-allowed type should not work.
+ with self.assertRaises(ValueError) as ve:
+ ns['HOGE'] = True
+
+ e = ve.exception.args
+ self.assertEqual(e[0], 'global_ns')
+ self.assertEqual(e[1], 'set_type')
+ self.assertEqual(e[2], 'HOGE')
+ self.assertEqual(e[3], True)
+ self.assertEqual(e[4], unicode)
+
+ def test_key_checking(self):
+ # Checking for existence of a key should not populate the key if it
+ # doesn't exist.
+ g = Context(allowed_variables=VARIABLES)
+
+ self.assertFalse('HOGE' in g)
+ self.assertFalse('HOGE' in g)
+
+ def test_coercion(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type different from the allowed input type should not
+ # work.
+ with self.assertRaises(ValueError) as ve:
+ ns['FUGA'] = False
+
+ e = ve.exception.args
+ self.assertEqual(e[0], 'global_ns')
+ self.assertEqual(e[1], 'set_type')
+ self.assertEqual(e[2], 'FUGA')
+ self.assertEqual(e[3], False)
+ self.assertEqual(e[4], unicode)
+
+ ns['FUGA'] = 'fuga'
+ self.assertIsInstance(ns['FUGA'], Fuga)
+ self.assertEqual(ns['FUGA'].value, 'fuga')
+
+ ns['FUGA'] = Fuga('hoge')
+ self.assertIsInstance(ns['FUGA'], Fuga)
+ self.assertEqual(ns['FUGA'].value, 'hoge')
+
+ def test_context_derived_coercion(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type different from the allowed input type should not
+ # work.
+ with self.assertRaises(ValueError) as ve:
+ ns['PIYO'] = False
+
+ e = ve.exception.args
+ self.assertEqual(e[0], 'global_ns')
+ self.assertEqual(e[1], 'set_type')
+ self.assertEqual(e[2], 'PIYO')
+ self.assertEqual(e[3], False)
+ self.assertEqual(e[4], unicode)
+
+ ns['PIYO'] = 'piyo'
+ self.assertIsInstance(ns['PIYO'], Piyo)
+ self.assertEqual(ns['PIYO'].value, 'piyo')
+ self.assertEqual(ns['PIYO'].context, ns)
+
+ ns['PIYO'] = Piyo(ns, 'fuga')
+ self.assertIsInstance(ns['PIYO'], Piyo)
+ self.assertEqual(ns['PIYO'].value, 'fuga')
+ self.assertEqual(ns['PIYO'].context, ns)
+
+ def test_context_derived_typed_list(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type that's rejected by coercion should not work.
+ with self.assertRaises(ValueError):
+ ns['HOGERA'] = [False]
+
+ ns['HOGERA'] += ['a', 'b', 'c']
+
+ self.assertIsInstance(ns['HOGERA'], VARIABLES['HOGERA'][0])
+ for n in range(0, 3):
+ self.assertIsInstance(ns['HOGERA'][n], Piyo)
+ self.assertEqual(ns['HOGERA'][n].value, ['a', 'b', 'c'][n])
+ self.assertEqual(ns['HOGERA'][n].context, ns)
+
+ with self.assertRaises(UnsortedError):
+ ns['HOGERA'] += ['f', 'e', 'd']
+
+ def test_context_derived_typed_list_with_items(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type that's rejected by coercion should not work.
+ with self.assertRaises(ValueError):
+ ns['HOGEHOGE'] = [False]
+
+ values = ['a', 'b', 'c']
+ ns['HOGEHOGE'] += values
+
+ self.assertIsInstance(ns['HOGEHOGE'], VARIABLES['HOGEHOGE'][0])
+ for v in values:
+ ns['HOGEHOGE'][v].foo = True
+
+ for v, item in zip(values, ns['HOGEHOGE']):
+ self.assertIsInstance(item, Piyo)
+ self.assertEqual(v, item)
+ self.assertEqual(ns['HOGEHOGE'][v].foo, True)
+ self.assertEqual(ns['HOGEHOGE'][item].foo, True)
+
+ with self.assertRaises(UnsortedError):
+ ns['HOGEHOGE'] += ['f', 'e', 'd']
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_reader.py b/python/mozbuild/mozbuild/test/frontend/test_reader.py
new file mode 100644
index 000000000..7c2aed9df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_reader.py
@@ -0,0 +1,485 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import sys
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import BugzillaComponent
+from mozbuild.frontend.reader import (
+ BuildReaderError,
+ BuildReader,
+)
+
+from mozbuild.test.common import MockConfig
+
+import mozpack.path as mozpath
+
+
+if sys.version_info.major == 2:
+ text_type = 'unicode'
+else:
+ text_type = 'str'
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, 'data')
+
+
+class TestBuildReader(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZ_OBJDIR', None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def config(self, name, **kwargs):
+ path = mozpath.join(data_path, name)
+
+ return MockConfig(path, **kwargs)
+
+ def reader(self, name, enable_tests=False, error_is_fatal=True, **kwargs):
+ extra = {}
+ if enable_tests:
+ extra['ENABLE_TESTS'] = '1'
+ config = self.config(name,
+ extra_substs=extra,
+ error_is_fatal=error_is_fatal)
+
+ return BuildReader(config, **kwargs)
+
+ def file_path(self, name, *args):
+ return mozpath.join(data_path, name, *args)
+
+ def test_dirs_traversal_simple(self):
+ reader = self.reader('traversal-simple')
+
+ contexts = list(reader.read_topsrcdir())
+
+ self.assertEqual(len(contexts), 4)
+
+ def test_dirs_traversal_no_descend(self):
+ reader = self.reader('traversal-simple')
+
+ path = mozpath.join(reader.config.topsrcdir, 'moz.build')
+ self.assertTrue(os.path.exists(path))
+
+ contexts = list(reader.read_mozbuild(path, reader.config,
+ descend=False))
+
+ self.assertEqual(len(contexts), 1)
+
+ def test_dirs_traversal_all_variables(self):
+ reader = self.reader('traversal-all-vars')
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 2)
+
+ reader = self.reader('traversal-all-vars', enable_tests=True)
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_relative_dirs(self):
+ # Ensure relative directories are traversed.
+ reader = self.reader('traversal-relative-dirs')
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_repeated_dirs_ignored(self):
+ # Ensure repeated directories are ignored.
+ reader = self.reader('traversal-repeated-dirs')
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_outside_topsrcdir(self):
+ # References to directories outside the topsrcdir should fail.
+ reader = self.reader('traversal-outside-topsrcdir')
+
+ with self.assertRaises(Exception):
+ list(reader.read_topsrcdir())
+
+ def test_error_basic(self):
+ reader = self.reader('reader-error-basic')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertEqual(e.actual_file, self.file_path('reader-error-basic',
+ 'moz.build'))
+
+ self.assertIn('The error occurred while processing the', str(e))
+
+ def test_error_included_from(self):
+ reader = self.reader('reader-error-included-from')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertEqual(e.actual_file,
+ self.file_path('reader-error-included-from', 'child.build'))
+ self.assertEqual(e.main_file,
+ self.file_path('reader-error-included-from', 'moz.build'))
+
+ self.assertIn('This file was included as part of processing', str(e))
+
+ def test_error_syntax_error(self):
+ reader = self.reader('reader-error-syntax')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('Python syntax error on line 5', str(e))
+ self.assertIn(' foo =', str(e))
+ self.assertIn(' ^', str(e))
+
+ def test_error_read_unknown_global(self):
+ reader = self.reader('reader-error-read-unknown-global')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('The error was triggered on line 5', str(e))
+ self.assertIn('The underlying problem is an attempt to read', str(e))
+ self.assertIn(' FOO', str(e))
+
+ def test_error_write_unknown_global(self):
+ reader = self.reader('reader-error-write-unknown-global')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('The error was triggered on line 7', str(e))
+ self.assertIn('The underlying problem is an attempt to write', str(e))
+ self.assertIn(' FOO', str(e))
+
+ def test_error_write_bad_value(self):
+ reader = self.reader('reader-error-write-bad-value')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('The error was triggered on line 5', str(e))
+ self.assertIn('is an attempt to write an illegal value to a special',
+ str(e))
+
+ self.assertIn('variable whose value was rejected is:\n\n DIRS',
+ str(e))
+
+ self.assertIn('written to it was of the following type:\n\n %s' % text_type,
+ str(e))
+
+ self.assertIn('expects the following type(s):\n\n list', str(e))
+
+ def test_error_illegal_path(self):
+ reader = self.reader('reader-error-outside-topsrcdir')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('The underlying problem is an illegal file access',
+ str(e))
+
+ def test_error_missing_include_path(self):
+ reader = self.reader('reader-error-missing-include')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('we referenced a path that does not exist', str(e))
+
+ def test_error_script_error(self):
+ reader = self.reader('reader-error-script-error')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('The error appears to be the fault of the script',
+ str(e))
+ self.assertIn(' ["TypeError: unsupported operand', str(e))
+
+ def test_error_bad_dir(self):
+ reader = self.reader('reader-error-bad-dir')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('we referenced a path that does not exist', str(e))
+
+ def test_error_repeated_dir(self):
+ reader = self.reader('reader-error-repeated-dir')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('Directory (foo) registered multiple times', str(e))
+
+ def test_error_error_func(self):
+ reader = self.reader('reader-error-error-func')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('A moz.build file called the error() function.', str(e))
+ self.assertIn(' Some error.', str(e))
+
+ def test_error_error_func_ok(self):
+ reader = self.reader('reader-error-error-func', error_is_fatal=False)
+
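+        # With error_is_fatal=False, error() is downgraded, so reading the
+        # tree should complete without raising.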
+ contexts = list(reader.read_topsrcdir())
+
+ def test_error_empty_list(self):
+ reader = self.reader('reader-error-empty-list')
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn('Variable DIRS assigned an empty value.', str(e))
+
+ def test_inheriting_variables(self):
+ reader = self.reader('inheriting-variables')
+
+ contexts = list(reader.read_topsrcdir())
+
+ self.assertEqual(len(contexts), 4)
+ self.assertEqual([context.relsrcdir for context in contexts],
+ ['', 'foo', 'foo/baz', 'bar'])
+ self.assertEqual([context['XPIDL_MODULE'] for context in contexts],
+ ['foobar', 'foobar', 'baz', 'foobar'])
+
+ def test_find_relevant_mozbuilds(self):
+ reader = self.reader('reader-relevant-mozbuild')
+
+ # Absolute paths outside topsrcdir are rejected.
+ with self.assertRaises(Exception):
+ reader._find_relevant_mozbuilds(['/foo'])
+
+ # File in root directory.
+ paths = reader._find_relevant_mozbuilds(['file'])
+ self.assertEqual(paths, {'file': ['moz.build']})
+
+ # File in child directory.
+ paths = reader._find_relevant_mozbuilds(['d1/file1'])
+ self.assertEqual(paths, {'d1/file1': ['moz.build', 'd1/moz.build']})
+
+ # Multiple files in same directory.
+ paths = reader._find_relevant_mozbuilds(['d1/file1', 'd1/file2'])
+ self.assertEqual(paths, {
+ 'd1/file1': ['moz.build', 'd1/moz.build'],
+ 'd1/file2': ['moz.build', 'd1/moz.build']})
+
+ # Missing moz.build from missing intermediate directory.
+ paths = reader._find_relevant_mozbuilds(
+ ['d1/no-intermediate-moz-build/child/file'])
+ self.assertEqual(paths, {
+ 'd1/no-intermediate-moz-build/child/file': [
+ 'moz.build', 'd1/moz.build', 'd1/no-intermediate-moz-build/child/moz.build']})
+
+ # Lots of empty directories.
+ paths = reader._find_relevant_mozbuilds([
+ 'd1/parent-is-far/dir1/dir2/dir3/file'])
+ self.assertEqual(paths, {
+ 'd1/parent-is-far/dir1/dir2/dir3/file':
+ ['moz.build', 'd1/moz.build', 'd1/parent-is-far/moz.build']})
+
+ # Lots of levels.
+ paths = reader._find_relevant_mozbuilds([
+ 'd1/every-level/a/file', 'd1/every-level/b/file'])
+ self.assertEqual(paths, {
+ 'd1/every-level/a/file': [
+ 'moz.build',
+ 'd1/moz.build',
+ 'd1/every-level/moz.build',
+ 'd1/every-level/a/moz.build',
+ ],
+ 'd1/every-level/b/file': [
+ 'moz.build',
+ 'd1/moz.build',
+ 'd1/every-level/moz.build',
+ 'd1/every-level/b/moz.build',
+ ],
+ })
+
+ # Different root directories.
+ paths = reader._find_relevant_mozbuilds(['d1/file', 'd2/file', 'file'])
+ self.assertEqual(paths, {
+ 'file': ['moz.build'],
+ 'd1/file': ['moz.build', 'd1/moz.build'],
+ 'd2/file': ['moz.build', 'd2/moz.build'],
+ })
+
+ def test_read_relevant_mozbuilds(self):
+ reader = self.reader('reader-relevant-mozbuild')
+
+ paths, contexts = reader.read_relevant_mozbuilds(['d1/every-level/a/file',
+ 'd1/every-level/b/file', 'd2/file'])
+ self.assertEqual(len(paths), 3)
+ self.assertEqual(len(contexts), 6)
+
+ self.assertEqual([ctx.relsrcdir for ctx in paths['d1/every-level/a/file']],
+ ['', 'd1', 'd1/every-level', 'd1/every-level/a'])
+ self.assertEqual([ctx.relsrcdir for ctx in paths['d1/every-level/b/file']],
+ ['', 'd1', 'd1/every-level', 'd1/every-level/b'])
+ self.assertEqual([ctx.relsrcdir for ctx in paths['d2/file']],
+ ['', 'd2'])
+
+ def test_files_bad_bug_component(self):
+ reader = self.reader('files-info')
+
+ with self.assertRaises(BuildReaderError):
+ reader.files_info(['bug_component/bad-assignment/moz.build'])
+
+ def test_files_bug_component_static(self):
+ reader = self.reader('files-info')
+
+ v = reader.files_info(['bug_component/static/foo',
+ 'bug_component/static/bar',
+ 'bug_component/static/foo/baz'])
+ self.assertEqual(len(v), 3)
+ self.assertEqual(v['bug_component/static/foo']['BUG_COMPONENT'],
+ BugzillaComponent('FooProduct', 'FooComponent'))
+ self.assertEqual(v['bug_component/static/bar']['BUG_COMPONENT'],
+ BugzillaComponent('BarProduct', 'BarComponent'))
+ self.assertEqual(v['bug_component/static/foo/baz']['BUG_COMPONENT'],
+ BugzillaComponent('default_product', 'default_component'))
+
+ def test_files_bug_component_simple(self):
+ reader = self.reader('files-info')
+
+ v = reader.files_info(['bug_component/simple/moz.build'])
+ self.assertEqual(len(v), 1)
+ flags = v['bug_component/simple/moz.build']
+ self.assertEqual(flags['BUG_COMPONENT'].product, 'Core')
+ self.assertEqual(flags['BUG_COMPONENT'].component, 'Build Config')
+
+ def test_files_bug_component_different_matchers(self):
+ reader = self.reader('files-info')
+
+ v = reader.files_info([
+ 'bug_component/different-matchers/foo.jsm',
+ 'bug_component/different-matchers/bar.cpp',
+ 'bug_component/different-matchers/baz.misc'])
+ self.assertEqual(len(v), 3)
+
+ js_flags = v['bug_component/different-matchers/foo.jsm']
+ cpp_flags = v['bug_component/different-matchers/bar.cpp']
+ misc_flags = v['bug_component/different-matchers/baz.misc']
+
+ self.assertEqual(js_flags['BUG_COMPONENT'], BugzillaComponent('Firefox', 'JS'))
+ self.assertEqual(cpp_flags['BUG_COMPONENT'], BugzillaComponent('Firefox', 'C++'))
+ self.assertEqual(misc_flags['BUG_COMPONENT'], BugzillaComponent('default_product', 'default_component'))
+
+ def test_files_bug_component_final(self):
+ reader = self.reader('files-info')
+
+ v = reader.files_info([
+ 'bug_component/final/foo',
+ 'bug_component/final/Makefile.in',
+ 'bug_component/final/subcomponent/Makefile.in',
+ 'bug_component/final/subcomponent/bar'])
+
+ self.assertEqual(v['bug_component/final/foo']['BUG_COMPONENT'],
+ BugzillaComponent('default_product', 'default_component'))
+ self.assertEqual(v['bug_component/final/Makefile.in']['BUG_COMPONENT'],
+ BugzillaComponent('Core', 'Build Config'))
+ self.assertEqual(v['bug_component/final/subcomponent/Makefile.in']['BUG_COMPONENT'],
+ BugzillaComponent('Core', 'Build Config'))
+ self.assertEqual(v['bug_component/final/subcomponent/bar']['BUG_COMPONENT'],
+ BugzillaComponent('Another', 'Component'))
+
+ def test_file_test_deps(self):
+ reader = self.reader('files-test-metadata')
+
+ expected = {
+ 'simple/src/module.jsm': set(['simple/tests/test_general.html',
+ 'simple/browser/**.js']),
+ 'simple/base.cpp': set(['simple/tests/*',
+ 'default/tests/xpcshell/test_default_mod.js']),
+ }
+
+ v = reader.files_info([
+ 'simple/src/module.jsm',
+ 'simple/base.cpp',
+ ])
+
+ for path, pattern_set in expected.items():
+ self.assertEqual(v[path].test_files,
+ expected[path])
+
+ def test_file_test_deps_default(self):
+ reader = self.reader('files-test-metadata')
+ v = reader.files_info([
+ 'default/module.js',
+ ])
+
+ expected = {
+ 'default/module.js': set(['default/tests/xpcshell/**',
+ 'default/tests/reftests/**']),
+ }
+
+ for path, pattern_set in expected.items():
+ self.assertEqual(v[path].test_files,
+ expected[path])
+
+ def test_file_test_deps_tags(self):
+ reader = self.reader('files-test-metadata')
+ v = reader.files_info([
+ 'tagged/src/bar.jsm',
+ 'tagged/src/submodule/foo.js',
+ ])
+
+ expected_patterns = {
+ 'tagged/src/submodule/foo.js': set([]),
+ 'tagged/src/bar.jsm': set(['tagged/**.js']),
+ }
+
+ for path, pattern_set in expected_patterns.items():
+ self.assertEqual(v[path].test_files,
+ expected_patterns[path])
+
+ expected_tags = {
+ 'tagged/src/submodule/foo.js': set(['submodule']),
+ 'tagged/src/bar.jsm': set([]),
+ }
+ for path, pattern_set in expected_tags.items():
+ self.assertEqual(v[path].test_tags,
+ expected_tags[path])
+
+ expected_flavors = {
+ 'tagged/src/bar.jsm': set(['browser-chrome']),
+ 'tagged/src/submodule/foo.js': set([]),
+ }
+ for path, pattern_set in expected_flavors.items():
+ self.assertEqual(v[path].test_flavors,
+ expected_flavors[path])
+
+ def test_invalid_flavor(self):
+ reader = self.reader('invalid-files-flavor')
+
+ with self.assertRaises(BuildReaderError):
+ reader.files_info(['foo.js'])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_sandbox.py b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py
new file mode 100644
index 000000000..d24c5d9ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py
@@ -0,0 +1,534 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import shutil
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.reader import (
+ MozbuildSandbox,
+ SandboxCalledError,
+)
+
+from mozbuild.frontend.sandbox import (
+ Sandbox,
+ SandboxExecutionError,
+ SandboxLoadError,
+)
+
+from mozbuild.frontend.context import (
+ Context,
+ FUNCTIONS,
+ SourcePath,
+ SPECIAL_VARIABLES,
+ VARIABLES,
+)
+
+from mozbuild.test.common import MockConfig
+from types import StringTypes
+
+import mozpack.path as mozpath
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestSandbox(unittest.TestCase):
+ def sandbox(self):
+ return Sandbox(Context({
+ 'DIRS': (list, list, None),
+ }))
+
+ def test_exec_source_success(self):
+ sandbox = self.sandbox()
+ context = sandbox._context
+
+ sandbox.exec_source('foo = True', mozpath.abspath('foo.py'))
+
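+        # Arbitrary lowercase assignments don't leak into the context; only
+        # registered UPPERCASE variables are captured.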
+ self.assertNotIn('foo', context)
+ self.assertEqual(context.main_path, mozpath.abspath('foo.py'))
+ self.assertEqual(context.all_paths, set([mozpath.abspath('foo.py')]))
+
+ def test_exec_compile_error(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('2f23;k;asfj', mozpath.abspath('foo.py'))
+
+ self.assertEqual(se.exception.file_stack, [mozpath.abspath('foo.py')])
+ self.assertIsInstance(se.exception.exc_value, SyntaxError)
+ self.assertEqual(sandbox._context.main_path, mozpath.abspath('foo.py'))
+
+ def test_exec_import_denied(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('import sys')
+
+ self.assertIsInstance(se.exception, SandboxExecutionError)
+ self.assertEqual(se.exception.exc_type, ImportError)
+
+ def test_exec_source_multiple(self):
+ sandbox = self.sandbox()
+
+ sandbox.exec_source('DIRS = ["foo"]')
+ sandbox.exec_source('DIRS += ["bar"]')
+
+ self.assertEqual(sandbox['DIRS'], ['foo', 'bar'])
+
+ def test_exec_source_illegal_key_set(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('ILLEGAL = True')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], 'global_ns')
+ self.assertEqual(e.args[1], 'set_unknown')
+
+ def test_exec_source_reassign(self):
+ sandbox = self.sandbox()
+
+ sandbox.exec_source('DIRS = ["foo"]')
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('DIRS = ["bar"]')
+
+ self.assertEqual(sandbox['DIRS'], ['foo'])
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], 'global_ns')
+ self.assertEqual(e.args[1], 'reassign')
+ self.assertEqual(e.args[2], 'DIRS')
+
+ def test_exec_source_reassign_builtin(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('True = 1')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], 'Cannot reassign builtins')
+
+
+class TestedSandbox(MozbuildSandbox):
+ '''Version of MozbuildSandbox with a little more convenience for testing.
+
+ It automatically normalizes paths given to exec_file and exec_source. This
+ helps simplify the test code.
+ '''
+ def normalize_path(self, path):
+ return mozpath.normpath(
+ mozpath.join(self._context.config.topsrcdir, path))
+
+ def source_path(self, path):
+ return SourcePath(self._context, path)
+
+ def exec_file(self, path):
+ super(TestedSandbox, self).exec_file(self.normalize_path(path))
+
+ def exec_source(self, source, path=''):
+ super(TestedSandbox, self).exec_source(source,
+ self.normalize_path(path) if path else '')
+
+
+class TestMozbuildSandbox(unittest.TestCase):
+ def sandbox(self, data_path=None, metadata={}):
+ config = None
+
+ if data_path is not None:
+ config = MockConfig(mozpath.join(test_data_path, data_path))
+ else:
+ config = MockConfig()
+
+ return TestedSandbox(Context(VARIABLES, config), metadata)
+
+ def test_default_state(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path('moz.build'))
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir)
+ self.assertEqual(sandbox['TOPOBJDIR'], config.topobjdir)
+ self.assertEqual(sandbox['RELATIVEDIR'], '')
+ self.assertEqual(sandbox['SRCDIR'], config.topsrcdir)
+ self.assertEqual(sandbox['OBJDIR'], config.topobjdir)
+
+ def test_symbol_presence(self):
+ # Ensure no discrepancies between the master symbol table and what's in
+ # the sandbox.
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path('moz.build'))
+
+ all_symbols = set()
+ all_symbols |= set(FUNCTIONS.keys())
+ all_symbols |= set(SPECIAL_VARIABLES.keys())
+
+ for symbol in all_symbols:
+ self.assertIsNotNone(sandbox[symbol])
+
+ def test_path_calculation(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path('foo/bar/moz.build'))
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir)
+ self.assertEqual(sandbox['TOPOBJDIR'], config.topobjdir)
+ self.assertEqual(sandbox['RELATIVEDIR'], 'foo/bar')
+ self.assertEqual(sandbox['SRCDIR'],
+ mozpath.join(config.topsrcdir, 'foo/bar'))
+ self.assertEqual(sandbox['OBJDIR'],
+ mozpath.join(config.topobjdir, 'foo/bar'))
+
+ def test_config_access(self):
+ sandbox = self.sandbox()
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox['CONFIG']['MOZ_TRUE'], '1')
+ self.assertEqual(sandbox['CONFIG']['MOZ_FOO'], config.substs['MOZ_FOO'])
+
+ # Access to an undefined substitution should return None.
+ self.assertNotIn('MISSING', sandbox['CONFIG'])
+ self.assertIsNone(sandbox['CONFIG']['MISSING'])
+
+        # We shouldn't be allowed to assign to the config.
+ with self.assertRaises(Exception):
+ sandbox['CONFIG']['FOO'] = ''
+
+ def test_special_variables(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path('moz.build'))
+
+ for k in SPECIAL_VARIABLES:
+ with self.assertRaises(KeyError):
+ sandbox[k] = 0
+
+ def test_exec_source_reassign_exported(self):
+ template_sandbox = self.sandbox(data_path='templates')
+
+ # Templates need to be defined in actual files because of
+ # inspect.getsourcelines.
+ template_sandbox.exec_file('templates.mozbuild')
+
+ config = MockConfig()
+
+ exports = {'DIST_SUBDIR': 'browser'}
+
+ sandbox = TestedSandbox(Context(VARIABLES, config), metadata={
+ 'exports': exports,
+ 'templates': template_sandbox.templates,
+ })
+
+ self.assertEqual(sandbox['DIST_SUBDIR'], 'browser')
+
+ # Templates should not interfere
+ sandbox.exec_source('Template([])', 'foo.mozbuild')
+
+ sandbox.exec_source('DIST_SUBDIR = "foo"')
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('DIST_SUBDIR = "bar"')
+
+ self.assertEqual(sandbox['DIST_SUBDIR'], 'foo')
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], 'global_ns')
+ self.assertEqual(e.args[1], 'reassign')
+ self.assertEqual(e.args[2], 'DIST_SUBDIR')
+
+ def test_include_basic(self):
+ sandbox = self.sandbox(data_path='include-basic')
+
+ sandbox.exec_file('moz.build')
+
+ self.assertEqual(sandbox['DIRS'], [
+ sandbox.source_path('foo'),
+ sandbox.source_path('bar'),
+ ])
+ self.assertEqual(sandbox._context.main_path,
+ sandbox.normalize_path('moz.build'))
+ self.assertEqual(len(sandbox._context.all_paths), 2)
+
+ def test_include_outside_topsrcdir(self):
+ sandbox = self.sandbox(data_path='include-outside-topsrcdir')
+
+ with self.assertRaises(SandboxLoadError) as se:
+ sandbox.exec_file('relative.build')
+
+ self.assertEqual(se.exception.illegal_path,
+ sandbox.normalize_path('../moz.build'))
+
+ def test_include_error_stack(self):
+ # Ensure the path stack is reported properly in exceptions.
+ sandbox = self.sandbox(data_path='include-file-stack')
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_file('moz.build')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ args = e.exc_value.args
+ self.assertEqual(args[0], 'global_ns')
+ self.assertEqual(args[1], 'set_unknown')
+ self.assertEqual(args[2], 'ILLEGAL')
+
+ expected_stack = [mozpath.join(sandbox._context.config.topsrcdir, p) for p in [
+ 'moz.build', 'included-1.build', 'included-2.build']]
+
+ self.assertEqual(e.file_stack, expected_stack)
+
+ def test_include_missing(self):
+ sandbox = self.sandbox(data_path='include-missing')
+
+ with self.assertRaises(SandboxLoadError) as sle:
+ sandbox.exec_file('moz.build')
+
+ self.assertIsNotNone(sle.exception.read_error)
+
+ def test_include_relative_from_child_dir(self):
+ # A relative path from a subdirectory should be relative from that
+ # child directory.
+ sandbox = self.sandbox(data_path='include-relative-from-child')
+ sandbox.exec_file('child/child.build')
+ self.assertEqual(sandbox['DIRS'], [sandbox.source_path('../foo')])
+
+ sandbox = self.sandbox(data_path='include-relative-from-child')
+ sandbox.exec_file('child/child2.build')
+ self.assertEqual(sandbox['DIRS'], [sandbox.source_path('../foo')])
+
+ def test_include_topsrcdir_relative(self):
+ # An absolute path for include() is relative to topsrcdir.
+
+ sandbox = self.sandbox(data_path='include-topsrcdir-relative')
+ sandbox.exec_file('moz.build')
+
+ self.assertEqual(sandbox['DIRS'], [sandbox.source_path('foo')])
+
+ def test_error(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxCalledError) as sce:
+ sandbox.exec_source('error("This is an error.")')
+
+ e = sce.exception
+ self.assertEqual(e.message, 'This is an error.')
+
+ def test_substitute_config_files(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path('moz.build'))
+
+ sandbox.exec_source('CONFIGURE_SUBST_FILES += ["bar", "foo"]')
+ self.assertEqual(sandbox['CONFIGURE_SUBST_FILES'], ['bar', 'foo'])
+ for item in sandbox['CONFIGURE_SUBST_FILES']:
+ self.assertIsInstance(item, SourcePath)
+
+ def test_invalid_utf8_substs(self):
+        """Ensure invalid UTF-8 in substs is converted to replacement characters."""
+
+        # This is really mbcs-encoded text; as UTF-8 it is just invalid bytes.
+ config = MockConfig(extra_substs={'BAD_UTF8': b'\x83\x81\x83\x82\x3A'})
+
+ sandbox = MozbuildSandbox(Context(VARIABLES, config))
+
+ self.assertEqual(sandbox['CONFIG']['BAD_UTF8'],
+ u'\ufffd\ufffd\ufffd\ufffd:')
+
+ def test_invalid_exports_set_base(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('EXPORTS = "foo.h"')
+
+ self.assertEqual(se.exception.exc_type, ValueError)
+
+ def test_templates(self):
+ sandbox = self.sandbox(data_path='templates')
+
+ # Templates need to be defined in actual files because of
+ # inspect.getsourcelines.
+ sandbox.exec_file('templates.mozbuild')
+
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+Template([
+ 'foo.cpp',
+])
+'''
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ self.assertEqual(sandbox2._context, {
+ 'SOURCES': ['foo.cpp'],
+ 'DIRS': [],
+ })
+
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+SOURCES += ['qux.cpp']
+Template([
+ 'bar.cpp',
+ 'foo.cpp',
+],[
+ 'foo',
+])
+SOURCES += ['hoge.cpp']
+'''
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ self.assertEqual(sandbox2._context, {
+ 'SOURCES': ['qux.cpp', 'bar.cpp', 'foo.cpp', 'hoge.cpp'],
+ 'DIRS': [sandbox2.source_path('foo')],
+ })
+
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+TemplateError([
+ 'foo.cpp',
+])
+'''
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], 'global_ns')
+ self.assertEqual(e.args[1], 'set_unknown')
+
+ # TemplateGlobalVariable tries to access 'illegal' but that is expected
+ # to throw.
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+illegal = True
+TemplateGlobalVariable()
+'''
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, NameError)
+
+        # TemplateGlobalUPPERVariable appends DIRS to SOURCES, but the
+        # context used when running the template is not expected to see
+        # variables from the global context.
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+DIRS += ['foo']
+TemplateGlobalUPPERVariable()
+'''
+ sandbox2.exec_source(source, 'foo.mozbuild')
+ self.assertEqual(sandbox2._context, {
+ 'SOURCES': [],
+ 'DIRS': [sandbox2.source_path('foo')],
+ })
+
+ # However, the result of the template is mixed with the global
+ # context.
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+SOURCES += ['qux.cpp']
+TemplateInherit([
+ 'bar.cpp',
+ 'foo.cpp',
+])
+SOURCES += ['hoge.cpp']
+'''
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ self.assertEqual(sandbox2._context, {
+ 'SOURCES': ['qux.cpp', 'bar.cpp', 'foo.cpp', 'hoge.cpp'],
+ 'USE_LIBS': ['foo'],
+ 'DIRS': [],
+ })
+
+ # Template names must be CamelCase. Here, we can define the template
+ # inline because the error happens before inspect.getsourcelines.
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+@template
+def foo():
+ pass
+'''
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, NameError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.message,
+ 'Template function names must be CamelCase.')
+
+ # Template names must not already be registered.
+ sandbox2 = self.sandbox(metadata={'templates': sandbox.templates})
+ source = '''
+@template
+def Template():
+ pass
+'''
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.message,
+ 'A template named "Template" was already declared in %s.' %
+ sandbox.normalize_path('templates.mozbuild'))
+
+ def test_function_args(self):
+ class Foo(int): pass
+
+ def foo(a, b):
+ return type(a), type(b)
+
+ FUNCTIONS.update({
+ 'foo': (lambda self: foo, (Foo, int), ''),
+ })
+
+ try:
+ sandbox = self.sandbox()
+ source = 'foo("a", "b")'
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, ValueError)
+
+ sandbox = self.sandbox()
+ source = 'foo(1, "b")'
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source(source, 'foo.mozbuild')
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, ValueError)
+
+ sandbox = self.sandbox()
+ source = 'a = foo(1, 2)'
+ sandbox.exec_source(source, 'foo.mozbuild')
+
+            self.assertEqual(sandbox['a'], (Foo, int))
+ finally:
+ del FUNCTIONS['foo']
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_android_version_code.py b/python/mozbuild/mozbuild/test/test_android_version_code.py
new file mode 100644
index 000000000..059f4588c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_android_version_code.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozunit import main
+import unittest
+
+from mozbuild.android_version_code import (
+ android_version_code_v0,
+ android_version_code_v1,
+)
+
+class TestAndroidVersionCode(unittest.TestCase):
+ def test_android_version_code_v0(self):
+ # From https://treeherder.mozilla.org/#/jobs?repo=mozilla-central&revision=e25de9972a77.
+ buildid = '20150708104620'
+ arm_api9 = 2015070819
+ arm_api11 = 2015070821
+ x86_api9 = 2015070822
+ self.assertEqual(android_version_code_v0(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
+ self.assertEqual(android_version_code_v0(buildid, cpu_arch='armeabi-v7a', min_sdk=11, max_sdk=None), arm_api11)
+ self.assertEqual(android_version_code_v0(buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
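+        # A sketch of the scheme these constants imply (inferred purely from
+        # the values asserted above, not necessarily the shipped
+        # implementation):
+        #   base = int(buildid[:10])    # == 2015070810
+        #   arm:  base + min_sdk        # 2015070819 / 2015070821
+        #   x86:  base + min_sdk + 3    # 2015070822, sorts above arm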
+
+ def test_android_version_code_v1(self):
+ buildid = '20150825141628'
+ arm_api15 = 0b01111000001000000001001001110001
+ x86_api9 = 0b01111000001000000001001001110100
+ self.assertEqual(android_version_code_v1(buildid, cpu_arch='armeabi-v7a', min_sdk=15, max_sdk=None), arm_api15)
+ self.assertEqual(android_version_code_v1(buildid, cpu_arch='x86', min_sdk=9, max_sdk=None), x86_api9)
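+        # The constants above decode consistently as (a sketch inferred from
+        # these tests, not necessarily the shipped layout): a fixed 12-bit
+        # prefix 0b011110000010, a 17-bit count of hours since 2015-08-01
+        # (the underflow cutoff tested below), and 3 low bits distinguishing
+        # CPU arch / API level:
+        #   (arm_api15 >> 3) & 0x1ffff  # == 590 hours, i.e. Aug 25, 14:00
+        #   x86_api9 & 0b111            # == 0b100 for x86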
+
+ def test_android_version_code_v1_underflow(self):
+ '''Verify that it is an error to ask for v1 codes predating the cutoff.'''
+ buildid = '201508010000' # Earliest possible.
+ arm_api9 = 0b01111000001000000000000000000000
+ self.assertEqual(android_version_code_v1(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None), arm_api9)
+ with self.assertRaises(ValueError) as cm:
+ underflow = '201507310000' # Latest possible (valid) underflowing date.
+ android_version_code_v1(underflow, cpu_arch='armeabi', min_sdk=9, max_sdk=None)
+ self.assertTrue('underflow' in cm.exception.message)
+
+ def test_android_version_code_v1_running_low(self):
+ '''Verify there is an informative message if one asks for v1 codes that are close to overflow.'''
+ with self.assertRaises(ValueError) as cm:
+ overflow = '20290801000000'
+ android_version_code_v1(overflow, cpu_arch='armeabi', min_sdk=9, max_sdk=None)
+ self.assertTrue('Running out of low order bits' in cm.exception.message)
+
+ def test_android_version_code_v1_overflow(self):
+        '''Verify that it is an error to ask for v1 codes that actually do overflow.'''
+ with self.assertRaises(ValueError) as cm:
+ overflow = '20310801000000'
+ android_version_code_v1(overflow, cpu_arch='armeabi', min_sdk=9, max_sdk=None)
+ self.assertTrue('overflow' in cm.exception.message)
+
+ def test_android_version_code_v0_relative_v1(self):
+ '''Verify that the first v1 code is greater than the equivalent v0 code.'''
+ buildid = '20150801000000'
+ self.assertGreater(android_version_code_v1(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None),
+ android_version_code_v0(buildid, cpu_arch='armeabi', min_sdk=9, max_sdk=None))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_base.py b/python/mozbuild/mozbuild/test/test_base.py
new file mode 100644
index 000000000..87f0db85b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_base.py
@@ -0,0 +1,410 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+
+from cStringIO import StringIO
+from mozfile.mozfile import NamedTemporaryFile
+
+from mozunit import main
+
+from mach.logging import LoggingManager
+
+from mozbuild.base import (
+ BadEnvironmentException,
+ MachCommandBase,
+ MozbuildObject,
+ ObjdirMismatchException,
+ PathArgument,
+)
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from buildconfig import topsrcdir, topobjdir
+import mozpack.path as mozpath
+
+
+curdir = os.path.dirname(__file__)
+log_manager = LoggingManager()
+
+
+class TestMozbuildObject(unittest.TestCase):
+ def setUp(self):
+ self._old_cwd = os.getcwd()
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZCONFIG', None)
+ os.environ.pop('MOZ_OBJDIR', None)
+ os.environ.pop('MOZ_CURRENT_PROJECT', None)
+
+ def tearDown(self):
+ os.chdir(self._old_cwd)
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def get_base(self, topobjdir=None):
+ return MozbuildObject(topsrcdir, None, log_manager, topobjdir=topobjdir)
+
+ def test_objdir_config_guess(self):
+ base = self.get_base()
+
+ with NamedTemporaryFile() as mozconfig:
+ os.environ[b'MOZCONFIG'] = mozconfig.name
+
+ self.assertIsNotNone(base.topobjdir)
+ self.assertEqual(len(base.topobjdir.split()), 1)
+ config_guess = base.resolve_config_guess()
+ self.assertTrue(base.topobjdir.endswith(config_guess))
+ self.assertTrue(os.path.isabs(base.topobjdir))
+ self.assertTrue(base.topobjdir.startswith(base.topsrcdir))
+
+ def test_objdir_trailing_slash(self):
+ """Trailing slashes in topobjdir should be removed."""
+ base = self.get_base()
+
+ with NamedTemporaryFile() as mozconfig:
+ mozconfig.write('mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/')
+ mozconfig.flush()
+ os.environ[b'MOZCONFIG'] = mozconfig.name
+
+ self.assertEqual(base.topobjdir, mozpath.join(base.topsrcdir,
+ 'foo'))
+ self.assertTrue(base.topobjdir.endswith('foo'))
+
+ def test_objdir_config_status(self):
+ """Ensure @CONFIG_GUESS@ is handled when loading mozconfig."""
+ base = self.get_base()
+ cmd = base._normalize_command(
+ [os.path.join(topsrcdir, 'build', 'autoconf', 'config.guess')],
+ True)
+ guess = subprocess.check_output(cmd, cwd=topsrcdir).strip()
+
+ # There may be symlinks involved, so we use real paths to ensure
+ # path consistency.
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ mozconfig = os.path.join(d, 'mozconfig')
+ with open(mozconfig, 'wt') as fh:
+ fh.write('mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/@CONFIG_GUESS@')
+ print('Wrote mozconfig %s' % mozconfig)
+
+ topobjdir = os.path.join(d, 'foo', guess)
+ os.makedirs(topobjdir)
+
+ # Create a fake topsrcdir.
+ guess_path = os.path.join(d, 'build', 'autoconf', 'config.guess')
+ os.makedirs(os.path.dirname(guess_path))
+ shutil.copy(os.path.join(topsrcdir, 'build', 'autoconf',
+ 'config.guess',), guess_path)
+
+ mozinfo = os.path.join(topobjdir, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump(dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ), fh)
+
+ os.environ[b'MOZCONFIG'] = mozconfig.encode('utf-8')
+ os.chdir(topobjdir)
+
+ obj = MozbuildObject.from_environment(
+ detect_virtualenv_mozinfo=False)
+
+ self.assertEqual(obj.topobjdir, mozpath.normsep(topobjdir))
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_relative_objdir(self):
+        """Objdirs defined as relative paths are loaded properly."""
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ mozconfig = os.path.join(d, 'mozconfig')
+ with open(mozconfig, 'wt') as fh:
+ fh.write('mk_add_options MOZ_OBJDIR=./objdir')
+
+ topobjdir = mozpath.join(d, 'objdir')
+ os.mkdir(topobjdir)
+
+ mozinfo = os.path.join(topobjdir, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump(dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ), fh)
+
+ os.environ[b'MOZCONFIG'] = mozconfig.encode('utf-8')
+ child = os.path.join(topobjdir, 'foo', 'bar')
+ os.makedirs(child)
+ os.chdir(child)
+
+ obj = MozbuildObject.from_environment(
+ detect_virtualenv_mozinfo=False)
+
+ self.assertEqual(obj.topobjdir, topobjdir)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ @unittest.skipIf(not hasattr(os, 'symlink'), 'symlinks not available.')
+ def test_symlink_objdir(self):
+ """Objdir that is a symlink is loaded properly."""
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ topobjdir_real = os.path.join(d, 'objdir')
+ topobjdir_link = os.path.join(d, 'objlink')
+
+ os.mkdir(topobjdir_real)
+ os.symlink(topobjdir_real, topobjdir_link)
+
+ mozconfig = os.path.join(d, 'mozconfig')
+ with open(mozconfig, 'wt') as fh:
+ fh.write('mk_add_options MOZ_OBJDIR=%s' % topobjdir_link)
+
+ mozinfo = os.path.join(topobjdir_real, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump(dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ), fh)
+
+ os.chdir(topobjdir_link)
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+ self.assertEqual(obj.topobjdir, topobjdir_real)
+
+ os.chdir(topobjdir_real)
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+ self.assertEqual(obj.topobjdir, topobjdir_real)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_mach_command_base_inside_objdir(self):
+ """Ensure a MachCommandBase constructed from inside the objdir works."""
+
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ topobjdir = os.path.join(d, 'objdir')
+ os.makedirs(topobjdir)
+
+ topsrcdir = os.path.join(d, 'srcdir')
+ os.makedirs(topsrcdir)
+
+ mozinfo = os.path.join(topobjdir, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump(dict(
+ topsrcdir=topsrcdir,
+ ), fh)
+
+ os.chdir(topobjdir)
+
+ class MockMachContext(object):
+ pass
+
+ context = MockMachContext()
+ context.cwd = topobjdir
+ context.topdir = topsrcdir
+ context.settings = None
+ context.log_manager = None
+ context.detect_virtualenv_mozinfo=False
+
+ o = MachCommandBase(context)
+
+ self.assertEqual(o.topobjdir, mozpath.normsep(topobjdir))
+ self.assertEqual(o.topsrcdir, mozpath.normsep(topsrcdir))
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_objdir_is_srcdir_rejected(self):
+        """Ensure configurations where the objdir equals the srcdir are rejected."""
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+            # The easiest way to simulate this is a mozinfo.json whose
+            # topsrcdir is the very directory containing it.
+ mozinfo = os.path.join(d, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump({'topsrcdir': d}, fh)
+
+ os.chdir(d)
+
+ with self.assertRaises(BadEnvironmentException):
+ MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_objdir_mismatch(self):
+        """Ensure MachCommandBase throws on objdir mismatch."""
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ real_topobjdir = os.path.join(d, 'real-objdir')
+ os.makedirs(real_topobjdir)
+
+ topobjdir = os.path.join(d, 'objdir')
+ os.makedirs(topobjdir)
+
+ topsrcdir = os.path.join(d, 'srcdir')
+ os.makedirs(topsrcdir)
+
+ mozconfig = os.path.join(d, 'mozconfig')
+ with open(mozconfig, 'wt') as fh:
+ fh.write('mk_add_options MOZ_OBJDIR=%s' % real_topobjdir)
+
+ mozinfo = os.path.join(topobjdir, 'mozinfo.json')
+ with open(mozinfo, 'wt') as fh:
+ json.dump(dict(
+ topsrcdir=topsrcdir,
+ mozconfig=mozconfig,
+ ), fh)
+
+ os.chdir(topobjdir)
+
+ class MockMachContext(object):
+ pass
+
+ context = MockMachContext()
+ context.cwd = topobjdir
+ context.topdir = topsrcdir
+ context.settings = None
+ context.log_manager = None
+ context.detect_virtualenv_mozinfo=False
+
+ stdout = sys.stdout
+ sys.stdout = StringIO()
+ try:
+ with self.assertRaises(SystemExit):
+ MachCommandBase(context)
+
+ self.assertTrue(sys.stdout.getvalue().startswith(
+ 'Ambiguous object directory detected.'))
+ finally:
+ sys.stdout = stdout
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_config_environment(self):
+ base = self.get_base(topobjdir=topobjdir)
+
+ ce = base.config_environment
+ self.assertIsInstance(ce, ConfigEnvironment)
+
+ self.assertEqual(base.defines, ce.defines)
+ self.assertEqual(base.substs, ce.substs)
+
+ self.assertIsInstance(base.defines, dict)
+ self.assertIsInstance(base.substs, dict)
+
+ def test_get_binary_path(self):
+ base = self.get_base(topobjdir=topobjdir)
+
+ platform = sys.platform
+
+ # We should ideally use the config.status from the build. Let's install
+ # a fake one.
+ substs = [
+ ('MOZ_APP_NAME', 'awesomeapp'),
+ ('MOZ_BUILD_APP', 'awesomeapp'),
+ ]
+ if sys.platform.startswith('darwin'):
+ substs.append(('OS_ARCH', 'Darwin'))
+ substs.append(('BIN_SUFFIX', ''))
+ substs.append(('MOZ_MACBUNDLE_NAME', 'Nightly.app'))
+ elif sys.platform.startswith(('win32', 'cygwin')):
+ substs.append(('OS_ARCH', 'WINNT'))
+ substs.append(('BIN_SUFFIX', '.exe'))
+ else:
+ substs.append(('OS_ARCH', 'something'))
+ substs.append(('BIN_SUFFIX', ''))
+
+ base._config_environment = ConfigEnvironment(base.topsrcdir,
+ base.topobjdir, substs=substs)
+
+ p = base.get_binary_path('xpcshell', False)
+ if platform.startswith('darwin'):
+ self.assertTrue(p.endswith('Contents/MacOS/xpcshell'))
+ elif platform.startswith(('win32', 'cygwin')):
+ self.assertTrue(p.endswith('xpcshell.exe'))
+ else:
+ self.assertTrue(p.endswith('dist/bin/xpcshell'))
+
+ p = base.get_binary_path(validate_exists=False)
+ if platform.startswith('darwin'):
+ self.assertTrue(p.endswith('Contents/MacOS/awesomeapp'))
+ elif platform.startswith(('win32', 'cygwin')):
+ self.assertTrue(p.endswith('awesomeapp.exe'))
+ else:
+ self.assertTrue(p.endswith('dist/bin/awesomeapp'))
+
+ p = base.get_binary_path(validate_exists=False, where="staged-package")
+ if platform.startswith('darwin'):
+ self.assertTrue(p.endswith('awesomeapp/Nightly.app/Contents/MacOS/awesomeapp'))
+ elif platform.startswith(('win32', 'cygwin')):
+ self.assertTrue(p.endswith('awesomeapp\\awesomeapp.exe'))
+ else:
+ self.assertTrue(p.endswith('awesomeapp/awesomeapp'))
+
+ self.assertRaises(Exception, base.get_binary_path, where="somewhere")
+
+ p = base.get_binary_path('foobar', validate_exists=False)
+ if platform.startswith('win32'):
+ self.assertTrue(p.endswith('foobar.exe'))
+ else:
+ self.assertTrue(p.endswith('foobar'))
+
+class TestPathArgument(unittest.TestCase):
+ def test_path_argument(self):
+ # Absolute path
+ p = PathArgument("/obj/foo", "/src", "/obj", "/src")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Relative path within srcdir
+ p = PathArgument("foo", "/src", "/obj", "/src")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Relative path within subdirectory
+ p = PathArgument("bar", "/src", "/obj", "/src/foo")
+ self.assertEqual(p.relpath(), "foo/bar")
+ self.assertEqual(p.srcdir_path(), "/src/foo/bar")
+ self.assertEqual(p.objdir_path(), "/obj/foo/bar")
+
+ # Relative path within objdir
+ p = PathArgument("foo", "/src", "/obj", "/obj")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # "." path
+ p = PathArgument(".", "/src", "/obj", "/src/foo")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Nested src/obj directories
+ p = PathArgument("bar", "/src", "/src/obj", "/src/obj/foo")
+ self.assertEqual(p.relpath(), "foo/bar")
+ self.assertEqual(p.srcdir_path(), "/src/foo/bar")
+ self.assertEqual(p.objdir_path(), "/src/obj/foo/bar")
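+        # All cases above are consistent with a simple resolution rule (a
+        # sketch; the real logic lives in mozbuild.base.PathArgument): join
+        # the argument onto cwd, normalize, and take the result relative to
+        # whichever of objdir/srcdir prefixes it, checking objdir first so
+        # nested objdirs (the last case) resolve correctly:
+        #   abspath = os.path.normpath(os.path.join(cwd, arg))
+        #   for base in (objdir, srcdir):
+        #       if abspath.startswith(base + '/'):
+        #           return os.path.relpath(abspath, base)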
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_containers.py b/python/mozbuild/mozbuild/test/test_containers.py
new file mode 100644
index 000000000..3d46f86a9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_containers.py
@@ -0,0 +1,224 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.util import (
+ KeyedDefaultDict,
+ List,
+ OrderedDefaultDict,
+ ReadOnlyNamespace,
+ ReadOnlyDefaultDict,
+ ReadOnlyDict,
+ ReadOnlyKeyedDefaultDict,
+)
+
+from collections import OrderedDict
+
+
+class TestReadOnlyNamespace(unittest.TestCase):
+ def test_basic(self):
+ test = ReadOnlyNamespace(foo=1, bar=2)
+
+ self.assertEqual(test.foo, 1)
+ self.assertEqual(test.bar, 2)
+ self.assertEqual(
+ sorted(i for i in dir(test) if not i.startswith('__')),
+ ['bar', 'foo'])
+
+ with self.assertRaises(AttributeError):
+ value = test.missing
+
+ with self.assertRaises(Exception):
+ test.foo = 2
+
+ with self.assertRaises(Exception):
+ del test.foo
+
+ self.assertEqual(test, test)
+ self.assertEqual(test, ReadOnlyNamespace(foo=1, bar=2))
+ self.assertNotEqual(test, ReadOnlyNamespace(foo='1', bar=2))
+ self.assertNotEqual(test, ReadOnlyNamespace(foo=1, bar=2, qux=3))
+ self.assertNotEqual(test, ReadOnlyNamespace(foo=1, qux=3))
+ self.assertNotEqual(test, ReadOnlyNamespace(foo=3, bar='42'))
+
+
+class TestReadOnlyDict(unittest.TestCase):
+ def test_basic(self):
+ original = {'foo': 1, 'bar': 2}
+
+ test = ReadOnlyDict(original)
+
+ self.assertEqual(original, test)
+ self.assertEqual(test['foo'], 1)
+
+ with self.assertRaises(KeyError):
+ value = test['missing']
+
+ with self.assertRaises(Exception):
+ test['baz'] = True
+
+ def test_update(self):
+ original = {'foo': 1, 'bar': 2}
+
+ test = ReadOnlyDict(original)
+
+ with self.assertRaises(Exception):
+ test.update(foo=2)
+
+ self.assertEqual(original, test)
+
+ def test_del(self):
+ original = {'foo': 1, 'bar': 2}
+
+ test = ReadOnlyDict(original)
+
+ with self.assertRaises(Exception):
+ del test['foo']
+
+ self.assertEqual(original, test)
+
+
+class TestReadOnlyDefaultDict(unittest.TestCase):
+ def test_simple(self):
+ original = {'foo': 1, 'bar': 2}
+
+ test = ReadOnlyDefaultDict(bool, original)
+
+ self.assertEqual(original, test)
+
+ self.assertEqual(test['foo'], 1)
+
+ def test_assignment(self):
+ test = ReadOnlyDefaultDict(bool, {})
+
+ with self.assertRaises(Exception):
+ test['foo'] = True
+
+ def test_defaults(self):
+ test = ReadOnlyDefaultDict(bool, {'foo': 1})
+
+ self.assertEqual(test['foo'], 1)
+
+ self.assertEqual(test['qux'], False)
+
+
+class TestList(unittest.TestCase):
+ def test_add_list(self):
+ test = List([1, 2, 3])
+
+ test += [4, 5, 6]
+ self.assertIsInstance(test, List)
+ self.assertEqual(test, [1, 2, 3, 4, 5, 6])
+
+ test = test + [7, 8]
+ self.assertIsInstance(test, List)
+ self.assertEqual(test, [1, 2, 3, 4, 5, 6, 7, 8])
+
+ def test_add_string(self):
+ test = List([1, 2, 3])
+
+ with self.assertRaises(ValueError):
+ test += 'string'
+
+ def test_none(self):
+ """As a special exception, we allow None to be treated as an empty
+ list."""
+ test = List([1, 2, 3])
+
+ test += None
+ self.assertEqual(test, [1, 2, 3])
+
+ test = test + None
+ self.assertIsInstance(test, List)
+ self.assertEqual(test, [1, 2, 3])
+
+ with self.assertRaises(ValueError):
+ test += False
+
+ with self.assertRaises(ValueError):
+ test = test + False
+
+class TestOrderedDefaultDict(unittest.TestCase):
+ def test_simple(self):
+ original = OrderedDict(foo=1, bar=2)
+
+ test = OrderedDefaultDict(bool, original)
+
+ self.assertEqual(original, test)
+
+ self.assertEqual(test['foo'], 1)
+
+ self.assertEqual(test.keys(), ['foo', 'bar' ])
+
+ def test_defaults(self):
+ test = OrderedDefaultDict(bool, {'foo': 1 })
+
+ self.assertEqual(test['foo'], 1)
+
+ self.assertEqual(test['qux'], False)
+
+ self.assertEqual(test.keys(), ['foo', 'qux' ])
+
+
+class TestKeyedDefaultDict(unittest.TestCase):
+ def test_simple(self):
+ original = {'foo': 1, 'bar': 2 }
+
+ test = KeyedDefaultDict(lambda x: x, original)
+
+ self.assertEqual(original, test)
+
+ self.assertEqual(test['foo'], 1)
+
+ def test_defaults(self):
+ test = KeyedDefaultDict(lambda x: x, {'foo': 1 })
+
+ self.assertEqual(test['foo'], 1)
+
+ self.assertEqual(test['qux'], 'qux')
+
+ self.assertEqual(test['bar'], 'bar')
+
+ test['foo'] = 2
+ test['qux'] = None
+ test['baz'] = 'foo'
+
+ self.assertEqual(test['foo'], 2)
+
+ self.assertEqual(test['qux'], None)
+
+ self.assertEqual(test['baz'], 'foo')
+
+
+class TestReadOnlyKeyedDefaultDict(unittest.TestCase):
+ def test_defaults(self):
+ test = ReadOnlyKeyedDefaultDict(lambda x: x, {'foo': 1 })
+
+ self.assertEqual(test['foo'], 1)
+
+ self.assertEqual(test['qux'], 'qux')
+
+ self.assertEqual(test['bar'], 'bar')
+
+ copy = dict(test)
+
+ with self.assertRaises(Exception):
+ test['foo'] = 2
+
+ with self.assertRaises(Exception):
+ test['qux'] = None
+
+ with self.assertRaises(Exception):
+ test['baz'] = 'foo'
+
+ self.assertEqual(test, copy)
+
+ self.assertEqual(len(test), 3)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_dotproperties.py b/python/mozbuild/mozbuild/test/test_dotproperties.py
new file mode 100644
index 000000000..a03f85b0d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_dotproperties.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import unicode_literals
+
+import os
+import unittest
+
+from StringIO import StringIO
+
+import mozpack.path as mozpath
+
+from mozbuild.dotproperties import (
+ DotProperties,
+)
+
+from mozunit import (
+ main,
+)
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestDotProperties(unittest.TestCase):
+ def test_get(self):
+ contents = StringIO('''
+key=value
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get('missing'), None)
+ self.assertEqual(p.get('missing', 'default'), 'default')
+ self.assertEqual(p.get('key'), 'value')
+
+
+ def test_update(self):
+ contents = StringIO('''
+old=old value
+key=value
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get('old'), 'old value')
+ self.assertEqual(p.get('key'), 'value')
+
+ new_contents = StringIO('''
+key=new value
+''')
+ p.update(new_contents)
+ self.assertEqual(p.get('old'), 'old value')
+ self.assertEqual(p.get('key'), 'new value')
+
+
+ def test_get_list(self):
+ contents = StringIO('''
+list.0=A
+list.1=B
+list.2=C
+
+order.1=B
+order.0=A
+order.2=C
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get_list('missing'), [])
+ self.assertEqual(p.get_list('list'), ['A', 'B', 'C'])
+ self.assertEqual(p.get_list('order'), ['A', 'B', 'C'])
+
+
+ def test_get_list_with_shared_prefix(self):
+ contents = StringIO('''
+list.0=A
+list.1=B
+list.2=C
+
+list.sublist.1=E
+list.sublist.0=D
+list.sublist.2=F
+
+list.sublist.second.0=G
+
+list.other.0=H
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get_list('list'), ['A', 'B', 'C'])
+ self.assertEqual(p.get_list('list.sublist'), ['D', 'E', 'F'])
+ self.assertEqual(p.get_list('list.sublist.second'), ['G'])
+ self.assertEqual(p.get_list('list.other'), ['H'])
+
+
+ def test_get_dict(self):
+ contents = StringIO('''
+A.title=title A
+
+B.title=title B
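+        # For reference, a plain errors='replace' UTF-8 decode of those bytes
+        # yields exactly this string, one U+FFFD per invalid byte (whether
+        # the sandbox decodes precisely this way is an implementation
+        # detail):
+        #   b'\x83\x81\x83\x82\x3A'.decode('utf-8', 'replace')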
+B.url=url B
+
+C=value
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get_dict('missing'), {})
+ self.assertEqual(p.get_dict('A'), {'title': 'title A'})
+ self.assertEqual(p.get_dict('B'), {'title': 'title B', 'url': 'url B'})
+ with self.assertRaises(ValueError):
+ p.get_dict('A', required_keys=['title', 'url'])
+ with self.assertRaises(ValueError):
+ p.get_dict('missing', required_keys=['key'])
+ # A key=value pair is considered to root an empty dict.
+ self.assertEqual(p.get_dict('C'), {})
+ with self.assertRaises(ValueError):
+ p.get_dict('C', required_keys=['missing_key'])
+
+
+ def test_get_dict_with_shared_prefix(self):
+ contents = StringIO('''
+A.title=title A
+A.subdict.title=title A subdict
+
+B.title=title B
+B.url=url B
+B.subdict.title=title B subdict
+B.subdict.url=url B subdict
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get_dict('A'), {'title': 'title A'})
+ self.assertEqual(p.get_dict('B'), {'title': 'title B', 'url': 'url B'})
+ self.assertEqual(p.get_dict('A.subdict'),
+ {'title': 'title A subdict'})
+ self.assertEqual(p.get_dict('B.subdict'),
+ {'title': 'title B subdict', 'url': 'url B subdict'})
+
+ def test_get_dict_with_value_prefix(self):
+ contents = StringIO('''
+A.default=A
+A.default.B=B
+A.default.B.ignored=B ignored
+A.default.C=C
+A.default.C.ignored=C ignored
+''')
+ p = DotProperties(contents)
+ self.assertEqual(p.get('A.default'), 'A')
+ # This enumerates the properties.
+ self.assertEqual(p.get_dict('A.default'), {'B': 'B', 'C': 'C'})
+ # They can still be fetched directly.
+ self.assertEqual(p.get('A.default.B'), 'B')
+ self.assertEqual(p.get('A.default.C'), 'C')
+
+
+ def test_unicode(self):
+ contents = StringIO('''
+# Danish.
+# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae
+
+# Korean.
+A.title=한메일
+
+# Russian.
+list.0 = test
+list.1 = Яндекс
+''')
+ p = DotProperties(contents)
+        self.assertEqual(p.get_dict('A'), {'title': '한메일'})
+        self.assertEqual(p.get_list('list'), ['test', 'Яндекс'])
+
+ def test_valid_unicode_from_file(self):
+        # The contents of valid.properties are identical to the contents of
+        # the test above. This specifically exercises reading from a file.
+ p = DotProperties(os.path.join(test_data_path, 'valid.properties'))
+        self.assertEqual(p.get_dict('A'), {'title': '한메일'})
+        self.assertEqual(p.get_list('list'), ['test', 'Яндекс'])
+
+ def test_bad_unicode_from_file(self):
+        # The contents of bad.properties are not valid UTF-8; see the
+        # comments in the file itself for details.
+ with self.assertRaises(UnicodeDecodeError):
+ DotProperties(os.path.join(test_data_path, 'bad.properties'))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_expression.py b/python/mozbuild/mozbuild/test/test_expression.py
new file mode 100644
index 000000000..fb3c45894
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_expression.py
@@ -0,0 +1,82 @@
+import unittest
+
+import sys
+import os.path
+import mozunit
+
+from mozbuild.preprocessor import Expression, Context
+
+class TestContext(unittest.TestCase):
+ """
+ Unit tests for the Context class
+ """
+
+ def setUp(self):
+ self.c = Context()
+ self.c['FAIL'] = 'PASS'
+
+ def test_string_literal(self):
+ """test string literal, fall-through for undefined var in a Context"""
+ self.assertEqual(self.c['PASS'], 'PASS')
+
+ def test_variable(self):
+ """test value for defined var in the Context class"""
+ self.assertEqual(self.c['FAIL'], 'PASS')
+
+ def test_in(self):
+        """test that 'var in context' does not fall through to the fallback"""
+ self.assert_('FAIL' in self.c)
+ self.assert_('PASS' not in self.c)
+
+class TestExpression(unittest.TestCase):
+ """
+    Unit tests for the Expression class.
+    evaluate() is called with a context {FAIL: 'PASS'}.
+ """
+
+ def setUp(self):
+ self.c = Context()
+ self.c['FAIL'] = 'PASS'
+
+ def test_string_literal(self):
+ """Test for a string literal in an Expression"""
+ self.assertEqual(Expression('PASS').evaluate(self.c), 'PASS')
+
+ def test_variable(self):
+ """Test for variable value in an Expression"""
+ self.assertEqual(Expression('FAIL').evaluate(self.c), 'PASS')
+
+ def test_not(self):
+ """Test for the ! operator"""
+ self.assert_(Expression('!0').evaluate(self.c))
+ self.assert_(not Expression('!1').evaluate(self.c))
+
+ def test_equals(self):
+ """ Test for the == operator"""
+ self.assert_(Expression('FAIL == PASS').evaluate(self.c))
+
+ def test_notequals(self):
+ """ Test for the != operator"""
+ self.assert_(Expression('FAIL != 1').evaluate(self.c))
+
+ def test_logical_and(self):
+ """ Test for the && operator"""
+ self.assertTrue(Expression('PASS == PASS && PASS != NOTPASS').evaluate(self.c))
+
+ def test_logical_or(self):
+ """ Test for the || operator"""
+ self.assertTrue(Expression('PASS == NOTPASS || PASS != NOTPASS').evaluate(self.c))
+
+ def test_logical_ops(self):
+        """ Test for the && and || operators' precedence"""
+ # Would evaluate to false if precedence was wrong
+ self.assertTrue(Expression('PASS == PASS || PASS != NOTPASS && PASS == NOTPASS').evaluate(self.c))
+
+ def test_defined(self):
+ """ Test for the defined() value"""
+ self.assertTrue(Expression('defined(FAIL)').evaluate(self.c))
+ self.assertTrue(Expression('!defined(PASS)').evaluate(self.c))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_jarmaker.py b/python/mozbuild/mozbuild/test/test_jarmaker.py
new file mode 100644
index 000000000..a4d4156a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_jarmaker.py
@@ -0,0 +1,367 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function
+import unittest
+
+import os, sys, os.path, time, inspect
+from filecmp import dircmp
+from tempfile import mkdtemp
+from shutil import rmtree, copy2
+from StringIO import StringIO
+from zipfile import ZipFile
+import mozunit
+
+from mozbuild.jar import JarMaker
+
+
+if sys.platform == "win32":
+ import ctypes
+ from ctypes import POINTER, WinError
+ DWORD = ctypes.c_ulong
+ LPDWORD = POINTER(DWORD)
+ HANDLE = ctypes.c_void_p
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ OPEN_EXISTING = 3
+ MAX_PATH = 260
+
+ class FILETIME(ctypes.Structure):
+ _fields_ = [("dwLowDateTime", DWORD),
+ ("dwHighDateTime", DWORD)]
+
+ class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
+ _fields_ = [("dwFileAttributes", DWORD),
+ ("ftCreationTime", FILETIME),
+ ("ftLastAccessTime", FILETIME),
+ ("ftLastWriteTime", FILETIME),
+ ("dwVolumeSerialNumber", DWORD),
+ ("nFileSizeHigh", DWORD),
+ ("nFileSizeLow", DWORD),
+ ("nNumberOfLinks", DWORD),
+ ("nFileIndexHigh", DWORD),
+ ("nFileIndexLow", DWORD)]
+
+ # http://msdn.microsoft.com/en-us/library/aa363858
+ CreateFile = ctypes.windll.kernel32.CreateFileA
+ CreateFile.argtypes = [ctypes.c_char_p, DWORD, DWORD, ctypes.c_void_p,
+ DWORD, DWORD, HANDLE]
+ CreateFile.restype = HANDLE
+
+ # http://msdn.microsoft.com/en-us/library/aa364952
+ GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle
+ GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
+ GetFileInformationByHandle.restype = ctypes.c_int
+
+ # http://msdn.microsoft.com/en-us/library/aa364996
+ GetVolumePathName = ctypes.windll.kernel32.GetVolumePathNameA
+ GetVolumePathName.argtypes = [ctypes.c_char_p, ctypes.c_char_p, DWORD]
+ GetVolumePathName.restype = ctypes.c_int
+
+ # http://msdn.microsoft.com/en-us/library/aa364993
+ GetVolumeInformation = ctypes.windll.kernel32.GetVolumeInformationA
+ GetVolumeInformation.argtypes = [ctypes.c_char_p, ctypes.c_char_p, DWORD,
+ LPDWORD, LPDWORD, LPDWORD, ctypes.c_char_p,
+ DWORD]
+ GetVolumeInformation.restype = ctypes.c_int
+
+def symlinks_supported(path):
+ if sys.platform == "win32":
+ # Add 1 for a trailing backslash if necessary, and 1 for the terminating
+ # null character.
+ volpath = ctypes.create_string_buffer(len(path) + 2)
+ rv = GetVolumePathName(path, volpath, len(volpath))
+ if rv == 0:
+ raise WinError()
+
+ fsname = ctypes.create_string_buffer(MAX_PATH + 1)
+ rv = GetVolumeInformation(volpath, None, 0, None, None, None, fsname,
+ len(fsname))
+ if rv == 0:
+ raise WinError()
+
+ # Return true only if the fsname is NTFS
+ return fsname.value == "NTFS"
+ else:
+ return True
+
+def _getfileinfo(path):
+ """Return information for the given file. This only works on Windows."""
+ fh = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None, OPEN_EXISTING, 0, None)
+ if fh is None:
+ raise WinError()
+ info = BY_HANDLE_FILE_INFORMATION()
+ rv = GetFileInformationByHandle(fh, info)
+ if rv == 0:
+ raise WinError()
+ return info
+
+def is_symlink_to(dest, src):
+ if sys.platform == "win32":
+ # Check if both are on the same volume and have the same file ID
+ destinfo = _getfileinfo(dest)
+ srcinfo = _getfileinfo(src)
+ return (destinfo.dwVolumeSerialNumber == srcinfo.dwVolumeSerialNumber and
+ destinfo.nFileIndexHigh == srcinfo.nFileIndexHigh and
+ destinfo.nFileIndexLow == srcinfo.nFileIndexLow)
+ else:
+ # Read the link and check if it is correct
+ if not os.path.islink(dest):
+ return False
+ target = os.path.abspath(os.readlink(dest))
+ abssrc = os.path.abspath(src)
+ return target == abssrc
+
+class _TreeDiff(dircmp):
+    """Helper to report rich results on differences between two directories.
+ """
+ def _fillDiff(self, dc, rv, basepath="{0}"):
+ rv['right_only'] += map(lambda l: basepath.format(l), dc.right_only)
+ rv['left_only'] += map(lambda l: basepath.format(l), dc.left_only)
+ rv['diff_files'] += map(lambda l: basepath.format(l), dc.diff_files)
+ rv['funny'] += map(lambda l: basepath.format(l), dc.common_funny)
+ rv['funny'] += map(lambda l: basepath.format(l), dc.funny_files)
+ for subdir, _dc in dc.subdirs.iteritems():
+ self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
+ def allResults(self, left, right):
+ rv = {'right_only':[], 'left_only':[],
+ 'diff_files':[], 'funny': []}
+ self._fillDiff(self, rv)
+ chunks = []
+ if rv['right_only']:
+ chunks.append('{0} only in {1}'.format(', '.join(rv['right_only']),
+ right))
+ if rv['left_only']:
+ chunks.append('{0} only in {1}'.format(', '.join(rv['left_only']),
+ left))
+ if rv['diff_files']:
+ chunks.append('{0} differ'.format(', '.join(rv['diff_files'])))
+ if rv['funny']:
+ chunks.append("{0} don't compare".format(', '.join(rv['funny'])))
+ return '; '.join(chunks)
+
+class TestJarMaker(unittest.TestCase):
+ """
+ Unit tests for JarMaker.py
+ """
+ debug = False # set to True to debug failing tests on disk
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+ self.srcdir = os.path.join(self.tmpdir, 'src')
+ os.mkdir(self.srcdir)
+ self.builddir = os.path.join(self.tmpdir, 'build')
+ os.mkdir(self.builddir)
+ self.refdir = os.path.join(self.tmpdir, 'ref')
+ os.mkdir(self.refdir)
+ self.stagedir = os.path.join(self.tmpdir, 'stage')
+ os.mkdir(self.stagedir)
+
+ def tearDown(self):
+ if self.debug:
+ print(self.tmpdir)
+ elif sys.platform != "win32":
+ # can't clean up on windows
+ rmtree(self.tmpdir)
+
+ def _jar_and_compare(self, infile, **kwargs):
+ jm = JarMaker(outputFormat='jar')
+ if 'topsourcedir' not in kwargs:
+ kwargs['topsourcedir'] = self.srcdir
+ for attr in ('topsourcedir', 'sourcedirs'):
+ if attr in kwargs:
+ setattr(jm, attr, kwargs[attr])
+ jm.makeJar(infile, self.builddir)
+ cwd = os.getcwd()
+ os.chdir(self.builddir)
+ try:
+ # expand build to stage
+ for path, dirs, files in os.walk('.'):
+ stagedir = os.path.join(self.stagedir, path)
+ if not os.path.isdir(stagedir):
+ os.mkdir(stagedir)
+ for file in files:
+ if file.endswith('.jar'):
+ # expand jar
+ stagepath = os.path.join(stagedir, file)
+ os.mkdir(stagepath)
+ zf = ZipFile(os.path.join(path, file))
+ # extractall is only in 2.6, do this manually :-(
+ for entry_name in zf.namelist():
+ segs = entry_name.split('/')
+ fname = segs.pop()
+ dname = os.path.join(stagepath, *segs)
+ if not os.path.isdir(dname):
+ os.makedirs(dname)
+ if not fname:
+ # directory, we're done
+ continue
+ _c = zf.read(entry_name)
+ open(os.path.join(dname, fname), 'wb').write(_c)
+ zf.close()
+ else:
+ copy2(os.path.join(path, file), stagedir)
+ # compare both dirs
+ os.chdir('..')
+ td = _TreeDiff('ref', 'stage')
+ return td.allResults('reference', 'build')
+ finally:
+ os.chdir(cwd)
+
+ def _create_simple_setup(self):
+ # create src content
+ jarf = open(os.path.join(self.srcdir, 'jar.mn'), 'w')
+ jarf.write('''test.jar:
+ dir/foo (bar)
+''')
+ jarf.close()
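+        # The jar.mn entry above maps the source file 'bar' into test.jar at
+        # dir/foo; the reference tree created below mirrors that layout.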
+ open(os.path.join(self.srcdir,'bar'),'w').write('content\n')
+ # create reference
+ refpath = os.path.join(self.refdir, 'chrome', 'test.jar', 'dir')
+ os.makedirs(refpath)
+ open(os.path.join(refpath, 'foo'), 'w').write('content\n')
+
+ def test_a_simple_jar(self):
+ '''Test a simple jar.mn'''
+ self._create_simple_setup()
+ # call JarMaker
+ rv = self._jar_and_compare(os.path.join(self.srcdir,'jar.mn'),
+ sourcedirs = [self.srcdir])
+ self.assertTrue(not rv, rv)
+
+ def test_a_simple_symlink(self):
+ '''Test a simple jar.mn with a symlink'''
+ if not symlinks_supported(self.srcdir):
+ raise unittest.SkipTest('symlinks not supported')
+
+ self._create_simple_setup()
+ jm = JarMaker(outputFormat='symlink')
+ jm.sourcedirs = [self.srcdir]
+ jm.topsourcedir = self.srcdir
+ jm.makeJar(os.path.join(self.srcdir,'jar.mn'), self.builddir)
+ # All we do is check that srcdir/bar points to builddir/chrome/test/dir/foo
+ srcbar = os.path.join(self.srcdir, 'bar')
+ destfoo = os.path.join(self.builddir, 'chrome', 'test', 'dir', 'foo')
+ self.assertTrue(is_symlink_to(destfoo, srcbar),
+ "{0} is not a symlink to {1}".format(destfoo, srcbar))
+
+ def _create_wildcard_setup(self):
+ # create src content
+ jarf = open(os.path.join(self.srcdir, 'jar.mn'), 'w')
+ jarf.write('''test.jar:
+ dir/bar (*.js)
+ dir/hoge (qux/*)
+''')
+ jarf.close()
+ open(os.path.join(self.srcdir,'foo.js'),'w').write('foo.js\n')
+ open(os.path.join(self.srcdir,'bar.js'),'w').write('bar.js\n')
+ os.makedirs(os.path.join(self.srcdir, 'qux', 'foo'))
+ open(os.path.join(self.srcdir,'qux', 'foo', '1'),'w').write('1\n')
+ open(os.path.join(self.srcdir,'qux', 'foo', '2'),'w').write('2\n')
+ open(os.path.join(self.srcdir,'qux', 'baz'),'w').write('baz\n')
+ # create reference
+ refpath = os.path.join(self.refdir, 'chrome', 'test.jar', 'dir')
+ os.makedirs(os.path.join(refpath, 'bar'))
+ os.makedirs(os.path.join(refpath, 'hoge', 'foo'))
+ open(os.path.join(refpath, 'bar', 'foo.js'), 'w').write('foo.js\n')
+ open(os.path.join(refpath, 'bar', 'bar.js'), 'w').write('bar.js\n')
+ open(os.path.join(refpath, 'hoge', 'foo', '1'), 'w').write('1\n')
+ open(os.path.join(refpath, 'hoge', 'foo', '2'), 'w').write('2\n')
+ open(os.path.join(refpath, 'hoge', 'baz'), 'w').write('baz\n')
+
+ def test_a_wildcard_jar(self):
+ '''Test a wildcard in jar.mn'''
+ self._create_wildcard_setup()
+ # call JarMaker
+ rv = self._jar_and_compare(os.path.join(self.srcdir,'jar.mn'),
+ sourcedirs = [self.srcdir])
+ self.assertTrue(not rv, rv)
+
+ def test_a_wildcard_symlink(self):
+ '''Test a wildcard in jar.mn with symlinks'''
+ if not symlinks_supported(self.srcdir):
+ raise unittest.SkipTest('symlinks not supported')
+
+ self._create_wildcard_setup()
+ jm = JarMaker(outputFormat='symlink')
+ jm.sourcedirs = [self.srcdir]
+ jm.topsourcedir = self.srcdir
+ jm.makeJar(os.path.join(self.srcdir,'jar.mn'), self.builddir)
+
+ expected_symlinks = {
+ ('bar', 'foo.js'): ('foo.js',),
+ ('bar', 'bar.js'): ('bar.js',),
+ ('hoge', 'foo', '1'): ('qux', 'foo', '1'),
+ ('hoge', 'foo', '2'): ('qux', 'foo', '2'),
+ ('hoge', 'baz'): ('qux', 'baz'),
+ }
+ for dest, src in expected_symlinks.iteritems():
+ srcpath = os.path.join(self.srcdir, *src)
+ destpath = os.path.join(self.builddir, 'chrome', 'test', 'dir',
+ *dest)
+ self.assertTrue(is_symlink_to(destpath, srcpath),
+ "{0} is not a symlink to {1}".format(destpath,
+ srcpath))
+
+
+class Test_relativesrcdir(unittest.TestCase):
+ def setUp(self):
+ self.jm = JarMaker()
+ self.jm.topsourcedir = '/TOPSOURCEDIR'
+ self.jm.relativesrcdir = 'browser/locales'
+ self.fake_empty_file = StringIO()
+ self.fake_empty_file.name = 'fake_empty_file'
+ def tearDown(self):
+ del self.jm
+ del self.fake_empty_file
+ def test_en_US(self):
+ jm = self.jm
+ jm.makeJar(self.fake_empty_file, '/NO_OUTPUT_REQUIRED')
+ self.assertEquals(jm.localedirs,
+ [
+ os.path.join(os.path.abspath('/TOPSOURCEDIR'),
+ 'browser/locales', 'en-US')
+ ])
+ def test_l10n_no_merge(self):
+ jm = self.jm
+ jm.l10nbase = '/L10N_BASE'
+ jm.makeJar(self.fake_empty_file, '/NO_OUTPUT_REQUIRED')
+ self.assertEquals(jm.localedirs, [os.path.join('/L10N_BASE', 'browser')])
+ def test_l10n_merge(self):
+ jm = self.jm
+ jm.l10nbase = '/L10N_BASE'
+ jm.l10nmerge = '/L10N_MERGE'
+ jm.makeJar(self.fake_empty_file, '/NO_OUTPUT_REQUIRED')
+ self.assertEquals(jm.localedirs,
+ [os.path.join('/L10N_MERGE', 'browser'),
+ os.path.join('/L10N_BASE', 'browser'),
+ os.path.join(os.path.abspath('/TOPSOURCEDIR'),
+ 'browser/locales', 'en-US')
+ ])
+ def test_override(self):
+ jm = self.jm
+ jm.outputFormat = 'flat' # doesn't touch chrome dir without files
+ jarcontents = StringIO('''en-US.jar:
+relativesrcdir dom/locales:
+''')
+ jarcontents.name = 'override.mn'
+ jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
+ self.assertEquals(jm.localedirs,
+ [
+ os.path.join(os.path.abspath('/TOPSOURCEDIR'),
+ 'dom/locales', 'en-US')
+ ])
+ def test_override_l10n(self):
+ jm = self.jm
+ jm.l10nbase = '/L10N_BASE'
+ jm.outputFormat = 'flat' # doesn't touch chrome dir without files
+ jarcontents = StringIO('''en-US.jar:
+relativesrcdir dom/locales:
+''')
+ jarcontents.name = 'override.mn'
+ jm.makeJar(jarcontents, '/NO_OUTPUT_REQUIRED')
+ self.assertEquals(jm.localedirs, [os.path.join('/L10N_BASE', 'dom')])
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_line_endings.py b/python/mozbuild/mozbuild/test/test_line_endings.py
new file mode 100644
index 000000000..565abc8c9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_line_endings.py
@@ -0,0 +1,46 @@
+import unittest
+
+from StringIO import StringIO
+import os
+import sys
+import os.path
+import mozunit
+
+from mozbuild.preprocessor import Preprocessor
+
+class TestLineEndings(unittest.TestCase):
+ """
+    Unit tests for line-ending handling in the Preprocessor
+ """
+
+ def setUp(self):
+ self.pp = Preprocessor()
+ self.pp.out = StringIO()
+ self.tempnam = os.tempnam('.')
+
+ def tearDown(self):
+ os.remove(self.tempnam)
+
+ def createFile(self, lineendings):
+ f = open(self.tempnam, 'wb')
+ for line, ending in zip(['a', '#literal b', 'c'], lineendings):
+ f.write(line+ending)
+ f.close()
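+        # '#literal b' exercises the preprocessor's literal directive: the
+        # expected output in each test below shows it emitting a plain 'b'
+        # regardless of the input line ending.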
+
+ def testMac(self):
+ self.createFile(['\x0D']*3)
+ self.pp.do_include(self.tempnam)
+ self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
+
+ def testUnix(self):
+ self.createFile(['\x0A']*3)
+ self.pp.do_include(self.tempnam)
+ self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
+
+ def testWindows(self):
+ self.createFile(['\x0D\x0A']*3)
+ self.pp.do_include(self.tempnam)
+ self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_makeutil.py b/python/mozbuild/mozbuild/test/test_makeutil.py
new file mode 100644
index 000000000..6fffa0e0e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_makeutil.py
@@ -0,0 +1,165 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.makeutil import (
+ Makefile,
+ read_dep_makefile,
+ Rule,
+ write_dep_makefile,
+)
+from mozunit import main
+import os
+import unittest
+from StringIO import StringIO
+
+
+class TestMakefile(unittest.TestCase):
+ def test_rule(self):
+ out = StringIO()
+ rule = Rule()
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), '')
+ out.truncate(0)
+
+ rule.add_targets(['foo', 'bar'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar:\n')
+ out.truncate(0)
+
+ rule.add_targets(['baz'])
+ rule.add_dependencies(['qux', 'hoge', 'piyo'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar baz: qux hoge piyo\n')
+ out.truncate(0)
+
+ rule = Rule(['foo', 'bar'])
+ rule.add_dependencies(['baz'])
+ rule.add_commands(['echo $@'])
+ rule.add_commands(['$(BAZ) -o $@ $<', '$(TOUCH) $@'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(),
+ 'foo bar: baz\n' +
+ '\techo $@\n' +
+ '\t$(BAZ) -o $@ $<\n' +
+ '\t$(TOUCH) $@\n')
+ out.truncate(0)
+
+ rule = Rule(['foo'])
+ rule.add_dependencies(['bar', 'foo', 'baz'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo: bar baz\n')
+ out.truncate(0)
+
+ rule.add_targets(['bar'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz\n')
+ out.truncate(0)
+
+ rule.add_targets(['bar'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz\n')
+ out.truncate(0)
+
+ rule.add_dependencies(['bar'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz\n')
+ out.truncate(0)
+
+ rule.add_dependencies(['qux'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz qux\n')
+ out.truncate(0)
+
+ rule.add_dependencies(['qux'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz qux\n')
+ out.truncate(0)
+
+ rule.add_dependencies(['hoge', 'hoge'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar: baz qux hoge\n')
+ out.truncate(0)
+
+ rule.add_targets(['fuga', 'fuga'])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), 'foo bar fuga: baz qux hoge\n')
+
+ def test_makefile(self):
+ out = StringIO()
+ mk = Makefile()
+ rule = mk.create_rule(['foo'])
+ rule.add_dependencies(['bar', 'baz', 'qux'])
+ rule.add_commands(['echo foo'])
+ rule = mk.create_rule().add_targets(['bar', 'baz'])
+ rule.add_dependencies(['hoge'])
+ rule.add_commands(['echo $@'])
+ mk.dump(out, removal_guard=False)
+ self.assertEqual(out.getvalue(),
+ 'foo: bar baz qux\n' +
+ '\techo foo\n' +
+ 'bar baz: hoge\n' +
+ '\techo $@\n')
+ out.truncate(0)
+
+ mk.dump(out)
+ self.assertEqual(out.getvalue(),
+ 'foo: bar baz qux\n' +
+ '\techo foo\n' +
+ 'bar baz: hoge\n' +
+ '\techo $@\n' +
+ 'hoge qux:\n')
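+        # The default dump (removal_guard=True) appends an empty rule for
+        # every dependency that is never a target ('hoge qux:' above): the
+        # usual make idiom to avoid errors when such files disappear.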
+
+ def test_statement(self):
+ out = StringIO()
+ mk = Makefile()
+ mk.create_rule(['foo']).add_dependencies(['bar']) \
+ .add_commands(['echo foo'])
+ mk.add_statement('BAR = bar')
+ mk.create_rule(['$(BAR)']).add_commands(['echo $@'])
+ mk.dump(out, removal_guard=False)
+ self.assertEqual(out.getvalue(),
+ 'foo: bar\n' +
+ '\techo foo\n' +
+ 'BAR = bar\n' +
+ '$(BAR):\n' +
+ '\techo $@\n')
+
+ @unittest.skipIf(os.name != 'nt', 'Test only applicable on Windows.')
+ def test_path_normalization(self):
+ out = StringIO()
+ mk = Makefile()
+ rule = mk.create_rule(['c:\\foo'])
+ rule.add_dependencies(['c:\\bar', 'c:\\baz\\qux'])
+ rule.add_commands(['echo c:\\foo'])
+ mk.dump(out)
+ self.assertEqual(out.getvalue(),
+ 'c:/foo: c:/bar c:/baz/qux\n' +
+ '\techo c:\\foo\n' +
+ 'c:/bar c:/baz/qux:\n')
+
+ def test_read_dep_makefile(self):
+ input = StringIO(
+ os.path.abspath('foo') + ': bar\n' +
+ 'baz qux: \\ \n' +
+ 'hoge \\\n' +
+ 'piyo \\\n' +
+ 'fuga\n' +
+ 'fuga:\n'
+ )
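+        # Note the backslash-space ending the second rule's first line: the
+        # reader evidently tolerates trailing whitespace after a line
+        # continuation. The absolute target is also normalized to forward
+        # slashes (checked below).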
+ result = list(read_dep_makefile(input))
+ self.assertEqual(len(result), 2)
+ self.assertEqual(list(result[0].targets()), [os.path.abspath('foo').replace(os.sep, '/')])
+ self.assertEqual(list(result[0].dependencies()), ['bar'])
+ self.assertEqual(list(result[1].targets()), ['baz', 'qux'])
+ self.assertEqual(list(result[1].dependencies()), ['hoge', 'piyo', 'fuga'])
+
+ def test_write_dep_makefile(self):
+ out = StringIO()
+ write_dep_makefile(out, 'target', ['b', 'c', 'a'])
+ self.assertEqual(out.getvalue(),
+ 'target: b c a\n' +
+ 'a b c:\n')
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_mozconfig.py b/python/mozbuild/mozbuild/test/test_mozconfig.py
new file mode 100644
index 000000000..0cd125912
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_mozconfig.py
@@ -0,0 +1,489 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import unittest
+
+from shutil import rmtree
+
+from tempfile import (
+ gettempdir,
+ mkdtemp,
+)
+
+from mozfile.mozfile import NamedTemporaryFile
+
+from mozunit import main
+
+from mozbuild.mozconfig import (
+ MozconfigFindException,
+ MozconfigLoadException,
+ MozconfigLoader,
+)
+
+
+class TestMozconfigLoader(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop('MOZCONFIG', None)
+ os.environ.pop('MOZ_OBJDIR', None)
+ os.environ.pop('CC', None)
+ os.environ.pop('CXX', None)
+ self._temp_dirs = set()
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ for d in self._temp_dirs:
+ rmtree(d)
+
+ def get_loader(self):
+ return MozconfigLoader(self.get_temp_dir())
+
+ def get_temp_dir(self):
+ d = mkdtemp()
+ self._temp_dirs.add(d)
+
+ return d
+
+ def test_find_legacy_env(self):
+ """Ensure legacy mozconfig path definitions result in error."""
+
+ os.environ[b'MOZ_MYCONFIG'] = '/foo'
+
+ with self.assertRaises(MozconfigFindException) as e:
+ self.get_loader().find_mozconfig()
+
+ self.assertTrue(e.exception.message.startswith('The MOZ_MYCONFIG'))
+
+ def test_find_multiple_configs(self):
+ """Ensure multiple relative-path MOZCONFIGs result in error."""
+ relative_mozconfig = '.mconfig'
+ os.environ[b'MOZCONFIG'] = relative_mozconfig
+
+ srcdir = self.get_temp_dir()
+ curdir = self.get_temp_dir()
+ dirs = [srcdir, curdir]
+ loader = MozconfigLoader(srcdir)
+ for d in dirs:
+ path = os.path.join(d, relative_mozconfig)
+ with open(path, 'wb') as f:
+ f.write(path)
+
+ orig_dir = os.getcwd()
+ try:
+ os.chdir(curdir)
+ with self.assertRaises(MozconfigFindException) as e:
+ loader.find_mozconfig()
+ finally:
+ os.chdir(orig_dir)
+
+ self.assertIn('exists in more than one of', e.exception.message)
+ for d in dirs:
+ self.assertIn(d, e.exception.message)
+
+ def test_find_multiple_but_identical_configs(self):
+ """Ensure multiple relative-path MOZCONFIGs pointing at the same file are OK."""
+ relative_mozconfig = '../src/.mconfig'
+ os.environ[b'MOZCONFIG'] = relative_mozconfig
+
+ topdir = self.get_temp_dir()
+ srcdir = os.path.join(topdir, 'src')
+ os.mkdir(srcdir)
+ curdir = os.path.join(topdir, 'obj')
+ os.mkdir(curdir)
+
+ loader = MozconfigLoader(srcdir)
+ path = os.path.join(srcdir, relative_mozconfig)
+ with open(path, 'w'):
+ pass
+
+ orig_dir = os.getcwd()
+ try:
+ os.chdir(curdir)
+ self.assertEqual(os.path.realpath(loader.find_mozconfig()),
+ os.path.realpath(path))
+ finally:
+ os.chdir(orig_dir)
+
+ def test_find_no_relative_configs(self):
+ """Ensure a missing relative-path MOZCONFIG is detected."""
+ relative_mozconfig = '.mconfig'
+ os.environ[b'MOZCONFIG'] = relative_mozconfig
+
+ srcdir = self.get_temp_dir()
+ curdir = self.get_temp_dir()
+ dirs = [srcdir, curdir]
+ loader = MozconfigLoader(srcdir)
+
+ orig_dir = os.getcwd()
+ try:
+ os.chdir(curdir)
+ with self.assertRaises(MozconfigFindException) as e:
+ loader.find_mozconfig()
+ finally:
+ os.chdir(orig_dir)
+
+ self.assertIn('does not exist in any of', e.exception.message)
+ for d in dirs:
+ self.assertIn(d, e.exception.message)
+
+ def test_find_relative_mozconfig(self):
+ """Ensure a relative MOZCONFIG can be found in the srcdir."""
+ relative_mozconfig = '.mconfig'
+ os.environ[b'MOZCONFIG'] = relative_mozconfig
+
+ srcdir = self.get_temp_dir()
+ curdir = self.get_temp_dir()
+ dirs = [srcdir, curdir]
+ loader = MozconfigLoader(srcdir)
+
+ path = os.path.join(srcdir, relative_mozconfig)
+ with open(path, 'w'):
+ pass
+
+ orig_dir = os.getcwd()
+ try:
+ os.chdir(curdir)
+ self.assertEqual(os.path.normpath(loader.find_mozconfig()),
+ os.path.normpath(path))
+ finally:
+ os.chdir(orig_dir)
+
+ def test_find_abs_path_not_exist(self):
+ """Ensure a missing absolute path is detected."""
+ os.environ[b'MOZCONFIG'] = '/foo/bar/does/not/exist'
+
+ with self.assertRaises(MozconfigFindException) as e:
+ self.get_loader().find_mozconfig()
+
+ self.assertIn('path that does not exist', e.exception.message)
+ self.assertTrue(e.exception.message.endswith('/foo/bar/does/not/exist'))
+
+ def test_find_path_not_file(self):
+ """Ensure non-file paths are detected."""
+
+ os.environ[b'MOZCONFIG'] = gettempdir()
+
+ with self.assertRaises(MozconfigFindException) as e:
+ self.get_loader().find_mozconfig()
+
+ self.assertIn('refers to a non-file', e.exception.message)
+ self.assertTrue(e.exception.message.endswith(gettempdir()))
+
+ def test_find_default_files(self):
+ """Ensure default paths are used when present."""
+ for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
+ d = self.get_temp_dir()
+ path = os.path.join(d, p)
+
+ with open(path, 'w'):
+ pass
+
+ self.assertEqual(MozconfigLoader(d).find_mozconfig(), path)
+
+ def test_find_multiple_defaults(self):
+ """Ensure we error when multiple default files are present."""
+ self.assertGreater(len(MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS), 1)
+
+ d = self.get_temp_dir()
+ for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
+ with open(os.path.join(d, p), 'w'):
+ pass
+
+ with self.assertRaises(MozconfigFindException) as e:
+ MozconfigLoader(d).find_mozconfig()
+
+ self.assertIn('Multiple default mozconfig files present',
+ e.exception.message)
+
+ def test_find_deprecated_path_srcdir(self):
+ """Ensure we error when deprecated path locations are present."""
+ for p in MozconfigLoader.DEPRECATED_TOPSRCDIR_PATHS:
+ d = self.get_temp_dir()
+ with open(os.path.join(d, p), 'w'):
+ pass
+
+ with self.assertRaises(MozconfigFindException) as e:
+ MozconfigLoader(d).find_mozconfig()
+
+ self.assertIn('This implicit location is no longer',
+ e.exception.message)
+ self.assertIn(d, e.exception.message)
+
+ def test_find_deprecated_home_paths(self):
+ """Ensure we error when deprecated home directory paths are present."""
+
+ for p in MozconfigLoader.DEPRECATED_HOME_PATHS:
+ home = self.get_temp_dir()
+ os.environ[b'HOME'] = home
+ path = os.path.join(home, p)
+
+ with open(path, 'w'):
+ pass
+
+ with self.assertRaises(MozconfigFindException) as e:
+ self.get_loader().find_mozconfig()
+
+ self.assertIn('This implicit location is no longer',
+ e.exception.message)
+ self.assertIn(path, e.exception.message)
+
+ def test_read_no_mozconfig(self):
+ # This is basically to ensure changes to defaults incur a test failure.
+ result = self.get_loader().read_mozconfig()
+
+ self.assertEqual(result, {
+ 'path': None,
+ 'topobjdir': None,
+ 'configure_args': None,
+ 'make_flags': None,
+ 'make_extra': None,
+ 'env': None,
+ 'vars': None,
+ })
+
+ def test_read_empty_mozconfig(self):
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['path'], mozconfig.name)
+ self.assertIsNone(result['topobjdir'])
+ self.assertEqual(result['configure_args'], [])
+ self.assertEqual(result['make_flags'], [])
+ self.assertEqual(result['make_extra'], [])
+
+ for f in ('added', 'removed', 'modified'):
+ self.assertEqual(len(result['vars'][f]), 0)
+ self.assertEqual(len(result['env'][f]), 0)
+
+ self.assertEqual(result['env']['unmodified'], {})
+
+ def test_read_capture_ac_options(self):
+ """Ensures ac_add_options calls are captured."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('ac_add_options --enable-debug\n')
+ mozconfig.write('ac_add_options --disable-tests --enable-foo\n')
+ mozconfig.write('ac_add_options --foo="bar baz"\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result['configure_args'], [
+ '--enable-debug', '--disable-tests', '--enable-foo',
+ '--foo=bar baz'])
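+
+ # Sketch of the capture mechanism (an assumption, not stated in the
+ # test): the shell evaluates each ac_add_options line before capture,
+ # so quoting is resolved first. A hypothetical line such as
+ # ac_add_options --with-app-name="my app"
+ # would arrive as the single argument '--with-app-name=my app'.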
+
+ def test_read_ac_options_substitution(self):
+ """Ensure ac_add_options values are substituted."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('ac_add_options --foo=@TOPSRCDIR@\n')
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+ self.assertEqual(result['configure_args'], [
+ '--foo=%s' % loader.topsrcdir])
+
+ def test_read_ac_app_options(self):
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('ac_add_options --foo=@TOPSRCDIR@\n')
+ mozconfig.write('ac_add_app_options app1 --bar=@TOPSRCDIR@\n')
+ mozconfig.write('ac_add_app_options app2 --bar=x\n')
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name, moz_build_app='app1')
+ self.assertEqual(result['configure_args'], [
+ '--foo=%s' % loader.topsrcdir,
+ '--bar=%s' % loader.topsrcdir])
+
+ result = loader.read_mozconfig(mozconfig.name, moz_build_app='app2')
+ self.assertEqual(result['configure_args'], [
+ '--foo=%s' % loader.topsrcdir,
+ '--bar=x'])
+
+ def test_read_capture_mk_options(self):
+ """Ensures mk_add_options calls are captured."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('mk_add_options MOZ_OBJDIR=/foo/bar\n')
+ mozconfig.write('mk_add_options MOZ_MAKE_FLAGS="-j8 -s"\n')
+ mozconfig.write('mk_add_options FOO="BAR BAZ"\n')
+ mozconfig.write('mk_add_options BIZ=1\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result['topobjdir'], '/foo/bar')
+ self.assertEqual(result['make_flags'], ['-j8', '-s'])
+ self.assertEqual(result['make_extra'], ['FOO=BAR BAZ', 'BIZ=1'])
+
+ vars = result['vars']['added']
+ for var in ('MOZ_OBJDIR', 'MOZ_MAKE_FLAGS', 'FOO', 'BIZ'):
+ self.assertEqual(vars.get('%s_IS_SET' % var), '1')
+
+ def test_read_empty_mozconfig_objdir_environ(self):
+ os.environ[b'MOZ_OBJDIR'] = b'obj-firefox'
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result['topobjdir'], 'obj-firefox')
+
+ def test_read_capture_mk_options_objdir_environ(self):
+ """Ensures mk_add_options calls are captured and override the environ."""
+ os.environ[b'MOZ_OBJDIR'] = b'obj-firefox'
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('mk_add_options MOZ_OBJDIR=/foo/bar\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result['topobjdir'], '/foo/bar')
+
+ def test_read_moz_objdir_substitution(self):
+ """Ensure @TOPSRCDIR@ substitution is recognized in MOZ_OBJDIR."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/some-objdir')
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['topobjdir'], '%s/some-objdir' %
+ loader.topsrcdir)
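+
+ # Hedged example (path name hypothetical): a mozconfig line like
+ # mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/obj-debug
+ # would yield result['topobjdir'] == loader.topsrcdir + '/obj-debug'.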
+
+ def test_read_new_variables(self):
+ """New variables declared in mozconfig file are detected."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('CC=/usr/local/bin/clang\n')
+ mozconfig.write('CXX=/usr/local/bin/clang++\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['added'], {
+ 'CC': '/usr/local/bin/clang',
+ 'CXX': '/usr/local/bin/clang++'})
+ self.assertEqual(result['env']['added'], {})
+
+ def test_read_exported_variables(self):
+ """Exported variables are caught as new variables."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('export MY_EXPORTED=woot\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['added'], {})
+ self.assertEqual(result['env']['added'], {
+ 'MY_EXPORTED': 'woot'})
+
+ def test_read_modify_variables(self):
+ """Variables modified by mozconfig are detected."""
+ old_path = os.path.realpath(b'/usr/bin/gcc')
+ new_path = os.path.realpath(b'/usr/local/bin/clang')
+ os.environ[b'CC'] = old_path
+
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('CC="%s"\n' % new_path)
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['modified'], {})
+ self.assertEqual(result['env']['modified'], {
+ 'CC': (old_path, new_path)
+ })
+
+ def test_read_unmodified_variables(self):
+ """Variables modified by mozconfig are detected."""
+ cc_path = os.path.realpath(b'/usr/bin/gcc')
+ os.environ[b'CC'] = cc_path
+
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['unmodified'], {})
+ self.assertEqual(result['env']['unmodified'], {
+ 'CC': cc_path
+ })
+
+ def test_read_removed_variables(self):
+ """Variables unset by the mozconfig are detected."""
+ cc_path = os.path.realpath(b'/usr/bin/clang')
+ os.environ[b'CC'] = cc_path
+
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('unset CC\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['removed'], {})
+ self.assertEqual(result['env']['removed'], {
+ 'CC': cc_path})
+
+ def test_read_multiline_variables(self):
+ """Ensure multi-line variables are captured properly."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('multi="foo\nbar"\n')
+ mozconfig.write('single=1\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['added'], {
+ 'multi': 'foo\nbar',
+ 'single': '1'
+ })
+ self.assertEqual(result['env']['added'], {})
+
+ def test_read_topsrcdir_defined(self):
+ """Ensure $topsrcdir references work as expected."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('TEST=$topsrcdir')
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['added']['TEST'],
+ loader.topsrcdir.replace(os.sep, '/'))
+ self.assertEqual(result['env']['added'], {})
+
+ def test_read_empty_variable_value(self):
+ """Ensure empty variable values are parsed properly."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('EMPTY=\n')
+ mozconfig.write('export EXPORT_EMPTY=\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result['vars']['added'], {
+ 'EMPTY': '',
+ })
+ self.assertEqual(result['env']['added'], {
+ 'EXPORT_EMPTY': ''
+ })
+
+ def test_read_load_exception(self):
+ """Ensure non-0 exit codes in mozconfigs are handled properly."""
+ with NamedTemporaryFile(mode='w') as mozconfig:
+ mozconfig.write('echo "hello world"\n')
+ mozconfig.write('exit 1\n')
+ mozconfig.flush()
+
+ with self.assertRaises(MozconfigLoadException) as e:
+ self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertTrue(e.exception.message.startswith(
+ 'Evaluation of your mozconfig exited with an error'))
+ self.assertEqual(e.exception.path,
+ mozconfig.name.replace(os.sep, '/'))
+ self.assertEqual(e.exception.output, ['hello world'])
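+
+ # Usage sketch (assumed caller pattern, not from this suite): surfacing
+ # the captured shell output when evaluation fails.
+ # try:
+ # loader.read_mozconfig(path)
+ # except MozconfigLoadException as e:
+ # print('mozconfig %s failed:' % e.path)
+ # print('\n'.join(e.output))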
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_mozinfo.py b/python/mozbuild/mozbuild/test/test_mozinfo.py
new file mode 100755
index 000000000..1a4194cb5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_mozinfo.py
@@ -0,0 +1,278 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import tempfile
+import unittest
+
+from StringIO import StringIO
+
+import mozunit
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+
+from mozbuild.mozinfo import (
+ build_dict,
+ write_mozinfo,
+)
+
+from mozfile.mozfile import NamedTemporaryFile
+
+
+class Base(object):
+ def _config(self, substs={}):
+ d = os.path.dirname(__file__)
+ return ConfigEnvironment(d, d, substs=substs)
+
+
+class TestBuildDict(unittest.TestCase, Base):
+ def test_missing(self):
+ """
+ Test that build_dict raises when required values are missing.
+ """
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(OS_TARGET='foo')))
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(TARGET_CPU='foo')))
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(MOZ_WIDGET_TOOLKIT='foo')))
+
+ def test_win(self):
+ d = build_dict(self._config(dict(
+ OS_TARGET='WINNT',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='windows',
+ )))
+ self.assertEqual('win', d['os'])
+ self.assertEqual('x86', d['processor'])
+ self.assertEqual('windows', d['toolkit'])
+ self.assertEqual(32, d['bits'])
+
+ def test_linux(self):
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual('linux', d['os'])
+ self.assertEqual('x86', d['processor'])
+ self.assertEqual('gtk2', d['toolkit'])
+ self.assertEqual(32, d['bits'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='x86_64',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual('linux', d['os'])
+ self.assertEqual('x86_64', d['processor'])
+ self.assertEqual('gtk2', d['toolkit'])
+ self.assertEqual(64, d['bits'])
+
+ def test_mac(self):
+ d = build_dict(self._config(dict(
+ OS_TARGET='Darwin',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='cocoa',
+ )))
+ self.assertEqual('mac', d['os'])
+ self.assertEqual('x86', d['processor'])
+ self.assertEqual('cocoa', d['toolkit'])
+ self.assertEqual(32, d['bits'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Darwin',
+ TARGET_CPU='x86_64',
+ MOZ_WIDGET_TOOLKIT='cocoa',
+ )))
+ self.assertEqual('mac', d['os'])
+ self.assertEqual('x86_64', d['processor'])
+ self.assertEqual('cocoa', d['toolkit'])
+ self.assertEqual(64, d['bits'])
+
+ def test_mac_universal(self):
+ d = build_dict(self._config(dict(
+ OS_TARGET='Darwin',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='cocoa',
+ UNIVERSAL_BINARY='1',
+ )))
+ self.assertEqual('mac', d['os'])
+ self.assertEqual('universal-x86-x86_64', d['processor'])
+ self.assertEqual('cocoa', d['toolkit'])
+ self.assertFalse('bits' in d)
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Darwin',
+ TARGET_CPU='x86_64',
+ MOZ_WIDGET_TOOLKIT='cocoa',
+ UNIVERSAL_BINARY='1',
+ )))
+ self.assertEqual('mac', d['os'])
+ self.assertEqual('universal-x86-x86_64', d['processor'])
+ self.assertEqual('cocoa', d['toolkit'])
+ self.assertFalse('bits' in d)
+
+ def test_android(self):
+ d = build_dict(self._config(dict(
+ OS_TARGET='Android',
+ TARGET_CPU='arm',
+ MOZ_WIDGET_TOOLKIT='android',
+ )))
+ self.assertEqual('android', d['os'])
+ self.assertEqual('arm', d['processor'])
+ self.assertEqual('android', d['toolkit'])
+ self.assertEqual(32, d['bits'])
+
+ def test_x86(self):
+ """
+ Test that various i?86 values => x86.
+ """
+ d = build_dict(self._config(dict(
+ OS_TARGET='WINNT',
+ TARGET_CPU='i486',
+ MOZ_WIDGET_TOOLKIT='windows',
+ )))
+ self.assertEqual('x86', d['processor'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='WINNT',
+ TARGET_CPU='i686',
+ MOZ_WIDGET_TOOLKIT='windows',
+ )))
+ self.assertEqual('x86', d['processor'])
+
+ def test_arm(self):
+ """
+ Test that all arm CPU architectures => arm.
+ """
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='arm',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual('arm', d['processor'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='armv7',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual('arm', d['processor'])
+
+ def test_unknown(self):
+ """
+ Test that unknown values pass through okay.
+ """
+ d = build_dict(self._config(dict(
+ OS_TARGET='RandOS',
+ TARGET_CPU='cptwo',
+ MOZ_WIDGET_TOOLKIT='foobar',
+ )))
+ self.assertEqual("randos", d["os"])
+ self.assertEqual("cptwo", d["processor"])
+ self.assertEqual("foobar", d["toolkit"])
+ # unknown CPUs should not get a bits value
+ self.assertFalse("bits" in d)
+
+ def test_debug(self):
+ """
+ Test that debug values are properly detected.
+ """
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual(False, d['debug'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ MOZ_DEBUG='1',
+ )))
+ self.assertEqual(True, d['debug'])
+
+ def test_crashreporter(self):
+ """
+ Test that crashreporter values are properly detected.
+ """
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ )))
+ self.assertEqual(False, d['crashreporter'])
+
+ d = build_dict(self._config(dict(
+ OS_TARGET='Linux',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='gtk2',
+ MOZ_CRASHREPORTER='1',
+ )))
+ self.assertEqual(True, d['crashreporter'])
+
+
+class TestWriteMozinfo(unittest.TestCase, Base):
+ """
+ Test the write_mozinfo function.
+ """
+ def setUp(self):
+ fd, self.f = tempfile.mkstemp()
+ os.close(fd)
+
+ def tearDown(self):
+ os.unlink(self.f)
+
+ def test_basic(self):
+ """
+ Test that writing to a file produces correct output.
+ """
+ c = self._config(dict(
+ OS_TARGET='WINNT',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='windows',
+ ))
+ tempdir = tempfile.gettempdir()
+ c.topsrcdir = tempdir
+ with NamedTemporaryFile(dir=os.path.normpath(c.topsrcdir)) as mozconfig:
+ mozconfig.write('unused contents')
+ mozconfig.flush()
+ c.mozconfig = mozconfig.name
+ write_mozinfo(self.f, c)
+ with open(self.f) as f:
+ d = json.load(f)
+ self.assertEqual('win', d['os'])
+ self.assertEqual('x86', d['processor'])
+ self.assertEqual('windows', d['toolkit'])
+ self.assertEqual(tempdir, d['topsrcdir'])
+ self.assertEqual(mozconfig.name, d['mozconfig'])
+ self.assertEqual(32, d['bits'])
+
+ def test_fileobj(self):
+ """
+ Test that writing to a file-like object produces correct output.
+ """
+ s = StringIO()
+ c = self._config(dict(
+ OS_TARGET='WINNT',
+ TARGET_CPU='i386',
+ MOZ_WIDGET_TOOLKIT='windows',
+ ))
+ write_mozinfo(s, c)
+ d = json.loads(s.getvalue())
+ self.assertEqual('win', d['os'])
+ self.assertEqual('x86', d['processor'])
+ self.assertEqual('windows', d['toolkit'])
+ self.assertEqual(32, d['bits'])
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_preprocessor.py b/python/mozbuild/mozbuild/test/test_preprocessor.py
new file mode 100644
index 000000000..9aba94853
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_preprocessor.py
@@ -0,0 +1,646 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from StringIO import StringIO
+import os
+import shutil
+
+from tempfile import mkdtemp
+
+from mozunit import main, MockedOpen
+
+from mozbuild.preprocessor import Preprocessor
+
+
+class TestPreprocessor(unittest.TestCase):
+ """
+ Unit tests for the Preprocessor class
+ """
+
+ def setUp(self):
+ self.pp = Preprocessor()
+ self.pp.out = StringIO()
+
+ def do_include_compare(self, content_lines, expected_lines):
+ content = '\n'.join(content_lines)
+ # The comparison below ignores trailing whitespace in the expectation.
+ expected = '\n'.join(expected_lines).rstrip()
+
+ with MockedOpen({'dummy': content}):
+ self.pp.do_include('dummy')
+ self.assertEqual(self.pp.out.getvalue().rstrip('\n'), expected)
+
+ def do_include_pass(self, content_lines):
+ self.do_include_compare(content_lines, ['PASS'])
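+
+ # Usage sketch for the helpers above (assumption): a test supplies the
+ # input lines plus the exact preprocessed output it expects, e.g.
+ # self.do_include_compare(
+ # ['#define X 1', '#if X', 'yes', '#endif'], ['yes'])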
+
+ def test_conditional_if_0(self):
+ self.do_include_pass([
+ '#if 0',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_no_marker(self):
+ lines = [
+ '#if 0',
+ 'PASS',
+ '#endif',
+ ]
+ self.pp.setMarker(None)
+ self.do_include_compare(lines, lines)
+
+ def test_string_value(self):
+ self.do_include_compare([
+ '#define FOO STRING',
+ '#if FOO',
+ 'string value is true',
+ '#else',
+ 'string value is false',
+ '#endif',
+ ], ['string value is false'])
+
+ def test_number_value(self):
+ self.do_include_compare([
+ '#define FOO 1',
+ '#if FOO',
+ 'number value is true',
+ '#else',
+ 'number value is false',
+ '#endif',
+ ], ['number value is true'])
+
+ def test_conditional_if_0_elif_1(self):
+ self.do_include_pass([
+ '#if 0',
+ '#elif 1',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_if_1(self):
+ self.do_include_pass([
+ '#if 1',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_if_0_or_1(self):
+ self.do_include_pass([
+ '#if 0 || 1',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_if_1_elif_1_else(self):
+ self.do_include_pass([
+ '#if 1',
+ 'PASS',
+ '#elif 1',
+ 'FAIL',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_if_1_if_1(self):
+ self.do_include_pass([
+ '#if 1',
+ '#if 1',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_not_0(self):
+ self.do_include_pass([
+ '#if !0',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_conditional_not_0_and_1(self):
+ self.do_include_pass([
+ '#if !0 && !1',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_conditional_not_1(self):
+ self.do_include_pass([
+ '#if !1',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_conditional_not_emptyval(self):
+ self.do_include_compare([
+ '#define EMPTYVAL',
+ '#ifndef EMPTYVAL',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ '#ifdef EMPTYVAL',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ], ['PASS', 'PASS'])
+
+ def test_conditional_not_nullval(self):
+ self.do_include_pass([
+ '#define NULLVAL 0',
+ '#if !NULLVAL',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_expand(self):
+ self.do_include_pass([
+ '#define ASVAR AS',
+ '#expand P__ASVAR__S',
+ ])
+
+ def test_undef_defined(self):
+ self.do_include_compare([
+ '#define BAR',
+ '#undef BAR',
+ 'BAR',
+ ], ['BAR'])
+
+ def test_undef_undefined(self):
+ self.do_include_compare([
+ '#undef BAR',
+ ], [])
+
+ def test_filter_attemptSubstitution(self):
+ self.do_include_compare([
+ '#filter attemptSubstitution',
+ '@PASS@',
+ '#unfilter attemptSubstitution',
+ ], ['@PASS@'])
+
+ def test_filter_emptyLines(self):
+ self.do_include_compare([
+ 'lines with a',
+ '',
+ 'blank line',
+ '#filter emptyLines',
+ 'lines with',
+ '',
+ 'no blank lines',
+ '#unfilter emptyLines',
+ 'yet more lines with',
+ '',
+ 'blank lines',
+ ], [
+ 'lines with a',
+ '',
+ 'blank line',
+ 'lines with',
+ 'no blank lines',
+ 'yet more lines with',
+ '',
+ 'blank lines',
+ ])
+
+ def test_filter_slashslash(self):
+ self.do_include_compare([
+ '#filter slashslash',
+ 'PASS//FAIL // FAIL',
+ '#unfilter slashslash',
+ 'PASS // PASS',
+ ], [
+ 'PASS',
+ 'PASS // PASS',
+ ])
+
+ def test_filter_spaces(self):
+ self.do_include_compare([
+ '#filter spaces',
+ 'You should see two nice ascii tables',
+ ' +-+-+-+',
+ ' | | | |',
+ ' +-+-+-+',
+ '#unfilter spaces',
+ '+-+---+',
+ '| | |',
+ '+-+---+',
+ ], [
+ 'You should see two nice ascii tables',
+ '+-+-+-+',
+ '| | | |',
+ '+-+-+-+',
+ '+-+---+',
+ '| | |',
+ '+-+---+',
+ ])
+
+ def test_filter_substitution(self):
+ self.do_include_pass([
+ '#define VAR ASS',
+ '#filter substitution',
+ 'P@VAR@',
+ '#unfilter substitution',
+ ])
+
+ def test_error(self):
+ with MockedOpen({'f': '#error spit this message out\n'}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include('f')
+ self.assertEqual(e.exception.args[0][-1], 'spit this message out')
+
+ def test_javascript_line(self):
+ # The preprocessor is reading the filename from somewhere not caught
+ # by MockedOpen.
+ tmpdir = mkdtemp()
+ try:
+ full = os.path.join(tmpdir, 'javascript_line.js.in')
+ with open(full, 'w') as fh:
+ fh.write('\n'.join([
+ '// Line 1',
+ '#if 0',
+ '// line 3',
+ '#endif',
+ '// line 5',
+ '# comment',
+ '// line 7',
+ '// line 8',
+ '// line 9',
+ '# another comment',
+ '// line 11',
+ '#define LINE 1',
+ '// line 13, given line number overwritten with 2',
+ '',
+ ]))
+
+ self.pp.do_include(full)
+ out = '\n'.join([
+ '// Line 1',
+ '//@line 5 "CWDjavascript_line.js.in"',
+ '// line 5',
+ '//@line 7 "CWDjavascript_line.js.in"',
+ '// line 7',
+ '// line 8',
+ '// line 9',
+ '//@line 11 "CWDjavascript_line.js.in"',
+ '// line 11',
+ '//@line 2 "CWDjavascript_line.js.in"',
+ '// line 13, given line number overwritten with 2',
+ '',
+ ])
+ out = out.replace('CWD', tmpdir + os.path.sep)
+ self.assertEqual(self.pp.out.getvalue(), out)
+ finally:
+ shutil.rmtree(tmpdir)
+
+ def test_literal(self):
+ self.do_include_pass([
+ '#literal PASS',
+ ])
+
+ def test_var_directory(self):
+ self.do_include_pass([
+ '#ifdef DIRECTORY',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_file(self):
+ self.do_include_pass([
+ '#ifdef FILE',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_if_0(self):
+ self.do_include_pass([
+ '#define VAR 0',
+ '#if VAR',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_var_if_0_elifdef(self):
+ self.do_include_pass([
+ '#if 0',
+ '#elifdef FILE',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_if_0_elifndef(self):
+ self.do_include_pass([
+ '#if 0',
+ '#elifndef VAR',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_ifdef_0(self):
+ self.do_include_pass([
+ '#define VAR 0',
+ '#ifdef VAR',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_ifdef_1_or_undef(self):
+ self.do_include_pass([
+ '#define FOO 1',
+ '#if defined(FOO) || defined(BAR)',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_ifdef_undef(self):
+ self.do_include_pass([
+ '#define VAR 0',
+ '#undef VAR',
+ '#ifdef VAR',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_var_ifndef_0(self):
+ self.do_include_pass([
+ '#define VAR 0',
+ '#ifndef VAR',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_var_ifndef_0_and_undef(self):
+ self.do_include_pass([
+ '#define FOO 0',
+ '#if !defined(FOO) && !defined(BAR)',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_var_ifndef_undef(self):
+ self.do_include_pass([
+ '#define VAR 0',
+ '#undef VAR',
+ '#ifndef VAR',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_var_line(self):
+ self.do_include_pass([
+ '#ifdef LINE',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_filterDefine(self):
+ self.do_include_pass([
+ '#filter substitution',
+ '#define VAR AS',
+ '#define VAR2 P@VAR@',
+ '@VAR2@S',
+ ])
+
+ def test_number_value_equals(self):
+ self.do_include_pass([
+ '#define FOO 1000',
+ '#if FOO == 1000',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_default_defines(self):
+ self.pp.handleCommandLine(["-DFOO"])
+ self.do_include_pass([
+ '#if FOO == 1',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_number_value_equals_defines(self):
+ self.pp.handleCommandLine(["-DFOO=1000"])
+ self.do_include_pass([
+ '#if FOO == 1000',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_octal_value_equals(self):
+ self.do_include_pass([
+ '#define FOO 0100',
+ '#if FOO == 0100',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_octal_value_equals_defines(self):
+ self.pp.handleCommandLine(["-DFOO=0100"])
+ self.do_include_pass([
+ '#if FOO == 0100',
+ 'PASS',
+ '#else',
+ 'FAIL',
+ '#endif',
+ ])
+
+ def test_value_quoted_expansion(self):
+ """
+ Quoted values on the commandline don't currently have quotes stripped.
+ Pike says this is for compat reasons.
+ """
+ self.pp.handleCommandLine(['-DFOO="ABCD"'])
+ self.do_include_compare([
+ '#filter substitution',
+ '@FOO@',
+ ], ['"ABCD"'])
+
+ def test_octal_value_quoted_expansion(self):
+ self.pp.handleCommandLine(['-DFOO="0100"'])
+ self.do_include_compare([
+ '#filter substitution',
+ '@FOO@',
+ ], ['"0100"'])
+
+ def test_number_value_not_equals_quoted_defines(self):
+ self.pp.handleCommandLine(['-DFOO="1000"'])
+ self.do_include_pass([
+ '#if FOO == 1000',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_octal_value_not_equals_quoted_defines(self):
+ self.pp.handleCommandLine(['-DFOO="0100"'])
+ self.do_include_pass([
+ '#if FOO == 0100',
+ 'FAIL',
+ '#else',
+ 'PASS',
+ '#endif',
+ ])
+
+ def test_undefined_variable(self):
+ with MockedOpen({'f': '#filter substitution\n@foo@'}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include('f')
+ self.assertEqual(e.exception.key, 'UNDEFINED_VAR')
+
+ def test_include(self):
+ files = {
+ 'foo/test': '\n'.join([
+ '#define foo foobarbaz',
+ '#include @inc@',
+ '@bar@',
+ '',
+ ]),
+ 'bar': '\n'.join([
+ '#define bar barfoobaz',
+ '@foo@',
+ '',
+ ]),
+ 'f': '\n'.join([
+ '#filter substitution',
+ '#define inc ../bar',
+ '#include foo/test',
+ '',
+ ]),
+ }
+
+ with MockedOpen(files):
+ self.pp.do_include('f')
+ self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\nbarfoobaz\n')
+
+ def test_include_line(self):
+ files = {
+ 'test.js': '\n'.join([
+ '#define foo foobarbaz',
+ '#include @inc@',
+ '@bar@',
+ '',
+ ]),
+ 'bar.js': '\n'.join([
+ '#define bar barfoobaz',
+ '@foo@',
+ '',
+ ]),
+ 'foo.js': '\n'.join([
+ 'bazfoobar',
+ '#include bar.js',
+ 'bazbarfoo',
+ '',
+ ]),
+ 'baz.js': 'baz\n',
+ 'f.js': '\n'.join([
+ '#include foo.js',
+ '#filter substitution',
+ '#define inc bar.js',
+ '#include test.js',
+ '#include baz.js',
+ 'fin',
+ '',
+ ]),
+ }
+
+ with MockedOpen(files):
+ self.pp.do_include('f.js')
+ self.assertEqual(self.pp.out.getvalue(),
+ ('//@line 1 "CWD/foo.js"\n'
+ 'bazfoobar\n'
+ '//@line 2 "CWD/bar.js"\n'
+ '@foo@\n'
+ '//@line 3 "CWD/foo.js"\n'
+ 'bazbarfoo\n'
+ '//@line 2 "CWD/bar.js"\n'
+ 'foobarbaz\n'
+ '//@line 3 "CWD/test.js"\n'
+ 'barfoobaz\n'
+ '//@line 1 "CWD/baz.js"\n'
+ 'baz\n'
+ '//@line 6 "CWD/f.js"\n'
+ 'fin\n').replace('CWD/',
+ os.getcwd() + os.path.sep))
+
+ def test_include_missing_file(self):
+ with MockedOpen({'f': '#include foo\n'}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include('f')
+ self.assertEqual(e.exception.key, 'FILE_NOT_FOUND')
+
+ def test_include_undefined_variable(self):
+ with MockedOpen({'f': '#filter substitution\n#include @foo@\n'}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include('f')
+ self.assertEqual(e.exception.key, 'UNDEFINED_VAR')
+
+ def test_include_literal_at(self):
+ files = {
+ '@foo@': '#define foo foobarbaz\n',
+ 'f': '#include @foo@\n#filter substitution\n@foo@\n',
+ }
+
+ with MockedOpen(files):
+ self.pp.do_include('f')
+ self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')
+
+ def test_command_line_literal_at(self):
+ with MockedOpen({"@foo@.in": '@foo@\n'}):
+ self.pp.handleCommandLine(['-Fsubstitution', '-Dfoo=foobarbaz', '@foo@.in'])
+ self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_pythonutil.py b/python/mozbuild/mozbuild/test/test_pythonutil.py
new file mode 100644
index 000000000..87399b3f5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_pythonutil.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.pythonutil import iter_modules_in_path
+from mozunit import main
+import os
+import unittest
+
+
+class TestIterModules(unittest.TestCase):
+ def test_iter_modules_in_path(self):
+ mozbuild_path = os.path.normcase(os.path.dirname(os.path.dirname(__file__)))
+ paths = list(iter_modules_in_path(mozbuild_path))
+ self.assertEqual(sorted(paths), [
+ os.path.join(os.path.abspath(mozbuild_path), '__init__.py'),
+ os.path.join(os.path.abspath(mozbuild_path), 'pythonutil.py'),
+ os.path.join(os.path.abspath(mozbuild_path), 'test', 'test_pythonutil.py'),
+ ])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_testing.py b/python/mozbuild/mozbuild/test/test_testing.py
new file mode 100644
index 000000000..e71892e24
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_testing.py
@@ -0,0 +1,332 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import cPickle as pickle
+import os
+import shutil
+import tempfile
+import unittest
+
+import mozpack.path as mozpath
+
+from mozfile import NamedTemporaryFile
+from mozunit import main
+
+from mozbuild.base import MozbuildObject
+from mozbuild.testing import (
+ TestMetadata,
+ TestResolver,
+)
+
+
+ALL_TESTS = {
+ "accessible/tests/mochitest/actions/test_anchors.html": [
+ {
+ "dir_relpath": "accessible/tests/mochitest/actions",
+ "expected": "pass",
+ "file_relpath": "accessible/tests/mochitest/actions/test_anchors.html",
+ "flavor": "a11y",
+ "here": "/Users/gps/src/firefox/accessible/tests/mochitest/actions",
+ "manifest": "/Users/gps/src/firefox/accessible/tests/mochitest/actions/a11y.ini",
+ "name": "test_anchors.html",
+ "path": "/Users/gps/src/firefox/accessible/tests/mochitest/actions/test_anchors.html",
+ "relpath": "test_anchors.html"
+ }
+ ],
+ "services/common/tests/unit/test_async_chain.js": [
+ {
+ "dir_relpath": "services/common/tests/unit",
+ "file_relpath": "services/common/tests/unit/test_async_chain.js",
+ "firefox-appdir": "browser",
+ "flavor": "xpcshell",
+ "head": "head_global.js head_helpers.js head_http.js",
+ "here": "/Users/gps/src/firefox/services/common/tests/unit",
+ "manifest": "/Users/gps/src/firefox/services/common/tests/unit/xpcshell.ini",
+ "name": "test_async_chain.js",
+ "path": "/Users/gps/src/firefox/services/common/tests/unit/test_async_chain.js",
+ "relpath": "test_async_chain.js",
+ "tail": ""
+ }
+ ],
+ "services/common/tests/unit/test_async_querySpinningly.js": [
+ {
+ "dir_relpath": "services/common/tests/unit",
+ "file_relpath": "services/common/tests/unit/test_async_querySpinningly.js",
+ "firefox-appdir": "browser",
+ "flavor": "xpcshell",
+ "head": "head_global.js head_helpers.js head_http.js",
+ "here": "/Users/gps/src/firefox/services/common/tests/unit",
+ "manifest": "/Users/gps/src/firefox/services/common/tests/unit/xpcshell.ini",
+ "name": "test_async_querySpinningly.js",
+ "path": "/Users/gps/src/firefox/services/common/tests/unit/test_async_querySpinningly.js",
+ "relpath": "test_async_querySpinningly.js",
+ "tail": ""
+ }
+ ],
+ "toolkit/mozapps/update/test/unit/test_0201_app_launch_apply_update.js": [
+ {
+ "dir_relpath": "toolkit/mozapps/update/test/unit",
+ "file_relpath": "toolkit/mozapps/update/test/unit/test_0201_app_launch_apply_update.js",
+ "flavor": "xpcshell",
+ "generated-files": "head_update.js",
+ "head": "head_update.js",
+ "here": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit",
+ "manifest": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit/xpcshell_updater.ini",
+ "name": "test_0201_app_launch_apply_update.js",
+ "path": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit/test_0201_app_launch_apply_update.js",
+ "reason": "bug 820380",
+ "relpath": "test_0201_app_launch_apply_update.js",
+ "run-sequentially": "Launches application.",
+ "skip-if": "toolkit == 'gonk' || os == 'android'",
+ "tail": ""
+ },
+ {
+ "dir_relpath": "toolkit/mozapps/update/test/unit",
+ "file_relpath": "toolkit/mozapps/update/test/unit/test_0201_app_launch_apply_update.js",
+ "flavor": "xpcshell",
+ "generated-files": "head_update.js",
+ "head": "head_update.js head2.js",
+ "here": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit",
+ "manifest": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit/xpcshell_updater.ini",
+ "name": "test_0201_app_launch_apply_update.js",
+ "path": "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit/test_0201_app_launch_apply_update.js",
+ "reason": "bug 820380",
+ "relpath": "test_0201_app_launch_apply_update.js",
+ "run-sequentially": "Launches application.",
+ "skip-if": "toolkit == 'gonk' || os == 'android'",
+ "tail": ""
+ }
+ ],
+ "mobile/android/tests/background/junit3/src/common/TestAndroidLogWriters.java": [
+ {
+ "dir_relpath": "mobile/android/tests/background/junit3/src/common",
+ "file_relpath": "mobile/android/tests/background/junit3/src/common/TestAndroidLogWriters.java",
+ "flavor": "instrumentation",
+ "here": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/background/junit3",
+ "manifest": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/background/junit3/instrumentation.ini",
+ "name": "src/common/TestAndroidLogWriters.java",
+ "path": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/background/junit3/src/common/TestAndroidLogWriters.java",
+ "relpath": "src/common/TestAndroidLogWriters.java",
+ "subsuite": "background"
+ }
+ ],
+ "mobile/android/tests/browser/junit3/src/TestDistribution.java": [
+ {
+ "dir_relpath": "mobile/android/tests/browser/junit3/src",
+ "file_relpath": "mobile/android/tests/browser/junit3/src/TestDistribution.java",
+ "flavor": "instrumentation",
+ "here": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/browser/junit3",
+ "manifest": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/browser/junit3/instrumentation.ini",
+ "name": "src/TestDistribution.java",
+ "path": "/Users/nalexander/Mozilla/gecko-dev/mobile/android/tests/browser/junit3/src/TestDistribution.java",
+ "relpath": "src/TestDistribution.java",
+ "subsuite": "browser"
+ }
+ ],
+ "image/test/browser/browser_bug666317.js": [
+ {
+ "dir_relpath": "image/test/browser",
+ "file_relpath": "image/test/browser/browser_bug666317.js",
+ "flavor": "browser-chrome",
+ "here": "/home/chris/m-c/obj-dbg/_tests/testing/mochitest/browser/image/test/browser",
+ "manifest": "/home/chris/m-c/image/test/browser/browser.ini",
+ "name": "browser_bug666317.js",
+ "path": "/home/chris/m-c/obj-dbg/_tests/testing/mochitest/browser/image/test/browser/browser_bug666317.js",
+ "relpath": "image/test/browser/browser_bug666317.js",
+ "skip-if": "e10s # Bug 948194 - Decoded Images seem to not be discarded on memory-pressure notification with e10s enabled",
+ "subsuite": ""
+ }
+ ],
+ "devtools/client/markupview/test/browser_markupview_copy_image_data.js": [
+ {
+ "dir_relpath": "devtools/client/markupview/test",
+ "file_relpath": "devtools/client/markupview/test/browser_markupview_copy_image_data.js",
+ "flavor": "browser-chrome",
+ "here": "/home/chris/m-c/obj-dbg/_tests/testing/mochitest/browser/devtools/client/markupview/test",
+ "manifest": "/home/chris/m-c/devtools/client/markupview/test/browser.ini",
+ "name": "browser_markupview_copy_image_data.js",
+ "path": "/home/chris/m-c/obj-dbg/_tests/testing/mochitest/browser/devtools/client/markupview/test/browser_markupview_copy_image_data.js",
+ "relpath": "devtools/client/markupview/test/browser_markupview_copy_image_data.js",
+ "subsuite": "devtools",
+ "tags": "devtools"
+ }
+ ]
+}
+
+TEST_DEFAULTS = {
+ "/Users/gps/src/firefox/toolkit/mozapps/update/test/unit/xpcshell_updater.ini": {"support-files": "\ndata/**\nxpcshell_updater.ini"}
+}
+
+
+class Base(unittest.TestCase):
+ def setUp(self):
+ self._temp_files = []
+
+ def tearDown(self):
+ for f in self._temp_files:
+ f.close() # explicitly release and delete the temp file
+
+ self._temp_files = []
+
+ def _get_test_metadata(self):
+ all_tests = NamedTemporaryFile(mode='wb')
+ pickle.dump(ALL_TESTS, all_tests)
+ all_tests.flush()
+ self._temp_files.append(all_tests)
+
+ test_defaults = NamedTemporaryFile(mode='wb')
+ pickle.dump(TEST_DEFAULTS, test_defaults)
+ test_defaults.flush()
+ self._temp_files.append(test_defaults)
+
+ return TestMetadata(all_tests.name, test_defaults=test_defaults.name)
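+
+ # Usage sketch (mirrors the tests below; file names hypothetical):
+ # t = TestMetadata('all-tests.pkl', test_defaults='test-defaults.pkl')
+ # xpcshell = list(t.tests_with_flavor('xpcshell'))
+ # everything = list(t.resolve_tests())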
+
+
+class TestTestMetadata(Base):
+ def test_load(self):
+ t = self._get_test_metadata()
+ self.assertEqual(len(t._tests_by_path), 8)
+
+ self.assertEqual(len(list(t.tests_with_flavor('xpcshell'))), 3)
+ self.assertEqual(len(list(t.tests_with_flavor('mochitest-plain'))), 0)
+
+ def test_resolve_all(self):
+ t = self._get_test_metadata()
+ self.assertEqual(len(list(t.resolve_tests())), 9)
+
+ def test_resolve_filter_flavor(self):
+ t = self._get_test_metadata()
+ self.assertEqual(len(list(t.resolve_tests(flavor='xpcshell'))), 4)
+
+ def test_resolve_by_dir(self):
+ t = self._get_test_metadata()
+ self.assertEqual(len(list(t.resolve_tests(paths=['services/common']))), 2)
+
+ def test_resolve_under_path(self):
+ t = self._get_test_metadata()
+ self.assertEqual(len(list(t.resolve_tests(under_path='services'))), 2)
+
+ self.assertEqual(len(list(t.resolve_tests(flavor='xpcshell',
+ under_path='services'))), 2)
+
+ def test_resolve_multiple_paths(self):
+ t = self._get_test_metadata()
+ result = list(t.resolve_tests(paths=['services', 'toolkit']))
+ self.assertEqual(len(result), 4)
+
+ def test_resolve_support_files(self):
+ expected_support_files = "\ndata/**\nxpcshell_updater.ini"
+ t = self._get_test_metadata()
+ result = list(t.resolve_tests(paths=['toolkit']))
+ self.assertEqual(len(result), 2)
+
+ for test in result:
+ self.assertEqual(test['support-files'],
+ expected_support_files)
+
+ def test_resolve_path_prefix(self):
+ t = self._get_test_metadata()
+ result = list(t.resolve_tests(paths=['image']))
+ self.assertEqual(len(result), 1)
+
+
+class TestTestResolver(Base):
+ FAKE_TOPSRCDIR = '/Users/gps/src/firefox'
+
+ def setUp(self):
+ Base.setUp(self)
+
+ self._temp_dirs = []
+
+ def tearDown(self):
+ Base.tearDown(self)
+
+ for d in self._temp_dirs:
+ shutil.rmtree(d)
+
+ def _get_resolver(self):
+ topobjdir = tempfile.mkdtemp()
+ self._temp_dirs.append(topobjdir)
+
+ with open(os.path.join(topobjdir, 'all-tests.pkl'), 'wb') as fh:
+ pickle.dump(ALL_TESTS, fh)
+ with open(os.path.join(topobjdir, 'test-defaults.pkl'), 'wb') as fh:
+ pickle.dump(TEST_DEFAULTS, fh)
+
+ o = MozbuildObject(self.FAKE_TOPSRCDIR, None, None, topobjdir=topobjdir)
+
+ # Monkey patch the test resolver to avoid tests failing to find make
+ # due to our fake topsrcdir.
+ TestResolver._run_make = lambda *a, **b: None
+
+ return o._spawn(TestResolver)
+
+ def test_cwd_children_only(self):
+ """If cwd is defined, only resolve tests under the specified cwd."""
+ r = self._get_resolver()
+
+ # Pretend we're under '/services' and ask for 'common'. This should
+ # pick up all tests from '/services/common'
+ tests = list(r.resolve_tests(paths=['common'], cwd=os.path.join(r.topsrcdir,
+ 'services')))
+
+ self.assertEqual(len(tests), 2)
+
+ # Tests should be rewritten to objdir.
+ for t in tests:
+ self.assertEqual(t['here'], mozpath.join(r.topobjdir,
+ '_tests/xpcshell/services/common/tests/unit'))
+
+ def test_various_cwd(self):
+ """Test various cwd conditions are all equal."""
+
+ r = self._get_resolver()
+
+ expected = list(r.resolve_tests(paths=['services']))
+ actual = list(r.resolve_tests(paths=['services'], cwd='/'))
+ self.assertEqual(actual, expected)
+
+ actual = list(r.resolve_tests(paths=['services'], cwd=r.topsrcdir))
+ self.assertEqual(actual, expected)
+
+ actual = list(r.resolve_tests(paths=['services'], cwd=r.topobjdir))
+ self.assertEqual(actual, expected)
+
+ def test_subsuites(self):
+ """Test filtering by subsuite."""
+
+ r = self._get_resolver()
+
+ tests = list(r.resolve_tests(paths=['mobile']))
+ self.assertEqual(len(tests), 2)
+
+ tests = list(r.resolve_tests(paths=['mobile'], subsuite='browser'))
+ self.assertEqual(len(tests), 1)
+ self.assertEqual(tests[0]['name'], 'src/TestDistribution.java')
+
+ tests = list(r.resolve_tests(paths=['mobile'], subsuite='background'))
+ self.assertEqual(len(tests), 1)
+ self.assertEqual(tests[0]['name'], 'src/common/TestAndroidLogWriters.java')
+
+ def test_wildcard_patterns(self):
+ """Test matching paths by wildcard."""
+
+ r = self._get_resolver()
+
+ tests = list(r.resolve_tests(paths=['mobile/**']))
+ self.assertEqual(len(tests), 2)
+ for t in tests:
+ self.assertTrue(t['file_relpath'].startswith('mobile'))
+
+ tests = list(r.resolve_tests(paths=['**/**.js', 'accessible/**']))
+ self.assertEqual(len(tests), 7)
+ for t in tests:
+ path = t['file_relpath']
+ self.assertTrue(path.startswith('accessible') or path.endswith('.js'))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_util.py b/python/mozbuild/mozbuild/test/test_util.py
new file mode 100644
index 000000000..6c3b39b1e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_util.py
@@ -0,0 +1,924 @@
+# coding: utf-8
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import itertools
+import hashlib
+import os
+import unittest
+import shutil
+import string
+import sys
+import tempfile
+import textwrap
+
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import (
+ main,
+ MockedOpen,
+)
+
+from mozbuild.util import (
+ expand_variables,
+ FileAvoidWrite,
+ group_unified_files,
+ hash_file,
+ indented_repr,
+ memoize,
+ memoized_property,
+ pair,
+ resolve_target_to_make,
+ MozbuildDeletionError,
+ HierarchicalStringList,
+ EnumString,
+ EnumStringComparisonError,
+ ListWithAction,
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ TypedList,
+ TypedNamedTuple,
+ UnsortedError,
+)
+
+if sys.version_info[0] == 3:
+ str_type = 'str'
+else:
+ str_type = 'unicode'
+
+data_path = os.path.abspath(os.path.dirname(__file__))
+data_path = os.path.join(data_path, 'data')
+
+
+class TestHashing(unittest.TestCase):
+ def test_hash_file_known_hash(self):
+ """Ensure a known hash value is recreated."""
+ data = b'The quick brown fox jumps over the lazy cog'
+ expected = 'de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3'
+
+ temp = NamedTemporaryFile()
+ temp.write(data)
+ temp.flush()
+
+ actual = hash_file(temp.name)
+
+ self.assertEqual(actual, expected)
+
+ def test_hash_file_large(self):
+ """Ensure that hash_file seems to work with a large file."""
+ data = b'x' * 1048576
+
+ hasher = hashlib.sha1()
+ hasher.update(data)
+ expected = hasher.hexdigest()
+
+ temp = NamedTemporaryFile()
+ temp.write(data)
+ temp.flush()
+
+ actual = hash_file(temp.name)
+
+ self.assertEqual(actual, expected)
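+
+ # Equivalence sketch (assumption): hash_file should agree with a manual
+ # streaming SHA-1 over the same file.
+ # h = hashlib.sha1()
+ # with open(temp.name, 'rb') as fh:
+ # for chunk in iter(lambda: fh.read(8192), b''):
+ # h.update(chunk)
+ # assert h.hexdigest() == hash_file(temp.name)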
+
+
+class TestFileAvoidWrite(unittest.TestCase):
+ def test_file_avoid_write(self):
+ with MockedOpen({'file': 'content'}):
+ # Overwriting an existing file replaces its content
+ faw = FileAvoidWrite('file')
+ faw.write('bazqux')
+ self.assertEqual(faw.close(), (True, True))
+ self.assertEqual(open('file', 'r').read(), 'bazqux')
+
+ # Creating a new file (obviously) stores its content
+ faw = FileAvoidWrite('file2')
+ faw.write('content')
+ self.assertEqual(faw.close(), (False, True))
+ self.assertEqual(open('file2').read(), 'content')
+
+ with MockedOpen({'file': 'content'}):
+ with FileAvoidWrite('file') as file:
+ file.write('foobar')
+
+ self.assertEqual(open('file', 'r').read(), 'foobar')
+
+ class MyMockedOpen(MockedOpen):
+ '''MockedOpen extension to raise an exception if something
+ attempts to write in an opened file.
+ '''
+ def __call__(self, name, mode):
+ if 'w' in mode:
+ raise Exception('Unexpected open with write mode')
+ return MockedOpen.__call__(self, name, mode)
+
+ with MyMockedOpen({'file': 'content'}):
+ # Validate that MyMockedOpen works as intended
+ file = FileAvoidWrite('file')
+ file.write('foobar')
+ self.assertRaises(Exception, file.close)
+
+ # Check that no write actually happens when writing the
+ # same content as what already is in the file
+ faw = FileAvoidWrite('file')
+ faw.write('content')
+ self.assertEqual(faw.close(), (True, False))
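+
+ # Typical build-backend usage sketch (assumption, not from this suite):
+ # skipping the write keeps the file's mtime stable, so make will not
+ # rebuild targets that depend on an unchanged generated file.
+ # with FileAvoidWrite('generated.h') as fh: # file name hypothetical
+ # fh.write(render_header()) # render_header is hypothetical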
+
+ def test_diff_not_default(self):
+ """Diffs are not produced by default."""
+
+ with MockedOpen({'file': 'old'}):
+ faw = FileAvoidWrite('file')
+ faw.write('dummy')
+ faw.close()
+ self.assertIsNone(faw.diff)
+
+ def test_diff_update(self):
+ """Diffs are produced on file update."""
+
+ with MockedOpen({'file': 'old'}):
+ faw = FileAvoidWrite('file', capture_diff=True)
+ faw.write('new')
+ faw.close()
+
+ diff = '\n'.join(faw.diff)
+ self.assertIn('-old', diff)
+ self.assertIn('+new', diff)
+
+ def test_diff_create(self):
+ """Diffs are produced when files are created."""
+
+ tmpdir = tempfile.mkdtemp()
+ try:
+ path = os.path.join(tmpdir, 'file')
+ faw = FileAvoidWrite(path, capture_diff=True)
+ faw.write('new')
+ faw.close()
+
+ diff = '\n'.join(faw.diff)
+ self.assertIn('+new', diff)
+ finally:
+ shutil.rmtree(tmpdir)
+
+class TestResolveTargetToMake(unittest.TestCase):
+ def setUp(self):
+ self.topobjdir = data_path
+
+ def assertResolve(self, path, expected):
+ # Handle Windows path separators.
+ (reldir, target) = resolve_target_to_make(self.topobjdir, path)
+ if reldir is not None:
+ reldir = reldir.replace(os.sep, '/')
+ if target is not None:
+ target = target.replace(os.sep, '/')
+ self.assertEqual((reldir, target), expected)
+
+ def test_root_path(self):
+ self.assertResolve('/test-dir', ('test-dir', None))
+ self.assertResolve('/test-dir/with', ('test-dir/with', None))
+ self.assertResolve('/test-dir/without', ('test-dir', None))
+ self.assertResolve('/test-dir/without/with', ('test-dir/without/with', None))
+
+ def test_dir(self):
+ self.assertResolve('test-dir', ('test-dir', None))
+ self.assertResolve('test-dir/with', ('test-dir/with', None))
+ self.assertResolve('test-dir/with', ('test-dir/with', None))
+ self.assertResolve('test-dir/without', ('test-dir', None))
+ self.assertResolve('test-dir/without/with', ('test-dir/without/with', None))
+
+ def test_top_level(self):
+ self.assertResolve('package', (None, 'package'))
+ # Makefile handling shouldn't affect top-level targets.
+ self.assertResolve('Makefile', (None, 'Makefile'))
+
+ def test_regular_file(self):
+ self.assertResolve('test-dir/with/file', ('test-dir/with', 'file'))
+ self.assertResolve('test-dir/with/without/file', ('test-dir/with', 'without/file'))
+ self.assertResolve('test-dir/with/without/with/file', ('test-dir/with/without/with', 'file'))
+
+ self.assertResolve('test-dir/without/file', ('test-dir', 'without/file'))
+ self.assertResolve('test-dir/without/with/file', ('test-dir/without/with', 'file'))
+ self.assertResolve('test-dir/without/with/without/file', ('test-dir/without/with', 'without/file'))
+
+ def test_Makefile(self):
+ self.assertResolve('test-dir/with/Makefile', ('test-dir', 'with/Makefile'))
+ self.assertResolve('test-dir/with/without/Makefile', ('test-dir/with', 'without/Makefile'))
+ self.assertResolve('test-dir/with/without/with/Makefile', ('test-dir/with', 'without/with/Makefile'))
+
+ self.assertResolve('test-dir/without/Makefile', ('test-dir', 'without/Makefile'))
+ self.assertResolve('test-dir/without/with/Makefile', ('test-dir', 'without/with/Makefile'))
+ self.assertResolve('test-dir/without/with/without/Makefile', ('test-dir/without/with', 'without/Makefile'))
+
+class TestHierarchicalStringList(unittest.TestCase):
+ def setUp(self):
+ self.EXPORTS = HierarchicalStringList()
+
+ def test_exports_append(self):
+ self.assertEqual(self.EXPORTS._strings, [])
+ self.EXPORTS += ["foo.h"]
+ self.assertEqual(self.EXPORTS._strings, ["foo.h"])
+ self.EXPORTS += ["bar.h"]
+ self.assertEqual(self.EXPORTS._strings, ["foo.h", "bar.h"])
+
+ def test_exports_subdir(self):
+ self.assertEqual(self.EXPORTS._children, {})
+ self.EXPORTS.foo += ["foo.h"]
+ self.assertItemsEqual(self.EXPORTS._children, {"foo" : True})
+ self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
+ self.EXPORTS.bar += ["bar.h"]
+ self.assertItemsEqual(self.EXPORTS._children,
+ {"foo" : True, "bar" : True})
+ self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
+ self.assertEqual(self.EXPORTS.bar._strings, ["bar.h"])
+
+ def test_exports_multiple_subdir(self):
+ self.EXPORTS.foo.bar = ["foobar.h"]
+ self.assertItemsEqual(self.EXPORTS._children, {"foo" : True})
+ self.assertItemsEqual(self.EXPORTS.foo._children, {"bar" : True})
+ self.assertItemsEqual(self.EXPORTS.foo.bar._children, {})
+ self.assertEqual(self.EXPORTS._strings, [])
+ self.assertEqual(self.EXPORTS.foo._strings, [])
+ self.assertEqual(self.EXPORTS.foo.bar._strings, ["foobar.h"])
+
+ def test_invalid_exports_append(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += "foo.h"
+ self.assertEqual(str(ve.exception),
+ "Expected a list of strings, not <type '%s'>" % str_type)
+
+ def test_invalid_exports_set(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS.foo = "foo.h"
+
+ self.assertEqual(str(ve.exception),
+ "Expected a list of strings, not <type '%s'>" % str_type)
+
+ def test_invalid_exports_append_base(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += "foo.h"
+
+ self.assertEqual(str(ve.exception),
+ "Expected a list of strings, not <type '%s'>" % str_type)
+
+ def test_invalid_exports_bool(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += [True]
+
+ self.assertEqual(str(ve.exception),
+ "Expected a list of strings, not an element of "
+ "<type 'bool'>")
+
+ def test_del_exports(self):
+ with self.assertRaises(MozbuildDeletionError) as mde:
+ self.EXPORTS.foo += ['bar.h']
+ del self.EXPORTS.foo
+
+ def test_unsorted(self):
+ with self.assertRaises(UnsortedError) as ee:
+ self.EXPORTS += ['foo.h', 'bar.h']
+
+ with self.assertRaises(UnsortedError) as ee:
+ self.EXPORTS.foo = ['foo.h', 'bar.h']
+
+ with self.assertRaises(UnsortedError) as ee:
+ self.EXPORTS.foo += ['foo.h', 'bar.h']
+
+ def test_reassign(self):
+ self.EXPORTS.foo = ['foo.h']
+
+ with self.assertRaises(KeyError) as ee:
+ self.EXPORTS.foo = ['bar.h']
+
+ def test_walk(self):
+ l = HierarchicalStringList()
+ l += ['root1', 'root2', 'root3']
+ l.child1 += ['child11', 'child12', 'child13']
+ l.child1.grandchild1 += ['grandchild111', 'grandchild112']
+ l.child1.grandchild2 += ['grandchild121', 'grandchild122']
+ l.child2.grandchild1 += ['grandchild211', 'grandchild212']
+ l.child2.grandchild1 += ['grandchild213', 'grandchild214']
+
+ els = list((path, list(seq)) for path, seq in l.walk())
+ self.assertEqual(els, [
+ ('', ['root1', 'root2', 'root3']),
+ ('child1', ['child11', 'child12', 'child13']),
+ ('child1/grandchild1', ['grandchild111', 'grandchild112']),
+ ('child1/grandchild2', ['grandchild121', 'grandchild122']),
+ ('child2/grandchild1', ['grandchild211', 'grandchild212',
+ 'grandchild213', 'grandchild214']),
+ ])
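+
+ # Reading sketch (inferred from the expected list above): walk() yields
+ # (path, strings) pairs depth-first with '/'-joined paths, so a consumer
+ # could mirror the hierarchy:
+ # for path, strings in l.walk():
+ # print(path or '<root>', list(strings))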
+
+ def test_merge(self):
+ l1 = HierarchicalStringList()
+ l1 += ['root1', 'root2', 'root3']
+ l1.child1 += ['child11', 'child12', 'child13']
+ l1.child1.grandchild1 += ['grandchild111', 'grandchild112']
+ l1.child1.grandchild2 += ['grandchild121', 'grandchild122']
+ l1.child2.grandchild1 += ['grandchild211', 'grandchild212']
+ l1.child2.grandchild1 += ['grandchild213', 'grandchild214']
+ l2 = HierarchicalStringList()
+ l2.child1 += ['child14', 'child15']
+ l2.child1.grandchild2 += ['grandchild123']
+ l2.child3 += ['child31', 'child32']
+
+ l1 += l2
+ els = list((path, list(seq)) for path, seq in l1.walk())
+ self.assertEqual(els, [
+ ('', ['root1', 'root2', 'root3']),
+ ('child1', ['child11', 'child12', 'child13', 'child14',
+ 'child15']),
+ ('child1/grandchild1', ['grandchild111', 'grandchild112']),
+ ('child1/grandchild2', ['grandchild121', 'grandchild122',
+ 'grandchild123']),
+ ('child2/grandchild1', ['grandchild211', 'grandchild212',
+ 'grandchild213', 'grandchild214']),
+ ('child3', ['child31', 'child32']),
+ ])
+
+
+class TestStrictOrderingOnAppendList(unittest.TestCase):
+ def test_init(self):
+ l = StrictOrderingOnAppendList()
+ self.assertEqual(len(l), 0)
+
+ l = StrictOrderingOnAppendList(['a', 'b', 'c'])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(UnsortedError):
+ StrictOrderingOnAppendList(['c', 'b', 'a'])
+
+ self.assertEqual(len(l), 3)
+
+ def test_extend(self):
+ l = StrictOrderingOnAppendList()
+ l.extend(['a', 'b'])
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l.extend(['d', 'c'])
+
+ self.assertEqual(len(l), 2)
+
+ def test_slicing(self):
+ l = StrictOrderingOnAppendList()
+ l[:] = ['a', 'b']
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l[:] = ['b', 'a']
+
+ self.assertEqual(len(l), 2)
+
+ def test_add(self):
+ l = StrictOrderingOnAppendList()
+ l2 = l + ['a', 'b']
+ self.assertEqual(len(l), 0)
+ self.assertEqual(len(l2), 2)
+ self.assertIsInstance(l2, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l2 = l + ['b', 'a']
+
+ self.assertEqual(len(l), 0)
+
+ def test_iadd(self):
+ l = StrictOrderingOnAppendList()
+ l += ['a', 'b']
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l += ['b', 'a']
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_after_iadd(self):
+ l = StrictOrderingOnAppendList(['b'])
+ l += ['a']
+ l2 = l + ['c', 'd']
+ self.assertEqual(len(l), 2)
+ self.assertEqual(len(l2), 4)
+ self.assertIsInstance(l2, StrictOrderingOnAppendList)
+ with self.assertRaises(UnsortedError):
+ l2 = l + ['d', 'c']
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_StrictOrderingOnAppendList(self):
+ l = StrictOrderingOnAppendList()
+ l += ['c', 'd']
+ l += ['a', 'b']
+ l2 = StrictOrderingOnAppendList()
+ with self.assertRaises(UnsortedError):
+ l2 += list(l)
+ # Adding a StrictOrderingOnAppendList to another shouldn't throw
+ l2 += l
+
+
+class TestListWithAction(unittest.TestCase):
+ def setUp(self):
+ self.action = lambda a: (a, id(a))
+
+ def assertSameList(self, expected, actual):
+ self.assertEqual(len(expected), len(actual))
+ for idx, item in enumerate(actual):
+ self.assertEqual(item, expected[idx])
+
+ def test_init(self):
+ l = ListWithAction(action=self.action)
+ self.assertEqual(len(l), 0)
+ original = ['a', 'b', 'c']
+ l = ListWithAction(['a', 'b', 'c'], action=self.action)
+ expected = map(self.action, original)
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ ListWithAction('abc', action=self.action)
+
+ with self.assertRaises(ValueError):
+ ListWithAction()
+
+ def test_extend(self):
+ l = ListWithAction(action=self.action)
+ original = ['a', 'b']
+ l.extend(original)
+ expected = map(self.action, original)
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l.extend('ab')
+
+ def test_slicing(self):
+ l = ListWithAction(action=self.action)
+ original = ['a', 'b']
+ l[:] = original
+ expected = map(self.action, original)
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l[:] = 'ab'
+
+ def test_add(self):
+ l = ListWithAction(action=self.action)
+ original = ['a', 'b']
+ l2 = l + original
+ expected = map(self.action, original)
+ self.assertSameList(expected, l2)
+
+ with self.assertRaises(ValueError):
+ l + 'abc'
+
+ def test_iadd(self):
+ l = ListWithAction(action=self.action)
+ original = ['a', 'b']
+ l += original
+ expected = map(self.action, original)
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l += 'abc'
+
+
+class TestStrictOrderingOnAppendListWithFlagsFactory(unittest.TestCase):
+ def test_strict_ordering_on_append_list_with_flags_factory(self):
+ cls = StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': bool,
+ 'bar': int,
+ })
+
+ l = cls()
+ l += ['a', 'b']
+
+ with self.assertRaises(Exception):
+ l['a'] = 'foo'
+
+ with self.assertRaises(Exception):
+ c = l['c']
+
+ self.assertEqual(l['a'].foo, False)
+ l['a'].foo = True
+ self.assertEqual(l['a'].foo, True)
+
+ with self.assertRaises(TypeError):
+ l['a'].bar = 'bar'
+
+ self.assertEqual(l['a'].bar, 0)
+ l['a'].bar = 42
+ self.assertEqual(l['a'].bar, 42)
+
+ l['b'].foo = True
+ self.assertEqual(l['b'].foo, True)
+
+ with self.assertRaises(AttributeError):
+ l['b'].baz = False
+
+ l['b'].update(foo=False, bar=12)
+ self.assertEqual(l['b'].foo, False)
+ self.assertEqual(l['b'].bar, 12)
+
+ with self.assertRaises(AttributeError):
+ l['b'].update(xyz=1)
+
+ def test_strict_ordering_on_append_list_with_flags_factory_extend(self):
+ FooList = StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': bool, 'bar': unicode
+ })
+ foo = FooList(['a', 'b', 'c'])
+ foo['a'].foo = True
+ foo['b'].bar = 'bar'
+
+ # Don't allow extending lists with different flag definitions.
+ BarList = StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': unicode, 'baz': bool
+ })
+ bar = BarList(['d', 'e', 'f'])
+ bar['d'].foo = 'foo'
+ bar['e'].baz = True
+ with self.assertRaises(ValueError):
+ foo + bar
+ with self.assertRaises(ValueError):
+ bar + foo
+
+ # It's not obvious what to do with duplicate list items with possibly
+ # different flag values, so don't allow that case.
+ with self.assertRaises(ValueError):
+ foo + foo
+
+ def assertExtended(l):
+ self.assertEqual(len(l), 6)
+ self.assertEqual(l['a'].foo, True)
+ self.assertEqual(l['b'].bar, 'bar')
+ self.assertTrue('c' in l)
+ self.assertEqual(l['d'].foo, True)
+ self.assertEqual(l['e'].bar, 'bar')
+ self.assertTrue('f' in l)
+
+ # Test extend.
+ zot = FooList(['d', 'e', 'f'])
+ zot['d'].foo = True
+ zot['e'].bar = 'bar'
+ zot.extend(foo)
+ assertExtended(zot)
+
+ # Test __add__.
+ zot = FooList(['d', 'e', 'f'])
+ zot['d'].foo = True
+ zot['e'].bar = 'bar'
+ assertExtended(foo + zot)
+ assertExtended(zot + foo)
+
+ # Test __iadd__.
+ foo += zot
+ assertExtended(foo)
+
+ # Test __setslice__.
+ foo[3:] = []
+ self.assertEqual(len(foo), 3)
+ foo[3:] = zot
+ assertExtended(foo)
+
+
+class TestMemoize(unittest.TestCase):
+ def test_memoize(self):
+ self._count = 0
+ @memoize
+ def wrapped(a, b):
+ self._count += 1
+ return a + b
+
+ self.assertEqual(self._count, 0)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 1)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 1)
+ self.assertEqual(wrapped(2, 1), 3)
+ self.assertEqual(self._count, 2)
+ self.assertEqual(wrapped(1, 2), 3)
+ self.assertEqual(self._count, 3)
+ self.assertEqual(wrapped(1, 2), 3)
+ self.assertEqual(self._count, 3)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 3)
+
+ def test_memoize_method(self):
+ class foo(object):
+ def __init__(self):
+ self._count = 0
+
+ @memoize
+ def wrapped(self, a, b):
+ self._count += 1
+ return a + b
+
+ instance = foo()
+ refcount = sys.getrefcount(instance)
+ self.assertEqual(instance._count, 0)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped(2, 1), 3)
+ self.assertEqual(instance._count, 2)
+ self.assertEqual(instance.wrapped(1, 2), 3)
+ self.assertEqual(instance._count, 3)
+ self.assertEqual(instance.wrapped(1, 2), 3)
+ self.assertEqual(instance._count, 3)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 3)
+
+ # Memoization of methods is expected to not keep references to
+ # instances, so the refcount shouldn't have changed after executing the
+ # memoized method.
+ self.assertEqual(refcount, sys.getrefcount(instance))
+
+ def test_memoized_property(self):
+ class foo(object):
+ def __init__(self):
+ self._count = 0
+
+ @memoized_property
+ def wrapped(self):
+ self._count += 1
+ return 42
+
+ instance = foo()
+ self.assertEqual(instance._count, 0)
+ self.assertEqual(instance.wrapped, 42)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped, 42)
+ self.assertEqual(instance._count, 1)
+
+
+class TestTypedList(unittest.TestCase):
+ def test_init(self):
+ cls = TypedList(int)
+ l = cls()
+ self.assertEqual(len(l), 0)
+
+ l = cls([1, 2, 3])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(ValueError):
+ cls([1, 2, 'c'])
+
+ def test_extend(self):
+ cls = TypedList(int)
+ l = cls()
+ l.extend([1, 2])
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l.extend([3, 'c'])
+
+ self.assertEqual(len(l), 2)
+
+ def test_slicing(self):
+ cls = TypedList(int)
+ l = cls()
+ l[:] = [1, 2]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l[:] = [3, 'c']
+
+ self.assertEqual(len(l), 2)
+
+ def test_add(self):
+ cls = TypedList(int)
+ l = cls()
+ l2 = l + [1, 2]
+ self.assertEqual(len(l), 0)
+ self.assertEqual(len(l2), 2)
+ self.assertIsInstance(l2, cls)
+
+ with self.assertRaises(ValueError):
+ l2 = l + [3, 'c']
+
+ self.assertEqual(len(l), 0)
+
+ def test_iadd(self):
+ cls = TypedList(int)
+ l = cls()
+ l += [1, 2]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l += [3, 'c']
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_coercion(self):
+ objs = []
+
+ class Foo(object):
+ def __init__(self, obj):
+ objs.append(obj)
+
+ cls = TypedList(Foo)
+ l = cls()
+ l += [1, 2]
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l[0]), Foo)
+ self.assertEqual(type(l[1]), Foo)
+
+ # Adding a TypedList to a TypedList shouldn't trigger coercion again
+ l2 = cls()
+ l2 += l
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l2[0]), Foo)
+ self.assertEqual(type(l2[1]), Foo)
+
+ # Adding a TypedList to a TypedList shouldn't even trigger the code
+ # that does coercion at all.
+ l2 = cls()
+ list.__setslice__(l, 0, -1, [1, 2])
+ l2 += l
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l2[0]), int)
+ self.assertEqual(type(l2[1]), int)
+
+ def test_memoized(self):
+ cls = TypedList(int)
+ cls2 = TypedList(str)
+ self.assertEqual(TypedList(int), cls)
+ self.assertNotEqual(cls, cls2)
+
+
+class TypedTestStrictOrderingOnAppendList(unittest.TestCase):
+ def test_init(self):
+ class Unicode(unicode):
+ def __init__(self, other):
+ if not isinstance(other, unicode):
+ raise ValueError()
+ super(Unicode, self).__init__(other)
+
+ cls = TypedList(Unicode, StrictOrderingOnAppendList)
+ l = cls()
+ self.assertEqual(len(l), 0)
+
+ l = cls(['a', 'b', 'c'])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(UnsortedError):
+ cls(['c', 'b', 'a'])
+
+ with self.assertRaises(ValueError):
+ cls(['a', 'b', 3])
+
+ self.assertEqual(len(l), 3)
+
+
+class TestTypedNamedTuple(unittest.TestCase):
+ def test_simple(self):
+ FooBar = TypedNamedTuple('FooBar', [('foo', unicode), ('bar', int)])
+
+ t = FooBar(foo='foo', bar=2)
+ self.assertEquals(type(t), FooBar)
+ self.assertEquals(t.foo, 'foo')
+ self.assertEquals(t.bar, 2)
+ self.assertEquals(t[0], 'foo')
+ self.assertEquals(t[1], 2)
+
+ FooBar('foo', 2)
+
+ with self.assertRaises(TypeError):
+ FooBar('foo', 'not integer')
+ with self.assertRaises(TypeError):
+ FooBar(2, 4)
+
+ # Passing a tuple as the first argument is the same as passing multiple
+ # arguments.
+ t1 = ('foo', 3)
+ t2 = FooBar(t1)
+ self.assertEquals(type(t2), FooBar)
+ self.assertEqual(FooBar(t1), FooBar('foo', 3))
+
+
+class TestGroupUnifiedFiles(unittest.TestCase):
+ FILES = ['%s.cpp' % letter for letter in string.ascii_lowercase]
+
+ def test_multiple_files(self):
+ mapping = list(group_unified_files(self.FILES, 'Unified', 'cpp', 5))
+
+ def check_mapping(index, expected_num_source_files):
+ (unified_file, source_files) = mapping[index]
+
+ self.assertEqual(unified_file, 'Unified%d.cpp' % index)
+ self.assertEqual(len(source_files), expected_num_source_files)
+
+ all_files = list(itertools.chain(*[files for (_, files) in mapping]))
+ self.assertEqual(len(all_files), len(self.FILES))
+ self.assertEqual(set(all_files), set(self.FILES))
+
+ expected_amounts = [5, 5, 5, 5, 5, 1]
+ for i, amount in enumerate(expected_amounts):
+ check_mapping(i, amount)
+
+ def test_unsorted_files(self):
+ unsorted_files = ['a%d.cpp' % i for i in range(11)]
+ sorted_files = sorted(unsorted_files)
+ mapping = list(group_unified_files(unsorted_files, 'Unified', 'cpp', 5))
+
+ self.assertEqual(mapping[0][1], sorted_files[0:5])
+ self.assertEqual(mapping[1][1], sorted_files[5:10])
+ self.assertEqual(mapping[2][1], sorted_files[10:])
+
+
+class TestMisc(unittest.TestCase):
+ def test_pair(self):
+ self.assertEqual(
+ list(pair([1, 2, 3, 4, 5, 6])),
+ [(1, 2), (3, 4), (5, 6)]
+ )
+
+ self.assertEqual(
+ list(pair([1, 2, 3, 4, 5, 6, 7])),
+ [(1, 2), (3, 4), (5, 6), (7, None)]
+ )
+
+ def test_expand_variables(self):
+ self.assertEqual(
+ expand_variables('$(var)', {'var': 'value'}),
+ 'value'
+ )
+
+ self.assertEqual(
+ expand_variables('$(a) and $(b)', {'a': '1', 'b': '2'}),
+ '1 and 2'
+ )
+
+ self.assertEqual(
+ expand_variables('$(a) and $(undefined)', {'a': '1', 'b': '2'}),
+ '1 and '
+ )
+
+ self.assertEqual(
+ expand_variables('before $(string) between $(list) after', {
+ 'string': 'abc',
+ 'list': ['a', 'b', 'c']
+ }),
+ 'before abc between a b c after'
+ )
+
+
+class TestEnumString(unittest.TestCase):
+ def test_string(self):
+ CompilerType = EnumString.subclass('msvc', 'gcc', 'clang', 'clang-cl')
+
+ type = CompilerType('msvc')
+ self.assertEquals(type, 'msvc')
+ self.assertNotEquals(type, 'gcc')
+ self.assertNotEquals(type, 'clang')
+ self.assertNotEquals(type, 'clang-cl')
+ self.assertIn(type, ('msvc', 'clang-cl'))
+ self.assertNotIn(type, ('gcc', 'clang'))
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertEquals(type, 'foo')
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertNotEquals(type, 'foo')
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertIn(type, ('foo', 'gcc'))
+
+ with self.assertRaises(ValueError):
+ type = CompilerType('foo')
+
+
+class TestIndentedRepr(unittest.TestCase):
+ def test_indented_repr(self):
+ data = textwrap.dedent(r'''
+ {
+ 'a': 1,
+ 'b': b'abc',
+ b'c': 'xyz',
+ 'd': False,
+ 'e': {
+ 'a': 1,
+ 'b': b'2',
+ 'c': '3',
+ },
+ 'f': [
+ 1,
+ b'2',
+ '3',
+ ],
+ 'pile_of_bytes': b'\xf0\x9f\x92\xa9',
+ 'pile_of_poo': '💩',
+ 'special_chars': '\\\'"\x08\n\t',
+ 'with_accents': 'éàñ',
+ }''').lstrip()
+
+ obj = eval(data)
+
+ self.assertEqual(indented_repr(obj), data)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozbuild/testing.py b/python/mozbuild/mozbuild/testing.py
new file mode 100644
index 000000000..b327cd74f
--- /dev/null
+++ b/python/mozbuild/mozbuild/testing.py
@@ -0,0 +1,535 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import cPickle as pickle
+import os
+import sys
+
+import mozpack.path as mozpath
+
+from mozpack.copier import FileCopier
+from mozpack.manifests import InstallManifest
+
+from .base import MozbuildObject
+from .util import OrderedDefaultDict
+from collections import defaultdict
+
+import manifestparser
+
+def rewrite_test_base(test, new_base, honor_install_to_subdir=False):
+ """Rewrite paths in a test to be under a new base path.
+
+ This is useful for running tests from a separate location from where they
+ were defined.
+
+ honor_install_to_subdir and the underlying install-to-subdir field are a
+ giant hack intended to work around the restriction where the mochitest
+ runner can't handle single test files with multiple configurations. This
+ argument should be removed once the mochitest runner talks manifests
+ (bug 984670).
+ """
+ test['here'] = mozpath.join(new_base, test['dir_relpath'])
+
+ if honor_install_to_subdir and test.get('install-to-subdir'):
+ manifest_relpath = mozpath.relpath(test['path'],
+ mozpath.dirname(test['manifest']))
+ test['path'] = mozpath.join(new_base, test['dir_relpath'],
+ test['install-to-subdir'], manifest_relpath)
+ else:
+ test['path'] = mozpath.join(new_base, test['file_relpath'])
+
+ return test
+
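+# A minimal usage sketch (all values below are hypothetical, not from a real
+# manifest): rewriting against a new base relocates both 'here' and 'path'.
+#
+#   test = {'here': '/src/dom/tests',
+#           'dir_relpath': 'dom/tests',
+#           'file_relpath': 'dom/tests/test_foo.html',
+#           'path': '/src/dom/tests/test_foo.html',
+#           'manifest': '/src/dom/tests/mochitest.ini'}
+#   rewrite_test_base(test, '/obj/_tests')
+#   # test['here'] == '/obj/_tests/dom/tests'
+#   # test['path'] == '/obj/_tests/dom/tests/test_foo.html'
+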
+
+class TestMetadata(object):
+ """Holds information about tests.
+
+ This class provides an API to query tests active in the build
+ configuration.
+ """
+
+ def __init__(self, all_tests, test_defaults=None):
+ self._tests_by_path = OrderedDefaultDict(list)
+ self._tests_by_flavor = defaultdict(set)
+ self._test_dirs = set()
+
+ with open(all_tests, 'rb') as fh:
+ test_data = pickle.load(fh)
+ defaults = None
+ if test_defaults:
+ with open(test_defaults, 'rb') as fh:
+ defaults = pickle.load(fh)
+ for path, tests in test_data.items():
+ for metadata in tests:
+ if defaults:
+ manifest = metadata['manifest']
+ manifest_defaults = defaults.get(manifest)
+ if manifest_defaults:
+ metadata = manifestparser.combine_fields(manifest_defaults,
+ metadata)
+ self._tests_by_path[path].append(metadata)
+ self._test_dirs.add(os.path.dirname(path))
+ flavor = metadata.get('flavor')
+ self._tests_by_flavor[flavor].add(path)
+
+ def tests_with_flavor(self, flavor):
+ """Obtain all tests having the specified flavor.
+
+ This is a generator of dicts describing each test.
+ """
+
+ for path in sorted(self._tests_by_flavor.get(flavor, [])):
+ yield self._tests_by_path[path]
+
+ def resolve_tests(self, paths=None, flavor=None, subsuite=None, under_path=None,
+ tags=None):
+ """Resolve tests from an identifier.
+
+ This is a generator of dicts describing each test.
+
+ ``paths`` can be an iterable of values to use to identify tests to run.
+ If an entry is a known test file, tests associated with that file are
+ returned (there may be multiple configurations for a single file). If
+ an entry is a directory, or a prefix of a directory containing tests,
+ all tests in that directory are returned. If the string appears in a
+ known test file, that test file is considered. If the path contains
+ a wildcard pattern, tests matching that pattern are returned.
+
+ If ``under_path`` is a string, it will be used to filter out tests that
+ aren't in the specified path prefix relative to topsrcdir or the
+ test's installed dir.
+
+ If ``flavor`` is a string, it will be used to filter returned tests
+ to only be the flavor specified. A flavor is something like
+ ``xpcshell``.
+
+ If ``subsuite`` is a string, it will be used to filter returned tests
+ to only be in the subsuite specified.
+
+ If ``tags`` are specified, they will be used to filter returned tests
+ to only those with a matching tag.
+ """
+ if tags:
+ tags = set(tags)
+
+ def fltr(tests):
+ for test in tests:
+ if flavor:
+ if (flavor == 'devtools' and test.get('flavor') != 'browser-chrome') or \
+ (flavor != 'devtools' and test.get('flavor') != flavor):
+ continue
+
+ if subsuite and test.get('subsuite') != subsuite:
+ continue
+
+ if tags and not (tags & set(test.get('tags', '').split())):
+ continue
+
+ if under_path \
+ and not test['file_relpath'].startswith(under_path):
+ continue
+
+ # Make a copy so modifications don't change the source.
+ yield dict(test)
+
+ paths = paths or []
+ paths = [mozpath.normpath(p) for p in paths]
+ if not paths:
+ paths = [None]
+
+ candidate_paths = set()
+
+ for path in sorted(paths):
+ if path is None:
+ candidate_paths |= set(self._tests_by_path.keys())
+ continue
+
+ if '*' in path:
+ candidate_paths |= {p for p in self._tests_by_path
+ if mozpath.match(p, path)}
+ continue
+
+ # If the path is a directory, or the path is a prefix of a directory
+ # containing tests, pull in all tests in that directory.
+ if (path in self._test_dirs or
+ any(p.startswith(path) for p in self._tests_by_path)):
+ candidate_paths |= {p for p in self._tests_by_path
+ if p.startswith(path)}
+ continue
+
+ # If it's a test file, add just that file.
+ candidate_paths |= {p for p in self._tests_by_path if path in p}
+
+ for p in sorted(candidate_paths):
+ tests = self._tests_by_path[p]
+
+ for test in fltr(tests):
+ yield test
+
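+# Typical queries against an instance (a sketch; the pickle path, test paths
+# and flavors are illustrative):
+#
+#   md = TestMetadata('/obj/all-tests.pkl')
+#   mochitests = list(md.resolve_tests(paths=['dom/base'], flavor='mochitest'))
+#   tagged = list(md.resolve_tests(tags=['audio']))
+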
+
+class TestResolver(MozbuildObject):
+ """Helper to resolve tests from the current environment to test files."""
+
+ def __init__(self, *args, **kwargs):
+ MozbuildObject.__init__(self, *args, **kwargs)
+
+ # If installing tests is going to result in re-generating the build
+ # backend, we need to do this here, so that the updated contents of
+ # all-tests.pkl make it to the set of tests to run.
+ self._run_make(target='run-tests-deps', pass_thru=True,
+ print_directory=False)
+
+ self._tests = TestMetadata(os.path.join(self.topobjdir,
+ 'all-tests.pkl'),
+ test_defaults=os.path.join(self.topobjdir,
+ 'test-defaults.pkl'))
+
+ self._test_rewrites = {
+ 'a11y': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'a11y'),
+ 'browser-chrome': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'browser'),
+ 'jetpack-package': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'jetpack-package'),
+ 'jetpack-addon': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'jetpack-addon'),
+ 'chrome': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'chrome'),
+ 'mochitest': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'mochitest', 'tests'),
+ 'web-platform-tests': os.path.join(self.topobjdir, '_tests', 'testing',
+ 'web-platform'),
+ 'xpcshell': os.path.join(self.topobjdir, '_tests', 'xpcshell'),
+ }
+
+ def resolve_tests(self, cwd=None, **kwargs):
+ """Resolve tests in the context of the current environment.
+
+ This is a more intelligent version of TestMetadata.resolve_tests().
+
+ This function provides additional massaging and filtering of low-level
+ results.
+
+ Paths in returned tests are automatically translated to the paths in
+ the _tests directory under the object directory.
+
+ If cwd is defined, we will limit our results to tests under the
+ directory specified. The directory should be defined as an absolute
+ path under topsrcdir or topobjdir for it to work properly.
+ """
+ rewrite_base = None
+
+ if cwd:
+ norm_cwd = mozpath.normpath(cwd)
+ norm_srcdir = mozpath.normpath(self.topsrcdir)
+ norm_objdir = mozpath.normpath(self.topobjdir)
+
+ reldir = None
+
+ if norm_cwd.startswith(norm_objdir):
+ reldir = norm_cwd[len(norm_objdir)+1:]
+ elif norm_cwd.startswith(norm_srcdir):
+ reldir = norm_cwd[len(norm_srcdir)+1:]
+
+ result = self._tests.resolve_tests(under_path=reldir,
+ **kwargs)
+
+ else:
+ result = self._tests.resolve_tests(**kwargs)
+
+ for test in result:
+ rewrite_base = self._test_rewrites.get(test['flavor'], None)
+
+ if rewrite_base:
+ yield rewrite_test_base(test, rewrite_base,
+ honor_install_to_subdir=True)
+ else:
+ yield test
+
+# These definitions provide a single source of truth for modules attempting
+# to get a view of all tests for a build. Used by the emitter to figure out
+# how to read/install manifests and by test dependency annotations in Files()
+# entries to enumerate test flavors.
+
+# While there are multiple test manifests, the behavior is very similar
+# across them. We enforce this by having common handling of all
+# manifests and outputting a single class type with the differences
+# described inside the instance.
+#
+# Keys are variable prefixes and values are tuples describing how these
+# manifests should be handled:
+#
+# (flavor, install_root, install_subdir, package_tests)
+#
+# flavor identifies the flavor of this test.
+# install_root is the path prefix to install the files starting from the root
+# directory and not as specified by the manifest location. (bug 972168)
+# install_subdir is the path of where to install the files in
+# the tests directory.
+# package_tests indicates whether to package test files into the test
+# package; suites that compile the test files should not install
+# them into the test package.
+#
+TEST_MANIFESTS = dict(
+ A11Y=('a11y', 'testing/mochitest', 'a11y', True),
+ BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
+ ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.', False),
+ JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest', 'jetpack-package', True),
+ JETPACK_ADDON=('jetpack-addon', 'testing/mochitest', 'jetpack-addon', False),
+ FIREFOX_UI_FUNCTIONAL=('firefox-ui-functional', 'firefox-ui', '.', False),
+ FIREFOX_UI_UPDATE=('firefox-ui-update', 'firefox-ui', '.', False),
+ PUPPETEER_FIREFOX=('firefox-ui-functional', 'firefox-ui', '.', False),
+
+ # marionette tests are run from the srcdir
+ # TODO(ato): make packaging work as for other test suites
+ MARIONETTE=('marionette', 'marionette', '.', False),
+ MARIONETTE_UNIT=('marionette', 'marionette', '.', False),
+ MARIONETTE_WEBAPI=('marionette', 'marionette', '.', False),
+
+ METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
+ MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
+ MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
+ WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
+ XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True),
+)
+
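+# For example (illustrative only), unpacking one entry:
+#
+#   flavor, install_root, install_subdir, package_tests = \
+#       TEST_MANIFESTS['MOCHITEST']
+#   # ('mochitest', 'testing/mochitest', 'tests', True)
+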
+# Reftests have their own manifest format and are processed separately.
+REFTEST_FLAVORS = ('crashtest', 'reftest')
+
+# Web platform tests have their own manifest format and are processed separately.
+WEB_PLATFORM_TESTS_FLAVORS = ('web-platform-tests',)
+
+def all_test_flavors():
+ return ([v[0] for v in TEST_MANIFESTS.values()] +
+ list(REFTEST_FLAVORS) +
+ list(WEB_PLATFORM_TESTS_FLAVORS) +
+ ['python'])
+
+class TestInstallInfo(object):
+ def __init__(self):
+ self.seen = set()
+ self.pattern_installs = []
+ self.installs = []
+ self.external_installs = set()
+ self.deferred_installs = set()
+
+ def __ior__(self, other):
+ self.pattern_installs.extend(other.pattern_installs)
+ self.installs.extend(other.installs)
+ self.external_installs |= other.external_installs
+ self.deferred_installs |= other.deferred_installs
+ return self
+
+class SupportFilesConverter(object):
+ """Processes a "support-files" entry from a test object, either from
+    an object parsed from a test manifest or from its representation in
+    moz.build, and returns the installs to perform for this test object.
+
+ Processing the same support files multiple times will not have any further
+ effect, and the structure of the parsed objects from manifests will have a
+ lot of repeated entries, so this class takes care of memoizing.
+ """
+ def __init__(self):
+ self._fields = (('head', set()),
+ ('tail', set()),
+ ('support-files', set()),
+ ('generated-files', set()))
+
+ def convert_support_files(self, test, install_root, manifest_dir, out_dir):
+ # Arguments:
+ # test - The test object to process.
+ # install_root - The directory under $objdir/_tests that will contain
+ # the tests for this harness (examples are "testing/mochitest",
+ # "xpcshell").
+        # manifest_dir - Absolute path to the (srcdir) directory containing the
+ # manifest that included this test
+ # out_dir - The path relative to $objdir/_tests used as the destination for the
+ # test, based on the relative path to the manifest in the srcdir,
+ # the install_root, and 'install-to-subdir', if present in the manifest.
+ info = TestInstallInfo()
+ for field, seen in self._fields:
+ value = test.get(field, '')
+ for pattern in value.split():
+
+ # We track uniqueness locally (per test) where duplicates are forbidden,
+ # and globally, where they are permitted. If a support file appears multiple
+ # times for a single test, there are unnecessary entries in the manifest. But
+ # many entries will be shared across tests that share defaults.
+ # We need to memoize on the basis of both the path and the output
+ # directory for the benefit of tests specifying 'install-to-subdir'.
+ key = field, pattern, out_dir
+ if key in info.seen:
+ raise ValueError("%s appears multiple times in a test manifest under a %s field,"
+ " please omit the duplicate entry." % (pattern, field))
+ info.seen.add(key)
+ if key in seen:
+ continue
+ seen.add(key)
+
+ if field == 'generated-files':
+ info.external_installs.add(mozpath.normpath(mozpath.join(out_dir, pattern)))
+ # '!' indicates our syntax for inter-directory support file
+ # dependencies. These receive special handling in the backend.
+ elif pattern[0] == '!':
+ info.deferred_installs.add(pattern)
+ # We only support globbing on support-files because
+ # the harness doesn't support * for head and tail.
+ elif '*' in pattern and field == 'support-files':
+ info.pattern_installs.append((manifest_dir, pattern, out_dir))
+ # "absolute" paths identify files that are to be
+ # placed in the install_root directory (no globs)
+ elif pattern[0] == '/':
+ full = mozpath.normpath(mozpath.join(manifest_dir,
+ mozpath.basename(pattern)))
+ info.installs.append((full, mozpath.join(install_root, pattern[1:])))
+ else:
+ full = mozpath.normpath(mozpath.join(manifest_dir, pattern))
+ dest_path = mozpath.join(out_dir, pattern)
+
+ # If the path resolves to a different directory
+ # tree, we take special behavior depending on the
+ # entry type.
+ if not full.startswith(manifest_dir):
+ # If it's a support file, we install the file
+ # into the current destination directory.
+ # This implementation makes installing things
+ # with custom prefixes impossible. If this is
+ # needed, we can add support for that via a
+ # special syntax later.
+ if field == 'support-files':
+ dest_path = mozpath.join(out_dir,
+ os.path.basename(pattern))
+ # If it's not a support file, we ignore it.
+ # This preserves old behavior so things like
+                    # head files don't get installed multiple
+ # times.
+ else:
+ continue
+ info.installs.append((full, mozpath.normpath(dest_path)))
+ return info
+
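+# A sketch of the pattern handling above (paths hypothetical): a test object
+# with support-files = "helper.js sub/*.html" produces one plain install and
+# one pattern install.
+#
+#   converter = SupportFilesConverter()
+#   info = converter.convert_support_files(
+#       {'support-files': 'helper.js sub/*.html'},
+#       'testing/mochitest',                        # install_root
+#       '/src/dom/tests',                           # manifest_dir
+#       'testing/mochitest/tests/dom/tests')        # out_dir
+#   # info.installs == [('/src/dom/tests/helper.js',
+#   #                    'testing/mochitest/tests/dom/tests/helper.js')]
+#   # info.pattern_installs == [('/src/dom/tests', 'sub/*.html',
+#   #                            'testing/mochitest/tests/dom/tests')]
+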
+def _resolve_installs(paths, topobjdir, manifest):
+ """Using the given paths as keys, find any unresolved installs noted
+ by the build backend corresponding to those keys, and add them
+ to the given manifest.
+ """
+ filename = os.path.join(topobjdir, 'test-installs.pkl')
+ with open(filename, 'rb') as fh:
+ resolved_installs = pickle.load(fh)
+
+ for path in paths:
+ path = path[2:]
+ if path not in resolved_installs:
+ raise Exception('A cross-directory support file path noted in a '
+ 'test manifest does not appear in any other manifest.\n "%s" '
+ 'must appear in another test manifest to specify an install '
+ 'for "!/%s".' % (path, path))
+ installs = resolved_installs[path]
+ for install_info in installs:
+ try:
+ if len(install_info) == 3:
+ manifest.add_pattern_symlink(*install_info)
+ if len(install_info) == 2:
+ manifest.add_symlink(*install_info)
+ except ValueError:
+ # A duplicate value here is pretty likely when running
+ # multiple directories at once, and harmless.
+ pass
+
+def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
+ """Installs the requested test files to the objdir. This is invoked by
+ test runners to avoid installing tens of thousands of test files when
+ only a few tests need to be run.
+ """
+ flavor_info = {flavor: (root, prefix, install)
+ for (flavor, root, prefix, install) in TEST_MANIFESTS.values()}
+ objdir_dest = mozpath.join(topobjdir, tests_root)
+
+ converter = SupportFilesConverter()
+ install_info = TestInstallInfo()
+ for o in test_objs:
+ flavor = o['flavor']
+ if flavor not in flavor_info:
+ # This is a test flavor that isn't installed by the build system.
+ continue
+ root, prefix, install = flavor_info[flavor]
+ if not install:
+ # This flavor isn't installed to the objdir.
+ continue
+
+ manifest_path = o['manifest']
+ manifest_dir = mozpath.dirname(manifest_path)
+
+ out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
+ file_relpath = o['file_relpath']
+ source = mozpath.join(topsrcdir, file_relpath)
+ dest = mozpath.join(root, prefix, file_relpath)
+ if 'install-to-subdir' in o:
+ out_dir = mozpath.join(out_dir, o['install-to-subdir'])
+ manifest_relpath = mozpath.relpath(source, mozpath.dirname(manifest_path))
+ dest = mozpath.join(out_dir, manifest_relpath)
+
+ install_info.installs.append((source, dest))
+ install_info |= converter.convert_support_files(o, root,
+ manifest_dir,
+ out_dir)
+
+ manifest = InstallManifest()
+
+ for source, dest in set(install_info.installs):
+ if dest in install_info.external_installs:
+ continue
+ manifest.add_symlink(source, dest)
+ for base, pattern, dest in install_info.pattern_installs:
+ manifest.add_pattern_symlink(base, pattern, dest)
+
+ _resolve_installs(install_info.deferred_installs, topobjdir, manifest)
+
+ # Harness files are treated as a monolith and installed each time we run tests.
+ # Fortunately there are not very many.
+ manifest |= InstallManifest(mozpath.join(topobjdir,
+ '_build_manifests',
+ 'install', tests_root))
+ copier = FileCopier()
+ manifest.populate_registry(copier)
+ copier.copy(objdir_dest,
+ remove_unaccounted=False)
+
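+# A test runner would invoke this roughly as follows (a sketch; the paths are
+# hypothetical and the resolver construction is elided):
+#
+#   resolver = TestResolver(...)   # a MozbuildObject subclass; args elided
+#   tests = list(resolver.resolve_tests(paths=['dom/base']))
+#   install_test_files('/src', '/obj', '_tests', tests)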
+
+# Convenience methods for test manifest reading.
+def read_manifestparser_manifest(context, manifest_path):
+ path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
+ return manifestparser.TestManifest(manifests=[path], strict=True,
+ rootdir=context.config.topsrcdir,
+ finder=context._finder,
+ handle_defaults=False)
+
+def read_reftest_manifest(context, manifest_path):
+ import reftest
+ path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
+ manifest = reftest.ReftestManifest(finder=context._finder)
+ manifest.load(path)
+ return manifest
+
+def read_wpt_manifest(context, paths):
+ manifest_path, tests_root = paths
+ full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
+ old_path = sys.path[:]
+ try:
+ # Setup sys.path to include all the dependencies required to import
+ # the web-platform-tests manifest parser. web-platform-tests provides
+        # localpaths.py to do the path manipulation, which we load,
+ # providing the __file__ variable so it can resolve the relative
+ # paths correctly.
+ paths_file = os.path.join(context.config.topsrcdir, "testing",
+ "web-platform", "tests", "tools", "localpaths.py")
+ _globals = {"__file__": paths_file}
+ execfile(paths_file, _globals)
+ import manifest as wptmanifest
+ finally:
+ sys.path = old_path
+ f = context._finder.get(full_path)
+ return wptmanifest.manifest.load(tests_root, f)
diff --git a/python/mozbuild/mozbuild/util.py b/python/mozbuild/mozbuild/util.py
new file mode 100644
index 000000000..58dd9daf0
--- /dev/null
+++ b/python/mozbuild/mozbuild/util.py
@@ -0,0 +1,1264 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains miscellaneous utility functions that don't belong anywhere
+# in particular.
+
+from __future__ import absolute_import, unicode_literals, print_function
+
+import argparse
+import collections
+import ctypes
+import difflib
+import errno
+import functools
+import hashlib
+import itertools
+import os
+import re
+import stat
+import sys
+import time
+import types
+
+from collections import (
+ defaultdict,
+ Iterable,
+ OrderedDict,
+)
+from io import (
+ StringIO,
+ BytesIO,
+)
+
+
+if sys.version_info[0] == 3:
+ str_type = str
+else:
+ str_type = basestring
+
+if sys.platform == 'win32':
+ _kernel32 = ctypes.windll.kernel32
+ _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
+
+
+def exec_(object, globals=None, locals=None):
+ """Wrapper around the exec statement to avoid bogus errors like:
+
+ SyntaxError: unqualified exec is not allowed in function ...
+ it is a nested function.
+
+ or
+
+ SyntaxError: unqualified exec is not allowed in function ...
+ it contains a nested function with free variable
+
+ which happen with older versions of python 2.7.
+ """
+ exec(object, globals, locals)
+
+
+def hash_file(path, hasher=None):
+ """Hashes a file specified by the path given and returns the hex digest."""
+
+ # If the default hashing function changes, this may invalidate
+ # lots of cached data. Don't change it lightly.
+ h = hasher or hashlib.sha1()
+
+ with open(path, 'rb') as fh:
+ while True:
+ data = fh.read(8192)
+
+ if not len(data):
+ break
+
+ h.update(data)
+
+ return h.hexdigest()
+
+
+class EmptyValue(unicode):
+ """A dummy type that behaves like an empty string and sequence.
+
+ This type exists in order to support
+ :py:class:`mozbuild.frontend.reader.EmptyConfig`. It should likely not be
+ used elsewhere.
+ """
+ def __init__(self):
+ super(EmptyValue, self).__init__()
+
+
+class ReadOnlyNamespace(object):
+ """A class for objects with immutable attributes set at initialization."""
+ def __init__(self, **kwargs):
+ for k, v in kwargs.iteritems():
+ super(ReadOnlyNamespace, self).__setattr__(k, v)
+
+ def __delattr__(self, key):
+ raise Exception('Object does not support deletion.')
+
+ def __setattr__(self, key, value):
+ raise Exception('Object does not support assignment.')
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __eq__(self, other):
+ return self is other or (
+ hasattr(other, '__dict__') and self.__dict__ == other.__dict__)
+
+ def __repr__(self):
+ return '<%s %r>' % (self.__class__.__name__, self.__dict__)
+
+
+class ReadOnlyDict(dict):
+ """A read-only dictionary."""
+ def __init__(self, *args, **kwargs):
+ dict.__init__(self, *args, **kwargs)
+
+ def __delitem__(self, key):
+ raise Exception('Object does not support deletion.')
+
+ def __setitem__(self, key, value):
+ raise Exception('Object does not support assignment.')
+
+ def update(self, *args, **kwargs):
+ raise Exception('Object does not support update.')
+
+
+class undefined_default(object):
+ """Represents an undefined argument value that isn't None."""
+
+
+undefined = undefined_default()
+
+
+class ReadOnlyDefaultDict(ReadOnlyDict):
+ """A read-only dictionary that supports default values on retrieval."""
+ def __init__(self, default_factory, *args, **kwargs):
+ ReadOnlyDict.__init__(self, *args, **kwargs)
+ self._default_factory = default_factory
+
+ def __missing__(self, key):
+ value = self._default_factory()
+ dict.__setitem__(self, key, value)
+ return value
+
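+# For example (a sketch):
+#
+#   d = ReadOnlyDefaultDict(list, {'a': [1]})
+#   d['a']         # [1]
+#   d['missing']   # [] -- materialized by the factory and cached
+#   d['a'] = []    # raises Exception: assignment is not supported
+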
+
+def ensureParentDir(path):
+ """Ensures the directory parent to the given file exists."""
+ d = os.path.dirname(path)
+    if d and not os.path.exists(d):
+        try:
+            os.makedirs(d)
+        except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+
+def mkdir(path, not_indexed=False):
+ """Ensure a directory exists.
+
+ If ``not_indexed`` is True, an attribute is set that disables content
+ indexing on the directory.
+ """
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ if not_indexed:
+ if sys.platform == 'win32':
+ if isinstance(path, str_type):
+ fn = _kernel32.SetFileAttributesW
+ else:
+ fn = _kernel32.SetFileAttributesA
+
+ fn(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
+ elif sys.platform == 'darwin':
+ with open(os.path.join(path, '.metadata_never_index'), 'a'):
+ pass
+
+
+def simple_diff(filename, old_lines, new_lines):
+ """Returns the diff between old_lines and new_lines, in unified diff form,
+ as a list of lines.
+
+ old_lines and new_lines are lists of non-newline terminated lines to
+ compare.
+ old_lines can be None, indicating a file creation.
+ new_lines can be None, indicating a file deletion.
+ """
+
+ old_name = '/dev/null' if old_lines is None else filename
+ new_name = '/dev/null' if new_lines is None else filename
+
+ return difflib.unified_diff(old_lines or [], new_lines or [],
+ old_name, new_name, n=4, lineterm='')
+
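+# For instance (a sketch), diffing two one-line revisions of a file:
+#
+#   list(simple_diff('config.txt', ['a=1'], ['a=2']))
+#   # ['--- config.txt', '+++ config.txt', '@@ -1 +1 @@', '-a=1', '+a=2']
+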
+
+class FileAvoidWrite(BytesIO):
+ """File-like object that buffers output and only writes if content changed.
+
+ We create an instance from an existing filename. New content is written to
+ it. When we close the file object, if the content in the in-memory buffer
+ differs from what is on disk, then we write out the new content. Otherwise,
+ the original file is untouched.
+
+ Instances can optionally capture diffs of file changes. This feature is not
+    enabled by default because it (a) doesn't make sense for binary files and
+    (b) could add unwanted overhead to calls.
+
+    Additionally, there is a dry run mode where the file is not actually
+    written out, but reports of whether the file existed and would have been
+    updated still occur, as well as diff capture if requested.
+ """
+ def __init__(self, filename, capture_diff=False, dry_run=False, mode='rU'):
+ BytesIO.__init__(self)
+ self.name = filename
+ self._capture_diff = capture_diff
+ self._dry_run = dry_run
+ self.diff = None
+ self.mode = mode
+
+ def write(self, buf):
+ if isinstance(buf, unicode):
+ buf = buf.encode('utf-8')
+ BytesIO.write(self, buf)
+
+ def close(self):
+ """Stop accepting writes, compare file contents, and rewrite if needed.
+
+ Returns a tuple of bools indicating what action was performed:
+
+ (file existed, file updated)
+
+ If ``capture_diff`` was specified at construction time and the
+ underlying file was changed, ``.diff`` will be populated with the diff
+ of the result.
+ """
+ buf = self.getvalue()
+ BytesIO.close(self)
+ existed = False
+ old_content = None
+
+ try:
+ existing = open(self.name, self.mode)
+ existed = True
+ except IOError:
+ pass
+ else:
+ try:
+ old_content = existing.read()
+ if old_content == buf:
+ return True, False
+ except IOError:
+ pass
+ finally:
+ existing.close()
+
+ if not self._dry_run:
+ ensureParentDir(self.name)
+ # Maintain 'b' if specified. 'U' only applies to modes starting with
+ # 'r', so it is dropped.
+ writemode = 'w'
+ if 'b' in self.mode:
+ writemode += 'b'
+ with open(self.name, writemode) as file:
+ file.write(buf)
+
+ if self._capture_diff:
+ try:
+ old_lines = old_content.splitlines() if existed else None
+ new_lines = buf.splitlines()
+
+ self.diff = simple_diff(self.name, old_lines, new_lines)
+ # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii
+ # content or opened and written in different modes may involve
+ # implicit conversion and this will make Python unhappy. Since
+ # diffing isn't a critical feature, we just ignore the failure.
+ # This can go away once FileAvoidWrite uses io.BytesIO and
+ # io.StringIO. But that will require a lot of work.
+ except (UnicodeDecodeError, UnicodeEncodeError):
+ self.diff = ['Binary or non-ascii file changed: %s' %
+ self.name]
+
+ return existed, True
+
+ def __enter__(self):
+ return self
+
+    def __exit__(self, type, value, traceback):
+ if not self.closed:
+ self.close()
+
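+# Illustrative usage (the path and content are hypothetical):
+#
+#   with FileAvoidWrite('/obj/generated.h', capture_diff=True) as fh:
+#       fh.write('#define FOO 1\n')
+#   # On close, the file is only rewritten if the content differs; when it
+#   # does, fh.diff holds the unified diff of the change.
+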
+
+def resolve_target_to_make(topobjdir, target):
+ r'''
+ Resolve `target` (a target, directory, or file) to a make target.
+
+ `topobjdir` is the object directory; all make targets will be
+ rooted at or below the top-level Makefile in this directory.
+
+ Returns a pair `(reldir, target)` where `reldir` is a directory
+ relative to `topobjdir` containing a Makefile and `target` is a
+ make target (possibly `None`).
+
+ A directory resolves to the nearest directory at or above
+ containing a Makefile, and target `None`.
+
+ A regular (non-Makefile) file resolves to the nearest directory at
+ or above the file containing a Makefile, and an appropriate
+ target.
+
+ A Makefile resolves to the nearest parent strictly above the
+ Makefile containing a different Makefile, and an appropriate
+ target.
+ '''
+
+ target = target.replace(os.sep, '/').lstrip('/')
+ abs_target = os.path.join(topobjdir, target)
+
+ # For directories, run |make -C dir|. If the directory does not
+ # contain a Makefile, check parents until we find one. At worst,
+ # this will terminate at the root.
+ if os.path.isdir(abs_target):
+ current = abs_target
+
+ while True:
+ make_path = os.path.join(current, 'Makefile')
+ if os.path.exists(make_path):
+ return (current[len(topobjdir) + 1:], None)
+
+ current = os.path.dirname(current)
+
+ # If it's not in a directory, this is probably a top-level make
+ # target. Treat it as such.
+ if '/' not in target:
+ return (None, target)
+
+ # We have a relative path within the tree. We look for a Makefile
+ # as far into the path as possible. Then, we compute the make
+ # target as relative to that directory.
+ reldir = os.path.dirname(target)
+ target = os.path.basename(target)
+
+ while True:
+ make_path = os.path.join(topobjdir, reldir, 'Makefile')
+
+ # We append to target every iteration, so the check below
+ # happens exactly once.
+ if target != 'Makefile' and os.path.exists(make_path):
+ return (reldir, target)
+
+ target = os.path.join(os.path.basename(reldir), target)
+ reldir = os.path.dirname(reldir)
+
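+# Hypothetical examples of the resolution rules above, assuming Makefiles
+# exist at the objdir root and under dom/:
+#
+#   resolve_target_to_make(topobjdir, 'dom')           # ('dom', None)
+#   resolve_target_to_make(topobjdir, 'dom/foo.o')     # ('dom', 'foo.o')
+#   resolve_target_to_make(topobjdir, 'dom/Makefile')  # ('', 'dom/Makefile')
+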
+
+class ListMixin(object):
+ def __init__(self, iterable=None, **kwargs):
+ if iterable is None:
+ iterable = []
+ if not isinstance(iterable, list):
+ raise ValueError('List can only be created from other list instances.')
+
+ self._kwargs = kwargs
+ return super(ListMixin, self).__init__(iterable, **kwargs)
+
+ def extend(self, l):
+ if not isinstance(l, list):
+ raise ValueError('List can only be extended with other list instances.')
+
+ return super(ListMixin, self).extend(l)
+
+ def __setslice__(self, i, j, sequence):
+ if not isinstance(sequence, list):
+ raise ValueError('List can only be sliced with other list instances.')
+
+ return super(ListMixin, self).__setslice__(i, j, sequence)
+
+ def __add__(self, other):
+        # Allowing None and EmptyValue is a special case; it makes undefined
+        # variable references in moz.build behave better.
+ other = [] if isinstance(other, (types.NoneType, EmptyValue)) else other
+ if not isinstance(other, list):
+ raise ValueError('Only lists can be appended to lists.')
+
+ new_list = self.__class__(self, **self._kwargs)
+ new_list.extend(other)
+ return new_list
+
+ def __iadd__(self, other):
+ other = [] if isinstance(other, (types.NoneType, EmptyValue)) else other
+ if not isinstance(other, list):
+ raise ValueError('Only lists can be appended to lists.')
+
+ return super(ListMixin, self).__iadd__(other)
+
+
+class List(ListMixin, list):
+ """A list specialized for moz.build environments.
+
+ We overload the assignment and append operations to require that the
+ appended thing is a list. This avoids bad surprises coming from appending
+ a string to a list, which would just add each letter of the string.
+ """
+
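+# For instance (a sketch):
+#
+#   l = List(['a.cpp'])
+#   l += ['b.cpp']   # fine
+#   l += 'c.cpp'     # ValueError instead of appending the string's letters
+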
+
+class UnsortedError(Exception):
+ def __init__(self, srtd, original):
+ assert len(srtd) == len(original)
+
+ self.sorted = srtd
+ self.original = original
+
+ for i, orig in enumerate(original):
+ s = srtd[i]
+
+ if orig != s:
+ self.i = i
+ break
+
+ def __str__(self):
+ s = StringIO()
+
+ s.write('An attempt was made to add an unsorted sequence to a list. ')
+ s.write('The incoming list is unsorted starting at element %d. ' %
+ self.i)
+ s.write('We expected "%s" but got "%s"' % (
+ self.sorted[self.i], self.original[self.i]))
+
+ return s.getvalue()
+
+
+class StrictOrderingOnAppendListMixin(object):
+ @staticmethod
+ def ensure_sorted(l):
+ if isinstance(l, StrictOrderingOnAppendList):
+ return
+
+ def _first_element(e):
+ # If the list entry is a tuple, we sort based on the first element
+ # in the tuple.
+ return e[0] if isinstance(e, tuple) else e
+ srtd = sorted(l, key=lambda x: _first_element(x).lower())
+
+ if srtd != l:
+ raise UnsortedError(srtd, l)
+
+ def __init__(self, iterable=None, **kwargs):
+ if iterable is None:
+ iterable = []
+
+ StrictOrderingOnAppendListMixin.ensure_sorted(iterable)
+
+ super(StrictOrderingOnAppendListMixin, self).__init__(iterable, **kwargs)
+
+ def extend(self, l):
+ StrictOrderingOnAppendListMixin.ensure_sorted(l)
+
+ return super(StrictOrderingOnAppendListMixin, self).extend(l)
+
+ def __setslice__(self, i, j, sequence):
+ StrictOrderingOnAppendListMixin.ensure_sorted(sequence)
+
+ return super(StrictOrderingOnAppendListMixin, self).__setslice__(i, j,
+ sequence)
+
+ def __add__(self, other):
+ StrictOrderingOnAppendListMixin.ensure_sorted(other)
+
+ return super(StrictOrderingOnAppendListMixin, self).__add__(other)
+
+ def __iadd__(self, other):
+ StrictOrderingOnAppendListMixin.ensure_sorted(other)
+
+ return super(StrictOrderingOnAppendListMixin, self).__iadd__(other)
+
+
+class StrictOrderingOnAppendList(ListMixin, StrictOrderingOnAppendListMixin,
+ list):
+ """A list specialized for moz.build environments.
+
+ We overload the assignment and append operations to require that incoming
+ elements be ordered. This enforces cleaner style in moz.build files.
+ """
+
+
+class ListWithActionMixin(object):
+ """Mixin to create lists with pre-processing. See ListWithAction."""
+ def __init__(self, iterable=None, action=None):
+ if iterable is None:
+ iterable = []
+ if not callable(action):
+            raise ValueError('A callable action is required to construct '
+ 'a ListWithAction')
+
+ self._action = action
+ iterable = [self._action(i) for i in iterable]
+ super(ListWithActionMixin, self).__init__(iterable)
+
+ def extend(self, l):
+ l = [self._action(i) for i in l]
+ return super(ListWithActionMixin, self).extend(l)
+
+ def __setslice__(self, i, j, sequence):
+ sequence = [self._action(item) for item in sequence]
+ return super(ListWithActionMixin, self).__setslice__(i, j, sequence)
+
+ def __iadd__(self, other):
+ other = [self._action(i) for i in other]
+ return super(ListWithActionMixin, self).__iadd__(other)
+
+
+class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendListMixin,
+ ListMixin, ListWithActionMixin, list):
+ """An ordered list that accepts a callable to be applied to each item.
+
+ A callable (action) passed to the constructor is run on each item of input.
+ The result of running the callable on each item will be stored in place of
+ the original input, but the original item must be used to enforce sortedness.
+ Note that the order of superclasses is therefore significant.
+ """
+
+
+class ListWithAction(ListMixin, ListWithActionMixin, list):
+    """A list that accepts a callable to be applied to each item.
+
+    A callable (action) must be passed to the constructor to run on
+    each item of input. The result of calling the callable on each item will be
+ stored in place of the original input.
+ """
+
+
+class MozbuildDeletionError(Exception):
+ pass
+
+
+def FlagsFactory(flags):
+ """Returns a class which holds optional flags for an item in a list.
+
+ The flags are defined in the dict given as argument, where keys are
+ the flag names, and values the type used for the value of that flag.
+
+ The resulting class is used by the various <TypeName>WithFlagsFactory
+ functions below.
+ """
+ assert isinstance(flags, dict)
+ assert all(isinstance(v, type) for v in flags.values())
+
+ class Flags(object):
+ __slots__ = flags.keys()
+ _flags = flags
+
+ def update(self, **kwargs):
+ for k, v in kwargs.iteritems():
+ setattr(self, k, v)
+
+ def __getattr__(self, name):
+ if name not in self.__slots__:
+ raise AttributeError("'%s' object has no attribute '%s'" %
+ (self.__class__.__name__, name))
+ try:
+ return object.__getattr__(self, name)
+ except AttributeError:
+ value = self._flags[name]()
+ self.__setattr__(name, value)
+ return value
+
+ def __setattr__(self, name, value):
+ if name not in self.__slots__:
+ raise AttributeError("'%s' object has no attribute '%s'" %
+ (self.__class__.__name__, name))
+ if not isinstance(value, self._flags[name]):
+ raise TypeError("'%s' attribute of class '%s' must be '%s'" %
+ (name, self.__class__.__name__,
+ self._flags[name].__name__))
+ return object.__setattr__(self, name, value)
+
+ def __delattr__(self, name):
+ raise MozbuildDeletionError('Unable to delete attributes for this object')
+
+ return Flags
+
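+# A brief sketch of the generated class (flag names illustrative):
+#
+#   Flags = FlagsFactory({'foo': bool, 'bar': int})
+#   f = Flags()
+#   f.foo         # False: reading an unset flag yields its type's default
+#   f.bar = 42    # type-checked; assigning a non-int raises TypeError
+#   del f.foo     # raises MozbuildDeletionError
+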
+
+class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList):
+ """A list with flags specialized for moz.build environments.
+
+ Each subclass has a set of typed flags; this class lets us use `isinstance`
+ for natural testing.
+ """
+
+
+def StrictOrderingOnAppendListWithFlagsFactory(flags):
+ """Returns a StrictOrderingOnAppendList-like object, with optional
+ flags on each item.
+
+ The flags are defined in the dict given as argument, where keys are
+ the flag names, and values the type used for the value of that flag.
+
+ Example:
+ FooList = StrictOrderingOnAppendListWithFlagsFactory({
+ 'foo': bool, 'bar': unicode
+ })
+ foo = FooList(['a', 'b', 'c'])
+ foo['a'].foo = True
+ foo['b'].bar = 'bar'
+ """
+ class StrictOrderingOnAppendListWithFlagsSpecialization(StrictOrderingOnAppendListWithFlags):
+ def __init__(self, iterable=None):
+ if iterable is None:
+ iterable = []
+ StrictOrderingOnAppendListWithFlags.__init__(self, iterable)
+ self._flags_type = FlagsFactory(flags)
+ self._flags = dict()
+
+ def __getitem__(self, name):
+ if name not in self._flags:
+ if name not in self:
+ raise KeyError("'%s'" % name)
+ self._flags[name] = self._flags_type()
+ return self._flags[name]
+
+ def __setitem__(self, name, value):
+ raise TypeError("'%s' object does not support item assignment" %
+ self.__class__.__name__)
+
+ def _update_flags(self, other):
+ if self._flags_type._flags != other._flags_type._flags:
+ raise ValueError('Expected a list of strings with flags like %s, not like %s' %
+ (self._flags_type._flags, other._flags_type._flags))
+ intersection = set(self._flags.keys()) & set(other._flags.keys())
+ if intersection:
+ raise ValueError('Cannot update flags: both lists of strings with flags configure %s' %
+ intersection)
+ self._flags.update(other._flags)
+
+ def extend(self, l):
+ result = super(StrictOrderingOnAppendList, self).extend(l)
+ if isinstance(l, StrictOrderingOnAppendListWithFlags):
+ self._update_flags(l)
+ return result
+
+ def __setslice__(self, i, j, sequence):
+ result = super(StrictOrderingOnAppendList, self).__setslice__(i, j, sequence)
+ # We may have removed items.
+ for name in set(self._flags.keys()) - set(self):
+ del self._flags[name]
+ if isinstance(sequence, StrictOrderingOnAppendListWithFlags):
+ self._update_flags(sequence)
+ return result
+
+ def __add__(self, other):
+ result = super(StrictOrderingOnAppendList, self).__add__(other)
+ if isinstance(other, StrictOrderingOnAppendListWithFlags):
+ # Result has flags from other but not from self, since
+ # internally we duplicate self and then extend with other, and
+ # only extend knows about flags. Since we don't allow updating
+            # when the sets of flag keys intersect, which instance we pass
+ # to _update_flags here matters. This needs to be correct but
+ # is an implementation detail.
+ result._update_flags(self)
+ return result
+
+ def __iadd__(self, other):
+ result = super(StrictOrderingOnAppendList, self).__iadd__(other)
+ if isinstance(other, StrictOrderingOnAppendListWithFlags):
+ self._update_flags(other)
+ return result
+
+ return StrictOrderingOnAppendListWithFlagsSpecialization
+
+
+class HierarchicalStringList(object):
+ """A hierarchy of lists of strings.
+
+ Each instance of this object contains a list of strings, which can be set or
+    appended to. A sub-level of the hierarchy is also an instance of this class
+    and can be added by appending to an attribute instead.
+
+ For example, the moz.build variable EXPORTS is an instance of this class. We
+ can do:
+
+ EXPORTS += ['foo.h']
+ EXPORTS.mozilla.dom += ['bar.h']
+
+ In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
+ EXPORTS.mozilla.dom), and the first and last each have one element in their
+ list.
+ """
+ __slots__ = ('_strings', '_children')
+
+ def __init__(self):
+ # Please change ContextDerivedTypedHierarchicalStringList in context.py
+ # if you make changes here.
+ self._strings = StrictOrderingOnAppendList()
+ self._children = {}
+
+ class StringListAdaptor(collections.Sequence):
+ def __init__(self, hsl):
+ self._hsl = hsl
+
+ def __getitem__(self, index):
+ return self._hsl._strings[index]
+
+ def __len__(self):
+ return len(self._hsl._strings)
+
+ def walk(self):
+ """Walk over all HierarchicalStringLists in the hierarchy.
+
+ This is a generator of (path, sequence).
+
+ The path is '' for the root level and '/'-delimited strings for
+ any descendants. The sequence is a read-only sequence of the
+ strings contained at that level.
+ """
+
+ if self._strings:
+ path_to_here = ''
+ yield path_to_here, self.StringListAdaptor(self)
+
+ for k, l in sorted(self._children.items()):
+ for p, v in l.walk():
+ path_to_there = '%s/%s' % (k, p)
+ yield path_to_there.strip('/'), v
+
+ def __setattr__(self, name, value):
+ if name in self.__slots__:
+ return object.__setattr__(self, name, value)
+
+ # __setattr__ can be called with a list when a simple assignment is
+ # used:
+ #
+ # EXPORTS.foo = ['file.h']
+ #
+ # In this case, we need to overwrite foo's current list of strings.
+ #
+ # However, __setattr__ is also called with a HierarchicalStringList
+ # to try to actually set the attribute. We want to ignore this case,
+ # since we don't actually create an attribute called 'foo', but just add
+ # it to our list of children (using _get_exportvariable()).
+ self._set_exportvariable(name, value)
+
+ def __getattr__(self, name):
+ if name.startswith('__'):
+ return object.__getattr__(self, name)
+ return self._get_exportvariable(name)
+
+ def __delattr__(self, name):
+ raise MozbuildDeletionError('Unable to delete attributes for this object')
+
+ def __iadd__(self, other):
+ if isinstance(other, HierarchicalStringList):
+ self._strings += other._strings
+ for c in other._children:
+ self[c] += other[c]
+ else:
+ self._check_list(other)
+ self._strings += other
+ return self
+
+ def __getitem__(self, name):
+ return self._get_exportvariable(name)
+
+ def __setitem__(self, name, value):
+ self._set_exportvariable(name, value)
+
+ def _get_exportvariable(self, name):
+ # Please change ContextDerivedTypedHierarchicalStringList in context.py
+ # if you make changes here.
+ child = self._children.get(name)
+ if not child:
+ child = self._children[name] = HierarchicalStringList()
+ return child
+
+ def _set_exportvariable(self, name, value):
+ if name in self._children:
+ if value is self._get_exportvariable(name):
+ return
+ raise KeyError('global_ns', 'reassign',
+ '<some variable>.%s' % name)
+
+ exports = self._get_exportvariable(name)
+ exports._check_list(value)
+ exports._strings += value
+
+ def _check_list(self, value):
+ if not isinstance(value, list):
+ raise ValueError('Expected a list of strings, not %s' % type(value))
+ for v in value:
+ if not isinstance(v, str_type):
+ raise ValueError(
+ 'Expected a list of strings, not an element of %s' % type(v))
+
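+
+# Editor's note: the function below is an illustrative sketch, not part of
+# the original module. It mirrors the EXPORTS example from the docstring
+# above; the helper name is hypothetical.
+def _example_hierarchical_string_list():
+    exports = HierarchicalStringList()
+    exports += ['foo.h']
+    exports.mozilla.dom += ['bar.h']
+    # walk() yields ('', <root strings>) and ('mozilla/dom', <leaf strings>);
+    # the intermediate 'mozilla' level is skipped because its list is empty.
+    return [(path, list(strings)) for path, strings in exports.walk()]
+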
+
+class LockFile(object):
+ """LockFile is used by the lock_file method to hold the lock.
+
+ This object should not be used directly, but only through
+ the lock_file method below.
+ """
+
+ def __init__(self, lockfile):
+ self.lockfile = lockfile
+
+ def __del__(self):
+ while True:
+ try:
+ os.remove(self.lockfile)
+ break
+ except OSError as e:
+ if e.errno == errno.EACCES:
+ # Another process probably has the file open, we'll retry.
+ # Just a short sleep since we want to drop the lock ASAP
+ # (but we need to let some other process close the file
+ # first).
+ time.sleep(0.1)
+ else:
+ # Re-raise unknown errors
+ raise
+
+
+def lock_file(lockfile, max_wait=600):
+ """Create and hold a lockfile of the given name, with the given timeout.
+
+ To release the lock, delete the returned object.
+ """
+
+ # FUTURE This function and object could be written as a context manager.
+
+ while True:
+ try:
+ fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
+ # We created the lockfile, so we're the owner
+ break
+ except OSError as e:
+ if (e.errno == errno.EEXIST or
+ (sys.platform == "win32" and e.errno == errno.EACCES)):
+ pass
+ else:
+ # Should not occur
+ raise
+
+ try:
+ # The lock file exists, try to stat it to get its age
+ # and read its contents to report the owner PID
+ f = open(lockfile, 'r')
+ s = os.stat(lockfile)
+ except EnvironmentError as e:
+ if e.errno == errno.ENOENT or e.errno == errno.EACCES:
+ # We didn't create the lockfile, so it did exist, but it's
+ # gone now. Just try again
+ continue
+
+ raise Exception('{0} exists but stat() failed: {1}'.format(
+ lockfile, e.strerror))
+
+ # We didn't create the lockfile and it's still there, check
+ # its age
+ now = int(time.time())
+ if now - s[stat.ST_MTIME] > max_wait:
+ pid = f.readline().rstrip()
+ raise Exception('{0} has been locked for more than '
+ '{1} seconds (PID {2})'.format(lockfile, max_wait, pid))
+
+ # It's not been locked too long, wait a while and retry
+ f.close()
+ time.sleep(1)
+
+    # If we get here, we have the lockfile. Convert the os.open file
+    # descriptor into a Python file object and record our PID in it.
+ f = os.fdopen(fd, 'w')
+ f.write('{0}\n'.format(os.getpid()))
+ f.close()
+
+ return LockFile(lockfile)
+
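+
+# Editor's note: an illustrative sketch (hypothetical path and helper name),
+# not part of the original module. The lock is held for as long as the
+# returned object stays alive; releasing relies on CPython's reference
+# counting calling LockFile.__del__ promptly.
+def _example_lock_file():
+    lock = lock_file('/tmp/example-build.lock')
+    try:
+        pass  # ... work requiring exclusive access ...
+    finally:
+        del lock  # removes the lockfile
+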
+
+class OrderedDefaultDict(OrderedDict):
+ '''A combination of OrderedDict and defaultdict.'''
+ def __init__(self, default_factory, *args, **kwargs):
+ OrderedDict.__init__(self, *args, **kwargs)
+ self._default_factory = default_factory
+
+ def __missing__(self, key):
+ value = self[key] = self._default_factory()
+ return value
+
+
+class KeyedDefaultDict(dict):
+ '''Like a defaultdict, but the default_factory function takes the key as
+ argument'''
+ def __init__(self, default_factory, *args, **kwargs):
+ dict.__init__(self, *args, **kwargs)
+ self._default_factory = default_factory
+
+ def __missing__(self, key):
+ value = self._default_factory(key)
+ dict.__setitem__(self, key, value)
+ return value
+
+
+class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict):
+ '''Like KeyedDefaultDict, but read-only.'''
+
+
+class memoize(dict):
+    '''A decorator to memoize the results of function calls depending
+    on their arguments.
+    Both functions and instance methods are handled, although in the
+    instance method case, the results are cached in the instance itself.
+    '''
+ def __init__(self, func):
+ self.func = func
+ functools.update_wrapper(self, func)
+
+ def __call__(self, *args):
+ if args not in self:
+ self[args] = self.func(*args)
+ return self[args]
+
+ def method_call(self, instance, *args):
+ name = '_%s' % self.func.__name__
+ if not hasattr(instance, name):
+ setattr(instance, name, {})
+ cache = getattr(instance, name)
+ if args not in cache:
+ cache[args] = self.func(instance, *args)
+ return cache[args]
+
+ def __get__(self, instance, cls):
+ return functools.update_wrapper(
+ functools.partial(self.method_call, instance), self.func)
+
+
+class memoized_property(object):
+ '''A specialized version of the memoize decorator that works for
+ class instance properties.
+ '''
+ def __init__(self, func):
+ self.func = func
+
+ def __get__(self, instance, cls):
+ name = '_%s' % self.func.__name__
+ if not hasattr(instance, name):
+ setattr(instance, name, self.func(instance))
+ return getattr(instance, name)
+
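+
+# Editor's note: an illustrative sketch, not part of the original module.
+# @memoize caches results per argument tuple; memoized_property does the
+# same for a computed attribute, once per instance.
+def _example_memoize():
+    @memoize
+    def square(x):
+        return x * x
+    # The second call returns the cached value without re-running square.
+    return square(4) == square(4) == 16
+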
+
+def TypedNamedTuple(name, fields):
+ """Factory for named tuple types with strong typing.
+
+    Arguments are an iterable of 2-tuples. The first member is the
+    field name. The second member is a type the field will be validated
+    to be.
+
+ Construction of instances varies from ``collections.namedtuple``.
+
+ First, if a single tuple argument is given to the constructor, this is
+ treated as the equivalent of passing each tuple value as a separate
+ argument into __init__. e.g.::
+
+ t = (1, 2)
+ TypedTuple(t) == TypedTuple(1, 2)
+
+ This behavior is meant for moz.build files, so vanilla tuples are
+ automatically cast to typed tuple instances.
+
+ Second, fields in the tuple are validated to be instances of the specified
+ type. This is done via an ``isinstance()`` check. To allow multiple types,
+ pass a tuple as the allowed types field.
+ """
+ cls = collections.namedtuple(name, (name for name, typ in fields))
+
+ class TypedTuple(cls):
+ __slots__ = ()
+
+ def __new__(klass, *args, **kwargs):
+ if len(args) == 1 and not kwargs and isinstance(args[0], tuple):
+ args = args[0]
+
+ return super(TypedTuple, klass).__new__(klass, *args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ for i, (fname, ftype) in enumerate(self._fields):
+ value = self[i]
+
+ if not isinstance(value, ftype):
+ raise TypeError('field in tuple not of proper type: %s; '
+ 'got %s, expected %s' % (fname,
+ type(value), ftype))
+
+ super(TypedTuple, self).__init__(*args, **kwargs)
+
+ TypedTuple._fields = fields
+
+ return TypedTuple
+
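+
+# Editor's note: an illustrative sketch (hypothetical type), not part of the
+# original module. Both construction forms from the docstring are
+# equivalent; a wrongly-typed field would raise TypeError.
+def _example_typed_named_tuple():
+    SourcePos = TypedNamedTuple('SourcePos',
+                                [('filename', unicode), ('lineno', int)])
+    return SourcePos(u'foo.cpp', 10) == SourcePos((u'foo.cpp', 10))
+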
+
+class TypedListMixin(object):
+ '''Mixin for a list with type coercion. See TypedList.'''
+
+ def _ensure_type(self, l):
+ if isinstance(l, self.__class__):
+ return l
+
+ return [self.normalize(e) for e in l]
+
+ def __init__(self, iterable=None, **kwargs):
+ if iterable is None:
+ iterable = []
+ iterable = self._ensure_type(iterable)
+
+ super(TypedListMixin, self).__init__(iterable, **kwargs)
+
+ def extend(self, l):
+ l = self._ensure_type(l)
+
+ return super(TypedListMixin, self).extend(l)
+
+ def __setslice__(self, i, j, sequence):
+ sequence = self._ensure_type(sequence)
+
+ return super(TypedListMixin, self).__setslice__(i, j,
+ sequence)
+
+ def __add__(self, other):
+ other = self._ensure_type(other)
+
+ return super(TypedListMixin, self).__add__(other)
+
+ def __iadd__(self, other):
+ other = self._ensure_type(other)
+
+ return super(TypedListMixin, self).__iadd__(other)
+
+ def append(self, other):
+ self += [other]
+
+
+@memoize
+def TypedList(type, base_class=List):
+ '''A list with type coercion.
+
+ The given ``type`` is what list elements are being coerced to. It may do
+ strict validation, throwing ValueError exceptions.
+
+ A ``base_class`` type can be given for more specific uses than a List. For
+ example, a Typed StrictOrderingOnAppendList can be created with:
+
+ TypedList(unicode, StrictOrderingOnAppendList)
+ '''
+ class _TypedList(TypedListMixin, base_class):
+ @staticmethod
+ def normalize(e):
+ if not isinstance(e, type):
+ e = type(e)
+ return e
+
+ return _TypedList
+
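+
+# Editor's note: an illustrative sketch (hypothetical names), not part of
+# the original module. Elements are coerced to unicode on the way in,
+# whether at construction, append() or +=.
+def _example_typed_list():
+    UnicodeList = TypedList(unicode)
+    l = UnicodeList(['a'])
+    l.append('b')
+    l += ['c']
+    return all(isinstance(e, unicode) for e in l)
+
+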
+def group_unified_files(files, unified_prefix, unified_suffix,
+ files_per_unified_file):
+ """Return an iterator of (unified_filename, source_filenames) tuples.
+
+ We compile most C and C++ files in "unified mode"; instead of compiling
+ ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file
+ that looks approximately like::
+
+ #include "a.cpp"
+ #include "b.cpp"
+ #include "c.cpp"
+
+ This function handles the details of generating names for the unified
+ files, and determining which original source files go in which unified
+ file."""
+
+ # Make sure the input list is sorted. If it's not, bad things could happen!
+ files = sorted(files)
+
+ # Our last returned list of source filenames may be short, and we
+ # don't want the fill value inserted by izip_longest to be an
+ # issue. So we do a little dance to filter it out ourselves.
+ dummy_fill_value = ("dummy",)
+ def filter_out_dummy(iterable):
+ return itertools.ifilter(lambda x: x != dummy_fill_value,
+ iterable)
+
+ # From the itertools documentation, slightly modified:
+ def grouper(n, iterable):
+        "grouper(3, 'ABCDEFG') --> ABC DEF G## (# is the dummy fill value)"
+ args = [iter(iterable)] * n
+ return itertools.izip_longest(fillvalue=dummy_fill_value, *args)
+
+ for i, unified_group in enumerate(grouper(files_per_unified_file,
+ files)):
+ just_the_filenames = list(filter_out_dummy(unified_group))
+ yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
+
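+
+# Editor's note: an illustrative sketch (hypothetical file names), not part
+# of the original module.
+def _example_group_unified_files():
+    groups = list(group_unified_files(['b.cpp', 'a.cpp', 'c.cpp'],
+                                      'Unified', 'cpp', 2))
+    # -> [('Unified0.cpp', ['a.cpp', 'b.cpp']), ('Unified1.cpp', ['c.cpp'])]
+    return groups
+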
+
+def pair(iterable):
+ '''Given an iterable, returns an iterable pairing its items.
+
+ For example,
+ list(pair([1,2,3,4,5,6]))
+ returns
+ [(1,2), (3,4), (5,6)]
+ '''
+ i = iter(iterable)
+ return itertools.izip_longest(i, i)
+
+
+VARIABLES_RE = re.compile(r'\$\((\w+)\)')
+
+
+def expand_variables(s, variables):
+ '''Given a string with $(var) variable references, replace those references
+ with the corresponding entries from the given `variables` dict.
+
+    If a variable value is not a string, it is iterated and its items are
+    joined with spaces.'''
+ result = ''
+ for s, name in pair(VARIABLES_RE.split(s)):
+ result += s
+ value = variables.get(name)
+ if not value:
+ continue
+ if not isinstance(value, types.StringTypes):
+ value = ' '.join(value)
+ result += value
+ return result
+
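+
+# Editor's note: an illustrative sketch (hypothetical values), not part of
+# the original module. String values substitute directly, list values are
+# joined with spaces, and unknown variables expand to nothing.
+def _example_expand_variables():
+    variables = {'CC': 'clang', 'CFLAGS': ['-O2', '-g']}
+    return expand_variables('$(CC) $(CFLAGS)$(UNDEFINED)', variables)
+    # -> 'clang -O2 -g'
+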
+
+class DefinesAction(argparse.Action):
+ '''An ArgumentParser action to handle -Dvar[=value] type of arguments.'''
+ def __call__(self, parser, namespace, values, option_string):
+ defines = getattr(namespace, self.dest)
+ if defines is None:
+ defines = {}
+ values = values.split('=', 1)
+ if len(values) == 1:
+ name, value = values[0], 1
+ else:
+ name, value = values
+ if value.isdigit():
+ value = int(value)
+ defines[name] = value
+ setattr(namespace, self.dest, defines)
+
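+
+# Editor's note: an illustrative sketch (hypothetical parser), not part of
+# the original module. A bare -DFOO defaults to 1, and numeric values are
+# converted to int.
+def _example_defines_action():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-D', action=DefinesAction, dest='defines')
+    args = parser.parse_args(['-DFOO', '-DBAR=2', '-DBAZ=qux'])
+    return args.defines  # {'FOO': 1, 'BAR': 2, 'BAZ': 'qux'}
+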
+
+class EnumStringComparisonError(Exception):
+ pass
+
+
+class EnumString(unicode):
+    '''A string type that can only have a limited set of values, similar to
+    an Enum, and can only be compared against that set of values.
+
+ The class is meant to be subclassed, where the subclass defines
+ POSSIBLE_VALUES. The `subclass` method is a helper to create such
+ subclasses.
+ '''
+ POSSIBLE_VALUES = ()
+ def __init__(self, value):
+ if value not in self.POSSIBLE_VALUES:
+ raise ValueError("'%s' is not a valid value for %s"
+ % (value, self.__class__.__name__))
+
+ def __eq__(self, other):
+ if other not in self.POSSIBLE_VALUES:
+ raise EnumStringComparisonError(
+ 'Can only compare with %s'
+ % ', '.join("'%s'" % v for v in self.POSSIBLE_VALUES))
+ return super(EnumString, self).__eq__(other)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @staticmethod
+ def subclass(*possible_values):
+ class EnumStringSubclass(EnumString):
+ POSSIBLE_VALUES = possible_values
+ return EnumStringSubclass
+
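+
+# Editor's note: an illustrative sketch (hypothetical values), not part of
+# the original module. Comparing against a value outside POSSIBLE_VALUES
+# raises EnumStringComparisonError rather than silently returning False.
+def _example_enum_string():
+    CompilerType = EnumString.subclass('gcc', 'clang', 'clang-cl')
+    compiler = CompilerType('clang')
+    return compiler == 'clang' and compiler != 'gcc'
+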
+
+def _escape_char(c):
+    # str.encode('unicode_escape') doesn't escape quotes, presumably because
+    # quoting could be done with either ' or ".
+ if c == "'":
+ return "\\'"
+ return unicode(c.encode('unicode_escape'))
+
+# Mapping table between raw characters below \x80 and their escaped
+# counterpart, when they differ
+_INDENTED_REPR_TABLE = {
+ c: e
+ for c, e in map(lambda x: (x, _escape_char(x)),
+ map(unichr, range(128)))
+ if c != e
+}
+# Regexp matching all characters to escape.
+_INDENTED_REPR_RE = re.compile(
+ '([' + ''.join(_INDENTED_REPR_TABLE.values()) + ']+)')
+
+
+def indented_repr(o, indent=4):
+ '''Similar to repr(), but returns an indented representation of the object
+
+    One notable difference from repr is that the returned representation
+ assumes `from __future__ import unicode_literals`.
+ '''
+ one_indent = ' ' * indent
+ def recurse_indented_repr(o, level):
+ if isinstance(o, dict):
+ yield '{\n'
+ for k, v in sorted(o.items()):
+ yield one_indent * (level + 1)
+ for d in recurse_indented_repr(k, level + 1):
+ yield d
+ yield ': '
+ for d in recurse_indented_repr(v, level + 1):
+ yield d
+ yield ',\n'
+ yield one_indent * level
+ yield '}'
+ elif isinstance(o, bytes):
+ yield 'b'
+ yield repr(o)
+ elif isinstance(o, unicode):
+ yield "'"
+ # We want a readable string (non escaped unicode), but some
+ # special characters need escaping (e.g. \n, \t, etc.)
+ for i, s in enumerate(_INDENTED_REPR_RE.split(o)):
+ if i % 2:
+ for c in s:
+ yield _INDENTED_REPR_TABLE[c]
+ else:
+ yield s
+ yield "'"
+ elif hasattr(o, '__iter__'):
+ yield '[\n'
+ for i in o:
+ yield one_indent * (level + 1)
+ for d in recurse_indented_repr(i, level + 1):
+ yield d
+ yield ',\n'
+ yield one_indent * level
+ yield ']'
+ else:
+ yield repr(o)
+ return ''.join(recurse_indented_repr(o, 0))
+
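+
+# Editor's note (illustrative, not part of the original module): nested
+# containers are rendered one item per line with trailing commas, e.g.
+#     indented_repr({u'DEFINES': [1]})
+# returns
+#     {
+#         'DEFINES': [
+#             1,
+#         ],
+#     }
+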
+
+def encode(obj, encoding='utf-8'):
+ '''Recursively encode unicode strings with the given encoding.'''
+ if isinstance(obj, dict):
+ return {
+ encode(k, encoding): encode(v, encoding)
+ for k, v in obj.iteritems()
+ }
+ if isinstance(obj, bytes):
+ return obj
+ if isinstance(obj, unicode):
+ return obj.encode(encoding)
+ if isinstance(obj, Iterable):
+ return [encode(i, encoding) for i in obj]
+ return obj
diff --git a/python/mozbuild/mozbuild/vendor_rust.py b/python/mozbuild/mozbuild/vendor_rust.py
new file mode 100644
index 000000000..92103e1cb
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor_rust.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from distutils.version import LooseVersion
+import logging
+from mozbuild.base import (
+ BuildEnvironmentNotFoundException,
+ MozbuildObject,
+)
+import mozfile
+import mozpack.path as mozpath
+import os
+import subprocess
+import sys
+
+class VendorRust(MozbuildObject):
+ def get_cargo_path(self):
+ try:
+ # If the build isn't --enable-rust then CARGO won't be set.
+ return self.substs['CARGO']
+ except (BuildEnvironmentNotFoundException, KeyError):
+ # Default if this tree isn't configured.
+ import which
+ return which.which('cargo')
+
+ def check_cargo_version(self, cargo):
+        '''
+        Ensure that cargo is new enough. cargo 0.12 added support
+        for source replacement, which is required for vendoring to work;
+        we require at least 0.13.
+        '''
+ out = subprocess.check_output([cargo, '--version']).splitlines()[0]
+ if not out.startswith('cargo'):
+ return False
+        return LooseVersion(out.split()[1]) >= LooseVersion('0.13')
+
+ def check_modified_files(self):
+ '''
+ Ensure that there aren't any uncommitted changes to files
+ in the working copy, since we're going to change some state
+ on the user. Allow changes to Cargo.{toml,lock} since that's
+ likely to be a common use case.
+ '''
+ modified = [f for f in self.repository.get_modified_files() if os.path.basename(f) not in ('Cargo.toml', 'Cargo.lock')]
+ if modified:
+ self.log(logging.ERROR, 'modified_files', {},
+ '''You have uncommitted changes to the following files:
+
+{files}
+
+Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+'''.format(files='\n'.join(sorted(modified))))
+ sys.exit(1)
+
+ def vendor(self, ignore_modified=False):
+ self.populate_logger()
+ self.log_manager.enable_unstructured()
+ if not ignore_modified:
+ self.check_modified_files()
+ cargo = self.get_cargo_path()
+ if not self.check_cargo_version(cargo):
+ self.log(logging.ERROR, 'cargo_version', {}, 'Cargo >= 0.13 required (install Rust 1.12 or newer)')
+ return
+ else:
+ self.log(logging.DEBUG, 'cargo_version', {}, 'cargo is new enough')
+ have_vendor = any(l.strip() == 'vendor' for l in subprocess.check_output([cargo, '--list']).splitlines())
+ if not have_vendor:
+ self.log(logging.INFO, 'installing', {}, 'Installing cargo-vendor')
+ self.run_process(args=[cargo, 'install', 'cargo-vendor'])
+ else:
+            self.log(logging.DEBUG, 'cargo_vendor', {}, 'cargo-vendor already installed')
+ vendor_dir = mozpath.join(self.topsrcdir, 'third_party/rust')
+ self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % vendor_dir)
+ mozfile.remove(vendor_dir)
+ # Once we require a new enough cargo to switch to workspaces, we can
+ # just do this once on the workspace root crate.
+ for crate_root in ('toolkit/library/rust/',
+ 'toolkit/library/gtest/rust'):
+ path = mozpath.join(self.topsrcdir, crate_root)
+ self._run_command_in_srcdir(args=[cargo, 'generate-lockfile', '--manifest-path', mozpath.join(path, 'Cargo.toml')])
+ self._run_command_in_srcdir(args=[cargo, 'vendor', '--sync', mozpath.join(path, 'Cargo.lock'), vendor_dir])
+        # TODO: print stats on size of files added/removed, warn or error
+        # when adding very large files (bug 1306078)
+ self.repository.add_remove_files(vendor_dir)
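+
+
+# Editor's note: an illustrative sketch (sample string is made up), not part
+# of the original module, showing how check_cargo_version() interprets
+# `cargo --version` output.
+def _example_cargo_version_parse():
+    out = 'cargo 0.13.0-nightly (eca9e15 2016-11-01)'
+    return LooseVersion(out.split()[1]) >= LooseVersion('0.13')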
diff --git a/python/mozbuild/mozbuild/virtualenv.py b/python/mozbuild/mozbuild/virtualenv.py
new file mode 100644
index 000000000..05d30424b
--- /dev/null
+++ b/python/mozbuild/mozbuild/virtualenv.py
@@ -0,0 +1,568 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains code for populating the virtualenv environment for
+# Mozilla's build system. It is typically called as part of configure.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import distutils.sysconfig
+import os
+import shutil
+import subprocess
+import sys
+import warnings
+
+from distutils.version import LooseVersion
+
+IS_NATIVE_WIN = (sys.platform == 'win32' and os.sep == '\\')
+IS_MSYS2 = (sys.platform == 'win32' and os.sep == '/')
+IS_CYGWIN = (sys.platform == 'cygwin')
+
+# Minimum version of Python required to build.
+MINIMUM_PYTHON_VERSION = LooseVersion('2.7.3')
+MINIMUM_PYTHON_MAJOR = 2
+
+
+UPGRADE_WINDOWS = '''
+Please upgrade to the latest MozillaBuild development environment. See
+https://developer.mozilla.org/en-US/docs/Developer_Guide/Build_Instructions/Windows_Prerequisites
+'''.lstrip()
+
+UPGRADE_OTHER = '''
+Run |mach bootstrap| to ensure your system is up to date.
+
+If you still receive this error, your shell environment is likely detecting
+another Python version. Ensure a modern Python can be found in the paths
+defined by the $PATH environment variable and try again.
+'''.lstrip()
+
+
+class VirtualenvManager(object):
+ """Contains logic for managing virtualenvs for building the tree."""
+
+ def __init__(self, topsrcdir, topobjdir, virtualenv_path, log_handle,
+ manifest_path):
+ """Create a new manager.
+
+ Each manager is associated with a source directory, a path where you
+ want the virtualenv to be created, and a handle to write output to.
+ """
+ assert os.path.isabs(manifest_path), "manifest_path must be an absolute path: %s" % (manifest_path)
+ self.topsrcdir = topsrcdir
+ self.topobjdir = topobjdir
+ self.virtualenv_root = virtualenv_path
+
+ # Record the Python executable that was used to create the Virtualenv
+ # so we can check this against sys.executable when verifying the
+ # integrity of the virtualenv.
+ self.exe_info_path = os.path.join(self.virtualenv_root,
+ 'python_exe.txt')
+
+ self.log_handle = log_handle
+ self.manifest_path = manifest_path
+
+ @property
+ def virtualenv_script_path(self):
+ """Path to virtualenv's own populator script."""
+ return os.path.join(self.topsrcdir, 'python', 'virtualenv',
+ 'virtualenv.py')
+
+ @property
+ def bin_path(self):
+ # virtualenv.py provides a similar API via path_locations(). However,
+ # we have a bit of a chicken-and-egg problem and can't reliably
+ # import virtualenv. The functionality is trivial, so just implement
+ # it here.
+ if IS_CYGWIN or IS_NATIVE_WIN:
+ return os.path.join(self.virtualenv_root, 'Scripts')
+
+ return os.path.join(self.virtualenv_root, 'bin')
+
+ @property
+ def python_path(self):
+ binary = 'python'
+ if sys.platform in ('win32', 'cygwin'):
+ binary += '.exe'
+
+ return os.path.join(self.bin_path, binary)
+
+ @property
+ def activate_path(self):
+ return os.path.join(self.bin_path, 'activate_this.py')
+
+ def get_exe_info(self):
+        """Returns the version and file size of the python executable that was
+        in use when this virtualenv was created.
+        """
+ with open(self.exe_info_path, 'r') as fh:
+ version, size = fh.read().splitlines()
+ return int(version), int(size)
+
+ def write_exe_info(self, python):
+        """Records the version of the python executable that was in use when
+        this virtualenv was created. We record this explicitly because
+        on OS X our python path may end up being a different or modified
+        executable.
+        """
+ ver = subprocess.check_output([python, '-c', 'import sys; print(sys.hexversion)']).rstrip()
+ with open(self.exe_info_path, 'w') as fh:
+ fh.write("%s\n" % ver)
+ fh.write("%s\n" % os.path.getsize(python))
+
+ def up_to_date(self, python=sys.executable):
+ """Returns whether the virtualenv is present and up to date."""
+
+ deps = [self.manifest_path, __file__]
+
+ # check if virtualenv exists
+ if not os.path.exists(self.virtualenv_root) or \
+ not os.path.exists(self.activate_path):
+
+ return False
+
+ # check modification times
+ activate_mtime = os.path.getmtime(self.activate_path)
+ dep_mtime = max(os.path.getmtime(p) for p in deps)
+ if dep_mtime > activate_mtime:
+ return False
+
+        # Verify that the Python we're checking here is either the virtualenv
+        # python, or we have the Python version that was used to create the
+        # virtualenv. If this fails, it is likely system Python has been
+        # upgraded, and our virtualenv would not be usable.
+ python_size = os.path.getsize(python)
+ if ((python, python_size) != (self.python_path, os.path.getsize(self.python_path)) and
+ (sys.hexversion, python_size) != self.get_exe_info()):
+ return False
+
+ # recursively check sub packages.txt files
+ submanifests = [i[1] for i in self.packages()
+ if i[0] == 'packages.txt']
+ for submanifest in submanifests:
+ submanifest = os.path.join(self.topsrcdir, submanifest)
+ submanager = VirtualenvManager(self.topsrcdir,
+ self.topobjdir,
+ self.virtualenv_root,
+ self.log_handle,
+ submanifest)
+ if not submanager.up_to_date(python):
+ return False
+
+ return True
+
+ def ensure(self, python=sys.executable):
+ """Ensure the virtualenv is present and up to date.
+
+ If the virtualenv is up to date, this does nothing. Otherwise, it
+ creates and populates the virtualenv as necessary.
+
+ This should be the main API used from this class as it is the
+ highest-level.
+ """
+ if self.up_to_date(python):
+ return self.virtualenv_root
+ return self.build(python)
+
+ def _log_process_output(self, *args, **kwargs):
+ if hasattr(self.log_handle, 'fileno'):
+ return subprocess.call(*args, stdout=self.log_handle,
+ stderr=subprocess.STDOUT, **kwargs)
+
+ proc = subprocess.Popen(*args, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, **kwargs)
+
+ for line in proc.stdout:
+ self.log_handle.write(line)
+
+ return proc.wait()
+
+ def create(self, python=sys.executable):
+ """Create a new, empty virtualenv.
+
+ Receives the path to virtualenv's virtualenv.py script (which will be
+ called out to), the path to create the virtualenv in, and a handle to
+ write output to.
+ """
+ env = dict(os.environ)
+ env.pop('PYTHONDONTWRITEBYTECODE', None)
+
+ args = [python, self.virtualenv_script_path,
+ # Without this, virtualenv.py may attempt to contact the outside
+ # world and search for or download a newer version of pip,
+ # setuptools, or wheel. This is bad for security, reproducibility,
+ # and speed.
+ '--no-download',
+ self.virtualenv_root]
+
+ result = self._log_process_output(args, env=env)
+
+ if result:
+ raise Exception(
+ 'Failed to create virtualenv: %s' % self.virtualenv_root)
+
+ self.write_exe_info(python)
+
+ return self.virtualenv_root
+
+ def packages(self):
+        with open(self.manifest_path, 'rU') as fh:
+ packages = [line.rstrip().split(':')
+ for line in fh]
+ return packages
+
+ def populate(self):
+ """Populate the virtualenv.
+
+ The manifest file consists of colon-delimited fields. The first field
+ specifies the action. The remaining fields are arguments to that
+ action. The following actions are supported:
+
+ setup.py -- Invoke setup.py for a package. Expects the arguments:
+ 1. relative path directory containing setup.py.
+ 2. argument(s) to setup.py. e.g. "develop". Each program argument
+ is delimited by a colon. Arguments with colons are not yet
+ supported.
+
+ filename.pth -- Adds the path given as argument to filename.pth under
+ the virtualenv site packages directory.
+
+ optional -- This denotes the action as optional. The requested action
+ is attempted. If it fails, we issue a warning and go on. The
+ initial "optional" field is stripped then the remaining line is
+ processed like normal. e.g.
+            "optional:setup.py:python/foo:build_ext:-i"
+
+ copy -- Copies the given file in the virtualenv site packages
+ directory.
+
+ packages.txt -- Denotes that the specified path is a child manifest. It
+ will be read and processed as if its contents were concatenated
+ into the manifest being read.
+
+ objdir -- Denotes a relative path in the object directory to add to the
+ search path. e.g. "objdir:build" will add $topobjdir/build to the
+ search path.
+
+        Note that the Python interpreter running this function should be the
+        one from the virtualenv. If it is the system Python or if the
+        environment is not configured properly, packages could be installed
+        into the wrong place. This is how virtualenvs work.
+ """
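+        # Editor's note: a hypothetical manifest illustrating the grammar
+        # above (the paths and file names are made up):
+        #
+        #     mozbuild.pth:python/mozbuild
+        #     setup.py:python/foo:develop
+        #     optional:setup.py:python/foo:build_ext:-i
+        #     packages.txt:testing/packages.txt
+        #     objdir:build
+        #     copy:python/helpers/site_extra.py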
+
+ packages = self.packages()
+ python_lib = distutils.sysconfig.get_python_lib()
+
+ def handle_package(package):
+ if package[0] == 'setup.py':
+ assert len(package) >= 2
+
+ self.call_setup(os.path.join(self.topsrcdir, package[1]),
+ package[2:])
+
+ return True
+
+ if package[0] == 'copy':
+ assert len(package) == 2
+
+ src = os.path.join(self.topsrcdir, package[1])
+ dst = os.path.join(python_lib, os.path.basename(package[1]))
+
+ shutil.copy(src, dst)
+
+ return True
+
+ if package[0] == 'packages.txt':
+ assert len(package) == 2
+
+ src = os.path.join(self.topsrcdir, package[1])
+ assert os.path.isfile(src), "'%s' does not exist" % src
+ submanager = VirtualenvManager(self.topsrcdir,
+ self.topobjdir,
+ self.virtualenv_root,
+ self.log_handle,
+ src)
+ submanager.populate()
+
+ return True
+
+ if package[0].endswith('.pth'):
+ assert len(package) == 2
+
+ path = os.path.join(self.topsrcdir, package[1])
+
+ with open(os.path.join(python_lib, package[0]), 'a') as f:
+ # This path is relative to the .pth file. Using a
+ # relative path allows the srcdir/objdir combination
+ # to be moved around (as long as the paths relative to
+ # each other remain the same).
+ try:
+ f.write("%s\n" % os.path.relpath(path, python_lib))
+ except ValueError:
+ # When objdir is on a separate drive, relpath throws
+ f.write("%s\n" % os.path.join(python_lib, path))
+
+ return True
+
+ if package[0] == 'optional':
+ try:
+ handle_package(package[1:])
+ return True
+ except:
+                    print('Error processing command. Ignoring',
+                          'because optional. (%s)' % ':'.join(package),
+                          file=self.log_handle)
+ return False
+
+ if package[0] == 'objdir':
+ assert len(package) == 2
+ path = os.path.join(self.topobjdir, package[1])
+
+ with open(os.path.join(python_lib, 'objdir.pth'), 'a') as f:
+ f.write('%s\n' % path)
+
+ return True
+
+ raise Exception('Unknown action: %s' % package[0])
+
+        # We always target the OS X deployment target that Python itself was
+        # built with, regardless of what's in the current environment. If we
+        # don't do this, we may run into a Python bug. See
+        # http://bugs.python.org/issue9516 and bug 659881.
+ #
+        # Note that this assumes that nothing compiled in the virtualenv is
+        # shipped as part of a distribution. If we do ship anything, the
+        # deployment target here may be different from what's targeted by the
+        # shipping binaries and virtualenv-produced binaries may fail to
+        # work.
+ #
+ # We also ignore environment variables that may have been altered by
+ # configure or a mozconfig activated in the current shell. We trust
+ # Python is smart enough to find a proper compiler and to use the
+        # proper compiler flags. If it isn't, your Python is likely broken.
+ IGNORE_ENV_VARIABLES = ('CC', 'CXX', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS',
+ 'PYTHONDONTWRITEBYTECODE')
+
+ try:
+ old_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
+ sysconfig_target = \
+ distutils.sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+
+ if sysconfig_target is not None:
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = sysconfig_target
+
+ old_env_variables = {}
+ for k in IGNORE_ENV_VARIABLES:
+ if k not in os.environ:
+ continue
+
+ old_env_variables[k] = os.environ[k]
+ del os.environ[k]
+
+ # HACK ALERT.
+ #
+            # The following adjustments to the VSNNCOMNTOOLS environment
+            # variables are wrong. This is done as a hack to facilitate the
+            # building of binary Python packages - notably psutil - on Windows
+ # machines that don't have the Visual Studio 2008 binaries
+ # installed. This hack assumes the Python on that system was built
+ # with Visual Studio 2008. The hack is wrong for the reasons
+ # explained at
+ # http://stackoverflow.com/questions/3047542/building-lxml-for-python-2-7-on-windows/5122521#5122521.
+ if sys.platform in ('win32', 'cygwin') and \
+ 'VS90COMNTOOLS' not in os.environ:
+
+ warnings.warn('Hacking environment to allow binary Python '
+ 'extensions to build. You can make this warning go away '
+ 'by installing Visual Studio 2008. You can download the '
+ 'Express Edition installer from '
+ 'http://go.microsoft.com/?linkid=7729279')
+
+ # We list in order from oldest to newest to prefer the closest
+ # to 2008 so differences are minimized.
+ for ver in ('100', '110', '120'):
+ var = 'VS%sCOMNTOOLS' % ver
+ if var in os.environ:
+ os.environ['VS90COMNTOOLS'] = os.environ[var]
+ break
+
+ for package in packages:
+ handle_package(package)
+
+ sitecustomize = os.path.join(
+ os.path.dirname(os.__file__), 'sitecustomize.py')
+ with open(sitecustomize, 'w') as f:
+ f.write(
+ '# Importing mach_bootstrap has the side effect of\n'
+ '# installing an import hook\n'
+ 'import mach_bootstrap\n'
+ )
+
+ finally:
+ os.environ.pop('MACOSX_DEPLOYMENT_TARGET', None)
+
+ if old_target is not None:
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = old_target
+
+ os.environ.update(old_env_variables)
+
+ def call_setup(self, directory, arguments):
+ """Calls setup.py in a directory."""
+ setup = os.path.join(directory, 'setup.py')
+
+ program = [self.python_path, setup]
+ program.extend(arguments)
+
+ # We probably could call the contents of this file inside the context
+ # of this interpreter using execfile() or similar. However, if global
+ # variables like sys.path are adjusted, this could cause all kinds of
+ # havoc. While this may work, invoking a new process is safer.
+
+ try:
+ output = subprocess.check_output(program, cwd=directory, stderr=subprocess.STDOUT)
+ print(output)
+ except subprocess.CalledProcessError as e:
+ if 'Python.h: No such file or directory' in e.output:
+ print('WARNING: Python.h not found. Install Python development headers.')
+ else:
+ print(e.output)
+
+ raise Exception('Error installing package: %s' % directory)
+
+ def build(self, python=sys.executable):
+ """Build a virtualenv per tree conventions.
+
+ This returns the path of the created virtualenv.
+ """
+
+ self.create(python)
+
+ # We need to populate the virtualenv using the Python executable in
+ # the virtualenv for paths to be proper.
+
+ args = [self.python_path, __file__, 'populate', self.topsrcdir,
+ self.topobjdir, self.virtualenv_root, self.manifest_path]
+
+ result = self._log_process_output(args, cwd=self.topsrcdir)
+
+ if result != 0:
+ raise Exception('Error populating virtualenv.')
+
+ os.utime(self.activate_path, None)
+
+ return self.virtualenv_root
+
+ def activate(self):
+ """Activate the virtualenv in this Python context.
+
+ If you run a random Python script and wish to "activate" the
+ virtualenv, you can simply instantiate an instance of this class
+ and call .ensure() and .activate() to make the virtualenv active.
+ """
+
+ execfile(self.activate_path, dict(__file__=self.activate_path))
+ if isinstance(os.environ['PATH'], unicode):
+ os.environ['PATH'] = os.environ['PATH'].encode('utf-8')
+
+ def install_pip_package(self, package):
+ """Install a package via pip.
+
+ The supplied package is specified using a pip requirement specifier.
+ e.g. 'foo' or 'foo==1.0'.
+
+ If the package is already installed, this is a no-op.
+ """
+ from pip.req import InstallRequirement
+
+ req = InstallRequirement.from_line(package)
+ if req.check_if_exists():
+ return
+
+ args = [
+ 'install',
+ '--use-wheel',
+ package,
+ ]
+
+ return self._run_pip(args)
+
+ def install_pip_requirements(self, path, require_hashes=True):
+ """Install a pip requirements.txt file.
+
+ The supplied path is a text file containing pip requirement
+ specifiers.
+
+ If require_hashes is True, each specifier must contain the
+ expected hash of the downloaded package. See:
+ https://pip.pypa.io/en/stable/reference/pip_install/#hash-checking-mode
+ """
+
+ if not os.path.isabs(path):
+ path = os.path.join(self.topsrcdir, path)
+
+ args = [
+ 'install',
+ '--requirement',
+ path,
+ ]
+
+ if require_hashes:
+ args.append('--require-hashes')
+
+ return self._run_pip(args)
+
+ def _run_pip(self, args):
+ # It's tempting to call pip natively via pip.main(). However,
+ # the current Python interpreter may not be the virtualenv python.
+ # This will confuse pip and cause the package to attempt to install
+ # against the executing interpreter. By creating a new process, we
+ # force the virtualenv's interpreter to be used and all is well.
+ # It /might/ be possible to cheat and set sys.executable to
+ # self.python_path. However, this seems more risk than it's worth.
+ subprocess.check_call([os.path.join(self.bin_path, 'pip')] + args,
+ stderr=subprocess.STDOUT)
+
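+
+# Editor's note: an illustrative sketch (hypothetical paths), not part of
+# the original module. Typical use is ensure() followed by activate();
+# both directory arguments and the manifest path must be absolute.
+def _example_virtualenv_usage(topsrcdir, topobjdir):
+    manager = VirtualenvManager(
+        topsrcdir, topobjdir,
+        os.path.join(topobjdir, '_virtualenv'),
+        sys.stdout,
+        os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
+    manager.ensure()
+    manager.activate()
+    return manager.virtualenv_root
+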
+
+def verify_python_version(log_handle):
+ """Ensure the current version of Python is sufficient."""
+ major, minor, micro = sys.version_info[:3]
+
+ our = LooseVersion('%d.%d.%d' % (major, minor, micro))
+
+ if major != MINIMUM_PYTHON_MAJOR or our < MINIMUM_PYTHON_VERSION:
+ log_handle.write('Python %s or greater (but not Python 3) is '
+ 'required to build. ' % MINIMUM_PYTHON_VERSION)
+ log_handle.write('You are running Python %s.\n' % our)
+
+ if os.name in ('nt', 'ce'):
+ log_handle.write(UPGRADE_WINDOWS)
+ else:
+ log_handle.write(UPGRADE_OTHER)
+
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 5:
+ print('Usage: populate_virtualenv.py /path/to/topsrcdir /path/to/topobjdir /path/to/virtualenv /path/to/virtualenv_manifest')
+ sys.exit(1)
+
+ verify_python_version(sys.stdout)
+
+ topsrcdir, topobjdir, virtualenv_path, manifest_path = sys.argv[1:5]
+ populate = False
+
+ # This should only be called internally.
+ if sys.argv[1] == 'populate':
+ populate = True
+ topsrcdir, topobjdir, virtualenv_path, manifest_path = sys.argv[2:]
+
+ manager = VirtualenvManager(topsrcdir, topobjdir, virtualenv_path,
+ sys.stdout, manifest_path)
+
+ if populate:
+ manager.populate()
+ else:
+ manager.ensure()
+
diff --git a/python/mozbuild/mozpack/__init__.py b/python/mozbuild/mozpack/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozpack/__init__.py
diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py
new file mode 100644
index 000000000..f3015ff21
--- /dev/null
+++ b/python/mozbuild/mozpack/archive.py
@@ -0,0 +1,107 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import bz2
+import gzip
+import stat
+import tarfile
+
+
+# 2016-01-01T00:00:00+0000
+DEFAULT_MTIME = 1451606400
+
+
+def create_tar_from_files(fp, files):
+ """Create a tar file deterministically.
+
+ Receives a dict mapping names of files in the archive to local filesystem
+ paths.
+
+ The files will be archived and written to the passed file handle opened
+ for writing.
+
+ Only regular files can be written.
+
+ FUTURE accept mozpack.files classes for writing
+ FUTURE accept a filename argument (or create APIs to write files)
+ """
+ with tarfile.open(name='', mode='w', fileobj=fp, dereference=True) as tf:
+ for archive_path, fs_path in sorted(files.items()):
+ ti = tf.gettarinfo(fs_path, archive_path)
+
+ if not ti.isreg():
+ raise ValueError('not a regular file: %s' % fs_path)
+
+ # Disallow setuid and setgid bits. This is an arbitrary restriction.
+ # However, since we set uid/gid to root:root, setuid and setgid
+ # would be a glaring security hole if the archive were
+            # extracted as root.
+ if ti.mode & (stat.S_ISUID | stat.S_ISGID):
+ raise ValueError('cannot add file with setuid or setgid set: '
+ '%s' % fs_path)
+
+ # Set uid, gid, username, and group as deterministic values.
+ ti.uid = 0
+ ti.gid = 0
+ ti.uname = ''
+ ti.gname = ''
+
+ # Set mtime to a constant value.
+ ti.mtime = DEFAULT_MTIME
+
+ with open(fs_path, 'rb') as fh:
+ tf.addfile(ti, fh)
+
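+
+# Editor's note: an illustrative sketch (hypothetical helper), not part of
+# the original module. Because owner, group and mtime are pinned, archiving
+# the same mapping twice yields byte-identical output.
+def _example_deterministic_tar(files):
+    from io import BytesIO
+    first, second = BytesIO(), BytesIO()
+    create_tar_from_files(first, files)
+    create_tar_from_files(second, files)
+    return first.getvalue() == second.getvalue()
+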
+
+def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
+ """Create a tar.gz file deterministically from files.
+
+ This is a glorified wrapper around ``create_tar_from_files`` that
+ adds gzip compression.
+
+ The passed file handle should be opened for writing in binary mode.
+ When the function returns, all data has been written to the handle.
+ """
+    # Offset 4-7 in the gzip header contains an mtime. Pin it to a known
+ # value so output is deterministic.
+ gf = gzip.GzipFile(filename=filename or '', mode='wb', fileobj=fp,
+ compresslevel=compresslevel, mtime=DEFAULT_MTIME)
+ with gf:
+ create_tar_from_files(gf, files)
+
+
+class _BZ2Proxy(object):
+ """File object that proxies writes to a bz2 compressor."""
+ def __init__(self, fp, compresslevel=9):
+ self.fp = fp
+ self.compressor = bz2.BZ2Compressor(compresslevel=compresslevel)
+ self.pos = 0
+
+ def tell(self):
+ return self.pos
+
+ def write(self, data):
+ data = self.compressor.compress(data)
+ self.pos += len(data)
+ self.fp.write(data)
+
+ def close(self):
+ data = self.compressor.flush()
+ self.pos += len(data)
+ self.fp.write(data)
+
+
+def create_tar_bz2_from_files(fp, files, compresslevel=9):
+ """Create a tar.bz2 file deterministically from files.
+
+ This is a glorified wrapper around ``create_tar_from_files`` that
+ adds bzip2 compression.
+
+    This function is similar to ``create_tar_gz_from_files()``.
+ """
+ proxy = _BZ2Proxy(fp, compresslevel=compresslevel)
+ create_tar_from_files(proxy, files)
+ proxy.close()
diff --git a/python/mozbuild/mozpack/chrome/__init__.py b/python/mozbuild/mozpack/chrome/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/__init__.py
diff --git a/python/mozbuild/mozpack/chrome/flags.py b/python/mozbuild/mozpack/chrome/flags.py
new file mode 100644
index 000000000..8c5c9a54c
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/flags.py
@@ -0,0 +1,258 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import re
+from distutils.version import LooseVersion
+from mozpack.errors import errors
+from collections import OrderedDict
+
+
+class Flag(object):
+ '''
+ Class for flags in manifest entries in the form:
+ "flag" (same as "flag=true")
+ "flag=yes|true|1"
+ "flag=no|false|0"
+ '''
+ def __init__(self, name):
+ '''
+ Initialize a Flag with the given name.
+ '''
+ self.name = name
+ self.value = None
+
+ def add_definition(self, definition):
+ '''
+ Add a flag value definition. Replaces any previously set value.
+ '''
+ if definition == self.name:
+ self.value = True
+ return
+ assert(definition.startswith(self.name))
+ if definition[len(self.name)] != '=':
+ return errors.fatal('Malformed flag: %s' % definition)
+ value = definition[len(self.name) + 1:]
+ if value in ('yes', 'true', '1', 'no', 'false', '0'):
+ self.value = value
+ else:
+ return errors.fatal('Unknown value in: %s' % definition)
+
+ def matches(self, value):
+ '''
+ Return whether the flag value matches the given value. The values
+ are canonicalized for comparison.
+ '''
+ if value in ('yes', 'true', '1', True):
+ return self.value in ('yes', 'true', '1', True)
+ if value in ('no', 'false', '0', False):
+ return self.value in ('no', 'false', '0', False, None)
+ raise RuntimeError('Invalid value: %s' % value)
+
+ def __str__(self):
+ '''
+ Serialize the flag value in the same form given to the last
+ add_definition() call.
+ '''
+ if self.value is None:
+ return ''
+ if self.value is True:
+ return self.name
+ return '%s=%s' % (self.name, self.value)
+
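+
+# Editor's note: an illustrative sketch (hypothetical flag), not part of the
+# original module. A bare definition means "true"; matches() canonicalizes
+# both sides.
+def _example_flag():
+    flag = Flag('contentaccessible')
+    flag.add_definition('contentaccessible=yes')
+    return flag.matches('true') and str(flag) == 'contentaccessible=yes'
+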
+
+class StringFlag(object):
+ '''
+ Class for string flags in manifest entries in the form:
+ "flag=string"
+ "flag!=string"
+ '''
+ def __init__(self, name):
+ '''
+ Initialize a StringFlag with the given name.
+ '''
+ self.name = name
+ self.values = []
+
+ def add_definition(self, definition):
+ '''
+ Add a string flag definition.
+ '''
+ assert(definition.startswith(self.name))
+ value = definition[len(self.name):]
+ if value.startswith('='):
+ self.values.append(('==', value[1:]))
+ elif value.startswith('!='):
+ self.values.append(('!=', value[2:]))
+ else:
+ return errors.fatal('Malformed flag: %s' % definition)
+
+ def matches(self, value):
+ '''
+ Return whether one of the string flag definitions matches the given
+ value.
+ For example,
+ flag = StringFlag('foo')
+ flag.add_definition('foo!=bar')
+ flag.matches('bar') returns False
+ flag.matches('qux') returns True
+ flag = StringFlag('foo')
+ flag.add_definition('foo=bar')
+ flag.add_definition('foo=baz')
+ flag.matches('bar') returns True
+ flag.matches('baz') returns True
+ flag.matches('qux') returns False
+ '''
+ if not self.values:
+ return True
+ for comparison, val in self.values:
+ if eval('value %s val' % comparison):
+ return True
+ return False
+
+ def __str__(self):
+ '''
+ Serialize the flag definitions in the same form given to each
+ add_definition() call.
+ '''
+ res = []
+ for comparison, val in self.values:
+ if comparison == '==':
+ res.append('%s=%s' % (self.name, val))
+ else:
+ res.append('%s!=%s' % (self.name, val))
+ return ' '.join(res)
+
+
+class VersionFlag(object):
+ '''
+ Class for version flags in manifest entries in the form:
+ "flag=version"
+ "flag<=version"
+ "flag<version"
+ "flag>=version"
+ "flag>version"
+ '''
+ def __init__(self, name):
+ '''
+ Initialize a VersionFlag with the given name.
+ '''
+ self.name = name
+ self.values = []
+
+ def add_definition(self, definition):
+ '''
+ Add a version flag definition.
+ '''
+ assert(definition.startswith(self.name))
+ value = definition[len(self.name):]
+ if value.startswith('='):
+ self.values.append(('==', LooseVersion(value[1:])))
+ elif len(value) > 1 and value[0] in ['<', '>']:
+ if value[1] == '=':
+ if len(value) < 3:
+ return errors.fatal('Malformed flag: %s' % definition)
+ self.values.append((value[0:2], LooseVersion(value[2:])))
+ else:
+ self.values.append((value[0], LooseVersion(value[1:])))
+ else:
+ return errors.fatal('Malformed flag: %s' % definition)
+
+ def matches(self, value):
+ '''
+ Return whether one of the version flag definitions matches the given
+ value.
+ For example,
+ flag = VersionFlag('foo')
+ flag.add_definition('foo>=1.0')
+ flag.matches('1.0') returns True
+ flag.matches('1.1') returns True
+ flag.matches('0.9') returns False
+ flag = VersionFlag('foo')
+ flag.add_definition('foo>=1.0')
+ flag.add_definition('foo<0.5')
+ flag.matches('0.4') returns True
+ flag.matches('1.0') returns True
+ flag.matches('0.6') returns False
+ '''
+ value = LooseVersion(value)
+ if not self.values:
+ return True
+ for comparison, val in self.values:
+ if eval('value %s val' % comparison):
+ return True
+ return False
+
+ def __str__(self):
+ '''
+ Serialize the flag definitions in the same form given to each
+ add_definition() call.
+ '''
+ res = []
+ for comparison, val in self.values:
+ if comparison == '==':
+ res.append('%s=%s' % (self.name, val))
+ else:
+ res.append('%s%s%s' % (self.name, comparison, val))
+ return ' '.join(res)
+
+
+class Flags(OrderedDict):
+ '''
+ Class to handle a set of flags definitions given on a single manifest
+ entry.
+ '''
+ FLAGS = {
+ 'application': StringFlag,
+ 'appversion': VersionFlag,
+ 'platformversion': VersionFlag,
+ 'contentaccessible': Flag,
+ 'os': StringFlag,
+ 'osversion': VersionFlag,
+ 'abi': StringFlag,
+ 'platform': Flag,
+ 'xpcnativewrappers': Flag,
+ 'tablet': Flag,
+ 'process': StringFlag,
+ }
+ RE = re.compile(r'([!<>=]+)')
+
+ def __init__(self, *flags):
+ '''
+ Initialize a set of flags given in string form.
+ flags = Flags('contentaccessible=yes', 'appversion>=3.5')
+ '''
+ OrderedDict.__init__(self)
+ for f in flags:
+ name = self.RE.split(f)
+ name = name[0]
+ if not name in self.FLAGS:
+ errors.fatal('Unknown flag: %s' % name)
+ continue
+ if not name in self:
+ self[name] = self.FLAGS[name](name)
+ self[name].add_definition(f)
+
+ def __str__(self):
+ '''
+ Serialize the set of flags.
+ '''
+ return ' '.join(str(self[k]) for k in self)
+
+ def match(self, **filter):
+ '''
+ Return whether the set of flags match the set of given filters.
+ flags = Flags('contentaccessible=yes', 'appversion>=3.5',
+ 'application=foo')
+ flags.match(application='foo') returns True
+ flags.match(application='foo', appversion='3.5') returns True
+ flags.match(application='foo', appversion='3.0') returns False
+ '''
+ for name, value in filter.iteritems():
+ if not name in self:
+ continue
+ if not self[name].matches(value):
+ return False
+ return True
diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py
new file mode 100644
index 000000000..71241764d
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/manifest.py
@@ -0,0 +1,368 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import re
+import os
+from urlparse import urlparse
+import mozpack.path as mozpath
+from mozpack.chrome.flags import Flags
+from mozpack.errors import errors
+
+
+class ManifestEntry(object):
+ '''
+ Base class for all manifest entry types.
+ Subclasses may define the following class or member variables:
+ - localized: indicates whether the manifest entry is used for localized
+ data.
+ - type: the manifest entry type (e.g. 'content' in
+ 'content global content/global/')
+ - allowed_flags: a set of flags allowed to be defined for the given
+ manifest entry type.
+
+ A manifest entry is attached to a base path, defining where the manifest
+ entry is bound to, and that is used to find relative paths defined in
+ entries.
+ '''
+ localized = False
+ type = None
+ allowed_flags = [
+ 'application',
+ 'platformversion',
+ 'os',
+ 'osversion',
+ 'abi',
+ 'xpcnativewrappers',
+ 'tablet',
+ 'process',
+ ]
+
+ def __init__(self, base, *flags):
+ '''
+ Initialize a manifest entry with the given base path and flags.
+ '''
+ self.base = base
+ self.flags = Flags(*flags)
+ if not all(f in self.allowed_flags for f in self.flags):
+ errors.fatal('%s unsupported for %s manifest entries' %
+ (','.join(f for f in self.flags
+ if not f in self.allowed_flags), self.type))
+
+ def serialize(self, *args):
+ '''
+ Serialize the manifest entry.
+ '''
+ entry = [self.type] + list(args)
+ flags = str(self.flags)
+ if flags:
+ entry.append(flags)
+ return ' '.join(entry)
+
+ def __eq__(self, other):
+ return self.base == other.base and str(self) == str(other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return '<%s@%s>' % (str(self), self.base)
+
+ def move(self, base):
+ '''
+ Return a new manifest entry with a different base path.
+ '''
+ return parse_manifest_line(base, str(self))
+
+ def rebase(self, base):
+ '''
+ Return a new manifest entry with all relative paths defined in the
+ entry relative to a new base directory.
+ The base class doesn't define relative paths, so it is equivalent to
+ move().
+ '''
+ return self.move(base)
+
+
+class ManifestEntryWithRelPath(ManifestEntry):
+ '''
+ Abstract manifest entry type with a relative path definition.
+ '''
+ def __init__(self, base, relpath, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.relpath = relpath
+
+ def __str__(self):
+ return self.serialize(self.relpath)
+
+ def rebase(self, base):
+ '''
+ Return a new manifest entry with all relative paths defined in the
+ entry relative to a new base directory.
+ '''
+ clone = ManifestEntry.rebase(self, base)
+ clone.relpath = mozpath.rebase(self.base, base, self.relpath)
+ return clone
+
+ @property
+ def path(self):
+ return mozpath.normpath(mozpath.join(self.base,
+ self.relpath))
+
+
+class Manifest(ManifestEntryWithRelPath):
+ '''
+ Class for 'manifest' entries.
+ manifest some/path/to/another.manifest
+ '''
+ type = 'manifest'
+
+
+class ManifestChrome(ManifestEntryWithRelPath):
+ '''
+ Abstract class for chrome entries.
+ '''
+ def __init__(self, base, name, relpath, *flags):
+ ManifestEntryWithRelPath.__init__(self, base, relpath, *flags)
+ self.name = name
+
+ @property
+ def location(self):
+ return mozpath.join(self.base, self.relpath)
+
+
+class ManifestContent(ManifestChrome):
+ '''
+ Class for 'content' entries.
+ content global content/global/
+ '''
+ type = 'content'
+ allowed_flags = ManifestChrome.allowed_flags + [
+ 'contentaccessible',
+ 'platform',
+ ]
+
+ def __str__(self):
+ return self.serialize(self.name, self.relpath)
+
+
+class ManifestMultiContent(ManifestChrome):
+ '''
+ Abstract class for chrome entries with multiple definitions.
+ Used for locale and skin entries.
+ '''
+ type = None
+
+ def __init__(self, base, name, id, relpath, *flags):
+ ManifestChrome.__init__(self, base, name, relpath, *flags)
+ self.id = id
+
+ def __str__(self):
+ return self.serialize(self.name, self.id, self.relpath)
+
+
+class ManifestLocale(ManifestMultiContent):
+ '''
+ Class for 'locale' entries.
+ locale global en-US content/en-US/
+ locale global fr content/fr/
+ '''
+ localized = True
+ type = 'locale'
+
+
+class ManifestSkin(ManifestMultiContent):
+ '''
+ Class for 'skin' entries.
+ skin global classic/1.0 content/skin/classic/
+ '''
+ type = 'skin'
+
+
+class ManifestOverload(ManifestEntry):
+ '''
+ Abstract class for chrome entries defining some kind of overloading.
+ Used for overlay, override or style entries.
+ '''
+ type = None
+
+ def __init__(self, base, overloaded, overload, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.overloaded = overloaded
+ self.overload = overload
+
+ def __str__(self):
+ return self.serialize(self.overloaded, self.overload)
+
+ @property
+ def localized(self):
+ u = urlparse(self.overload)
+ return u.scheme == 'chrome' and \
+ u.path.split('/')[0:2] == ['', 'locale']
+
+
+class ManifestOverlay(ManifestOverload):
+ '''
+ Class for 'overlay' entries.
+ overlay chrome://global/content/viewSource.xul \
+ chrome://browser/content/viewSourceOverlay.xul
+ '''
+ type = 'overlay'
+
+
+class ManifestStyle(ManifestOverload):
+ '''
+ Class for 'style' entries.
+ style chrome://global/content/customizeToolbar.xul \
+ chrome://browser/skin/
+ '''
+ type = 'style'
+
+
+class ManifestOverride(ManifestOverload):
+ '''
+ Class for 'override' entries.
+ override chrome://global/locale/netError.dtd \
+ chrome://browser/locale/netError.dtd
+ '''
+ type = 'override'
+
+
+class ManifestResource(ManifestEntry):
+ '''
+ Class for 'resource' entries.
+ resource gre-resources toolkit/res/
+ resource services-sync resource://gre/modules/services-sync/
+
+ The target may be a relative path or a resource or chrome url.
+ '''
+ type = 'resource'
+
+ def __init__(self, base, name, target, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.name = name
+ self.target = target
+
+ def __str__(self):
+ return self.serialize(self.name, self.target)
+
+ def rebase(self, base):
+ u = urlparse(self.target)
+ if u.scheme and u.scheme != 'jar':
+ return ManifestEntry.rebase(self, base)
+ clone = ManifestEntry.rebase(self, base)
+ clone.target = mozpath.rebase(self.base, base, self.target)
+ return clone
+
+
+class ManifestBinaryComponent(ManifestEntryWithRelPath):
+ '''
+ Class for 'binary-component' entries.
+ binary-component some/path/to/a/component.dll
+ '''
+ type = 'binary-component'
+
+
+class ManifestComponent(ManifestEntryWithRelPath):
+ '''
+ Class for 'component' entries.
+ component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js
+ '''
+ type = 'component'
+
+ def __init__(self, base, cid, file, *flags):
+ ManifestEntryWithRelPath.__init__(self, base, file, *flags)
+ self.cid = cid
+
+ def __str__(self):
+ return self.serialize(self.cid, self.relpath)
+
+
+class ManifestInterfaces(ManifestEntryWithRelPath):
+ '''
+ Class for 'interfaces' entries.
+ interfaces foo.xpt
+ '''
+ type = 'interfaces'
+
+
+class ManifestCategory(ManifestEntry):
+ '''
+ Class for 'category' entries.
+        category command-line-handler m-browser @mozilla.org/browser/clh;1
+ '''
+ type = 'category'
+
+ def __init__(self, base, category, name, value, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.category = category
+ self.name = name
+ self.value = value
+
+ def __str__(self):
+ return self.serialize(self.category, self.name, self.value)
+
+
+class ManifestContract(ManifestEntry):
+ '''
+ Class for 'contract' entries.
+ contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68}
+ '''
+ type = 'contract'
+
+ def __init__(self, base, contractID, cid, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.contractID = contractID
+ self.cid = cid
+
+ def __str__(self):
+ return self.serialize(self.contractID, self.cid)
+
+# All manifest classes by their type name.
+MANIFESTS_TYPES = dict([(c.type, c) for c in globals().values()
+ if type(c) == type and issubclass(c, ManifestEntry)
+ and hasattr(c, 'type') and c.type])
+
+MANIFEST_RE = re.compile(r'^#.*$')
+
+
+def parse_manifest_line(base, line):
+ '''
+ Parse a line from a manifest file with the given base directory and
+ return the corresponding ManifestEntry instance.
+ '''
+ # Remove comments
+ cmd = MANIFEST_RE.sub('', line).strip().split()
+ if not cmd:
+ return None
+ if not cmd[0] in MANIFESTS_TYPES:
+ return errors.fatal('Unknown manifest directive: %s' % cmd[0])
+ return MANIFESTS_TYPES[cmd[0]](base, *cmd[1:])
+
+
+def parse_manifest(root, path, fileobj=None):
+ '''
+ Parse a manifest file.
+ '''
+ base = mozpath.dirname(path)
+ if root:
+ path = os.path.normpath(os.path.abspath(os.path.join(root, path)))
+ if not fileobj:
+ fileobj = open(path)
+ linenum = 0
+ for line in fileobj:
+ linenum += 1
+ with errors.context(path, linenum):
+ e = parse_manifest_line(base, line)
+ if e:
+ yield e
+
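+
+# Editor's note: an illustrative sketch (hypothetical manifest content), not
+# part of the original module. Entries' base path is derived from the
+# manifest's own location ('chrome' here).
+def _example_parse_manifest():
+    from StringIO import StringIO
+    data = StringIO('content global content/global/\n'
+                    '# comments are ignored\n'
+                    'locale global en-US content/en-US/\n')
+    # Yields a ManifestContent and a ManifestLocale instance.
+    return list(parse_manifest(None, 'chrome/chrome.manifest', data))
+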
+
+def is_manifest(path):
+ '''
+ Return whether the given path is that of a manifest file.
+ '''
+ return path.endswith('.manifest') and not path.endswith('.CRT.manifest') \
+ and not path.endswith('.exe.manifest')
diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py
new file mode 100644
index 000000000..386930fe7
--- /dev/null
+++ b/python/mozbuild/mozpack/copier.py
@@ -0,0 +1,568 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import stat
+import sys
+
+from mozpack.errors import errors
+from mozpack.files import (
+ BaseFile,
+ Dest,
+)
+import mozpack.path as mozpath
+import errno
+from collections import (
+ Counter,
+ OrderedDict,
+)
+import concurrent.futures as futures
+
+
+class FileRegistry(object):
+ '''
+ Generic container to keep track of a set of BaseFile instances. It
+ preserves the order under which the files are added, but doesn't keep
+ track of empty directories (directories are not stored at all).
+ The paths associated with the BaseFile instances are relative to an
+ unspecified (virtual) root directory.
+
+ registry = FileRegistry()
+ registry.add('foo/bar', file_instance)
+ '''
+
+ def __init__(self):
+ self._files = OrderedDict()
+ self._required_directories = Counter()
+ self._partial_paths_cache = {}
+
+ def _partial_paths(self, path):
+ '''
+ Turn "foo/bar/baz/zot" into ["foo/bar/baz", "foo/bar", "foo"].
+ '''
+ dir_name = path.rpartition('/')[0]
+ if not dir_name:
+ return []
+
+ partial_paths = self._partial_paths_cache.get(dir_name)
+ if partial_paths:
+ return partial_paths
+
+ partial_paths = [dir_name] + self._partial_paths(dir_name)
+
+ self._partial_paths_cache[dir_name] = partial_paths
+ return partial_paths
+
+ def add(self, path, content):
+ '''
+ Add a BaseFile instance to the container, under the given path.
+ '''
+ assert isinstance(content, BaseFile)
+ if path in self._files:
+ return errors.error("%s already added" % path)
+ if self._required_directories[path] > 0:
+ return errors.error("Can't add %s: it is a required directory" %
+ path)
+ # Check whether any parent of the given path is already stored
+ partial_paths = self._partial_paths(path)
+ for partial_path in partial_paths:
+ if partial_path in self._files:
+ return errors.error("Can't add %s: %s is a file" %
+ (path, partial_path))
+ self._files[path] = content
+ self._required_directories.update(partial_paths)
+
+ def match(self, pattern):
+ '''
+ Return the list of paths, stored in the container, matching the
+ given pattern. See the mozpack.path.match documentation for a
+ description of the handled patterns.
+ '''
+ if '*' in pattern:
+ return [p for p in self.paths()
+ if mozpath.match(p, pattern)]
+ if pattern == '':
+ return self.paths()
+ if pattern in self._files:
+ return [pattern]
+ return [p for p in self.paths()
+ if mozpath.basedir(p, [pattern]) == pattern]
+
+ def remove(self, pattern):
+ '''
+ Remove paths matching the given pattern from the container. See the
+ mozpack.path.match documentation for a description of the handled
+ patterns.
+ '''
+ items = self.match(pattern)
+ if not items:
+ return errors.error("Can't remove %s: %s" % (pattern,
+ "not matching anything previously added"))
+ for i in items:
+ del self._files[i]
+ self._required_directories.subtract(self._partial_paths(i))
+
+ def paths(self):
+ '''
+ Return all paths stored in the container, in the order they were added.
+ '''
+ return self._files.keys()
+
+ def __len__(self):
+ '''
+ Return number of paths stored in the container.
+ '''
+ return len(self._files)
+
+ def __contains__(self, pattern):
+ raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
+ self.__class__.__name__)
+
+ def contains(self, pattern):
+ '''
+ Return whether the container contains paths matching the given
+ pattern. See the mozpack.path.match documentation for a description of
+ the handled patterns.
+ '''
+ return len(self.match(pattern)) > 0
+
+ def __getitem__(self, path):
+ '''
+ Return the BaseFile instance stored in the container for the given
+ path.
+ '''
+ return self._files[path]
+
+ def __iter__(self):
+ '''
+ Iterate over all (path, BaseFile instance) pairs from the container.
+ for path, file in registry:
+ (...)
+ '''
+ return self._files.iteritems()
+
+ def required_directories(self):
+ '''
+ Return the set of directories required by the paths in the container,
+ in no particular order. The returned directories are relative to an
+ unspecified (virtual) root directory (and do not include said root
+ directory).
+ '''
+ return set(k for k, v in self._required_directories.items() if v > 0)
+
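+# Illustrative sketch (not part of the original module): minimal use of
+# FileRegistry. GeneratedFile comes from mozpack.files; the paths are
+# assumptions for the example.
+def _example_file_registry():
+    from mozpack.files import GeneratedFile
+    registry = FileRegistry()
+    registry.add('chrome/foo.txt', GeneratedFile('foo'))
+    registry.add('chrome/sub/bar.txt', GeneratedFile('bar'))
+    assert registry.contains('chrome/foo.txt')
+    # Directories are implied by the registered paths, never stored.
+    assert registry.required_directories() == {'chrome', 'chrome/sub'}
+    return list(registry.paths())  # insertion order
+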
+
+class FileRegistrySubtree(object):
+ '''A proxy class to give access to a subtree of an existing FileRegistry.
+
+ Note this doesn't implement the whole FileRegistry interface.'''
+ def __new__(cls, base, registry):
+ if not base:
+ return registry
+ return object.__new__(cls)
+
+ def __init__(self, base, registry):
+ self._base = base
+ self._registry = registry
+
+ def _get_path(self, path):
+ # mozpath.join will return a trailing slash if path is empty, and we
+ # don't want that.
+ return mozpath.join(self._base, path) if path else self._base
+
+ def add(self, path, content):
+ return self._registry.add(self._get_path(path), content)
+
+ def match(self, pattern):
+ return [mozpath.relpath(p, self._base)
+ for p in self._registry.match(self._get_path(pattern))]
+
+ def remove(self, pattern):
+ return self._registry.remove(self._get_path(pattern))
+
+ def paths(self):
+ return [p for p, f in self]
+
+ def __len__(self):
+ return len(self.paths())
+
+ def contains(self, pattern):
+ return self._registry.contains(self._get_path(pattern))
+
+ def __getitem__(self, path):
+ return self._registry[self._get_path(path)]
+
+ def __iter__(self):
+ for p, f in self._registry:
+ if mozpath.basedir(p, [self._base]):
+ yield mozpath.relpath(p, self._base), f
+
+
+class FileCopyResult(object):
+ """Represents results of a FileCopier.copy operation."""
+
+ def __init__(self):
+ self.updated_files = set()
+ self.existing_files = set()
+ self.removed_files = set()
+ self.removed_directories = set()
+
+ @property
+ def updated_files_count(self):
+ return len(self.updated_files)
+
+ @property
+ def existing_files_count(self):
+ return len(self.existing_files)
+
+ @property
+ def removed_files_count(self):
+ return len(self.removed_files)
+
+ @property
+ def removed_directories_count(self):
+ return len(self.removed_directories)
+
+
+class FileCopier(FileRegistry):
+ '''
+ FileRegistry with the ability to copy the registered files to a separate
+ directory.
+ '''
+ def copy(self, destination, skip_if_older=True,
+ remove_unaccounted=True,
+ remove_all_directory_symlinks=True,
+ remove_empty_directories=True):
+ '''
+ Copy all registered files to the given destination path. The given
+ destination can be an existing directory, or not exist at all. It
+ can't be e.g. a file.
+ The copy process acts a bit like rsync: files are not copied when they
+        don't need to be (see mozpack.files for details on file.copy).
+
+ By default, files in the destination directory that aren't
+ registered are removed and empty directories are deleted. In
+ addition, all directory symlinks in the destination directory
+ are deleted: this is a conservative approach to ensure that we
+        never accidentally write files into a directory that is not the
+ destination directory. In the worst case, we might have a
+ directory symlink in the object directory to the source
+ directory.
+
+ To disable removing of unregistered files, pass
+ remove_unaccounted=False. To disable removing empty
+ directories, pass remove_empty_directories=False. In rare
+ cases, you might want to maintain directory symlinks in the
+ destination directory (at least those that are not required to
+ be regular directories): pass
+ remove_all_directory_symlinks=False. Exercise caution with
+ this flag: you almost certainly do not want to preserve
+ directory symlinks.
+
+ Returns a FileCopyResult that details what changed.
+ '''
+ assert isinstance(destination, basestring)
+ assert not os.path.exists(destination) or os.path.isdir(destination)
+
+ result = FileCopyResult()
+ have_symlinks = hasattr(os, 'symlink')
+ destination = os.path.normpath(destination)
+
+ # We create the destination directory specially. We can't do this as
+ # part of the loop doing mkdir() below because that loop munges
+        # symlinks and permissions, and parent directories of the destination
+ # directory may have their own weird schema. The contract is we only
+ # manage children of destination, not its parents.
+ try:
+ os.makedirs(destination)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ # Because we could be handling thousands of files, code in this
+ # function is optimized to minimize system calls. We prefer CPU time
+ # in Python over possibly I/O bound filesystem calls to stat() and
+ # friends.
+
+ required_dirs = set([destination])
+ required_dirs |= set(os.path.normpath(os.path.join(destination, d))
+ for d in self.required_directories())
+
+ # Ensure destination directories are in place and proper.
+ #
+ # The "proper" bit is important. We need to ensure that directories
+ # have appropriate permissions or we will be unable to discover
+ # and write files. Furthermore, we need to verify directories aren't
+ # symlinks.
+ #
+ # Symlinked directories (a symlink whose target is a directory) are
+ # incompatible with us because our manifest talks in terms of files,
+ # not directories. If we leave symlinked directories unchecked, we
+ # would blindly follow symlinks and this might confuse file
+ # installation. For example, if an existing directory is a symlink
+ # to directory X and we attempt to install a symlink in this directory
+ # to a file in directory X, we may create a recursive symlink!
+ for d in sorted(required_dirs, key=len):
+ try:
+ os.mkdir(d)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+ # We allow the destination to be a symlink because the caller
+ # is responsible for managing the destination and we assume
+ # they know what they are doing.
+ if have_symlinks and d != destination:
+ st = os.lstat(d)
+ if stat.S_ISLNK(st.st_mode):
+ # While we have remove_unaccounted, it doesn't apply
+ # to directory symlinks because if it did, our behavior
+ # could be very wrong.
+ os.remove(d)
+ os.mkdir(d)
+
+ if not os.access(d, os.W_OK):
+ umask = os.umask(0o077)
+ os.umask(umask)
+ os.chmod(d, 0o777 & ~umask)
+
+ if isinstance(remove_unaccounted, FileRegistry):
+ existing_files = set(os.path.normpath(os.path.join(destination, p))
+ for p in remove_unaccounted.paths())
+ existing_dirs = set(os.path.normpath(os.path.join(destination, p))
+ for p in remove_unaccounted
+ .required_directories())
+ existing_dirs |= {os.path.normpath(destination)}
+ else:
+ # While we have remove_unaccounted, it doesn't apply to empty
+ # directories because it wouldn't make sense: an empty directory
+ # is empty, so removing it should have no effect.
+ existing_dirs = set()
+ existing_files = set()
+ for root, dirs, files in os.walk(destination):
+ # We need to perform the same symlink detection as above.
+ # os.walk() doesn't follow symlinks into directories by
+ # default, so we need to check dirs (we can't wait for root).
+ if have_symlinks:
+ filtered = []
+ for d in dirs:
+ full = os.path.join(root, d)
+ st = os.lstat(full)
+ if stat.S_ISLNK(st.st_mode):
+ # This directory symlink is not a required
+ # directory: any such symlink would have been
+ # removed and a directory created above.
+ if remove_all_directory_symlinks:
+ os.remove(full)
+ result.removed_files.add(
+ os.path.normpath(full))
+ else:
+ existing_files.add(os.path.normpath(full))
+ else:
+ filtered.append(d)
+
+ dirs[:] = filtered
+
+ existing_dirs.add(os.path.normpath(root))
+
+ for d in dirs:
+ existing_dirs.add(os.path.normpath(os.path.join(root, d)))
+
+ for f in files:
+ existing_files.add(os.path.normpath(os.path.join(root, f)))
+
+ # Now we reconcile the state of the world against what we want.
+ dest_files = set()
+
+ # Install files.
+ #
+ # Creating/appending new files on Windows/NTFS is slow. So we use a
+ # thread pool to speed it up significantly. The performance of this
+ # loop is so critical to common build operations on Linux that the
+ # overhead of the thread pool is worth avoiding, so we have 2 code
+ # paths. We also employ a low water mark to prevent thread pool
+        # creation if the number of files is too small to benefit.
+ copy_results = []
+ if sys.platform == 'win32' and len(self) > 100:
+ with futures.ThreadPoolExecutor(4) as e:
+ fs = []
+ for p, f in self:
+ destfile = os.path.normpath(os.path.join(destination, p))
+ fs.append((destfile, e.submit(f.copy, destfile, skip_if_older)))
+
+            copy_results = [(destfile, f.result()) for destfile, f in fs]
+ else:
+ for p, f in self:
+ destfile = os.path.normpath(os.path.join(destination, p))
+ copy_results.append((destfile, f.copy(destfile, skip_if_older)))
+
+ for destfile, copy_result in copy_results:
+ dest_files.add(destfile)
+ if copy_result:
+ result.updated_files.add(destfile)
+ else:
+ result.existing_files.add(destfile)
+
+ # Remove files no longer accounted for.
+ if remove_unaccounted:
+ for f in existing_files - dest_files:
+ # Windows requires write access to remove files.
+ if os.name == 'nt' and not os.access(f, os.W_OK):
+ # It doesn't matter what we set permissions to since we
+ # will remove this file shortly.
+ os.chmod(f, 0o600)
+
+ os.remove(f)
+ result.removed_files.add(f)
+
+ if not remove_empty_directories:
+ return result
+
+ # Figure out which directories can be removed. This is complicated
+ # by the fact we optionally remove existing files. This would be easy
+ # if we walked the directory tree after installing files. But, we're
+ # trying to minimize system calls.
+
+ # Start with the ideal set.
+ remove_dirs = existing_dirs - required_dirs
+
+ # Then don't remove directories if we didn't remove unaccounted files
+ # and one of those files exists.
+ if not remove_unaccounted:
+ parents = set()
+ pathsep = os.path.sep
+ for f in existing_files:
+ path = f
+ while True:
+ # All the paths are normalized and relative by this point,
+ # so os.path.dirname would only do extra work.
+ dirname = path.rpartition(pathsep)[0]
+ if dirname in parents:
+ break
+ parents.add(dirname)
+ path = dirname
+ remove_dirs -= parents
+
+ # Remove empty directories that aren't required.
+ for d in sorted(remove_dirs, key=len, reverse=True):
+ try:
+ try:
+ os.rmdir(d)
+ except OSError as e:
+ if e.errno in (errno.EPERM, errno.EACCES):
+ # Permissions may not allow deletion. So ensure write
+ # access is in place before attempting to rmdir again.
+ os.chmod(d, 0o700)
+ os.rmdir(d)
+ else:
+ raise
+ except OSError as e:
+            # If remove_unaccounted is a FileRegistry, then we have a
+ # list of directories that may not be empty, so ignore rmdir
+ # ENOTEMPTY errors for them.
+ if (isinstance(remove_unaccounted, FileRegistry) and
+ e.errno == errno.ENOTEMPTY):
+ continue
+ raise
+ result.removed_directories.add(d)
+
+ return result
+
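+# Illustrative sketch (not part of the original module): copying a small
+# registry into a destination directory. The path 'obj/dist' is an
+# assumption for the example.
+def _example_file_copier():
+    from mozpack.files import GeneratedFile
+    copier = FileCopier()
+    copier.add('greeting.txt', GeneratedFile('hello\n'))
+    # rsync-like: unchanged files are skipped, stray files are removed.
+    result = copier.copy('obj/dist')
+    return result.updated_files_count
+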
+
+class Jarrer(FileRegistry, BaseFile):
+ '''
+ FileRegistry with the ability to copy and pack the registered files as a
+ jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
+ '''
+ def __init__(self, compress=True, optimize=True):
+ '''
+ Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
+ for details on the compress and optimize arguments.
+ '''
+ self.compress = compress
+ self.optimize = optimize
+ self._preload = []
+ self._compress_options = {} # Map path to compress boolean option.
+ FileRegistry.__init__(self)
+
+ def add(self, path, content, compress=None):
+ FileRegistry.add(self, path, content)
+ if compress is not None:
+ self._compress_options[path] = compress
+
+ def copy(self, dest, skip_if_older=True):
+ '''
+ Pack all registered files in the given destination jar. The given
+        destination jar may be a path to a jar file, or a Dest instance for
+ a jar file.
+ If the destination jar file exists, its (compressed) contents are used
+ instead of the registered BaseFile instances when appropriate.
+ '''
+ class DeflaterDest(Dest):
+ '''
+ Dest-like class, reading from a file-like object initially, but
+ switching to a Deflater object if written to.
+
+ dest = DeflaterDest(original_file)
+ dest.read() # Reads original_file
+            dest.write(data) # Creates a Deflater and writes data there
+ dest.read() # Re-opens the Deflater and reads from it
+ '''
+ def __init__(self, orig=None, compress=True):
+ self.mode = None
+ self.deflater = orig
+ self.compress = compress
+
+ def read(self, length=-1):
+ if self.mode != 'r':
+ assert self.mode is None
+ self.mode = 'r'
+ return self.deflater.read(length)
+
+ def write(self, data):
+ if self.mode != 'w':
+ from mozpack.mozjar import Deflater
+ self.deflater = Deflater(self.compress)
+ self.mode = 'w'
+ self.deflater.write(data)
+
+ def exists(self):
+ return self.deflater is not None
+
+ if isinstance(dest, basestring):
+ dest = Dest(dest)
+ assert isinstance(dest, Dest)
+
+ from mozpack.mozjar import JarWriter, JarReader
+ try:
+ old_jar = JarReader(fileobj=dest)
+ except Exception:
+ old_jar = []
+
+ old_contents = dict([(f.filename, f) for f in old_jar])
+
+ with JarWriter(fileobj=dest, compress=self.compress,
+ optimize=self.optimize) as jar:
+ for path, file in self:
+ compress = self._compress_options.get(path, self.compress)
+
+ if path in old_contents:
+ deflater = DeflaterDest(old_contents[path], compress)
+ else:
+ deflater = DeflaterDest(compress=compress)
+ file.copy(deflater, skip_if_older)
+ jar.add(path, deflater.deflater, mode=file.mode, compress=compress)
+ if self._preload:
+ jar.preload(self._preload)
+
+ def open(self):
+ raise RuntimeError('unsupported')
+
+ def preload(self, paths):
+ '''
+ Add the given set of paths to the list of preloaded files. See
+ mozpack.mozjar.JarWriter documentation for details on jar preloading.
+ '''
+ self._preload.extend(paths)
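+
+
+# Illustrative sketch (not part of the original module): packing two
+# registered files into a jar. The output path is an assumption.
+def _example_jarrer():
+    from mozpack.files import GeneratedFile
+    jarrer = Jarrer(compress=True)
+    jarrer.add('chrome.manifest', GeneratedFile('manifest chrome/foo.manifest\n'))
+    jarrer.add('chrome/foo.manifest', GeneratedFile(''), compress=False)
+    # Reuses compressed data from omni.ja if it already exists.
+    jarrer.copy('omni.ja')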
diff --git a/python/mozbuild/mozpack/dmg.py b/python/mozbuild/mozpack/dmg.py
new file mode 100644
index 000000000..036302214
--- /dev/null
+++ b/python/mozbuild/mozpack/dmg.py
@@ -0,0 +1,121 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import mozfile
+import os
+import platform
+import shutil
+import subprocess
+
+is_linux = platform.system() == 'Linux'
+
+def mkdir(dir):
+ if not os.path.isdir(dir):
+ try:
+ os.makedirs(dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def chmod(dir):
+ 'Set permissions of DMG contents correctly'
+ subprocess.check_call(['chmod', '-R', 'a+rX,a-st,u+w,go-w', dir])
+
+
+def rsync(source, dest):
+ 'rsync the contents of directory source into directory dest'
+ # Ensure a trailing slash so rsync copies the *contents* of source.
+ if not source.endswith('/'):
+ source += '/'
+ subprocess.check_call(['rsync', '-a', '--copy-unsafe-links',
+ source, dest])
+
+
+def set_folder_icon(dir):
+ 'Set HFS attributes of dir to use a custom icon'
+ if not is_linux:
+ #TODO: bug 1197325 - figure out how to support this on Linux
+ subprocess.check_call(['SetFile', '-a', 'C', dir])
+
+
+def create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name):
+ 'Given a prepared directory stagedir, produce a DMG at output_dmg.'
+ if not is_linux:
+ # Running on OS X
+ hybrid = os.path.join(tmpdir, 'hybrid.dmg')
+ subprocess.check_call(['hdiutil', 'makehybrid', '-hfs',
+ '-hfs-volume-name', volume_name,
+ '-hfs-openfolder', stagedir,
+ '-ov', stagedir,
+ '-o', hybrid])
+ subprocess.check_call(['hdiutil', 'convert', '-format', 'UDBZ',
+ '-imagekey', 'bzip2-level=9',
+ '-ov', hybrid, '-o', output_dmg])
+ else:
+ import buildconfig
+ uncompressed = os.path.join(tmpdir, 'uncompressed.dmg')
+ subprocess.check_call([
+ buildconfig.substs['GENISOIMAGE'],
+ '-V', volume_name,
+ '-D', '-R', '-apple', '-no-pad',
+ '-o', uncompressed,
+ stagedir
+ ])
+ subprocess.check_call([
+ buildconfig.substs['DMG_TOOL'],
+ 'dmg',
+ uncompressed,
+ output_dmg
+ ],
+ # dmg is seriously chatty
+ stdout=open(os.devnull, 'wb'))
+
+def check_tools(*tools):
+ '''
+ Check that each tool named in tools exists in SUBSTS and is executable.
+ '''
+ import buildconfig
+ for tool in tools:
+ path = buildconfig.substs[tool]
+ if not path:
+ raise Exception('Required tool "%s" not found' % tool)
+ if not os.path.isfile(path):
+ raise Exception('Required tool "%s" not found at path "%s"' % (tool, path))
+ if not os.access(path, os.X_OK):
+ raise Exception('Required tool "%s" at path "%s" is not executable' % (tool, path))
+
+
+def create_dmg(source_directory, output_dmg, volume_name, extra_files):
+ '''
+ Create a DMG disk image at the path output_dmg from source_directory.
+
+ Use volume_name as the disk image volume name, and
+ use extra_files as a list of tuples of (filename, relative path) to copy
+ into the disk image.
+ '''
+ if platform.system() not in ('Darwin', 'Linux'):
+ raise Exception("Don't know how to build a DMG on '%s'" % platform.system())
+
+ if is_linux:
+ check_tools('DMG_TOOL', 'GENISOIMAGE')
+ with mozfile.TemporaryDirectory() as tmpdir:
+ stagedir = os.path.join(tmpdir, 'stage')
+ os.mkdir(stagedir)
+ # Copy the app bundle over using rsync
+ rsync(source_directory, stagedir)
+ # Copy extra files
+ for source, target in extra_files:
+ full_target = os.path.join(stagedir, target)
+ mkdir(os.path.dirname(full_target))
+ shutil.copyfile(source, full_target)
+ # Make a symlink to /Applications. The symlink name is a space
+ # so we don't have to localize it. The Applications folder icon
+ # will be shown in Finder, which should be clear enough for users.
+ os.symlink('/Applications', os.path.join(stagedir, ' '))
+ # Set the folder attributes to use a custom icon
+ set_folder_icon(stagedir)
+ chmod(stagedir)
+ create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name)
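+
+
+# Illustrative sketch (not part of the original module): a typical call,
+# with hypothetical paths. On Linux this requires DMG_TOOL and GENISOIMAGE
+# in the build configuration; on OS X it drives hdiutil.
+def _example_create_dmg():
+    create_dmg('obj/dist/firefox', 'firefox.dmg', 'Firefox',
+               [('browser/branding/background.png',
+                 '.background/background.png')])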
diff --git a/python/mozbuild/mozpack/errors.py b/python/mozbuild/mozpack/errors.py
new file mode 100644
index 000000000..8b4b80072
--- /dev/null
+++ b/python/mozbuild/mozpack/errors.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import sys
+from contextlib import contextmanager
+
+
+class ErrorMessage(Exception):
+ '''Exception type raised from errors.error() and errors.fatal()'''
+
+
+class AccumulatedErrors(Exception):
+ '''Exception type raised from errors.accumulate()'''
+
+
+class ErrorCollector(object):
+ '''
+ Error handling/logging class. A global instance, errors, is provided for
+ convenience.
+
+ Warnings, errors and fatal errors may be logged by calls to the following
+ functions:
+ errors.warn(message)
+ errors.error(message)
+ errors.fatal(message)
+
+ Warnings only send the message on the logging output, while errors and
+ fatal errors send the message and throw an ErrorMessage exception. The
+ exception, however, may be deferred. See further below.
+
+ Errors may be ignored by calling:
+ errors.ignore_errors()
+
+ After calling that function, only fatal errors throw an exception.
+
+ The warnings, errors or fatal errors messages may be augmented with context
+ information when a context is provided. Context is defined by a pair
+ (filename, linenumber), and may be set with errors.context() used as a
+ context manager:
+ with errors.context(filename, linenumber):
+ errors.warn(message)
+
+ Arbitrary nesting is supported, both for errors.context calls:
+ with errors.context(filename1, linenumber1):
+ errors.warn(message)
+ with errors.context(filename2, linenumber2):
+ errors.warn(message)
+
+ as well as for function calls:
+ def func():
+ errors.warn(message)
+ with errors.context(filename, linenumber):
+ func()
+
+ Errors and fatal errors can have their exception thrown at a later time,
+ allowing for several different errors to be reported at once before
+ throwing. This is achieved with errors.accumulate() as a context manager:
+ with errors.accumulate():
+ if test1:
+ errors.error(message1)
+ if test2:
+ errors.error(message2)
+
+    In such cases, a single AccumulatedErrors exception is thrown, but it
+    doesn't contain information about the individual errors. The logged
+    messages do.
+ '''
+ out = sys.stderr
+ WARN = 1
+ ERROR = 2
+ FATAL = 3
+ _level = ERROR
+ _context = []
+ _count = None
+
+ def ignore_errors(self, ignore=True):
+ if ignore:
+ self._level = self.FATAL
+ else:
+ self._level = self.ERROR
+
+ def _full_message(self, level, msg):
+ if level >= self._level:
+ level = 'Error'
+ else:
+ level = 'Warning'
+ if self._context:
+ file, line = self._context[-1]
+ return "%s: %s:%d: %s" % (level, file, line, msg)
+ return "%s: %s" % (level, msg)
+
+ def _handle(self, level, msg):
+ msg = self._full_message(level, msg)
+ if level >= self._level:
+ if self._count is None:
+ raise ErrorMessage(msg)
+ self._count += 1
+ print >>self.out, msg
+
+ def fatal(self, msg):
+ self._handle(self.FATAL, msg)
+
+ def error(self, msg):
+ self._handle(self.ERROR, msg)
+
+ def warn(self, msg):
+ self._handle(self.WARN, msg)
+
+ def get_context(self):
+ if self._context:
+ return self._context[-1]
+
+ @contextmanager
+ def context(self, file, line):
+ if file and line:
+ self._context.append((file, line))
+ yield
+ if file and line:
+ self._context.pop()
+
+ @contextmanager
+ def accumulate(self):
+ assert self._count is None
+ self._count = 0
+ yield
+ count = self._count
+ self._count = None
+ if count:
+ raise AccumulatedErrors()
+
+ @property
+ def count(self):
+ # _count can be None.
+ return self._count if self._count else 0
+
+
+errors = ErrorCollector()
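+
+
+# Illustrative sketch (not part of the original module): accumulating
+# several errors, each with file/line context, then handling the single
+# AccumulatedErrors raised on exit. The file name is an assumption.
+def _example_accumulate():
+    try:
+        with errors.accumulate():
+            with errors.context('foo.manifest', 1):
+                errors.error('first problem')
+            with errors.context('foo.manifest', 2):
+                errors.error('second problem')
+    except AccumulatedErrors:
+        # Both messages were already printed to errors.out with their
+        # "file:line:" context before the exception was raised.
+        return True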
diff --git a/python/mozbuild/mozpack/executables.py b/python/mozbuild/mozpack/executables.py
new file mode 100644
index 000000000..c943564fa
--- /dev/null
+++ b/python/mozbuild/mozpack/executables.py
@@ -0,0 +1,124 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import struct
+import subprocess
+from mozpack.errors import errors
+
+MACHO_SIGNATURES = [
+ 0xfeedface, # mach-o 32-bits big endian
+ 0xcefaedfe, # mach-o 32-bits little endian
+ 0xfeedfacf, # mach-o 64-bits big endian
+ 0xcffaedfe, # mach-o 64-bits little endian
+]
+
+FAT_SIGNATURE = 0xcafebabe # mach-o FAT binary
+
+ELF_SIGNATURE = 0x7f454c46 # Elf binary
+
+UNKNOWN = 0
+MACHO = 1
+ELF = 2
+
+def get_type(path):
+ '''
+    Check the signature of the given file and return what kind of
+    executable it matches.
+ '''
+ with open(path, 'rb') as f:
+ signature = f.read(4)
+ if len(signature) < 4:
+ return UNKNOWN
+ signature = struct.unpack('>L', signature)[0]
+ if signature == ELF_SIGNATURE:
+ return ELF
+ if signature in MACHO_SIGNATURES:
+ return MACHO
+ if signature != FAT_SIGNATURE:
+ return UNKNOWN
+ # We have to sanity check the second four bytes, because Java class
+ # files use the same magic number as Mach-O fat binaries.
+ # This logic is adapted from file(1), which says that Mach-O uses
+ # these bytes to count the number of architectures within, while
+ # Java uses it for a version number. Conveniently, there are only
+ # 18 labelled Mach-O architectures, and Java's first released
+ # class format used the version 43.0.
+ num = f.read(4)
+ if len(num) < 4:
+ return UNKNOWN
+ num = struct.unpack('>L', num)[0]
+ if num < 20:
+ return MACHO
+ return UNKNOWN
+
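+# Illustrative sketch (not part of the original module): the same
+# big-endian unpacking applied to an in-memory ELF header.
+def _example_signature_check():
+    header = b'\x7fELF' + b'\x00' * 12
+    signature = struct.unpack('>L', header[:4])[0]
+    return signature == ELF_SIGNATURE  # True
+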
+
+def is_executable(path):
+ '''
+ Return whether a given file path points to an executable or a library,
+ where an executable or library is identified by:
+ - the file extension on OS/2 and WINNT
+ - the file signature on OS/X and ELF systems (GNU/Linux, Android, BSD,
+ Solaris)
+
+    As this function is intended to choose between the ExecutableFile and
+    File classes in FileFinder, and choosing ExecutableFile only matters
+    on OS/2, OS/X, ELF and WINNT (in GCC builds) systems, we don't bother
+    detecting other kinds of executables.
+ '''
+ from buildconfig import substs
+ if not os.path.exists(path):
+ return False
+
+ if substs['OS_ARCH'] == 'WINNT':
+ return path.lower().endswith((substs['DLL_SUFFIX'],
+ substs['BIN_SUFFIX']))
+
+ return get_type(path) != UNKNOWN
+
+
+def may_strip(path):
+ '''
+    Return whether strip() should be called.
+ '''
+ from buildconfig import substs
+ return not substs['PKG_SKIP_STRIP']
+
+
+def strip(path):
+ '''
+ Execute the STRIP command with STRIP_FLAGS on the given path.
+ '''
+ from buildconfig import substs
+ strip = substs['STRIP']
+ flags = substs['STRIP_FLAGS'].split() if 'STRIP_FLAGS' in substs else []
+ cmd = [strip] + flags + [path]
+ if subprocess.call(cmd) != 0:
+ errors.fatal('Error executing ' + ' '.join(cmd))
+
+
+def may_elfhack(path):
+ '''
+    Return whether elfhack() should be called.
+ '''
+ # elfhack only supports libraries. We should check the ELF header for
+ # the right flag, but checking the file extension works too.
+ from buildconfig import substs
+ return ('USE_ELF_HACK' in substs and substs['USE_ELF_HACK'] and
+ path.endswith(substs['DLL_SUFFIX']) and
+ 'COMPILE_ENVIRONMENT' in substs and substs['COMPILE_ENVIRONMENT'])
+
+
+def elfhack(path):
+ '''
+ Execute the elfhack command on the given path.
+ '''
+ from buildconfig import topobjdir
+ cmd = [os.path.join(topobjdir, 'build/unix/elfhack/elfhack'), path]
+ if 'ELF_HACK_FLAGS' in os.environ:
+ cmd[1:0] = os.environ['ELF_HACK_FLAGS'].split()
+ if subprocess.call(cmd) != 0:
+ errors.fatal('Error executing ' + ' '.join(cmd))
diff --git a/python/mozbuild/mozpack/files.py b/python/mozbuild/mozpack/files.py
new file mode 100644
index 000000000..64902e195
--- /dev/null
+++ b/python/mozbuild/mozpack/files.py
@@ -0,0 +1,1106 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import errno
+import os
+import platform
+import shutil
+import stat
+import subprocess
+import uuid
+import mozbuild.makeutil as makeutil
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import FileAvoidWrite
+from mozpack.executables import (
+ is_executable,
+ may_strip,
+ strip,
+ may_elfhack,
+ elfhack,
+)
+from mozpack.chrome.manifest import ManifestEntry
+from io import BytesIO
+from mozpack.errors import (
+ ErrorMessage,
+ errors,
+)
+from mozpack.mozjar import JarReader
+import mozpack.path as mozpath
+from collections import OrderedDict
+from jsmin import JavascriptMinify
+from tempfile import (
+ mkstemp,
+ NamedTemporaryFile,
+)
+from tarfile import (
+ TarFile,
+ TarInfo,
+)
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+
+# For clean builds, copying files on win32 using CopyFile through ctypes is
+# ~2x as fast as using shutil.copyfile.
+if platform.system() != 'Windows':
+ _copyfile = shutil.copyfile
+else:
+ import ctypes
+ _kernel32 = ctypes.windll.kernel32
+ _CopyFileA = _kernel32.CopyFileA
+ _CopyFileW = _kernel32.CopyFileW
+
+ def _copyfile(src, dest):
+ # False indicates `dest` should be overwritten if it exists already.
+ if isinstance(src, unicode) and isinstance(dest, unicode):
+ _CopyFileW(src, dest, False)
+ elif isinstance(src, str) and isinstance(dest, str):
+ _CopyFileA(src, dest, False)
+ else:
+ raise TypeError('mismatched path types!')
+
+class Dest(object):
+ '''
+ Helper interface for BaseFile.copy. The interface works as follows:
+ - read() and write() can be used to sequentially read/write from the
+ underlying file.
+ - a call to read() after a write() will re-open the underlying file and
+ read from it.
+ - a call to write() after a read() will re-open the underlying file,
+ emptying it, and write to it.
+ '''
+ def __init__(self, path):
+ self.path = path
+ self.mode = None
+
+ @property
+ def name(self):
+ return self.path
+
+ def read(self, length=-1):
+ if self.mode != 'r':
+ self.file = open(self.path, 'rb')
+ self.mode = 'r'
+ return self.file.read(length)
+
+ def write(self, data):
+ if self.mode != 'w':
+ self.file = open(self.path, 'wb')
+ self.mode = 'w'
+ return self.file.write(data)
+
+ def exists(self):
+ return os.path.exists(self.path)
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+ self.file.close()
+
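+# Illustrative sketch (not part of the original module): the mode
+# switching described above. The path 'scratch.txt' is an assumption.
+def _example_dest():
+    dest = Dest('scratch.txt')
+    dest.write('first')  # opens the underlying file for writing
+    dest.close()
+    data = dest.read()   # re-opens the same file for reading
+    dest.close()
+    return data
+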
+
+class BaseFile(object):
+ '''
+ Base interface and helper for file copying. Derived class may implement
+ their own copy function, or rely on BaseFile.copy using the open() member
+ function and/or the path property.
+ '''
+ @staticmethod
+ def is_older(first, second):
+ '''
+ Compares the modification time of two files, and returns whether the
+ ``first`` file is older than the ``second`` file.
+ '''
+ # os.path.getmtime returns a result in seconds with precision up to
+ # the microsecond. But microsecond is too precise because
+ # shutil.copystat only copies milliseconds, and seconds is not
+ # enough precision.
+ return int(os.path.getmtime(first) * 1000) \
+ <= int(os.path.getmtime(second) * 1000)
+
+ @staticmethod
+ def any_newer(dest, inputs):
+ '''
+ Compares the modification time of ``dest`` to multiple input files, and
+ returns whether any of the ``inputs`` is newer (has a later mtime) than
+ ``dest``.
+ '''
+ # os.path.getmtime returns a result in seconds with precision up to
+ # the microsecond. But microsecond is too precise because
+ # shutil.copystat only copies milliseconds, and seconds is not
+ # enough precision.
+ dest_mtime = int(os.path.getmtime(dest) * 1000)
+ for input in inputs:
+ if dest_mtime < int(os.path.getmtime(input) * 1000):
+ return True
+ return False
+
+ @staticmethod
+ def normalize_mode(mode):
+ # Normalize file mode:
+ # - keep file type (e.g. S_IFREG)
+ ret = stat.S_IFMT(mode)
+ # - expand user read and execute permissions to everyone
+ if mode & 0400:
+ ret |= 0444
+ if mode & 0100:
+ ret |= 0111
+ # - keep user write permissions
+ if mode & 0200:
+ ret |= 0200
+        # - drop sticky bit, setuid and setgid
+ return ret
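+
+    # Worked example (illustrative): normalize_mode(0100750) keeps the
+    # S_IFREG bits (0100000), expands u+r to a+r (0444) and u+x to a+x
+    # (0111), and keeps u+w (0200), giving 0100755.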
+
+ def copy(self, dest, skip_if_older=True):
+ '''
+ Copy the BaseFile content to the destination given as a string or a
+ Dest instance. Avoids replacing existing files if the BaseFile content
+ matches that of the destination, or in case of plain files, if the
+ destination is newer than the original file. This latter behaviour is
+ disabled when skip_if_older is False.
+ Returns whether a copy was actually performed (True) or not (False).
+ '''
+ if isinstance(dest, basestring):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ can_skip_content_check = False
+ if not dest.exists():
+ can_skip_content_check = True
+ elif getattr(self, 'path', None) and getattr(dest, 'path', None):
+ if skip_if_older and BaseFile.is_older(self.path, dest.path):
+ return False
+ elif os.path.getsize(self.path) != os.path.getsize(dest.path):
+ can_skip_content_check = True
+
+ if can_skip_content_check:
+ if getattr(self, 'path', None) and getattr(dest, 'path', None):
+ _copyfile(self.path, dest.path)
+ shutil.copystat(self.path, dest.path)
+ else:
+ # Ensure the file is always created
+ if not dest.exists():
+ dest.write('')
+ shutil.copyfileobj(self.open(), dest)
+ return True
+
+ src = self.open()
+ copy_content = ''
+ while True:
+ dest_content = dest.read(32768)
+ src_content = src.read(32768)
+ copy_content += src_content
+ if len(dest_content) == len(src_content) == 0:
+ break
+ # If the read content differs between origin and destination,
+ # write what was read up to now, and copy the remainder.
+ if dest_content != src_content:
+ dest.write(copy_content)
+ shutil.copyfileobj(src, dest)
+ break
+ if hasattr(self, 'path') and hasattr(dest, 'path'):
+ shutil.copystat(self.path, dest.path)
+ return True
+
+ def open(self):
+ '''
+        Return a file-like object from which the content of the associated
+        file can be read(). This is meant to be overloaded in subclasses to
+        return a custom file-like object.
+ '''
+ assert self.path is not None
+ return open(self.path, 'rb')
+
+ def read(self):
+ raise NotImplementedError('BaseFile.read() not implemented. Bug 1170329.')
+
+ @property
+ def mode(self):
+ '''
+ Return the file's unix mode, or None if it has no meaning.
+ '''
+ return None
+
+
+class File(BaseFile):
+ '''
+ File class for plain files.
+ '''
+ def __init__(self, path):
+ self.path = path
+
+ @property
+ def mode(self):
+ '''
+ Return the file's unix mode, as returned by os.stat().st_mode.
+ '''
+ if platform.system() == 'Windows':
+ return None
+ assert self.path is not None
+ mode = os.stat(self.path).st_mode
+ return self.normalize_mode(mode)
+
+ def read(self):
+ '''Return the contents of the file.'''
+ with open(self.path, 'rb') as fh:
+ return fh.read()
+
+
+class ExecutableFile(File):
+ '''
+ File class for executable and library files on OS/2, OS/X and ELF systems.
+ (see mozpack.executables.is_executable documentation).
+ '''
+ def copy(self, dest, skip_if_older=True):
+ real_dest = dest
+ if not isinstance(dest, basestring):
+ fd, dest = mkstemp()
+ os.close(fd)
+ os.remove(dest)
+ assert isinstance(dest, basestring)
+ # If File.copy didn't actually copy because dest is newer, check the
+ # file sizes. If dest is smaller, it means it is already stripped and
+ # elfhacked, so we can skip.
+ if not File.copy(self, dest, skip_if_older) and \
+ os.path.getsize(self.path) > os.path.getsize(dest):
+ return False
+ try:
+ if may_strip(dest):
+ strip(dest)
+ if may_elfhack(dest):
+ elfhack(dest)
+ except ErrorMessage:
+ os.remove(dest)
+ raise
+
+ if real_dest != dest:
+ f = File(dest)
+ ret = f.copy(real_dest, skip_if_older)
+ os.remove(dest)
+ return ret
+ return True
+
+
+class AbsoluteSymlinkFile(File):
+ '''File class that is copied by symlinking (if available).
+
+ This class only works if the target path is absolute.
+ '''
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise ValueError('Symlink target not absolute: %s' % path)
+
+ File.__init__(self, path)
+
+ def copy(self, dest, skip_if_older=True):
+ assert isinstance(dest, basestring)
+
+ # The logic in this function is complicated by the fact that symlinks
+ # aren't universally supported. So, where symlinks aren't supported, we
+ # fall back to file copying. Keep in mind that symlink support is
+ # per-filesystem, not per-OS.
+
+ # Handle the simple case where symlinks are definitely not supported by
+ # falling back to file copy.
+ if not hasattr(os, 'symlink'):
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # Always verify the symlink target path exists.
+ if not os.path.exists(self.path):
+ raise ErrorMessage('Symlink target path does not exist: %s' % self.path)
+
+ st = None
+
+ try:
+ st = os.lstat(dest)
+ except OSError as ose:
+ if ose.errno != errno.ENOENT:
+ raise
+
+ # If the dest is a symlink pointing to us, we have nothing to do.
+ # If it's the wrong symlink, the filesystem must support symlinks,
+ # so we replace with a proper symlink.
+ if st and stat.S_ISLNK(st.st_mode):
+ link = os.readlink(dest)
+ if link == self.path:
+ return False
+
+ os.remove(dest)
+ os.symlink(self.path, dest)
+ return True
+
+ # If the destination doesn't exist, we try to create a symlink. If that
+ # fails, we fall back to copy code.
+ if not st:
+ try:
+ os.symlink(self.path, dest)
+ return True
+ except OSError:
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # Now the complicated part. If the destination exists, we could be
+ # replacing a file with a symlink. Or, the filesystem may not support
+ # symlinks. We want to minimize I/O overhead for performance reasons,
+ # so we keep the existing destination file around as long as possible.
+ # A lot of the system calls would be eliminated if we cached whether
+ # symlinks are supported. However, even if we performed a single
+ # up-front test of whether the root of the destination directory
+ # supports symlinks, there's no guarantee that all operations for that
+ # dest (or source) would be on the same filesystem and would support
+ # symlinks.
+ #
+ # Our strategy is to attempt to create a new symlink with a random
+ # name. If that fails, we fall back to copy mode. If that works, we
+ # remove the old destination and move the newly-created symlink into
+ # its place.
+
+ temp_dest = os.path.join(os.path.dirname(dest), str(uuid.uuid4()))
+ try:
+ os.symlink(self.path, temp_dest)
+ # TODO Figure out exactly how symlink creation fails and only trap
+ # that.
+ except EnvironmentError:
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # If removing the original file fails, don't forget to clean up the
+ # temporary symlink.
+ try:
+ os.remove(dest)
+ except EnvironmentError:
+ os.remove(temp_dest)
+ raise
+
+ os.rename(temp_dest, dest)
+ return True
+
+
+class ExistingFile(BaseFile):
+ '''
+ File class that represents a file that may exist but whose content comes
+ from elsewhere.
+
+    The purpose of this class is to account for files that are installed via
+ external means. It is typically only used in manifests or in registries to
+ account for files.
+
+ When asked to copy, this class does nothing because nothing is known about
+ the source file/data.
+
+ Instances of this class come in two flavors: required and optional. If an
+ existing file is required, it must exist during copy() or an error is
+ raised.
+ '''
+ def __init__(self, required):
+ self.required = required
+
+ def copy(self, dest, skip_if_older=True):
+ if isinstance(dest, basestring):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ if not self.required:
+ return
+
+ if not dest.exists():
+ errors.fatal("Required existing file doesn't exist: %s" %
+ dest.path)
+
+
+class PreprocessedFile(BaseFile):
+ '''
+ File class for a file that is preprocessed. PreprocessedFile.copy() runs
+ the preprocessor on the file to create the output.
+ '''
+ def __init__(self, path, depfile_path, marker, defines, extra_depends=None,
+ silence_missing_directive_warnings=False):
+ self.path = path
+ self.depfile = depfile_path
+ self.marker = marker
+ self.defines = defines
+ self.extra_depends = list(extra_depends or [])
+ self.silence_missing_directive_warnings = \
+ silence_missing_directive_warnings
+
+ def copy(self, dest, skip_if_older=True):
+ '''
+ Invokes the preprocessor to create the destination file.
+ '''
+ if isinstance(dest, basestring):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ # We have to account for the case where the destination exists and is a
+ # symlink to something. Since we know the preprocessor is certainly not
+ # going to create a symlink, we can just remove the existing one. If the
+ # destination is not a symlink, we leave it alone, since we're going to
+ # overwrite its contents anyway.
+ # If symlinks aren't supported at all, we can skip this step.
+ if hasattr(os, 'symlink'):
+ if os.path.islink(dest.path):
+ os.remove(dest.path)
+
+ pp_deps = set(self.extra_depends)
+
+ # If a dependency file was specified, and it exists, add any
+ # dependencies from that file to our list.
+ if self.depfile and os.path.exists(self.depfile):
+ target = mozpath.normpath(dest.name)
+ with open(self.depfile, 'rb') as fileobj:
+ for rule in makeutil.read_dep_makefile(fileobj):
+ if target in rule.targets():
+ pp_deps.update(rule.dependencies())
+
+ skip = False
+ if dest.exists() and skip_if_older:
+ # If a dependency file was specified, and it doesn't exist,
+ # assume that the preprocessor needs to be rerun. That will
+ # regenerate the dependency file.
+ if self.depfile and not os.path.exists(self.depfile):
+ skip = False
+ else:
+ skip = not BaseFile.any_newer(dest.path, pp_deps)
+
+ if skip:
+ return False
+
+ deps_out = None
+ if self.depfile:
+ deps_out = FileAvoidWrite(self.depfile)
+ pp = Preprocessor(defines=self.defines, marker=self.marker)
+ pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)
+
+ with open(self.path, 'rU') as input:
+ pp.processFile(input=input, output=dest, depfile=deps_out)
+
+ dest.close()
+ if self.depfile:
+ deps_out.close()
+
+ return True
+
+
+class GeneratedFile(BaseFile):
+ '''
+ File class for content with no previous existence on the filesystem.
+ '''
+ def __init__(self, content):
+ self.content = content
+
+ def open(self):
+ return BytesIO(self.content)
+
+
+class DeflatedFile(BaseFile):
+ '''
+ File class for members of a jar archive. DeflatedFile.copy() effectively
+ extracts the file from the jar archive.
+ '''
+ def __init__(self, file):
+ from mozpack.mozjar import JarFileReader
+ assert isinstance(file, JarFileReader)
+ self.file = file
+
+ def open(self):
+ self.file.seek(0)
+ return self.file
+
+class ExtractedTarFile(GeneratedFile):
+ '''
+ File class for members of a tar archive. Contents of the underlying file
+ are extracted immediately and stored in memory.
+ '''
+ def __init__(self, tar, info):
+ assert isinstance(info, TarInfo)
+ assert isinstance(tar, TarFile)
+ GeneratedFile.__init__(self, tar.extractfile(info).read())
+ self._mode = self.normalize_mode(info.mode)
+
+ @property
+ def mode(self):
+ return self._mode
+
+ def read(self):
+ return self.content
+
+class XPTFile(GeneratedFile):
+ '''
+ File class for a linked XPT file. It takes several XPT files as input
+ (using the add() and remove() member functions), and links them at copy()
+ time.
+ '''
+ def __init__(self):
+ self._files = set()
+
+ def add(self, xpt):
+ '''
+ Add the given XPT file (as a BaseFile instance) to the list of XPTs
+ to link.
+ '''
+ assert isinstance(xpt, BaseFile)
+ self._files.add(xpt)
+
+ def remove(self, xpt):
+ '''
+ Remove the given XPT file (as a BaseFile instance) from the list of
+ XPTs to link.
+ '''
+ assert isinstance(xpt, BaseFile)
+ self._files.remove(xpt)
+
+ def copy(self, dest, skip_if_older=True):
+ '''
+ Link the registered XPTs and place the resulting linked XPT at the
+ destination given as a string or a Dest instance. Avoids an expensive
+ XPT linking if the interfaces in an existing destination match those of
+ the individual XPTs to link.
+ skip_if_older is ignored.
+ '''
+ if isinstance(dest, basestring):
+ dest = Dest(dest)
+ assert isinstance(dest, Dest)
+
+ from xpt import xpt_link, Typelib, Interface
+ all_typelibs = [Typelib.read(f.open()) for f in self._files]
+ if dest.exists():
+ # Typelib.read() needs to seek(), so use a BytesIO for dest
+ # content.
+ dest_interfaces = \
+ dict((i.name, i)
+ for i in Typelib.read(BytesIO(dest.read())).interfaces
+ if i.iid != Interface.UNRESOLVED_IID)
+ identical = True
+ for f in self._files:
+ typelib = Typelib.read(f.open())
+ for i in typelib.interfaces:
+ if i.iid != Interface.UNRESOLVED_IID and \
+ not (i.name in dest_interfaces and
+ i == dest_interfaces[i.name]):
+ identical = False
+ break
+ if identical:
+ return False
+ s = BytesIO()
+ xpt_link(all_typelibs).write(s)
+ dest.write(s.getvalue())
+ return True
+
+ def open(self):
+ raise RuntimeError("Unsupported")
+
+ def isempty(self):
+ '''
+ Return whether there are XPT files to link.
+ '''
+ return len(self._files) == 0
+
+
+class ManifestFile(BaseFile):
+ '''
+ File class for a manifest file. It takes individual manifest entries (using
+ the add() and remove() member functions), and adjusts them to be relative
+ to the base path for the manifest, given at creation.
+ Example:
+ There is a manifest entry "content foobar foobar/content/" relative
+ to "foobar/chrome". When packaging, the entry will be stored in
+ jar:foobar/omni.ja!/chrome/chrome.manifest, which means the entry
+ will have to be relative to "chrome" instead of "foobar/chrome". This
+ doesn't really matter when serializing the entry, since this base path
+ is not written out, but it matters when moving the entry at the same
+ time, e.g. to jar:foobar/omni.ja!/chrome.manifest, which we don't do
+ currently but could in the future.
+ '''
+ def __init__(self, base, entries=None):
+ self._entries = entries if entries else []
+ self._base = base
+
+ def add(self, entry):
+ '''
+ Add the given entry to the manifest. Entries are rebased at open() time
+ instead of add() time so that they can be more easily remove()d.
+ '''
+ assert isinstance(entry, ManifestEntry)
+ self._entries.append(entry)
+
+ def remove(self, entry):
+ '''
+ Remove the given entry from the manifest.
+ '''
+ assert isinstance(entry, ManifestEntry)
+ self._entries.remove(entry)
+
+ def open(self):
+ '''
+        Return a file-like object from which the serialized content of the
+        manifest can be read().
+ '''
+ return BytesIO(''.join('%s\n' % e.rebase(self._base)
+ for e in self._entries))
+
+ def __iter__(self):
+ '''
+ Iterate over entries in the manifest file.
+ '''
+ return iter(self._entries)
+
+ def isempty(self):
+ '''
+ Return whether there are manifest entries to write
+ '''
+ return len(self._entries) == 0
+
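+# Illustrative sketch (not part of the original module): serializing one
+# entry. ManifestCategory comes from mozpack.chrome.manifest; with the
+# entry base equal to the manifest base, the serialized form is unchanged.
+def _example_manifest_file():
+    from mozpack.chrome.manifest import ManifestCategory
+    m = ManifestFile('chrome')
+    m.add(ManifestCategory('chrome', 'profile-after-change', 'Foo',
+                           '@mozilla.org/foo;1'))
+    # 'category profile-after-change Foo @mozilla.org/foo;1\n'
+    return m.open().read()
+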
+
+class MinifiedProperties(BaseFile):
+ '''
+ File class for minified properties. This wraps around a BaseFile instance,
+ and removes lines starting with a # from its content.
+ '''
+ def __init__(self, file):
+ assert isinstance(file, BaseFile)
+ self._file = file
+
+ def open(self):
+ '''
+        Return a file-like object from which the minified content of the
+        properties file can be read().
+ '''
+ return BytesIO(''.join(l for l in self._file.open().readlines()
+ if not l.startswith('#')))
+
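+# Illustrative sketch (not part of the original module): comment lines
+# are dropped, everything else is preserved as-is.
+def _example_minified_properties():
+    props = GeneratedFile('# comment\nkey=value\n')
+    return MinifiedProperties(props).open().read()  # 'key=value\n'
+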
+
+class MinifiedJavaScript(BaseFile):
+ '''
+ File class for minifying JavaScript files.
+ '''
+ def __init__(self, file, verify_command=None):
+ assert isinstance(file, BaseFile)
+ self._file = file
+ self._verify_command = verify_command
+
+ def open(self):
+ output = BytesIO()
+ minify = JavascriptMinify(self._file.open(), output, quote_chars="'\"`")
+ minify.minify()
+ output.seek(0)
+
+ if not self._verify_command:
+ return output
+
+ input_source = self._file.open().read()
+ output_source = output.getvalue()
+
+ with NamedTemporaryFile() as fh1, NamedTemporaryFile() as fh2:
+ fh1.write(input_source)
+ fh2.write(output_source)
+ fh1.flush()
+ fh2.flush()
+
+ try:
+ args = list(self._verify_command)
+ args.extend([fh1.name, fh2.name])
+ subprocess.check_output(args, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ errors.warn('JS minification verification failed for %s:' %
+ (getattr(self._file, 'path', '<unknown>')))
+ # Prefix each line with "Warning:" so mozharness doesn't
+ # think these error messages are real errors.
+ for line in e.output.splitlines():
+ errors.warn(line)
+
+ return self._file.open()
+
+ return output
+
+
+class BaseFinder(object):
+ def __init__(self, base, minify=False, minify_js=False,
+ minify_js_verify_command=None):
+ '''
+ Initializes the instance with a reference base directory.
+
+ The optional minify argument specifies whether minification of code
+ should occur. minify_js is an additional option to control minification
+ of JavaScript. It requires minify to be True.
+
+ minify_js_verify_command can be used to optionally verify the results
+ of JavaScript minification. If defined, it is expected to be an iterable
+ that will constitute the first arguments to a called process which will
+ receive the filenames of the original and minified JavaScript files.
+ The invoked process can then verify the results. If minification is
+ rejected, the process exits with a non-0 exit code and the original
+ JavaScript source is used. An example value for this argument is
+ ('/path/to/js', '/path/to/verify/script.js').
+ '''
+ if minify_js and not minify:
+ raise ValueError('minify_js requires minify.')
+
+ self.base = base
+ self._minify = minify
+ self._minify_js = minify_js
+ self._minify_js_verify_command = minify_js_verify_command
+
+ def find(self, pattern):
+ '''
+ Yield path, BaseFile_instance pairs for all files under the base
+ directory and its subdirectories that match the given pattern. See the
+ mozpack.path.match documentation for a description of the handled
+ patterns.
+ '''
+ while pattern.startswith('/'):
+ pattern = pattern[1:]
+ for p, f in self._find(pattern):
+ yield p, self._minify_file(p, f)
+
+ def get(self, path):
+ """Obtain a single file.
+
+ Where ``find`` is tailored towards matching multiple files, this method
+ is used for retrieving a single file. Use this method when performance
+ is critical.
+
+        Returns a ``BaseFile`` if exactly one file matches, or ``None``
+        otherwise.
+ """
+ files = list(self.find(path))
+ if len(files) != 1:
+ return None
+ return files[0][1]
+
+ def __iter__(self):
+ '''
+ Iterates over all files under the base directory (excluding files
+ starting with a '.' and files at any level under a directory starting
+ with a '.').
+ for path, file in finder:
+ ...
+ '''
+ return self.find('')
+
+ def __contains__(self, pattern):
+ raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
+ self.__class__.__name__)
+
+ def contains(self, pattern):
+ '''
+ Return whether some files under the base directory match the given
+ pattern. See the mozpack.path.match documentation for a description of
+ the handled patterns.
+ '''
+ return any(self.find(pattern))
+
+ def _minify_file(self, path, file):
+ '''
+ Return an appropriate MinifiedSomething wrapper for the given BaseFile
+ instance (file), according to the file type (determined by the given
+ path), if the FileFinder was created with minification enabled.
+ Otherwise, just return the given BaseFile instance.
+ '''
+ if not self._minify or isinstance(file, ExecutableFile):
+ return file
+
+ if path.endswith('.properties'):
+ return MinifiedProperties(file)
+
+ if self._minify_js and path.endswith(('.js', '.jsm')):
+ return MinifiedJavaScript(file, self._minify_js_verify_command)
+
+ return file
+
+ def _find_helper(self, pattern, files, file_getter):
+ """Generic implementation of _find.
+
+ A few *Finder implementations share logic for returning results.
+        This function implements that shared logic.
+
+ The ``file_getter`` argument is a callable that receives a path
+ that is known to exist. The callable should return a ``BaseFile``
+ instance.
+ """
+ if '*' in pattern:
+ for p in files:
+ if mozpath.match(p, pattern):
+ yield p, file_getter(p)
+ elif pattern == '':
+ for p in files:
+ yield p, file_getter(p)
+ elif pattern in files:
+ yield pattern, file_getter(pattern)
+ else:
+ for p in files:
+ if mozpath.basedir(p, [pattern]) == pattern:
+ yield p, file_getter(p)
+
+
+class FileFinder(BaseFinder):
+ '''
+ Helper to get appropriate BaseFile instances from the file system.
+ '''
+ def __init__(self, base, find_executables=True, ignore=(),
+ find_dotfiles=False, **kargs):
+ '''
+ Create a FileFinder for files under the given base directory.
+
+ The find_executables argument determines whether the finder needs to
+ try to guess whether files are executables. Disabling this guessing
+ when not necessary can speed up the finder significantly.
+
+ ``ignore`` accepts an iterable of patterns to ignore. Entries are
+ strings that match paths relative to ``base`` using
+ ``mozpath.match()``. This means if an entry corresponds
+ to a directory, all files under that directory will be ignored. If
+ an entry corresponds to a file, that particular file will be ignored.
+ '''
+ BaseFinder.__init__(self, base, **kargs)
+ self.find_dotfiles = find_dotfiles
+ self.find_executables = find_executables
+ self.ignore = ignore
+
+ def _find(self, pattern):
+ '''
+ Actual implementation of FileFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitly requested.
+ '''
+ if '*' in pattern:
+ return self._find_glob('', mozpath.split(pattern))
+ elif os.path.isdir(os.path.join(self.base, pattern)):
+ return self._find_dir(pattern)
+ else:
+ f = self.get(pattern)
+ return ((pattern, f),) if f else ()
+
+ def _find_dir(self, path):
+ '''
+ Actual implementation of FileFinder.find() when the given pattern
+ corresponds to an existing directory under the base directory.
+ Ignores file names starting with a '.' under the given path. If the
+        path itself has components starting with a '.', they are not ignored.
+ '''
+ for p in self.ignore:
+ if mozpath.match(path, p):
+ return
+
+        # The sorted() makes the output deterministic. Otherwise, we are
+ # likely dependent on filesystem implementation details, such as
+ # inode ordering.
+ for p in sorted(os.listdir(os.path.join(self.base, path))):
+ if p.startswith('.'):
+ if p in ('.', '..'):
+ continue
+ if not self.find_dotfiles:
+ continue
+ for p_, f in self._find(mozpath.join(path, p)):
+ yield p_, f
+
+ def get(self, path):
+ srcpath = os.path.join(self.base, path)
+ if not os.path.exists(srcpath):
+ return None
+
+ for p in self.ignore:
+ if mozpath.match(path, p):
+ return None
+
+ if self.find_executables and is_executable(srcpath):
+ return ExecutableFile(srcpath)
+ else:
+ return File(srcpath)
+
+ def _find_glob(self, base, pattern):
+ '''
+ Actual implementation of FileFinder.find() when the given pattern
+ contains globbing patterns ('*' or '**'). This is meant to be an
+ equivalent of:
+ for p, f in self:
+ if mozpath.match(p, pattern):
+ yield p, f
+ but avoids scanning the entire tree.
+ '''
+ if not pattern:
+ for p, f in self._find(base):
+ yield p, f
+ elif pattern[0] == '**':
+ for p, f in self._find(base):
+ if mozpath.match(p, mozpath.join(*pattern)):
+ yield p, f
+ elif '*' in pattern[0]:
+ if not os.path.exists(os.path.join(self.base, base)):
+ return
+
+ for p in self.ignore:
+ if mozpath.match(base, p):
+ return
+
+ # See above comment w.r.t. sorted() and deterministic behavior.
+ for p in sorted(os.listdir(os.path.join(self.base, base))):
+ if p.startswith('.') and not pattern[0].startswith('.'):
+ continue
+ if mozpath.match(p, pattern[0]):
+ for p_, f in self._find_glob(mozpath.join(base, p),
+ pattern[1:]):
+ yield p_, f
+ else:
+ for p, f in self._find_glob(mozpath.join(base, pattern[0]),
+ pattern[1:]):
+ yield p, f
+
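+# Example usage (editor's sketch; the base directory and pattern are
+# hypothetical, and BaseFinder.find() from earlier in this file is
+# assumed):
+#
+#     finder = FileFinder('/src/app', find_executables=False)
+#     for path, f in finder.find('locales/**/*.properties'):
+#         # path is relative to the base; f is a File instance.
+#         data = f.open().read()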
+
+class JarFinder(BaseFinder):
+ '''
+ Helper to get appropriate DeflatedFile instances from a JarReader.
+ '''
+ def __init__(self, base, reader, **kargs):
+ '''
+ Create a JarFinder for files in the given JarReader. The base argument
+ is used as an indication of the Jar file location.
+ '''
+ assert isinstance(reader, JarReader)
+ BaseFinder.__init__(self, base, **kargs)
+ self._files = OrderedDict((f.filename, f) for f in reader)
+
+ def _find(self, pattern):
+ '''
+ Actual implementation of JarFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ '''
+ return self._find_helper(pattern, self._files,
+ lambda x: DeflatedFile(self._files[x]))
+
+
+class TarFinder(BaseFinder):
+ '''
+ Helper to get files from a TarFile.
+ '''
+ def __init__(self, base, tar, **kargs):
+ '''
+ Create a TarFinder for files in the given TarFile. The base argument
+ is used as an indication of the Tar file location.
+ '''
+ assert isinstance(tar, TarFile)
+ self._tar = tar
+ BaseFinder.__init__(self, base, **kargs)
+ self._files = OrderedDict((f.name, f) for f in tar if f.isfile())
+
+ def _find(self, pattern):
+ '''
+ Actual implementation of TarFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ '''
+ return self._find_helper(pattern, self._files,
+ lambda x: ExtractedTarFile(self._tar,
+ self._files[x]))
+
+
+class ComposedFinder(BaseFinder):
+ '''
+ Composes multiple *Finder instances into a sort of virtual file system.
+
+ A ComposedFinder is initialized from a dictionary associating paths to
+ *Finder instances.
+
+ Note this could be optimized to be smarter than getting all the files
+ in advance.
+ '''
+ def __init__(self, finders):
+ # Can't import globally, because of the dependency of mozpack.copier
+ # on this module.
+ from mozpack.copier import FileRegistry
+ self.files = FileRegistry()
+
+ for base, finder in sorted(finders.iteritems()):
+ if self.files.contains(base):
+ self.files.remove(base)
+ for p, f in finder.find(''):
+ self.files.add(mozpath.join(base, p), f)
+
+ def find(self, pattern):
+ for p in self.files.match(pattern):
+ yield p, self.files[p]
+
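+# Example (editor's sketch, with hypothetical paths): overlay a jar's
+# contents on top of a directory tree.
+#
+#     composed = ComposedFinder({
+#         '': FileFinder('/src/app'),
+#         'chrome': JarFinder('/src/app/chrome.jar',
+#                             JarReader('/src/app/chrome.jar')),
+#     })
+#     for path, f in composed.find('chrome/**'):
+#         ...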
+
+class MercurialFile(BaseFile):
+ """File class for holding data from Mercurial."""
+ def __init__(self, client, rev, path):
+ self._content = client.cat([path], rev=rev)
+
+ def read(self):
+ return self._content
+
+
+class MercurialRevisionFinder(BaseFinder):
+ """A finder that operates on a specific Mercurial revision."""
+
+ def __init__(self, repo, rev='.', recognize_repo_paths=False, **kwargs):
+ """Create a finder attached to a specific revision in a repository.
+
+ If no revision is given, open the parent of the working directory.
+
+ ``recognize_repo_paths`` will enable a mode where ``.get()`` will
+ recognize full paths that include the repo's path. Typically Finder
+ instances are "bound" to a base directory and paths are relative to
+ that directory. This mode changes that. When this mode is activated,
+ ``.find()`` will not work! This mode exists to support the moz.build
+ reader, which uses absolute paths instead of relative paths. The reader
+ should eventually be rewritten to use relative paths and this hack
+ should be removed (TODO bug 1171069).
+ """
+ if not hglib:
+ raise Exception('hglib package not found')
+
+ super(MercurialRevisionFinder, self).__init__(base=repo, **kwargs)
+
+ self._root = mozpath.normpath(repo).rstrip('/')
+ self._recognize_repo_paths = recognize_repo_paths
+
+ # We change directories here otherwise we have to deal with relative
+ # paths.
+ oldcwd = os.getcwd()
+ os.chdir(self._root)
+ try:
+ self._client = hglib.open(path=repo, encoding=b'utf-8')
+ finally:
+ os.chdir(oldcwd)
+ self._rev = rev if rev is not None else b'.'
+ self._files = OrderedDict()
+
+ # Immediately populate the list of files in the repo since nearly every
+ # operation requires this list.
+ out = self._client.rawcommand([b'files', b'--rev', str(self._rev)])
+ for relpath in out.splitlines():
+ self._files[relpath] = None
+
+ def _find(self, pattern):
+ if self._recognize_repo_paths:
+ raise NotImplementedError('cannot use find with recognize_repo_paths')
+
+ return self._find_helper(pattern, self._files, self._get)
+
+ def get(self, path):
+ if self._recognize_repo_paths:
+ if not path.startswith(self._root):
+ raise ValueError('lookups in recognize_repo_paths mode must be '
+ 'prefixed with repo path: %s' % path)
+ path = path[len(self._root) + 1:]
+
+ try:
+ return self._get(path)
+ except KeyError:
+ return None
+
+ def _get(self, path):
+ # We lazy populate self._files because potentially creating tens of
+ # thousands of MercurialFile instances for every file in the repo is
+ # inefficient.
+ f = self._files[path]
+ if not f:
+ f = MercurialFile(self._client, self._rev, path)
+ self._files[path] = f
+
+ return f
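+
+# Example usage (editor's sketch; the repository path and file are
+# hypothetical, and python-hglib must be installed):
+#
+#     finder = MercurialRevisionFinder('/repos/mozilla-central', rev='tip')
+#     f = finder.get('python/mozbuild/setup.py')
+#     if f:
+#         data = f.read()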
diff --git a/python/mozbuild/mozpack/hg.py b/python/mozbuild/mozpack/hg.py
new file mode 100644
index 000000000..79876061f
--- /dev/null
+++ b/python/mozbuild/mozpack/hg.py
@@ -0,0 +1,95 @@
+# Copyright (C) 2015 Mozilla Contributors
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# As a special exception, the copyright holders of this code give you
+# permission to combine this code with the software known as 'mozbuild',
+# and to distribute those combinations without any restriction
+# coming from the use of this file. (The General Public License
+# restrictions do apply in other respects; for example, they cover
+# modification of the file, and distribution when not combined with
+# mozbuild.)
+#
+# If you modify this code, you may extend this exception to your
+# version of the code, but you are not obliged to do so. If you
+# do not wish to do so, delete this exception statement from your
+# version.
+
+from __future__ import absolute_import
+
+import mercurial.error as error
+import mercurial.hg as hg
+import mercurial.ui as hgui
+
+from .files import (
+ BaseFinder,
+ MercurialFile,
+)
+import mozpack.path as mozpath
+
+
+# This isn't a complete implementation of BaseFile. But it is complete
+# enough for moz.build reading.
+class MercurialNativeFile(MercurialFile):
+ def __init__(self, data):
+ self.data = data
+
+ def read(self):
+ return self.data
+
+
+class MercurialNativeRevisionFinder(BaseFinder):
+ def __init__(self, repo, rev='.', recognize_repo_paths=False):
+ """Create a finder attached to a specific changeset.
+
+ Accepts a Mercurial localrepo and changectx instance.
+ """
+ if isinstance(repo, (str, unicode)):
+ path = repo
+ repo = hg.repository(hgui.ui(), repo)
+ else:
+ path = repo.root
+
+ super(MercurialNativeRevisionFinder, self).__init__(base=repo.root)
+
+ self._repo = repo
+ self._rev = rev
+ self._root = mozpath.normpath(path)
+ self._recognize_repo_paths = recognize_repo_paths
+
+ def _find(self, pattern):
+ if self._recognize_repo_paths:
+ raise NotImplementedError('cannot use find with recognize_repo_paths')
+
+ return self._find_helper(pattern, self._repo[self._rev], self._get)
+
+ def get(self, path):
+ if self._recognize_repo_paths:
+ if not path.startswith(self._root):
+ raise ValueError('lookups in recognize_repo_paths mode must be '
+ 'prefixed with repo path: %s' % path)
+ path = path[len(self._root) + 1:]
+
+ return self._get(path)
+
+ def _get(self, path):
+ if isinstance(path, unicode):
+ path = path.encode('utf-8', 'replace')
+
+ try:
+ fctx = self._repo.filectx(path, self._rev)
+ return MercurialNativeFile(fctx.data())
+ except error.LookupError:
+ return None
diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py
new file mode 100644
index 000000000..93bd6c2ca
--- /dev/null
+++ b/python/mozbuild/mozpack/manifests.py
@@ -0,0 +1,419 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+from contextlib import contextmanager
+import json
+
+from .files import (
+ AbsoluteSymlinkFile,
+ ExistingFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ PreprocessedFile,
+)
+import mozpack.path as mozpath
+
+
+# This probably belongs in a more generic module. Where?
+@contextmanager
+def _auto_fileobj(path, fileobj, mode='r'):
+ if path and fileobj:
+ raise AssertionError('Only one of path or fileobj may be defined.')
+
+ if not path and not fileobj:
+ raise AssertionError('Must specify one of path or fileobj.')
+
+ if path:
+ fileobj = open(path, mode)
+
+ try:
+ yield fileobj
+ finally:
+ if path:
+ fileobj.close()
+
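+# Example (editor's sketch): exactly one of path/fileobj may be given;
+# the context manager only closes handles it opened itself.
+#
+#     with _auto_fileobj('/tmp/manifest', None, 'wb') as fh:
+#         fh.write(b'...')  # fh is closed when the block exits.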
+
+class UnreadableInstallManifest(Exception):
+ """Raised when an invalid install manifest is parsed."""
+
+
+class InstallManifest(object):
+ """Describes actions to be used with a copier.FileCopier instance.
+
+ This class facilitates serialization and deserialization of data used to
+ construct a copier.FileCopier and to perform copy operations.
+
+ The manifest defines source paths, destination paths, and a mechanism by
+ which the destination file should come into existence.
+
+ Entries in the manifest correspond to the following types:
+
+ copy -- The file specified as the source path will be copied to the
+ destination path.
+
+ symlink -- The destination path will be a symlink to the source path.
+ If symlinks are not supported, a copy will be performed.
+
+ exists -- The destination path is accounted for and won't be deleted by
+ the FileCopier. If the destination path doesn't exist, an error is
+ raised.
+
+ optional -- The destination path is accounted for and won't be deleted by
+ the FileCopier. No error is raised if the destination path does not
+ exist.
+
+ patternsymlink -- Paths matched by the expression in the source path
+ will be symlinked to the destination directory.
+
+ patterncopy -- Similar to patternsymlink except files are copied, not
+ symlinked.
+
+ preprocess -- The file specified at the source path will be run through
+ the preprocessor, and the output will be written to the destination
+ path.
+
+ content -- The destination file will be created with the given content.
+
+ Version 1 of the manifest was the initial version.
+ Version 2 added optional path support.
+ Version 3 added support for pattern entries.
+ Version 4 added preprocessed file support.
+ Version 5 added content support.
+ """
+
+ CURRENT_VERSION = 5
+
+ FIELD_SEPARATOR = '\x1f'
+
+ # Negative values are reserved for non-actionable items, that is, metadata
+ # that doesn't describe files in the destination.
+ SYMLINK = 1
+ COPY = 2
+ REQUIRED_EXISTS = 3
+ OPTIONAL_EXISTS = 4
+ PATTERN_SYMLINK = 5
+ PATTERN_COPY = 6
+ PREPROCESS = 7
+ CONTENT = 8
+
+ def __init__(self, path=None, fileobj=None):
+ """Create a new InstallManifest entry.
+
+ If path is defined, the manifest will be populated with data from the
+ file path.
+
+ If fileobj is defined, the manifest will be populated with data read
+ from the specified file object.
+
+ Both path and fileobj cannot be defined.
+ """
+ self._dests = {}
+ self._source_files = set()
+
+ if path or fileobj:
+ with _auto_fileobj(path, fileobj, 'rb') as fh:
+ self._source_files.add(fh.name)
+ self._load_from_fileobj(fh)
+
+ def _load_from_fileobj(self, fileobj):
+ version = fileobj.readline().rstrip()
+ if version not in ('1', '2', '3', '4', '5'):
+ raise UnreadableInstallManifest('Unknown manifest version: %s' %
+ version)
+
+ for line in fileobj:
+ line = line.rstrip()
+
+ fields = line.split(self.FIELD_SEPARATOR)
+
+ record_type = int(fields[0])
+
+ if record_type == self.SYMLINK:
+ dest, source = fields[1:]
+ self.add_symlink(source, dest)
+ continue
+
+ if record_type == self.COPY:
+ dest, source = fields[1:]
+ self.add_copy(source, dest)
+ continue
+
+ if record_type == self.REQUIRED_EXISTS:
+ _, path = fields
+ self.add_required_exists(path)
+ continue
+
+ if record_type == self.OPTIONAL_EXISTS:
+ _, path = fields
+ self.add_optional_exists(path)
+ continue
+
+ if record_type == self.PATTERN_SYMLINK:
+ _, base, pattern, dest = fields[1:]
+ self.add_pattern_symlink(base, pattern, dest)
+ continue
+
+ if record_type == self.PATTERN_COPY:
+ _, base, pattern, dest = fields[1:]
+ self.add_pattern_copy(base, pattern, dest)
+ continue
+
+ if record_type == self.PREPROCESS:
+ dest, source, deps, marker, defines, warnings = fields[1:]
+
+ self.add_preprocess(source, dest, deps, marker,
+ self._decode_field_entry(defines),
+ silence_missing_directive_warnings=bool(int(warnings)))
+ continue
+
+ if record_type == self.CONTENT:
+ dest, content = fields[1:]
+
+ self.add_content(
+ self._decode_field_entry(content).encode('utf-8'), dest)
+ continue
+
+ # Don't fail for non-actionable items, allowing
+ # forward-compatibility with those we will add in the future.
+ if record_type >= 0:
+ raise UnreadableInstallManifest('Unknown record type: %d' %
+ record_type)
+
+ def __len__(self):
+ return len(self._dests)
+
+ def __contains__(self, item):
+ return item in self._dests
+
+ def __eq__(self, other):
+ return isinstance(other, InstallManifest) and self._dests == other._dests
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __ior__(self, other):
+ if not isinstance(other, InstallManifest):
+ raise ValueError('Can only | with another instance of InstallManifest.')
+
+ # We must copy source files to ourselves so extra dependencies from
+ # the preprocessor are taken into account. Ideally, we would track
+ # which source file each entry came from. However, this is more
+ # complicated and not yet implemented. The current implementation
+ # will result in over-invalidation, possibly leading to performance
+ # loss.
+ self._source_files |= other._source_files
+
+ for dest in sorted(other._dests):
+ self._add_entry(dest, other._dests[dest])
+
+ return self
+
+ def _encode_field_entry(self, data):
+ """Converts an object into a format that can be stored in the manifest file.
+
+ Complex data types, such as ``dict``, need to be converted into a text
+ representation before they can be written to a file.
+ """
+ return json.dumps(data, sort_keys=True)
+
+ def _decode_field_entry(self, data):
+ """Restores an object from a format that can be stored in the manifest file.
+
+ Complex data types, such as ``dict``, are stored in the manifest as a
+ text representation and need to be converted back when read.
+ """
+ return json.loads(data)
+
+ def write(self, path=None, fileobj=None):
+ """Serialize this manifest to a file or file object.
+
+ If path is specified, that file will be written to. If fileobj is specified,
+ the serialized content will be written to that file object.
+
+ It is an error if both are specified.
+ """
+ with _auto_fileobj(path, fileobj, 'wb') as fh:
+ fh.write('%d\n' % self.CURRENT_VERSION)
+
+ for dest in sorted(self._dests):
+ entry = self._dests[dest]
+
+ parts = ['%d' % entry[0], dest]
+ parts.extend(entry[1:])
+ fh.write('%s\n' % self.FIELD_SEPARATOR.join(
+ p.encode('utf-8') for p in parts))
+
+ def add_symlink(self, source, dest):
+ """Add a symlink to this manifest.
+
+ dest will be a symlink to source.
+ """
+ self._add_entry(dest, (self.SYMLINK, source))
+
+ def add_copy(self, source, dest):
+ """Add a copy to this manifest.
+
+ source will be copied to dest.
+ """
+ self._add_entry(dest, (self.COPY, source))
+
+ def add_required_exists(self, dest):
+ """Record that a destination file must exist.
+
+ This effectively prevents the listed file from being deleted.
+ """
+ self._add_entry(dest, (self.REQUIRED_EXISTS,))
+
+ def add_optional_exists(self, dest):
+ """Record that a destination file may exist.
+
+ This effectively prevents the listed file from being deleted. Unlike a
+ "required exists" file, files of this type do not raise errors if the
+ destination file does not exist.
+ """
+ self._add_entry(dest, (self.OPTIONAL_EXISTS,))
+
+ def add_pattern_symlink(self, base, pattern, dest):
+ """Add a pattern match that results in symlinks being created.
+
+ A ``FileFinder`` will be created with its base set to ``base``
+ and ``FileFinder.find()`` will be called with ``pattern`` to discover
+ source files. Each source file will be symlinked under ``dest``.
+
+ Filenames under ``dest`` are constructed by taking the path fragment
+ after ``base`` and concatenating it with ``dest``. e.g.
+
+ <base>/foo/bar.h -> <dest>/foo/bar.h
+ """
+ self._add_entry(mozpath.join(base, pattern, dest),
+ (self.PATTERN_SYMLINK, base, pattern, dest))
+
+ def add_pattern_copy(self, base, pattern, dest):
+ """Add a pattern match that results in copies.
+
+ See ``add_pattern_symlink()`` for usage.
+ """
+ self._add_entry(mozpath.join(base, pattern, dest),
+ (self.PATTERN_COPY, base, pattern, dest))
+
+ def add_preprocess(self, source, dest, deps, marker='#', defines={},
+ silence_missing_directive_warnings=False):
+ """Add a preprocessed file to this manifest.
+
+ ``source`` will be passed through preprocessor.py, and the output will be
+ written to ``dest``.
+ """
+ self._add_entry(dest, (
+ self.PREPROCESS,
+ source,
+ deps,
+ marker,
+ self._encode_field_entry(defines),
+ '1' if silence_missing_directive_warnings else '0',
+ ))
+
+ def add_content(self, content, dest):
+ """Add a file with the given content."""
+ self._add_entry(dest, (
+ self.CONTENT,
+ self._encode_field_entry(content),
+ ))
+
+ def _add_entry(self, dest, entry):
+ if dest in self._dests:
+ raise ValueError('Item already in manifest: %s' % dest)
+
+ self._dests[dest] = entry
+
+ def populate_registry(self, registry, defines_override={}):
+ """Populate a mozpack.copier.FileRegistry instance with data from us.
+
+ The caller supplied a FileRegistry instance (or at least something that
+ conforms to its interface) and that instance is populated with data
+ from this manifest.
+
+ Defines can be given to override the ones in the manifest for
+ preprocessing.
+ """
+ for dest in sorted(self._dests):
+ entry = self._dests[dest]
+ install_type = entry[0]
+
+ if install_type == self.SYMLINK:
+ registry.add(dest, AbsoluteSymlinkFile(entry[1]))
+ continue
+
+ if install_type == self.COPY:
+ registry.add(dest, File(entry[1]))
+ continue
+
+ if install_type == self.REQUIRED_EXISTS:
+ registry.add(dest, ExistingFile(required=True))
+ continue
+
+ if install_type == self.OPTIONAL_EXISTS:
+ registry.add(dest, ExistingFile(required=False))
+ continue
+
+ if install_type in (self.PATTERN_SYMLINK, self.PATTERN_COPY):
+ _, base, pattern, dest = entry
+ finder = FileFinder(base, find_executables=False)
+ paths = [f[0] for f in finder.find(pattern)]
+
+ if install_type == self.PATTERN_SYMLINK:
+ cls = AbsoluteSymlinkFile
+ else:
+ cls = File
+
+ for path in paths:
+ source = mozpath.join(base, path)
+ registry.add(mozpath.join(dest, path), cls(source))
+
+ continue
+
+ if install_type == self.PREPROCESS:
+ defines = self._decode_field_entry(entry[4])
+ if defines_override:
+ defines.update(defines_override)
+ registry.add(dest, PreprocessedFile(entry[1],
+ depfile_path=entry[2],
+ marker=entry[3],
+ defines=defines,
+ extra_depends=self._source_files,
+ silence_missing_directive_warnings=bool(int(entry[5]))))
+
+ continue
+
+ if install_type == self.CONTENT:
+ # GeneratedFile expects the buffer interface, which the unicode
+ # type doesn't have, so encode to a str.
+ content = self._decode_field_entry(entry[1]).encode('utf-8')
+ registry.add(dest, GeneratedFile(content))
+ continue
+
+ raise Exception('Unknown install type defined in manifest: %d' %
+ install_type)
+
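+# Example round trip (editor's sketch; all paths are hypothetical):
+#
+#     m = InstallManifest()
+#     m.add_copy('/src/foo.txt', 'dist/foo.txt')
+#     m.add_symlink('/src/bar.txt', 'dist/bar.txt')
+#     m.write(path='/obj/install.manifest')
+#
+#     m2 = InstallManifest(path='/obj/install.manifest')
+#     assert m == m2
+#
+#     from mozpack.copier import FileRegistry
+#     registry = FileRegistry()
+#     m2.populate_registry(registry)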
+
+class InstallManifestNoSymlinks(InstallManifest):
+ """Like InstallManifest, but files are never installed as symbolic links.
+ Instead, they are always copied.
+ """
+
+ def add_symlink(self, source, dest):
+ """A wrapper that accept symlink entries and install file copies.
+
+ source will be copied to dest.
+ """
+ self.add_copy(source, dest)
+
+ def add_pattern_symlink(self, base, pattern, dest):
+ """A wrapper that accepts symlink patterns and installs file copies.
+
+ Files discovered with ``pattern`` will be copied to ``dest``.
+ """
+ self.add_pattern_copy(base, pattern, dest)
diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py
new file mode 100644
index 000000000..a1ada8594
--- /dev/null
+++ b/python/mozbuild/mozpack/mozjar.py
@@ -0,0 +1,816 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from io import BytesIO
+import struct
+import zlib
+import os
+from zipfile import (
+ ZIP_STORED,
+ ZIP_DEFLATED,
+)
+from collections import OrderedDict
+from urlparse import urlparse, ParseResult
+import mozpack.path as mozpath
+
+JAR_STORED = ZIP_STORED
+JAR_DEFLATED = ZIP_DEFLATED
+MAX_WBITS = 15
+
+
+class JarReaderError(Exception):
+ '''Error type for Jar reader errors.'''
+
+
+class JarWriterError(Exception):
+ '''Error type for Jar writer errors.'''
+
+
+class JarStruct(object):
+ '''
+ Helper used to define ZIP archive raw data structures. Data structures
+ handled by this helper all start with a magic number, defined in the
+ subclass's MAGIC field as a 32-bit unsigned integer, followed by data
+ structured as described in the subclass's STRUCT field.
+
+ The STRUCT field contains a list of (name, type) pairs where name is a
+ field name, and the type can be one of 'uint32', 'uint16' or one of the
+ field names. In the latter case, the field is considered to be a string
+ buffer with a length given in that field.
+ For example,
+ STRUCT = [
+ ('version', 'uint32'),
+ ('filename_size', 'uint16'),
+ ('filename', 'filename_size')
+ ]
+ describes a structure with a 'version' 32-bit unsigned integer field,
+ followed by a 'filename_size' 16-bit unsigned integer field, followed by a
+ filename_size-long string buffer 'filename'.
+
+ Fields that are used as other fields' sizes are not stored in objects. In the
+ above example, an instance of such subclass would only have two attributes:
+ obj['version']
+ obj['filename']
+ filename_size would be obtained with len(obj['filename']).
+
+ Instances of JarStruct subclasses can be initialized either from existing
+ data (deserialized) or with empty fields.
+ '''
+
+ TYPE_MAPPING = {'uint32': ('I', 4), 'uint16': ('H', 2)}
+
+ def __init__(self, data=None):
+ '''
+ Create an instance from the given data. Data may be omitted to create
+ an instance with empty fields.
+ '''
+ assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
+ self.size_fields = set(t for t in self.STRUCT.itervalues()
+ if not t in JarStruct.TYPE_MAPPING)
+ self._values = {}
+ if data:
+ self._init_data(data)
+ else:
+ self._init_empty()
+
+ def _init_data(self, data):
+ '''
+ Initialize an instance from data, following the data structure
+ described in self.STRUCT. The self.MAGIC signature is expected at
+ data[:4].
+ '''
+ assert data is not None
+ self.signature, size = JarStruct.get_data('uint32', data)
+ if self.signature != self.MAGIC:
+ raise JarReaderError('Bad magic')
+ offset = size
+ # For all fields used as other fields sizes, keep track of their value
+ # separately.
+ sizes = dict((t, 0) for t in self.size_fields)
+ for name, t in self.STRUCT.iteritems():
+ if t in JarStruct.TYPE_MAPPING:
+ value, size = JarStruct.get_data(t, data[offset:])
+ else:
+ size = sizes[t]
+ value = data[offset:offset + size]
+ if isinstance(value, memoryview):
+ value = value.tobytes()
+ if not name in sizes:
+ self._values[name] = value
+ else:
+ sizes[name] = value
+ offset += size
+
+ def _init_empty(self):
+ '''
+ Initialize an instance with empty fields.
+ '''
+ self.signature = self.MAGIC
+ for name, t in self.STRUCT.iteritems():
+ if name in self.size_fields:
+ continue
+ self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ''
+
+ @staticmethod
+ def get_data(type, data):
+ '''
+ Deserialize a single field of the given type (must be one of
+ JarStruct.TYPE_MAPPING) from the beginning of the given data.
+ '''
+ assert type in JarStruct.TYPE_MAPPING
+ assert data is not None
+ format, size = JarStruct.TYPE_MAPPING[type]
+ data = data[:size]
+ if isinstance(data, memoryview):
+ data = data.tobytes()
+ return struct.unpack('<' + format, data)[0], size
+
+ def serialize(self):
+ '''
+ Serialize the data structure according to the data structure definition
+ from self.STRUCT.
+ '''
+ serialized = struct.pack('<I', self.signature)
+ sizes = dict((t, name) for name, t in self.STRUCT.iteritems()
+ if not t in JarStruct.TYPE_MAPPING)
+ for name, t in self.STRUCT.iteritems():
+ if t in JarStruct.TYPE_MAPPING:
+ format, size = JarStruct.TYPE_MAPPING[t]
+ if name in sizes:
+ value = len(self[sizes[name]])
+ else:
+ value = self[name]
+ serialized += struct.pack('<' + format, value)
+ else:
+ serialized += self[name]
+ return serialized
+
+ @property
+ def size(self):
+ '''
+ Return the size of the data structure, given the current values of all
+ variable length fields.
+ '''
+ size = JarStruct.TYPE_MAPPING['uint32'][1]
+ for name, type in self.STRUCT.iteritems():
+ if type in JarStruct.TYPE_MAPPING:
+ size += JarStruct.TYPE_MAPPING[type][1]
+ else:
+ size += len(self[name])
+ return size
+
+ def __getitem__(self, key):
+ return self._values[key]
+
+ def __setitem__(self, key, value):
+ if not key in self.STRUCT:
+ raise KeyError(key)
+ if key in self.size_fields:
+ raise AttributeError("can't set attribute")
+ self._values[key] = value
+
+ def __contains__(self, key):
+ return key in self._values
+
+ def __iter__(self):
+ return self._values.iteritems()
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__,
+ ' '.join('%s=%s' % (n, v) for n, v in self))
+
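+# Editor's sketch: a hypothetical subclass and a serialize/parse round
+# trip, illustrating the MAGIC/STRUCT contract described above.
+#
+#     class Dummy(JarStruct):
+#         MAGIC = 0xdeadbeef
+#         STRUCT = OrderedDict([
+#             ('name_size', 'uint16'),
+#             ('name', 'name_size'),
+#         ])
+#
+#     d = Dummy()
+#     d['name'] = 'hello'
+#     assert Dummy(d.serialize())['name'] == 'hello'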
+
+class JarCdirEnd(JarStruct):
+ '''
+ End of central directory record.
+ '''
+ MAGIC = 0x06054b50
+ STRUCT = OrderedDict([
+ ('disk_num', 'uint16'),
+ ('cdir_disk', 'uint16'),
+ ('disk_entries', 'uint16'),
+ ('cdir_entries', 'uint16'),
+ ('cdir_size', 'uint32'),
+ ('cdir_offset', 'uint32'),
+ ('comment_size', 'uint16'),
+ ('comment', 'comment_size'),
+ ])
+
+CDIR_END_SIZE = JarCdirEnd().size
+
+
+class JarCdirEntry(JarStruct):
+ '''
+ Central directory file header
+ '''
+ MAGIC = 0x02014b50
+ STRUCT = OrderedDict([
+ ('creator_version', 'uint16'),
+ ('min_version', 'uint16'),
+ ('general_flag', 'uint16'),
+ ('compression', 'uint16'),
+ ('lastmod_time', 'uint16'),
+ ('lastmod_date', 'uint16'),
+ ('crc32', 'uint32'),
+ ('compressed_size', 'uint32'),
+ ('uncompressed_size', 'uint32'),
+ ('filename_size', 'uint16'),
+ ('extrafield_size', 'uint16'),
+ ('filecomment_size', 'uint16'),
+ ('disknum', 'uint16'),
+ ('internal_attr', 'uint16'),
+ ('external_attr', 'uint32'),
+ ('offset', 'uint32'),
+ ('filename', 'filename_size'),
+ ('extrafield', 'extrafield_size'),
+ ('filecomment', 'filecomment_size'),
+ ])
+
+
+class JarLocalFileHeader(JarStruct):
+ '''
+ Local file header
+ '''
+ MAGIC = 0x04034b50
+ STRUCT = OrderedDict([
+ ('min_version', 'uint16'),
+ ('general_flag', 'uint16'),
+ ('compression', 'uint16'),
+ ('lastmod_time', 'uint16'),
+ ('lastmod_date', 'uint16'),
+ ('crc32', 'uint32'),
+ ('compressed_size', 'uint32'),
+ ('uncompressed_size', 'uint32'),
+ ('filename_size', 'uint16'),
+ ('extra_field_size', 'uint16'),
+ ('filename', 'filename_size'),
+ ('extra_field', 'extra_field_size'),
+ ])
+
+
+class JarFileReader(object):
+ '''
+ File-like class for use by JarReader to give access to individual files
+ within a Jar archive.
+ '''
+ def __init__(self, header, data):
+ '''
+ Initialize a JarFileReader. header is the local file header
+ corresponding to the file in the jar archive, data a buffer containing
+ the file data.
+ '''
+ assert header['compression'] in [JAR_DEFLATED, JAR_STORED]
+ self._data = data
+ # Copy some local file header fields.
+ for name in ['filename', 'compressed_size',
+ 'uncompressed_size', 'crc32']:
+ setattr(self, name, header[name])
+ self.compressed = header['compression'] == JAR_DEFLATED
+
+ def read(self, length=-1):
+ '''
+ Read some amount of uncompressed data.
+ '''
+ return self.uncompressed_data.read(length)
+
+ def readlines(self):
+ '''
+ Return a list containing all the lines of data in the uncompressed
+ data.
+ '''
+ return self.read().splitlines(True)
+
+ def __iter__(self):
+ '''
+ Iterator, to support the "for line in fileobj" construct.
+ '''
+ return iter(self.readlines())
+
+ def seek(self, pos, whence=os.SEEK_SET):
+ '''
+ Change the current position in the uncompressed data. Subsequent reads
+ will start from there.
+ '''
+ return self.uncompressed_data.seek(pos, whence)
+
+ def close(self):
+ '''
+ Free the uncompressed data buffer.
+ '''
+ self.uncompressed_data.close()
+
+ @property
+ def compressed_data(self):
+ '''
+ Return the raw compressed data.
+ '''
+ return self._data[:self.compressed_size]
+
+ @property
+ def uncompressed_data(self):
+ '''
+ Return the uncompressed data.
+ '''
+ if hasattr(self, '_uncompressed_data'):
+ return self._uncompressed_data
+ data = self.compressed_data
+ if self.compressed:
+ data = zlib.decompress(data.tobytes(), -MAX_WBITS)
+ else:
+ data = data.tobytes()
+ if len(data) != self.uncompressed_size:
+ raise JarReaderError('Corrupted file? %s' % self.filename)
+ self._uncompressed_data = BytesIO(data)
+ return self._uncompressed_data
+
+
+class JarReader(object):
+ '''
+ Class with methods to read Jar files. Can open standard jar files as well
+ as Mozilla jar files (see further details in the JarWriter documentation).
+ '''
+ def __init__(self, file=None, fileobj=None, data=None):
+ '''
+ Open the given file as a Jar archive. If a file-like object is given,
+ it is used instead of opening the named file.
+ '''
+ if fileobj:
+ data = fileobj.read()
+ elif file:
+ data = open(file, 'rb').read()
+ self._data = memoryview(data)
+ # The End of Central Directory Record has a variable size because of
+ # comments it may contain, so scan for it from the end of the file.
+ offset = -CDIR_END_SIZE
+ while True:
+ signature = JarStruct.get_data('uint32', self._data[offset:])[0]
+ if signature == JarCdirEnd.MAGIC:
+ break
+ if offset == -len(self._data):
+ raise JarReaderError('Not a jar?')
+ offset -= 1
+ self._cdir_end = JarCdirEnd(self._data[offset:])
+
+ def close(self):
+ '''
+ Free some resources associated with the Jar.
+ '''
+ del self._data
+
+ @property
+ def entries(self):
+ '''
+ Return an ordered dict of central directory entries, indexed by
+ filename, in the order they appear in the Jar archive central
+ directory. Directory entries are skipped.
+ '''
+ if hasattr(self, '_entries'):
+ return self._entries
+ preload = 0
+ if self.is_optimized:
+ preload = JarStruct.get_data('uint32', self._data)[0]
+ entries = OrderedDict()
+ offset = self._cdir_end['cdir_offset']
+ for e in xrange(self._cdir_end['cdir_entries']):
+ entry = JarCdirEntry(self._data[offset:])
+ offset += entry.size
+ # Creator host system. 0 is MSDOS, 3 is Unix
+ host = entry['creator_version'] >> 8
+ # External attributes values depend on host above. On Unix the
+ # higher bits are the stat.st_mode value. On MSDOS, the lower bits
+ # are the FAT attributes.
+ xattr = entry['external_attr']
+ # Skip directories
+ if (host == 0 and xattr & 0x10) or (host == 3 and
+ xattr & (040000 << 16)):
+ continue
+ entries[entry['filename']] = entry
+ if entry['offset'] < preload:
+ self._last_preloaded = entry['filename']
+ self._entries = entries
+ return entries
+
+ @property
+ def is_optimized(self):
+ '''
+ Return whether the jar archive is optimized.
+ '''
+ # In optimized jars, the central directory is at the beginning of the
+ # file, after a single 32-bit value, which is the length of data
+ # preloaded.
+ return self._cdir_end['cdir_offset'] == \
+ JarStruct.TYPE_MAPPING['uint32'][1]
+
+ @property
+ def last_preloaded(self):
+ '''
+ Return the name of the last file that is set to be preloaded.
+ See JarWriter documentation for more details on preloading.
+ '''
+ if hasattr(self, '_last_preloaded'):
+ return self._last_preloaded
+ self._last_preloaded = None
+ self.entries
+ return self._last_preloaded
+
+ def _getreader(self, entry):
+ '''
+ Helper to create a JarFileReader corresponding to the given central
+ directory entry.
+ '''
+ header = JarLocalFileHeader(self._data[entry['offset']:])
+ for key, value in entry:
+ if key in header and header[key] != value:
+ raise JarReaderError('Central directory and file header ' +
+ 'mismatch. Corrupted archive?')
+ return JarFileReader(header,
+ self._data[entry['offset'] + header.size:])
+
+ def __iter__(self):
+ '''
+ Iterate over all files in the Jar archive, in the form of
+ JarFileReaders.
+ for file in jarReader:
+ ...
+ '''
+ for entry in self.entries.itervalues():
+ yield self._getreader(entry)
+
+ def __getitem__(self, name):
+ '''
+ Get a JarFileReader for the given file name.
+ '''
+ return self._getreader(self.entries[name])
+
+ def __contains__(self, name):
+ '''
+ Return whether the given file name appears in the Jar archive.
+ '''
+ return name in self.entries
+
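+# Example usage (editor's sketch; the archive path is hypothetical):
+#
+#     jar = JarReader('/tmp/omni.ja')
+#     if 'chrome.manifest' in jar:
+#         print jar['chrome.manifest'].read()
+#     for f in jar:
+#         print f.filename, f.compressed, f.uncompressed_size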
+
+class JarWriter(object):
+ '''
+ Class with methods to write Jar files. Can write more-or-less standard jar
+ archives as well as jar archives optimized for Gecko. See the documentation
+ for the close() member function for a description of both layouts.
+ '''
+ def __init__(self, file=None, fileobj=None, compress=True, optimize=True,
+ compress_level=9):
+ '''
+ Initialize a Jar archive in the given file. Use the given file-like
+ object if one is given instead of opening the given file name.
+ The compress option determines the default behavior for storing data
+ in the jar archive. The optimize option determines whether the jar
+ archive should be optimized for Gecko or not. ``compress_level``
+ defines the zlib compression level. It must be a value between 0 and 9
+ and defaults to 9, the highest and slowest level of compression.
+ '''
+ if fileobj:
+ self._data = fileobj
+ else:
+ self._data = open(file, 'wb')
+ self._compress = compress
+ self._compress_level = compress_level
+ self._contents = OrderedDict()
+ self._last_preloaded = None
+ self._optimize = optimize
+
+ def __enter__(self):
+ '''
+ Context manager __enter__ method for JarWriter.
+ '''
+ return self
+
+ def __exit__(self, type, value, tb):
+ '''
+ Context manager __exit__ method for JarWriter.
+ '''
+ self.finish()
+
+ def finish(self):
+ '''
+ Flush and close the Jar archive.
+
+ Standard jar archives are laid out like the following:
+ - Local file header 1
+ - File data 1
+ - Local file header 2
+ - File data 2
+ - (...)
+ - Central directory entry pointing at Local file header 1
+ - Central directory entry pointing at Local file header 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+
+ Jar archives optimized for Gecko are laid out like the following:
+ - 32-bit unsigned integer giving the amount of data to preload.
+ - Central directory entry pointing at Local file header 1
+ - Central directory entry pointing at Local file header 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+ - Local file header 1
+ - File data 1
+ - Local file header 2
+ - File data 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+ The duplication of the End of central directory is to accommodate some
+ Zip reading tools that want an end of central directory structure to
+ follow the central directory entries.
+ '''
+ offset = 0
+ headers = {}
+ preload_size = 0
+ # Prepare central directory entries
+ for entry, content in self._contents.itervalues():
+ header = JarLocalFileHeader()
+ for name in entry.STRUCT:
+ if name in header:
+ header[name] = entry[name]
+ entry['offset'] = offset
+ offset += len(content) + header.size
+ if entry['filename'] == self._last_preloaded:
+ preload_size = offset
+ headers[entry] = header
+ # Prepare end of central directory
+ end = JarCdirEnd()
+ end['disk_entries'] = len(self._contents)
+ end['cdir_entries'] = end['disk_entries']
+ end['cdir_size'] = reduce(lambda x, y: x + y[0].size,
+ self._contents.values(), 0)
+ # On optimized archives, store the preloaded size and the central
+ # directory entries, followed by the first end of central directory.
+ if self._optimize:
+ end['cdir_offset'] = 4
+ offset = end['cdir_size'] + end['cdir_offset'] + end.size
+ if preload_size:
+ preload_size += offset
+ self._data.write(struct.pack('<I', preload_size))
+ for entry, _ in self._contents.itervalues():
+ entry['offset'] += offset
+ self._data.write(entry.serialize())
+ self._data.write(end.serialize())
+ # Store local file entries followed by compressed data
+ for entry, content in self._contents.itervalues():
+ self._data.write(headers[entry].serialize())
+ self._data.write(content)
+ # On non optimized archives, store the central directory entries.
+ if not self._optimize:
+ end['cdir_offset'] = offset
+ for entry, _ in self._contents.itervalues():
+ self._data.write(entry.serialize())
+ # Store the end of central directory.
+ self._data.write(end.serialize())
+ self._data.close()
+
+ def add(self, name, data, compress=None, mode=None, skip_duplicates=False):
+ '''
+ Add a new member to the jar archive, with the given name and the given
+ data.
+ The compress option indicates if the given data should be compressed
+ (True), not compressed (False), or compressed according to the default
+ defined when creating the JarWriter (None).
+ When the data should be compressed (True or None with self.compress ==
+ True), it is only really compressed if the compressed size is smaller
+ than the uncompressed size.
+ The mode option gives the unix permissions that should be stored
+ for the jar entry.
+ If skip_duplicates is set to True, adding a member whose name is
+ already present will not raise an exception.
+ The given data may be a buffer, a file-like instance, a Deflater or a
+ JarFileReader instance. The latter two allow avoiding decompressing
+ data only to recompress it.
+ '''
+ name = mozpath.normsep(name)
+
+ if name in self._contents and not skip_duplicates:
+ raise JarWriterError("File %s already in JarWriter" % name)
+ if compress is None:
+ compress = self._compress
+ if (isinstance(data, JarFileReader) and data.compressed == compress) \
+ or (isinstance(data, Deflater) and data.compress == compress):
+ deflater = data
+ else:
+ deflater = Deflater(compress, compress_level=self._compress_level)
+ if isinstance(data, basestring):
+ deflater.write(data)
+ elif hasattr(data, 'read'):
+ if hasattr(data, 'seek'):
+ data.seek(0)
+ deflater.write(data.read())
+ else:
+ raise JarWriterError("Don't know how to handle %s" %
+ type(data))
+ # Fill a central directory entry for this new member.
+ entry = JarCdirEntry()
+ entry['creator_version'] = 20
+ if mode is not None:
+ # Set creator host system (upper byte of creator_version)
+ # to 3 (Unix) so mode is honored when there is one.
+ entry['creator_version'] |= 3 << 8
+ entry['external_attr'] = (mode & 0xFFFF) << 16L
+ if deflater.compressed:
+ entry['min_version'] = 20 # Version 2.0 supports deflated streams
+ entry['general_flag'] = 2 # Max compression
+ entry['compression'] = JAR_DEFLATED
+ else:
+ entry['min_version'] = 10 # Version 1.0 for stored streams
+ entry['general_flag'] = 0
+ entry['compression'] = JAR_STORED
+ # January 1st, 2010. See bug 592369.
+ entry['lastmod_date'] = ((2010 - 1980) << 9) | (1 << 5) | 1
+ entry['lastmod_time'] = 0
+ entry['crc32'] = deflater.crc32
+ entry['compressed_size'] = deflater.compressed_size
+ entry['uncompressed_size'] = deflater.uncompressed_size
+ entry['filename'] = name
+ self._contents[name] = entry, deflater.compressed_data
+
+ def preload(self, files):
+ '''
+ Set which members of the jar archive should be preloaded when opening
+ the archive in Gecko. This reorders the members according to the order
+ of the given list.
+ '''
+ new_contents = OrderedDict()
+ for f in files:
+ if not f in self._contents:
+ continue
+ new_contents[f] = self._contents[f]
+ self._last_preloaded = f
+ for f in self._contents:
+ if not f in new_contents:
+ new_contents[f] = self._contents[f]
+ self._contents = new_contents
+
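+# Example usage (editor's sketch; names and content are hypothetical):
+#
+#     with JarWriter('/tmp/out.jar', optimize=False) as jar:
+#         jar.add('dir/file.txt', 'some text', compress=True)
+#         jar.add('image.png', open('/tmp/image.png', 'rb'), compress=False)
+#     # finish() runs on exit, writing headers, data and the central
+#     # directory in the standard layout described above.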
+
+class Deflater(object):
+ '''
+ File-like interface to zlib compression. The data is actually not
+ compressed unless the compressed form is smaller than the uncompressed
+ data.
+ '''
+ def __init__(self, compress=True, compress_level=9):
+ '''
+ Initialize a Deflater. The compress argument determines whether to
+ try to compress at all.
+ '''
+ self._data = BytesIO()
+ self.compress = compress
+ if compress:
+ self._deflater = zlib.compressobj(compress_level, zlib.DEFLATED,
+ -MAX_WBITS)
+ self._deflated = BytesIO()
+ else:
+ self._deflater = None
+
+ def write(self, data):
+ '''
+ Append a buffer to the Deflater.
+ '''
+ self._data.write(data)
+ if self.compress:
+ if self._deflater:
+ if isinstance(data, memoryview):
+ data = data.tobytes()
+ self._deflated.write(self._deflater.compress(data))
+ else:
+ raise JarWriterError("Can't write after flush")
+
+ def close(self):
+ '''
+ Close the Deflater.
+ '''
+ self._data.close()
+ if self.compress:
+ self._deflated.close()
+
+ def _flush(self):
+ '''
+ Flush the underlying zlib compression object.
+ '''
+ if self.compress and self._deflater:
+ self._deflated.write(self._deflater.flush())
+ self._deflater = None
+
+ @property
+ def compressed(self):
+ '''
+ Return whether the data should be compressed.
+ '''
+ return self._compressed_size < self.uncompressed_size
+
+ @property
+ def _compressed_size(self):
+ '''
+ Return the real compressed size of the data written to the Deflater. If
+ the Deflater is set not to compress, the uncompressed size is returned.
+ Otherwise, the actual compressed size is returned, whether or not it is
+ a win over the uncompressed size.
+ '''
+ if self.compress:
+ self._flush()
+ return self._deflated.tell()
+ return self.uncompressed_size
+
+ @property
+ def compressed_size(self):
+ '''
+ Return the compressed size of the data written to the Deflater. If the
+ Deflater is set not to compress, the uncompressed size is returned.
+ Otherwise, if the data should not be compressed (the real compressed
+ size is bigger than the uncompressed size), return the uncompressed
+ size.
+ '''
+ if self.compressed:
+ return self._compressed_size
+ return self.uncompressed_size
+
+ @property
+ def uncompressed_size(self):
+ '''
+ Return the size of the data written to the Deflater.
+ '''
+ return self._data.tell()
+
+ @property
+ def crc32(self):
+ '''
+ Return the crc32 of the data written to the Deflater.
+ '''
+ return zlib.crc32(self._data.getvalue()) & 0xffffffff
+
+ @property
+ def compressed_data(self):
+ '''
+ Return the compressed data, if the data should be compressed (real
+ compressed size smaller than the uncompressed size), or the
+ uncompressed data otherwise.
+ '''
+ if self.compressed:
+ return self._deflated.getvalue()
+ return self._data.getvalue()
+
+
+class JarLog(dict):
+ '''
+ Helper to read the file Gecko generates when setting MOZ_JAR_LOG_FILE.
+ The jar log is then available as a dict with the jar path as key (see
+ canonicalize for more details on the key value), and the corresponding
+ access log as a list value. Only the first access to a given member of
+ a jar is stored.
+ '''
+ def __init__(self, file=None, fileobj=None):
+ if not fileobj:
+ fileobj = open(file, 'r')
+ urlmap = {}
+ for line in fileobj:
+ url, path = line.strip().split(None, 1)
+ if not url or not path:
+ continue
+ if url not in urlmap:
+ urlmap[url] = JarLog.canonicalize(url)
+ jar = urlmap[url]
+ entry = self.setdefault(jar, [])
+ if path not in entry:
+ entry.append(path)
+
+ @staticmethod
+ def canonicalize(url):
+ '''
+ The jar path is stored in a MOZ_JAR_LOG_FILE log as a url. This method
+ returns a unique value corresponding to such urls.
+ - file:///{path} becomes {path}
+ - jar:file:///{path}!/{subpath} becomes ({path}, {subpath})
+ - jar:jar:file:///{path}!/{subpath}!/{subpath2} becomes
+ ({path}, {subpath}, {subpath2})
+ '''
+ if not isinstance(url, ParseResult):
+ # Assume that if it doesn't start with jar: or file:, it's a path.
+ if not url.startswith(('jar:', 'file:')):
+ url = 'file:///' + os.path.abspath(url)
+ url = urlparse(url)
+ assert url.scheme
+ assert url.scheme in ('jar', 'file')
+ if url.scheme == 'jar':
+ path = JarLog.canonicalize(url.path)
+ if isinstance(path, tuple):
+ return path[:-1] + tuple(path[-1].split('!/', 1))
+ return tuple(path.split('!/', 1))
+ if url.scheme == 'file':
+ assert os.path.isabs(url.path)
+ path = url.path
+ # On Windows, url.path will be /drive:/path ; on Unix systems,
+ # /path. As we want drive:/path instead of /drive:/path on Windows,
+ # remove the leading /.
+ if os.path.isabs(path[1:]):
+ path = path[1:]
+ path = os.path.realpath(path)
+ return mozpath.normsep(os.path.normcase(path))
diff --git a/python/mozbuild/mozpack/packager/__init__.py b/python/mozbuild/mozpack/packager/__init__.py
new file mode 100644
index 000000000..4c98ec3d3
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/__init__.py
@@ -0,0 +1,408 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from mozbuild.preprocessor import Preprocessor
+import re
+import os
+from mozpack.errors import errors
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestChrome,
+ ManifestInterfaces,
+ is_manifest,
+ parse_manifest,
+)
+import mozpack.path as mozpath
+from collections import deque
+
+
+class Component(object):
+ '''
+ Class that represents a component in a package manifest.
+ '''
+ def __init__(self, name, destdir=''):
+ if ' ' in name:
+ errors.fatal('Malformed manifest: space in component name "%s"'
+ % name)
+ self._name = name
+ self._destdir = destdir
+
+ def __repr__(self):
+ s = self.name
+ if self.destdir:
+ s += ' destdir="%s"' % self.destdir
+ return s
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def destdir(self):
+ return self._destdir
+
+ @staticmethod
+ def _triples(lst):
+ '''
+ Split [1, 2, 3, 4, 5, 6, 7] into [(1, 2, 3), (4, 5, 6)].
+ '''
+ return zip(*[iter(lst)] * 3)
+
+ KEY_VALUE_RE = re.compile(r'''
+ \s* # optional whitespace.
+ ([a-zA-Z0-9_]+) # key.
+ \s*=\s* # optional space around =.
+ "([^"]*)" # value without surrounding quotes.
+ (?:\s+|$)
+ ''', re.VERBOSE)
+
+ @staticmethod
+ def _split_options(string):
+ '''
+ Split 'key1="value1" key2="value2"' into
+ {'key1':'value1', 'key2':'value2'}.
+
+ Returned keys and values are all strings.
+
+ Throws ValueError if the input is malformed.
+ '''
+ options = {}
+ splits = Component.KEY_VALUE_RE.split(string)
+ if len(splits) % 3 != 1:
+ # This should never happen -- we expect to always split
+ # into ['', ('key', 'val', '')*].
+ raise ValueError("Bad input")
+ if splits[0]:
+ raise ValueError('Unrecognized input ' + splits[0])
+ for key, val, no_match in Component._triples(splits[1:]):
+ if no_match:
+ raise ValueError('Unrecognized input ' + no_match)
+ options[key] = val
+ return options
+
+ @staticmethod
+ def _split_component_and_options(string):
+ '''
+ Split 'name key1="value1" key2="value2"' into
+ ('name', {'key1':'value1', 'key2':'value2'}).
+
+ Returned name, keys and values are all strings.
+
+ Raises ValueError if the input is malformed.
+ '''
+ splits = string.strip().split(None, 1)
+ if not splits:
+ raise ValueError('No component found')
+ component = splits[0].strip()
+ if not component:
+ raise ValueError('No component found')
+ if not re.match('[a-zA-Z0-9_\-]+$', component):
+ raise ValueError('Bad component name ' + component)
+ options = Component._split_options(splits[1]) if len(splits) > 1 else {}
+ return component, options
+
+ @staticmethod
+ def from_string(string):
+ '''
+ Create a component from a string.
+ '''
+ try:
+ name, options = Component._split_component_and_options(string)
+ except ValueError as e:
+ errors.fatal('Malformed manifest: %s' % e)
+ return
+ destdir = options.pop('destdir', '')
+ if options:
+ errors.fatal('Malformed manifest: options %s not recognized'
+ % options.keys())
+ return Component(name, destdir=destdir)
+
+
+class PackageManifestParser(object):
+ '''
+ Class for parsing of a package manifest, after preprocessing.
+
+ A package manifest is a list of file paths, with some syntactic sugar:
+ [] designates a toplevel component. Example: [xpcom]
+ - in front of a file specifies it to be removed
+ * wildcard support
+ ** expands to all files and zero or more directories
+ ; file comment
+
+ The parser takes input from the preprocessor line by line, and pushes
+ parsed information to a sink object.
+
+ The add and remove methods of the sink object are called with the
+ current Component instance and a path.
+ '''
+ def __init__(self, sink):
+ '''
+ Initialize the package manifest parser with the given sink.
+ '''
+ self._component = Component('')
+ self._sink = sink
+
+ def handle_line(self, str):
+ '''
+ Handle a line of input and push the parsed information to the sink
+ object.
+ '''
+ # Remove comments.
+ str = str.strip()
+ if not str or str.startswith(';'):
+ return
+ if str.startswith('[') and str.endswith(']'):
+ self._component = Component.from_string(str[1:-1])
+ elif str.startswith('-'):
+ str = str[1:]
+ self._sink.remove(self._component, str)
+ elif ',' in str:
+ errors.fatal('Incompatible syntax')
+ else:
+ self._sink.add(self._component, str)
+
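+# Editor's illustration of the syntax handled above, with a hypothetical
+# sink. Given these input lines:
+#
+#     ; comment, ignored
+#     [xpcom]
+#     bin/libxpcom.so
+#     -bin/obsolete.txt
+#
+# handle_line() calls sink.add(Component('xpcom'), 'bin/libxpcom.so')
+# and sink.remove(Component('xpcom'), 'bin/obsolete.txt').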
+
+class PreprocessorOutputWrapper(object):
+ '''
+ File-like helper to handle the preprocessor output and send it to a parser.
+ The parser's handle_line method is called in the relevant errors.context.
+ '''
+ def __init__(self, preprocessor, parser):
+ self._parser = parser
+ self._pp = preprocessor
+
+ def write(self, str):
+ file = os.path.normpath(os.path.abspath(self._pp.context['FILE']))
+ with errors.context(file, self._pp.context['LINE']):
+ self._parser.handle_line(str)
+
+
+def preprocess(input, parser, defines={}):
+ '''
+ Preprocess the file-like input with the given defines, and send the
+ preprocessed output line by line to the given parser.
+ '''
+ pp = Preprocessor()
+ pp.context.update(defines)
+ pp.do_filter('substitution')
+ pp.out = PreprocessorOutputWrapper(pp, parser)
+ pp.do_include(input)
+
+
+def preprocess_manifest(sink, manifest, defines={}):
+ '''
+ Preprocess the given file-like manifest with the given defines, and push
+ the parsed information to a sink. See PackageManifestParser documentation
+ for more details on the sink.
+ '''
+ preprocess(manifest, PackageManifestParser(sink), defines)
+
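+# Editor's sketch of driving the parser end to end; PrintSink and the
+# manifest path are hypothetical:
+#
+#     class PrintSink(object):
+#         def add(self, component, path):
+#             print 'add', component, path
+#         def remove(self, component, path):
+#             print 'remove', component, path
+#
+#     with open('/src/package-manifest.in') as fh:
+#         preprocess_manifest(PrintSink(), fh,
+#                             defines={'MOZ_APP_NAME': 'firefox'})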
+
+class CallDeque(deque):
+ '''
+ Queue of function calls to make.
+ '''
+ def append(self, function, *args):
+ deque.append(self, (errors.get_context(), function, args))
+
+ def execute(self):
+ while True:
+ try:
+ context, function, args = self.popleft()
+ except IndexError:
+ return
+ if context:
+ with errors.context(context[0], context[1]):
+ function(*args)
+ else:
+ function(*args)
+
+
+class SimplePackager(object):
+ '''
+ Helper used to translate and buffer instructions from the
+ SimpleManifestSink to a formatter. Formatters expect some information to be
+ given first that the simple manifest contents can't guarantee before the
+ end of the input.
+ '''
+ def __init__(self, formatter):
+ self.formatter = formatter
+ # Queue for formatter.add_interfaces()/add_manifest() calls.
+ self._queue = CallDeque()
+ # Queue for formatter.add_manifest() calls for ManifestChrome.
+ self._chrome_queue = CallDeque()
+ # Queue for formatter.add() calls.
+ self._file_queue = CallDeque()
+ # All paths containing addons. (key is path, value is whether it
+ # should be packed or unpacked)
+ self._addons = {}
+ # All manifest paths imported.
+ self._manifests = set()
+ # All manifest paths included from some other manifest.
+ self._included_manifests = {}
+ self._closed = False
+
+ # Parsing RDF is complex, and would require an external library to do
+ # properly. Just go with some hackish but probably sufficient regexp
+ UNPACK_ADDON_RE = re.compile(r'''(?:
+ <em:unpack>true</em:unpack>
+ |em:unpack=(?P<quote>["']?)true(?P=quote)
+ )''', re.VERBOSE)
+
+ def add(self, path, file):
+ '''
+ Add the given BaseFile instance with the given path.
+ '''
+ assert not self._closed
+ if is_manifest(path):
+ self._add_manifest_file(path, file)
+ elif path.endswith('.xpt'):
+ self._queue.append(self.formatter.add_interfaces, path, file)
+ else:
+ self._file_queue.append(self.formatter.add, path, file)
+ if mozpath.basename(path) == 'install.rdf':
+ addon = True
+ install_rdf = file.open().read()
+ if self.UNPACK_ADDON_RE.search(install_rdf):
+ addon = 'unpacked'
+ self._addons[mozpath.dirname(path)] = addon
+
+ def _add_manifest_file(self, path, file):
+ '''
+ Add the given BaseFile with manifest file contents with the given path.
+ '''
+ self._manifests.add(path)
+ base = ''
+ if hasattr(file, 'path'):
+ # Find the directory the given path is relative to.
+ b = mozpath.normsep(file.path)
+ if b.endswith('/' + path) or b == path:
+ base = os.path.normpath(b[:-len(path)])
+ for e in parse_manifest(base, path, file.open()):
+ # ManifestResources need to be given after ManifestChrome, so just
+ # put all ManifestChrome in a separate queue to make them first.
+ if isinstance(e, ManifestChrome):
+ # e.move(e.base) just returns a clone of the entry.
+ self._chrome_queue.append(self.formatter.add_manifest,
+ e.move(e.base))
+ elif not isinstance(e, (Manifest, ManifestInterfaces)):
+ self._queue.append(self.formatter.add_manifest, e.move(e.base))
+ # If a binary component is added to an addon, prevent the addon
+ # from being packed.
+ if isinstance(e, ManifestBinaryComponent):
+ addon = mozpath.basedir(e.base, self._addons)
+ if addon:
+ self._addons[addon] = 'unpacked'
+ if isinstance(e, Manifest):
+ if e.flags:
+ errors.fatal('Flags are not supported on ' +
+ '"manifest" entries')
+ self._included_manifests[e.path] = path
+
+ def get_bases(self, addons=True):
+ '''
+ Return all paths under which root manifests have been found. Root
+ manifests are manifests that are included in no other manifest.
+ `addons` indicates whether to include addon bases as well.
+ '''
+ all_bases = set(mozpath.dirname(m)
+ for m in self._manifests
+ - set(self._included_manifests))
+ if not addons:
+ all_bases -= set(self._addons)
+ else:
+ # Addon bases are still wanted here, in case a detected addon
+ # doesn't have a non-included manifest for some reason.
+ all_bases |= set(self._addons)
+ return all_bases
+
+ def close(self):
+ '''
+ Push all instructions to the formatter.
+ '''
+ self._closed = True
+
+ bases = self.get_bases()
+ broken_bases = sorted(
+ m for m, includer in self._included_manifests.iteritems()
+ if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases))
+ for m in broken_bases:
+ errors.fatal('"%s" is included from "%s", which is outside "%s"' %
+ (m, self._included_manifests[m],
+ mozpath.basedir(m, bases)))
+ for base in sorted(bases):
+ self.formatter.add_base(base, self._addons.get(base, False))
+ self._chrome_queue.execute()
+ self._queue.execute()
+ self._file_queue.execute()
+
+
+class SimpleManifestSink(object):
+ '''
+ Parser sink for "simple" package manifests. Simple package manifests use
+ the format described in the PackageManifestParser documentation, but don't
+ support file removals, and require manifests, interfaces and chrome data to
+    be explicitly listed.
+ Entries starting with bin/ are searched under bin/ in the FileFinder, but
+ are packaged without the bin/ prefix.
+ '''
+ def __init__(self, finder, formatter):
+ '''
+ Initialize the SimpleManifestSink. The given FileFinder is used to
+ get files matching the patterns given in the manifest. The given
+ formatter does the packaging job.
+ '''
+ self._finder = finder
+ self.packager = SimplePackager(formatter)
+ self._closed = False
+ self._manifests = set()
+
+ @staticmethod
+ def normalize_path(path):
+ '''
+ Remove any bin/ prefix.
+ '''
+ if mozpath.basedir(path, ['bin']) == 'bin':
+ return mozpath.relpath(path, 'bin')
+ return path
+
+ def add(self, component, pattern):
+ '''
+        Add files matching the given pattern to the given component.
+ '''
+ assert not self._closed
+ added = False
+ for p, f in self._finder.find(pattern):
+ added = True
+ if is_manifest(p):
+ self._manifests.add(p)
+            dest = mozpath.join(component.destdir,
+                                SimpleManifestSink.normalize_path(p))
+ self.packager.add(dest, f)
+ if not added:
+ errors.error('Missing file(s): %s' % pattern)
+
+ def remove(self, component, pattern):
+ '''
+        Remove files matching the given pattern from the given component.
+ '''
+ assert not self._closed
+ errors.fatal('Removal is unsupported')
+
+ def close(self, auto_root_manifest=True):
+ '''
+ Add possibly missing bits and push all instructions to the formatter.
+ '''
+ if auto_root_manifest:
+ # Simple package manifests don't contain the root manifests, so
+ # find and add them.
+ paths = [mozpath.dirname(m) for m in self._manifests]
+ path = mozpath.dirname(mozpath.commonprefix(paths))
+ for p, f in self._finder.find(mozpath.join(path,
+ 'chrome.manifest')):
+            if p not in self._manifests:
+ self.packager.add(SimpleManifestSink.normalize_path(p), f)
+ self.packager.close()
diff --git a/python/mozbuild/mozpack/packager/formats.py b/python/mozbuild/mozpack/packager/formats.py
new file mode 100644
index 000000000..c4adabab0
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/formats.py
@@ -0,0 +1,324 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestInterfaces,
+ ManifestChrome,
+ ManifestBinaryComponent,
+ ManifestResource,
+)
+from urlparse import urlparse
+import mozpack.path as mozpath
+from mozpack.files import (
+ ManifestFile,
+ XPTFile,
+)
+from mozpack.copier import (
+ FileRegistry,
+ FileRegistrySubtree,
+ Jarrer,
+)
+
+STARTUP_CACHE_PATHS = [
+ 'jsloader',
+ 'jssubloader',
+]
+
+'''
+Formatters are classes receiving packaging instructions and creating the
+appropriate package layout.
+
+There are three distinct formatters, each handling one of the different chrome
+formats:
+ - flat: essentially, copies files from the source with the same file system
+    layout. Manifest entries are grouped into a single manifest per directory,
+    as are XPT interfaces.
+ - jar: chrome content is packaged in jar files.
+ - omni: chrome content, modules, non-binary components, and many other
+ elements are packaged in an omnijar file for each base directory.
+
+The base interface provides the following methods:
+ - add_base(path [, addon])
+ Register a base directory for an application or GRE, or an addon.
+ Base directories usually contain a root manifest (manifests not
+ included in any other manifest) named chrome.manifest.
+ The optional addon argument tells whether the base directory
+ is that of a packed addon (True), unpacked addon ('unpacked') or
+ otherwise (False).
+ - add(path, content)
+      Add the given content (BaseFile instance) at the given virtual path.
+ - add_interfaces(path, content)
+ Add the given content (BaseFile instance) and link it to other
+ interfaces in the parent directory of the given virtual path.
+ - add_manifest(entry)
+ Add a ManifestEntry.
+ - contains(path)
+      Return whether the given virtual path is known to the formatter.
+
+The virtual paths mentioned above are paths as they would be with a flat
+chrome format.
+
+Formatters all take a FileCopier instance they will fill with the packaged
+data.
+'''
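+# A minimal usage sketch of the interface described above (FileRegistry is
+# imported above; GeneratedFile comes from mozpack.files):
+#
+#   registry = FileRegistry()
+#   formatter = FlatFormatter(registry)
+#   formatter.add_base('')
+#   formatter.add('chrome/foo.js', GeneratedFile('// js'))
+#   formatter.contains('chrome/foo.js')  # True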
+
+
+class PiecemealFormatter(object):
+ '''
+ Generic formatter that dispatches across different sub-formatters
+ according to paths.
+ '''
+ def __init__(self, copier):
+ assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
+ self.copier = copier
+ self._sub_formatter = {}
+ self._frozen_bases = False
+
+ def add_base(self, base, addon=False):
+        # Only allow adding base directories before any call to _get_base().
+ assert not self._frozen_bases
+ assert base not in self._sub_formatter
+ self._add_base(base, addon)
+
+ def _get_base(self, path):
+ '''
+ Return the deepest base directory containing the given path.
+ '''
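+        # For example, with bases '' and 'app' registered,
+        # _get_base('app/chrome/foo') returns ('app', 'chrome/foo').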
+ self._frozen_bases = True
+ base = mozpath.basedir(path, self._sub_formatter.keys())
+ relpath = mozpath.relpath(path, base) if base else path
+ return base, relpath
+
+ def add(self, path, content):
+ base, relpath = self._get_base(path)
+ if base is None:
+ return self.copier.add(relpath, content)
+ return self._sub_formatter[base].add(relpath, content)
+
+ def add_manifest(self, entry):
+ base, relpath = self._get_base(entry.base)
+ assert base is not None
+ return self._sub_formatter[base].add_manifest(entry.move(relpath))
+
+ def add_interfaces(self, path, content):
+ base, relpath = self._get_base(path)
+ assert base is not None
+ return self._sub_formatter[base].add_interfaces(relpath, content)
+
+ def contains(self, path):
+ assert '*' not in path
+ base, relpath = self._get_base(path)
+ if base is None:
+ return self.copier.contains(relpath)
+ return self._sub_formatter[base].contains(relpath)
+
+
+class FlatFormatter(PiecemealFormatter):
+ '''
+ Formatter for the flat package format.
+ '''
+ def _add_base(self, base, addon=False):
+ self._sub_formatter[base] = FlatSubFormatter(
+ FileRegistrySubtree(base, self.copier))
+
+
+class FlatSubFormatter(object):
+ '''
+ Sub-formatter for the flat package format.
+ '''
+ def __init__(self, copier):
+ assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
+ self.copier = copier
+
+ def add(self, path, content):
+ self.copier.add(path, content)
+
+ def add_manifest(self, entry):
+ # Store manifest entries in a single manifest per directory, named
+ # after their parent directory, except for root manifests, all named
+ # chrome.manifest.
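+        # For example, an entry with base 'chrome/browser' lands in
+        # 'chrome/browser/browser.manifest', which is in turn referenced
+        # from 'chrome/chrome.manifest', itself referenced from the root
+        # chrome.manifest.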
+ if entry.base:
+ name = mozpath.basename(entry.base)
+ else:
+ name = 'chrome'
+ path = mozpath.normpath(mozpath.join(entry.base, '%s.manifest' % name))
+ if not self.copier.contains(path):
+ # Add a reference to the manifest file in the parent manifest, if
+ # the manifest file is not a root manifest.
+ if entry.base:
+ parent = mozpath.dirname(entry.base)
+ relbase = mozpath.basename(entry.base)
+ relpath = mozpath.join(relbase,
+ mozpath.basename(path))
+ self.add_manifest(Manifest(parent, relpath))
+ self.copier.add(path, ManifestFile(entry.base))
+ self.copier[path].add(entry)
+
+ def add_interfaces(self, path, content):
+ # Interfaces in the same directory are all linked together in an
+ # interfaces.xpt file.
+ interfaces_path = mozpath.join(mozpath.dirname(path),
+ 'interfaces.xpt')
+ if not self.copier.contains(interfaces_path):
+ self.add_manifest(ManifestInterfaces(mozpath.dirname(path),
+ 'interfaces.xpt'))
+ self.copier.add(interfaces_path, XPTFile())
+ self.copier[interfaces_path].add(content)
+
+ def contains(self, path):
+ assert '*' not in path
+ return self.copier.contains(path)
+
+
+class JarFormatter(PiecemealFormatter):
+ '''
+ Formatter for the jar package format. Assumes manifest entries related to
+ chrome are registered before the chrome data files are added. Also assumes
+ manifest entries for resources are registered after chrome manifest
+ entries.
+ '''
+ def __init__(self, copier, compress=True, optimize=True):
+ PiecemealFormatter.__init__(self, copier)
+        self._compress = compress
+        self._optimize = optimize
+
+ def _add_base(self, base, addon=False):
+ if addon is True:
+ jarrer = Jarrer(self._compress, self._optimize)
+ self.copier.add(base + '.xpi', jarrer)
+ self._sub_formatter[base] = FlatSubFormatter(jarrer)
+ else:
+ self._sub_formatter[base] = JarSubFormatter(
+ FileRegistrySubtree(base, self.copier),
+ self._compress, self._optimize)
+
+
+class JarSubFormatter(PiecemealFormatter):
+ '''
+ Sub-formatter for the jar package format. It is a PiecemealFormatter that
+    dispatches chrome data to a further sub-formatter for each jar file, and
+    non-chrome files to a FlatSubFormatter.
+ '''
+ def __init__(self, copier, compress=True, optimize=True):
+ PiecemealFormatter.__init__(self, copier)
+ self._frozen_chrome = False
+ self._compress = compress
+ self._optimize = optimize
+ self._sub_formatter[''] = FlatSubFormatter(copier)
+
+ def _jarize(self, entry, relpath):
+ '''
+        Transform a manifest entry into one pointing to chrome data in a jar.
+ Return the corresponding chrome path and the new entry.
+ '''
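+        # For example, an entry with base 'chrome' and a relative path under
+        # 'browser/' is rewritten to point into 'jar:browser.jar!', and
+        # 'chrome/browser' is returned as the chrome path.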
+ base = entry.base
+ basepath = mozpath.split(relpath)[0]
+ chromepath = mozpath.join(base, basepath)
+ entry = entry.rebase(chromepath) \
+ .move(mozpath.join(base, 'jar:%s.jar!' % basepath)) \
+ .rebase(base)
+ return chromepath, entry
+
+ def add_manifest(self, entry):
+ if isinstance(entry, ManifestChrome) and \
+ not urlparse(entry.relpath).scheme:
+ chromepath, entry = self._jarize(entry, entry.relpath)
+ assert not self._frozen_chrome
+ if chromepath not in self._sub_formatter:
+ jarrer = Jarrer(self._compress, self._optimize)
+ self.copier.add(chromepath + '.jar', jarrer)
+ self._sub_formatter[chromepath] = FlatSubFormatter(jarrer)
+ elif isinstance(entry, ManifestResource) and \
+ not urlparse(entry.target).scheme:
+ chromepath, new_entry = self._jarize(entry, entry.target)
+ if chromepath in self._sub_formatter:
+ entry = new_entry
+ PiecemealFormatter.add_manifest(self, entry)
+
+
+class OmniJarFormatter(JarFormatter):
+ '''
+ Formatter for the omnijar package format.
+ '''
+ def __init__(self, copier, omnijar_name, compress=True, optimize=True,
+ non_resources=()):
+ JarFormatter.__init__(self, copier, compress, optimize)
+ self._omnijar_name = omnijar_name
+ self._non_resources = non_resources
+
+ def _add_base(self, base, addon=False):
+ if addon:
+ JarFormatter._add_base(self, base, addon)
+ else:
+ # Initialize a chrome.manifest next to the omnijar file so that
+ # there's always a chrome.manifest file, even an empty one.
+ path = mozpath.normpath(mozpath.join(base, 'chrome.manifest'))
+ if not self.copier.contains(path):
+ self.copier.add(path, ManifestFile(''))
+ self._sub_formatter[base] = OmniJarSubFormatter(
+ FileRegistrySubtree(base, self.copier), self._omnijar_name,
+ self._compress, self._optimize, self._non_resources)
+
+
+class OmniJarSubFormatter(PiecemealFormatter):
+ '''
+ Sub-formatter for the omnijar package format. It is a PiecemealFormatter
+ that dispatches between a FlatSubFormatter for the resources data and
+ another FlatSubFormatter for the other files.
+ '''
+ def __init__(self, copier, omnijar_name, compress=True, optimize=True,
+ non_resources=()):
+ PiecemealFormatter.__init__(self, copier)
+ self._omnijar_name = omnijar_name
+ self._compress = compress
+ self._optimize = optimize
+ self._non_resources = non_resources
+ self._sub_formatter[''] = FlatSubFormatter(copier)
+ jarrer = Jarrer(self._compress, self._optimize)
+ self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer)
+
+ def _get_base(self, path):
+ base = self._omnijar_name if self.is_resource(path) else ''
+ # Only add the omnijar file if something ends up in it.
+ if base and not self.copier.contains(base):
+ self.copier.add(base, self._sub_formatter[base].copier)
+ return base, path
+
+ def add_manifest(self, entry):
+ base = ''
+ if not isinstance(entry, ManifestBinaryComponent):
+ base = self._omnijar_name
+ formatter = self._sub_formatter[base]
+ return formatter.add_manifest(entry)
+
+ def is_resource(self, path):
+ '''
+ Return whether the given path corresponds to a resource to be put in an
+ omnijar archive.
+ '''
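+        # For example, per the rules below (assuming no _non_resources
+        # pattern matches), 'chrome/foo' and 'components/foo.js' are
+        # resources, while 'chrome/icons/foo.png' and 'components/foo.so'
+        # are not.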
+ if any(mozpath.match(path, p.replace('*', '**'))
+ for p in self._non_resources):
+ return False
+ path = mozpath.split(path)
+ if path[0] == 'chrome':
+ return len(path) == 1 or path[1] != 'icons'
+ if path[0] == 'components':
+ return path[-1].endswith(('.js', '.xpt'))
+ if path[0] == 'res':
+ return len(path) == 1 or \
+ (path[1] != 'cursors' and path[1] != 'MainMenu.nib')
+ if path[0] == 'defaults':
+ return len(path) != 3 or \
+ not (path[2] == 'channel-prefs.js' and
+ path[1] in ['pref', 'preferences'])
+ return path[0] in [
+ 'modules',
+ 'greprefs.js',
+ 'hyphenation',
+ 'update.locale',
+ ] or path[0] in STARTUP_CACHE_PATHS
diff --git a/python/mozbuild/mozpack/packager/l10n.py b/python/mozbuild/mozpack/packager/l10n.py
new file mode 100644
index 000000000..758064f59
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/l10n.py
@@ -0,0 +1,259 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+'''
+Replace localized parts of a packaged directory with data from a langpack
+directory.
+'''
+
+import os
+import mozpack.path as mozpath
+from mozpack.packager.formats import (
+ FlatFormatter,
+ JarFormatter,
+ OmniJarFormatter,
+)
+from mozpack.packager import (
+ Component,
+ SimplePackager,
+ SimpleManifestSink,
+)
+from mozpack.files import (
+ ComposedFinder,
+ ManifestFile,
+)
+from mozpack.copier import (
+ FileCopier,
+ Jarrer,
+)
+from mozpack.chrome.manifest import (
+ ManifestLocale,
+ ManifestEntryWithRelPath,
+ is_manifest,
+ ManifestChrome,
+ Manifest,
+)
+from mozpack.errors import errors
+from mozpack.packager.unpack import UnpackFinder
+from createprecomplete import generate_precomplete
+
+
+class LocaleManifestFinder(object):
+ def __init__(self, finder):
+ entries = self.entries = []
+ bases = self.bases = []
+
+ class MockFormatter(object):
+ def add_interfaces(self, path, content):
+ pass
+
+ def add(self, path, content):
+ pass
+
+ def add_manifest(self, entry):
+ if entry.localized:
+ entries.append(entry)
+
+ def add_base(self, base, addon=False):
+ bases.append(base)
+
+ # SimplePackager rejects "manifest foo.manifest" entries with
+ # additional flags (such as "manifest foo.manifest application=bar").
+        # Those types of entries are used by language packs to work as addons,
+ # but are not necessary for the purpose of l10n repacking. So we wrap
+ # the finder in order to remove those entries.
+ class WrapFinder(object):
+ def __init__(self, finder):
+ self._finder = finder
+
+ def find(self, pattern):
+ for p, f in self._finder.find(pattern):
+ if isinstance(f, ManifestFile):
+ unwanted = [
+ e for e in f._entries
+ if isinstance(e, Manifest) and e.flags
+ ]
+ if unwanted:
+ f = ManifestFile(
+ f._base,
+ [e for e in f._entries if e not in unwanted])
+ yield p, f
+
+ sink = SimpleManifestSink(WrapFinder(finder), MockFormatter())
+ sink.add(Component(''), '*')
+ sink.close(False)
+
+ # Find unique locales used in these manifest entries.
+ self.locales = list(set(e.id for e in self.entries
+ if isinstance(e, ManifestLocale)))
+
+
+def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()):
+ app = LocaleManifestFinder(app_finder)
+ l10n = LocaleManifestFinder(l10n_finder)
+
+    # The code further below assumes a single locale is being replaced
+    # with another one.
+ if len(app.locales) > 1:
+ errors.fatal("Multiple app locales aren't supported: " +
+ ",".join(app.locales))
+ if len(l10n.locales) > 1:
+ errors.fatal("Multiple l10n locales aren't supported: " +
+ ",".join(l10n.locales))
+ locale = app.locales[0]
+ l10n_locale = l10n.locales[0]
+
+ # For each base directory, store what path a locale chrome package name
+ # corresponds to.
+ # e.g., for the following entry under app/chrome:
+ # locale foo en-US path/to/files
+ # keep track that the locale path for foo in app is
+ # app/chrome/path/to/files.
+ l10n_paths = {}
+ for e in l10n.entries:
+ if isinstance(e, ManifestChrome):
+ base = mozpath.basedir(e.path, app.bases)
+ l10n_paths.setdefault(base, {})
+ l10n_paths[base][e.name] = e.path
+
+    # For chrome and non-chrome files or directories, store what langpack path
+ # corresponds to a package path.
+ paths = {}
+ for e in app.entries:
+ if isinstance(e, ManifestEntryWithRelPath):
+ base = mozpath.basedir(e.path, app.bases)
+ if base not in l10n_paths:
+ errors.fatal("Locale doesn't contain %s/" % base)
+ # Allow errors to accumulate
+ continue
+ if e.name not in l10n_paths[base]:
+ errors.fatal("Locale doesn't have a manifest entry for '%s'" %
+ e.name)
+ # Allow errors to accumulate
+ continue
+ paths[e.path] = l10n_paths[base][e.name]
+
+ for pattern in non_chrome:
+ for base in app.bases:
+ path = mozpath.join(base, pattern)
+ left = set(p for p, f in app_finder.find(path))
+ right = set(p for p, f in l10n_finder.find(path))
+ for p in right:
+ paths[p] = p
+ for p in left - right:
+ paths[p] = None
+
+    # Create a new package, with non-localized bits coming from the original
+ # package, and localized bits coming from the langpack.
+ packager = SimplePackager(formatter)
+ for p, f in app_finder:
+ if is_manifest(p):
+ # Remove localized manifest entries.
+ for e in [e for e in f if e.localized]:
+ f.remove(e)
+ # If the path is one that needs a locale replacement, use the
+ # corresponding file from the langpack.
+ path = None
+ if p in paths:
+ path = paths[p]
+ if not path:
+ continue
+ else:
+ base = mozpath.basedir(p, paths.keys())
+ if base:
+ subpath = mozpath.relpath(p, base)
+ path = mozpath.normpath(mozpath.join(paths[base],
+ subpath))
+ if path:
+ files = [f for p, f in l10n_finder.find(path)]
+            if not files:
+ if base not in non_chrome:
+ finderBase = ""
+ if hasattr(l10n_finder, 'base'):
+ finderBase = l10n_finder.base
+ errors.error("Missing file: %s" %
+ os.path.join(finderBase, path))
+ else:
+ packager.add(path, files[0])
+ else:
+ packager.add(p, f)
+
+ # Add localized manifest entries from the langpack.
+ l10n_manifests = []
+ for base in set(e.base for e in l10n.entries):
+ m = ManifestFile(base, [e for e in l10n.entries if e.base == base])
+ path = mozpath.join(base, 'chrome.%s.manifest' % l10n_locale)
+ l10n_manifests.append((path, m))
+ bases = packager.get_bases()
+ for path, m in l10n_manifests:
+ base = mozpath.basedir(path, bases)
+ packager.add(path, m)
+ # Add a "manifest $path" entry in the top manifest under that base.
+ m = ManifestFile(base)
+ m.add(Manifest(base, mozpath.relpath(path, base)))
+ packager.add(mozpath.join(base, 'chrome.manifest'), m)
+
+ packager.close()
+
+    # Add any remaining non-chrome files.
+ for pattern in non_chrome:
+ for base in bases:
+ for p, f in l10n_finder.find(mozpath.join(base, pattern)):
+ if not formatter.contains(p):
+ formatter.add(p, f)
+
+ # Transplant jar preloading information.
+ for path, log in app_finder.jarlogs.iteritems():
+ assert isinstance(copier[path], Jarrer)
+ copier[path].preload([l.replace(locale, l10n_locale) for l in log])
+
+
+def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()):
+ '''
+ Replace localized data from the `source` directory with localized data
+ from `l10n` and `extra_l10n`.
+
+ The `source` argument points to a directory containing a packaged
+ application (in omnijar, jar or flat form).
+ The `l10n` argument points to a directory containing the main localized
+    data (usually in the form of a language pack addon) used to replace the
+    corresponding data in the packaged application.
+ The `extra_l10n` argument contains a dict associating relative paths in
+ the source to separate directories containing localized data for them.
+ This can be used to point at different language pack addons for different
+    parts of the packaged application.
+ The `non_resources` argument gives a list of relative paths in the source
+    that should not be added to an omnijar in case the packaged application
+ is in that format.
+ The `non_chrome` argument gives a list of file/directory patterns for
+ localized files that are not listed in a chrome.manifest.
+ '''
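+    # A hypothetical invocation (all paths and patterns are placeholders):
+    #   repack('dist/app', 'dist/langpack',
+    #          non_chrome=set(['some/pattern']))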
+ app_finder = UnpackFinder(source)
+ l10n_finder = UnpackFinder(l10n)
+ if extra_l10n:
+ finders = {
+ '': l10n_finder,
+ }
+ for base, path in extra_l10n.iteritems():
+ finders[base] = UnpackFinder(path)
+ l10n_finder = ComposedFinder(finders)
+ copier = FileCopier()
+ if app_finder.kind == 'flat':
+ formatter = FlatFormatter(copier)
+ elif app_finder.kind == 'jar':
+ formatter = JarFormatter(copier,
+ optimize=app_finder.optimizedjars,
+ compress=app_finder.compressed)
+ elif app_finder.kind == 'omni':
+ formatter = OmniJarFormatter(copier, app_finder.omnijar,
+ optimize=app_finder.optimizedjars,
+ compress=app_finder.compressed,
+ non_resources=non_resources)
+
+ with errors.accumulate():
+ _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
+ copier.copy(source, skip_if_older=False)
+ generate_precomplete(source)
diff --git a/python/mozbuild/mozpack/packager/unpack.py b/python/mozbuild/mozpack/packager/unpack.py
new file mode 100644
index 000000000..fa2b474e7
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/unpack.py
@@ -0,0 +1,202 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import mozpack.path as mozpath
+from mozpack.files import (
+ BaseFinder,
+ FileFinder,
+ DeflatedFile,
+ ManifestFile,
+)
+from mozpack.chrome.manifest import (
+ parse_manifest,
+ ManifestEntryWithRelPath,
+ ManifestResource,
+ is_manifest,
+)
+from mozpack.mozjar import JarReader
+from mozpack.copier import (
+ FileRegistry,
+ FileCopier,
+)
+from mozpack.packager import SimplePackager
+from mozpack.packager.formats import (
+ FlatFormatter,
+ STARTUP_CACHE_PATHS,
+)
+from urlparse import urlparse
+
+
+class UnpackFinder(BaseFinder):
+ '''
+ Special Finder object that treats the source package directory as if it
+ were in the flat chrome format, whatever chrome format it actually is in.
+
+ This means that for example, paths like chrome/browser/content/... match
+    files under jar:chrome/browser.jar!/content/... in the case of the jar
+    chrome format.
+
+ The only argument to the constructor is a Finder instance or a path.
+ The UnpackFinder is populated with files from this Finder instance,
+ or with files from a FileFinder using the given path as its root.
+ '''
+ def __init__(self, source):
+ if isinstance(source, BaseFinder):
+ self._finder = source
+ else:
+ self._finder = FileFinder(source)
+ self.base = self._finder.base
+ self.files = FileRegistry()
+ self.kind = 'flat'
+ self.omnijar = None
+ self.jarlogs = {}
+ self.optimizedjars = False
+ self.compressed = True
+
+ jars = set()
+
+ for p, f in self._finder.find('*'):
+ # Skip the precomplete file, which is generated at packaging time.
+ if p == 'precomplete':
+ continue
+ base = mozpath.dirname(p)
+ # If the file is a zip/jar that is not a .xpi, and contains a
+ # chrome.manifest, it is an omnijar. All the files it contains
+ # go in the directory containing the omnijar. Manifests are merged
+ # if there is a corresponding manifest in the directory.
+ if not p.endswith('.xpi') and self._maybe_zip(f) and \
+ (mozpath.basename(p) == self.omnijar or
+ not self.omnijar):
+ jar = self._open_jar(p, f)
+ if 'chrome.manifest' in jar:
+ self.kind = 'omni'
+ self.omnijar = mozpath.basename(p)
+ self._fill_with_jar(base, jar)
+ continue
+            # If the file is a manifest, scan its entries for ones referencing
+            # jar: urls. If there are any, the files contained in the jars
+            # they point to go under a directory named after the jar.
+ if is_manifest(p):
+ m = self.files[p] if self.files.contains(p) \
+ else ManifestFile(base)
+ for e in parse_manifest(self.base, p, f.open()):
+ m.add(self._handle_manifest_entry(e, jars))
+ if self.files.contains(p):
+ continue
+ f = m
+ # If the file is a packed addon, unpack it under a directory named
+ # after the xpi.
+ if p.endswith('.xpi') and self._maybe_zip(f):
+ self._fill_with_jar(p[:-4], self._open_jar(p, f))
+ continue
+            if p not in jars:
+ self.files.add(p, f)
+
+ def _fill_with_jar(self, base, jar):
+ for j in jar:
+ path = mozpath.join(base, j.filename)
+ if is_manifest(j.filename):
+ m = self.files[path] if self.files.contains(path) \
+ else ManifestFile(mozpath.dirname(path))
+ for e in parse_manifest(None, path, j):
+ m.add(e)
+ if not self.files.contains(path):
+ self.files.add(path, m)
+            else:
+ self.files.add(path, DeflatedFile(j))
+
+ def _handle_manifest_entry(self, entry, jars):
+ jarpath = None
+ if isinstance(entry, ManifestEntryWithRelPath) and \
+ urlparse(entry.relpath).scheme == 'jar':
+ jarpath, entry = self._unjarize(entry, entry.relpath)
+ elif isinstance(entry, ManifestResource) and \
+ urlparse(entry.target).scheme == 'jar':
+ jarpath, entry = self._unjarize(entry, entry.target)
+ if jarpath:
+ # Don't defer unpacking the jar file. If we already saw
+ # it, take (and remove) it from the registry. If we
+ # haven't, try to find it now.
+ if self.files.contains(jarpath):
+ jar = self.files[jarpath]
+ self.files.remove(jarpath)
+ else:
+ jar = [f for p, f in self._finder.find(jarpath)]
+ assert len(jar) == 1
+ jar = jar[0]
+            if jarpath not in jars:
+ base = mozpath.splitext(jarpath)[0]
+ for j in self._open_jar(jarpath, jar):
+ self.files.add(mozpath.join(base,
+ j.filename),
+ DeflatedFile(j))
+ jars.add(jarpath)
+ self.kind = 'jar'
+ return entry
+
+ def _open_jar(self, path, file):
+ '''
+ Return a JarReader for the given BaseFile instance, keeping a log of
+ the preloaded entries it has.
+ '''
+ jar = JarReader(fileobj=file.open())
+ if jar.is_optimized:
+ self.optimizedjars = True
+ if not any(f.compressed for f in jar):
+ self.compressed = False
+ if jar.last_preloaded:
+ jarlog = jar.entries.keys()
+ self.jarlogs[path] = jarlog[:jarlog.index(jar.last_preloaded) + 1]
+ return jar
+
+ def find(self, path):
+ for p in self.files.match(path):
+ yield p, self.files[p]
+
+ def _maybe_zip(self, file):
+ '''
+ Return whether the given BaseFile looks like a ZIP/Jar.
+ '''
+ header = file.open().read(8)
+ return len(header) == 8 and (header[0:2] == 'PK' or
+ header[4:6] == 'PK')
+
+ def _unjarize(self, entry, relpath):
+ '''
+        Transform a manifest entry pointing to chrome data in a jar into one
+        pointing to the corresponding unpacked path. Return the jar path and
+ the new entry.
+ '''
+ base = entry.base
+ jar, relpath = urlparse(relpath).path.split('!', 1)
+ entry = entry.rebase(mozpath.join(base, 'jar:%s!' % jar)) \
+ .move(mozpath.join(base, mozpath.splitext(jar)[0])) \
+ .rebase(base)
+ return mozpath.join(base, jar), entry
+
+
+def unpack_to_registry(source, registry):
+ '''
+ Transform a jar chrome or omnijar packaged directory into a flat package.
+
+ The given registry is filled with the flat package.
+ '''
+ finder = UnpackFinder(source)
+ packager = SimplePackager(FlatFormatter(registry))
+ for p, f in finder.find('*'):
+ if mozpath.split(p)[0] not in STARTUP_CACHE_PATHS:
+ packager.add(p, f)
+ packager.close()
+
+
+def unpack(source):
+ '''
+ Transform a jar chrome or omnijar packaged directory into a flat package.
+ '''
+ copier = FileCopier()
+ unpack_to_registry(source, copier)
+ copier.copy(source, skip_if_older=False)
diff --git a/python/mozbuild/mozpack/path.py b/python/mozbuild/mozpack/path.py
new file mode 100644
index 000000000..7ea8ea85a
--- /dev/null
+++ b/python/mozbuild/mozpack/path.py
@@ -0,0 +1,136 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import posixpath
+import os
+import re
+
+'''
+Like os.path, with a reduced set of functions, and with normalized path
+separators (always use forward slashes).
+Also contains a few additional utilities not found in os.path.
+'''
+
+
+def normsep(path):
+ '''
+ Normalize path separators, by using forward slashes instead of whatever
+ os.sep is.
+ '''
+ if os.sep != '/':
+ path = path.replace(os.sep, '/')
+ if os.altsep and os.altsep != '/':
+ path = path.replace(os.altsep, '/')
+ return path
+
+
+def relpath(path, start):
+ rel = normsep(os.path.relpath(path, start))
+ return '' if rel == '.' else rel
+
+
+def realpath(path):
+ return normsep(os.path.realpath(path))
+
+
+def abspath(path):
+ return normsep(os.path.abspath(path))
+
+
+def join(*paths):
+ return normsep(os.path.join(*paths))
+
+
+def normpath(path):
+ return posixpath.normpath(normsep(path))
+
+
+def dirname(path):
+ return posixpath.dirname(normsep(path))
+
+
+def commonprefix(paths):
+ return posixpath.commonprefix([normsep(path) for path in paths])
+
+
+def basename(path):
+ return os.path.basename(path)
+
+
+def splitext(path):
+ return posixpath.splitext(normsep(path))
+
+
+def split(path):
+ '''
+ Return the normalized path as a list of its components.
+ split('foo/bar/baz') returns ['foo', 'bar', 'baz']
+ '''
+ return normsep(path).split('/')
+
+
+def basedir(path, bases):
+ '''
+ Given a list of directories (bases), return which one contains the given
+ path. If several matches are found, the deepest base directory is returned.
+ basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar']) returns 'foo/bar'
+ ('foo' and 'foo/bar' both match, but 'foo/bar' is the deepest match)
+ '''
+ path = normsep(path)
+ bases = [normsep(b) for b in bases]
+ if path in bases:
+ return path
+ for b in sorted(bases, reverse=True):
+ if b == '' or path.startswith(b + '/'):
+ return b
+
+
+re_cache = {}
+
+def match(path, pattern):
+ '''
+ Return whether the given path matches the given pattern.
+ An asterisk can be used to match any string, including the null string, in
+ one part of the path:
+ 'foo' matches '*', 'f*' or 'fo*o'
+ However, an asterisk matching a subdirectory may not match the null string:
+ 'foo/bar' does *not* match 'foo/*/bar'
+ If the pattern matches one of the ancestor directories of the path, the
+    path is considered matching:
+ 'foo/bar' matches 'foo'
+ Two adjacent asterisks can be used to match files and zero or more
+ directories and subdirectories.
+ 'foo/bar' matches 'foo/**/bar', or '**/bar'
+ '''
+ if not pattern:
+ return True
+ if pattern not in re_cache:
+ p = re.escape(pattern)
+ p = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\1(?:.+/)?', p)
+ p = re.sub(r'(^|\\\/)\\\*\\\*$', r'(?:\1.+)?', p)
+ p = p.replace(r'\*', '[^/]*') + '(?:/.*)?$'
+ re_cache[pattern] = re.compile(p)
+ return re_cache[pattern].match(path) is not None
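+# For example, following the rules above:
+#   match('foo/bar/baz', 'foo/**/baz')  # True
+#   match('foo/bar/baz', 'foo')         # True (ancestor directory matches)
+#   match('foo/bar', 'bar')             # False (patterns anchor at the start)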
+
+
+def rebase(oldbase, base, relativepath):
+ '''
+ Return relativepath relative to base instead of oldbase.
+ '''
+ if base == oldbase:
+ return relativepath
+ if len(base) < len(oldbase):
+ assert basedir(oldbase, [base]) == base
+ relbase = relpath(oldbase, base)
+ result = join(relbase, relativepath)
+ else:
+ assert basedir(base, [oldbase]) == oldbase
+ relbase = relpath(base, oldbase)
+ result = relpath(relativepath, relbase)
+ result = normpath(result)
+ if relativepath.endswith('/') and not result.endswith('/'):
+ result += '/'
+ return result
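+# For example:
+#   rebase('foo/bar', 'foo', 'baz')      returns 'bar/baz'
+#   rebase('foo', 'foo/bar', 'bar/baz')  returns 'baz'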
diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozpack/test/__init__.py
diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data
new file mode 100644
index 000000000..fb7f0c4fc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/data/test_data
@@ -0,0 +1 @@
+test_data \ No newline at end of file
diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py
new file mode 100644
index 000000000..8e4e8b759
--- /dev/null
+++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function
+import sys
+
+
+if len(sys.argv) != 4:
+ raise Exception('Usage: minify_js_verify <exitcode> <orig> <minified>')
+
+retcode = int(sys.argv[1])
+
+if retcode:
+ print('Error message', file=sys.stderr)
+
+sys.exit(retcode)
diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py
new file mode 100644
index 000000000..6f61f7eb7
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_archive.py
@@ -0,0 +1,190 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import hashlib
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+from mozpack.archive import (
+ DEFAULT_MTIME,
+ create_tar_from_files,
+ create_tar_gz_from_files,
+ create_tar_bz2_from_files,
+)
+
+from mozunit import main
+
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
+def file_hash(path):
+ h = hashlib.sha1()
+ with open(path, 'rb') as fh:
+ while True:
+ data = fh.read(8192)
+ if not data:
+ break
+ h.update(data)
+
+ return h.hexdigest()
+
+
+class TestArchive(unittest.TestCase):
+ def _create_files(self, root):
+ files = {}
+ for i in range(10):
+ p = os.path.join(root, b'file%d' % i)
+ with open(p, 'wb') as fh:
+ fh.write(b'file%d' % i)
+ # Need to set permissions or umask may influence testing.
+ os.chmod(p, MODE_STANDARD)
+ files[b'file%d' % i] = p
+
+ return files
+
+ def _verify_basic_tarfile(self, tf):
+ self.assertEqual(len(tf.getmembers()), 10)
+
+ names = ['file%d' % i for i in range(10)]
+ self.assertEqual(tf.getnames(), names)
+
+ for ti in tf.getmembers():
+ self.assertEqual(ti.uid, 0)
+ self.assertEqual(ti.gid, 0)
+ self.assertEqual(ti.uname, '')
+ self.assertEqual(ti.gname, '')
+ self.assertEqual(ti.mode, MODE_STANDARD)
+ self.assertEqual(ti.mtime, DEFAULT_MTIME)
+
+ def test_dirs_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ with self.assertRaisesRegexp(ValueError, 'not a regular'):
+ create_tar_from_files(fh, {'test': d})
+ finally:
+ shutil.rmtree(d)
+
+ def test_setuid_setgid_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ uid = os.path.join(d, 'setuid')
+ gid = os.path.join(d, 'setgid')
+ with open(uid, 'a'):
+ pass
+ with open(gid, 'a'):
+ pass
+
+ os.chmod(uid, MODE_STANDARD | stat.S_ISUID)
+ os.chmod(gid, MODE_STANDARD | stat.S_ISGID)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'):
+ create_tar_from_files(fh, {'test': uid})
+ with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'):
+ create_tar_from_files(fh, {'test': gid})
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ create_tar_from_files(fh, files)
+
+ # Output should be deterministic.
+ self.assertEqual(file_hash(tp), 'cd16cee6f13391abd94dfa435d2633b61ed727f1')
+
+ with tarfile.open(tp, 'r') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_executable_preserved(self):
+ d = tempfile.mkdtemp()
+ try:
+ p = os.path.join(d, 'exec')
+ with open(p, 'wb') as fh:
+ fh.write('#!/bin/bash\n')
+ os.chmod(p, MODE_STANDARD | stat.S_IXUSR)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ create_tar_from_files(fh, {'exec': p})
+
+ self.assertEqual(file_hash(tp), '357e1b81c0b6cfdfa5d2d118d420025c3c76ee93')
+
+ with tarfile.open(tp, 'r') as tf:
+ m = tf.getmember('exec')
+ self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_gz_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, 'test.tar.gz')
+ with open(gp, 'wb') as fh:
+ create_tar_gz_from_files(fh, files)
+
+ self.assertEqual(file_hash(gp), 'acb602239c1aeb625da5e69336775609516d60f5')
+
+ with tarfile.open(gp, 'r:gz') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_tar_gz_name(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, 'test.tar.gz')
+ with open(gp, 'wb') as fh:
+ create_tar_gz_from_files(fh, files, filename='foobar', compresslevel=1)
+
+ self.assertEqual(file_hash(gp), 'fd099f96480cc1100f37baa8e89a6b820dbbcbd3')
+
+ with tarfile.open(gp, 'r:gz') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_bz2_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ bp = os.path.join(d, 'test.tar.bz2')
+ with open(bp, 'wb') as fh:
+ create_tar_bz2_from_files(fh, files)
+
+ self.assertEqual(file_hash(bp), '1827ad00dfe7acf857b7a1c95ce100361e3f6eea')
+
+ with tarfile.open(bp, 'r:bz2') as tf:
+ self._verify_basic_tarfile(tf)
+ finally:
+ shutil.rmtree(d)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py
new file mode 100644
index 000000000..e6a5257e9
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_flags.py
@@ -0,0 +1,148 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from mozpack.chrome.flags import (
+ Flag,
+ StringFlag,
+ VersionFlag,
+ Flags,
+)
+from mozpack.errors import ErrorMessage
+
+
+class TestFlag(unittest.TestCase):
+ def test_flag(self):
+ flag = Flag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches(False))
+ self.assertTrue(flag.matches('false'))
+ self.assertFalse(flag.matches('true'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag=')
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag=42')
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=false')
+
+ flag.add_definition('flag=1')
+ self.assertEqual(str(flag), 'flag=1')
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches('1'))
+ self.assertFalse(flag.matches('no'))
+
+ flag.add_definition('flag=true')
+ self.assertEqual(str(flag), 'flag=true')
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches('true'))
+ self.assertFalse(flag.matches('0'))
+
+ flag.add_definition('flag=no')
+ self.assertEqual(str(flag), 'flag=no')
+ self.assertTrue(flag.matches('false'))
+ self.assertFalse(flag.matches('1'))
+
+ flag.add_definition('flag')
+ self.assertEqual(str(flag), 'flag')
+ self.assertFalse(flag.matches('false'))
+ self.assertTrue(flag.matches('true'))
+ self.assertFalse(flag.matches(False))
+
+ def test_string_flag(self):
+ flag = StringFlag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches('foo'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag>=2')
+
+ flag.add_definition('flag=foo')
+ self.assertEqual(str(flag), 'flag=foo')
+ self.assertTrue(flag.matches('foo'))
+ self.assertFalse(flag.matches('bar'))
+
+ flag.add_definition('flag=bar')
+ self.assertEqual(str(flag), 'flag=foo flag=bar')
+ self.assertTrue(flag.matches('foo'))
+ self.assertTrue(flag.matches('bar'))
+ self.assertFalse(flag.matches('baz'))
+
+ flag = StringFlag('flag')
+ flag.add_definition('flag!=bar')
+ self.assertEqual(str(flag), 'flag!=bar')
+ self.assertTrue(flag.matches('foo'))
+ self.assertFalse(flag.matches('bar'))
+
+ def test_version_flag(self):
+ flag = VersionFlag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=2')
+
+ flag.add_definition('flag=1.0')
+ self.assertEqual(str(flag), 'flag=1.0')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertFalse(flag.matches('2.0'))
+
+ flag.add_definition('flag=2.0')
+ self.assertEqual(str(flag), 'flag=1.0 flag=2.0')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertFalse(flag.matches('3.0'))
+
+ flag = VersionFlag('flag')
+ flag.add_definition('flag>=2.0')
+ self.assertEqual(str(flag), 'flag>=2.0')
+ self.assertFalse(flag.matches('1.0'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertTrue(flag.matches('3.0'))
+
+ flag.add_definition('flag<1.10')
+ self.assertEqual(str(flag), 'flag>=2.0 flag<1.10')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertTrue(flag.matches('1.9'))
+ self.assertFalse(flag.matches('1.10'))
+ self.assertFalse(flag.matches('1.20'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertTrue(flag.matches('3.0'))
+ self.assertRaises(Exception, flag.add_definition, 'flag<')
+ self.assertRaises(Exception, flag.add_definition, 'flag>')
+ self.assertRaises(Exception, flag.add_definition, 'flag>=')
+ self.assertRaises(Exception, flag.add_definition, 'flag<=')
+ self.assertRaises(Exception, flag.add_definition, 'flag!=1.0')
+
+
+class TestFlags(unittest.TestCase):
+ def setUp(self):
+ self.flags = Flags('contentaccessible=yes',
+ 'appversion>=3.5',
+ 'application=foo',
+ 'application=bar',
+ 'appversion<2.0',
+ 'platform',
+ 'abi!=Linux_x86-gcc3')
+
+ def test_flags_str(self):
+ self.assertEqual(str(self.flags), 'contentaccessible=yes ' +
+ 'appversion>=3.5 appversion<2.0 application=foo ' +
+ 'application=bar platform abi!=Linux_x86-gcc3')
+
+ def test_flags_match_unset(self):
+ self.assertTrue(self.flags.match(os='WINNT'))
+
+ def test_flags_match(self):
+ self.assertTrue(self.flags.match(application='foo'))
+ self.assertFalse(self.flags.match(application='qux'))
+
+ def test_flags_match_different(self):
+ self.assertTrue(self.flags.match(abi='WINNT_x86-MSVC'))
+ self.assertFalse(self.flags.match(abi='Linux_x86-gcc3'))
+
+ def test_flags_match_version(self):
+ self.assertTrue(self.flags.match(appversion='1.0'))
+ self.assertTrue(self.flags.match(appversion='1.5'))
+ self.assertFalse(self.flags.match(appversion='2.0'))
+ self.assertFalse(self.flags.match(appversion='3.0'))
+ self.assertTrue(self.flags.match(appversion='3.5'))
+ self.assertTrue(self.flags.match(appversion='3.10'))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py
new file mode 100644
index 000000000..690c6acdc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py
@@ -0,0 +1,149 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+import os
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestLocale,
+ ManifestSkin,
+ Manifest,
+ ManifestResource,
+ ManifestOverride,
+ ManifestComponent,
+ ManifestContract,
+ ManifestInterfaces,
+ ManifestBinaryComponent,
+ ManifestCategory,
+ ManifestStyle,
+ ManifestOverlay,
+ MANIFESTS_TYPES,
+ parse_manifest,
+ parse_manifest_line,
+)
+from mozpack.errors import errors, AccumulatedErrors
+from test_errors import TestErrors
+
+
+class TestManifest(unittest.TestCase):
+ def test_parse_manifest(self):
+ manifest = [
+ 'content global content/global/',
+ 'content global content/global/ application=foo application=bar' +
+ ' platform',
+ 'locale global en-US content/en-US/',
+ 'locale global en-US content/en-US/ application=foo',
+ 'skin global classic/1.0 content/skin/classic/',
+ 'skin global classic/1.0 content/skin/classic/ application=foo' +
+ ' os=WINNT',
+ '',
+ 'manifest pdfjs/chrome.manifest',
+ 'resource gre-resources toolkit/res/',
+ 'override chrome://global/locale/netError.dtd' +
+ ' chrome://browser/locale/netError.dtd',
+ '# Comment',
+ 'component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js',
+ 'contract @mozilla.org/foo;1' +
+ ' {b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
+ 'interfaces foo.xpt',
+ 'binary-component bar.so',
+ 'category command-line-handler m-browser' +
+ ' @mozilla.org/browser/clh;1' +
+ ' application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
+ 'style chrome://global/content/customizeToolbar.xul' +
+ ' chrome://browser/skin/',
+ 'overlay chrome://global/content/viewSource.xul' +
+ ' chrome://browser/content/viewSourceOverlay.xul',
+ ]
+ other_manifest = [
+ 'content global content/global/'
+ ]
+ expected_result = [
+ ManifestContent('', 'global', 'content/global/'),
+ ManifestContent('', 'global', 'content/global/', 'application=foo',
+ 'application=bar', 'platform'),
+ ManifestLocale('', 'global', 'en-US', 'content/en-US/'),
+ ManifestLocale('', 'global', 'en-US', 'content/en-US/',
+ 'application=foo'),
+ ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/'),
+ ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/',
+ 'application=foo', 'os=WINNT'),
+ Manifest('', 'pdfjs/chrome.manifest'),
+ ManifestResource('', 'gre-resources', 'toolkit/res/'),
+ ManifestOverride('', 'chrome://global/locale/netError.dtd',
+ 'chrome://browser/locale/netError.dtd'),
+ ManifestComponent('', '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
+ 'foo.js'),
+ ManifestContract('', '@mozilla.org/foo;1',
+ '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}'),
+ ManifestInterfaces('', 'foo.xpt'),
+ ManifestBinaryComponent('', 'bar.so'),
+ ManifestCategory('', 'command-line-handler', 'm-browser',
+ '@mozilla.org/browser/clh;1', 'application=' +
+ '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'),
+ ManifestStyle('', 'chrome://global/content/customizeToolbar.xul',
+ 'chrome://browser/skin/'),
+ ManifestOverlay('', 'chrome://global/content/viewSource.xul',
+ 'chrome://browser/content/viewSourceOverlay.xul'),
+ ]
+ with mozunit.MockedOpen({'manifest': '\n'.join(manifest),
+ 'other/manifest': '\n'.join(other_manifest)}):
+ # Ensure we have tests for all types of manifests.
+ self.assertEqual(set(type(e) for e in expected_result),
+ set(MANIFESTS_TYPES.values()))
+ self.assertEqual(list(parse_manifest(os.curdir, 'manifest')),
+ expected_result)
+ self.assertEqual(list(parse_manifest(os.curdir, 'other/manifest')),
+ [ManifestContent('other', 'global',
+ 'content/global/')])
+
+ def test_manifest_rebase(self):
+ m = parse_manifest_line('chrome', 'content global content/global/')
+ m = m.rebase('')
+ self.assertEqual(str(m), 'content global chrome/content/global/')
+ m = m.rebase('chrome')
+ self.assertEqual(str(m), 'content global content/global/')
+
+ m = parse_manifest_line('chrome/foo', 'content global content/global/')
+ m = m.rebase('chrome')
+ self.assertEqual(str(m), 'content global foo/content/global/')
+ m = m.rebase('chrome/foo')
+ self.assertEqual(str(m), 'content global content/global/')
+
+ m = parse_manifest_line('modules/foo', 'resource foo ./')
+ m = m.rebase('modules')
+ self.assertEqual(str(m), 'resource foo foo/')
+ m = m.rebase('modules/foo')
+ self.assertEqual(str(m), 'resource foo ./')
+
+ m = parse_manifest_line('chrome', 'content browser browser/content/')
+ m = m.rebase('chrome/browser').move('jar:browser.jar!').rebase('')
+ self.assertEqual(str(m), 'content browser jar:browser.jar!/content/')
+
+
+class TestManifestErrors(TestErrors, unittest.TestCase):
+ def test_parse_manifest_errors(self):
+ manifest = [
+ 'skin global classic/1.0 content/skin/classic/ platform',
+ '',
+ 'binary-component bar.so',
+ 'unsupported foo',
+ ]
+ with mozunit.MockedOpen({'manifest': '\n'.join(manifest)}):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ list(parse_manifest(os.curdir, 'manifest'))
+ out = self.get_output()
+ # Expecting 2 errors
+ self.assertEqual(len(out), 2)
+ path = os.path.abspath('manifest')
+ # First on line 1
+ self.assertTrue(out[0].startswith('Error: %s:1: ' % path))
+ # Second on line 4
+ self.assertTrue(out[1].startswith('Error: %s:4: ' % path))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py
new file mode 100644
index 000000000..6688b3d5e
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_copier.py
@@ -0,0 +1,529 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+ FileRegistrySubtree,
+ Jarrer,
+)
+from mozpack.files import (
+ GeneratedFile,
+ ExistingFile,
+)
+from mozpack.mozjar import JarReader
+import mozpack.path as mozpath
+import unittest
+import mozunit
+import os
+import stat
+from mozpack.errors import ErrorMessage
+from mozpack.test.test_files import (
+ MockDest,
+ MatchTestTemplate,
+ TestWithTmpDir,
+)
+
+
+class BaseTestFileRegistry(MatchTestTemplate):
+ def add(self, path):
+ self.registry.add(path, GeneratedFile(path))
+
+ def do_check(self, pattern, result):
+ self.checked = True
+ if result:
+ self.assertTrue(self.registry.contains(pattern))
+ else:
+ self.assertFalse(self.registry.contains(pattern))
+ self.assertEqual(self.registry.match(pattern), result)
+
+ def do_test_file_registry(self, registry):
+ self.registry = registry
+ self.registry.add('foo', GeneratedFile('foo'))
+ bar = GeneratedFile('bar')
+ self.registry.add('bar', bar)
+ self.assertEqual(self.registry.paths(), ['foo', 'bar'])
+ self.assertEqual(self.registry['bar'], bar)
+
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo',
+ GeneratedFile('foo2'))
+
+ self.assertRaises(ErrorMessage, self.registry.remove, 'qux')
+
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
+ GeneratedFile('foobar'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar/baz',
+ GeneratedFile('foobar'))
+
+ self.assertEqual(self.registry.paths(), ['foo', 'bar'])
+
+ self.registry.remove('foo')
+ self.assertEqual(self.registry.paths(), ['bar'])
+ self.registry.remove('bar')
+ self.assertEqual(self.registry.paths(), [])
+
+ self.prepare_match_test()
+ self.do_match_test()
+ self.assertTrue(self.checked)
+ self.assertEqual(self.registry.paths(), [
+ 'bar',
+ 'foo/bar',
+ 'foo/baz',
+ 'foo/qux/1',
+ 'foo/qux/bar',
+ 'foo/qux/2/test',
+ 'foo/qux/2/test2',
+ ])
+
+ self.registry.remove('foo/qux')
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz'])
+
+ self.registry.add('foo/qux', GeneratedFile('fooqux'))
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz',
+ 'foo/qux'])
+ self.registry.remove('foo/b*')
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/qux'])
+
+ self.assertEqual([f for f, c in self.registry], ['bar', 'foo/qux'])
+ self.assertEqual(len(self.registry), 2)
+
+ self.add('foo/.foo')
+ self.assertTrue(self.registry.contains('foo/.foo'))
+
+ def do_test_registry_paths(self, registry):
+ self.registry = registry
+
+ # Can't add a file if it requires a directory in place of a
+ # file we also require.
+ self.registry.add('foo', GeneratedFile('foo'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
+ GeneratedFile('foobar'))
+
+ # Can't add a file if we already have a directory there.
+ self.registry.add('bar/baz', GeneratedFile('barbaz'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Bump the count of things that require bar/ to 2.
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Drop the count of things that require bar/ to 1.
+ self.registry.remove('bar/baz')
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Drop the count of things that require bar/ to 0.
+ self.registry.remove('bar/zot')
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+
+
+class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
+ def test_partial_paths(self):
+ cases = {
+ 'foo/bar/baz/zot': ['foo/bar/baz', 'foo/bar', 'foo'],
+ 'foo/bar': ['foo'],
+ 'bar': [],
+ }
+ reg = FileRegistry()
+ for path, parts in cases.iteritems():
+ self.assertEqual(reg._partial_paths(path), parts)
+
+ def test_file_registry(self):
+ self.do_test_file_registry(FileRegistry())
+
+ def test_registry_paths(self):
+ self.do_test_registry_paths(FileRegistry())
+
+ def test_required_directories(self):
+ self.registry = FileRegistry()
+
+ self.registry.add('foo', GeneratedFile('foo'))
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add('bar/baz', GeneratedFile('barbaz'))
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.add('bar/zap/zot', GeneratedFile('barzapzot'))
+ self.assertEqual(self.registry.required_directories(), {'bar', 'bar/zap'})
+
+ self.registry.remove('bar/zap/zot')
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.remove('bar/baz')
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.remove('bar/zot')
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add('x/y/z', GeneratedFile('xyz'))
+ self.assertEqual(self.registry.required_directories(), {'x', 'x/y'})
+
+
+class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase):
+ def test_file_registry_subtree_base(self):
+ registry = FileRegistry()
+ self.assertEqual(registry, FileRegistrySubtree('', registry))
+ self.assertNotEqual(registry, FileRegistrySubtree('base', registry))
+
+ def create_registry(self):
+ registry = FileRegistry()
+ registry.add('foo/bar', GeneratedFile('foo/bar'))
+ registry.add('baz/qux', GeneratedFile('baz/qux'))
+ return FileRegistrySubtree('base/root', registry)
+
+ def test_file_registry_subtree(self):
+ self.do_test_file_registry(self.create_registry())
+
+ def test_registry_paths_subtree(self):
+ self.do_test_registry_paths(self.create_registry())
+
+
+class TestFileCopier(TestWithTmpDir):
+ def all_dirs(self, base):
+ all_dirs = set()
+ for root, dirs, files in os.walk(base):
+ if not dirs:
+ all_dirs.add(mozpath.relpath(root, base))
+ return all_dirs
+
+ def all_files(self, base):
+ all_files = set()
+ for root, dirs, files in os.walk(base):
+ for f in files:
+ all_files.add(
+ mozpath.join(mozpath.relpath(root, base), f))
+ return all_files
+
+ def test_file_copier(self):
+ copier = FileCopier()
+ copier.add('foo/bar', GeneratedFile('foobar'))
+ copier.add('foo/qux', GeneratedFile('fooqux'))
+ copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
+ copier.add('bar', GeneratedFile('bar'))
+ copier.add('qux/foo', GeneratedFile('quxfoo'))
+ copier.add('qux/bar', GeneratedFile(''))
+
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir),
+ set(['foo/deep/nested/directory', 'qux']))
+
+ self.assertEqual(result.updated_files, set(self.tmppath(p) for p in
+ self.all_files(self.tmpdir)))
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ copier.remove('foo')
+ copier.add('test', GeneratedFile('test'))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
+ self.assertEqual(result.removed_files, set(self.tmppath(p) for p in
+ ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file')))
+
+ def test_symlink_directory_replaced(self):
+ """Directory symlinks in destination are replaced if they need to be
+ real directories."""
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+
+ os.makedirs(self.tmppath('dest/foo'))
+ dummy = self.tmppath('dummy')
+ os.mkdir(dummy)
+ link = self.tmppath('dest/foo/bar')
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest)
+
+ st = os.lstat(link)
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ def test_remove_unaccounted_directory_symlinks(self):
+ """Directory symlinks in destination that are not in the way are
+ deleted according to remove_unaccounted and
+ remove_all_directory_symlinks.
+ """
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+
+ os.makedirs(self.tmppath('dest/foo'))
+ dummy = self.tmppath('dummy')
+ os.mkdir(dummy)
+
+ os.mkdir(self.tmppath('dest/zot'))
+ link = self.tmppath('dest/zot/zap')
+ os.symlink(dummy, link)
+
+ # If not remove_unaccounted but remove_empty_directories, then
+ # the symlinked directory remains (as does its containing
+ # directory).
+ result = copier.copy(dest, remove_unaccounted=False,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False)
+
+ st = os.lstat(link)
+ self.assertTrue(stat.S_ISLNK(st.st_mode))
+ self.assertFalse(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar']))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ # If remove_unaccounted but not remove_empty_directories, then
+ # only the symlinked directory is removed.
+ result = copier.copy(dest, remove_unaccounted=True,
+ remove_empty_directories=False,
+ remove_all_directory_symlinks=False)
+
+ st = os.lstat(self.tmppath('dest/zot'))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set())
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar', 'zot']))
+
+ # If remove_unaccounted and remove_empty_directories, then
+ # both the symlink and its containing directory are removed.
+ link = self.tmppath('dest/zot/zap')
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest, remove_unaccounted=True,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False)
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set([self.tmppath('dest/zot')]))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar']))
+
+ def test_permissions(self):
+ """Ensure files without write permission can be deleted."""
+ with open(self.tmppath('dummy'), 'a'):
+ pass
+
+ p = self.tmppath('no_perms')
+ with open(p, 'a'):
+ pass
+
+ # Make file and directory unwritable. Reminder: making a directory
+        # unwritable prevents modifications (including deletes) to the list
+ # of files in that directory.
+ os.chmod(p, 0o400)
+ os.chmod(self.tmpdir, 0o400)
+
+ copier = FileCopier()
+ copier.add('dummy', GeneratedFile('content'))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(result.removed_files_count, 1)
+ self.assertFalse(os.path.exists(p))
+
+ def test_no_remove(self):
+ copier = FileCopier()
+ copier.add('foo', GeneratedFile('foo'))
+
+ with open(self.tmppath('bar'), 'a'):
+ pass
+
+ os.mkdir(self.tmppath('emptydir'))
+ d = self.tmppath('populateddir')
+ os.mkdir(d)
+
+ with open(self.tmppath('populateddir/foo'), 'a'):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False)
+
+ self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar',
+ 'populateddir/foo']))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['populateddir']))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories,
+ set([self.tmppath('emptydir')]))
+
+ def test_no_remove_empty_directories(self):
+ copier = FileCopier()
+ copier.add('foo', GeneratedFile('foo'))
+
+ with open(self.tmppath('bar'), 'a'):
+ pass
+
+ os.mkdir(self.tmppath('emptydir'))
+ d = self.tmppath('populateddir')
+ os.mkdir(d)
+
+ with open(self.tmppath('populateddir/foo'), 'a'):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False,
+ remove_empty_directories=False)
+
+ self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar',
+ 'populateddir/foo']))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['emptydir',
+ 'populateddir']))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ def test_optional_exists_creates_unneeded_directory(self):
+ """Demonstrate that a directory not strictly required, but specified
+ as the path to an optional file, will be unnecessarily created.
+
+        This behaviour is wrong; fixing it is tracked by Bug 972432.
+        This test exists to guard against unexpected changes in
+        behaviour.
+ """
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar', ExistingFile(required=False))
+
+ result = copier.copy(dest)
+
+ st = os.lstat(self.tmppath('dest/foo'))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ # What's worse, we have no record that dest was created.
+        self.assertEqual(len(result.updated_files), 0)
+
+ # But we do have an erroneous record of an optional file
+ # existing when it does not.
+ self.assertIn(self.tmppath('dest/foo/bar'), result.existing_files)
+
+ def test_remove_unaccounted_file_registry(self):
+ """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+ copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
+ copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
+ copier.add('foo/toto/tata', GeneratedFile('footototata'))
+
+ os.makedirs(os.path.join(dest, 'bar'))
+ with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
+            fh.write('barbar')
+ os.makedirs(os.path.join(dest, 'foo', 'toto'))
+ with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
+            fh.write('foototototo')
+
+ result = copier.copy(dest, remove_unaccounted=False)
+
+ self.assertEqual(self.all_files(dest),
+                         set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
+ self.assertEqual(self.all_dirs(dest),
+ {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})
+
+ copier2 = FileCopier()
+ copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
+
+ # We expect only files copied from the first copier to be removed,
+ # not the extra file that was there beforehand.
+ result = copier2.copy(dest, remove_unaccounted=copier)
+
+ self.assertEqual(self.all_files(dest),
+                         set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
+ self.assertEqual(self.all_dirs(dest),
+ {'foo/hoge', 'foo/toto', 'bar'})
+ self.assertEqual(result.updated_files,
+ {self.tmppath('dest/foo/hoge/fuga')})
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, {self.tmppath(p) for p in
+ ('dest/foo/bar/baz', 'dest/foo/bar/qux', 'dest/foo/toto/tata')})
+ self.assertEqual(result.removed_directories,
+ {self.tmppath('dest/foo/bar')})
+
+
+class TestJarrer(unittest.TestCase):
+ def check_jar(self, dest, copier):
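+        # Verify that the jar lists exactly the copier's paths, in order,
+        # and that each member's uncompressed content matches what the
+        # copier holds.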
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], copier.paths())
+ for f in jar:
+ self.assertEqual(f.uncompressed_data.read(),
+ copier[f.filename].content)
+
+ def test_jarrer(self):
+ copier = Jarrer()
+ copier.add('foo/bar', GeneratedFile('foobar'))
+ copier.add('foo/qux', GeneratedFile('fooqux'))
+ copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
+ copier.add('bar', GeneratedFile('bar'))
+ copier.add('qux/foo', GeneratedFile('quxfoo'))
+ copier.add('qux/bar', GeneratedFile(''))
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove('foo')
+ copier.add('test', GeneratedFile('test'))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove('test')
+ copier.add('test', GeneratedFile('replaced-content'))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ preloaded = ['qux/bar', 'bar']
+ copier.preload(preloaded)
+ copier.copy(dest)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], preloaded +
+                         [p for p in copier.paths() if p not in preloaded])
+ self.assertEqual(jar.last_preloaded, preloaded[-1])
+
+ def test_jarrer_compress(self):
+ copier = Jarrer()
+ copier.add('foo/bar', GeneratedFile('ffffff'))
+ copier.add('foo/qux', GeneratedFile('ffffff'), compress=False)
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertTrue(jar['foo/bar'].compressed)
+ self.assertFalse(jar['foo/qux'].compressed)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py
new file mode 100644
index 000000000..16e2b0496
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_errors.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.errors import (
+ errors,
+ ErrorMessage,
+ AccumulatedErrors,
+)
+import unittest
+import mozunit
+import sys
+from cStringIO import StringIO
+
+
+class TestErrors(object):
+ def setUp(self):
+ errors.out = StringIO()
+ errors.ignore_errors(False)
+
+ def tearDown(self):
+ errors.out = sys.stderr
+
+ def get_output(self):
+ return [l.strip() for l in errors.out.getvalue().splitlines()]
+
+
+class TestErrorsImpl(TestErrors, unittest.TestCase):
+ def test_plain_error(self):
+ errors.warn('foo')
+ self.assertRaises(ErrorMessage, errors.error, 'foo')
+ self.assertRaises(ErrorMessage, errors.fatal, 'foo')
+        self.assertEqual(self.get_output(), ['Warning: foo'])
+
+ def test_ignore_errors(self):
+ errors.ignore_errors()
+ errors.warn('foo')
+ errors.error('bar')
+ self.assertRaises(ErrorMessage, errors.fatal, 'foo')
+        self.assertEqual(self.get_output(), ['Warning: foo', 'Warning: bar'])
+
+ def test_no_error(self):
+ with errors.accumulate():
+ errors.warn('1')
+
+ def test_simple_error(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error('1')
+        self.assertEqual(self.get_output(), ['Error: 1'])
+
+ def test_error_loop(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ for i in range(3):
+ errors.error('%d' % i)
+        self.assertEqual(self.get_output(),
+                         ['Error: 0', 'Error: 1', 'Error: 2'])
+
+ def test_multiple_errors(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error('foo')
+ for i in range(3):
+ if i == 2:
+ errors.warn('%d' % i)
+ else:
+ errors.error('%d' % i)
+ errors.error('bar')
+        self.assertEqual(self.get_output(),
+                         ['Error: foo', 'Error: 0', 'Error: 1',
+                          'Warning: 2', 'Error: bar'])
+
+ def test_errors_context(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ self.assertEqual(errors.get_context(), None)
+ with errors.context('foo', 1):
+ self.assertEqual(errors.get_context(), ('foo', 1))
+ errors.error('a')
+ with errors.context('bar', 2):
+ self.assertEqual(errors.get_context(), ('bar', 2))
+ errors.error('b')
+ self.assertEqual(errors.get_context(), ('foo', 1))
+ errors.error('c')
+ self.assertEqual(self.get_output(), [
+ 'Error: foo:1: a',
+ 'Error: bar:2: b',
+ 'Error: foo:1: c',
+ ])
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py
new file mode 100644
index 000000000..6fd617828
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_files.py
@@ -0,0 +1,1160 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensureParentDir
+
+from mozpack.errors import (
+ ErrorMessage,
+ errors,
+)
+from mozpack.files import (
+ AbsoluteSymlinkFile,
+ ComposedFinder,
+ DeflatedFile,
+ Dest,
+ ExistingFile,
+ ExtractedTarFile,
+ FileFinder,
+ File,
+ GeneratedFile,
+ JarFinder,
+ TarFinder,
+ ManifestFile,
+ MercurialFile,
+ MercurialRevisionFinder,
+ MinifiedJavaScript,
+ MinifiedProperties,
+ PreprocessedFile,
+ XPTFile,
+)
+
+# We don't have hglib installed everywhere.
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+try:
+ from mozpack.hg import MercurialNativeRevisionFinder
+except ImportError:
+ MercurialNativeRevisionFinder = None
+
+from mozpack.mozjar import (
+ JarReader,
+ JarWriter,
+)
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestResource,
+ ManifestLocale,
+ ManifestOverride,
+)
+import unittest
+import mozfile
+import mozunit
+import os
+import random
+import string
+import sys
+import tarfile
+import mozpack.path as mozpath
+from tempfile import mkdtemp
+from io import BytesIO
+from StringIO import StringIO
+from xpt import Typelib
+
+
+class TestWithTmpDir(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ self.symlink_supported = False
+
+ if not hasattr(os, 'symlink'):
+ return
+
+ dummy_path = self.tmppath('dummy_file')
+ with open(dummy_path, 'a'):
+ pass
+
+ try:
+ os.symlink(dummy_path, self.tmppath('dummy_symlink'))
+ os.remove(self.tmppath('dummy_symlink'))
+ except EnvironmentError:
+            # Leave symlink_supported at False if creating the link failed.
+            return
+ finally:
+ os.remove(dummy_path)
+
+ self.symlink_supported = True
+
+ def tearDown(self):
+ mozfile.rmtree(self.tmpdir)
+
+ def tmppath(self, relpath):
+ return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
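+# An in-memory Dest for tests. It remembers whether it was last read from
+# or written to, and resets (and, for writes, truncates) the stream when
+# the mode changes.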
+class MockDest(BytesIO, Dest):
+ def __init__(self):
+ BytesIO.__init__(self)
+ self.mode = None
+
+ def read(self, length=-1):
+ if self.mode != 'r':
+ self.seek(0)
+ self.mode = 'r'
+ return BytesIO.read(self, length)
+
+ def write(self, data):
+ if self.mode != 'w':
+ self.seek(0)
+ self.truncate(0)
+ self.mode = 'w'
+ return BytesIO.write(self, data)
+
+ def exists(self):
+ return True
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+
+
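+# A Dest whose write() always raises, used by tests to assert that copy()
+# does not write to an already up-to-date destination.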
+class DestNoWrite(Dest):
+ def write(self, data):
+ raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+ def test_dest(self):
+ dest = Dest(self.tmppath('dest'))
+ self.assertFalse(dest.exists())
+ dest.write('foo')
+ self.assertTrue(dest.exists())
+ dest.write('foo')
+ self.assertEqual(dest.read(4), 'foof')
+ self.assertEqual(dest.read(), 'oo')
+ self.assertEqual(dest.read(), '')
+ dest.write('bar')
+ self.assertEqual(dest.read(4), 'bar')
+ dest.close()
+ self.assertEqual(dest.read(), 'bar')
+ dest.write('foo')
+ dest.close()
+ dest.write('qux')
+ self.assertEqual(dest.read(), 'qux')
+
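+# Sample contents used by the copy tests below: the repeated entries
+# exercise the "same content" code path, and the random strings exercise
+# long content and long content differing only in the final character.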
+rand = ''.join(random.choice(string.letters) for i in xrange(131597))
+samples = [
+ '',
+ 'test',
+ 'fooo',
+ 'same',
+ 'same',
+ 'Different and longer',
+ rand,
+ rand,
+ rand[:-1] + '_',
+ 'test'
+]
+
+
+class TestFile(TestWithTmpDir):
+ def test_file(self):
+ '''
+ Check that File.copy yields the proper content in the destination file
+ in all situations that trigger different code paths:
+ - different content
+ - different content of the same size
+ - same content
+ - long content
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ for content in samples:
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+ # Ensure the destination file, when it exists, is older than the
+ # source
+ if os.path.exists(dest):
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, 'rb').read())
+ self.assertEqual(content, f.open().read())
+ self.assertEqual(content, f.open().read())
+
+ def test_file_dest(self):
+ '''
+ Similar to test_file, but for a destination object instead of
+        a destination file. This ensures the destination object is used
+        properly by File.copy, so that other subclasses of Dest will
+        work.
+ '''
+ src = self.tmppath('src')
+ dest = MockDest()
+
+ for content in samples:
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, dest.getvalue())
+
+ def test_file_open(self):
+ '''
+ Test whether File.open returns an appropriately reset file object.
+ '''
+ src = self.tmppath('src')
+ content = ''.join(samples)
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+
+ f = File(src)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_file_no_write(self):
+ '''
+ Test various conditions where File.copy is expected not to write
+ in the destination file.
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('test')
+
+ # Initial copy
+ f = File(src)
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When the source file is newer, but with the same content, no copy
+ # should occur
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, 'wb') as tmp:
+ tmp.write('fooo')
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ f.copy(dest, skip_if_older=False)
+ self.assertEqual('fooo', open(dest, 'rb').read())
+
+
+class TestAbsoluteSymlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ AbsoluteSymlinkFile('/foo')
+
+ with self.assertRaisesRegexp(ValueError, 'Symlink target not absolute'):
+ AbsoluteSymlinkFile('./foo')
+
+ def test_symlink_file(self):
+ source = self.tmppath('test_path')
+ with open(source, 'wt') as fh:
+ fh.write('Hello world')
+
+ s = AbsoluteSymlinkFile(source)
+ dest = self.tmppath('symlink')
+ self.assertTrue(s.copy(dest))
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, 'Hello world')
+
+ def test_replace_file_with_symlink(self):
+ # If symlinks are supported, an existing file should be replaced by a
+ # symlink.
+ source = self.tmppath('test_path')
+ with open(source, 'wt') as fh:
+ fh.write('source')
+
+ dest = self.tmppath('dest')
+ with open(dest, 'a'):
+ pass
+
+ s = AbsoluteSymlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, 'source')
+
+ def test_replace_symlink(self):
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath('source')
+ with open(source, 'a'):
+ pass
+
+ dest = self.tmppath('dest')
+
+ os.symlink(self.tmppath('bad'), dest)
+ self.assertTrue(os.path.islink(dest))
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertTrue(s.copy(dest))
+
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ def test_noop(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ source = self.tmppath('source')
+ dest = self.tmppath('dest')
+
+ with open(source, 'a'):
+ pass
+
+ os.symlink(source, dest)
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+
+class TestPreprocessedFile(TestWithTmpDir):
+ def test_preprocess(self):
+ '''
+ Test that copying the file invokes the preprocessor
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ f = PreprocessedFile(src, depfile_path=None, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ def test_preprocess_file_no_write(self):
+ '''
+ Test various conditions where PreprocessedFile.copy is expected not to
+ write in the destination file.
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+ depfile = self.tmppath('depfile')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=depfile, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ # Ensure subsequent copies won't trigger writes
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\nfooo\n#endif')
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ self.assertTrue(f.copy(dest, skip_if_older=False))
+ self.assertEqual('fooo\n', open(dest, 'rb').read())
+
+ def test_preprocess_file_dependencies(self):
+ '''
+ Test that the preprocess runs if the dependencies of the source change
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+ incl = self.tmppath('incl')
+ deps = self.tmppath('src.pp')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ with open(incl, 'wb') as tmp:
+ tmp.write('foo bar')
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=deps, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ # Update the source so it #includes the include file.
+ with open(src, 'wb') as tmp:
+ tmp.write('#include incl\n')
+ time = os.path.getmtime(dest) + 1
+ os.utime(src, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual('foo bar', open(dest, 'rb').read())
+
+ # If one of the dependencies changes, the file should be updated. The
+        # mtime of the dependency is set to be newer than the destination
+        # file's, to avoid both files having the same time.
+ with open(incl, 'wb') as tmp:
+ tmp.write('quux')
+ time = os.path.getmtime(dest) + 1
+ os.utime(incl, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual('quux', open(dest, 'rb').read())
+
+ # Perform one final copy to confirm that we don't run the preprocessor
+ # again. We update the mtime of the destination so it's newer than the
+ # input files. This would "just work" if we weren't changing
+ time = os.path.getmtime(incl) + 1
+ os.utime(dest, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+
+ def test_replace_symlink(self):
+ '''
+ Test that if the destination exists, and is a symlink, the target of
+ the symlink is not overwritten by the preprocessor output.
+ '''
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath('source')
+ dest = self.tmppath('dest')
+ pp_source = self.tmppath('pp_in')
+ deps = self.tmppath('deps')
+
+ with open(source, 'a'):
+ pass
+
+ os.symlink(source, dest)
+ self.assertTrue(os.path.islink(dest))
+
+ with open(pp_source, 'wb') as tmp:
+ tmp.write('#define FOO\nPREPROCESSED')
+
+ f = PreprocessedFile(pp_source, depfile_path=deps, marker='#',
+ defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual('PREPROCESSED', open(dest, 'rb').read())
+ self.assertFalse(os.path.islink(dest))
+ self.assertEqual('', open(source, 'rb').read())
+
+
+class TestExistingFile(TestWithTmpDir):
+ def test_required_missing_dest(self):
+ with self.assertRaisesRegexp(ErrorMessage, 'Required existing file'):
+ f = ExistingFile(required=True)
+ f.copy(self.tmppath('dest'))
+
+ def test_required_existing_dest(self):
+ p = self.tmppath('dest')
+ with open(p, 'a'):
+ pass
+
+ f = ExistingFile(required=True)
+ f.copy(p)
+
+ def test_optional_missing_dest(self):
+ f = ExistingFile(required=False)
+ f.copy(self.tmppath('dest'))
+
+ def test_optional_existing_dest(self):
+ p = self.tmppath('dest')
+ with open(p, 'a'):
+ pass
+
+ f = ExistingFile(required=False)
+ f.copy(p)
+
+
+class TestGeneratedFile(TestWithTmpDir):
+ def test_generated_file(self):
+ '''
+ Check that GeneratedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ '''
+ dest = self.tmppath('dest')
+
+ for content in samples:
+ f = GeneratedFile(content)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, 'rb').read())
+
+ def test_generated_file_open(self):
+ '''
+ Test whether GeneratedFile.open returns an appropriately reset file
+ object.
+ '''
+ content = ''.join(samples)
+ f = GeneratedFile(content)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_generated_file_no_write(self):
+ '''
+ Test various conditions where GeneratedFile.copy is expected not to
+ write in the destination file.
+ '''
+ dest = self.tmppath('dest')
+
+ # Initial copy
+ f = GeneratedFile('test')
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When using a new instance with the same content, no copy should occur
+ f = GeneratedFile('test')
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = GeneratedFile('fooo')
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestDeflatedFile(TestWithTmpDir):
+ def test_deflated_file(self):
+ '''
+ Check that DeflatedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ '''
+ src = self.tmppath('src.jar')
+ dest = self.tmppath('dest')
+
+ contents = {}
+ with JarWriter(src) as jar:
+ for content in samples:
+ name = ''.join(random.choice(string.letters)
+ for i in xrange(8))
+ jar.add(name, content, compress=True)
+ contents[name] = content
+
+ for j in JarReader(src):
+ f = DeflatedFile(j)
+ f.copy(dest)
+ self.assertEqual(contents[j.filename], open(dest, 'rb').read())
+
+ def test_deflated_file_open(self):
+ '''
+ Test whether DeflatedFile.open returns an appropriately reset file
+ object.
+ '''
+ src = self.tmppath('src.jar')
+ content = ''.join(samples)
+ with JarWriter(src) as jar:
+ jar.add('content', content)
+
+ f = DeflatedFile(JarReader(src)['content'])
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_deflated_file_no_write(self):
+ '''
+ Test various conditions where DeflatedFile.copy is expected not to
+ write in the destination file.
+ '''
+ src = self.tmppath('src.jar')
+ dest = self.tmppath('dest')
+
+ with JarWriter(src) as jar:
+ jar.add('test', 'test')
+ jar.add('test2', 'test')
+ jar.add('fooo', 'fooo')
+
+ jar = JarReader(src)
+ # Initial copy
+ f = DeflatedFile(jar['test'])
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When using a different file with the same content, no copy should
+ # occur
+ f = DeflatedFile(jar['test2'])
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = DeflatedFile(jar['fooo'])
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestManifestFile(TestWithTmpDir):
+ def test_manifest_file(self):
+ f = ManifestFile('chrome')
+ f.add(ManifestContent('chrome', 'global', 'toolkit/content/global/'))
+ f.add(ManifestResource('chrome', 'gre-resources', 'toolkit/res/'))
+ f.add(ManifestResource('chrome/pdfjs', 'pdfjs', './'))
+ f.add(ManifestContent('chrome/pdfjs', 'pdfjs', 'pdfjs'))
+ f.add(ManifestLocale('chrome', 'browser', 'en-US',
+ 'en-US/locale/browser/'))
+
+ f.copy(self.tmppath('chrome.manifest'))
+ self.assertEqual(open(self.tmppath('chrome.manifest')).readlines(), [
+ 'content global toolkit/content/global/\n',
+ 'resource gre-resources toolkit/res/\n',
+ 'resource pdfjs pdfjs/\n',
+ 'content pdfjs pdfjs/pdfjs\n',
+ 'locale browser en-US en-US/locale/browser/\n',
+ ])
+
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent('', 'global', 'toolkit/content/global/')
+ )
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestOverride('chrome', 'chrome://global/locale/netError.dtd',
+ 'chrome://browser/locale/netError.dtd')
+ )
+
+ f.remove(ManifestContent('chrome', 'global',
+ 'toolkit/content/global/'))
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent('chrome', 'global', 'toolkit/content/global/')
+ )
+
+ f.copy(self.tmppath('chrome.manifest'))
+ content = open(self.tmppath('chrome.manifest')).read()
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+# Compiled typelib for the following IDL:
+# interface foo;
+# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
+# interface bar {
+# void bar(in foo f);
+# };
+# We need to make this [scriptable] so it doesn't get deleted from the
+# typelib. We don't need to make the foo interfaces below [scriptable],
+# because they will be automatically included by virtue of being an
+# argument to a method of |bar|.
+bar_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C' +
+ b'\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
+ b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F' +
+ b'\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00' +
+ b'\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00' +
+ b'\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80'
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
+# interface foo {
+# void foo();
+# };
+foo_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
+ b'\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA' +
+ b'\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
+ b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x05\x00\x80\x06\x00\x00\x00'
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
+# interface foo {
+# void foo();
+# };
+foo2_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
+ b'\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39' +
+ b'\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
+ b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x05\x00\x80\x06\x00\x00\x00'
+)
+
+
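+# Return a dict mapping interface names to their definitions from a
+# typelib, given as a path or file object.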
+def read_interfaces(file):
+ return dict((i.name, i) for i in Typelib.read(file).interfaces)
+
+
+class TestXPTFile(TestWithTmpDir):
+ def test_xpt_file(self):
+ x = XPTFile()
+ x.add(foo_xpt)
+ x.add(bar_xpt)
+ x.copy(self.tmppath('interfaces.xpt'))
+
+ foo = read_interfaces(foo_xpt.open())
+ foo2 = read_interfaces(foo2_xpt.open())
+ bar = read_interfaces(bar_xpt.open())
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x.remove(foo_xpt)
+ x.copy(self.tmppath('interfaces2.xpt'))
+ linked = read_interfaces(self.tmppath('interfaces2.xpt'))
+ self.assertEqual(bar['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x.add(foo_xpt)
+ x.copy(DestNoWrite(self.tmppath('interfaces.xpt')))
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x = XPTFile()
+ x.add(foo2_xpt)
+ x.add(bar_xpt)
+ x.copy(self.tmppath('interfaces.xpt'))
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo2['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x = XPTFile()
+ x.add(foo_xpt)
+ x.add(foo2_xpt)
+ x.add(bar_xpt)
+ from xpt import DataError
+ self.assertRaises(DataError, x.copy, self.tmppath('interfaces.xpt'))
+
+
+class TestMinifiedProperties(TestWithTmpDir):
+ def test_minified_properties(self):
+ propLines = [
+ '# Comments are removed',
+ 'foo = bar',
+ '',
+ '# Another comment',
+ ]
+ prop = GeneratedFile('\n'.join(propLines))
+ self.assertEqual(MinifiedProperties(prop).open().readlines(),
+ ['foo = bar\n', '\n'])
+ open(self.tmppath('prop'), 'wb').write('\n'.join(propLines))
+ MinifiedProperties(File(self.tmppath('prop'))) \
+ .copy(self.tmppath('prop2'))
+ self.assertEqual(open(self.tmppath('prop2')).readlines(),
+ ['foo = bar\n', '\n'])
+
+
+class TestMinifiedJavaScript(TestWithTmpDir):
+ orig_lines = [
+ '// Comment line',
+ 'let foo = "bar";',
+ 'var bar = true;',
+ '',
+ '// Another comment',
+ ]
+
+ def test_minified_javascript(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f)
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def _verify_command(self, code):
+ our_dir = os.path.abspath(os.path.dirname(__file__))
+ return [
+ sys.executable,
+ os.path.join(our_dir, 'support', 'minify_js_verify.py'),
+ code,
+ ]
+
+ def test_minified_verify_success(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f,
+ verify_command=self._verify_command('0'))
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def test_minified_verify_failure(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ errors.out = StringIO()
+ min_f = MinifiedJavaScript(orig_f,
+ verify_command=self._verify_command('1'))
+
+ mini_lines = min_f.open().readlines()
+ output = errors.out.getvalue()
+ errors.out = sys.stderr
+ self.assertEqual(output,
+ 'Warning: JS minification verification failed for <unknown>:\n'
+ 'Warning: Error message\n')
+ self.assertEqual(mini_lines, orig_f.open().readlines())
+
+
+class MatchTestTemplate(object):
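+    # Template for the finder tests below: subclasses supply add() and
+    # do_check(), and this class provides a common file layout plus a
+    # battery of pattern-matching checks against it.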
+ def prepare_match_test(self, with_dotfiles=False):
+ self.add('bar')
+ self.add('foo/bar')
+ self.add('foo/baz')
+ self.add('foo/qux/1')
+ self.add('foo/qux/bar')
+ self.add('foo/qux/2/test')
+ self.add('foo/qux/2/test2')
+ if with_dotfiles:
+ self.add('foo/.foo')
+ self.add('foo/.bar/foo')
+
+ def do_match_test(self):
+ self.do_check('', [
+ 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('*', [
+ 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/qux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/b*', ['foo/bar', 'foo/baz'])
+ self.do_check('baz', [])
+ self.do_check('foo/foo', [])
+ self.do_check('foo/*ar', ['foo/bar'])
+ self.do_check('*ar', ['bar'])
+ self.do_check('*/bar', ['foo/bar'])
+ self.do_check('foo/*ux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/q*ux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/*/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
+ self.do_check('**/bar', ['bar', 'foo/bar', 'foo/qux/bar'])
+ self.do_check('foo/**/test', ['foo/qux/2/test'])
+ self.do_check('foo', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/**', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('**/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
+ self.do_check('**/foo', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('**/barbaz', [])
+ self.do_check('f**/bar', ['foo/bar'])
+
+ def do_finder_test(self, finder):
+ self.assertTrue(finder.contains('foo/.foo'))
+ self.assertTrue(finder.contains('foo/.bar'))
+ self.assertTrue('foo/.foo' in [f for f, c in
+ finder.find('foo/.foo')])
+ self.assertTrue('foo/.bar/foo' in [f for f, c in
+ finder.find('foo/.bar')])
+ self.assertEqual(sorted([f for f, c in finder.find('foo/.*')]),
+ ['foo/.bar/foo', 'foo/.foo'])
+ for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
+ self.assertFalse('foo/.foo' in [f for f, c in
+ finder.find(pattern)])
+ self.assertFalse('foo/.bar/foo' in [f for f, c in
+ finder.find(pattern)])
+ self.assertEqual(sorted([f for f, c in finder.find(pattern)]),
+ sorted([f for f, c in finder
+ if mozpath.match(f, pattern)]))
+
+
+def do_check(test, finder, pattern, result):
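+    # Assert that finder.find(pattern) returns exactly the expected paths,
+    # and that finder.contains(pattern) reflects whether anything matched.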
+ if result:
+ test.assertTrue(finder.contains(pattern))
+ else:
+ test.assertFalse(finder.contains(pattern))
+ test.assertEqual(sorted(list(f for f, c in finder.find(pattern))),
+ sorted(result))
+
+
+class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ ensureParentDir(self.tmppath(path))
+ open(self.tmppath(path), 'wb').write(path)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_file_finder(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir)
+ self.do_match_test()
+ self.do_finder_test(self.finder)
+
+ def test_get(self):
+ self.prepare_match_test()
+ finder = FileFinder(self.tmpdir)
+
+ self.assertIsNone(finder.get('does-not-exist'))
+ res = finder.get('bar')
+ self.assertIsInstance(res, File)
+ self.assertEqual(mozpath.normpath(res.path),
+ mozpath.join(self.tmpdir, 'bar'))
+
+ def test_ignored_dirs(self):
+ """Ignored directories should not have results returned."""
+ self.prepare_match_test()
+ self.add('fooz')
+
+ # Present to ensure prefix matching doesn't exclude.
+ self.add('foo/quxz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/qux'])
+
+ self.do_check('**', ['bar', 'foo/bar', 'foo/baz', 'foo/quxz', 'fooz'])
+ self.do_check('foo/*', ['foo/bar', 'foo/baz', 'foo/quxz'])
+ self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz'])
+ self.do_check('foo/qux/**', [])
+ self.do_check('foo/qux/*', [])
+ self.do_check('foo/qux/bar', [])
+ self.do_check('foo/quxz', ['foo/quxz'])
+ self.do_check('fooz', ['fooz'])
+
+ def test_ignored_files(self):
+ """Ignored files should not have results returned."""
+ self.prepare_match_test()
+
+ # Be sure prefix match doesn't get ignored.
+ self.add('barz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/bar', 'bar'])
+ self.do_check('**', ['barz', 'foo/baz', 'foo/qux/1', 'foo/qux/2/test',
+ 'foo/qux/2/test2', 'foo/qux/bar'])
+ self.do_check('foo/**', ['foo/baz', 'foo/qux/1', 'foo/qux/2/test',
+ 'foo/qux/2/test2', 'foo/qux/bar'])
+
+ def test_ignored_patterns(self):
+ """Ignore entries with patterns should be honored."""
+ self.prepare_match_test()
+
+ self.add('foo/quxz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/qux/*'])
+ self.do_check('**', ['foo/bar', 'foo/baz', 'foo/quxz', 'bar'])
+ self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz'])
+
+ def test_dotfiles(self):
+ """Finder can find files beginning with . is configured."""
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True)
+ self.do_check('**', ['bar', 'foo/.foo', 'foo/.bar/foo',
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'])
+
+ def test_dotfiles_plus_ignore(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True,
+ ignore=['foo/.bar/**'])
+ self.do_check('foo/**', ['foo/.foo', 'foo/bar', 'foo/baz',
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'])
+
+
+class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.jar.add(path, path, compress=True)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_jar_finder(self):
+ self.jar = JarWriter(file=self.tmppath('test.jar'))
+ self.prepare_match_test()
+ self.jar.finish()
+ reader = JarReader(file=self.tmppath('test.jar'))
+ self.finder = JarFinder(self.tmppath('test.jar'), reader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
+
+
+class TestTarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.tar.addfile(tarfile.TarInfo(name=path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_tar_finder(self):
+ self.tar = tarfile.open(name=self.tmppath('test.tar.bz2'),
+ mode='w:bz2')
+ self.prepare_match_test()
+ self.tar.close()
+ with tarfile.open(name=self.tmppath('test.tar.bz2'),
+ mode='r:bz2') as tarreader:
+ self.finder = TarFinder(self.tmppath('test.tar.bz2'), tarreader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), ExtractedTarFile)
+
+
+class TestComposedFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path, content=None):
+ # Put foo/qux files under $tmp/b.
+ if path.startswith('foo/qux/'):
+ real_path = mozpath.join('b', path[8:])
+ else:
+ real_path = mozpath.join('a', path)
+ ensureParentDir(self.tmppath(real_path))
+ if not content:
+ content = path
+ open(self.tmppath(real_path), 'wb').write(content)
+
+ def do_check(self, pattern, result):
+ if '*' in pattern:
+ return
+ do_check(self, self.finder, pattern, result)
+
+ def test_composed_finder(self):
+ self.prepare_match_test()
+ # Also add files in $tmp/a/foo/qux because ComposedFinder is
+ # expected to mask foo/qux entirely with content from $tmp/b.
+ ensureParentDir(self.tmppath('a/foo/qux/hoge'))
+ open(self.tmppath('a/foo/qux/hoge'), 'wb').write('hoge')
+ open(self.tmppath('a/foo/qux/bar'), 'wb').write('not the right content')
+ self.finder = ComposedFinder({
+ '': FileFinder(self.tmppath('a')),
+ 'foo/qux': FileFinder(self.tmppath('b')),
+ })
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), File)
+
+
+@unittest.skipUnless(hglib, 'hglib not available')
+class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir):
+ def setUp(self):
+ super(TestMercurialRevisionFinder, self).setUp()
+ hglib.init(self.tmpdir)
+
+ def add(self, path):
+ c = hglib.open(self.tmpdir)
+ ensureParentDir(self.tmppath(path))
+ with open(self.tmppath(path), 'wb') as fh:
+ fh.write(path)
+ c.add(self.tmppath(path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def _get_finder(self, *args, **kwargs):
+ return MercurialRevisionFinder(*args, **kwargs)
+
+ def test_default_revision(self):
+ self.prepare_match_test()
+ c = hglib.open(self.tmpdir)
+ c.commit('initial commit')
+ self.finder = self._get_finder(self.tmpdir)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), MercurialFile)
+
+ def test_old_revision(self):
+ c = hglib.open(self.tmpdir)
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('foo initial')
+ c.add(self.tmppath('foo'))
+ c.commit('initial')
+
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('foo second')
+ with open(self.tmppath('bar'), 'wb') as fh:
+ fh.write('bar second')
+ c.add(self.tmppath('bar'))
+ c.commit('second')
+ # This wipes out the working directory, ensuring the finder isn't
+ # finding anything from the filesystem.
+ c.rawcommand(['update', 'null'])
+
+ finder = self._get_finder(self.tmpdir, 0)
+ f = finder.get('foo')
+ self.assertEqual(f.read(), 'foo initial')
+ self.assertEqual(f.read(), 'foo initial', 'read again for good measure')
+ self.assertIsNone(finder.get('bar'))
+
+        finder = self._get_finder(self.tmpdir, 1)
+ f = finder.get('foo')
+ self.assertEqual(f.read(), 'foo second')
+ f = finder.get('bar')
+ self.assertEqual(f.read(), 'bar second')
+
+ def test_recognize_repo_paths(self):
+ c = hglib.open(self.tmpdir)
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('initial')
+ c.add(self.tmppath('foo'))
+ c.commit('initial')
+ c.rawcommand(['update', 'null'])
+
+ finder = self._get_finder(self.tmpdir, 0,
+ recognize_repo_paths=True)
+ with self.assertRaises(NotImplementedError):
+ list(finder.find(''))
+
+ with self.assertRaises(ValueError):
+ finder.get('foo')
+ with self.assertRaises(ValueError):
+ finder.get('')
+
+ f = finder.get(self.tmppath('foo'))
+ self.assertIsInstance(f, MercurialFile)
+ self.assertEqual(f.read(), 'initial')
+
+
+@unittest.skipUnless(MercurialNativeRevisionFinder, 'hgnative not available')
+class TestMercurialNativeRevisionFinder(TestMercurialRevisionFinder):
+ def _get_finder(self, *args, **kwargs):
+ return MercurialNativeRevisionFinder(*args, **kwargs)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py
new file mode 100644
index 000000000..b785d014a
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_manifests.py
@@ -0,0 +1,375 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+
+import mozunit
+
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+)
+from mozpack.manifests import (
+ InstallManifest,
+ UnreadableInstallManifest,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestInstallManifest(TestWithTmpDir):
+ def test_construct(self):
+ m = InstallManifest()
+ self.assertEqual(len(m), 0)
+
+ def test_malformed(self):
+ f = self.tmppath('manifest')
+ open(f, 'wb').write('junk\n')
+ with self.assertRaises(UnreadableInstallManifest):
+ m = InstallManifest(f)
+
+ def test_adds(self):
+ m = InstallManifest()
+ m.add_symlink('s_source', 's_dest')
+ m.add_copy('c_source', 'c_dest')
+ m.add_required_exists('e_dest')
+ m.add_optional_exists('o_dest')
+ m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest')
+ m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest')
+ m.add_preprocess('p_source', 'p_dest', 'p_source.pp')
+ m.add_content('content', 'content')
+
+ self.assertEqual(len(m), 8)
+ self.assertIn('s_dest', m)
+ self.assertIn('c_dest', m)
+ self.assertIn('p_dest', m)
+ self.assertIn('e_dest', m)
+ self.assertIn('o_dest', m)
+ self.assertIn('content', m)
+
+ with self.assertRaises(ValueError):
+ m.add_symlink('s_other', 's_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_copy('c_other', 'c_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_preprocess('p_other', 'p_dest', 'p_other.pp')
+
+ with self.assertRaises(ValueError):
+ m.add_required_exists('e_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_optional_exists('o_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_content('content', 'content')
+
+ def _get_test_manifest(self):
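+        # Build a manifest containing one entry of each supported type.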
+ m = InstallManifest()
+ m.add_symlink(self.tmppath('s_source'), 's_dest')
+ m.add_copy(self.tmppath('c_source'), 'c_dest')
+        m.add_preprocess(self.tmppath('p_source'), 'p_dest',
+                         self.tmppath('p_source.pp'), '#',
+                         {'FOO': 'BAR', 'BAZ': 'QUX'})
+ m.add_required_exists('e_dest')
+ m.add_optional_exists('o_dest')
+ m.add_pattern_symlink('ps_base', '*', 'ps_dest')
+ m.add_pattern_copy('pc_base', '**', 'pc_dest')
+ m.add_content('the content\non\nmultiple lines', 'content')
+
+ return m
+
+ def test_serialization(self):
+ m = self._get_test_manifest()
+
+ p = self.tmppath('m')
+ m.write(path=p)
+ self.assertTrue(os.path.isfile(p))
+
+ with open(p, 'rb') as fh:
+ c = fh.read()
+
+ self.assertEqual(c.count('\n'), 9)
+
+ lines = c.splitlines()
+ self.assertEqual(len(lines), 9)
+
+ self.assertEqual(lines[0], '5')
+
+ m2 = InstallManifest(path=p)
+ self.assertEqual(m, m2)
+ p2 = self.tmppath('m2')
+ m2.write(path=p2)
+
+ with open(p2, 'rb') as fh:
+ c2 = fh.read()
+
+ self.assertEqual(c, c2)
+
+ def test_populate_registry(self):
+ m = self._get_test_manifest()
+ r = FileRegistry()
+ m.populate_registry(r)
+
+ self.assertEqual(len(r), 6)
+ self.assertEqual(r.paths(), ['c_dest', 'content', 'e_dest', 'o_dest',
+ 'p_dest', 's_dest'])
+
+ def test_pattern_expansion(self):
+ source = self.tmppath('source')
+ os.mkdir(source)
+ os.mkdir('%s/base' % source)
+ os.mkdir('%s/base/foo' % source)
+
+ with open('%s/base/foo/file1' % source, 'a'):
+ pass
+
+ with open('%s/base/foo/file2' % source, 'a'):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_symlink('%s/base' % source, '**', 'dest')
+
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])
+
+ def test_or(self):
+ m1 = self._get_test_manifest()
+ orig_length = len(m1)
+ m2 = InstallManifest()
+ m2.add_symlink('s_source2', 's_dest2')
+ m2.add_copy('c_source2', 'c_dest2')
+
+ m1 |= m2
+
+ self.assertEqual(len(m2), 2)
+ self.assertEqual(len(m1), orig_length + 2)
+
+ self.assertIn('s_dest2', m1)
+ self.assertIn('c_dest2', m1)
+
+ def test_copier_application(self):
+ dest = self.tmppath('dest')
+ os.mkdir(dest)
+
+ to_delete = self.tmppath('dest/to_delete')
+ with open(to_delete, 'a'):
+ pass
+
+ with open(self.tmppath('s_source'), 'wt') as fh:
+ fh.write('symlink!')
+
+ with open(self.tmppath('c_source'), 'wt') as fh:
+ fh.write('copy!')
+
+ with open(self.tmppath('p_source'), 'wt') as fh:
+ fh.write('#define FOO 1\npreprocess!')
+
+ with open(self.tmppath('dest/e_dest'), 'a'):
+ pass
+
+ with open(self.tmppath('dest/o_dest'), 'a'):
+ pass
+
+ m = self._get_test_manifest()
+ c = FileCopier()
+ m.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath('dest/s_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/c_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/e_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/o_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/content')))
+ self.assertFalse(os.path.exists(to_delete))
+
+ with open(self.tmppath('dest/s_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'symlink!')
+
+ with open(self.tmppath('dest/c_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'copy!')
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'preprocess!')
+
+ self.assertEqual(result.updated_files, set(self.tmppath(p) for p in (
+ 'dest/s_dest', 'dest/c_dest', 'dest/p_dest', 'dest/content')))
+ self.assertEqual(result.existing_files,
+ set([self.tmppath('dest/e_dest'), self.tmppath('dest/o_dest')]))
+ self.assertEqual(result.removed_files, {to_delete})
+ self.assertEqual(result.removed_directories, set())
+
+ def test_preprocessor(self):
+ manifest = self.tmppath('m')
+ deps = self.tmppath('m.pp')
+ dest = self.tmppath('dest')
+ include = self.tmppath('p_incl')
+
+ with open(include, 'wt') as fh:
+ fh.write('#define INCL\n')
+ time = os.path.getmtime(include) - 3
+ os.utime(include, (time, time))
+
+ with open(self.tmppath('p_source'), 'wt') as fh:
+ fh.write('#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n')
+ fh.write('#ifdef DEPTEST\nPASS2\n#endif\n')
+ fh.write('#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n')
+ time = os.path.getmtime(self.tmppath('p_source')) - 3
+ os.utime(self.tmppath('p_source'), (time, time))
+
+ # Create and write a manifest with the preprocessed file, then apply it.
+ # This should write out our preprocessed file.
+ m = InstallManifest()
+        m.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
+                         {'FOO': 'BAR', 'BAZ': 'QUX'})
+ m.write(path=manifest)
+
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS1\n')
+
+ # Create a second manifest with the preprocessed file, then apply it.
+ # Since this manifest does not exist on the disk, there should not be a
+ # dependency on it, and the preprocessed file should not be modified.
+ m2 = InstallManifest()
+        m2.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
+                          {'DEPTEST': True})
+ c = FileCopier()
+ m2.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertFalse(self.tmppath('dest/p_dest') in result.updated_files)
+ self.assertTrue(self.tmppath('dest/p_dest') in result.existing_files)
+
+ # Write out the second manifest, then load it back in from the disk.
+ # This should add the dependency on the manifest file, so our
+ # preprocessed file should be regenerated with the new defines.
+ # We also set the mtime on the destination file back, so it will be
+ # older than the manifest file.
+ m2.write(path=manifest)
+ time = os.path.getmtime(manifest) - 1
+ os.utime(self.tmppath('dest/p_dest'), (time, time))
+ m2 = InstallManifest(path=manifest)
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS2\n')
+
+ # Set the time on the manifest back, so it won't be picked up as
+        # modified in the next test.
+ time = os.path.getmtime(manifest) - 1
+ os.utime(manifest, (time, time))
+
+ # Update the contents of a file included by the source file. This should
+ # cause the destination to be regenerated.
+ with open(include, 'wt') as fh:
+ fh.write('#define INCLTEST\n')
+
+ time = os.path.getmtime(include) - 1
+ os.utime(self.tmppath('dest/p_dest'), (time, time))
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS2\nPASS3\n')
+
+ def test_preprocessor_dependencies(self):
+ manifest = self.tmppath('m')
+ deps = self.tmppath('m.pp')
+ dest = self.tmppath('dest')
+ source = self.tmppath('p_source')
+ destfile = self.tmppath('dest/p_dest')
+ include = self.tmppath('p_incl')
+ os.mkdir(dest)
+
+ with open(source, 'wt') as fh:
+ fh.write('#define SRC\nSOURCE\n')
+ time = os.path.getmtime(source) - 3
+ os.utime(source, (time, time))
+
+ with open(include, 'wt') as fh:
+ fh.write('INCLUDE\n')
+ time = os.path.getmtime(source) - 3
+ os.utime(include, (time, time))
+
+ # Create and write a manifest with the preprocessed file.
+ m = InstallManifest()
+        m.add_preprocess(source, 'p_dest', deps, '#',
+                         {'FOO': 'BAR', 'BAZ': 'QUX'})
+ m.write(path=manifest)
+
+ time = os.path.getmtime(source) - 5
+ os.utime(manifest, (time, time))
+
+ # Now read the manifest back in, and apply it. This should write out
+ # our preprocessed file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\n')
+
+ # Next, modify the source to #INCLUDE another file.
+ with open(source, 'wt') as fh:
+ fh.write('SOURCE\n#include p_incl\n')
+ time = os.path.getmtime(source) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that it also reads the newly included
+ # file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n')
+
+ # Set the time on the source file back, so it won't be picked up as
+ # modified in the next test.
+ time = os.path.getmtime(source) - 1
+ os.utime(source, (time, time))
+
+ # Now, modify the include file (but not the original source).
+ with open(include, 'wt') as fh:
+ fh.write('INCLUDE MODIFIED\n')
+ time = os.path.getmtime(include) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that the change to the include file
+ # is detected. That should cause the preprocessor to run again.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n')
+
+ # OR-ing another InstallManifest in should copy its file dependencies.
+ m = InstallManifest()
+ m |= InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ e = c._files['p_dest']
+ self.assertEqual(e.extra_depends, [manifest])
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py
new file mode 100644
index 000000000..948403006
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -0,0 +1,342 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.files import FileFinder
+from mozpack.mozjar import (
+ JarReaderError,
+ JarWriterError,
+ JarStruct,
+ JarReader,
+ JarWriter,
+ Deflater,
+ JarLog,
+)
+from collections import OrderedDict
+from mozpack.test.test_files import MockDest
+import unittest
+import mozunit
+from cStringIO import StringIO
+from urllib import pathname2url
+import mozpack.path as mozpath
+import os
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestJarStruct(unittest.TestCase):
+ class Foo(JarStruct):
+ MAGIC = 0x01020304
+ STRUCT = OrderedDict([
+ ('foo', 'uint32'),
+ ('bar', 'uint16'),
+ ('qux', 'uint16'),
+ ('length', 'uint16'),
+ ('length2', 'uint16'),
+ ('string', 'length'),
+ ('string2', 'length2'),
+ ])
+
+ def test_jar_struct(self):
+ foo = TestJarStruct.Foo()
+ self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
+ self.assertEqual(foo['foo'], 0)
+ self.assertEqual(foo['bar'], 0)
+ self.assertEqual(foo['qux'], 0)
+ self.assertFalse('length' in foo)
+ self.assertFalse('length2' in foo)
+ self.assertEqual(foo['string'], '')
+ self.assertEqual(foo['string2'], '')
+
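+ # Fixed-size portion: 4 (signature) + 4 (foo) + 2 + 2 (bar, qux)
+ # + 2 + 2 (the two length fields) = 16 bytes; both variable-length
+ # strings are still empty at this point.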
+ self.assertEqual(foo.size, 16)
+
+ foo['foo'] = 0x42434445
+ foo['bar'] = 0xabcd
+ foo['qux'] = 0xef01
+ foo['string'] = 'abcde'
+ foo['string2'] = 'Arbitrarily long string'
+
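+ # Expected little-endian layout: magic, foo, bar, qux, then the two
+ # length fields (5 and 23) followed by the string payloads.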
+ serialized = b'\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef' + \
+ b'\x05\x00\x17\x00abcdeArbitrarily long string'
+ self.assertEqual(foo.size, len(serialized))
+ foo_serialized = foo.serialize()
+ self.assertEqual(foo_serialized, serialized)
+
+ def do_test_read_jar_struct(self, data):
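+ # The buffer starts with one byte of padding, so parsing from
+ # offset 0 or offset 2 misaligns the magic number and must raise.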
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])
+
+ foo = TestJarStruct.Foo(data[1:])
+ self.assertEqual(foo['foo'], 0x45444342)
+ self.assertEqual(foo['bar'], 0xcdab)
+ self.assertEqual(foo['qux'], 0x01ef)
+ self.assertFalse('length' in foo)
+ self.assertFalse('length2' in foo)
+ self.assertEqual(foo['string'], '012345')
+ self.assertEqual(foo['string2'], '67')
+
+ def test_read_jar_struct(self):
+ data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
+ b'\x01\x06\x00\x02\x0001234567890'
+ self.do_test_read_jar_struct(data)
+
+ def test_read_jar_struct_memoryview(self):
+ data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
+ b'\x01\x06\x00\x02\x0001234567890'
+ self.do_test_read_jar_struct(memoryview(data))
+
+
+class TestDeflater(unittest.TestCase):
+ def wrap(self, data):
+ return data
+
+ def test_deflater_no_compress(self):
+ deflater = Deflater(False)
+ deflater.write(self.wrap('abc'))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, 'abc')
+ self.assertEqual(deflater.crc32, 0x352441c2)
+
+ def test_deflater_compress_no_gain(self):
+ deflater = Deflater(True)
+ deflater.write(self.wrap('abc'))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, 'abc')
+ self.assertEqual(deflater.crc32, 0x352441c2)
+
+ def test_deflater_compress(self):
+ deflater = Deflater(True)
+ deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
+ self.assertTrue(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 26)
+ self.assertNotEqual(deflater.compressed_size,
+ deflater.uncompressed_size)
+ self.assertEqual(deflater.crc32, 0xd46b97ed)
+ # The CRC is the same as when not compressed
+ deflater = Deflater(False)
+ self.assertFalse(deflater.compressed)
+ deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
+ self.assertEqual(deflater.crc32, 0xd46b97ed)
+
+
+class TestDeflaterMemoryView(TestDeflater):
+ def wrap(self, data):
+ return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
+ optimize = False
+
+ def test_jar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ jar.add('foo', 'foo')
+ self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
+ jar.add('baz\\backslash', 'aaaaaaaaaaaaaaa')
+
+ files = [j for j in JarReader(fileobj=s)]
+
+ self.assertEqual(files[0].filename, 'foo')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'foo')
+
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertFalse(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ if os.sep == '\\':
+ self.assertEqual(files[3].filename, 'baz/backslash',
+ 'backslashes in filenames on Windows should get normalized')
+ else:
+ self.assertEqual(files[3].filename, 'baz\\backslash',
+ 'backslashes in filenames on POSIX platforms are untouched')
+
+ s = MockDest()
+ with JarWriter(fileobj=s, compress=False,
+ optimize=self.optimize) as jar:
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('foo', 'foo')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'bar')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[1].filename, 'foo')
+ self.assertFalse(files[1].compressed)
+ self.assertEqual(files[1].read(), 'foo')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertTrue('bar' in jar)
+ self.assertTrue('foo' in jar)
+ self.assertFalse('baz' in jar)
+ self.assertTrue('baz/qux' in jar)
+ self.assertEqual(jar['bar'].filename, files[0].filename)
+ self.assertEqual(jar['foo'].filename, files[1].filename)
+ self.assertEqual(jar['baz/qux'].filename, files[2].filename)
+
+ s.seek(0)
+ jar = JarReader(fileobj=s)
+ self.assertTrue('bar' in jar)
+ self.assertTrue('foo' in jar)
+ self.assertFalse('baz' in jar)
+ self.assertTrue('baz/qux' in jar)
+
+ files[0].seek(0)
+ self.assertEqual(jar['bar'].filename, files[0].filename)
+ self.assertEqual(jar['bar'].compressed, files[0].compressed)
+ self.assertEqual(jar['bar'].read(), files[0].read())
+
+ files[1].seek(0)
+ self.assertEqual(jar['foo'].filename, files[1].filename)
+ self.assertEqual(jar['foo'].compressed, files[1].compressed)
+ self.assertEqual(jar['foo'].read(), files[1].read())
+
+ files[2].seek(0)
+ self.assertEqual(jar['baz/qux'].filename, files[2].filename)
+ self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
+ self.assertEqual(jar['baz/qux'].read(), files[2].read())
+
+ def test_rejar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
+
+ new = MockDest()
+ with JarWriter(fileobj=new, optimize=self.optimize) as jar:
+ for j in JarReader(fileobj=s):
+ jar.add(j.filename, j)
+
+ jar = JarReader(fileobj=new)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'foo')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'foo')
+
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ def test_add_from_finder(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ finder = FileFinder(test_data_path)
+ for p, f in finder.find('test_data'):
+ jar.add('test_data', f)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'test_data')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'test_data')
+
+
+class TestOptimizeJar(TestJar):
+ optimize = True
+
+
+class TestPreload(unittest.TestCase):
+ def test_preload(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, None)
+
+ with JarWriter(fileobj=s) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.preload(['baz/qux', 'bar'])
+
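+ # Preloaded entries are moved to the front of the jar in the order
+ # given; last_preloaded names the final one.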
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, 'bar')
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'baz/qux')
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertEqual(files[2].filename, 'foo')
+
+
+class TestJarLog(unittest.TestCase):
+ def test_jarlog(self):
+ base = 'file:' + pathname2url(os.path.abspath(os.curdir))
+ s = StringIO('\n'.join([
+ base + '/bar/baz.jar first',
+ base + '/bar/baz.jar second',
+ base + '/bar/baz.jar third',
+ base + '/bar/baz.jar second',
+ base + '/bar/baz.jar second',
+ 'jar:' + base + '/qux.zip!/omni.ja stuff',
+ base + '/bar/baz.jar first',
+ 'jar:' + base + '/qux.zip!/omni.ja other/stuff',
+ 'jar:' + base + '/qux.zip!/omni.ja stuff',
+ base + '/bar/baz.jar third',
+ 'jar:jar:' + base + '/qux.zip!/baz/baz.jar!/omni.ja nested/stuff',
+ 'jar:jar:jar:' + base + '/qux.zip!/baz/baz.jar!/foo.zip!/omni.ja' +
+ ' deeply/nested/stuff',
+ ]))
+ log = JarLog(fileobj=s)
+ def canonicalize(p):
+ return mozpath.normsep(os.path.normcase(os.path.realpath(p)))
+ baz_jar = canonicalize('bar/baz.jar')
+ qux_zip = canonicalize('qux.zip')
+ self.assertEqual(set(log.keys()), set([
+ baz_jar,
+ (qux_zip, 'omni.ja'),
+ (qux_zip, 'baz/baz.jar', 'omni.ja'),
+ (qux_zip, 'baz/baz.jar', 'foo.zip', 'omni.ja'),
+ ]))
+ self.assertEqual(log[baz_jar], [
+ 'first',
+ 'second',
+ 'third',
+ ])
+ self.assertEqual(log[(qux_zip, 'omni.ja')], [
+ 'stuff',
+ 'other/stuff',
+ ])
+ self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'omni.ja')],
+ ['nested/stuff'])
+ self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'foo.zip',
+ 'omni.ja')], ['deeply/nested/stuff'])
+
+ # The above tests also indirectly check the value returned by
+ # JarLog.canonicalize for various jar: and file: URLs, but
+ # JarLog.canonicalize also supports plain paths.
+ self.assertEqual(JarLog.canonicalize(os.path.abspath('bar/baz.jar')),
+ baz_jar)
+ self.assertEqual(JarLog.canonicalize('bar/baz.jar'), baz_jar)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 000000000..397f40538
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,490 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+import os
+from mozpack.packager import (
+ preprocess_manifest,
+ CallDeque,
+ Component,
+ SimplePackager,
+ SimpleManifestSink,
+)
+from mozpack.files import GeneratedFile
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestContent,
+ ManifestResource,
+)
+from mozunit import MockedOpen
+from mozbuild.preprocessor import Preprocessor
+from mozpack.errors import (
+ errors,
+ ErrorMessage,
+)
+import mozpack.path as mozpath
+
+MANIFEST = '''
+bar/*
+[foo]
+foo/*
+-foo/bar
+chrome.manifest
+[zot destdir="destdir"]
+foo/zot
+; comment
+#ifdef baz
+[baz]
+baz@SUFFIX@
+#endif
+'''
+
+
+class TestPreprocessManifest(unittest.TestCase):
+ MANIFEST_PATH = os.path.join(os.path.abspath(os.curdir), 'manifest')
+
+ EXPECTED_LOG = [
+ ((MANIFEST_PATH, 2), 'add', '', 'bar/*'),
+ ((MANIFEST_PATH, 4), 'add', 'foo', 'foo/*'),
+ ((MANIFEST_PATH, 5), 'remove', 'foo', 'foo/bar'),
+ ((MANIFEST_PATH, 6), 'add', 'foo', 'chrome.manifest'),
+ ((MANIFEST_PATH, 8), 'add', 'zot destdir="destdir"', 'foo/zot'),
+ ]
+
+ def setUp(self):
+ class MockSink(object):
+ def __init__(self):
+ self.log = []
+
+ def add(self, component, path):
+ self._log(errors.get_context(), 'add', repr(component), path)
+
+ def remove(self, component, path):
+ self._log(errors.get_context(), 'remove', repr(component), path)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+ self.sink = MockSink()
+
+ def test_preprocess_manifest(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ preprocess_manifest(self.sink, 'manifest')
+ self.assertEqual(self.sink.log, self.EXPECTED_LOG)
+
+ def test_preprocess_manifest_missing_define(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ self.assertRaises(
+ Preprocessor.Error,
+ preprocess_manifest,
+ self.sink,
+ 'manifest',
+ {'baz': 1}
+ )
+
+ def test_preprocess_manifest_defines(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ preprocess_manifest(self.sink, 'manifest',
+ {'baz': 1, 'SUFFIX': '.exe'})
+ self.assertEqual(self.sink.log, self.EXPECTED_LOG +
+ [((self.MANIFEST_PATH, 12), 'add', 'baz', 'baz.exe')])
+
+
+class MockFinder(object):
+ def __init__(self, files):
+ self.files = files
+ self.log = []
+
+ def find(self, path):
+ self.log.append(path)
+ for f in sorted(self.files):
+ if mozpath.match(f, path):
+ yield f, self.files[f]
+
+ def __iter__(self):
+ return self.find('')
+
+
+class MockFormatter(object):
+ def __init__(self):
+ self.log = []
+
+ def add_base(self, *args):
+ self._log(errors.get_context(), 'add_base', *args)
+
+ def add_manifest(self, *args):
+ self._log(errors.get_context(), 'add_manifest', *args)
+
+ def add_interfaces(self, *args):
+ self._log(errors.get_context(), 'add_interfaces', *args)
+
+ def add(self, *args):
+ self._log(errors.get_context(), 'add', *args)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+
+class TestSimplePackager(unittest.TestCase):
+ def test_simple_packager(self):
+ class GeneratedFileWithPath(GeneratedFile):
+ def __init__(self, path, content):
+ GeneratedFile.__init__(self, content)
+ self.path = path
+
+ formatter = MockFormatter()
+ packager = SimplePackager(formatter)
+ curdir = os.path.abspath(os.curdir)
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
+ 'bar.manifest'),
+ 'resource bar bar/\ncontent bar bar/')
+ with errors.context('manifest', 1):
+ packager.add('foo/bar.manifest', file)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
+ 'baz.manifest'),
+ 'resource baz baz/')
+ with errors.context('manifest', 2):
+ packager.add('bar/baz.manifest', file)
+
+ with errors.context('manifest', 3):
+ packager.add('qux/qux.manifest',
+ GeneratedFile(''.join([
+ 'resource qux qux/\n',
+ 'binary-component qux.so\n',
+ ])))
+ bar_xpt = GeneratedFile('bar.xpt')
+ qux_xpt = GeneratedFile('qux.xpt')
+ foo_html = GeneratedFile('foo_html')
+ bar_html = GeneratedFile('bar_html')
+ with errors.context('manifest', 4):
+ packager.add('foo/bar.xpt', bar_xpt)
+ with errors.context('manifest', 5):
+ packager.add('foo/bar/foo.html', foo_html)
+ packager.add('foo/bar/bar.html', bar_html)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo.manifest'),
+ ''.join([
+ 'manifest foo/bar.manifest\n',
+ 'manifest bar/baz.manifest\n',
+ ]))
+ with errors.context('manifest', 6):
+ packager.add('foo.manifest', file)
+ with errors.context('manifest', 7):
+ packager.add('foo/qux.xpt', qux_xpt)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'addon',
+ 'chrome.manifest'),
+ 'resource hoge hoge/')
+ with errors.context('manifest', 8):
+ packager.add('addon/chrome.manifest', file)
+
+ install_rdf = GeneratedFile('<RDF></RDF>')
+ with errors.context('manifest', 9):
+ packager.add('addon/install.rdf', install_rdf)
+
+ with errors.context('manifest', 10):
+ packager.add('addon2/install.rdf', install_rdf)
+ packager.add('addon2/chrome.manifest',
+ GeneratedFile('binary-component addon2.so'))
+
+ with errors.context('manifest', 11):
+ packager.add('addon3/install.rdf', install_rdf)
+ packager.add('addon3/chrome.manifest', GeneratedFile(
+ 'manifest components/components.manifest'))
+ packager.add('addon3/components/components.manifest',
+ GeneratedFile('binary-component addon3.so'))
+
+ with errors.context('manifest', 12):
+ install_rdf_addon4 = GeneratedFile(
+ '<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>')
+ packager.add('addon4/install.rdf', install_rdf_addon4)
+
+ with errors.context('manifest', 13):
+ install_rdf_addon5 = GeneratedFile(
+ '<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>')
+ packager.add('addon5/install.rdf', install_rdf_addon5)
+
+ with errors.context('manifest', 14):
+ install_rdf_addon6 = GeneratedFile(
+ '<RDF>\n<... em:unpack=true>\n<...>\n</RDF>')
+ packager.add('addon6/install.rdf', install_rdf_addon6)
+
+ with errors.context('manifest', 15):
+ install_rdf_addon7 = GeneratedFile(
+ '<RDF>\n<... em:unpack=false>\n<...>\n</RDF>')
+ packager.add('addon7/install.rdf', install_rdf_addon7)
+
+ with errors.context('manifest', 16):
+ install_rdf_addon8 = GeneratedFile(
+ '<RDF>\n<... em:unpack="true">\n<...>\n</RDF>')
+ packager.add('addon8/install.rdf', install_rdf_addon8)
+
+ with errors.context('manifest', 17):
+ install_rdf_addon9 = GeneratedFile(
+ '<RDF>\n<... em:unpack="false">\n<...>\n</RDF>')
+ packager.add('addon9/install.rdf', install_rdf_addon9)
+
+ with errors.context('manifest', 18):
+ install_rdf_addon10 = GeneratedFile(
+ '<RDF>\n<... em:unpack=\'true\'>\n<...>\n</RDF>')
+ packager.add('addon10/install.rdf', install_rdf_addon10)
+
+ with errors.context('manifest', 19):
+ install_rdf_addon11 = GeneratedFile(
+ '<RDF>\n<... em:unpack=\'false\'>\n<...>\n</RDF>')
+ packager.add('addon11/install.rdf', install_rdf_addon11)
+
+ self.assertEqual(formatter.log, [])
+
+ with errors.context('dummy', 1):
+ packager.close()
+ self.maxDiff = None
+ # The formatter is expected to reorder the manifest entries so that
+ # chrome entries appear before the others.
+ self.assertEqual(formatter.log, [
+ (('dummy', 1), 'add_base', '', False),
+ (('dummy', 1), 'add_base', 'addon', True),
+ (('dummy', 1), 'add_base', 'addon10', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon11', True),
+ (('dummy', 1), 'add_base', 'addon2', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon3', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon4', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon5', True),
+ (('dummy', 1), 'add_base', 'addon6', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon7', True),
+ (('dummy', 1), 'add_base', 'addon8', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon9', True),
+ (('dummy', 1), 'add_base', 'qux', False),
+ ((os.path.join(curdir, 'foo', 'bar.manifest'), 2),
+ 'add_manifest', ManifestContent('foo', 'bar', 'bar/')),
+ ((os.path.join(curdir, 'foo', 'bar.manifest'), 1),
+ 'add_manifest', ManifestResource('foo', 'bar', 'bar/')),
+ (('bar/baz.manifest', 1),
+ 'add_manifest', ManifestResource('bar', 'baz', 'baz/')),
+ (('qux/qux.manifest', 1),
+ 'add_manifest', ManifestResource('qux', 'qux', 'qux/')),
+ (('qux/qux.manifest', 2),
+ 'add_manifest', ManifestBinaryComponent('qux', 'qux.so')),
+ (('manifest', 4), 'add_interfaces', 'foo/bar.xpt', bar_xpt),
+ (('manifest', 7), 'add_interfaces', 'foo/qux.xpt', qux_xpt),
+ ((os.path.join(curdir, 'addon', 'chrome.manifest'), 1),
+ 'add_manifest', ManifestResource('addon', 'hoge', 'hoge/')),
+ (('addon2/chrome.manifest', 1), 'add_manifest',
+ ManifestBinaryComponent('addon2', 'addon2.so')),
+ (('addon3/components/components.manifest', 1), 'add_manifest',
+ ManifestBinaryComponent('addon3/components', 'addon3.so')),
+ (('manifest', 5), 'add', 'foo/bar/foo.html', foo_html),
+ (('manifest', 5), 'add', 'foo/bar/bar.html', bar_html),
+ (('manifest', 9), 'add', 'addon/install.rdf', install_rdf),
+ (('manifest', 10), 'add', 'addon2/install.rdf', install_rdf),
+ (('manifest', 11), 'add', 'addon3/install.rdf', install_rdf),
+ (('manifest', 12), 'add', 'addon4/install.rdf',
+ install_rdf_addon4),
+ (('manifest', 13), 'add', 'addon5/install.rdf',
+ install_rdf_addon5),
+ (('manifest', 14), 'add', 'addon6/install.rdf',
+ install_rdf_addon6),
+ (('manifest', 15), 'add', 'addon7/install.rdf',
+ install_rdf_addon7),
+ (('manifest', 16), 'add', 'addon8/install.rdf',
+ install_rdf_addon8),
+ (('manifest', 17), 'add', 'addon9/install.rdf',
+ install_rdf_addon9),
+ (('manifest', 18), 'add', 'addon10/install.rdf',
+ install_rdf_addon10),
+ (('manifest', 19), 'add', 'addon11/install.rdf',
+ install_rdf_addon11),
+ ])
+
+ self.assertEqual(packager.get_bases(),
+ set(['', 'addon', 'addon2', 'addon3', 'addon4',
+ 'addon5', 'addon6', 'addon7', 'addon8',
+ 'addon9', 'addon10', 'addon11', 'qux']))
+ self.assertEqual(packager.get_bases(addons=False), set(['', 'qux']))
+
+ def test_simple_packager_manifest_consistency(self):
+ formatter = MockFormatter()
+ # bar/ is detected as an addon because of install.rdf, but top-level
+ # includes a manifest inside bar/.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ 'manifest bar/baz.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/install.rdf', GeneratedFile(''))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(e.exception.message,
+ 'Error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"')
+
+ # bar/ is detected as a separate base because of chrome.manifest that
+ # is included nowhere, but top-level includes another manifest inside
+ # bar/.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ 'manifest bar/baz.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/chrome.manifest', GeneratedFile('resource baz baz'))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(e.exception.message,
+ 'Error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"')
+
+ # bar/ is detected as a separate base because of chrome.manifest that
+ # is included nowhere, but chrome.manifest includes baz.manifest from
+ # the same directory. This shouldn't error out.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/chrome.manifest',
+ GeneratedFile('manifest baz.manifest'))
+ packager.close()
+
+
+class TestSimpleManifestSink(unittest.TestCase):
+ def test_simple_manifest_parser(self):
+ formatter = MockFormatter()
+ foobar = GeneratedFile('foobar')
+ foobaz = GeneratedFile('foobaz')
+ fooqux = GeneratedFile('fooqux')
+ foozot = GeneratedFile('foozot')
+ finder = MockFinder({
+ 'bin/foo/bar': foobar,
+ 'bin/foo/baz': foobaz,
+ 'bin/foo/qux': fooqux,
+ 'bin/foo/zot': foozot,
+ 'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'),
+ 'bin/chrome.manifest':
+ GeneratedFile('manifest foo/chrome.manifest'),
+ })
+ parser = SimpleManifestSink(finder, formatter)
+ component0 = Component('component0')
+ component1 = Component('component1')
+ component2 = Component('component2', destdir='destdir')
+ parser.add(component0, 'bin/foo/b*')
+ parser.add(component1, 'bin/foo/qux')
+ parser.add(component1, 'bin/foo/chrome.manifest')
+ parser.add(component2, 'bin/foo/zot')
+ self.assertRaises(ErrorMessage, parser.add, 'component1', 'bin/bar')
+
+ self.assertEqual(formatter.log, [])
+ parser.close()
+ self.assertEqual(formatter.log, [
+ (None, 'add_base', '', False),
+ (('foo/chrome.manifest', 1),
+ 'add_manifest', ManifestResource('foo', 'foo', 'foo/')),
+ (None, 'add', 'foo/bar', foobar),
+ (None, 'add', 'foo/baz', foobaz),
+ (None, 'add', 'foo/qux', fooqux),
+ (None, 'add', 'destdir/foo/zot', foozot),
+ ])
+
+ self.assertEqual(finder.log, [
+ 'bin/foo/b*',
+ 'bin/foo/qux',
+ 'bin/foo/chrome.manifest',
+ 'bin/foo/zot',
+ 'bin/bar',
+ 'bin/chrome.manifest'
+ ])
+
+
+class TestCallDeque(unittest.TestCase):
+ def test_call_deque(self):
+ class Logger(object):
+ def __init__(self):
+ self._log = []
+
+ def log(self, str):
+ self._log.append(str)
+
+ @staticmethod
+ def staticlog(logger, str):
+ logger.log(str)
+
+ def do_log(logger, str):
+ logger.log(str)
+
+ logger = Logger()
+ d = CallDeque()
+ d.append(logger.log, 'foo')
+ d.append(logger.log, 'bar')
+ d.append(logger.staticlog, logger, 'baz')
+ d.append(do_log, logger, 'qux')
+ self.assertEqual(logger._log, [])
+ d.execute()
+ self.assertEqual(logger._log, ['foo', 'bar', 'baz', 'qux'])
+
+
+class TestComponent(unittest.TestCase):
+ def do_split(self, string, name, options):
+ n, o = Component._split_component_and_options(string)
+ self.assertEqual(name, n)
+ self.assertEqual(options, o)
+
+ def test_component_split_component_and_options(self):
+ self.do_split('component', 'component', {})
+ self.do_split('trailingspace ', 'trailingspace', {})
+ self.do_split(' leadingspace', 'leadingspace', {})
+ self.do_split(' trim ', 'trim', {})
+ self.do_split(' trim key="value"', 'trim', {'key': 'value'})
+ self.do_split(' trim empty=""', 'trim', {'empty': ''})
+ self.do_split(' trim space=" "', 'trim', {'space': ' '})
+ self.do_split('component key="value" key2="second" ',
+ 'component', {'key': 'value', 'key2': 'second'})
+ self.do_split('trim key=" value with spaces " key2="spaces again"',
+ 'trim', {'key': ' value with spaces ', 'key2': 'spaces again'})
+
+ def do_split_error(self, string):
+ self.assertRaises(ValueError, Component._split_component_and_options, string)
+
+ def test_component_split_component_and_options_errors(self):
+ self.do_split_error('"component')
+ self.do_split_error('comp"onent')
+ self.do_split_error('component"')
+ self.do_split_error('"component"')
+ self.do_split_error('=component')
+ self.do_split_error('comp=onent')
+ self.do_split_error('component=')
+ self.do_split_error('key="val"')
+ self.do_split_error('component key=')
+ self.do_split_error('component key="val')
+ self.do_split_error('component key=val"')
+ self.do_split_error('component key="val" x')
+ self.do_split_error('component x key="val"')
+ self.do_split_error('component key1="val" x key2="val"')
+
+ def do_from_string(self, string, name, destdir=''):
+ component = Component.from_string(string)
+ self.assertEqual(name, component.name)
+ self.assertEqual(destdir, component.destdir)
+
+ def test_component_from_string(self):
+ self.do_from_string('component', 'component')
+ self.do_from_string('component-with-hyphen', 'component-with-hyphen')
+ self.do_from_string('component destdir="foo/bar"', 'component', 'foo/bar')
+ self.do_from_string('component destdir="bar spc"', 'component', 'bar spc')
+ self.assertRaises(ErrorMessage, Component.from_string, '')
+ self.assertRaises(ErrorMessage, Component.from_string, 'component novalue=')
+ self.assertRaises(ErrorMessage, Component.from_string, 'component badoption=badvalue')
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py
new file mode 100644
index 000000000..1af4336b2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_formats.py
@@ -0,0 +1,428 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+import unittest
+from mozpack.packager.formats import (
+ FlatFormatter,
+ JarFormatter,
+ OmniJarFormatter,
+)
+from mozpack.copier import FileRegistry
+from mozpack.files import (
+ GeneratedFile,
+ ManifestFile,
+)
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestComponent,
+ ManifestResource,
+ ManifestBinaryComponent,
+)
+from mozpack.test.test_files import (
+ MockDest,
+ foo_xpt,
+ foo2_xpt,
+ bar_xpt,
+ read_interfaces,
+)
+import mozpack.path as mozpath
+
+
+CONTENTS = {
+ 'bases': {
+ # base_path: is_addon?
+ '': False,
+ 'app': False,
+ 'addon0': 'unpacked',
+ 'addon1': True,
+ },
+ 'manifests': [
+ ManifestContent('chrome/f', 'oo', 'oo/'),
+ ManifestContent('chrome/f', 'bar', 'oo/bar/'),
+ ManifestResource('chrome/f', 'foo', 'resource://bar/'),
+ ManifestBinaryComponent('components', 'foo.so'),
+ ManifestContent('app/chrome', 'content', 'foo/'),
+ ManifestComponent('app/components', '{foo-id}', 'foo.js'),
+ ManifestContent('addon0/chrome', 'content', 'foo/bar/'),
+ ManifestContent('addon1/chrome', 'content', 'foo/bar/'),
+ ],
+ 'files': {
+ 'chrome/f/oo/bar/baz': GeneratedFile('foobarbaz'),
+ 'chrome/f/oo/baz': GeneratedFile('foobaz'),
+ 'chrome/f/oo/qux': GeneratedFile('fooqux'),
+ 'components/foo.so': GeneratedFile('foo.so'),
+ 'components/foo.xpt': foo_xpt,
+ 'components/bar.xpt': bar_xpt,
+ 'foo': GeneratedFile('foo'),
+ 'app/chrome/foo/foo': GeneratedFile('appfoo'),
+ 'app/components/foo.js': GeneratedFile('foo.js'),
+ 'addon0/chrome/foo/bar/baz': GeneratedFile('foobarbaz'),
+ 'addon0/components/foo.xpt': foo2_xpt,
+ 'addon0/components/bar.xpt': bar_xpt,
+ 'addon1/chrome/foo/bar/baz': GeneratedFile('foobarbaz'),
+ 'addon1/components/foo.xpt': foo2_xpt,
+ 'addon1/components/bar.xpt': bar_xpt,
+ },
+}
+
+FILES = CONTENTS['files']
+
+RESULT_FLAT = {
+ 'chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'chrome/chrome.manifest': [
+ 'manifest f/f.manifest',
+ ],
+ 'chrome/f/f.manifest': [
+ 'content oo oo/',
+ 'content bar oo/bar/',
+ 'resource foo resource://bar/',
+ ],
+ 'chrome/f/oo/bar/baz': FILES['chrome/f/oo/bar/baz'],
+ 'chrome/f/oo/baz': FILES['chrome/f/oo/baz'],
+ 'chrome/f/oo/qux': FILES['chrome/f/oo/qux'],
+ 'components/components.manifest': [
+ 'binary-component foo.so',
+ 'interfaces interfaces.xpt',
+ ],
+ 'components/foo.so': FILES['components/foo.so'],
+ 'components/interfaces.xpt': {
+ 'foo': read_interfaces(foo_xpt.open())['foo'],
+ 'bar': read_interfaces(bar_xpt.open())['bar'],
+ },
+ 'foo': FILES['foo'],
+ 'app/chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'app/chrome/chrome.manifest': [
+ 'content content foo/',
+ ],
+ 'app/chrome/foo/foo': FILES['app/chrome/foo/foo'],
+ 'app/components/components.manifest': [
+ 'component {foo-id} foo.js',
+ ],
+ 'app/components/foo.js': FILES['app/components/foo.js'],
+}
+
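+# Both addons are expected to have the same flat layout, rooted under
+# their respective base directories.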
+for addon in ('addon0', 'addon1'):
+ RESULT_FLAT.update({
+ mozpath.join(addon, p): f
+ for p, f in {
+ 'chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'chrome/chrome.manifest': [
+ 'content content foo/bar/',
+ ],
+ 'chrome/foo/bar/baz': FILES[mozpath.join(addon, 'chrome/foo/bar/baz')],
+ 'components/components.manifest': [
+ 'interfaces interfaces.xpt',
+ ],
+ 'components/interfaces.xpt': {
+ 'foo': read_interfaces(foo2_xpt.open())['foo'],
+ 'bar': read_interfaces(bar_xpt.open())['bar'],
+ },
+ }.iteritems()
+ })
+
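+# The jarred layout keeps the manifests flat but moves chrome content into
+# oo.jar/foo.jar files referenced through jar: URLs, and packs the
+# non-unpacked addon1 into an .xpi.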
+RESULT_JAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'components/components.manifest',
+ 'components/foo.so',
+ 'components/interfaces.xpt',
+ 'foo',
+ 'app/chrome.manifest',
+ 'app/components/components.manifest',
+ 'app/components/foo.js',
+ 'addon0/chrome.manifest',
+ 'addon0/components/components.manifest',
+ 'addon0/components/interfaces.xpt',
+ )
+}
+
+RESULT_JAR.update({
+ 'chrome/f/f.manifest': [
+ 'content oo jar:oo.jar!/',
+ 'content bar jar:oo.jar!/bar/',
+ 'resource foo resource://bar/',
+ ],
+ 'chrome/f/oo.jar': {
+ 'bar/baz': FILES['chrome/f/oo/bar/baz'],
+ 'baz': FILES['chrome/f/oo/baz'],
+ 'qux': FILES['chrome/f/oo/qux'],
+ },
+ 'app/chrome/chrome.manifest': [
+ 'content content jar:foo.jar!/',
+ ],
+ 'app/chrome/foo.jar': {
+ 'foo': FILES['app/chrome/foo/foo'],
+ },
+ 'addon0/chrome/chrome.manifest': [
+ 'content content jar:foo.jar!/bar/',
+ ],
+ 'addon0/chrome/foo.jar': {
+ 'bar/baz': FILES['addon0/chrome/foo/bar/baz'],
+ },
+ 'addon1.xpi': {
+ mozpath.relpath(p, 'addon1'): f
+ for p, f in RESULT_FLAT.iteritems()
+ if p.startswith('addon1/')
+ },
+})
+
+RESULT_OMNIJAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ 'components/foo.so',
+ 'foo',
+ )
+}
+
+RESULT_OMNIJAR.update({
+ p: RESULT_JAR[p]
+ for p in RESULT_JAR
+ if p.startswith('addon')
+})
+
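+# Resource-like content moves into the omni.foo archives; binary components
+# and the addons stay outside.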
+RESULT_OMNIJAR.update({
+ 'omni.foo': {
+ 'components/components.manifest': [
+ 'interfaces interfaces.xpt',
+ ],
+ },
+ 'chrome.manifest': [
+ 'manifest components/components.manifest',
+ ],
+ 'components/components.manifest': [
+ 'binary-component foo.so',
+ ],
+ 'app/omni.foo': {
+ p: RESULT_FLAT['app/' + p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'chrome/foo/foo',
+ 'components/components.manifest',
+ 'components/foo.js',
+ )
+ },
+ 'app/chrome.manifest': [],
+})
+
+RESULT_OMNIJAR['omni.foo'].update({
+ p: RESULT_FLAT[p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'chrome/f/f.manifest',
+ 'chrome/f/oo/bar/baz',
+ 'chrome/f/oo/baz',
+ 'chrome/f/oo/qux',
+ 'components/interfaces.xpt',
+ )
+})
+
+CONTENTS_WITH_BASE = {
+ 'bases': {
+ mozpath.join('base/root', b) if b else 'base/root': a
+ for b, a in CONTENTS['bases'].iteritems()
+ },
+ 'manifests': [
+ m.move(mozpath.join('base/root', m.base))
+ for m in CONTENTS['manifests']
+ ],
+ 'files': {
+ mozpath.join('base/root', p): f
+ for p, f in CONTENTS['files'].iteritems()
+ },
+}
+
+EXTRA_CONTENTS = {
+ 'extra/file': GeneratedFile('extra file'),
+}
+
+CONTENTS_WITH_BASE['files'].update(EXTRA_CONTENTS)
+
+def result_with_base(results):
+ result = {
+ mozpath.join('base/root', p): v
+ for p, v in results.iteritems()
+ }
+ result.update(EXTRA_CONTENTS)
+ return result
+
+RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT)
+RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR)
+RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR)
+
+
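+# Shadow the imported MockDest with a variant that always reports
+# non-existence, so copy operations always write instead of comparing
+# against an existing destination.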
+class MockDest(MockDest):
+ def exists(self):
+ return False
+
+
+def fill_formatter(formatter, contents):
+ for base, is_addon in contents['bases'].items():
+ formatter.add_base(base, is_addon)
+
+ for manifest in contents['manifests']:
+ formatter.add_manifest(manifest)
+
+ for k, v in contents['files'].iteritems():
+ if k.endswith('.xpt'):
+ formatter.add_interfaces(k, v)
+ else:
+ formatter.add(k, v)
+
+
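+# Flatten a registry into plain structures comparable with the RESULT_*
+# dicts: .xpt entries are parsed into interfaces, nested registries recurse,
+# and manifests (or, with read_all, everything) become lists of lines.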
+def get_contents(registry, read_all=False):
+ result = {}
+ for k, v in registry:
+ if k.endswith('.xpt'):
+ tmpfile = MockDest()
+ registry[k].copy(tmpfile)
+ result[k] = read_interfaces(tmpfile)
+ elif isinstance(v, FileRegistry):
+ result[k] = get_contents(v)
+ elif isinstance(v, ManifestFile) or read_all:
+ result[k] = v.open().read().splitlines()
+ else:
+ result[k] = v
+ return result
+
+
+class TestFormatters(unittest.TestCase):
+ maxDiff = None
+
+ def test_bases(self):
+ formatter = FlatFormatter(FileRegistry())
+ formatter.add_base('')
+ formatter.add_base('browser')
+ formatter.add_base('addon0', addon=True)
+ self.assertEqual(formatter._get_base('platform.ini'),
+ ('', 'platform.ini'))
+ self.assertEqual(formatter._get_base('browser/application.ini'),
+ ('browser', 'application.ini'))
+ self.assertEqual(formatter._get_base('addon0/install.rdf'),
+ ('addon0', 'install.rdf'))
+
+ def do_test_contents(self, formatter, contents):
+ for f in contents['files']:
+ # .xpt files are merged, so skip them.
+ if not f.endswith('.xpt'):
+ self.assertTrue(formatter.contains(f))
+
+ def test_flat_formatter(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_FLAT)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_jar_formatter(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_JAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_omnijar_formatter(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, 'omni.foo')
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_flat_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_jar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, 'omni.foo')
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_is_resource(self):
+ def is_resource(base, path):
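+ # Returns True when the path ends up packed inside omni.foo,
+ # False when it is kept outside as a non-resource.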
+ registry = FileRegistry()
+ f = OmniJarFormatter(registry, 'omni.foo', non_resources=[
+ 'defaults/messenger/mailViews.dat',
+ 'defaults/foo/*',
+ '*/dummy',
+ ])
+ f.add_base('')
+ f.add_base('app')
+ f.add(mozpath.join(base, path), GeneratedFile(''))
+ if f.copier.contains(mozpath.join(base, path)):
+ return False
+ self.assertTrue(f.copier.contains(mozpath.join(base, 'omni.foo')))
+ self.assertTrue(f.copier[mozpath.join(base, 'omni.foo')]
+ .contains(path))
+ return True
+
+ for base in ['', 'app/']:
+ self.assertTrue(is_resource(base, 'chrome'))
+ self.assertTrue(
+ is_resource(base, 'chrome/foo/bar/baz.properties'))
+ self.assertFalse(is_resource(base, 'chrome/icons/foo.png'))
+ self.assertTrue(is_resource(base, 'components/foo.js'))
+ self.assertFalse(is_resource(base, 'components/foo.so'))
+ self.assertTrue(is_resource(base, 'res/foo.css'))
+ self.assertFalse(is_resource(base, 'res/cursors/foo.png'))
+ self.assertFalse(is_resource(base, 'res/MainMenu.nib/foo'))
+ self.assertTrue(is_resource(base, 'defaults/pref/foo.js'))
+ self.assertFalse(
+ is_resource(base, 'defaults/pref/channel-prefs.js'))
+ self.assertTrue(
+ is_resource(base, 'defaults/preferences/foo.js'))
+ self.assertFalse(
+ is_resource(base, 'defaults/preferences/channel-prefs.js'))
+ self.assertTrue(is_resource(base, 'modules/foo.jsm'))
+ self.assertTrue(is_resource(base, 'greprefs.js'))
+ self.assertTrue(is_resource(base, 'hyphenation/foo'))
+ self.assertTrue(is_resource(base, 'update.locale'))
+ self.assertTrue(
+ is_resource(base, 'jsloader/resource/gre/modules/foo.jsm'))
+ self.assertFalse(is_resource(base, 'foo'))
+ self.assertFalse(is_resource(base, 'foo/bar/greprefs.js'))
+ self.assertTrue(is_resource(base, 'defaults/messenger/foo.dat'))
+ self.assertFalse(
+ is_resource(base, 'defaults/messenger/mailViews.dat'))
+ self.assertTrue(is_resource(base, 'defaults/pref/foo.js'))
+ self.assertFalse(is_resource(base, 'defaults/foo/bar.dat'))
+ self.assertFalse(is_resource(base, 'defaults/foo/bar/baz.dat'))
+ self.assertTrue(is_resource(base, 'chrome/foo/bar/baz/dummy_'))
+ self.assertFalse(is_resource(base, 'chrome/foo/bar/baz/dummy'))
+ self.assertTrue(is_resource(base, 'chrome/foo/bar/dummy_'))
+ self.assertFalse(is_resource(base, 'chrome/foo/bar/dummy'))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py
new file mode 100644
index 000000000..c797eadd1
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_l10n.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from test_packager import MockFinder
+from mozpack.packager import l10n
+from mozpack.files import (
+ GeneratedFile,
+ ManifestFile,
+)
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestLocale,
+ ManifestContent,
+)
+from mozpack.copier import FileRegistry
+from mozpack.packager.formats import FlatFormatter
+
+
+class TestL10NRepack(unittest.TestCase):
+ def test_l10n_repack(self):
+ foo = GeneratedFile('foo')
+ foobar = GeneratedFile('foobar')
+ qux = GeneratedFile('qux')
+ bar = GeneratedFile('bar')
+ baz = GeneratedFile('baz')
+ dict_aa = GeneratedFile('dict_aa')
+ dict_bb = GeneratedFile('dict_bb')
+ dict_cc = GeneratedFile('dict_cc')
+ barbaz = GeneratedFile('barbaz')
+ lst = GeneratedFile('foo\nbar')
+ app_finder = MockFinder({
+ 'bar/foo': foo,
+ 'chrome/foo/foobar': foobar,
+ 'chrome/qux/qux.properties': qux,
+ 'chrome/qux/baz/baz.properties': baz,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestContent('chrome', 'foo', 'foo/'),
+ ManifestLocale('chrome', 'qux', 'en-US', 'qux/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/aa': dict_aa,
+ 'app/chrome/bar/barbaz.dtd': barbaz,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'en-US', 'bar/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/bb': dict_bb,
+ 'app/dict/cc': dict_cc,
+ 'app/chrome/bar/search/foo.xml': foo,
+ 'app/chrome/bar/search/bar.xml': bar,
+ 'app/chrome/bar/search/lst.txt': lst,
+ })
+ app_finder.jarlogs = {}
+ app_finder.base = 'app'
+ foo_l10n = GeneratedFile('foo_l10n')
+ qux_l10n = GeneratedFile('qux_l10n')
+ baz_l10n = GeneratedFile('baz_l10n')
+ barbaz_l10n = GeneratedFile('barbaz_l10n')
+ lst_l10n = GeneratedFile('foo\nqux')
+ l10n_finder = MockFinder({
+ 'chrome/qux-l10n/qux.properties': qux_l10n,
+ 'chrome/qux-l10n/baz/baz.properties': baz_l10n,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/bb': dict_bb,
+ 'dict/cc': dict_cc,
+ 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/aa': dict_aa,
+ 'app/chrome/bar-l10n/search/foo.xml': foo_l10n,
+ 'app/chrome/bar-l10n/search/qux.xml': qux_l10n,
+ 'app/chrome/bar-l10n/search/lst.txt': lst_l10n,
+ })
+ l10n_finder.base = 'l10n'
+ copier = FileRegistry()
+ formatter = FlatFormatter(copier)
+
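+ # The extra patterns name non-chrome paths (dictionaries and search
+ # plugins) that should also be taken from the l10n side.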
+ l10n._repack(app_finder, l10n_finder, copier, formatter,
+ ['dict', 'chrome/**/search/*.xml'])
+ self.maxDiff = None
+
+ repacked = {
+ 'bar/foo': foo,
+ 'chrome/foo/foobar': foobar,
+ 'chrome/qux-l10n/qux.properties': qux_l10n,
+ 'chrome/qux-l10n/baz/baz.properties': baz_l10n,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestContent('chrome', 'foo', 'foo/'),
+ ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/bb': dict_bb,
+ 'dict/cc': dict_cc,
+ 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/aa': dict_aa,
+ 'app/chrome/bar-l10n/search/foo.xml': foo_l10n,
+ 'app/chrome/bar-l10n/search/qux.xml': qux_l10n,
+ 'app/chrome/bar-l10n/search/lst.txt': lst_l10n,
+ }
+
+ self.assertEqual(
+ dict((p, f.open().read()) for p, f in copier),
+ dict((p, f.open().read()) for p, f in repacked.iteritems())
+ )
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py
new file mode 100644
index 000000000..d201cabf7
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_unpack.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+from mozpack.packager.formats import (
+ FlatFormatter,
+ JarFormatter,
+ OmniJarFormatter,
+)
+from mozpack.packager.unpack import unpack_to_registry
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+)
+from mozpack.test.test_packager_formats import (
+ CONTENTS,
+ fill_formatter,
+ get_contents,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestUnpack(TestWithTmpDir):
+ maxDiff = None
+
+ @staticmethod
+ def _get_copier(formatter_cls):
+ copier = FileCopier()
+ formatter = formatter_cls(copier)
+ fill_formatter(formatter, CONTENTS)
+ return copier
+
+ @classmethod
+ def setUpClass(cls):
+ cls.contents = get_contents(cls._get_copier(FlatFormatter),
+ read_all=True)
+
+ def _unpack_test(self, formatter_cls):
+ # Format a package with the given formatter class.
+ copier = self._get_copier(formatter_cls)
+ copier.copy(self.tmpdir)
+
+ # Unpack that package. Its content is expected to match that of a Flat
+ # formatted package.
+ registry = FileRegistry()
+ unpack_to_registry(self.tmpdir, registry)
+ self.assertEqual(get_contents(registry, read_all=True), self.contents)
+
+ def test_flat_unpack(self):
+ self._unpack_test(FlatFormatter)
+
+ def test_jar_unpack(self):
+ self._unpack_test(JarFormatter)
+
+ def test_omnijar_unpack(self):
+ class OmniFooFormatter(OmniJarFormatter):
+ def __init__(self, registry):
+ super(OmniFooFormatter, self).__init__(registry, 'omni.foo')
+
+ self._unpack_test(OmniFooFormatter)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py
new file mode 100644
index 000000000..ee41e4a69
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_path.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.path import (
+ relpath,
+ join,
+ normpath,
+ dirname,
+ commonprefix,
+ basename,
+ split,
+ splitext,
+ basedir,
+ match,
+ rebase,
+)
+import unittest
+import mozunit
+import os
+
+
+class TestPath(unittest.TestCase):
+ SEP = os.sep
+
+ def test_relpath(self):
+ self.assertEqual(relpath('foo', 'foo'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'),
+ 'bar/baz')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'),
+ '..')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'),
+ '../bar')
+ self.assertEqual(relpath('foo/', 'foo'), '')
+ self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')
+
+ def test_join(self):
+ self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz')
+ self.assertEqual(join('foo', '', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', '/bar'), '/bar')
+
+ def test_normpath(self):
+ self.assertEqual(normpath(self.SEP.join(('foo', 'bar', 'baz',
+ '..', 'qux'))), 'foo/bar/qux')
+
+ def test_dirname(self):
+ self.assertEqual(dirname('foo/bar/baz'), 'foo/bar')
+ self.assertEqual(dirname('foo/bar'), 'foo')
+ self.assertEqual(dirname('foo'), '')
+ self.assertEqual(dirname('foo/bar/'), 'foo/bar')
+
+ def test_commonprefix(self):
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'foo/baz/qux']), 'foo/')
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'baz/qux']), '')
+
+ def test_basename(self):
+ self.assertEqual(basename('foo/bar/baz'), 'baz')
+ self.assertEqual(basename('foo/bar'), 'bar')
+ self.assertEqual(basename('foo'), 'foo')
+ self.assertEqual(basename('foo/bar/'), '')
+
+ def test_split(self):
+ self.assertEqual(split(self.SEP.join(('foo', 'bar', 'baz'))),
+ ['foo', 'bar', 'baz'])
+
+ def test_splitext(self):
+ self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))),
+ ('foo/bar/baz', '.qux'))
+
+ def test_basedir(self):
+ foobarbaz = self.SEP.join(('foo', 'bar', 'baz'))
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '')
+
+ def test_match(self):
+ self.assertTrue(match('foo', ''))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo'))
+ self.assertTrue(match('foo', '*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/*.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', '**.qux'))
+ self.assertFalse(match('foo/bar', 'foo/*/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/**'))
+ self.assertFalse(match('foo/nobar/baz.qux', 'foo/**/bar/**'))
+ self.assertTrue(match('foo/bar', 'foo/**/bar/**'))
+
+ def test_rebase(self):
+ self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz')
+ self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz')
+ self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz')
+
+
+if os.altsep:
+ class TestAltPath(TestPath):
+ SEP = os.altsep
+
+ class TestReverseAltPath(TestPath):
+ def setUp(self):
+ sep = os.sep
+ os.sep = os.altsep
+ os.altsep = sep
+
+ def tearDown(self):
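+ # Swapping os.sep and os.altsep a second time restores the originals.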
+ self.setUp()
+
+ class TestAltReverseAltPath(TestReverseAltPath):
+ SEP = os.altsep
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 000000000..a2bbb4470
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,199 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensureParentDir
+
+from mozpack.unify import (
+ UnifiedFinder,
+ UnifiedBuildFinder,
+)
+import mozunit
+from mozpack.test.test_files import TestWithTmpDir
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest
+from cStringIO import StringIO
+import os
+import sys
+from mozpack.errors import (
+ ErrorMessage,
+ AccumulatedErrors,
+ errors,
+)
+
+
+class TestUnified(TestWithTmpDir):
+ def create_one(self, which, path, content):
+ file = self.tmppath(os.path.join(which, path))
+ ensureParentDir(file)
+ with open(file, 'wb') as fh:
+ fh.write(content)
+
+ def create_both(self, path, content):
+ for p in ['a', 'b']:
+ self.create_one(p, path, content)
+
+
+class TestUnifiedFinder(TestUnified):
+ def test_unified_finder(self):
+ self.create_both('foo/bar', 'foobar')
+ self.create_both('foo/baz', 'foobaz')
+ self.create_one('a', 'bar', 'bar')
+ self.create_one('b', 'baz', 'baz')
+ self.create_one('a', 'qux', 'foobar')
+ self.create_one('b', 'qux', 'baz')
+ self.create_one('a', 'test/foo', 'a\nb\nc\n')
+ self.create_one('b', 'test/foo', 'b\nc\na\n')
+ self.create_both('test/bar', 'a\nb\nc\n')
+
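+ # Files under 'test' are only required to match after their lines
+ # are sorted, so the differently ordered test/foo copies unify.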
+ finder = UnifiedFinder(FileFinder(self.tmppath('a')),
+ FileFinder(self.tmppath('b')),
+ sorted=['test'])
+ self.assertEqual(sorted([(f, c.open().read())
+ for f, c in finder.find('foo')]),
+ [('foo/bar', 'foobar'), ('foo/baz', 'foobaz')])
+ self.assertRaises(ErrorMessage, any, finder.find('bar'))
+ self.assertRaises(ErrorMessage, any, finder.find('baz'))
+ self.assertRaises(ErrorMessage, any, finder.find('qux'))
+ self.assertEqual(sorted([(f, c.open().read())
+ for f, c in finder.find('test')]),
+ [('test/bar', 'a\nb\nc\n'),
+ ('test/foo', 'a\nb\nc\n')])
+
+
+class TestUnifiedBuildFinder(TestUnified):
+ def test_unified_build_finder(self):
+ finder = UnifiedBuildFinder(FileFinder(self.tmppath('a')),
+ FileFinder(self.tmppath('b')))
+
+ # Test chrome.manifest unification
+ self.create_both('chrome.manifest', 'a\nb\nc\n')
+ self.create_one('a', 'chrome/chrome.manifest', 'a\nb\nc\n')
+ self.create_one('b', 'chrome/chrome.manifest', 'b\nc\na\n')
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/chrome.manifest')]),
+ [('chrome.manifest', 'a\nb\nc\n'),
+ ('chrome/chrome.manifest', 'a\nb\nc\n')])
+
+ # Test buildconfig.html unification
+ self.create_one('a', 'chrome/browser/foo/buildconfig.html',
+ '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>foo</div>',
+ '</body>',
+ '</html>',
+ ]))
+ self.create_one('b', 'chrome/browser/foo/buildconfig.html',
+ '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>bar</div>',
+ '</body>',
+ '</html>',
+ ]))
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/buildconfig.html')]),
+ [('chrome/browser/foo/buildconfig.html', '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>foo</div>',
+ '<hr> </hr>',
+ '<div>bar</div>',
+ '</body>',
+ '</html>',
+ ]))])
+
+ # Test xpi file unification
+ xpi = MockDest()
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'bar')
+ foo_xpi = xpi.read()
+ self.create_both('foo.xpi', foo_xpi)
+
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add('foo', 'bar')
+ self.create_one('a', 'bar.xpi', foo_xpi)
+ self.create_one('b', 'bar.xpi', xpi.read())
+
+ errors.out = StringIO()
+ with self.assertRaises(AccumulatedErrors), errors.accumulate():
+ self.assertEqual([(f, c.open().read()) for f, c in
+ finder.find('*.xpi')],
+ [('foo.xpi', foo_xpi)])
+ errors.out = sys.stderr
+
+ # Test install.rdf unification
+ x86_64 = 'Darwin_x86_64-gcc3'
+ x86 = 'Darwin_x86-gcc3'
+ target_tag = '<{em}targetPlatform>{platform}</{em}targetPlatform>'
+ target_attr = '{em}targetPlatform="{platform}" '
+
+ rdf_tag = ''.join([
+ '<{RDF}Description {em}bar="bar" {em}qux="qux">',
+ '<{em}foo>foo</{em}foo>',
+ '{targets}',
+ '<{em}baz>baz</{em}baz>',
+ '</{RDF}Description>'
+ ])
+ rdf_attr = ''.join([
+ '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
+ '{targets}',
+ '<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>',
+ '</{RDF}Description>'
+ ])
+
+ for descr_ns, target_ns in (('RDF:', ''), ('', 'em:'), ('RDF:', 'em:')):
+ # First we need to infuse the above strings with our namespaces and
+ # platform values.
+ ns = { 'RDF': descr_ns, 'em': target_ns }
+ target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
+ target_tag_x86 = target_tag.format(platform=x86, **ns)
+ target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
+ target_attr_x86 = target_attr.format(platform=x86, **ns)
+
+ tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
+ tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
+ tag_merged = rdf_tag.format(targets=target_tag_x86_64 + target_tag_x86, **ns)
+ tag_empty = rdf_tag.format(targets="", **ns)
+
+ attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
+ attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
+ attr_merged = rdf_attr.format(attr="", targets=target_tag_x86_64 + target_tag_x86, **ns)
+
+ # This table defines the test cases, columns "a" and "b" being the
+ # contents of the install.rdf of the respective platform and
+ # "result" the exepected merged content after unification.
+ testcases = (
+ #_____a_____ _____b_____ ___result___#
+ (tag_x86_64, tag_x86, tag_merged ),
+ (tag_x86_64, tag_empty, tag_empty ),
+ (tag_empty, tag_x86, tag_empty ),
+ (tag_empty, tag_empty, tag_empty ),
+
+ (attr_x86_64, attr_x86, attr_merged ),
+ (tag_x86_64, attr_x86, tag_merged ),
+ (attr_x86_64, tag_x86, attr_merged ),
+
+ (attr_x86_64, tag_empty, tag_empty ),
+ (tag_empty, attr_x86, tag_empty )
+ )
+
+ # Now create the files from the above table and compare
+ results = []
+ for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
+ filename = 'ext/id{0}/install.rdf'.format(emid)
+ self.create_one('a', filename, rdf_a)
+ self.create_one('b', filename, rdf_b)
+ results.append((filename, result))
+
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/install.rdf')]), results)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/unify.py b/python/mozbuild/mozpack/unify.py
new file mode 100644
index 000000000..3c8a8d605
--- /dev/null
+++ b/python/mozbuild/mozpack/unify.py
@@ -0,0 +1,231 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from mozpack.files import (
+ BaseFinder,
+ JarFinder,
+ ExecutableFile,
+ BaseFile,
+ GeneratedFile,
+)
+from mozpack.executables import (
+ MACHO_SIGNATURES,
+)
+from mozpack.mozjar import JarReader
+from mozpack.errors import errors
+from tempfile import mkstemp
+import mozpack.path as mozpath
+import struct
+import os
+import re
+import subprocess
+import buildconfig
+from collections import OrderedDict
+
+# Regular expressions for unifying install.rdf
+FIND_TARGET_PLATFORM = re.compile(r"""
+ <(?P<ns>[-._0-9A-Za-z]+:)?targetPlatform> # The targetPlatform tag, with any namespace
+ (?P<platform>[^<]*) # The actual platform value
+ </(?P=ns)?targetPlatform> # The closing tag
+ """, re.X)
+FIND_TARGET_PLATFORM_ATTR = re.compile(r"""
+ (?P<tag><(?:[-._0-9A-Za-z]+:)?Description) # The opening part of the <Description> tag
+ (?P<attrs>[^>]*?)\s+ # The initial attributes
+ (?P<ns>[-._0-9A-Za-z]+:)?targetPlatform= # The targetPlatform attribute, with any namespace
+ [\'"](?P<platform>[^\'"]+)[\'"] # The actual platform value
+ (?P<otherattrs>[^>]*?>) # The remaining attributes and closing angle bracket
+ """, re.X)
+
+def may_unify_binary(file):
+ '''
+ Return whether the given BaseFile instance is an ExecutableFile that
+ may be unified. Only non-fat Mach-O binaries are to be unified.
+ '''
+ if isinstance(file, ExecutableFile):
+ signature = file.open().read(4)
+ if len(signature) < 4:
+ return False
+ signature = struct.unpack('>L', signature)[0]
+ if signature in MACHO_SIGNATURES:
+ return True
+ return False
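+
+# For reference, a minimal standalone sketch of the same magic-number check
+# (assuming the standard Mach-O magics 0xfeedface/0xfeedfacf for thin
+# 32/64-bit binaries; MACHO_SIGNATURES in mozpack.executables is the
+# authoritative list and also covers byte-swapped variants):
+#
+#     with open(path, 'rb') as fh:
+#         magic = struct.unpack('>L', fh.read(4))[0]
+#     is_thin_macho = magic in (0xfeedface, 0xfeedfacf)
+#     is_fat = magic == 0xcafebabe  # fat binaries are not unified here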
+
+
+class UnifiedExecutableFile(BaseFile):
+ '''
+    File class for executable and library files that are to be unified with 'lipo'.
+ '''
+ def __init__(self, executable1, executable2):
+ '''
+ Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to
+ be unified. They are expected to be non-fat Mach-O executables.
+ '''
+ assert isinstance(executable1, ExecutableFile)
+ assert isinstance(executable2, ExecutableFile)
+ self._executables = (executable1, executable2)
+
+ def copy(self, dest, skip_if_older=True):
+ '''
+        Create a fat executable from the two Mach-O executables given when
+ creating the instance.
+ skip_if_older is ignored.
+ '''
+ assert isinstance(dest, basestring)
+ tmpfiles = []
+ try:
+ for e in self._executables:
+ fd, f = mkstemp()
+ os.close(fd)
+ tmpfiles.append(f)
+ e.copy(f, skip_if_older=False)
+ lipo = buildconfig.substs.get('LIPO') or 'lipo'
+ subprocess.call([lipo, '-create'] + tmpfiles + ['-output', dest])
+ finally:
+ for f in tmpfiles:
+ os.unlink(f)
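+
+# copy() above is roughly equivalent to running, with illustrative names:
+#
+#     lipo -create tmp_a tmp_b -output dest
+#
+# where tmp_a and tmp_b are thin Mach-O files for two different architectures.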
+
+
+class UnifiedFinder(BaseFinder):
+ '''
+ Helper to get unified BaseFile instances from two distinct trees on the
+ file system.
+ '''
+ def __init__(self, finder1, finder2, sorted=[], **kargs):
+ '''
+ Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder
+ instances from which files are picked. UnifiedFinder.find() will act as
+ FileFinder.find() but will error out when matches can only be found in
+ one of the two trees and not the other. It will also error out if
+ matches can be found on both ends but their contents are not identical.
+
+ The sorted argument gives a list of mozpath.match patterns. File
+ paths matching one of these patterns will have their contents compared
+ with their lines sorted.
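+
+        A hypothetical example, mirroring the unit tests (base_a and base_b
+        being the roots of the two trees):
+
+            finder = UnifiedFinder(FileFinder(base_a), FileFinder(base_b),
+                                   sorted=['**/*.manifest'])
+            for path, unified in finder.find('bin'):
+                ...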
+ '''
+ assert isinstance(finder1, BaseFinder)
+ assert isinstance(finder2, BaseFinder)
+ self._finder1 = finder1
+ self._finder2 = finder2
+ self._sorted = sorted
+ BaseFinder.__init__(self, finder1.base, **kargs)
+
+ def _find(self, path):
+ '''
+ UnifiedFinder.find() implementation.
+ '''
+ files1 = OrderedDict()
+ for p, f in self._finder1.find(path):
+ files1[p] = f
+ files2 = set()
+ for p, f in self._finder2.find(path):
+ files2.add(p)
+ if p in files1:
+ if may_unify_binary(files1[p]) and \
+ may_unify_binary(f):
+ yield p, UnifiedExecutableFile(files1[p], f)
+ else:
+ err = errors.count
+ unified = self.unify_file(p, files1[p], f)
+ if unified:
+ yield p, unified
+ elif err == errors.count:
+ self._report_difference(p, files1[p], f)
+ else:
+ errors.error('File missing in %s: %s' %
+ (self._finder1.base, p))
+        for p in [p for p in files1 if p not in files2]:
+ errors.error('File missing in %s: %s' % (self._finder2.base, p))
+
+ def _report_difference(self, path, file1, file2):
+ '''
+ Report differences between files in both trees.
+ '''
+ errors.error("Can't unify %s: file differs between %s and %s" %
+ (path, self._finder1.base, self._finder2.base))
+ if not isinstance(file1, ExecutableFile) and \
+ not isinstance(file2, ExecutableFile):
+ from difflib import unified_diff
+ for line in unified_diff(file1.open().readlines(),
+ file2.open().readlines(),
+ os.path.join(self._finder1.base, path),
+ os.path.join(self._finder2.base, path)):
+ errors.out.write(line)
+
+ def unify_file(self, path, file1, file2):
+ '''
+ Given two BaseFiles and the path they were found at, check whether
+        their contents match and return the first BaseFile if they do.
+ '''
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ if content1 == content2:
+ return file1
+ for pattern in self._sorted:
+ if mozpath.match(path, pattern):
+ if sorted(content1) == sorted(content2):
+ return file1
+ break
+ return None
+
+
+class UnifiedBuildFinder(UnifiedFinder):
+ '''
+    Specialized UnifiedFinder for Mozilla application packaging. It allows
+ "*.manifest" files to differ in their order, and unifies "buildconfig.html"
+ files by merging their content.
+ '''
+ def __init__(self, finder1, finder2, **kargs):
+ UnifiedFinder.__init__(self, finder1, finder2,
+ sorted=['**/*.manifest'], **kargs)
+
+ def unify_file(self, path, file1, file2):
+ '''
+ Unify files taking Mozilla application special cases into account.
+ Otherwise defer to UnifiedFinder.unify_file.
+ '''
+ basename = mozpath.basename(path)
+ if basename == 'buildconfig.html':
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ # Copy everything from the first file up to the end of its <body>,
+ # insert a <hr> between the two files and copy the second file's
+ # content beginning after its leading <h1>.
+ return GeneratedFile(''.join(
+ content1[:content1.index('</body>\n')] +
+ ['<hr> </hr>\n'] +
+ content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:]
+ ))
+ elif basename == 'install.rdf':
+ # install.rdf files often have em:targetPlatform (either as
+ # attribute or as tag) that will differ between platforms. The
+ # unified install.rdf should contain both em:targetPlatforms if
+ # they exist, or strip them if only one file has a target platform.
+ content1, content2 = (
+                FIND_TARGET_PLATFORM_ATTR.sub(lambda m:
+                    m.group('tag') + m.group('attrs') + m.group('otherattrs') +
+                    '<%stargetPlatform>%s</%stargetPlatform>' %
+ (m.group('ns') or "", m.group('platform'), m.group('ns') or ""),
+ f.open().read()
+ ) for f in (file1, file2)
+ )
+
+ platform2 = FIND_TARGET_PLATFORM.search(content2)
+ return GeneratedFile(FIND_TARGET_PLATFORM.sub(
+ lambda m: m.group(0) + platform2.group(0) if platform2 else '',
+ content1
+ ))
+ elif path.endswith('.xpi'):
+ finder1 = JarFinder(os.path.join(self._finder1.base, path),
+ JarReader(fileobj=file1.open()))
+ finder2 = JarFinder(os.path.join(self._finder2.base, path),
+ JarReader(fileobj=file2.open()))
+ unifier = UnifiedFinder(finder1, finder2, sorted=self._sorted)
+ err = errors.count
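+            # Draining the iterator forces unification of every entry in the
+            # jars; mismatches are reported to `errors` as a side effect, so
+            # a change in errors.count signals failure.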
+ all(unifier.find(''))
+ if err == errors.count:
+ return file1
+ return None
+ return UnifiedFinder.unify_file(self, path, file1, file2)
diff --git a/python/mozbuild/setup.py b/python/mozbuild/setup.py
new file mode 100644
index 000000000..448a1362a
--- /dev/null
+++ b/python/mozbuild/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup, find_packages
+
+VERSION = '0.2'
+
+setup(
+ author='Mozilla Foundation',
+ author_email='dev-builds@lists.mozilla.org',
+ name='mozbuild',
+ description='Mozilla build system functionality.',
+ license='MPL 2.0',
+ packages=find_packages(),
+ version=VERSION,
+ install_requires=[
+ 'jsmin',
+ 'mozfile',
+ ],
+ classifiers=[
+ 'Development Status :: 3 - Alpha',
+ 'Topic :: Software Development :: Build Tools',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ ],
+ keywords='mozilla build',
+)
diff --git a/python/mozlint/mozlint/__init__.py b/python/mozlint/mozlint/__init__.py
new file mode 100644
index 000000000..18eaf5112
--- /dev/null
+++ b/python/mozlint/mozlint/__init__.py
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# flake8: noqa
+
+from .roller import LintRoller
+from .result import ResultContainer
diff --git a/python/mozlint/mozlint/cli.py b/python/mozlint/mozlint/cli.py
new file mode 100644
index 000000000..84c1b6aa4
--- /dev/null
+++ b/python/mozlint/mozlint/cli.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function, unicode_literals
+
+import os
+import sys
+from argparse import ArgumentParser, REMAINDER
+
+
+SEARCH_PATHS = []
+
+
+class MozlintParser(ArgumentParser):
+ arguments = [
+ [['paths'],
+ {'nargs': '*',
+ 'default': None,
+ 'help': "Paths to file or directories to lint, like "
+ "'browser/components/loop' or 'mobile/android'. "
+ "Defaults to the current directory if not given.",
+ }],
+ [['-l', '--linter'],
+ {'dest': 'linters',
+ 'default': [],
+ 'action': 'append',
+ 'help': "Linters to run, e.g 'eslint'. By default all linters "
+ "are run for all the appropriate files.",
+ }],
+ [['-f', '--format'],
+ {'dest': 'fmt',
+ 'default': 'stylish',
+ 'help': "Formatter to use. Defaults to 'stylish'.",
+ }],
+ [['-n', '--no-filter'],
+ {'dest': 'use_filters',
+ 'default': True,
+ 'action': 'store_false',
+ 'help': "Ignore all filtering. This is useful for quickly "
+ "testing a directory that otherwise wouldn't be run, "
+ "without needing to modify the config file.",
+ }],
+ [['-r', '--rev'],
+ {'default': None,
+ 'help': "Lint files touched by the given revision(s). Works with "
+ "mercurial or git."
+ }],
+ [['-w', '--workdir'],
+ {'default': False,
+ 'action': 'store_true',
+ 'help': "Lint files touched by changes in the working directory "
+ "(i.e haven't been committed yet). Works with mercurial or git.",
+ }],
+ [['extra_args'],
+ {'nargs': REMAINDER,
+ 'help': "Extra arguments that will be forwarded to the underlying linter.",
+ }],
+ ]
+
+ def __init__(self, **kwargs):
+ ArgumentParser.__init__(self, usage=self.__doc__, **kwargs)
+
+ for cli, args in self.arguments:
+ self.add_argument(*cli, **args)
+
+ def parse_known_args(self, *args, **kwargs):
+ # This is here so the eslint mach command doesn't lose 'extra_args'
+ # when using mach's dispatch functionality.
+ args, extra = ArgumentParser.parse_known_args(self, *args, **kwargs)
+ args.extra_args = extra
+ return args, extra
+
+
+def find_linters(linters=None):
+ lints = []
+ for search_path in SEARCH_PATHS:
+ if not os.path.isdir(search_path):
+ continue
+
+ files = os.listdir(search_path)
+ for f in files:
+ name, ext = os.path.splitext(f)
+ if ext != '.lint':
+ continue
+
+ if linters and name not in linters:
+ continue
+
+ lints.append(os.path.join(search_path, f))
+ return lints
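+
+# SEARCH_PATHS starts out empty; the embedding tool is expected to populate
+# it before run() is called, along these (hypothetical) lines:
+#
+#     from mozlint import cli
+#     cli.SEARCH_PATHS.append(os.path.join(srcdir, 'tools', 'lint'))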
+
+
+def run(paths, linters, fmt, rev, workdir, **lintargs):
+ from mozlint import LintRoller, formatters
+
+ lint = LintRoller(**lintargs)
+ lint.read(find_linters(linters))
+
+ # run all linters
+ results = lint.roll(paths, rev=rev, workdir=workdir)
+
+ formatter = formatters.get(fmt)
+
+ # Explicitly utf-8 encode the output as some of the formatters make
+ # use of unicode characters. This will prevent a UnicodeEncodeError
+    # in environments where utf-8 isn't the default.
+ print(formatter(results).encode('utf-8', 'replace'))
+ return lint.return_code
+
+
+if __name__ == '__main__':
+ parser = MozlintParser()
+ args = vars(parser.parse_args())
+ sys.exit(run(**args))
diff --git a/python/mozlint/mozlint/errors.py b/python/mozlint/mozlint/errors.py
new file mode 100644
index 000000000..a899a1974
--- /dev/null
+++ b/python/mozlint/mozlint/errors.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+
+class LintException(Exception):
+ pass
+
+
+class LinterNotFound(LintException):
+ def __init__(self, path):
+ LintException.__init__(self, "Could not find lint file '{}'".format(path))
+
+
+class LinterParseError(LintException):
+ def __init__(self, path, message):
+ LintException.__init__(self, "{}: {}".format(os.path.basename(path), message))
+
+
+class LintersNotConfigured(LintException):
+ def __init__(self):
+ LintException.__init__(self, "No linters registered! Use `LintRoller.read` "
+ "to register a linter.")
diff --git a/python/mozlint/mozlint/formatters/__init__.py b/python/mozlint/mozlint/formatters/__init__.py
new file mode 100644
index 000000000..33aca0446
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/__init__.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+from ..result import ResultEncoder
+from .stylish import StylishFormatter
+from .treeherder import TreeherderFormatter
+
+
+class JSONFormatter(object):
+ def __call__(self, results):
+ return json.dumps(results, cls=ResultEncoder)
+
+
+all_formatters = {
+ 'json': JSONFormatter,
+ 'stylish': StylishFormatter,
+ 'treeherder': TreeherderFormatter,
+}
+
+
+def get(name, **fmtargs):
+ return all_formatters[name](**fmtargs)
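+
+# For instance, formatters.get('stylish', disable_colors=True) returns a
+# callable that renders a {path: [ResultContainer, ...]} mapping to a string.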
diff --git a/python/mozlint/mozlint/formatters/stylish.py b/python/mozlint/mozlint/formatters/stylish.py
new file mode 100644
index 000000000..62ddfbeb6
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/stylish.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from ..result import ResultContainer
+
+try:
+ import blessings
+except ImportError:
+ blessings = None
+
+
+class NullTerminal(object):
+ """Replacement for `blessings.Terminal()` that does no formatting."""
+ class NullCallableString(unicode):
+ """A dummy callable Unicode stolen from blessings"""
+ def __new__(cls):
+ new = unicode.__new__(cls, u'')
+ return new
+
+ def __call__(self, *args):
+ if len(args) != 1 or isinstance(args[0], int):
+ return u''
+ return args[0]
+
+ def __getattr__(self, attr):
+ return self.NullCallableString()
+
+
+class StylishFormatter(object):
+ """Formatter based on the eslint default."""
+
+ # Colors later on in the list are fallbacks in case the terminal
+ # doesn't support colors earlier in the list.
+ # See http://www.calmar.ws/vim/256-xterm-24bit-rgb-color-chart.html
+ _colors = {
+ 'grey': [247, 8, 7],
+ 'red': [1],
+ 'yellow': [3],
+ 'brightred': [9, 1],
+ 'brightyellow': [11, 3],
+ }
+ fmt = " {c1}{lineno}{column} {c2}{level}{normal} {message} {c1}{rule}({linter}){normal}"
+ fmt_summary = "{t.bold}{c}\u2716 {problem} ({error}, {warning}){t.normal}"
+
+ def __init__(self, disable_colors=None):
+ if disable_colors or not blessings:
+ self.term = NullTerminal()
+ else:
+ self.term = blessings.Terminal()
+ self.num_colors = self.term.number_of_colors
+
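+    # For example, color('grey') yields the escape for xterm color 247 on a
+    # 256-color terminal, falls back to 8 and then 7 on smaller palettes, and
+    # reduces to '' when colors are disabled.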
+ def color(self, color):
+ for num in self._colors[color]:
+ if num < self.num_colors:
+ return self.term.color(num)
+ return ''
+
+ def _reset_max(self):
+ self.max_lineno = 0
+ self.max_column = 0
+ self.max_level = 0
+ self.max_message = 0
+
+ def _update_max(self, err):
+ """Calculates the longest length of each token for spacing."""
+ self.max_lineno = max(self.max_lineno, len(str(err.lineno)))
+ if err.column:
+ self.max_column = max(self.max_column, len(str(err.column)))
+ self.max_level = max(self.max_level, len(str(err.level)))
+ self.max_message = max(self.max_message, len(err.message))
+
+ def _pluralize(self, s, num):
+ if num != 1:
+ s += 's'
+ return str(num) + ' ' + s
+
+ def __call__(self, result):
+ message = []
+
+ num_errors = 0
+ num_warnings = 0
+ for path, errors in sorted(result.iteritems()):
+ self._reset_max()
+
+ message.append(self.term.underline(path))
+ # Do a first pass to calculate required padding
+ for err in errors:
+ assert isinstance(err, ResultContainer)
+ self._update_max(err)
+ if err.level == 'error':
+ num_errors += 1
+ else:
+ num_warnings += 1
+
+ for err in errors:
+ message.append(self.fmt.format(
+ normal=self.term.normal,
+ c1=self.color('grey'),
+ c2=self.color('red') if err.level == 'error' else self.color('yellow'),
+ lineno=str(err.lineno).rjust(self.max_lineno),
+ column=(":" + str(err.column).ljust(self.max_column)) if err.column else "",
+ level=err.level.ljust(self.max_level),
+ message=err.message.ljust(self.max_message),
+ rule='{} '.format(err.rule) if err.rule else '',
+ linter=err.linter.lower(),
+ ))
+
+ message.append('') # newline
+
+ # Print a summary
+ message.append(self.fmt_summary.format(
+ t=self.term,
+ c=self.color('brightred') if num_errors else self.color('brightyellow'),
+ problem=self._pluralize('problem', num_errors + num_warnings),
+ error=self._pluralize('error', num_errors),
+ warning=self._pluralize('warning', num_warnings),
+ ))
+
+ return '\n'.join(message)
diff --git a/python/mozlint/mozlint/formatters/treeherder.py b/python/mozlint/mozlint/formatters/treeherder.py
new file mode 100644
index 000000000..7c27011cf
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/treeherder.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from ..result import ResultContainer
+
+
+class TreeherderFormatter(object):
+ """Formatter for treeherder friendly output.
+
+ This formatter looks ugly, but prints output such that
+ treeherder is able to highlight the errors and warnings.
+ This is a stop-gap until bug 1276486 is fixed.
+ """
+ fmt = "TEST-UNEXPECTED-{level} | {path}:{lineno}{column} | {message} ({rule})"
+
+ def __call__(self, result):
+ message = []
+ for path, errors in sorted(result.iteritems()):
+ for err in errors:
+ assert isinstance(err, ResultContainer)
+
+ d = {s: getattr(err, s) for s in err.__slots__}
+ d["column"] = ":%s" % d["column"] if d["column"] else ""
+ d['level'] = d['level'].upper()
+ d['rule'] = d['rule'] or d['linter']
+ message.append(self.fmt.format(**d))
+
+ return "\n".join(message)
diff --git a/python/mozlint/mozlint/parser.py b/python/mozlint/mozlint/parser.py
new file mode 100644
index 000000000..f350d0de7
--- /dev/null
+++ b/python/mozlint/mozlint/parser.py
@@ -0,0 +1,85 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import imp
+import os
+import sys
+import uuid
+
+from .types import supported_types
+from .errors import LinterNotFound, LinterParseError
+
+
+class Parser(object):
+ """Reads and validates `.lint` files."""
+ required_attributes = (
+ 'name',
+ 'description',
+ 'type',
+ 'payload',
+ )
+
+ def __call__(self, path):
+ return self.parse(path)
+
+ def _load_linter(self, path):
+ # Ensure parent module is present otherwise we'll (likely) get
+ # an error due to unknown parent.
+ parent_module = 'mozlint.linters'
+ if parent_module not in sys.modules:
+ mod = imp.new_module(parent_module)
+ sys.modules[parent_module] = mod
+
+ write_bytecode = sys.dont_write_bytecode
+ sys.dont_write_bytecode = True
+
+ module_name = '{}.{}'.format(parent_module, uuid.uuid1().get_hex())
+ imp.load_source(module_name, path)
+
+ sys.dont_write_bytecode = write_bytecode
+
+ mod = sys.modules[module_name]
+
+ if not hasattr(mod, 'LINTER'):
+ raise LinterParseError(path, "No LINTER definition found!")
+
+ definition = mod.LINTER
+ definition['path'] = path
+ return definition
+
+ def _validate(self, linter):
+ missing_attrs = []
+ for attr in self.required_attributes:
+ if attr not in linter:
+ missing_attrs.append(attr)
+
+ if missing_attrs:
+ raise LinterParseError(linter['path'], "Missing required attribute(s): "
+ "{}".format(','.join(missing_attrs)))
+
+ if linter['type'] not in supported_types:
+ raise LinterParseError(linter['path'], "Invalid type '{}'".format(linter['type']))
+
+ for attr in ('include', 'exclude'):
+ if attr in linter and (not isinstance(linter[attr], list) or
+ not all(isinstance(a, basestring) for a in linter[attr])):
+ raise LinterParseError(linter['path'], "The {} directive must be a "
+ "list of strings!".format(attr))
+
+ def parse(self, path):
+ """Read a linter and return its LINTER definition.
+
+ :param path: Path to the linter.
+ :returns: Linter definition (dict)
+ :raises: LinterNotFound, LinterParseError
+ """
+ if not os.path.isfile(path):
+ raise LinterNotFound(path)
+
+ if not path.endswith('.lint'):
+ raise LinterParseError(path, "Invalid filename, linters must end with '.lint'!")
+
+ linter = self._load_linter(path)
+ self._validate(linter)
+ return linter
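+
+# A minimal .lint file accepted by this parser looks like (sketch):
+#
+#     LINTER = {
+#         'name': 'StringLinter',
+#         'description': "Flag the string 'foobar'.",
+#         'type': 'string',
+#         'payload': 'foobar',
+#     }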
diff --git a/python/mozlint/mozlint/pathutils.py b/python/mozlint/mozlint/pathutils.py
new file mode 100644
index 000000000..532904dca
--- /dev/null
+++ b/python/mozlint/mozlint/pathutils.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+
+from mozpack import path as mozpath
+from mozpack.files import FileFinder
+
+
+class FilterPath(object):
+ """Helper class to make comparing and matching file paths easier."""
+ def __init__(self, path, exclude=None):
+ self.path = os.path.normpath(path)
+ self._finder = None
+ self.exclude = exclude
+
+ @property
+ def finder(self):
+ if self._finder:
+ return self._finder
+ self._finder = FileFinder(
+ self.path, find_executables=False, ignore=self.exclude)
+ return self._finder
+
+ @property
+ def ext(self):
+ return os.path.splitext(self.path)[1]
+
+ @property
+ def exists(self):
+ return os.path.exists(self.path)
+
+ @property
+ def isfile(self):
+ return os.path.isfile(self.path)
+
+ @property
+ def isdir(self):
+ return os.path.isdir(self.path)
+
+ def join(self, *args):
+        return FilterPath(os.path.join(self.path, *args))
+
+ def match(self, patterns):
+ return any(mozpath.match(self.path, pattern.path) for pattern in patterns)
+
+ def contains(self, other):
+ """Return True if other is a subdirectory of self or equals self."""
+ if isinstance(other, FilterPath):
+ other = other.path
+ a = os.path.abspath(self.path)
+ b = os.path.normpath(os.path.abspath(other))
+
+ if b.startswith(a):
+ return True
+ return False
+
+ def __repr__(self):
+ return repr(self.path)
+
+
+def filterpaths(paths, linter, **lintargs):
+ """Filters a list of paths.
+
+    Given a list of paths and a linter definition plus extra
+ arguments, return the set of paths that should be linted.
+
+ :param paths: A starting list of paths to possibly lint.
+ :param linter: A linter definition.
+ :param lintargs: Extra arguments passed to the linter.
+ :returns: A list of file paths to lint.
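+
+    A hypothetical example:
+
+        filterpaths(['a/b.js'], linter, root='/src', exclude=['**/test/**'])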
+ """
+ include = linter.get('include', [])
+ exclude = lintargs.get('exclude', [])
+ exclude.extend(linter.get('exclude', []))
+ root = lintargs['root']
+
+ if not lintargs.get('use_filters', True) or (not include and not exclude):
+ return paths
+
+ def normalize(path):
+ if not os.path.isabs(path):
+ path = os.path.join(root, path)
+ return FilterPath(path)
+
+ include = map(normalize, include)
+ exclude = map(normalize, exclude)
+
+ # Paths with and without globs will be handled separately,
+ # pull them apart now.
+ includepaths = [p for p in include if p.exists]
+ excludepaths = [p for p in exclude if p.exists]
+
+ includeglobs = [p for p in include if not p.exists]
+ excludeglobs = [p for p in exclude if not p.exists]
+
+ extensions = linter.get('extensions')
+ keep = set()
+ discard = set()
+ for path in map(FilterPath, paths):
+ # Exclude bad file extensions
+ if extensions and path.isfile and path.ext not in extensions:
+ continue
+
+ if path.match(excludeglobs):
+ continue
+
+ # First handle include/exclude directives
+        # that exist (i.e. don't have globs)
+ for inc in includepaths:
+ # Only excludes that are subdirectories of the include
+ # path matter.
+ excs = [e for e in excludepaths if inc.contains(e)]
+
+ if path.contains(inc):
+ # If specified path is an ancestor of include path,
+ # then lint the include path.
+ keep.add(inc)
+
+ # We can't apply these exclude paths without explicitly
+ # including every sibling file. Rather than do that,
+ # just return them and hope the underlying linter will
+ # deal with them.
+ discard.update(excs)
+
+ elif inc.contains(path):
+ # If the include path is an ancestor of the specified
+ # path, then add the specified path only if there are
+ # no exclude paths in-between them.
+ if not any(e.contains(path) for e in excs):
+ keep.add(path)
+
+ # Next handle include/exclude directives that
+ # contain globs.
+ if path.isfile:
+ # If the specified path is a file it must be both
+ # matched by an include directive and not matched
+ # by an exclude directive.
+ if not path.match(includeglobs):
+ continue
+
+ keep.add(path)
+ elif path.isdir:
+ # If the specified path is a directory, use a
+ # FileFinder to resolve all relevant globs.
+ path.exclude = [e.path for e in excludeglobs]
+ for pattern in includeglobs:
+ for p, f in path.finder.find(pattern.path):
+ keep.add(path.join(p))
+
+ # Only pass paths we couldn't exclude here to the underlying linter
+ lintargs['exclude'] = [f.path for f in discard]
+ return [f.path for f in keep]
diff --git a/python/mozlint/mozlint/result.py b/python/mozlint/mozlint/result.py
new file mode 100644
index 000000000..0c56f1d76
--- /dev/null
+++ b/python/mozlint/mozlint/result.py
@@ -0,0 +1,88 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from json import dumps, JSONEncoder
+
+
+class ResultContainer(object):
+ """Represents a single lint error and its related metadata.
+
+ :param linter: name of the linter that flagged this error
+ :param path: path to the file containing the error
+ :param message: text describing the error
+ :param lineno: line number that contains the error
+ :param column: column containing the error
+ :param level: severity of the error, either 'warning' or 'error' (default 'error')
+ :param hint: suggestion for fixing the error (optional)
+ :param source: source code context of the error (optional)
+ :param rule: name of the rule that was violated (optional)
+    :param lineoffset: denotes that an error spans multiple lines, of the form
+ (<lineno offset>, <num lines>) (optional)
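+
+    Example (mirroring the test fixtures):
+
+        ResultContainer(linter='bar', path='d/e/f.txt', message="oh no bar",
+                        lineno=4, column=2, level='warning',
+                        rule="bar-not-allowed")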
+ """
+
+ __slots__ = (
+ 'linter',
+ 'path',
+ 'message',
+ 'lineno',
+ 'column',
+ 'hint',
+ 'source',
+ 'level',
+ 'rule',
+ 'lineoffset',
+ )
+
+ def __init__(self, linter, path, message, lineno, column=None, hint=None,
+ source=None, level=None, rule=None, lineoffset=None):
+ self.path = path
+ self.message = message
+ self.lineno = lineno
+ self.column = column
+ self.hint = hint
+ self.source = source
+ self.level = level or 'error'
+ self.linter = linter
+ self.rule = rule
+ self.lineoffset = lineoffset
+
+ def __repr__(self):
+ s = dumps(self, cls=ResultEncoder, indent=2)
+ return "ResultContainer({})".format(s)
+
+
+class ResultEncoder(JSONEncoder):
+ """Class for encoding :class:`~result.ResultContainer`s to json.
+
+ Usage:
+
+ json.dumps(results, cls=ResultEncoder)
+ """
+ def default(self, o):
+ if isinstance(o, ResultContainer):
+ return {a: getattr(o, a) for a in o.__slots__}
+ return JSONEncoder.default(self, o)
+
+
+def from_linter(lintobj, **kwargs):
+ """Create a :class:`~result.ResultContainer` from a LINTER definition.
+
+ Convenience method that pulls defaults from a LINTER
+ definition and forwards them.
+
+ :param lintobj: LINTER obj as defined in a .lint file
+ :param kwargs: same as :class:`~result.ResultContainer`
+ :returns: :class:`~result.ResultContainer` object
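+
+    Example, as used from an external linter's payload (sketch):
+
+        result.from_linter(LINTER, path=path, lineno=i + 1, column=1,
+                           rule="no-foobar")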
+ """
+ attrs = {}
+ for attr in ResultContainer.__slots__:
+ attrs[attr] = kwargs.get(attr, lintobj.get(attr))
+
+ if not attrs['linter']:
+ attrs['linter'] = lintobj.get('name')
+
+ if not attrs['message']:
+ attrs['message'] = lintobj.get('description')
+
+ return ResultContainer(**attrs)
diff --git a/python/mozlint/mozlint/roller.py b/python/mozlint/mozlint/roller.py
new file mode 100644
index 000000000..2d1608dd8
--- /dev/null
+++ b/python/mozlint/mozlint/roller.py
@@ -0,0 +1,154 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import signal
+import sys
+import traceback
+from collections import defaultdict
+from Queue import Empty
+from multiprocessing import (
+ Manager,
+ Pool,
+ cpu_count,
+)
+
+from .errors import LintersNotConfigured
+from .types import supported_types
+from .parser import Parser
+from .vcs import VCSFiles
+
+
+def _run_linters(queue, paths, **lintargs):
+ parse = Parser()
+ results = defaultdict(list)
+ return_code = 0
+
+ while True:
+ try:
+ # The astute reader may wonder what is preventing the worker from
+ # grabbing the next linter from the queue after a SIGINT. Because
+ # this is a Manager.Queue(), it is itself in a child process which
+ # also received SIGINT. By the time the worker gets back here, the
+ # Queue is dead and IOError is raised.
+ linter_path = queue.get(False)
+ except (Empty, IOError):
+ return results, return_code
+
+ # Ideally we would pass the entire LINTER definition as an argument
+ # to the worker instead of re-parsing it. But passing a function from
+ # a dynamically created module (with imp) does not seem to be possible
+ # with multiprocessing on Windows.
+ linter = parse(linter_path)
+ func = supported_types[linter['type']]
+ res = func(paths, linter, **lintargs) or []
+
+ if not isinstance(res, (list, tuple)):
+ if res:
+ return_code = 1
+ continue
+
+ for r in res:
+ results[r.path].append(r)
+
+
+def _run_worker(*args, **lintargs):
+ try:
+ return _run_linters(*args, **lintargs)
+ except Exception:
+        # multiprocessing seems to munge worker exceptions; print the
+        # traceback here so it isn't lost.
+ traceback.print_exc()
+ raise
+ finally:
+ sys.stdout.flush()
+
+
+class LintRoller(object):
+ """Registers and runs linters.
+
+ :param root: Path to which relative paths will be joined. If
+                 unspecified, root is determined from version control,
+                 falling back to the current working directory.
+ :param lintargs: Arguments to pass to the underlying linter(s).
+ """
+
+ def __init__(self, root=None, **lintargs):
+ self.parse = Parser()
+ self.vcs = VCSFiles()
+
+ self.linters = []
+ self.lintargs = lintargs
+ self.lintargs['root'] = root or self.vcs.root or os.getcwd()
+
+ self.return_code = None
+
+ def read(self, paths):
+ """Parse one or more linters and add them to the registry.
+
+ :param paths: A path or iterable of paths to linter definitions.
+ """
+ if isinstance(paths, basestring):
+ paths = (paths,)
+
+ for path in paths:
+ self.linters.append(self.parse(path))
+
+ def roll(self, paths=None, rev=None, workdir=None, num_procs=None):
+ """Run all of the registered linters against the specified file paths.
+
+ :param paths: An iterable of files and/or directories to lint.
+ :param rev: Lint all files touched by the specified revision.
+ :param workdir: Lint all files touched in the working directory.
+ :param num_procs: The number of processes to use. Default: cpu count
+ :return: A dictionary with file names as the key, and a list of
+ :class:`~result.ResultContainer`s as the value.
+ """
+ paths = paths or []
+ if isinstance(paths, basestring):
+ paths = [paths]
+
+ if not self.linters:
+ raise LintersNotConfigured
+
+ # Calculate files from VCS
+ if rev:
+ paths.extend(self.vcs.by_rev(rev))
+ if workdir:
+ paths.extend(self.vcs.by_workdir())
+ paths = paths or ['.']
+ paths = map(os.path.abspath, paths)
+
+ # Set up multiprocessing
+ m = Manager()
+ queue = m.Queue()
+
+ for linter in self.linters:
+ queue.put(linter['path'])
+
+ num_procs = num_procs or cpu_count()
+ num_procs = min(num_procs, len(self.linters))
+ pool = Pool(num_procs)
+
+ all_results = defaultdict(list)
+ workers = []
+ for i in range(num_procs):
+ workers.append(
+ pool.apply_async(_run_worker, args=(queue, paths), kwds=self.lintargs))
+ pool.close()
+
+ # ignore SIGINT in parent so we can still get partial results
+        # from child processes. These should shut down quickly anyway.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+ self.return_code = 0
+ for worker in workers:
+ # parent process blocks on worker.get()
+ results, return_code = worker.get()
+ if results or return_code:
+ self.return_code = 1
+ for k, v in results.iteritems():
+ all_results[k].extend(v)
+ return all_results
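+
+# Typical usage (sketch; the .lint paths are illustrative):
+#
+#     lint = LintRoller(root='/src')
+#     lint.read(['string.lint', 'regex.lint'])
+#     results = lint.roll(['browser/', 'mobile/android/'])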
diff --git a/python/mozlint/mozlint/types.py b/python/mozlint/mozlint/types.py
new file mode 100644
index 000000000..2f49ae2bf
--- /dev/null
+++ b/python/mozlint/mozlint/types.py
@@ -0,0 +1,142 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import re
+import sys
+from abc import ABCMeta, abstractmethod
+
+from mozlog import get_default_logger, commandline, structuredlog
+from mozlog.reader import LogHandler
+
+from . import result
+from .pathutils import filterpaths
+
+
+class BaseType(object):
+ """Abstract base class for all types of linters."""
+ __metaclass__ = ABCMeta
+ batch = False
+
+ def __call__(self, paths, linter, **lintargs):
+ """Run `linter` against `paths` with `lintargs`.
+
+ :param paths: Paths to lint. Can be a file or directory.
+        :param linter: The linter definition the paths are being linted against.
+ :param lintargs: External arguments to the linter not defined in
+ the definition, but passed in by a consumer.
+ :returns: A list of :class:`~result.ResultContainer` objects.
+ """
+ paths = filterpaths(paths, linter, **lintargs)
+ if not paths:
+ print("{}: no files to lint in specified paths".format(linter['name']))
+ return
+
+ if self.batch:
+ return self._lint(paths, linter, **lintargs)
+
+ errors = []
+ try:
+ for p in paths:
+ result = self._lint(p, linter, **lintargs)
+ if result:
+ errors.extend(result)
+ except KeyboardInterrupt:
+ pass
+ return errors
+
+ @abstractmethod
+    def _lint(self, path, linter, **lintargs):
+ pass
+
+
+class LineType(BaseType):
+ """Abstract base class for linter types that check each line individually.
+
+ Subclasses of this linter type will read each file and check the provided
+ payload against each line one by one.
+ """
+ __metaclass__ = ABCMeta
+
+ @abstractmethod
+    def condition(self, payload, line):
+ pass
+
+ def _lint(self, path, linter, **lintargs):
+ payload = linter['payload']
+
+ with open(path, 'r') as fh:
+ lines = fh.readlines()
+
+ errors = []
+ for i, line in enumerate(lines):
+ if self.condition(payload, line):
+ errors.append(result.from_linter(linter, path=path, lineno=i+1))
+
+ return errors
+
+
+class StringType(LineType):
+ """Linter type that checks whether a substring is found."""
+
+ def condition(self, payload, line):
+ return payload in line
+
+
+class RegexType(LineType):
+ """Linter type that checks whether a regex match is found."""
+
+ def condition(self, payload, line):
+ return re.search(payload, line)
+
+
+class ExternalType(BaseType):
+ """Linter type that runs an external function.
+
+ The function is responsible for properly formatting the results
+ into a list of :class:`~result.ResultContainer` objects.
+ """
+ batch = True
+
+ def _lint(self, files, linter, **lintargs):
+ payload = linter['payload']
+ return payload(files, **lintargs)
+
+
+class LintHandler(LogHandler):
+ def __init__(self, linter):
+ self.linter = linter
+ self.results = []
+
+ def lint(self, data):
+ self.results.append(result.from_linter(self.linter, **data))
+
+
+class StructuredLogType(BaseType):
+ batch = True
+
+ def _lint(self, files, linter, **lintargs):
+ payload = linter["payload"]
+ handler = LintHandler(linter)
+ logger = linter.get("logger")
+ if logger is None:
+ logger = get_default_logger()
+ if logger is None:
+ logger = structuredlog.StructuredLogger(linter["name"])
+ commandline.setup_logging(logger, {}, {"mach": sys.stdout})
+ logger.add_handler(handler)
+ try:
+ payload(files, logger, **lintargs)
+ except KeyboardInterrupt:
+ pass
+ return handler.results
+
+supported_types = {
+ 'string': StringType(),
+ 'regex': RegexType(),
+ 'external': ExternalType(),
+ 'structured_log': StructuredLogType()
+}
+"""Mapping of type string to an associated instance."""
diff --git a/python/mozlint/mozlint/vcs.py b/python/mozlint/mozlint/vcs.py
new file mode 100644
index 000000000..6a118f2e6
--- /dev/null
+++ b/python/mozlint/mozlint/vcs.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import subprocess
+
+
+class VCSFiles(object):
+ def __init__(self):
+ self._root = None
+ self._vcs = None
+
+ @property
+ def root(self):
+ if self._root:
+ return self._root
+
+ # First check if we're in an hg repo, if not try git
+ commands = (
+ ['hg', 'root'],
+ ['git', 'rev-parse', '--show-toplevel'],
+ )
+
+ for cmd in commands:
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = proc.communicate()[0].strip()
+
+ if proc.returncode == 0:
+ self._vcs = cmd[0]
+ self._root = output
+ return self._root
+
+ @property
+ def vcs(self):
+        if self._vcs:
+            return self._vcs
+        # Accessing self.root runs the detection commands and sets self._vcs
+        # as a side effect.
+        return self.root and self._vcs
+
+ @property
+ def is_hg(self):
+ return self.vcs == 'hg'
+
+ @property
+ def is_git(self):
+ return self.vcs == 'git'
+
+ def _run(self, cmd):
+ files = subprocess.check_output(cmd).split()
+ return [os.path.join(self.root, f) for f in files]
+
+ def by_rev(self, rev):
+ if self.is_hg:
+ return self._run(['hg', 'log', '--template', '{files % "\\n{file}"}', '-r', rev])
+ elif self.is_git:
+ return self._run(['git', 'diff', '--name-only', rev])
+ return []
+
+ def by_workdir(self):
+ if self.is_hg:
+ return self._run(['hg', 'status', '-amn'])
+ elif self.is_git:
+ return self._run(['git', 'diff', '--name-only'])
+ return []
diff --git a/python/mozlint/setup.py b/python/mozlint/setup.py
new file mode 100644
index 000000000..62d25c38b
--- /dev/null
+++ b/python/mozlint/setup.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+VERSION = '0.1'
+DEPS = ["mozlog>=3.4"]
+
+setup(
+ name='mozlint',
+ description='Framework for registering and running micro lints',
+ license='MPL 2.0',
+ author='Andrew Halberstadt',
+ author_email='ahalberstadt@mozilla.com',
+ url='',
+ packages=['mozlint'],
+ version=VERSION,
+ classifiers=[
+ 'Environment :: Console',
+ 'Development Status :: 3 - Alpha',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ ],
+ install_requires=DEPS,
+)
diff --git a/python/mozlint/test/__init__.py b/python/mozlint/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozlint/test/__init__.py
diff --git a/python/mozlint/test/conftest.py b/python/mozlint/test/conftest.py
new file mode 100644
index 000000000..e171798b0
--- /dev/null
+++ b/python/mozlint/test/conftest.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import pytest
+
+from mozlint import LintRoller
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture
+def lint(request):
+ lintargs = getattr(request.module, 'lintargs', {})
+ return LintRoller(root=here, **lintargs)
+
+
+@pytest.fixture(scope='session')
+def filedir():
+ return os.path.join(here, 'files')
+
+
+@pytest.fixture(scope='module')
+def files(filedir, request):
+ suffix_filter = getattr(request.module, 'files', [''])
+ return [os.path.join(filedir, p) for p in os.listdir(filedir)
+ if any(p.endswith(suffix) for suffix in suffix_filter)]
+
+
+@pytest.fixture(scope='session')
+def lintdir():
+ return os.path.join(here, 'linters')
+
+
+@pytest.fixture(scope='module')
+def linters(lintdir, request):
+ suffix_filter = getattr(request.module, 'linters', ['.lint'])
+ return [os.path.join(lintdir, p) for p in os.listdir(lintdir)
+ if any(p.endswith(suffix) for suffix in suffix_filter)]
diff --git a/python/mozlint/test/files/foobar.js b/python/mozlint/test/files/foobar.js
new file mode 100644
index 000000000..d9754d0a2
--- /dev/null
+++ b/python/mozlint/test/files/foobar.js
@@ -0,0 +1,2 @@
+// Oh no.. we called this variable foobar, bad!
+var foobar = "a string";
diff --git a/python/mozlint/test/files/foobar.py b/python/mozlint/test/files/foobar.py
new file mode 100644
index 000000000..e1677b3fd
--- /dev/null
+++ b/python/mozlint/test/files/foobar.py
@@ -0,0 +1,2 @@
+# Oh no.. we called this variable foobar, bad!
+foobar = "a string"
diff --git a/python/mozlint/test/files/no_foobar.js b/python/mozlint/test/files/no_foobar.js
new file mode 100644
index 000000000..6b95d646c
--- /dev/null
+++ b/python/mozlint/test/files/no_foobar.js
@@ -0,0 +1,2 @@
+// What a relief
+var properlyNamed = "a string";
diff --git a/python/mozlint/test/linters/badreturncode.lint b/python/mozlint/test/linters/badreturncode.lint
new file mode 100644
index 000000000..398d51a55
--- /dev/null
+++ b/python/mozlint/test/linters/badreturncode.lint
@@ -0,0 +1,21 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def lint(files, **lintargs):
+ return 1
+
+
+LINTER = {
+ 'name': "BadReturnCodeLinter",
+ 'description': "Returns an error code no matter what",
+ 'include': [
+ 'files',
+ ],
+ 'type': 'external',
+ 'extensions': ['.js', '.jsm'],
+ 'payload': lint,
+}
diff --git a/python/mozlint/test/linters/explicit_path.lint b/python/mozlint/test/linters/explicit_path.lint
new file mode 100644
index 000000000..8c1a88a1f
--- /dev/null
+++ b/python/mozlint/test/linters/explicit_path.lint
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "ExplicitPathLinter",
+ 'description': "Only lint a specific file name",
+ 'rule': 'no-foobar',
+ 'include': [
+ 'no_foobar.js',
+ ],
+ 'type': 'string',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/external.lint b/python/mozlint/test/linters/external.lint
new file mode 100644
index 000000000..dcae419db
--- /dev/null
+++ b/python/mozlint/test/linters/external.lint
@@ -0,0 +1,30 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozlint import result
+
+
+def lint(files, **lintargs):
+ results = []
+ for path in files:
+ with open(path, 'r') as fh:
+ for i, line in enumerate(fh.readlines()):
+ if 'foobar' in line:
+ results.append(result.from_linter(
+ LINTER, path=path, lineno=i+1, column=1, rule="no-foobar"))
+ return results
+
+
+LINTER = {
+ 'name': "ExternalLinter",
+ 'description': "It's bad to have the string foobar in js files.",
+ 'include': [
+ 'files',
+ ],
+ 'type': 'external',
+ 'extensions': ['.js', '.jsm'],
+ 'payload': lint,
+}
diff --git a/python/mozlint/test/linters/invalid_exclude.lint b/python/mozlint/test/linters/invalid_exclude.lint
new file mode 100644
index 000000000..be6d0045c
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_exclude.lint
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "BadExcludeLinter",
+ 'description': "Has an invalid exclude directive.",
+ 'exclude': [0, 1], # should be a list of strings
+ 'type': 'string',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/invalid_extension.lnt b/python/mozlint/test/linters/invalid_extension.lnt
new file mode 100644
index 000000000..3cb8153a0
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_extension.lnt
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "BadExtensionLinter",
+ 'description': "Has an invalid file extension.",
+ 'type': 'string',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/invalid_include.lint b/python/mozlint/test/linters/invalid_include.lint
new file mode 100644
index 000000000..343d5e195
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_include.lint
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "BadIncludeLinter",
+ 'description': "Has an invalid include directive.",
+ 'include': 'should be a list',
+ 'type': 'string',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/invalid_type.lint b/python/mozlint/test/linters/invalid_type.lint
new file mode 100644
index 000000000..9e5926c5a
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_type.lint
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "BadTypeLinter",
+ 'description': "Has an invalid type.",
+ 'type': 'invalid',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/missing_attrs.lint b/python/mozlint/test/linters/missing_attrs.lint
new file mode 100644
index 000000000..380512b64
--- /dev/null
+++ b/python/mozlint/test/linters/missing_attrs.lint
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "MissingAttrsLinter",
+ 'description': "Missing type and payload",
+}
diff --git a/python/mozlint/test/linters/missing_definition.lint b/python/mozlint/test/linters/missing_definition.lint
new file mode 100644
index 000000000..a84b305d2
--- /dev/null
+++ b/python/mozlint/test/linters/missing_definition.lint
@@ -0,0 +1,4 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+# No LINTER variable
diff --git a/python/mozlint/test/linters/raises.lint b/python/mozlint/test/linters/raises.lint
new file mode 100644
index 000000000..f17e18733
--- /dev/null
+++ b/python/mozlint/test/linters/raises.lint
@@ -0,0 +1,19 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozlint.errors import LintException
+
+
+def lint(files, **lintargs):
+ raise LintException("Oh no something bad happened!")
+
+
+LINTER = {
+ 'name': "RaisesLinter",
+ 'description': "Raises an exception",
+ 'type': 'external',
+ 'payload': lint,
+}
diff --git a/python/mozlint/test/linters/regex.lint b/python/mozlint/test/linters/regex.lint
new file mode 100644
index 000000000..439cadf36
--- /dev/null
+++ b/python/mozlint/test/linters/regex.lint
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "RegexLinter",
+ 'description': "Make sure the string 'foobar' never appears "
+ "in a js variable file because it is bad.",
+ 'rule': 'no-foobar',
+ 'include': [
+ '**/*.js',
+ '**/*.jsm',
+ ],
+ 'type': 'regex',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/string.lint b/python/mozlint/test/linters/string.lint
new file mode 100644
index 000000000..46bf0e8b8
--- /dev/null
+++ b/python/mozlint/test/linters/string.lint
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+
+LINTER = {
+ 'name': "StringLinter",
+ 'description': "Make sure the string 'foobar' never appears "
+ "in browser js files because it is bad.",
+ 'rule': 'no-foobar',
+ 'include': [
+ '**/*.js',
+ '**/*.jsm',
+ ],
+ 'type': 'string',
+ 'payload': 'foobar',
+}
diff --git a/python/mozlint/test/linters/structured.lint b/python/mozlint/test/linters/structured.lint
new file mode 100644
index 000000000..e8be8d7b3
--- /dev/null
+++ b/python/mozlint/test/linters/structured.lint
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def lint(files, logger, **kwargs):
+ for path in files:
+ with open(path, 'r') as fh:
+ for i, line in enumerate(fh.readlines()):
+ if 'foobar' in line:
+ logger.lint_error(path=path,
+ lineno=i+1,
+ column=1,
+ rule="no-foobar")
+
+
+LINTER = {
+ 'name': "StructuredLinter",
+ 'description': "It's bad to have the string foobar in js files.",
+ 'include': [
+ 'files',
+ ],
+ 'type': 'structured_log',
+ 'extensions': ['.js', '.jsm'],
+ 'payload': lint,
+}
diff --git a/python/mozlint/test/test_formatters.py b/python/mozlint/test/test_formatters.py
new file mode 100644
index 000000000..b9e6512b2
--- /dev/null
+++ b/python/mozlint/test/test_formatters.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import json
+import sys
+from collections import defaultdict
+
+import pytest
+
+from mozlint import ResultContainer
+from mozlint import formatters
+
+
+@pytest.fixture(scope='module')
+def results():
+ containers = (
+ ResultContainer(
+ linter='foo',
+ path='a/b/c.txt',
+ message="oh no foo",
+ lineno=1,
+ ),
+ ResultContainer(
+ linter='bar',
+ path='d/e/f.txt',
+ message="oh no bar",
+ hint="try baz instead",
+ level='warning',
+ lineno=4,
+ column=2,
+ rule="bar-not-allowed",
+ ),
+ ResultContainer(
+ linter='baz',
+ path='a/b/c.txt',
+ message="oh no baz",
+ lineno=4,
+ source="if baz:",
+ ),
+ )
+ results = defaultdict(list)
+ for c in containers:
+ results[c.path].append(c)
+ return results
+
+
+def test_stylish_formatter(results):
+ expected = """
+a/b/c.txt
+ 1 error oh no foo (foo)
+ 4 error oh no baz (baz)
+
+d/e/f.txt
+ 4:2 warning oh no bar bar-not-allowed (bar)
+
+\u2716 3 problems (2 errors, 1 warning)
+""".strip()
+
+ fmt = formatters.get('stylish', disable_colors=True)
+ assert expected == fmt(results)
+
+
+def test_treeherder_formatter(results):
+ expected = """
+TEST-UNEXPECTED-ERROR | a/b/c.txt:1 | oh no foo (foo)
+TEST-UNEXPECTED-ERROR | a/b/c.txt:4 | oh no baz (baz)
+TEST-UNEXPECTED-WARNING | d/e/f.txt:4:2 | oh no bar (bar-not-allowed)
+""".strip()
+
+ fmt = formatters.get('treeherder')
+ assert expected == fmt(results)
+
+
+def test_json_formatter(results):
+ fmt = formatters.get('json')
+ formatted = json.loads(fmt(results))
+
+ assert set(formatted.keys()) == set(results.keys())
+
+ slots = ResultContainer.__slots__
+ for errors in formatted.values():
+ for err in errors:
+ assert all(s in err for s in slots)
+
+
+if __name__ == '__main__':
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_parser.py b/python/mozlint/test/test_parser.py
new file mode 100644
index 000000000..e18e7a5a9
--- /dev/null
+++ b/python/mozlint/test/test_parser.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import pytest
+
+from mozlint.parser import Parser
+from mozlint.errors import (
+ LinterNotFound,
+ LinterParseError,
+)
+
+
+@pytest.fixture(scope='module')
+def parse(lintdir):
+ parser = Parser()
+
+ def _parse(name):
+ path = os.path.join(lintdir, name)
+ return parser(path)
+ return _parse
+
+
+def test_parse_valid_linter(parse):
+ lintobj = parse('string.lint')
+ assert isinstance(lintobj, dict)
+ assert 'name' in lintobj
+ assert 'description' in lintobj
+ assert 'type' in lintobj
+ assert 'payload' in lintobj
+
+
+@pytest.mark.parametrize('linter', [
+ 'invalid_type.lint',
+ 'invalid_extension.lnt',
+ 'invalid_include.lint',
+ 'invalid_exclude.lint',
+ 'missing_attrs.lint',
+ 'missing_definition.lint',
+])
+def test_parse_invalid_linter(parse, linter):
+ with pytest.raises(LinterParseError):
+ parse(linter)
+
+
+def test_parse_non_existent_linter(parse):
+ with pytest.raises(LinterNotFound):
+ parse('missing_file.lint')
+
+
+if __name__ == '__main__':
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_roller.py b/python/mozlint/test/test_roller.py
new file mode 100644
index 000000000..b4b82c346
--- /dev/null
+++ b/python/mozlint/test/test_roller.py
@@ -0,0 +1,82 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import pytest
+
+from mozlint import ResultContainer
+from mozlint.errors import LintersNotConfigured, LintException
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture
+def linters(lintdir):
+    # Resolve the sample linter definitions against the lintdir fixture so
+    # pytest can inject them into the tests below.
+    return [os.path.join(lintdir, name)
+            for name in ('string.lint', 'regex.lint', 'external.lint')]
+
+
+def test_roll_no_linters_configured(lint, files):
+ with pytest.raises(LintersNotConfigured):
+ lint.roll(files)
+
+
+def test_roll_successful(lint, linters, files):
+ lint.read(linters)
+
+ result = lint.roll(files)
+ assert len(result) == 1
+ assert lint.return_code == 1
+
+    path = list(result.keys())[0]
+ assert os.path.basename(path) == 'foobar.js'
+
+ errors = result[path]
+ assert isinstance(errors, list)
+ assert len(errors) == 6
+
+ container = errors[0]
+ assert isinstance(container, ResultContainer)
+ assert container.rule == 'no-foobar'
+
+
+def test_roll_catch_exception(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, 'raises.lint'))
+
+    # suppress printed traceback from test output
+    old_stderr = sys.stderr
+    sys.stderr = open(os.devnull, 'w')
+    try:
+        with pytest.raises(LintException):
+            lint.roll(files)
+    finally:
+        sys.stderr.close()
+        sys.stderr = old_stderr
+
+
+def test_roll_with_excluded_path(lint, linters, files):
+ lint.lintargs.update({'exclude': ['**/foobar.js']})
+
+ lint.read(linters)
+ result = lint.roll(files)
+
+ assert len(result) == 0
+ assert lint.return_code == 0
+
+
+def test_roll_with_invalid_extension(lint, lintdir, filedir):
+ lint.read(os.path.join(lintdir, 'external.lint'))
+ result = lint.roll(os.path.join(filedir, 'foobar.py'))
+ assert len(result) == 0
+ assert lint.return_code == 0
+
+
+def test_roll_with_failure_code(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, 'badreturncode.lint'))
+
+ assert lint.return_code is None
+ result = lint.roll(files)
+ assert len(result) == 0
+ assert lint.return_code == 1
+
+
+if __name__ == '__main__':
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozlint/test/test_types.py b/python/mozlint/test/test_types.py
new file mode 100644
index 000000000..ee0ea9b63
--- /dev/null
+++ b/python/mozlint/test/test_types.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import pytest
+
+from mozlint.result import ResultContainer
+
+
+@pytest.fixture
+def path(filedir):
+ def _path(name):
+ return os.path.join(filedir, name)
+ return _path
+
+
+@pytest.fixture(params=['string.lint', 'regex.lint', 'external.lint', 'structured.lint'])
+def linter(lintdir, request):
+ return os.path.join(lintdir, request.param)
+
+
+def test_linter_types(lint, linter, files, path):
+ lint.read(linter)
+ result = lint.roll(files)
+ assert isinstance(result, dict)
+ assert path('foobar.js') in result
+ assert path('no_foobar.js') not in result
+
+ result = result[path('foobar.js')][0]
+ assert isinstance(result, ResultContainer)
+
+ name = os.path.basename(linter).split('.')[0]
+ assert result.linter.lower().startswith(name)
+
+
+def test_no_filter(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, 'explicit_path.lint'))
+ result = lint.roll(files)
+ assert len(result) == 0
+
+ lint.lintargs['use_filters'] = False
+ result = lint.roll(files)
+ assert len(result) == 2
+
+
+if __name__ == '__main__':
+ sys.exit(pytest.main(['--verbose', __file__]))
diff --git a/python/mozversioncontrol/mozversioncontrol/__init__.py b/python/mozversioncontrol/mozversioncontrol/__init__.py
new file mode 100644
index 000000000..211d42ef1
--- /dev/null
+++ b/python/mozversioncontrol/mozversioncontrol/__init__.py
@@ -0,0 +1,144 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import re
+import subprocess
+import which
+
+from distutils.version import LooseVersion
+
+def get_tool_path(tool):
+ """Obtain the path of `tool`."""
+ # We use subprocess in places, which expects a Win32 executable or
+ # batch script. On some versions of MozillaBuild, we have "hg.exe",
+ # "hg.bat," and "hg" (a Python script). "which" will happily return the
+ # Python script, which will cause subprocess to choke. Explicitly favor
+ # the Windows version over the plain script.
+ try:
+ return which.which(tool + '.exe')
+ except which.WhichError:
+ try:
+ return which.which(tool)
+ except which.WhichError as e:
+ print(e)
+
+    raise Exception(('Unable to obtain %s path. Try running '
+                     '|mach bootstrap| to ensure your environment is up to '
+                     'date.') % tool)
+
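+# Example (illustrative): locate Mercurial, preferring "hg.exe" over the
+# plain "hg" script on Windows, then invoke it.
+#
+#     hg = get_tool_path('hg')
+#     subprocess.check_output([hg, '--version'])
+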
+class Repository(object):
+ '''A class wrapping utility methods around version control repositories.'''
+ def __init__(self, path, tool):
+ self.path = os.path.abspath(path)
+ self._tool = get_tool_path(tool)
+ self._env = os.environ.copy()
+ self._version = None
+
+ def _run(self, *args):
+ return subprocess.check_output((self._tool, ) + args,
+ cwd=self.path,
+ env=self._env)
+
+ @property
+ def tool_version(self):
+ '''Return the version of the VCS tool in use as a `LooseVersion`.'''
+ if self._version:
+ return self._version
+ info = self._run('--version').strip()
+        match = re.search(r'version ([^\+\)]+)', info)
+ if not match:
+ raise Exception('Unable to identify tool version.')
+
+        self._version = LooseVersion(match.group(1))
+        return self._version
+
+ def get_modified_files(self):
+ '''Return a list of files that are modified in this repository's
+ working copy.'''
+ raise NotImplementedError
+
+ def add_remove_files(self, path):
+ '''Add and remove files under `path` in this repository's working copy.
+ '''
+ raise NotImplementedError
+
+ def get_files_in_working_directory(self):
+ """Obtain a list of managed files in the working directory."""
+ raise NotImplementedError
+
+
+class HgRepository(Repository):
+ '''An implementation of `Repository` for Mercurial repositories.'''
+ def __init__(self, path):
+ super(HgRepository, self).__init__(path, 'hg')
+ self._env[b'HGPLAIN'] = b'1'
+
+    def get_modified_files(self):
+        return [line.strip().split()[1]
+                for line in self._run('status', '--modified').splitlines()]
+
+ def add_remove_files(self, path):
+ args = ['addremove', path]
+        if self.tool_version >= LooseVersion('3.9'):
+ args = ['--config', 'extensions.automv='] + args
+ self._run(*args)
+
+ def get_files_in_working_directory(self):
+ # Can return backslashes on Windows. Normalize to forward slashes.
+        return [p.replace('\\', '/') for p in
+                self._run('files', '-0').split('\0')]
+
+
+class GitRepository(Repository):
+ '''An implementation of `Repository` for Git repositories.'''
+ def __init__(self, path):
+ super(GitRepository, self).__init__(path, 'git')
+
+ def get_modified_files(self):
+        # Keep paths whose `git status --porcelain` letters include 'M'.
+        lines = self._run('status', '--porcelain').splitlines()
+        return [bits[1] for bits in (ln.strip().split() for ln in lines)
+                if 'M' in bits[0]]
+
+ def add_remove_files(self, path):
+ self._run('add', path)
+
+ def get_files_in_working_directory(self):
+ return self._run('ls-files', '-z').split('\0')
+
+
+class InvalidRepoPath(Exception):
+ """Represents a failure to find a VCS repo at a specified path."""
+
+
+def get_repository_object(path):
+ '''Get a repository object for the repository at `path`.
+ If `path` is not a known VCS repository, raise an exception.
+ '''
+ if os.path.isdir(os.path.join(path, '.hg')):
+ return HgRepository(path)
+ elif os.path.isdir(os.path.join(path, '.git')):
+ return GitRepository(path)
+ else:
+ raise InvalidRepoPath('Unknown VCS, or not a source checkout: %s' %
+ path)
+
+
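+# Example (illustrative): detect the VCS at the current directory and list
+# locally modified files.
+#
+#     repo = get_repository_object(os.getcwd())
+#     print(repo.get_modified_files())
+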
+def get_repository_from_env():
+ """Obtain a repository object by looking at the environment."""
+ def ancestors(path):
+ while path:
+ yield path
+ path, child = os.path.split(path)
+ if child == '':
+ break
+
+ for path in ancestors(os.getcwd()):
+ try:
+ return get_repository_object(path)
+ except InvalidRepoPath:
+ continue
+
+ raise Exception('Could not find Mercurial or Git checkout for %s' %
+ os.getcwd())
diff --git a/python/mozversioncontrol/mozversioncontrol/repoupdate.py b/python/mozversioncontrol/mozversioncontrol/repoupdate.py
new file mode 100644
index 000000000..08be73a34
--- /dev/null
+++ b/python/mozversioncontrol/mozversioncontrol/repoupdate.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+import subprocess
+
+# The logic here is far from robust. Improvements are welcome.
+
+def update_mercurial_repo(hg, repo, path, revision='default',
+ hostfingerprints=None, global_args=None):
+ """Ensure a HG repository exists at a path and is up to date."""
+ hostfingerprints = hostfingerprints or {}
+
+ args = [hg]
+ if global_args:
+ args.extend(global_args)
+
+ for host, fingerprint in sorted(hostfingerprints.items()):
+ args.extend(['--config', 'hostfingerprints.%s=%s' % (host,
+ fingerprint)])
+
+ if os.path.exists(path):
+ subprocess.check_call(args + ['pull', repo], cwd=path)
+ else:
+ subprocess.check_call(args + ['clone', repo, path])
+
+ subprocess.check_call([hg, 'update', '-r', revision], cwd=path)
+
+
+def update_git_repo(git, repo, path, revision='origin/master'):
+ """Ensure a Git repository exists at a path and is up to date."""
+ if os.path.exists(path):
+ subprocess.check_call([git, 'fetch', '--all'], cwd=path)
+ else:
+ subprocess.check_call([git, 'clone', repo, path])
+
+ subprocess.check_call([git, 'checkout', revision], cwd=path)
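+
+# Example (illustrative): keep a local Mercurial clone current; the path is
+# a placeholder.
+#
+#     update_mercurial_repo('hg', 'https://hg.mozilla.org/mozilla-central',
+#                           '/path/to/clone')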
diff --git a/python/psutil/CREDITS b/python/psutil/CREDITS
new file mode 100644
index 000000000..170751b0a
--- /dev/null
+++ b/python/psutil/CREDITS
@@ -0,0 +1,310 @@
+Intro
+=====
+
+I would like to recognize some of the people who have been instrumental in the
+development of psutil.
+I'm sure I'm forgetting some people (feel free to email me), but here is a
+short list.
+It's modeled after the Linux CREDITS file where the fields are:
+name (N), e-mail (E), web-address (W), country (C), description (D), issues (I)
+(the issue tracker is at https://github.com/giampaolo/psutil/issues).
+Many thanks to all of you.
+
+- Giampaolo
+
+Author
+======
+
+N: Giampaolo Rodola'
+C: Italy
+E: g.rodola@gmail.com
+W: http://grodola.blogspot.com/
+
+Contributors
+============
+
+N: Jay Loden
+C: NJ, USA
+E: jloden@gmail.com
+D: original co-author, initial design/bootstrap and occasional bug fixes
+W: http://www.jayloden.com
+
+N: Jeremy Whitlock
+E: jcscoobyrs@gmail.com
+D: great help with OSX C development.
+I: 125, 150, 174, 206
+
+N: wj32
+E: wj32.64@gmail.com
+D: process username() and get_connections() on Windows
+I: 114, 115
+
+N: Yan Raber
+C: Bologna, Italy
+E: yanraber@gmail.com
+D: help on Windows development (initial version of Process.username())
+
+N: Justin Venus
+E: justin.venus@gmail.com
+D: Solaris support
+I: 18
+
+N: Dave Daeschler
+C: USA
+E: david.daeschler@gmail.com
+W: http://daviddaeschler.com
+D: some contributions to initial design/bootstrap plus occasional bug fixing
+I: 522, 536
+
+N: cjgohlke
+E: cjgohlke@gmail.com
+D: Windows 64 bit support
+I: 107
+
+N: Jeffery Kline
+E: jeffery.kline@gmail.com
+I: 130
+
+N: Gabriel Monnerat
+E: gabrielmonnerat@gmail.com
+I: 146
+
+N: Philip Roberts
+E: philip.roberts@gmail.com
+I: 168
+
+N: jcscoobyrs
+E: jcscoobyrs@gmail.com
+I: 125
+
+N: Sandro Tosi
+E: sandro.tosi@gmail.com
+I: 200, 201
+
+N: Andrew Colin
+E: andrew.colin@gmail.com
+I: 248
+
+N: Amoser
+E: amoser@google.com
+I: 266, 267, 340
+
+N: Matthew Grant
+E: matthewgrant5@gmail.com
+I: 271
+
+N: oweidner
+E: oweidner@cct.lsu.edu
+I: 275
+
+N: Tarek Ziade
+E: ziade.tarek
+I: 281
+
+N: Luca Cipriani
+C: Turin, Italy
+E: luca.opensource@gmail.com
+I: 278
+
+N: Maciej Lach
+E: maciej.lach@gmail.com
+I: 294
+
+N: James Pye
+E: james.pye@gmail.com
+I: 305, 306
+
+N: Stanchev Emil
+E: stanchev.emil
+I: 314
+
+N: Kim Gräsman
+E: kim.grasman@gmail.com
+D: ...also kindly donated some money.
+I: 316
+
+N: Riccardo Murri
+C: Italy
+I: 318
+
+N: Florent Xicluna
+E: florent.xicluna@gmail.com
+I: 319
+
+N: Michal Spondr
+E: michal.spondr
+I: 313
+
+N: Jean Sebastien
+E: dumbboules@gmail.com
+I: 344
+
+N: Rob Smith
+W: http://www.kormoc.com/
+I: 341
+
+N: Youngsik Kim
+W: https://plus.google.com/101320747613749824490/
+I: 317
+
+N: Gregory Szorc
+W: https://plus.google.com/116873264322260110710/posts
+I: 323
+
+N: André Oriani
+E: aoriani@gmail.com
+I: 361
+
+N: clackwell
+E: clackwell@gmail.com
+I: 356
+
+N: m.malycha
+E: m.malycha@gmail.com
+I: 351
+
+N: John Baldwin
+E: jhb@FreeBSD.org
+I: 370
+
+N: Jan Beich
+E: jbeich@tormail.org
+I: 325
+
+N: floppymaster
+E: floppymaster@gmail.com
+I: 380
+
+N: Arfrever.FTA
+E: Arfrever.FTA@gmail.com
+I: 369, 404
+
+N: danudey
+E: danudey@gmail.com
+I: 386
+
+N: Adrien Fallou
+I: 224
+
+N: Gisle Vanem
+E: gisle.vanem@gmail.com
+I: 411
+
+N: thepyr0
+E: thepyr0@gmail.com
+I: 414
+
+N: John Pankov
+E: john.pankov@gmail.com
+I: 435
+
+N: Matt Good
+W: http://matt-good.net/
+I: 438
+
+N: Ulrich Klank
+E: ulrich.klank@scitics.de
+I: 448
+
+N: Josiah Carlson
+E: josiah.carlson@gmail.com
+I: 451, 452
+
+N: Raymond Hettinger
+D: namedtuple and lru_cache backward compatible implementations.
+
+N: Jason Kirtland
+D: backward compatible implementation of collections.defaultdict.
+
+N: Ken Seeho
+D: @cached_property decorator
+
+N: crusaderky
+E: crusaderky@gmail.com
+I: 470, 477
+
+E: alex@mroja.net
+I: 471
+
+N: Gautam Singh
+E: gautam.singh@gmail.com
+I: 466
+
+E: lhn@hupfeldtit.dk
+I: 476, 479
+
+N: Francois Charron
+E: francois.charron.1@gmail.com
+I: 474
+
+N: Naveed Roudsari
+E: naveed.roudsari@gmail.com
+I: 421
+
+N: Alexander Grothe
+E: Alexander.Grothe@gmail.com
+I: 497
+
+N: Szigeti Gabor Niif
+E: szigeti.gabor.niif@gmail.com
+I: 446
+
+N: msabramo
+E: msabramo@gmail.com
+I: 492
+
+N: Jeff Tang
+W: https://github.com/mrjefftang
+I: 340, 529, 616, 653, 654
+
+N: Yaolong Huang
+E: airekans@gmail.com
+W: http://airekans.github.io/
+I: 530
+
+N: Anders Chrigström
+W: https://github.com/anders-chrigstrom
+I: 496
+
+N: spacewander
+E: spacewanderlzx@gmail.com
+I: 561
+
+N: Sylvain Mouquet
+E: sylvain.mouquet@gmail.com
+I: 565
+
+N: karthikrev
+I: 568
+
+N: Bruno Binet
+E: bruno.binet@gmail.com
+I: 572
+
+N: Gabi Davar
+C: Israel
+W: https://github.com/mindw
+I: 578, 581, 587
+
+N: spacewanderlzx
+C: Guangzhou,China
+E: spacewanderlzx@gmail.com
+I: 555
+
+N: Fabian Groffen
+I: 611, 618
+
+N: desbma
+W: https://github.com/desbma
+C: France
+I: 628
+
+N: John Burnett
+W: http://www.johnburnett.com/
+C: Irvine, CA, US
+I: 614
+
+N: Árni Már Jónsson
+C: Reykjavik, Iceland
+W: https://github.com/arnimarj
+I: 634
diff --git a/python/psutil/HISTORY.rst b/python/psutil/HISTORY.rst
new file mode 100644
index 000000000..12b985d1e
--- /dev/null
+++ b/python/psutil/HISTORY.rst
@@ -0,0 +1,1018 @@
+Bug tracker at https://github.com/giampaolo/psutil/issues
+
+3.1.1 - 2015-07-15
+==================
+
+**Bug fixes**
+
+- #645: [Linux] psutil.cpu_times_percent() may produce negative results.
+- #656: 'from psutil import *' does not work.
+
+
+3.1.0 - 2015-07-15
+==================
+
+**Enhancements**
+
+- #534: [Linux] disk_partitions() added support for ZFS filesystems.
+- #646: continuous test integration for Windows with
+ https://ci.appveyor.com/project/giampaolo/psutil.
+- #647: new dev guide:
+ https://github.com/giampaolo/psutil/blob/master/DEVGUIDE.rst
+- #651: continuous code quality test integration with
+ https://scrutinizer-ci.com/g/giampaolo/psutil/
+
+**Bug fixes**
+
+- #340: [Windows] Process.open_files() no longer hangs. Instead it uses a
+  thread which times out and skips the file handle in case it's taking too
+  long to be retrieved. (patch by Jeff Tang, PR #597)
+- #627: [Windows] Process.name() no longer raises AccessDenied for pids owned
+ by another user.
+- #636: [Windows] Process.memory_info() raises AccessDenied.
+- #637: [UNIX] raise exception if trying to send signal to Process PID 0 as it
+ will affect os.getpid()'s process group instead of PID 0.
+- #639: [Linux] Process.cmdline() can be truncated.
+- #640: [Linux] *connections functions may swallow errors and return an
+  incomplete list of connections.
+- #642: repr() of exceptions is incorrect.
+- #653: [Windows] Add inet_ntop function for Windows XP to support IPv6.
+- #641: [Windows] Replace deprecated string functions with safe equivalents.
+
+
+3.0.1 - 2015-06-18
+==================
+
+**Bug fixes**
+
+- #632: [Linux] better error message if psutil cannot parse process UNIX
+  connections.
+- #634: [Linux] Process.cmdline() does not include empty string arguments.
+- #635: [UNIX] crash on module import if 'enum' package is installed on python
+ < 3.4.
+
+
+3.0.0 - 2015-06-13
+==================
+
+**Enhancements**
+
+- #250: new psutil.net_if_stats() returning NIC statistics (isup, duplex,
+ speed, MTU).
+- #376: new psutil.net_if_addrs() returning all NIC addresses a-la ifconfig.
+- #469: on Python >= 3.4 ``IOPRIO_CLASS_*`` and ``*_PRIORITY_CLASS`` constants
+ returned by psutil.Process' ionice() and nice() methods are enums instead of
+ plain integers.
+- #581: add .gitignore. (patch by Gabi Davar)
+- #582: connection constants returned by psutil.net_connections() and
+ psutil.Process.connections() were turned from int to enums on Python > 3.4.
+- #587: Move native extension into the package.
+- #589: Process.cpu_affinity() accepts any kind of iterable (set, tuple, ...),
+ not only lists.
+- #594: all deprecated APIs were removed.
+- #599: [Windows] process name() can now be determined for all processes even
+ when running as a limited user.
+- #602: pre-commit GIT hook.
+- #629: enhanced support for py.test and nose test discovery and test runs.
+- #616: [Windows] Add inet_ntop function for Windows XP.
+
+**Bug fixes**
+
+- #428: [all UNIXes except Linux] correct handling of zombie processes;
+ introduced new ZombieProcess exception class.
+- #512: [BSD] fix segfault in net_connections().
+- #555: [Linux] psutil.users() correctly handles ":0" as an alias for
+  "localhost".
+- #579: [Windows] Fixed open_files() for PID>64K.
+- #579: [Windows] fixed many compiler warnings.
+- #585: [FreeBSD] net_connections() may raise KeyError.
+- #586: [FreeBSD] cpu_affinity() segfaults on set in case an invalid CPU
+ number is provided.
+- #593: [FreeBSD] Process().memory_maps() segfaults.
+- #606: Process.parent() may swallow NoSuchProcess exceptions.
+- #611: [SunOS] net_io_counters has sent and received values swapped.
+- #614: [Linux] cpu_count(logical=False) returns the number of physical CPUs
+  instead of physical cores.
+- #618: [SunOS] swap tests fail on Solaris when run as normal user.
+- #628: [Linux] Process.name() truncates process name in case it contains
+ spaces or parentheses.
+
+
+2.2.1 - 2015-02-02
+==================
+
+**Bug fixes**
+
+- #496: [Linux] fix "ValueError: ambiguos inode with multiple PIDs references"
+ (patch by Bruno Binet)
+
+
+2.2.0 - 2015-01-06
+==================
+
+**Enhancements**
+
+- #521: drop support for Python 2.4 and 2.5.
+- #553: new examples/pstree.py script.
+- #564: C extension version mismatch in case the user messed up the psutil
+  installation or sys.path is now detected at import time.
+- #568: New examples/pidof.py script.
+- #569: [FreeBSD] add support for process CPU affinity.
+
+**Bug fixes**
+
+- #496: [Solaris] can't import psutil.
+- #547: [UNIX] Process.username() may raise KeyError if UID can't be resolved.
+- #551: [Windows] get rid of the unicode hack for net_io_counters() NIC names.
+- #556: [Linux] lots of file handles were left open.
+- #561: [Linux] net_connections() might skip some legitimate UNIX sockets.
+ (patch by spacewander)
+- #565: [Windows] use proper encoding for psutil.Process.username() and
+ psutil.users(). (patch by Sylvain Mouquet)
+- #567: [Linux] in the alternative implementation of CPU affinity PyList_Append
+ and Py_BuildValue return values are not checked.
+- #569: [FreeBSD] fix memory leak in psutil.cpu_count(logical=False).
+- #571: [Linux] Process.open_files() might swallow AccessDenied exceptions and
+ return an incomplete list of open files.
+
+
+2.1.3 - 2014-09-26
+==================
+
+- #536: [Linux]: fix "undefined symbol: CPU_ALLOC" compilation error.
+
+
+2.1.2 - 2014-09-21
+==================
+
+**Enhancements**
+
+- #407: project moved from Google Code to Github; code moved from Mercurial
+ to Git.
+- #492: use tox to run tests on multiple python versions. (patch by msabramo)
+- #505: [Windows] distribution as wheel packages.
+- #511: new examples/ps.py sample code.
+
+**Bug fixes**
+
+- #340: [Windows] Process.get_open_files() no longer hangs. (patch by
+ Jeff Tang)
+- #501: [Windows] disk_io_counters() may return negative values.
+- #503: [Linux] in rare conditions Process exe(), open_files() and
+ connections() methods can raise OSError(ESRCH) instead of NoSuchProcess.
+- #504: [Linux] can't build RPM packages via setup.py
+- #506: [Linux] python 2.4 support was broken.
+- #522: [Linux] Process.cpu_affinity() might return EINVAL. (patch by David
+ Daeschler)
+- #529: [Windows] Process.exe() may raise unhandled WindowsError exception
+ for PIDs 0 and 4. (patch by Jeff Tang)
+- #530: [Linux] psutil.disk_io_counters() may crash on old Linux distros
+ (< 2.6.5) (patch by Yaolong Huang)
+- #533: [Linux] Process.memory_maps() may raise TypeError on old Linux distros.
+
+
+2.1.1 - 2014-04-30
+==================
+
+**Bug fixes**
+
+- #446: [Windows] fix encoding error when using net_io_counters() on Python 3.
+ (patch by Szigeti Gabor Niif)
+- #460: [Windows] net_io_counters() wraps after 4G.
+- #491: [Linux] psutil.net_connections() exceptions. (patch by Alexander Grothe)
+
+
+2.1.0 - 2014-04-08
+==================
+
+**Enhancements**
+
+- #387: system-wide open connections a-la netstat.
+
+**Bug fixes**
+
+- #421: [Solaris] psutil does not compile on SunOS 5.10 (patch by Naveed
+ Roudsari)
+- #489: [Linux] psutil.disk_partitions() return an empty list.
+
+
+2.0.0 - 2014-03-10
+==================
+
+**Enhancements**
+
+- #424: [Windows] installer for Python 3.X 64 bit.
+- #427: number of logical and physical CPUs (psutil.cpu_count()).
+- #447: psutil.wait_procs() timeout parameter is now optional.
+- #452: make Process instances hashable and usable with set()s.
+- #453: tests on Python < 2.7 require unittest2 module.
+- #459: add a make file for running tests and other repetitive tasks (also
+ on Windows).
+- #463: make the timeout parameter of cpu_percent* functions default to 0.0
+  because the old blocking default was a common cause of slowdowns.
+- #468: move documentation to readthedocs.com.
+- #477: process cpu_percent() is about 30% faster. (suggested by crusaderky)
+- #478: [Linux] almost all APIs are about 30% faster on Python 3.X.
+- #479: long deprecated psutil.error module is gone; exception classes now
+ live in "psutil" namespace only.
+
+**Bug fixes**
+
+- #193: psutil.Popen constructor can throw an exception if the spawned process
+ terminates quickly.
+- #340: [Windows] process get_open_files() no longer hangs. (patch by
+ jtang@vahna.net)
+- #443: [Linux] fix a potential overflow issue for Process.set_cpu_affinity()
+ on systems with more than 64 CPUs.
+- #448: [Windows] get_children() and ppid() memory leak (patch by Ulrich
+ Klank).
+- #457: [POSIX] pid_exists() always returns True for PID 0.
+- #461: namedtuples are not pickle-able.
+- #466: [Linux] process exe improper null bytes handling. (patch by
+ Gautam Singh)
+- #470: wait_procs() might not wait. (patch by crusaderky)
+- #471: [Windows] process exe improper unicode handling. (patch by
+ alex@mroja.net)
+- #473: psutil.Popen.wait() does not set returncode attribute.
+- #474: [Windows] Process.cpu_percent() is no longer capped at 100%.
+- #476: [Linux] encoding error for process name and cmdline.
+
+**API changes**
+
+For the sake of consistency a lot of psutil APIs have been renamed.
+In most cases accessing the old names will work but it will cause a
+DeprecationWarning.
+
+- psutil.* module level constants have been replaced by functions:
+
+ +-----------------------+-------------------------------+
+ | Old name | Replacement |
+ +=======================+===============================+
+  | psutil.NUM_CPUS       | psutil.cpu_count()            |
+ +-----------------------+-------------------------------+
+ | psutil.BOOT_TIME | psutil.boot_time() |
+ +-----------------------+-------------------------------+
+ | psutil.TOTAL_PHYMEM | psutil.virtual_memory().total |
+ +-----------------------+-------------------------------+
+
+- Renamed psutil.* functions:
+
+ +--------------------------+-------------------------------+
+ | Old name | Replacement |
+ +==========================+===============================+
+ | - psutil.get_pid_list() | psutil.pids() |
+ +--------------------------+-------------------------------+
+ | - psutil.get_users() | psutil.users() |
+ +--------------------------+-------------------------------+
+ | - psutil.get_boot_time() | psutil.boot_time() |
+ +--------------------------+-------------------------------+
+
+- All psutil.Process ``get_*`` methods lost the ``get_`` prefix.
+ get_ext_memory_info() renamed to memory_info_ex().
+ Assuming "p = psutil.Process()":
+
+ +--------------------------+----------------------+
+ | Old name | Replacement |
+ +==========================+======================+
+ | p.get_children() | p.children() |
+ +--------------------------+----------------------+
+ | p.get_connections() | p.connections() |
+ +--------------------------+----------------------+
+ | p.get_cpu_affinity() | p.cpu_affinity() |
+ +--------------------------+----------------------+
+ | p.get_cpu_percent() | p.cpu_percent() |
+ +--------------------------+----------------------+
+ | p.get_cpu_times() | p.cpu_times() |
+ +--------------------------+----------------------+
+ | p.get_ext_memory_info() | p.memory_info_ex() |
+ +--------------------------+----------------------+
+ | p.get_io_counters() | p.io_counters() |
+ +--------------------------+----------------------+
+ | p.get_ionice() | p.ionice() |
+ +--------------------------+----------------------+
+ | p.get_memory_info() | p.memory_info() |
+ +--------------------------+----------------------+
+ | p.get_memory_maps() | p.memory_maps() |
+ +--------------------------+----------------------+
+ | p.get_memory_percent() | p.memory_percent() |
+ +--------------------------+----------------------+
+ | p.get_nice() | p.nice() |
+ +--------------------------+----------------------+
+ | p.get_num_ctx_switches() | p.num_ctx_switches() |
+ +--------------------------+----------------------+
+ | p.get_num_fds() | p.num_fds() |
+ +--------------------------+----------------------+
+ | p.get_num_threads() | p.num_threads() |
+ +--------------------------+----------------------+
+ | p.get_open_files() | p.open_files() |
+ +--------------------------+----------------------+
+ | p.get_rlimit() | p.rlimit() |
+ +--------------------------+----------------------+
+ | p.get_threads() | p.threads() |
+ +--------------------------+----------------------+
+ | p.getcwd() | p.cwd() |
+ +--------------------------+----------------------+
+
+- All psutil.Process ``set_*`` methods lost the ``set_`` prefix.
+ Assuming "p = psutil.Process()":
+
+ +----------------------+---------------------------------+
+ | Old name | Replacement |
+ +======================+=================================+
+ | p.set_nice() | p.nice(value) |
+ +----------------------+---------------------------------+
+ | p.set_ionice() | p.ionice(ioclass, value=None) |
+ +----------------------+---------------------------------+
+ | p.set_cpu_affinity() | p.cpu_affinity(cpus) |
+ +----------------------+---------------------------------+
+ | p.set_rlimit() | p.rlimit(resource, limits=None) |
+ +----------------------+---------------------------------+
+
+- Except for 'pid' all psutil.Process class properties have been turned into
+  methods. This is the only case in which there are no aliases.
+ Assuming "p = psutil.Process()":
+
+ +---------------+-----------------+
+ | Old name | Replacement |
+ +===============+=================+
+ | p.name | p.name() |
+ +---------------+-----------------+
+ | p.parent | p.parent() |
+ +---------------+-----------------+
+ | p.ppid | p.ppid() |
+ +---------------+-----------------+
+ | p.exe | p.exe() |
+ +---------------+-----------------+
+ | p.cmdline | p.cmdline() |
+ +---------------+-----------------+
+ | p.status | p.status() |
+ +---------------+-----------------+
+ | p.uids | p.uids() |
+ +---------------+-----------------+
+ | p.gids | p.gids() |
+ +---------------+-----------------+
+ | p.username | p.username() |
+ +---------------+-----------------+
+ | p.create_time | p.create_time() |
+ +---------------+-----------------+
+
+- timeout parameter of cpu_percent* functions defaults to 0.0 instead of 0.1.
+- long deprecated psutil.error module is gone; exception classes now live in
+ "psutil" namespace only.
+- Process instances' "retcode" attribute returned by psutil.wait_procs() has
+ been renamed to "returncode" for consistency with subprocess.Popen.
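+
+For example (illustrative), code written against the old 1.x API::
+
+    p = psutil.Process(1234)
+    print(p.name, p.get_cpu_percent())
+
+is written against the 2.0.0 API as::
+
+    p = psutil.Process(1234)
+    print(p.name(), p.cpu_percent())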
+
+
+1.2.1 - 2013-11-25
+==================
+
+**Bug fixes**
+
+- #348: [Windows XP] fixed "ImportError: DLL load failed" occurring on module
+ import.
+- #425: [Solaris] crash on import due to failure at determining BOOT_TIME.
+- #443: [Linux] can't set CPU affinity on systems with more than 64 cores.
+
+
+1.2.0 - 2013-11-20
+==================
+
+**Enhancements**
+
+- #439: assume os.getpid() if no argument is passed to psutil.Process
+ constructor.
+- #440: new psutil.wait_procs() utility function which waits for multiple
+ processes to terminate.
+
+**Bug fixes**
+
+- #348: [Windows XP/Vista] fix "ImportError: DLL load failed" occurring on
+ module import.
+
+
+1.1.3 - 2013-11-07
+==================
+
+**Bug fixes**
+
+- #442: [Linux] psutil won't compile on certain version of Linux because of
+ missing prlimit(2) syscall.
+
+
+1.1.2 - 2013-10-22
+==================
+
+**Bug fixes**
+
+- #442: [Linux] psutil won't compile on Debian 6.0 because of missing
+ prlimit(2) syscall.
+
+
+1.1.1 - 2013-10-08
+==================
+
+**Bug fixes**
+
+- #442: [Linux] psutil won't compile on kernels < 2.6.36 due to missing
+ prlimit(2) syscall.
+
+
+1.1.0 - 2013-09-28
+==================
+
+**Enhancements**
+
+- #410: host tar.gz and windows binary files are on PYPI.
+- #412: [Linux] get/set process resource limits.
+- #415: [Windows] Process.get_children() is an order of magnitude faster.
+- #426: [Windows] Process.name is an order of magnitude faster.
+- #431: [UNIX] Process.name is slightly faster because it no longer
+  unnecessarily retrieves the process cmdline as well.
+
+**Bug fixes**
+
+- #391: [Windows] psutil.cpu_times_percent() returns negative percentages.
+- #408: STATUS_* and CONN_* constants don't properly serialize on JSON.
+- #411: [Windows] examples/disk_usage.py may pop-up a GUI error.
+- #413: [Windows] Process.get_memory_info() leaks memory.
+- #414: [Windows] Process.exe on Windows XP may raise ERROR_INVALID_PARAMETER.
+- #416: psutil.disk_usage() doesn't work well with unicode path names.
+- #430: [Linux] process IO counters report wrong number of r/w syscalls.
+- #435: [Linux] psutil.net_io_counters() might report erroneous NIC names.
+- #436: [Linux] psutil.net_io_counters() reports a wrong 'dropin' value.
+
+**API changes**
+
+- #408: turn STATUS_* and CONN_* constants into plain Python strings.
+
+
+1.0.1 - 2013-07-12
+==================
+
+**Bug fixes**
+
+- #405: network_io_counters(pernic=True) no longer works as intended in 1.0.0.
+
+
+1.0.0 - 2013-07-10
+==================
+
+**Enhancements**
+
+- #18: Solaris support (yay!) (thanks Justin Venus)
+- #367: Process.get_connections() 'status' strings are now constants.
+- #380: test suite exits with non-zero on failure. (patch by floppymaster)
+- #391: introduce unittest2 facilities and provide workarounds if unittest2
+ is not installed (python < 2.7).
+
+**Bug fixes**
+
+- #374: [Windows] negative memory usage reported if process uses a lot of
+ memory.
+- #379: [Linux] Process.get_memory_maps() may raise ValueError.
+- #394: [OSX] Mapped memory regions report incorrect file name.
+- #404: [Linux] sched_*affinity() are implicitly declared. (patch by Arfrever)
+
+**API changes**
+
+- Process.get_connections() 'status' field is no longer a string but a
+ constant object (psutil.CONN_*).
+- Process.get_connections() 'local_address' and 'remote_address' fields
+ renamed to 'laddr' and 'raddr'.
+- psutil.network_io_counters() renamed to psutil.net_io_counters().
+
+
+0.7.1 - 2013-05-03
+==================
+
+**Bug fixes**
+
+- #325: [BSD] psutil.virtual_memory() can raise SystemError.
+ (patch by Jan Beich)
+- #370: [BSD] Process.get_connections() requires root. (patch by John Baldwin)
+- #372: [BSD] different process methods raise NoSuchProcess instead of
+ AccessDenied.
+
+
+0.7.0 - 2013-04-12
+==================
+
+**Enhancements**
+
+- #233: code migrated to Mercurial (yay!)
+- #246: psutil.error module is deprecated and scheduled for removal.
+- #328: [Windows] process IO nice/priority support.
+- #359: psutil.get_boot_time()
+- #361: [Linux] psutil.cpu_times() now includes new 'steal', 'guest' and
+ 'guest_nice' fields available on recent Linux kernels.
+ Also, psutil.cpu_percent() is more accurate.
+- #362: cpu_times_percent() (per-CPU-time utilization as a percentage)
+
+**Bug fixes**
+
+- #234: [Windows] disk_io_counters() fails to list certain disks.
+- #264: [Windows] use of psutil.disk_partitions() may cause a message box to
+ appear.
+- #313: [Linux] psutil.virtual_memory() and psutil.swap_memory() can crash on
+ certain exotic Linux flavors having an incomplete /proc interface.
+ If that's the case we now set the unretrievable stats to 0 and raise a
+ RuntimeWarning.
+- #315: [OSX] fix some compilation warnings.
+- #317: [Windows] cannot set process CPU affinity above 31 cores.
+- #319: [Linux] process get_memory_maps() raises KeyError 'Anonymous' on Debian
+ squeeze.
+- #321: [UNIX] Process.ppid property is no longer cached as the kernel may set
+ the ppid to 1 in case of a zombie process.
+- #323: [OSX] disk_io_counters()'s read_time and write_time parameters were
+ reporting microseconds not milliseconds. (patch by Gregory Szorc)
+- #331: Process cmdline is no longer cached after first access as it may change.
+- #333: [OSX] Leak of Mach ports on OS X (patch by rsesek@google.com)
+- #337: [Linux] process methods not working because of a poor /proc
+ implementation will raise NotImplementedError rather than RuntimeError
+ and Process.as_dict() will not blow up. (patch by Curtin1060)
+- #338: [Linux] disk_io_counters() fails to find some disks.
+- #339: [FreeBSD] get_pid_list() can allocate all the memory on system.
+- #341: [Linux] psutil might crash on import due to error in retrieving system
+ terminals map.
+- #344: [FreeBSD] swap_memory() might return incorrect results due to
+ kvm_open(3) not being called. (patch by Jean Sebastien)
+- #338: [Linux] disk_io_counters() fails to find some disks.
+- #351: [Windows] if psutil is compiled with mingw32 (provided installers for
+ py2.4 and py2.5 are) disk_io_counters() will fail. (Patch by m.malycha)
+- #353: [OSX] get_users() returns an empty list on OSX 10.8.
+- #356: Process.parent now checks whether the parent PID has been reused, in
+  which case it returns None.
+- #365: Process.set_nice() should check PID has not been reused by another
+ process.
+- #366: [FreeBSD] get_memory_maps(), get_num_fds(), get_open_files() and
+ getcwd() Process methods raise RuntimeError instead of AccessDenied.
+
+**API changes**
+
+- Process.cmdline property is no longer cached after first access.
+- Process.ppid property is no longer cached after first access.
+- [Linux] Process methods not working because of a poor /proc implementation
+ will raise NotImplementedError instead of RuntimeError.
+- psutil.error module is deprecated and scheduled for removal.
+
+
+0.6.1 - 2012-08-16
+==================
+
+**Enhancements**
+
+- #316: process cmdline property now does a better job of guessing the process
+ executable from the cmdline.
+
+**Bug fixes**
+
+- #316: process exe was resolved in case it was a symlink.
+- #318: python 2.4 compatibility was broken.
+
+**API changes**
+
+- process exe can now return an empty string instead of raising AccessDenied.
+- process exe is no longer resolved in case it's a symlink.
+
+
+0.6.0 - 2012-08-13
+==================
+
+**Enhancements**
+
+- #216: [POSIX] get_connections() UNIX sockets support.
+- #220: [FreeBSD] get_connections() has been rewritten in C and no longer
+ requires lsof.
+- #222: [OSX] add support for process cwd.
+- #261: process extended memory info.
+- #295: [OSX] process executable path is now determined by asking the OS
+ instead of being guessed from process cmdline.
+- #297: [OSX] the Process methods below were always raising AccessDenied for
+ any process except the current one. Now this is no longer true. Also
+ they are 2.5x faster.
+ - name
+ - get_memory_info()
+ - get_memory_percent()
+ - get_cpu_times()
+ - get_cpu_percent()
+ - get_num_threads()
+- #300: examples/pmap.py script.
+- #301: process_iter() now yields processes sorted by their PIDs.
+- #302: process number of voluntary and involuntary context switches.
+- #303: [Windows] the Process methods below were always raising AccessDenied
+ for any process not owned by current user. Now this is no longer true:
+ - create_time
+ - get_cpu_times()
+ - get_cpu_percent()
+ - get_memory_info()
+ - get_memory_percent()
+ - get_num_handles()
+ - get_io_counters()
+- #305: add examples/netstat.py script.
+- #311: system memory functions have been refactored and rewritten and now
+ provide a more detailed and consistent representation of the system
+ memory. New psutil.virtual_memory() function provides the following
+ memory amounts:
+ - total
+ - available
+ - percent
+ - used
+ - active [POSIX]
+ - inactive [POSIX]
+ - buffers (BSD, Linux)
+ - cached (BSD, OSX)
+ - wired (OSX, BSD)
+ - shared [FreeBSD]
+ New psutil.swap_memory() provides:
+ - total
+ - used
+ - free
+ - percent
+ - sin (no. of bytes the system has swapped in from disk (cumulative))
+ - sout (no. of bytes the system has swapped out from disk (cumulative))
+ All old memory-related functions are deprecated.
+ Also two new example scripts were added: free.py and meminfo.py.
+- #312: psutil.network_io_counters() namedtuple includes 4 new fields:
+  errin, errout, dropin and dropout, reflecting the number of packets
+ dropped and with errors.
+
+**Bugfixes**
+
+- #298: [OSX and BSD] memory leak in get_num_fds().
+- #299: potential memory leak every time PyList_New(0) is used.
+- #303: [Windows] potential heap corruption in get_num_threads() and
+ get_status() Process methods.
+- #305: [FreeBSD] psutil can't compile on FreeBSD 9 due to removal of utmp.h.
+- #306: at C level, errors are not checked when invoking Py* functions which
+ create or manipulate Python objects leading to potential memory related
+ errors and/or segmentation faults.
+- #307: [FreeBSD] values returned by psutil.network_io_counters() are wrong.
+- #308: [BSD / Windows] psutil.virtmem_usage() wasn't actually returning
+ information about swap memory usage as it was supposed to do. It does
+ now.
+- #309: get_open_files() might not return files which can not be accessed
+ due to limited permissions. AccessDenied is now raised instead.
+
+**API changes**
+
+- psutil.phymem_usage() is deprecated (use psutil.virtual_memory())
+- psutil.virtmem_usage() is deprecated (use psutil.swap_memory())
+- psutil.phymem_buffers() on Linux is deprecated (use psutil.virtual_memory())
+- psutil.cached_phymem() on Linux is deprecated (use psutil.virtual_memory())
+- [Windows and BSD] psutil.virtmem_usage() now returns information about swap
+ memory instead of virtual memory.
+
+
+0.5.1 - 2012-06-29
+==================
+
+**Enhancements**
+
+- #293: [Windows] process executable path is now determined by asking the OS
+ instead of being guessed from process cmdline.
+
+**Bugfixes**
+
+- #292: [Linux] race condition in process files/threads/connections.
+- #294: [Windows] Process CPU affinity is only able to set CPU #0.
+
+
+0.5.0 - 2012-06-27
+==================
+
+**Enhancements**
+
+- #195: [Windows] number of handles opened by process.
+- #209: psutil.disk_partitions() now also provides mount options.
+- #229: list users currently connected on the system (psutil.get_users()).
+- #238: [Linux, Windows] process CPU affinity (get and set).
+- #242: Process.get_children(recursive=True): return all process
+ descendants.
+- #245: [POSIX] Process.wait() incrementally consumes less CPU cycles.
+- #257: [Windows] removed Windows 2000 support.
+- #258: [Linux] Process.get_memory_info() is now 0.5x faster.
+- #260: process's mapped memory regions. (Windows patch by wj32.64, OSX patch
+ by Jeremy Whitlock)
+- #262: [Windows] psutil.disk_partitions() was slow due to inspecting the
+  floppy disk drive even when the "all" argument was False.
+- #273: psutil.get_process_list() is deprecated.
+- #274: psutil no longer requires 2to3 at installation time in order to work
+ with Python 3.
+- #278: new Process.as_dict() method.
+- #281: ppid, name, exe, cmdline and create_time properties of Process class
+ are now cached after being accessed.
+- #282: psutil.STATUS_* constants can now be compared by using their string
+ representation.
+- #283: speedup Process.is_running() by caching its return value in case the
+ process is terminated.
+- #284: [POSIX] per-process number of opened file descriptors.
+- #287: psutil.process_iter() now caches Process instances between calls.
+- #290: Process.nice property is deprecated in favor of new get_nice() and
+ set_nice() methods.
+
+**Bugfixes**
+
+- #193: psutil.Popen constructor can throw an exception if the spawned process
+ terminates quickly.
+- #240: [OSX] incorrect use of free() for Process.get_connections().
+- #244: [POSIX] Process.wait() can hog CPU resources if called against a
+  process which is not a child of ours.
+- #248: [Linux] psutil.network_io_counters() might return erroneous NIC names.
+- #252: [Windows] process getcwd() erroneously raise NoSuchProcess for
+ processes owned by another user. It now raises AccessDenied instead.
+- #266: [Windows] psutil.get_pid_list() only shows 1024 processes.
+ (patch by Amoser)
+- #267: [OSX] Process.get_connections() - an erroneous remote address was
+ returned. (Patch by Amoser)
+- #272: [Linux] Process.get_open_files() - potential race condition can lead
+  to an unexpected NoSuchProcess exception. Also, we can get incorrect
+  reports of non-absolutized path names.
+- #275: [Linux] Process.get_io_counters() erroneously raise NoSuchProcess on
+ old Linux versions. Where not available it now raises
+ NotImplementedError.
+- #286: Process.is_running() doesn't actually check whether PID has been
+ reused.
+- #314: Process.get_children() can sometimes return non-children.
+
+**API changes**
+
+- Process.nice property is deprecated in favor of new get_nice() and set_nice()
+ methods.
+- psutil.get_process_list() is deprecated.
+- ppid, name, exe, cmdline and create_time properties of Process class are now
+ cached after being accessed, meaning NoSuchProcess will no longer be raised
+ in case the process is gone in the meantime.
+- psutil.STATUS_* constants can now be compared by using their string
+ representation.
+
+
+0.4.1 - 2011-12-14
+==================
+
+**Bugfixes**
+
+- #228: some example scripts were not working with python 3.
+- #230: [Windows / OSX] memory leak in Process.get_connections().
+- #232: [Linux] psutil.phymem_usage() can report erroneous values which are
+  different from those reported by the "free" command.
+- #236: [Windows] memory/handle leak in Process's get_memory_info(),
+ suspend() and resume() methods.
+
+
+0.4.0 - 2011-10-29
+==================
+
+**Enhancements**
+
+- #150: network I/O counters. (OSX and Windows patch by Jeremy Whitlock)
+- #154: [FreeBSD] add support for process getcwd()
+- #157: [Windows] provide installer for Python 3.2 64-bit.
+- #198: Process.wait(timeout=0) can now be used to make wait() return
+ immediately.
+- #206: disk I/O counters. (OSX and Windows patch by Jeremy Whitlock)
+- #213: examples/iotop.py script.
+- #217: Process.get_connections() now has a "kind" argument to filter
+ for connections with different criteria.
+- #221: [FreeBSD] Process.get_open_files has been rewritten in C and no longer
+ relies on lsof.
+- #223: examples/top.py script.
+- #227: examples/nettop.py script.
+
+**Bugfixes**
+
+- #135: [OSX] psutil cannot create Process object.
+- #144: [Linux] the special PID 0 is no longer supported.
+- #188: [Linux] psutil import error on Linux ARM architectures.
+- #194: [POSIX] psutil.Process.get_cpu_percent() now reports a percentage over
+ 100 on multicore processors.
+- #197: [Linux] Process.get_connections() is broken on platforms not
+ supporting IPv6.
+- #200: [Linux] psutil.NUM_CPUS not working on armel and sparc architectures
+ and causing crash on module import.
+- #201: [Linux] Process.get_connections() is broken on big-endian
+ architectures.
+- #211: Process instance can unexpectedly raise NoSuchProcess if tested for
+ equality with another object.
+- #218: [Linux] crash at import time on Debian 64-bit because of a missing
+ line in /proc/meminfo.
+- #226: [FreeBSD] crash at import time on FreeBSD 7 and minor.
+
+
+0.3.0 - 2011-07-08
+==================
+
+**Enhancements**
+
+- #125: system per-cpu percentage utilization and times.
+- #163: per-process associated terminal (TTY).
+- #171: added get_phymem() and get_virtmem() functions returning system
+ memory information (total, used, free) and memory percent usage.
+ total_* avail_* and used_* memory functions are deprecated.
+- #172: disk usage statistics.
+- #174: mounted disk partitions.
+- #179: setuptools is now used in setup.py
+
+**Bugfixes**
+
+- #159: SetSeDebug() does not close handles or unset impersonation on return.
+- #164: [Windows] wait function raises a TimeoutException when a process
+  returns -1.
+- #165: process.status raises an unhandled exception.
+- #166: get_memory_info() leaks handles hogging system resources.
+- #168: psutil.cpu_percent() returns erroneous results when used in
+ non-blocking mode. (patch by Philip Roberts)
+- #178: OSX - Process.get_threads() leaks memory
+- #180: [Windows] Process's get_num_threads() and get_threads() methods can
+ raise NoSuchProcess exception while process still exists.
+
+
+0.2.1 - 2011-03-20
+==================
+
+**Enhancements**
+
+- #64: per-process I/O counters.
+- #116: per-process wait() (wait for process to terminate and return its exit
+ code).
+- #134: per-process get_threads() returning information (id, user and kernel
+ times) about threads opened by process.
+- #136: process executable path on FreeBSD is now determined by asking the
+ kernel instead of guessing it from cmdline[0].
+- #137: per-process real, effective and saved user and group ids.
+- #140: system boot time.
+- #142: per-process get and set niceness (priority).
+- #143: per-process status.
+- #147: per-process I/O nice (priority) - Linux only.
+- #148: psutil.Popen class which tidies up subprocess.Popen and psutil.Process
+ in a unique interface.
+- #152: [OSX] get_process_open_files() implementation has been rewritten
+ in C and no longer relies on lsof resulting in a 3x speedup.
+- #153: [OSX] get_process_connection() implementation has been rewritten
+ in C and no longer relies on lsof resulting in a 3x speedup.
+
+**Bugfixes**
+
+- #83: process cmdline is empty on OSX 64-bit.
+- #130: a race condition can cause an IOError exception to be raised on
+  Linux if a process disappears between open() and subsequent read() calls.
+- #145: WindowsError was raised instead of psutil.AccessDenied when using
+ process resume() or suspend() on Windows.
+- #146: 'exe' property on Linux can raise TypeError if path contains NULL
+ bytes.
+- #151: exe and getcwd() for PID 0 on Linux return inconsistent data.
+
+**API changes**
+
+- Process "uid" and "gid" properties are deprecated in favor of "uids" and
+ "gids" properties.
+
+
+0.2.0 - 2010-11-13
+==================
+
+**Enhancements**
+
+- #79: per-process open files.
+- #88: total system physical cached memory.
+- #88: total system physical memory buffers used by the kernel.
+- #91: per-process send_signal() and terminate() methods.
+- #95: NoSuchProcess and AccessDenied exception classes now provide "pid",
+ "name" and "msg" attributes.
+- #97: per-process children.
+- #98: Process.get_cpu_times() and Process.get_memory_info() now return
+ a namedtuple instead of a tuple.
+- #103: per-process opened TCP and UDP connections.
+- #107: add support for Windows 64 bit. (patch by cjgohlke)
+- #111: per-process executable name.
+- #113: exception messages now include process name and pid.
+- #114: process username Windows implementation has been rewritten in pure
+ C and no longer uses WMI resulting in a big speedup. Also, pywin32 is no
+  longer required as a third-party dependency. (patch by wj32)
+- #117: added support for Windows 2000.
+- #123: psutil.cpu_percent() and psutil.Process.cpu_percent() accept a
+ new 'interval' parameter.
+- #129: per-process number of threads.
+
+**Bugfixes**
+
+- #80: fixed warnings when installing psutil with easy_install.
+- #81: psutil fails to compile with Visual Studio.
+- #94: suspend() raises OSError instead of AccessDenied.
+- #86: psutil didn't compile against FreeBSD 6.x.
+- #102: orphaned process handles obtained by using OpenProcess in C were
+ left behind every time Process class was instantiated.
+- #111: path and name Process properties report truncated or erroneous
+ values on UNIX.
+- #120: cpu_percent() always returning 100% on OS X.
+- #112: uid and gid properties don't change if process changes effective
+ user/group id at some point.
+- #126: ppid, uid, gid, name, exe, cmdline and create_time properties are
+ no longer cached and correctly raise NoSuchProcess exception if the process
+ disappears.
+
+**API changes**
+
+- psutil.Process.path property is deprecated and works as an alias for "exe"
+ property.
+- psutil.Process.kill(): signal argument was removed - to send a signal to the
+ process use send_signal(signal) method instead.
+- psutil.Process.get_memory_info() returns a namedtuple instead of a tuple.
+- psutil.cpu_times() returns a namedtuple instead of a tuple.
+- New psutil.Process methods: get_open_files(), get_connections(),
+ send_signal() and terminate().
+- ppid, uid, gid, name, exe, cmdline and create_time properties are no longer
+ cached and raise NoSuchProcess exception if process disappears.
+- psutil.cpu_percent() no longer returns immediately (see issue 123).
+- psutil.Process.get_cpu_percent() and psutil.cpu_percent() no longer return
+  immediately by default (see issue 123).
+
+
+0.1.3 - 2010-03-02
+==================
+
+**Enhancements**
+
+- #14: per-process username
+- #51: per-process current working directory (Windows and Linux only)
+- #59: Process.is_running() is now 10 times faster
+- #61: added support for FreeBSD 64 bit
+- #71: implemented suspend/resume process
+- #75: python 3 support
+
+**Bugfixes**
+
+- #36: process cpu_times() and memory_info() functions succeeded for dead
+  processes even though a NoSuchProcess exception should have been raised.
+- #48: incorrect size for mib array defined in getcmdargs for BSD
+- #49: possible memory leak due to missing free() on an error condition
+- #50: fixed getcmdargs() memory fragmentation on BSD
+- #55: test_pid_4 was failing on Windows Vista
+- #57: some unit tests were failing on systems where no swap memory is
+ available
+- #58: is_running() is now called before kill() to make sure we are going
+ to kill the correct process.
+- #73: virtual memory size reported on OS X includes shared library size
+- #77: NoSuchProcess wasn't raised on Process.create_time if kill() was
+ used first.
+
+
+0.1.2 - 2009-05-06
+==================
+
+**Enhancements**
+
+- #32: Per-process CPU user/kernel times
+- #33: Process create time
+- #34: Per-process CPU utilization percentage
+- #38: Per-process memory usage (bytes)
+- #41: Per-process memory utilization (percent)
+- #39: System uptime
+- #43: Total system virtual memory
+- #46: Total system physical memory
+- #44: Total system used/free virtual and physical memory
+
+**Bugfixes**
+
+- #36: [Windows] NoSuchProcess not raised when accessing timing methods.
+- #40: test_get_cpu_times() failing on FreeBSD and OS X.
+- #42: [Windows] get_memory_percent() raises AccessDenied.
+
+
+0.1.1 - 2009-03-06
+==================
+
+**Enhancements**
+
+- #4: FreeBSD support for all functions of psutil
+- #9: Process.uid and Process.gid now retrieve process UID and GID.
+- #11: Support for parent/ppid - Process.parent property returns a
+ Process object representing the parent process, and Process.ppid returns
+ the parent PID.
+- #12 & 15:
+ NoSuchProcess exception now raised when creating an object
+ for a nonexistent process, or when retrieving information about a process
+ that has gone away.
+- #21: AccessDenied exception created for raising access denied errors
+ from OSError or WindowsError on individual platforms.
+- #26: psutil.process_iter() function to iterate over processes as
+ Process objects with a generator.
+- #?: Process objects can now also be compared with == operator for equality
+ (PID, name, command line are compared).
+
+**Bugfixes**
+
+- #16: [Windows] Special case for "System Idle Process" (PID 0) which
+ otherwise would return an "invalid parameter" exception.
+- #17: get_process_list() ignores NoSuchProcess and AccessDenied
+ exceptions during building of the list.
+- #22: [Windows] Process(0).kill() was failing with an unset exception.
+- #23: Special case for pid_exists(0)
+- #24: [Windows] Process(0).kill() now raises AccessDenied exception instead
+ of WindowsError.
+- #30: psutil.get_pid_list() was returning two instances of PID 0 on OSX and
+ FreeBSD platforms.
+
+
+0.1.0 - 2009-01-27
+==================
+
+- Initial release.
diff --git a/python/psutil/INSTALL.rst b/python/psutil/INSTALL.rst
new file mode 100644
index 000000000..e518c430e
--- /dev/null
+++ b/python/psutil/INSTALL.rst
@@ -0,0 +1,116 @@
+============================
+Installing using pip on UNIX
+============================
+
+The easiest way to install psutil on UNIX is by using pip (but first you might
+need to install python header files; see later).
+First install pip::
+
+ $ wget https://bootstrap.pypa.io/get-pip.py
+ $ python get-pip.py
+
+...then run::
+
+ $ pip install psutil
+
+You may need to install gcc and python header files first (see later).
+
+
+=====================
+Installing on Windows
+=====================
+
+Just get the right installer for your Python version and architecture from:
+https://pypi.python.org/pypi/psutil/#downloads
+Since wheel installers are also available, you may also use pip.
+
+
+========================================
+Compiling on Windows using Visual Studio
+========================================
+
+In order to compile psutil on Windows you'll need Visual Studio (Mingw32 is
+no longer supported). You must use the same version of Visual Studio that
+was used to compile your installation of Python, that is:
+
+* Python 2.6: VS 2008
+* Python 2.7: VS 2008
+* Python 3.3, 3.4: VS 2010 (you can download it from `MS website <http://www.visualstudio.com/downloads/download-visual-studio-vs#d-2010-express>`_)
+* Python 3.5: `VS 2015 UP <http://www.visualstudio.com/en-au/news/vs2015-preview-vs>`_
+
+...then run::
+
+ setup.py build
+
+...or::
+
+ make.bat build
+
+Compiling 64-bit versions of Python 2.6 and 2.7 with VS 2008 requires the
+Windows SDK and .NET Framework 3.5 SP1 to be installed first.
+Once you have those, run vcvars64.bat, then compile:
+http://stackoverflow.com/questions/11072521/
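+
+A minimal sketch of that sequence (the SDK path below is an assumption and
+varies per installation; see the link above for details)::
+
+    > "C:\Program Files\Microsoft SDKs\Windows\v7.0\Bin\vcvars64.bat"
+    > python setup.py build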
+
+===================
+Installing on Linux
+===================
+
+gcc is required, as are the Python headers. They can easily be installed
+using your distro's package manager. For example, on Debian and Ubuntu::
+
+ $ sudo apt-get install gcc python-dev
+
+...on Red Hat and CentOS::
+
+ $ sudo yum install gcc python-devel
+
+Once done, you can build/install psutil with::
+
+ $ python setup.py install
+
+
+==================
+Installing on OS X
+==================
+
+OS X installation from source requires gcc, which you can obtain as part of
+the 'XcodeTools' installer from Apple. Then you can run the standard distutils
+commands.
+To build only::
+
+ $ python setup.py build
+
+To build and install::
+
+ $ python setup.py install
+
+
+=====================
+Installing on FreeBSD
+=====================
+
+The same compiler used to install Python must be present on the system in order
+to build modules using distutils. Assuming it is installed, you can build using
+the standard distutils commands.
+
+Build only::
+
+ $ python setup.py build
+
+Build and install::
+
+ $ python setup.py install
+
+
+========
+Makefile
+========
+
+A Makefile is available for UNIX, and a make.bat for Windows. It provides
+some automation for the tasks described above and might be preferred over
+using setup.py directly. With it you can::
+
+ $ make install # just install (in --user mode)
+ $ make uninstall # uninstall (needs pip)
+ $ make test # run tests
+ $ make clean # remove installation files
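+
+Each target can also be pointed at a specific interpreter via the PYTHON
+variable documented in the Makefile itself, e.g.::
+
+    $ make test PYTHON=python3.4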
diff --git a/python/psutil/LICENSE b/python/psutil/LICENSE
new file mode 100644
index 000000000..e91b1359a
--- /dev/null
+++ b/python/psutil/LICENSE
@@ -0,0 +1,27 @@
+psutil is distributed under BSD license reproduced below.
+
+Copyright (c) 2009, Jay Loden, Dave Daeschler, Giampaolo Rodola'
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the psutil authors nor the names of its contributors
+ may be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/python/psutil/MANIFEST.in b/python/psutil/MANIFEST.in
new file mode 100644
index 000000000..d807be289
--- /dev/null
+++ b/python/psutil/MANIFEST.in
@@ -0,0 +1,22 @@
+include .coveragerc
+include .git-pre-commit
+include .gitignore
+include .travis.yml
+include CREDITS
+include HISTORY.rst
+include INSTALL.rst
+include LICENSE
+include make.bat
+include Makefile
+include MANIFEST.in
+include README.rst
+include setup.py
+include TODO
+include tox.ini
+recursive-exclude docs/_build *
+recursive-include .appveyor/*
+recursive-include docs *
+recursive-include examples *.py
+recursive-include psutil *.py *.c *.h
+recursive-include test *.py README*
diff --git a/python/psutil/Makefile b/python/psutil/Makefile
new file mode 100644
index 000000000..1e4eb4b01
--- /dev/null
+++ b/python/psutil/Makefile
@@ -0,0 +1,122 @@
+# Shortcuts for various tasks (UNIX only).
+# To use a specific Python version run:
+# $ make install PYTHON=python3.3
+
+# You can set these variables from the command line.
+PYTHON = python
+TSCRIPT = test/test_psutil.py
+
+all: test
+
+clean:
+ rm -f `find . -type f -name \*.py[co]`
+ rm -f `find . -type f -name \*.so`
+ rm -f `find . -type f -name .\*~`
+ rm -f `find . -type f -name \*.orig`
+ rm -f `find . -type f -name \*.bak`
+ rm -f `find . -type f -name \*.rej`
+ rm -rf `find . -type d -name __pycache__`
+ rm -rf *.core
+ rm -rf *.egg-info
+ rm -rf *\$testfile*
+ rm -rf .coverage
+ rm -rf .tox
+ rm -rf build
+ rm -rf dist
+ rm -rf docs/_build
+ rm -rf htmlcov
+
+build: clean
+ $(PYTHON) setup.py build
+ @# copies *.so files in ./psutil directory in order to allow
+ @# "import psutil" when using the interactive interpreter from within
+ @# this directory.
+ $(PYTHON) setup.py build_ext -i
+
+# useful deps which are nice to have while developing / testing
+setup-dev-env:
+ python -c "import urllib2; \
+ r = urllib2.urlopen('https://bootstrap.pypa.io/get-pip.py'); \
+ open('/tmp/get-pip.py', 'w').write(r.read());"
+ $(PYTHON) /tmp/get-pip.py --user
+ rm /tmp/get-pip.py
+ $(PYTHON) -m pip install --user --upgrade pip
+ $(PYTHON) -m pip install --user --upgrade \
+ coverage \
+ flake8 \
+ ipaddress \
+ ipdb \
+ mock==1.0.1 \
+ nose \
+ pep8 \
+ pyflakes \
+ sphinx \
+ sphinx-pypi-upload \
+ unittest2 \
+
+install: build
+ $(PYTHON) setup.py install --user
+
+uninstall:
+ cd ..; $(PYTHON) -m pip uninstall -y -v psutil
+
+test: install
+ $(PYTHON) $(TSCRIPT)
+
+test-process: install
+ $(PYTHON) -m unittest -v test.test_psutil.TestProcess
+
+test-system: install
+ $(PYTHON) -m unittest -v test.test_psutil.TestSystemAPIs
+
+test-memleaks: install
+ $(PYTHON) test/test_memory_leaks.py
+
+# Run a specific test by name; e.g. "make test-by-name disk_" will run
+# all test methods containing "disk_" in their name.
+# Requires "pip install nose".
+test-by-name: install
+ @$(PYTHON) -m nose test/test_psutil.py test/_* --nocapture -v -m $(filter-out $@,$(MAKECMDGOALS))
+
+# Same as above but for test_memory_leaks.py script.
+test-memleaks-by-name: install
+ @$(PYTHON) -m nose test/test_memory_leaks.py --nocapture -v -m $(filter-out $@,$(MAKECMDGOALS))
+
+coverage: install
+ # Note: coverage options are controlled by .coveragerc file
+ rm -rf .coverage htmlcov
+ $(PYTHON) -m coverage run $(TSCRIPT)
+ $(PYTHON) -m coverage report
+ @echo "writing results to htmlcov/index.html"
+ $(PYTHON) -m coverage html
+ $(PYTHON) -m webbrowser -t htmlcov/index.html
+
+pep8:
+ @git ls-files | grep \\.py$ | xargs $(PYTHON) -m pep8
+
+pyflakes:
+ @export PYFLAKES_NODOCTEST=1 && \
+ git ls-files | grep \\.py$ | xargs $(PYTHON) -m pyflakes
+
+flake8:
+ @git ls-files | grep \\.py$ | xargs $(PYTHON) -m flake8
+
+# Upload source tarball on https://pypi.python.org/pypi/psutil.
+upload-src: clean
+ $(PYTHON) setup.py sdist upload
+
+# Build and upload doc on https://pythonhosted.org/psutil/.
+# Requires "pip install sphinx-pypi-upload".
+upload-doc:
+ cd docs; make html
+ $(PYTHON) setup.py upload_sphinx --upload-dir=docs/_build/html
+
+# git-tag a new release
+git-tag-release:
+ git tag -a release-`python -c "import setup; print(setup.get_version())"` -m `git rev-list HEAD --count`:`git rev-parse --short HEAD`
+ echo "done; now run 'git push --follow-tags' to push the new tag on the remote repo"
+
+# install GIT pre-commit hook
+install-git-hooks:
+ ln -sf ../../.git-pre-commit .git/hooks/pre-commit
+ chmod +x .git/hooks/pre-commit
diff --git a/python/psutil/PKG-INFO b/python/psutil/PKG-INFO
new file mode 100644
index 000000000..e74d33f65
--- /dev/null
+++ b/python/psutil/PKG-INFO
@@ -0,0 +1,434 @@
+Metadata-Version: 1.1
+Name: psutil
+Version: 3.1.1
+Summary: psutil is a cross-platform library for retrieving information on running processes and system utilization (CPU, memory, disks, network) in Python.
+Home-page: https://github.com/giampaolo/psutil
+Author: Giampaolo Rodola
+Author-email: g.rodola <at> gmail <dot> com
+License: BSD
+Description: .. image:: https://img.shields.io/pypi/dm/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil#downloads
+ :alt: Downloads this month
+
+ .. image:: https://api.travis-ci.org/giampaolo/psutil.png?branch=master
+ :target: https://travis-ci.org/giampaolo/psutil
+ :alt: Linux tests (Travis)
+
+ .. image:: https://ci.appveyor.com/api/projects/status/qdwvw7v1t915ywr5/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/giampaolo/psutil
+ :alt: Windows tests (Appveyor)
+
+ .. image:: https://coveralls.io/repos/giampaolo/psutil/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/giampaolo/psutil?branch=master
+ :alt: Test coverage (coverall.io)
+
+ .. image:: https://img.shields.io/pypi/v/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil/
+ :alt: Latest version
+
+ .. image:: https://img.shields.io/github/stars/giampaolo/psutil.svg
+ :target: https://github.com/giampaolo/psutil/
+ :alt: Github stars
+
+ .. image:: https://img.shields.io/scrutinizer/g/giampaolo/psutil.svg
+ :target: https://scrutinizer-ci.com/g/giampaolo/psutil/
+ :alt: Code quality (scrutinizer-ci.com)
+
+ .. image:: https://img.shields.io/pypi/l/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil/
+ :alt: License
+
+ ===========
+ Quick links
+ ===========
+
+ - `Home page <https://github.com/giampaolo/psutil>`_
+ - `Documentation <http://pythonhosted.org/psutil/>`_
+ - `Installation <https://github.com/giampaolo/psutil/blob/master/INSTALL.rst>`_
+ - `Download <https://pypi.python.org/pypi?:action=display&name=psutil#downloads>`_
+ - `Forum <http://groups.google.com/group/psutil/topics>`_
+ - `Blog <http://grodola.blogspot.com/search/label/psutil>`_
+ - `Development guide <https://github.com/giampaolo/psutil/blob/master/DEVGUIDE.rst>`_
+ - `What's new <https://github.com/giampaolo/psutil/blob/master/HISTORY.rst>`_
+
+ =======
+ Summary
+ =======
+
+ psutil (python system and process utilities) is a cross-platform library for
+ retrieving information on **running processes** and **system utilization**
+ (CPU, memory, disks, network) in Python. It is useful mainly for **system
+ monitoring**, **profiling and limiting process resources** and **management of
+ running processes**. It implements many functionalities offered by command line
+ tools such as: ps, top, lsof, netstat, ifconfig, who, df, kill, free, nice,
+ ionice, iostat, iotop, uptime, pidof, tty, taskset, pmap. It currently supports
+ **Linux, Windows, OSX, FreeBSD** and **Sun Solaris**, both **32-bit** and
+ **64-bit** architectures, with Python versions from **2.6 to 3.5** (users of
+ Python 2.4 and 2.5 may use the `2.1.3 <https://pypi.python.org/pypi?name=psutil&version=2.1.3&:action=files>`__ version).
+ `PyPy <http://pypy.org/>`__ is also known to work.
+
+ ====================
+ Example applications
+ ====================
+
+ .. image:: http://psutil.googlecode.com/svn/wiki/images/top-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/top.png
+ :alt: top
+
+ .. image:: http://psutil.googlecode.com/svn/wiki/images/nettop-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/nettop.png
+ :alt: nettop
+
+ .. image:: http://psutil.googlecode.com/svn/wiki/images/iotop-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/iotop.png
+ :alt: iotop
+
+ See also:
+
+ * https://github.com/nicolargo/glances
+ * https://github.com/google/grr
+ * https://github.com/Jahaja/psdash
+
+ ==============
+ Example usages
+ ==============
+
+ CPU
+ ===
+
+ .. code-block:: python
+
+ >>> import psutil
+ >>> psutil.cpu_times()
+ scputimes(user=3961.46, nice=169.729, system=2150.659, idle=16900.540, iowait=629.59, irq=0.0, softirq=19.42, steal=0.0, guest=0.0, guest_nice=0.0)
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_percent(interval=1)
+ ...
+ 4.0
+ 5.9
+ 3.8
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_percent(interval=1, percpu=True)
+ ...
+ [4.0, 6.9, 3.7, 9.2]
+ [7.0, 8.5, 2.4, 2.1]
+ [1.2, 9.0, 9.9, 7.2]
+ >>>
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_times_percent(interval=1, percpu=False)
+ ...
+ scputimes(user=1.5, nice=0.0, system=0.5, idle=96.5, iowait=1.5, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ scputimes(user=1.0, nice=0.0, system=0.0, idle=99.0, iowait=0.0, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ scputimes(user=2.0, nice=0.0, system=0.0, idle=98.0, iowait=0.0, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ >>>
+ >>> psutil.cpu_count()
+ 4
+ >>> psutil.cpu_count(logical=False)
+ 2
+ >>>
+
+ Memory
+ ======
+
+ .. code-block:: python
+
+ >>> psutil.virtual_memory()
+ svmem(total=8374149120, available=2081050624, percent=75.1, used=8074080256, free=300068864, active=3294920704, inactive=1361616896, buffers=529895424, cached=1251086336)
+ >>> psutil.swap_memory()
+ sswap(total=2097147904, used=296128512, free=1801019392, percent=14.1, sin=304193536, sout=677842944)
+ >>>
+
+ Disks
+ =====
+
+ .. code-block:: python
+
+ >>> psutil.disk_partitions()
+ [sdiskpart(device='/dev/sda1', mountpoint='/', fstype='ext4', opts='rw,nosuid'),
+ sdiskpart(device='/dev/sda2', mountpoint='/home', fstype='ext4', opts='rw')]
+ >>>
+ >>> psutil.disk_usage('/')
+ sdiskusage(total=21378641920, used=4809781248, free=15482871808, percent=22.5)
+ >>>
+ >>> psutil.disk_io_counters(perdisk=False)
+ sdiskio(read_count=719566, write_count=1082197, read_bytes=18626220032, write_bytes=24081764352, read_time=5023392, write_time=63199568)
+ >>>
+
+ Network
+ =======
+
+ .. code-block:: python
+
+ >>> psutil.net_io_counters(pernic=True)
+ {'eth0': netio(bytes_sent=485291293, bytes_recv=6004858642, packets_sent=3251564, packets_recv=4787798, errin=0, errout=0, dropin=0, dropout=0),
+ 'lo': netio(bytes_sent=2838627, bytes_recv=2838627, packets_sent=30567, packets_recv=30567, errin=0, errout=0, dropin=0, dropout=0)}
+ >>>
+ >>> psutil.net_connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED', pid=1254),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING', pid=2987),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED', pid=None),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT', pid=None)
+ ...]
+ >>>
+ >>> psutil.net_if_addrs()
+ {'lo': [snic(family=<AddressFamily.AF_INET: 2>, address='127.0.0.1', netmask='255.0.0.0', broadcast='127.0.0.1'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='::1', netmask='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='00:00:00:00:00:00', netmask=None, broadcast='00:00:00:00:00:00')],
+ 'wlan0': [snic(family=<AddressFamily.AF_INET: 2>, address='192.168.1.3', netmask='255.255.255.0', broadcast='192.168.1.255'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='fe80::c685:8ff:fe45:641%wlan0', netmask='ffff:ffff:ffff:ffff::', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='c4:85:08:45:06:41', netmask=None, broadcast='ff:ff:ff:ff:ff:ff')]}
+ >>>
+ >>> psutil.net_if_stats()
+ {'eth0': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_FULL: 2>, speed=100, mtu=1500),
+ 'lo': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_UNKNOWN: 0>, speed=0, mtu=65536)}
+
+ Other system info
+ =================
+
+ .. code-block:: python
+
+ >>> psutil.users()
+ [user(name='giampaolo', terminal='pts/2', host='localhost', started=1340737536.0),
+ user(name='giampaolo', terminal='pts/3', host='localhost', started=1340737792.0)]
+ >>>
+ >>> psutil.boot_time()
+ 1365519115.0
+ >>>
+
+ Process management
+ ==================
+
+ .. code-block:: python
+
+ >>> import psutil
+ >>> psutil.pids()
+ [1, 2, 3, 4, 5, 6, 7, 46, 48, 50, 51, 178, 182, 222, 223, 224,
+ 268, 1215, 1216, 1220, 1221, 1243, 1244, 1301, 1601, 2237, 2355,
+ 2637, 2774, 3932, 4176, 4177, 4185, 4187, 4189, 4225, 4243, 4245,
+ 4263, 4282, 4306, 4311, 4312, 4313, 4314, 4337, 4339, 4357, 4358,
+ 4363, 4383, 4395, 4408, 4433, 4443, 4445, 4446, 5167, 5234, 5235,
+ 5252, 5318, 5424, 5644, 6987, 7054, 7055, 7071]
+ >>>
+ >>> p = psutil.Process(7055)
+ >>> p.name()
+ 'python'
+ >>> p.exe()
+ '/usr/bin/python'
+ >>> p.cwd()
+ '/home/giampaolo'
+ >>> p.cmdline()
+ ['/usr/bin/python', 'main.py']
+ >>>
+ >>> p.status()
+ 'running'
+ >>> p.username()
+ 'giampaolo'
+ >>> p.create_time()
+ 1267551141.5019531
+ >>> p.terminal()
+ '/dev/pts/0'
+ >>>
+ >>> p.uids()
+ puids(real=1000, effective=1000, saved=1000)
+ >>> p.gids()
+ pgids(real=1000, effective=1000, saved=1000)
+ >>>
+ >>> p.cpu_times()
+ pcputimes(user=1.02, system=0.31)
+ >>> p.cpu_percent(interval=1.0)
+ 12.1
+ >>> p.cpu_affinity()
+ [0, 1, 2, 3]
+ >>> p.cpu_affinity([0]) # set
+ >>>
+ >>> p.memory_percent()
+ 0.63423
+ >>>
+ >>> p.memory_info()
+ pmem(rss=7471104, vms=68513792)
+ >>> p.memory_info_ex()
+ extmem(rss=9662464, vms=49192960, shared=3612672, text=2564096, lib=0, data=5754880, dirty=0)
+ >>> p.memory_maps()
+ [pmmap_grouped(path='/lib/x86_64-linux-gnu/libutil-2.15.so', rss=16384, anonymous=8192, swap=0),
+ pmmap_grouped(path='/lib/x86_64-linux-gnu/libc-2.15.so', rss=6384, anonymous=15, swap=0),
+ pmmap_grouped(path='/lib/x86_64-linux-gnu/libcrypto.so.1.0.0', rss=34124, anonymous=1245, swap=0),
+ pmmap_grouped(path='[heap]', rss=54653, anonymous=8192, swap=0),
+ pmmap_grouped(path='[stack]', rss=1542, anonymous=166, swap=0),
+ ...]
+ >>>
+ >>> p.io_counters()
+ pio(read_count=478001, write_count=59371, read_bytes=700416, write_bytes=69632)
+ >>>
+ >>> p.open_files()
+ [popenfile(path='/home/giampaolo/svn/psutil/somefile', fd=3)]
+ >>>
+ >>> p.connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED'),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING'),
+ pconn(fd=119, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED'),
+ pconn(fd=123, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT')]
+ >>>
+ >>> p.num_threads()
+ 4
+ >>> p.num_fds()
+ 8
+ >>> p.threads()
+ [pthread(id=5234, user_time=22.5, system_time=9.2891),
+ pthread(id=5235, user_time=0.0, system_time=0.0),
+ pthread(id=5236, user_time=0.0, system_time=0.0),
+ pthread(id=5237, user_time=0.0707, system_time=1.1)]
+ >>>
+ >>> p.num_ctx_switches()
+ pctxsw(voluntary=78, involuntary=19)
+ >>>
+ >>> p.nice()
+ 0
+ >>> p.nice(10) # set
+ >>>
+ >>> p.ionice(psutil.IOPRIO_CLASS_IDLE) # IO priority (Win and Linux only)
+ >>> p.ionice()
+ pionice(ioclass=<IOPriority.IOPRIO_CLASS_IDLE: 3>, value=0)
+ >>>
+ >>> p.rlimit(psutil.RLIMIT_NOFILE, (5, 5)) # set resource limits (Linux only)
+ >>> p.rlimit(psutil.RLIMIT_NOFILE)
+ (5, 5)
+ >>>
+ >>> p.suspend()
+ >>> p.resume()
+ >>>
+ >>> p.terminate()
+ >>> p.wait(timeout=3)
+ 0
+ >>>
+ >>> psutil.test()
+ USER PID %CPU %MEM VSZ RSS TTY START TIME COMMAND
+ root 1 0.0 0.0 24584 2240 Jun17 00:00 init
+ root 2 0.0 0.0 0 0 Jun17 00:00 kthreadd
+ root 3 0.0 0.0 0 0 Jun17 00:05 ksoftirqd/0
+ ...
+ giampaolo 31475 0.0 0.0 20760 3024 /dev/pts/0 Jun19 00:00 python2.4
+ giampaolo 31721 0.0 2.2 773060 181896 00:04 10:30 chrome
+ root 31763 0.0 0.0 0 0 00:05 00:00 kworker/0:1
+ >>>
+
+ Further process APIs
+ ====================
+
+ .. code-block:: python
+
+ >>> for p in psutil.process_iter():
+ ... print(p)
+ ...
+ psutil.Process(pid=1, name='init')
+ psutil.Process(pid=2, name='kthreadd')
+ psutil.Process(pid=3, name='ksoftirqd/0')
+ ...
+ >>>
+ >>> def on_terminate(proc):
+ ... print("process {} terminated".format(proc))
+ ...
+ >>> # waits for multiple processes to terminate
+ >>> gone, alive = psutil.wait_procs(procs_list, 3, callback=on_terminate)
+ >>>
+
+ ======
+ Donate
+ ======
+
+ A lot of time and effort went into making psutil as it is right now.
+ If you feel psutil is useful to you or your business and want to support its future development, please consider making a donation to me (`Giampaolo Rodola' <http://grodola.blogspot.com/p/about.html>`_).
+ I only ask for a small donation, but of course I appreciate any amount.
+
+ .. image:: http://www.paypal.com/en_US/i/btn/x-click-but04.gif
+ :target: https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=A9ZS7PKKRM3S8
+ :alt: Donate via PayPal
+
+ Don't want to donate money? Then maybe you could `write me a recommendation on LinkedIn <http://www.linkedin.com/in/grodola>`_.
+
+ ============
+ Mailing list
+ ============
+
+ http://groups.google.com/group/psutil/
+
+ ========
+ Timeline
+ ========
+
+ - 2015-07-15: `psutil-3.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.1.1.tar.gz>`_
+ - 2015-07-15: `psutil-3.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.1.0.tar.gz>`_
+ - 2015-06-18: `psutil-3.0.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.0.1.tar.gz>`_
+ - 2015-06-13: `psutil-3.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.0.0.tar.gz>`_
+ - 2015-02-02: `psutil-2.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.2.1.tar.gz>`_
+ - 2015-01-06: `psutil-2.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.2.0.tar.gz>`_
+ - 2014-09-26: `psutil-2.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.3.tar.gz>`_
+ - 2014-09-21: `psutil-2.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.2.tar.gz>`_
+ - 2014-04-30: `psutil-2.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.1.tar.gz>`_
+ - 2014-04-08: `psutil-2.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.0.tar.gz>`_
+ - 2014-03-10: `psutil-2.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.0.0.tar.gz>`_
+ - 2013-11-25: `psutil-1.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.2.1.tar.gz>`_
+ - 2013-11-20: `psutil-1.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.2.0.tar.gz>`_
+ - 2013-11-07: `psutil-1.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.3.tar.gz>`_
+ - 2013-10-22: `psutil-1.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.2.tar.gz>`_
+ - 2013-10-08: `psutil-1.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.1.tar.gz>`_
+ - 2013-09-28: `psutil-1.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.0.tar.gz>`_
+ - 2013-07-12: `psutil-1.0.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.0.1.tar.gz>`_
+ - 2013-07-10: `psutil-1.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.0.0.tar.gz>`_
+ - 2013-05-03: `psutil-0.7.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.7.1.tar.gz>`_
+ - 2013-04-12: `psutil-0.7.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.7.0.tar.gz>`_
+ - 2012-08-16: `psutil-0.6.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.6.1.tar.gz>`_
+ - 2012-08-13: `psutil-0.6.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.6.0.tar.gz>`_
+ - 2012-06-29: `psutil-0.5.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.5.1.tar.gz>`_
+ - 2012-06-27: `psutil-0.5.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.5.0.tar.gz>`_
+ - 2011-12-14: `psutil-0.4.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.4.1.tar.gz>`_
+ - 2011-10-29: `psutil-0.4.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.4.0.tar.gz>`_
+ - 2011-07-08: `psutil-0.3.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.3.0.tar.gz>`_
+ - 2011-03-20: `psutil-0.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.2.1.tar.gz>`_
+ - 2010-11-13: `psutil-0.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.2.0.tar.gz>`_
+ - 2010-03-02: `psutil-0.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.3.tar.gz>`_
+ - 2009-05-06: `psutil-0.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.2.tar.gz>`_
+ - 2009-03-06: `psutil-0.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.1.tar.gz>`_
+ - 2009-01-27: `psutil-0.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.0.tar.gz>`_
+
+Keywords: ps,top,kill,free,lsof,netstat,nice,tty,ionice,uptime,taskmgr,process,df,iotop,iostat,ifconfig,taskset,who,pidof,pmap,smem,pstree,monitoring,ulimit,prlimit
+Platform: Platform Independent
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: Win32 (MS Windows)
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows :: Windows NT/2000
+Classifier: Operating System :: Microsoft
+Classifier: Operating System :: OS Independent
+Classifier: Operating System :: POSIX :: BSD :: FreeBSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: POSIX :: SunOS/Solaris
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System :: Benchmark
+Classifier: Topic :: System :: Hardware
+Classifier: Topic :: System :: Monitoring
+Classifier: Topic :: System :: Networking :: Monitoring
+Classifier: Topic :: System :: Networking
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/python/psutil/README.rst b/python/psutil/README.rst
new file mode 100644
index 000000000..564656146
--- /dev/null
+++ b/python/psutil/README.rst
@@ -0,0 +1,386 @@
+.. image:: https://img.shields.io/pypi/dm/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil#downloads
+ :alt: Downloads this month
+
+.. image:: https://api.travis-ci.org/giampaolo/psutil.png?branch=master
+ :target: https://travis-ci.org/giampaolo/psutil
+ :alt: Linux tests (Travis)
+
+.. image:: https://ci.appveyor.com/api/projects/status/qdwvw7v1t915ywr5/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/giampaolo/psutil
+ :alt: Windows tests (Appveyor)
+
+.. image:: https://coveralls.io/repos/giampaolo/psutil/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/giampaolo/psutil?branch=master
+ :alt: Test coverage (coverall.io)
+
+.. image:: https://img.shields.io/pypi/v/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil/
+ :alt: Latest version
+
+.. image:: https://img.shields.io/github/stars/giampaolo/psutil.svg
+ :target: https://github.com/giampaolo/psutil/
+ :alt: Github stars
+
+.. image:: https://img.shields.io/scrutinizer/g/giampaolo/psutil.svg
+ :target: https://scrutinizer-ci.com/g/giampaolo/psutil/
+ :alt: Code quality (scrutinizer-ci.com)
+
+.. image:: https://img.shields.io/pypi/l/psutil.svg
+ :target: https://pypi.python.org/pypi/psutil/
+ :alt: License
+
+===========
+Quick links
+===========
+
+- `Home page <https://github.com/giampaolo/psutil>`_
+- `Documentation <http://pythonhosted.org/psutil/>`_
+- `Installation <https://github.com/giampaolo/psutil/blob/master/INSTALL.rst>`_
+- `Download <https://pypi.python.org/pypi?:action=display&name=psutil#downloads>`_
+- `Forum <http://groups.google.com/group/psutil/topics>`_
+- `Blog <http://grodola.blogspot.com/search/label/psutil>`_
+- `Development guide <https://github.com/giampaolo/psutil/blob/master/DEVGUIDE.rst>`_
+- `What's new <https://github.com/giampaolo/psutil/blob/master/HISTORY.rst>`_
+
+=======
+Summary
+=======
+
+psutil (python system and process utilities) is a cross-platform library for
+retrieving information on **running processes** and **system utilization**
+(CPU, memory, disks, network) in Python. It is useful mainly for **system
+monitoring**, **profiling and limiting process resources** and **management of
+running processes**. It implements many functionalities offered by command line
+tools such as: ps, top, lsof, netstat, ifconfig, who, df, kill, free, nice,
+ionice, iostat, iotop, uptime, pidof, tty, taskset, pmap. It currently supports
+**Linux, Windows, OSX, FreeBSD** and **Sun Solaris**, both **32-bit** and
+**64-bit** architectures, with Python versions from **2.6 to 3.5** (users of
+Python 2.4 and 2.5 may use the `2.1.3 <https://pypi.python.org/pypi?name=psutil&version=2.1.3&:action=files>`__ version).
+`PyPy <http://pypy.org/>`__ is also known to work.
+
+====================
+Example applications
+====================
+
+.. image:: http://psutil.googlecode.com/svn/wiki/images/top-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/top.png
+ :alt: top
+
+.. image:: http://psutil.googlecode.com/svn/wiki/images/nettop-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/nettop.png
+ :alt: nettop
+
+.. image:: http://psutil.googlecode.com/svn/wiki/images/iotop-thumb.png
+ :target: http://psutil.googlecode.com/svn/wiki/images/iotop.png
+ :alt: iotop
+
+See also:
+
+ * https://github.com/nicolargo/glances
+ * https://github.com/google/grr
+ * https://github.com/Jahaja/psdash
+
+==============
+Example usages
+==============
+
+CPU
+===
+
+.. code-block:: python
+
+ >>> import psutil
+ >>> psutil.cpu_times()
+ scputimes(user=3961.46, nice=169.729, system=2150.659, idle=16900.540, iowait=629.59, irq=0.0, softirq=19.42, steal=0.0, guest=0.0, guest_nice=0.0)
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_percent(interval=1)
+ ...
+ 4.0
+ 5.9
+ 3.8
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_percent(interval=1, percpu=True)
+ ...
+ [4.0, 6.9, 3.7, 9.2]
+ [7.0, 8.5, 2.4, 2.1]
+ [1.2, 9.0, 9.9, 7.2]
+ >>>
+ >>>
+ >>> for x in range(3):
+ ... psutil.cpu_times_percent(interval=1, percpu=False)
+ ...
+ scputimes(user=1.5, nice=0.0, system=0.5, idle=96.5, iowait=1.5, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ scputimes(user=1.0, nice=0.0, system=0.0, idle=99.0, iowait=0.0, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ scputimes(user=2.0, nice=0.0, system=0.0, idle=98.0, iowait=0.0, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ >>>
+ >>> psutil.cpu_count()
+ 4
+ >>> psutil.cpu_count(logical=False)
+ 2
+ >>>
+
+Memory
+======
+
+.. code-block:: python
+
+ >>> psutil.virtual_memory()
+ svmem(total=8374149120, available=2081050624, percent=75.1, used=8074080256, free=300068864, active=3294920704, inactive=1361616896, buffers=529895424, cached=1251086336)
+ >>> psutil.swap_memory()
+ sswap(total=2097147904, used=296128512, free=1801019392, percent=14.1, sin=304193536, sout=677842944)
+ >>>
+
+Disks
+=====
+
+.. code-block:: python
+
+ >>> psutil.disk_partitions()
+ [sdiskpart(device='/dev/sda1', mountpoint='/', fstype='ext4', opts='rw,nosuid'),
+ sdiskpart(device='/dev/sda2', mountpoint='/home', fstype='ext4', opts='rw')]
+ >>>
+ >>> psutil.disk_usage('/')
+ sdiskusage(total=21378641920, used=4809781248, free=15482871808, percent=22.5)
+ >>>
+ >>> psutil.disk_io_counters(perdisk=False)
+ sdiskio(read_count=719566, write_count=1082197, read_bytes=18626220032, write_bytes=24081764352, read_time=5023392, write_time=63199568)
+ >>>
+
+Network
+=======
+
+.. code-block:: python
+
+ >>> psutil.net_io_counters(pernic=True)
+ {'eth0': netio(bytes_sent=485291293, bytes_recv=6004858642, packets_sent=3251564, packets_recv=4787798, errin=0, errout=0, dropin=0, dropout=0),
+ 'lo': netio(bytes_sent=2838627, bytes_recv=2838627, packets_sent=30567, packets_recv=30567, errin=0, errout=0, dropin=0, dropout=0)}
+ >>>
+ >>> psutil.net_connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED', pid=1254),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING', pid=2987),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED', pid=None),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT', pid=None)
+ ...]
+ >>>
+ >>> psutil.net_if_addrs()
+ {'lo': [snic(family=<AddressFamily.AF_INET: 2>, address='127.0.0.1', netmask='255.0.0.0', broadcast='127.0.0.1'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='::1', netmask='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='00:00:00:00:00:00', netmask=None, broadcast='00:00:00:00:00:00')],
+ 'wlan0': [snic(family=<AddressFamily.AF_INET: 2>, address='192.168.1.3', netmask='255.255.255.0', broadcast='192.168.1.255'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='fe80::c685:8ff:fe45:641%wlan0', netmask='ffff:ffff:ffff:ffff::', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='c4:85:08:45:06:41', netmask=None, broadcast='ff:ff:ff:ff:ff:ff')]}
+ >>>
+ >>> psutil.net_if_stats()
+ {'eth0': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_FULL: 2>, speed=100, mtu=1500),
+ 'lo': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_UNKNOWN: 0>, speed=0, mtu=65536)}
+
+Other system info
+=================
+
+.. code-block:: python
+
+ >>> psutil.users()
+ [user(name='giampaolo', terminal='pts/2', host='localhost', started=1340737536.0),
+ user(name='giampaolo', terminal='pts/3', host='localhost', started=1340737792.0)]
+ >>>
+ >>> psutil.boot_time()
+ 1365519115.0
+ >>>
+
+Process management
+==================
+
+.. code-block:: python
+
+ >>> import psutil
+ >>> psutil.pids()
+ [1, 2, 3, 4, 5, 6, 7, 46, 48, 50, 51, 178, 182, 222, 223, 224,
+ 268, 1215, 1216, 1220, 1221, 1243, 1244, 1301, 1601, 2237, 2355,
+ 2637, 2774, 3932, 4176, 4177, 4185, 4187, 4189, 4225, 4243, 4245,
+ 4263, 4282, 4306, 4311, 4312, 4313, 4314, 4337, 4339, 4357, 4358,
+ 4363, 4383, 4395, 4408, 4433, 4443, 4445, 4446, 5167, 5234, 5235,
+ 5252, 5318, 5424, 5644, 6987, 7054, 7055, 7071]
+ >>>
+ >>> p = psutil.Process(7055)
+ >>> p.name()
+ 'python'
+ >>> p.exe()
+ '/usr/bin/python'
+ >>> p.cwd()
+ '/home/giampaolo'
+ >>> p.cmdline()
+ ['/usr/bin/python', 'main.py']
+ >>>
+ >>> p.status()
+ 'running'
+ >>> p.username()
+ 'giampaolo'
+ >>> p.create_time()
+ 1267551141.5019531
+ >>> p.terminal()
+ '/dev/pts/0'
+ >>>
+ >>> p.uids()
+ puids(real=1000, effective=1000, saved=1000)
+ >>> p.gids()
+ pgids(real=1000, effective=1000, saved=1000)
+ >>>
+ >>> p.cpu_times()
+ pcputimes(user=1.02, system=0.31)
+ >>> p.cpu_percent(interval=1.0)
+ 12.1
+ >>> p.cpu_affinity()
+ [0, 1, 2, 3]
+ >>> p.cpu_affinity([0]) # set
+ >>>
+ >>> p.memory_percent()
+ 0.63423
+ >>>
+ >>> p.memory_info()
+ pmem(rss=7471104, vms=68513792)
+ >>> p.memory_info_ex()
+ extmem(rss=9662464, vms=49192960, shared=3612672, text=2564096, lib=0, data=5754880, dirty=0)
+ >>> p.memory_maps()
+ [pmmap_grouped(path='/lib/x86_64-linux-gnu/libutil-2.15.so', rss=16384, anonymous=8192, swap=0),
+ pmmap_grouped(path='/lib/x86_64-linux-gnu/libc-2.15.so', rss=6384, anonymous=15, swap=0),
+ pmmap_grouped(path='/lib/x86_64-linux-gnu/libcrypto.so.1.0.0', rss=34124, anonymous=1245, swap=0),
+ pmmap_grouped(path='[heap]', rss=54653, anonymous=8192, swap=0),
+ pmmap_grouped(path='[stack]', rss=1542, anonymous=166, swap=0),
+ ...]
+ >>>
+ >>> p.io_counters()
+ pio(read_count=478001, write_count=59371, read_bytes=700416, write_bytes=69632)
+ >>>
+ >>> p.open_files()
+ [popenfile(path='/home/giampaolo/svn/psutil/somefile', fd=3)]
+ >>>
+ >>> p.connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED'),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING'),
+ pconn(fd=119, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED'),
+ pconn(fd=123, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT')]
+ >>>
+ >>> p.num_threads()
+ 4
+ >>> p.num_fds()
+ 8
+ >>> p.threads()
+ [pthread(id=5234, user_time=22.5, system_time=9.2891),
+ pthread(id=5235, user_time=0.0, system_time=0.0),
+ pthread(id=5236, user_time=0.0, system_time=0.0),
+ pthread(id=5237, user_time=0.0707, system_time=1.1)]
+ >>>
+ >>> p.num_ctx_switches()
+ pctxsw(voluntary=78, involuntary=19)
+ >>>
+ >>> p.nice()
+ 0
+ >>> p.nice(10) # set
+ >>>
+ >>> p.ionice(psutil.IOPRIO_CLASS_IDLE) # IO priority (Win and Linux only)
+ >>> p.ionice()
+ pionice(ioclass=<IOPriority.IOPRIO_CLASS_IDLE: 3>, value=0)
+ >>>
+ >>> p.rlimit(psutil.RLIMIT_NOFILE, (5, 5)) # set resource limits (Linux only)
+ >>> p.rlimit(psutil.RLIMIT_NOFILE)
+ (5, 5)
+ >>>
+ >>> p.suspend()
+ >>> p.resume()
+ >>>
+ >>> p.terminate()
+ >>> p.wait(timeout=3)
+ 0
+ >>>
+ >>> psutil.test()
+ USER PID %CPU %MEM VSZ RSS TTY START TIME COMMAND
+ root 1 0.0 0.0 24584 2240 Jun17 00:00 init
+ root 2 0.0 0.0 0 0 Jun17 00:00 kthreadd
+ root 3 0.0 0.0 0 0 Jun17 00:05 ksoftirqd/0
+ ...
+ giampaolo 31475 0.0 0.0 20760 3024 /dev/pts/0 Jun19 00:00 python2.4
+ giampaolo 31721 0.0 2.2 773060 181896 00:04 10:30 chrome
+ root 31763 0.0 0.0 0 0 00:05 00:00 kworker/0:1
+ >>>
+
+Further process APIs
+====================
+
+.. code-block:: python
+
+ >>> for p in psutil.process_iter():
+ ... print(p)
+ ...
+ psutil.Process(pid=1, name='init')
+ psutil.Process(pid=2, name='kthreadd')
+ psutil.Process(pid=3, name='ksoftirqd/0')
+ ...
+ >>>
+ >>> def on_terminate(proc):
+ ... print("process {} terminated".format(proc))
+ ...
+ >>> # waits for multiple processes to terminate
+ >>> gone, alive = psutil.wait_procs(procs_list, 3, callback=on_terminate)
+ >>>
+
+======
+Donate
+======
+
+A lot of time and effort went into making psutil as it is right now.
+If you feel psutil is useful to you or your business and want to support its future development, please consider making a donation to me (`Giampaolo Rodola' <http://grodola.blogspot.com/p/about.html>`_).
+I only ask for a small donation, but of course I appreciate any amount.
+
+.. image:: http://www.paypal.com/en_US/i/btn/x-click-but04.gif
+ :target: https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=A9ZS7PKKRM3S8
+ :alt: Donate via PayPal
+
+Don't want to donate money? Then maybe you could `write me a recommendation on LinkedIn <http://www.linkedin.com/in/grodola>`_.
+
+============
+Mailing list
+============
+
+http://groups.google.com/group/psutil/
+
+========
+Timeline
+========
+
+- 2015-07-15: `psutil-3.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.1.1.tar.gz>`_
+- 2015-07-15: `psutil-3.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.1.0.tar.gz>`_
+- 2015-06-18: `psutil-3.0.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.0.1.tar.gz>`_
+- 2015-06-13: `psutil-3.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-3.0.0.tar.gz>`_
+- 2015-02-02: `psutil-2.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.2.1.tar.gz>`_
+- 2015-01-06: `psutil-2.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.2.0.tar.gz>`_
+- 2014-09-26: `psutil-2.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.3.tar.gz>`_
+- 2014-09-21: `psutil-2.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.2.tar.gz>`_
+- 2014-04-30: `psutil-2.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.1.tar.gz>`_
+- 2014-04-08: `psutil-2.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.1.0.tar.gz>`_
+- 2014-03-10: `psutil-2.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-2.0.0.tar.gz>`_
+- 2013-11-25: `psutil-1.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.2.1.tar.gz>`_
+- 2013-11-20: `psutil-1.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.2.0.tar.gz>`_
+- 2013-11-07: `psutil-1.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.3.tar.gz>`_
+- 2013-10-22: `psutil-1.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.2.tar.gz>`_
+- 2013-10-08: `psutil-1.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.1.tar.gz>`_
+- 2013-09-28: `psutil-1.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.1.0.tar.gz>`_
+- 2013-07-12: `psutil-1.0.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.0.1.tar.gz>`_
+- 2013-07-10: `psutil-1.0.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-1.0.0.tar.gz>`_
+- 2013-05-03: `psutil-0.7.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.7.1.tar.gz>`_
+- 2013-04-12: `psutil-0.7.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.7.0.tar.gz>`_
+- 2012-08-16: `psutil-0.6.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.6.1.tar.gz>`_
+- 2012-08-13: `psutil-0.6.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.6.0.tar.gz>`_
+- 2012-06-29: `psutil-0.5.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.5.1.tar.gz>`_
+- 2012-06-27: `psutil-0.5.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.5.0.tar.gz>`_
+- 2011-12-14: `psutil-0.4.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.4.1.tar.gz>`_
+- 2011-10-29: `psutil-0.4.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.4.0.tar.gz>`_
+- 2011-07-08: `psutil-0.3.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.3.0.tar.gz>`_
+- 2011-03-20: `psutil-0.2.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.2.1.tar.gz>`_
+- 2010-11-13: `psutil-0.2.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.2.0.tar.gz>`_
+- 2010-03-02: `psutil-0.1.3.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.3.tar.gz>`_
+- 2009-05-06: `psutil-0.1.2.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.2.tar.gz>`_
+- 2009-03-06: `psutil-0.1.1.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.1.tar.gz>`_
+- 2009-01-27: `psutil-0.1.0.tar.gz <https://pypi.python.org/packages/source/p/psutil/psutil-0.1.0.tar.gz>`_
diff --git a/python/psutil/TODO b/python/psutil/TODO
new file mode 100644
index 000000000..a5df809d0
--- /dev/null
+++ b/python/psutil/TODO
@@ -0,0 +1,167 @@
+TODO
+====
+
+A collection of ideas and notes about stuff to implement in future versions.
+"#NNN" occurrences refer to bug tracker issues at:
+https://github.com/giampaolo/psutil/issues
+
+
+HIGHER PRIORITY
+===============
+
+ * OpenBSD support.
+
+ * #371: CPU temperature (apparently OSX and Linux only; on Linux it requires
+ lm-sensors lib).
+
+ * #269: expose network ifaces RX/TX queues. This should probably go into
+ net_if_stats(). Figure out on what platforms this is supported:
+ Linux: yes
+ Others: ?
+
+ * Process.threads(): thread names; patch for OSX available at:
+ https://code.google.com/p/plcrashreporter/issues/detail?id=65
+
+ * Asynchronous psutil.Popen (see http://bugs.python.org/issue1191964)
+
+ * (Windows) fall back on using WMIC for Process methods returning AccessDenied
+
+ * #613: thread names.
+
+ * #604: emulate os.getloadavg() on Windows
+
+ * #269: NIC rx/tx queue.
+
+
+LOWER PRIORITY
+==============
+
+ * #355: Android support.
+
+ * #276: GNU/Hurd support.
+
+ * #429: NetBSD support.
+
+ * DragonFlyBSD support?
+
+ * AIX support?
+
+ * examples/taskmgr-gui.py (using tk).
+
+ * system-wide number of open file descriptors:
+ * https://jira.hyperic.com/browse/SIGAR-30
+ * http://www.netadmintools.com/part295.html
+
+ * Number of system threads.
+ * Windows: http://msdn.microsoft.com/en-us/library/windows/desktop/ms684824(v=vs.85).aspx
+
+ * #357: what CPU a process is on.
+
+ * Doc / wiki page comparing UNIX cli tools to their psutil equivalents.
+   Example:
+   df -a    -> psutil.disk_partitions()
+   lsof     -> psutil.Process.open_files() and psutil.Process.connections()
+   killall  -> (actual script)
+   tty      -> psutil.Process.terminal()
+   who      -> psutil.users()
+
+
+DEBATABLE
+=========
+
+ * psutil.proc_tree() something which obtains a {pid:ppid, ...} dict for
+ all running processes in one shot. This can be factored out from
+ Process.children() and exposed as a first class function.
+ PROS: on Windows we can take advantage of _psutil_windows.ppid_map()
+ which is faster than iterating over all pids and calling ppid().
+ CONS: examples/pstree.py shows this can be easily done in the user code
+ so maybe it's not worth the addition.
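+   A rough user-code equivalent, for reference (a sketch using only the
+   public API, not the proposed function):
+
+       >>> import psutil
+       >>> {p.pid: p.ppid() for p in psutil.process_iter()}
+       {1: 0, 2: 0, 3: 2, ...}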
+
+ * advanced cmdline interface exposing the whole API and providing different
+ kind of outputs (e.g. pprinted, colorized, json).
+
+ * [Linux]: process cgroups (http://en.wikipedia.org/wiki/Cgroups). They look
+ similar to prlimit() in terms of functionality but uglier (they should allow
+ limiting per-process network IO resources though, which is great). Needs
+ further reading.
+
+ * Should we expose OS constants (psutil.WINDOWS, psutil.OSX etc.)?
+
+ * Python 3.3 exposed several sched.h functions:
+   http://docs.python.org/dev/whatsnew/3.3.html#os
+   http://bugs.python.org/issue12655
+   http://docs.python.org/dev/library/os.html#interface-to-the-scheduler
+   It might be worth taking a look and figuring out whether we can include
+   some of those in psutil.
+   Also, we can probably reimplement wait_pid() on POSIX, which is currently
+   implemented as a busy loop.
+
+ * Certain systems provide CPU times about process children. On those systems
+ Process.cpu_times() might return a (user, system, user_children,
+ system_children) ntuple.
+ * Linux: /proc/{PID}/stat
+ * Solaris: pr_cutime and pr_cstime
+ * FreeBSD: none
+ * OSX: none
+ * Windows: none
+
+ * ...also, os.times() provides 'elapsed' times as well.
+
+ * ...also Linux provides guest_time and cguest_time.
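+
+   A sketch of the extended ntuple proposed above (illustrative only; field
+   names undecided):
+
+       pcputimes(user=1.02, system=0.31, user_children=0.05,
+                 system_children=0.01)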
+
+ * Enrich the exception class hierarchy on Python >= 3.3 / post PEP-3151 so that:
+ - NoSuchProcess inherits from ProcessLookupError
+ - AccessDenied inherits from PermissionError
+ - TimeoutExpired inherits from TimeoutError (debatable)
+ See: http://docs.python.org/3/library/exceptions.html#os-exceptions
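+
+   A sketch of what that hierarchy could look like (illustrative only;
+   psutil.Error is the existing common base class):
+
+       class Error(Exception): ...
+       class NoSuchProcess(Error, ProcessLookupError): ...
+       class AccessDenied(Error, PermissionError): ...
+       class TimeoutExpired(Error, TimeoutError): ...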
+
+ * Process.threads() might grow an extra "id" parameter so that it can be
+   used like this:
+
+ >>> p = psutil.Process(os.getpid())
+ >>> p.threads(id=psutil.current_thread_id())
+ thread(id=2539, user_time=0.03, system_time=0.02)
+ >>>
+
+   Note: this leads to questions such as "should we have a custom
+   NoSuchThread exception?". Also see issue #418.
+
+ Note #2: this would work with os.getpid() only.
+ psutil.current_thread_id() might be desirable as per issue #418 though.
+
+ * should psutil.TimeoutExpired exception have a 'msg' kwarg similar to
+ NoSuchProcess and AccessDenied? Not that we need it, but currently we
+ cannot raise a TimeoutExpired exception with a specific error string.
+
+ * process_iter() might grow an "attrs" parameter similar to
+   Process.as_dict(), invoke the necessary methods and include the results
+   in a "cache" attribute attached to the returned Process instances so that
+   one can avoid catching NSP and AccessDenied:
+       for p in process_iter(attrs=['cpu_percent']):
+           print(p.cache['cpu_percent'])
+   This also leads to questions about whether we should introduce a sorting
+   order.
+
+ * round Process.memory_percent() result?
+
+ * #550: number of threads per core.
+
+ * Have psutil.Process().cpu_affinity([]) be an alias for "all CPUs"?
+
+
+COMPATIBILITY BREAKAGE
+======================
+
+Removals (will likely happen in 2.2):
+
+ * (S) psutil.Process.nice (deprecated in 0.5.0)
+ * (S) get_process_list (deprecated in 0.5.0)
+ * (S) psutil.*mem* functions (deprecated in 0.3.0 and 0.6.0)
+ * (M) psutil.network_io_counters (deprecated in 1.0.0)
+ * (M) local_address and remote_address Process.connection() namedtuple fields
+ (deprecated in 1.0.0)
+
+
+REJECTED IDEAS
+==============
+
+STUB
diff --git a/python/psutil/docs/Makefile b/python/psutil/docs/Makefile
new file mode 100644
index 000000000..b23ab4ba8
--- /dev/null
+++ b/python/psutil/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/psutil.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/psutil.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/psutil"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/psutil"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/python/psutil/docs/README b/python/psutil/docs/README
new file mode 100644
index 000000000..3aaea8a5b
--- /dev/null
+++ b/python/psutil/docs/README
@@ -0,0 +1,15 @@
+About
+=====
+
+This directory contains the reStructuredText (reST) sources to the psutil
+documentation. You don't need to build them yourself; prebuilt versions are
+available at https://pythonhosted.org/psutil/.
+If you want to build them anyway, you need to install sphinx first:
+
+ $ pip install sphinx
+
+Then run:
+
+ $ make html
+
+You'll then have an HTML version of the doc at _build/html/index.html.
\ No newline at end of file
diff --git a/python/psutil/docs/_static/copybutton.js b/python/psutil/docs/_static/copybutton.js
new file mode 100644
index 000000000..5d82c672b
--- /dev/null
+++ b/python/psutil/docs/_static/copybutton.js
@@ -0,0 +1,57 @@
+$(document).ready(function() {
+ /* Add a [>>>] button on the top-right corner of code samples to hide
+ * the >>> and ... prompts and the output and thus make the code
+ * copyable. */
+ var div = $('.highlight-python .highlight,' +
+ '.highlight-python3 .highlight')
+ var pre = div.find('pre');
+
+ // get the styles from the current theme
+ pre.parent().parent().css('position', 'relative');
+ var hide_text = 'Hide the prompts and output';
+ var show_text = 'Show the prompts and output';
+ var border_width = pre.css('border-top-width');
+ var border_style = pre.css('border-top-style');
+ var border_color = pre.css('border-top-color');
+ var button_styles = {
+ 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0',
+ 'border-color': border_color, 'border-style': border_style,
+ 'border-width': border_width, 'color': border_color, 'font-size': '75%',
+ 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
+ 'border-radius': '0 3px 0 0'
+ }
+
+ // create and add the button to all the code blocks that contain >>>
+ div.each(function(index) {
+ var jthis = $(this);
+ if (jthis.find('.gp').length > 0) {
+ var button = $('<span class="copybutton">&gt;&gt;&gt;</span>');
+ button.css(button_styles)
+ button.attr('title', hide_text);
+ jthis.prepend(button);
+ }
+ // tracebacks (.gt) contain bare text elements that need to be
+ // wrapped in a span to work with .nextUntil() (see later)
+ jthis.find('pre:has(.gt)').contents().filter(function() {
+ return ((this.nodeType == 3) && (this.data.trim().length > 0));
+ }).wrap('<span>');
+ });
+
+ // define the behavior of the button when it's clicked
+ $('.copybutton').toggle(
+ function() {
+ var button = $(this);
+ button.parent().find('.go, .gp, .gt').hide();
+ button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden');
+ button.css('text-decoration', 'line-through');
+ button.attr('title', show_text);
+ },
+ function() {
+ var button = $(this);
+ button.parent().find('.go, .gp, .gt').show();
+ button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible');
+ button.css('text-decoration', 'none');
+ button.attr('title', hide_text);
+ });
+});
+
diff --git a/python/psutil/docs/_static/favicon.ico b/python/psutil/docs/_static/favicon.ico
new file mode 100644
index 000000000..c9efc5844
--- /dev/null
+++ b/python/psutil/docs/_static/favicon.ico
Binary files differ
diff --git a/python/psutil/docs/_static/logo.png b/python/psutil/docs/_static/logo.png
new file mode 100644
index 000000000..7d975ec9d
--- /dev/null
+++ b/python/psutil/docs/_static/logo.png
Binary files differ
diff --git a/python/psutil/docs/_static/sidebar.js b/python/psutil/docs/_static/sidebar.js
new file mode 100644
index 000000000..337696391
--- /dev/null
+++ b/python/psutil/docs/_static/sidebar.js
@@ -0,0 +1,161 @@
+/*
+ * sidebar.js
+ * ~~~~~~~~~~
+ *
+ * This script makes the Sphinx sidebar collapsible.
+ *
+ * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds in
+ * .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton used to
+ * collapse and expand the sidebar.
+ *
+ * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden and the
+ * width of the sidebar and the margin-left of the document are decreased.
+ * When the sidebar is expanded the opposite happens. This script saves a
+ * per-browser/per-session cookie used to remember the position of the sidebar
+ * among the pages. Once the browser is closed the cookie is deleted and the
+ * position reset to the default (expanded).
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+$(function() {
+ // global elements used by the functions.
+ // the 'sidebarbutton' element is defined as global after its
+ // creation, in the add_sidebar_button function
+ var bodywrapper = $('.bodywrapper');
+ var sidebar = $('.sphinxsidebar');
+ var sidebarwrapper = $('.sphinxsidebarwrapper');
+
+ // original margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar expanded
+ var bw_margin_expanded = bodywrapper.css('margin-left');
+ var ssb_width_expanded = sidebar.width();
+
+ // margin-left of the bodywrapper and width of the sidebar
+ // with the sidebar collapsed
+ var bw_margin_collapsed = '.8em';
+ var ssb_width_collapsed = '.8em';
+
+ // colors used by the current theme
+ var dark_color = '#AAAAAA';
+ var light_color = '#CCCCCC';
+
+ function sidebar_is_collapsed() {
+ return sidebarwrapper.is(':not(:visible)');
+ }
+
+ function toggle_sidebar() {
+ if (sidebar_is_collapsed())
+ expand_sidebar();
+ else
+ collapse_sidebar();
+ }
+
+ function collapse_sidebar() {
+ sidebarwrapper.hide();
+ sidebar.css('width', ssb_width_collapsed);
+ bodywrapper.css('margin-left', bw_margin_collapsed);
+ sidebarbutton.css({
+ 'margin-left': '0',
+ //'height': bodywrapper.height(),
+ 'height': sidebar.height(),
+ 'border-radius': '5px'
+ });
+ sidebarbutton.find('span').text('»');
+ sidebarbutton.attr('title', _('Expand sidebar'));
+ document.cookie = 'sidebar=collapsed';
+ }
+
+ function expand_sidebar() {
+ bodywrapper.css('margin-left', bw_margin_expanded);
+ sidebar.css('width', ssb_width_expanded);
+ sidebarwrapper.show();
+ sidebarbutton.css({
+ 'margin-left': ssb_width_expanded-12,
+ //'height': bodywrapper.height(),
+ 'height': sidebar.height(),
+ 'border-radius': '0 5px 5px 0'
+ });
+ sidebarbutton.find('span').text('«');
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ //sidebarwrapper.css({'padding-top':
+ // Math.max(window.pageYOffset - sidebarwrapper.offset().top, 10)});
+ document.cookie = 'sidebar=expanded';
+ }
+
+ function add_sidebar_button() {
+ sidebarwrapper.css({
+ 'float': 'left',
+ 'margin-right': '0',
+ 'width': ssb_width_expanded - 28
+ });
+ // create the button
+ sidebar.append(
+ '<div id="sidebarbutton"><span>&laquo;</span></div>'
+ );
+ var sidebarbutton = $('#sidebarbutton');
+ // find the height of the viewport to center the '<<' in the page
+ var viewport_height;
+ if (window.innerHeight)
+ viewport_height = window.innerHeight;
+ else
+ viewport_height = $(window).height();
+ var sidebar_offset = sidebar.offset().top;
+
+ var sidebar_height = sidebar.height();
+ //var sidebar_height = Math.max(bodywrapper.height(), sidebar.height());
+ sidebarbutton.find('span').css({
+ 'display': 'block',
+ 'margin-top': sidebar_height/2 - 10
+ //'margin-top': (viewport_height - sidebar.position().top - 20) / 2
+ //'position': 'fixed',
+ //'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10
+ });
+
+ sidebarbutton.click(toggle_sidebar);
+ sidebarbutton.attr('title', _('Collapse sidebar'));
+ sidebarbutton.css({
+ 'border-radius': '0 5px 5px 0',
+ 'color': '#444444',
+ 'background-color': '#CCCCCC',
+ 'font-size': '1.2em',
+ 'cursor': 'pointer',
+ 'height': sidebar_height,
+ 'padding-top': '1px',
+ 'padding-left': '1px',
+ 'margin-left': ssb_width_expanded - 12
+ });
+
+ sidebarbutton.hover(
+ function () {
+ $(this).css('background-color', dark_color);
+ },
+ function () {
+ $(this).css('background-color', light_color);
+ }
+ );
+ }
+
+ function set_position_from_cookie() {
+ if (!document.cookie)
+ return;
+ var items = document.cookie.split(';');
+ for(var k=0; k<items.length; k++) {
+ var key_val = items[k].split('=');
+ var key = key_val[0];
+ if (key == 'sidebar') {
+ var value = key_val[1];
+ if ((value == 'collapsed') && (!sidebar_is_collapsed()))
+ collapse_sidebar();
+ else if ((value == 'expanded') && (sidebar_is_collapsed()))
+ expand_sidebar();
+ }
+ }
+ }
+
+ add_sidebar_button();
+ var sidebarbutton = $('#sidebarbutton');
+ set_position_from_cookie();
+});
diff --git a/python/psutil/docs/_template/globaltoc.html b/python/psutil/docs/_template/globaltoc.html
new file mode 100644
index 000000000..f5fbb406c
--- /dev/null
+++ b/python/psutil/docs/_template/globaltoc.html
@@ -0,0 +1,12 @@
+{#
+ basic/globaltoc.html
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Sphinx sidebar template: global table of contents.
+
+ :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+#}
+<h3>{{ _('Manual') }}</h3>
+{{ toctree() }}
+<a href="{{ pathto(master_doc) }}">Back to Welcome</a>
diff --git a/python/psutil/docs/_template/indexcontent.html b/python/psutil/docs/_template/indexcontent.html
new file mode 100644
index 000000000..dd5e7249a
--- /dev/null
+++ b/python/psutil/docs/_template/indexcontent.html
@@ -0,0 +1,4 @@
+{% extends "defindex.html" %}
+{% block tables %}
+
+{% endblock %}
diff --git a/python/psutil/docs/_template/indexsidebar.html b/python/psutil/docs/_template/indexsidebar.html
new file mode 100644
index 000000000..903675d10
--- /dev/null
+++ b/python/psutil/docs/_template/indexsidebar.html
@@ -0,0 +1,8 @@
+<h3>Useful links</h3>
+<ul>
+ <li><a href="https://github.com/giampaolo/psutil">Github project</a></li>
+ <li><a href="http://grodola.blogspot.com/search/label/psutil">Blog</a></li>
+ <li><a href="https://pypi.python.org/pypi?:action=display&name=psutil#downloads">Download</a></li>
+ <li><a href="https://github.com/giampaolo/psutil/issues">Issues</a></li>
+ <li><a href="http://groups.google.com/group/psutil/topics">Forum</a></li>
+</ul>
diff --git a/python/psutil/docs/_template/page.html b/python/psutil/docs/_template/page.html
new file mode 100644
index 000000000..04b47b415
--- /dev/null
+++ b/python/psutil/docs/_template/page.html
@@ -0,0 +1,66 @@
+{% extends "!page.html" %}
+{% block extrahead %}
+{{ super() }}
+{% if not embedded %}<script type="text/javascript" src="{{ pathto('_static/copybutton.js', 1) }}"></script>{% endif %}
+<script type="text/javascript">
+
+ // Store editor pop-up help state in localStorage
+ // so it does not re-pop-up itself between page loads.
+ // Do not even pretend to support IE gracefully.
+ (function($) {
+
+ $(document).ready(function() {
+ var box = $("#editor-trap");
+ var klass = "toggled";
+ var storageKey = "toggled";
+
+ function toggle() {
+ box.toggleClass(klass);
+ // Store the toggle status in local storage as "has value string" or null
+ window.localStorage.setItem(storageKey, box.hasClass(klass) ? "toggled" : "not-toggled");
+ }
+
+ box.click(toggle);
+
+ // Check the persistent state of the editor pop-up
+ // Note that localStorage does not necessarily support boolean values (ugh!)
+ // http://stackoverflow.com/questions/3263161/cannot-set-boolean-values-in-localstorage
+ var v = window.localStorage.getItem(storageKey);
+ if(v == "toggled" || !v) {
+ box.addClass(klass);
+ }
+
+ });
+
+ })(jQuery);
+</script>
+<script type="text/javascript">
+
+ var _gaq = _gaq || [];
+ _gaq.push(['_setAccount', 'UA-2097050-4']);
+ _gaq.push(['_trackPageview']);
+
+ (function() {
+ var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
+ ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
+ var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
+ })();
+
+</script>
+{% endblock %}
+
+{% block rootrellink %}
+ <li><a href="https://github.com/giampaolo/psutil/"><img src="{{ pathto('_static/logo.png', 1) }}" style="height: 30px; vertical-align: middle; padding-right: 1em;" /> Project Homepage</a>{{ reldelim1 }}</li>
+ <li><a href="{{ pathto('index') }}">{{ shorttitle }}</a>{{ reldelim1 }}</li>
+{% endblock %}
+
+
+{% block footer %}
+<div class="footer">
+ &copy; Copyright {{ copyright|e }}.
+ <br />
+ Last updated on {{ last_updated|e }}.
+ <br />
+ Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> {{ sphinx_version|e }}.
+</div>
+{% endblock %}
\ No newline at end of file
diff --git a/python/psutil/docs/_themes/pydoctheme/static/pydoctheme.css b/python/psutil/docs/_themes/pydoctheme/static/pydoctheme.css
new file mode 100644
index 000000000..4196e5582
--- /dev/null
+++ b/python/psutil/docs/_themes/pydoctheme/static/pydoctheme.css
@@ -0,0 +1,187 @@
+@import url("default.css");
+
+body {
+ background-color: white;
+ margin-left: 1em;
+ margin-right: 1em;
+}
+
+div.related {
+ margin-bottom: 1.2em;
+ padding: 0.5em 0;
+ border-top: 1px solid #ccc;
+ margin-top: 0.5em;
+}
+
+div.related a:hover {
+ color: #0095C4;
+}
+
+div.related:first-child {
+ border-top: 0;
+ padding-top: 0;
+ border-bottom: 1px solid #ccc;
+}
+
+div.sphinxsidebar {
+ background-color: #eeeeee;
+ border-radius: 5px;
+ line-height: 130%;
+ font-size: smaller;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+ margin-top: 1.5em;
+}
+
+div.sphinxsidebarwrapper > h3:first-child {
+ margin-top: 0.2em;
+}
+
+div.sphinxsidebarwrapper > ul > li > ul > li {
+ margin-bottom: 0.4em;
+}
+
+div.sphinxsidebar a:hover {
+ color: #0095C4;
+}
+
+div.sphinxsidebar input {
+ font-family: 'Lucida Grande','Lucida Sans','DejaVu Sans',Arial,sans-serif;
+ border: 1px solid #999999;
+ font-size: smaller;
+ border-radius: 3px;
+}
+
+div.sphinxsidebar input[type=text] {
+ max-width: 150px;
+}
+
+div.body {
+ padding: 0 0 0 1.2em;
+}
+
+div.body p {
+ line-height: 140%;
+}
+
+div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 {
+ margin: 0;
+ border: 0;
+ padding: 0.3em 0;
+}
+
+div.body hr {
+ border: 0;
+ background-color: #ccc;
+ height: 1px;
+}
+
+div.body pre {
+ border-radius: 3px;
+ border: 1px solid #ac9;
+}
+
+div.body div.admonition, div.body div.impl-detail {
+ border-radius: 3px;
+}
+
+div.body div.impl-detail > p {
+ margin: 0;
+}
+
+div.body div.seealso {
+ border: 1px solid #dddd66;
+}
+
+div.body a {
+ color: #00608f;
+}
+
+div.body a:visited {
+ color: #30306f;
+}
+
+div.body a:hover {
+ color: #00B0E4;
+}
+
+tt, pre {
+ font-family: monospace, sans-serif;
+ font-size: 96.5%;
+}
+
+div.body tt {
+ border-radius: 3px;
+}
+
+div.body tt.descname {
+ font-size: 120%;
+}
+
+div.body tt.xref, div.body a tt {
+ font-weight: normal;
+}
+
+p.deprecated {
+ border-radius: 3px;
+}
+
+table.docutils {
+ border: 1px solid #ddd;
+ min-width: 20%;
+ border-radius: 3px;
+ margin-top: 10px;
+ margin-bottom: 10px;
+}
+
+table.docutils td, table.docutils th {
+ border: 1px solid #ddd !important;
+ border-radius: 3px;
+}
+
+table p, table li {
+ text-align: left !important;
+}
+
+table.docutils th {
+ background-color: #eee;
+ padding: 0.3em 0.5em;
+}
+
+table.docutils td {
+ background-color: white;
+ padding: 0.3em 0.5em;
+}
+
+table.footnote, table.footnote td {
+ border: 0 !important;
+}
+
+div.footer {
+ line-height: 150%;
+ margin-top: -2em;
+ text-align: right;
+ width: auto;
+ margin-right: 10px;
+}
+
+div.footer a:hover {
+ color: #0095C4;
+}
+
+div.body h1,
+div.body h2,
+div.body h3 {
+ background-color: #EAEAEA;
+ border-bottom: 1px solid #CCC;
+ padding-top: 2px;
+ padding-bottom: 2px;
+ padding-left: 5px;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+div.body h2 {
+ padding-left:10px;
+}
diff --git a/python/psutil/docs/_themes/pydoctheme/theme.conf b/python/psutil/docs/_themes/pydoctheme/theme.conf
new file mode 100644
index 000000000..95b97e536
--- /dev/null
+++ b/python/psutil/docs/_themes/pydoctheme/theme.conf
@@ -0,0 +1,23 @@
+[theme]
+inherit = default
+stylesheet = pydoctheme.css
+pygments_style = sphinx
+
+[options]
+bodyfont = 'Lucida Grande', 'Lucida Sans', 'DejaVu Sans', Arial, sans-serif
+headfont = 'Lucida Grande', 'Lucida Sans', 'DejaVu Sans', Arial, sans-serif
+footerbgcolor = white
+footertextcolor = #555555
+relbarbgcolor = white
+relbartextcolor = #666666
+relbarlinkcolor = #444444
+sidebarbgcolor = white
+sidebartextcolor = #444444
+sidebarlinkcolor = #444444
+bgcolor = white
+textcolor = #222222
+linkcolor = #0090c0
+visitedlinkcolor = #00608f
+headtextcolor = #1a1a1a
+headbgcolor = white
+headlinkcolor = #aaaaaa
diff --git a/python/psutil/docs/conf.py b/python/psutil/docs/conf.py
new file mode 100644
index 000000000..9fa163b65
--- /dev/null
+++ b/python/psutil/docs/conf.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+#
+# psutil documentation build configuration file, created by
+# sphinx-quickstart.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import datetime
+import os
+
+
+PROJECT_NAME = "psutil"
+AUTHOR = "Giampaolo Rodola'"
+THIS_YEAR = str(datetime.datetime.now().year)
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_version():
+ INIT = os.path.abspath(os.path.join(HERE, '../psutil/__init__.py'))
+ with open(INIT, 'r') as f:
+ for line in f:
+ if line.startswith('__version__'):
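+ # assumes the version line looks like: __version__ = "3.0.0" (a quoted x.y.z string)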
+ ret = eval(line.strip().split(' = ')[1])
+ assert ret.count('.') == 2, ret
+ for num in ret.split('.'):
+ assert num.isdigit(), ret
+ return ret
+ else:
+ raise ValueError("couldn't find version string")
+
+VERSION = get_version()
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinx.ext.autodoc',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.pngmath',
+ 'sphinx.ext.viewcode',
+ 'sphinx.ext.intersphinx']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_template']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = PROJECT_NAME
+copyright = '2009-%s, %s' % (THIS_YEAR, AUTHOR)
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = VERSION
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = True
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+autodoc_docstring_signature = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme = 'pydoctheme'
+html_theme_options = {'collapsiblesidebar': True}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ["_themes"]
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+html_title = "{project} {version} documentation".format(**locals())
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = 'logo.png'
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = '_static/favicon.ico'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {
+ 'index': 'indexsidebar.html',
+ '**': ['globaltoc.html',
+ 'relations.html',
+ 'sourcelink.html',
+ 'searchbox.html']
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {
+# 'index': 'indexcontent.html',
+# }
+
+# If false, no module index is generated.
+html_domain_indices = False
+
+# If false, no index is generated.
+html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = '%s-doc' % PROJECT_NAME
+
+# -- Options for LaTeX output ------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+# latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+# latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass
+# [howto/manual]).
+latex_documents = [
+ ('index', '%s.tex' % PROJECT_NAME,
+ '%s documentation' % PROJECT_NAME, AUTHOR),
+]
+
+# The name of an image file (relative to this directory) to place at
+# the top of the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+# latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', PROJECT_NAME, '%s documentation' % PROJECT_NAME, [AUTHOR], 1)
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
diff --git a/python/psutil/docs/index.rst b/python/psutil/docs/index.rst
new file mode 100644
index 000000000..443019226
--- /dev/null
+++ b/python/psutil/docs/index.rst
@@ -0,0 +1,1400 @@
+.. module:: psutil
+ :synopsis: psutil module
+.. moduleauthor:: Giampaolo Rodola' <grodola@gmail.com>
+
+.. warning::
+
+ This documentation refers to the new 2.X version of psutil.
+ Instructions on how to port existing 1.2.1 code are available
+ `here <http://grodola.blogspot.com/2014/01/psutil-20-porting.html>`__.
+ Old 1.2.1 documentation is still available
+ `here <https://code.google.com/p/psutil/wiki/Documentation>`__.
+
+psutil documentation
+====================
+
+Quick links
+-----------
+
+* `Home page <https://github.com/giampaolo/psutil>`__
+* `Blog <http://grodola.blogspot.com/search/label/psutil>`__
+* `Forum <http://groups.google.com/group/psutil/topics>`__
+* `Download <https://pypi.python.org/pypi?:action=display&name=psutil#downloads>`__
+* `Installation <https://github.com/giampaolo/psutil/blob/master/INSTALL.rst>`_
+* `Development guide <https://github.com/giampaolo/psutil/blob/master/DEVGUIDE.rst>`_
+* `What's new <https://github.com/giampaolo/psutil/blob/master/HISTORY.rst>`__
+
+About
+-----
+
+From the project's home page:
+
+ psutil (python system and process utilities) is a cross-platform library for
+ retrieving information on running
+ **processes** and **system utilization** (CPU, memory, disks, network) in
+ **Python**.
+ It is useful mainly for **system monitoring**, **profiling** and **limiting
+ process resources** and **management of running processes**.
+ It implements many functionalities offered by command line tools
+ such as: *ps, top, lsof, netstat, ifconfig, who, df, kill, free, nice,
+ ionice, iostat, iotop, uptime, pidof, tty, taskset, pmap*.
+ It currently supports **Linux, Windows, OSX, FreeBSD** and **Sun Solaris**,
+ both **32-bit** and **64-bit** architectures, with Python versions from
+ **2.6 to 3.4** (users of Python 2.4 and 2.5 may use `2.1.3 <https://pypi.python.org/pypi?name=psutil&version=2.1.3&:action=files>`__ version).
+ `PyPy <http://pypy.org/>`__ is also known to work.
+
+The psutil documentation you're reading is distributed as a single HTML page.
+
+System related functions
+========================
+
+CPU
+---
+
+.. function:: cpu_times(percpu=False)
+
+ Return system CPU times as a namedtuple.
+ Every attribute represents the seconds the CPU has spent in the given mode.
+ The availability of these attributes varies depending on the platform:
+
+ - **user**
+ - **system**
+ - **idle**
+ - **nice** *(UNIX)*
+ - **iowait** *(Linux)*
+ - **irq** *(Linux, FreeBSD)*
+ - **softirq** *(Linux)*
+ - **steal** *(Linux 2.6.11+)*
+ - **guest** *(Linux 2.6.24+)*
+ - **guest_nice** *(Linux 3.2.0+)*
+
+ When *percpu* is ``True`` return a list of namedtuples for each logical CPU
+ on the system.
+ First element of the list refers to first CPU, second element to second CPU
+ and so on.
+ The order of the list is consistent across calls.
+ Example output on Linux:
+
+ >>> import psutil
+ >>> psutil.cpu_times()
+ scputimes(user=17411.7, nice=77.99, system=3797.02, idle=51266.57, iowait=732.58, irq=0.01, softirq=142.43, steal=0.0, guest=0.0, guest_nice=0.0)
+
+.. function:: cpu_percent(interval=None, percpu=False)
+
+ Return a float representing the current system-wide CPU utilization as a
+ percentage. When *interval* is > ``0.0`` compares system CPU times elapsed
+ before and after the interval (blocking).
+ When *interval* is ``0.0`` or ``None`` compares system CPU times elapsed
+ since last call or module import, returning immediately.
+ That means the first time this is called it will return a meaningless ``0.0``
+ value which you are supposed to ignore.
+ In this case it is recommended for accuracy that this function be called with at
+ least ``0.1`` seconds between calls.
+ When *percpu* is ``True`` returns a list of floats representing the
+ utilization as a percentage for each CPU.
+ First element of the list refers to first CPU, second element to second CPU
+ and so on. The order of the list is consistent across calls.
+
+ >>> import psutil
+ >>> # blocking
+ >>> psutil.cpu_percent(interval=1)
+ 2.0
+ >>> # non-blocking (percentage since last call)
+ >>> psutil.cpu_percent(interval=None)
+ 2.9
+ >>> # blocking, per-cpu
+ >>> psutil.cpu_percent(interval=1, percpu=True)
+ [2.0, 1.0]
+ >>>
+
+ .. warning::
+
+ the first time this function is called with *interval* = ``0.0`` or ``None``
+ it will return a meaningless ``0.0`` value which you are supposed to
+ ignore.
+
+.. function:: cpu_times_percent(interval=None, percpu=False)
+
+ Same as :func:`cpu_percent()` but provides utilization percentages for each
+ specific CPU time as is returned by
+ :func:`psutil.cpu_times(percpu=True)<cpu_times()>`.
+ *interval* and
+ *percpu* arguments have the same meaning as in :func:`cpu_percent()`.
+
+ .. warning::
+
+ the first time this function is called with *interval* = ``0.0`` or
+ ``None`` it will return a meaningless ``0.0`` value which you are supposed
+ to ignore.
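+
+ A minimal illustrative example (field availability and exact values vary by
+ platform):
+
+ >>> import psutil
+ >>> psutil.cpu_times_percent(interval=1)
+ scputimes(user=1.5, nice=0.0, system=0.6, idle=97.9, iowait=0.0, irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)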
+
+.. function:: cpu_count(logical=True)
+
+ Return the number of logical CPUs in the system (same as
+ `os.cpu_count() <http://docs.python.org/3/library/os.html#os.cpu_count>`__
+ in Python 3.4).
+ If *logical* is ``False`` return the number of physical cores only (hyper
+ thread CPUs are excluded). Return ``None`` if undetermined.
+
+ >>> import psutil
+ >>> psutil.cpu_count()
+ 4
+ >>> psutil.cpu_count(logical=False)
+ 2
+ >>>
+
+Memory
+------
+
+.. function:: virtual_memory()
+
+ Return statistics about system memory usage as a namedtuple including the
+ following fields, expressed in bytes:
+
+ - **total**: total physical memory available.
+ - **available**: the actual amount of available memory that can be given
+ instantly to processes that request more memory; this is
+ calculated by summing different memory values depending on the platform
+ (e.g. free + buffers + cached on Linux) and it is supposed to be used to
+ monitor actual memory usage in a cross platform fashion.
+ - **percent**: the percentage usage calculated as
+ ``(total - available) / total * 100``.
+ - **used**: memory used, calculated differently depending on the platform and
+ designed for informational purposes only.
+ - **free**: memory not being used at all (zeroed) that is readily available;
+ note that this doesn't reflect the actual memory available (use 'available'
+ instead).
+
+ Platform-specific fields:
+
+ - **active**: (UNIX): memory currently in use or very recently used, and so
+ it is in RAM.
+ - **inactive**: (UNIX): memory that is marked as not used.
+ - **buffers**: (Linux, BSD): cache for things like file system metadata.
+ - **cached**: (Linux, BSD): cache for various things.
+ - **wired**: (BSD, OSX): memory that is marked to always stay in RAM. It is
+ never moved to disk.
+ - **shared**: (BSD): memory that may be simultaneously accessed by multiple
+ processes.
+
+ The sum of **used** and **available** does not necessarily equal **total**.
+ On Windows **available** and **free** are the same.
+ See `examples/meminfo.py <https://github.com/giampaolo/psutil/blob/master/examples/meminfo.py>`__
+ script for an example of how to convert bytes into a human readable form.
+
+ >>> import psutil
+ >>> mem = psutil.virtual_memory()
+ >>> mem
+ svmem(total=8374149120L, available=1247768576L, percent=85.1, used=8246628352L, free=127520768L, active=3208777728, inactive=1133408256, buffers=342413312L, cached=777834496)
+ >>>
+ >>> THRESHOLD = 100 * 1024 * 1024 # 100MB
+ >>> if mem.available <= THRESHOLD:
+ ... print("warning")
+ ...
+ >>>
+
+
+.. function:: swap_memory()
+
+ Return system swap memory statistics as a namedtuple including the following
+ fields:
+
+ * **total**: total swap memory in bytes
+ * **used**: used swap memory in bytes
+ * **free**: free swap memory in bytes
+ * **percent**: the percentage usage calculated as ``used / total * 100``
+ * **sin**: the number of bytes the system has swapped in from disk
+ (cumulative)
+ * **sout**: the number of bytes the system has swapped out from disk
+ (cumulative)
+
+ **sin** and **sout** on Windows are meaningless and are always set to ``0``.
+ See `examples/meminfo.py <https://github.com/giampaolo/psutil/blob/master/examples/meminfo.py>`__
+ script for an example of how to convert bytes into a human readable form.
+
+ >>> import psutil
+ >>> psutil.swap_memory()
+ sswap(total=2097147904L, used=886620160L, free=1210527744L, percent=42.3, sin=1050411008, sout=1906720768)
+
+Disks
+-----
+
+.. function:: disk_partitions(all=False)
+
+ Return all mounted disk partitions as a list of namedtuples including device,
+ mount point and filesystem type, similarly to the "df" command on UNIX. If the
+ *all* parameter is ``False`` return physical devices only (e.g. hard disks, cd-rom
+ drives, USB keys) and ignore all others (e.g. memory partitions such as
+ `/dev/shm <http://www.cyberciti.biz/tips/what-is-devshm-and-its-practical-usage.html>`__).
+ Namedtuple's **fstype** field is a string which varies depending on the
+ platform.
+ On Linux it can be one of the values found in /proc/filesystems (e.g.
+ ``'ext3'`` for an ext3 hard drive or ``'iso9660'`` for the CD-ROM drive).
+ On Windows it is determined via
+ `GetDriveType <http://msdn.microsoft.com/en-us/library/aa364939(v=vs.85).aspx>`__
+ and can be either ``"removable"``, ``"fixed"``, ``"remote"``, ``"cdrom"``,
+ ``"unmounted"`` or ``"ramdisk"``. On OSX and FreeBSD it is retrieved via
+ `getfsstat(2) <http://www.manpagez.com/man/2/getfsstat/>`__. See
+ `disk_usage.py <https://github.com/giampaolo/psutil/blob/master/examples/disk_usage.py>`__
+ script providing an example usage.
+
+ >>> import psutil
+ >>> psutil.disk_partitions()
+ [sdiskpart(device='/dev/sda3', mountpoint='/', fstype='ext4', opts='rw,errors=remount-ro'),
+ sdiskpart(device='/dev/sda7', mountpoint='/home', fstype='ext4', opts='rw')]
+
+.. function:: disk_usage(path)
+
+ Return disk usage statistics about the given *path* as a namedtuple including
+ **total**, **used** and **free** space expressed in bytes, plus the
+ **percentage** usage.
+ `OSError <http://docs.python.org/3/library/exceptions.html#OSError>`__ is
+ raised if *path* does not exist. See
+ `examples/disk_usage.py <https://github.com/giampaolo/psutil/blob/master/examples/disk_usage.py>`__
+ script providing an example usage. Starting from
+ `Python 3.3 <http://bugs.python.org/issue12442>`__ this is also
+ available as
+ `shutil.disk_usage() <http://docs.python.org/3/library/shutil.html#shutil.disk_usage>`__.
+
+ >>> import psutil
+ >>> psutil.disk_usage('/')
+ sdiskusage(total=21378641920, used=4809781248, free=15482871808, percent=22.5)
+
+.. function:: disk_io_counters(perdisk=False)
+
+ Return system-wide disk I/O statistics as a namedtuple including the
+ following fields:
+
+ - **read_count**: number of reads
+ - **write_count**: number of writes
+ - **read_bytes**: number of bytes read
+ - **write_bytes**: number of bytes written
+ - **read_time**: time spent reading from disk (in milliseconds)
+ - **write_time**: time spent writing to disk (in milliseconds)
+
+ If *perdisk* is ``True`` return the same information for every physical disk
+ installed on the system as a dictionary with partition names as the keys and
+ the namedtuple described above as the values.
+ See `examples/iotop.py <https://github.com/giampaolo/psutil/blob/master/examples/iotop.py>`__
+ for an example application.
+
+ >>> import psutil
+ >>> psutil.disk_io_counters()
+ sdiskio(read_count=8141, write_count=2431, read_bytes=290203, write_bytes=537676, read_time=5868, write_time=94922)
+ >>>
+ >>> psutil.disk_io_counters(perdisk=True)
+ {'sda1': sdiskio(read_count=920, write_count=1, read_bytes=2933248, write_bytes=512, read_time=6016, write_time=4),
+ 'sda2': sdiskio(read_count=18707, write_count=8830, read_bytes=6060, write_bytes=3443, read_time=24585, write_time=1572),
+ 'sdb1': sdiskio(read_count=161, write_count=0, read_bytes=786432, write_bytes=0, read_time=44, write_time=0)}
+
+Network
+-------
+
+.. function:: net_io_counters(pernic=False)
+
+ Return system-wide network I/O statistics as a namedtuple including the
+ following attributes:
+
+ - **bytes_sent**: number of bytes sent
+ - **bytes_recv**: number of bytes received
+ - **packets_sent**: number of packets sent
+ - **packets_recv**: number of packets received
+ - **errin**: total number of errors while receiving
+ - **errout**: total number of errors while sending
+ - **dropin**: total number of incoming packets which were dropped
+ - **dropout**: total number of outgoing packets which were dropped (always 0
+ on OSX and BSD)
+
+ If *pernic* is ``True`` return the same information for every network
+ interface installed on the system as a dictionary with network interface
+ names as the keys and the namedtuple described above as the values.
+ See `examples/nettop.py <https://github.com/giampaolo/psutil/blob/master/examples/nettop.py>`__
+ for an example application.
+
+ >>> import psutil
+ >>> psutil.net_io_counters()
+ snetio(bytes_sent=14508483, bytes_recv=62749361, packets_sent=84311, packets_recv=94888, errin=0, errout=0, dropin=0, dropout=0)
+ >>>
+ >>> psutil.net_io_counters(pernic=True)
+ {'lo': snetio(bytes_sent=547971, bytes_recv=547971, packets_sent=5075, packets_recv=5075, errin=0, errout=0, dropin=0, dropout=0),
+ 'wlan0': snetio(bytes_sent=13921765, bytes_recv=62162574, packets_sent=79097, packets_recv=89648, errin=0, errout=0, dropin=0, dropout=0)}
+
+.. function:: net_connections(kind='inet')
+
+ Return system-wide socket connections as a list of namedtuples.
+ Every namedtuple provides 7 attributes:
+
+ - **fd**: the socket file descriptor, if retrievable, else ``-1``.
+ If the connection refers to the current process this may be passed to
+ `socket.fromfd() <http://docs.python.org/library/socket.html#socket.fromfd>`__
+ to obtain a usable socket object.
+ - **family**: the address family, either `AF_INET
+ <http://docs.python.org//library/socket.html#socket.AF_INET>`__,
+ `AF_INET6 <http://docs.python.org//library/socket.html#socket.AF_INET6>`__
+ or `AF_UNIX <http://docs.python.org//library/socket.html#socket.AF_UNIX>`__.
+ - **type**: the address type, either `SOCK_STREAM
+ <http://docs.python.org//library/socket.html#socket.SOCK_STREAM>`__ or
+ `SOCK_DGRAM
+ <http://docs.python.org//library/socket.html#socket.SOCK_DGRAM>`__.
+ - **laddr**: the local address as a ``(ip, port)`` tuple or a ``path``
+ in case of AF_UNIX sockets.
+ - **raddr**: the remote address as a ``(ip, port)`` tuple or an absolute
+ ``path`` in case of UNIX sockets.
+ When the remote endpoint is not connected you'll get an empty tuple
+ (AF_INET*) or ``None`` (AF_UNIX).
+ On Linux AF_UNIX sockets will always have this set to ``None``.
+ - **status**: represents the status of a TCP connection. The return value
+ is one of the :data:`psutil.CONN_* <psutil.CONN_ESTABLISHED>` constants
+ (a string).
+ For UDP and UNIX sockets this is always going to be
+ :const:`psutil.CONN_NONE`.
+ - **pid**: the PID of the process which opened the socket, if retrievable,
+ else ``None``. On some platforms (e.g. Linux) the availability of this
+ field changes depending on process privileges (root is needed).
+
+ The *kind* parameter is a string which filters for connections that fit the
+ following criteria:
+
+ .. table::
+
+ +----------------+-----------------------------------------------------+
+ | **Kind value** | **Connections using** |
+ +================+=====================================================+
+ | "inet" | IPv4 and IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "inet4" | IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "inet6" | IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "tcp" | TCP |
+ +----------------+-----------------------------------------------------+
+ | "tcp4" | TCP over IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "tcp6" | TCP over IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "udp" | UDP |
+ +----------------+-----------------------------------------------------+
+ | "udp4" | UDP over IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "udp6" | UDP over IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "unix" | UNIX socket (both UDP and TCP protocols) |
+ +----------------+-----------------------------------------------------+
+ | "all" | the sum of all the possible families and protocols |
+ +----------------+-----------------------------------------------------+
+
+ On OSX this function requires root privileges.
+ To get per-process connections use :meth:`Process.connections`.
+ Also, see
+ `netstat.py sample script <https://github.com/giampaolo/psutil/blob/master/examples/netstat.py>`__.
+ Example:
+
+ >>> import psutil
+ >>> psutil.net_connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED', pid=1254),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING', pid=2987),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED', pid=None),
+ pconn(fd=-1, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT', pid=None)
+ ...]
+
+ .. note:: (OSX) :class:`psutil.AccessDenied` is always raised unless running
+ as root (lsof does the same).
+ .. note:: (Solaris) UNIX sockets are not supported.
+
+ .. versionadded:: 2.1.0
+
+.. function:: net_if_addrs()
+
+ Return the addresses associated to each NIC (network interface card)
+ installed on the system as a dictionary whose keys are the NIC names and
+ whose values are lists of namedtuples, one for each address assigned to the NIC.
+ Each namedtuple includes 4 fields:
+
+ - **family**
+ - **address**
+ - **netmask**
+ - **broadcast**
+
+ *family* can be either
+ `AF_INET <http://docs.python.org//library/socket.html#socket.AF_INET>`__,
+ `AF_INET6 <http://docs.python.org//library/socket.html#socket.AF_INET6>`__
+ or :const:`psutil.AF_LINK`, which refers to a MAC address.
+ *address* is the primary address, *netmask* and *broadcast* may be ``None``.
+ Example::
+
+ >>> import psutil
+ >>> psutil.net_if_addrs()
+ {'lo': [snic(family=<AddressFamily.AF_INET: 2>, address='127.0.0.1', netmask='255.0.0.0', broadcast='127.0.0.1'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='::1', netmask='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='00:00:00:00:00:00', netmask=None, broadcast='00:00:00:00:00:00')],
+ 'wlan0': [snic(family=<AddressFamily.AF_INET: 2>, address='192.168.1.3', netmask='255.255.255.0', broadcast='192.168.1.255'),
+ snic(family=<AddressFamily.AF_INET6: 10>, address='fe80::c685:8ff:fe45:641%wlan0', netmask='ffff:ffff:ffff:ffff::', broadcast=None),
+ snic(family=<AddressFamily.AF_LINK: 17>, address='c4:85:08:45:06:41', netmask=None, broadcast='ff:ff:ff:ff:ff:ff')]}
+ >>>
+
+ See also `examples/ifconfig.py <https://github.com/giampaolo/psutil/blob/master/examples/ifconfig.py>`__
+ for an example application.
+
+ .. note:: if you're interested in other families (e.g. AF_BLUETOOTH) you can
+ use the more powerful `netifaces <https://pypi.python.org/pypi/netifaces/>`__
+ extension.
+
+ .. note:: you can have more than one address of the same family associated
+ with each interface (that's why dict values are lists).
+
+ *New in 3.0.0*
+
+.. function:: net_if_stats()
+
+ Return information about each NIC (network interface card) installed on the
+ system as a dictionary whose keys are the NIC names and whose values are
+ namedtuples with the following fields:
+
+ - **isup**
+ - **duplex**
+ - **speed**
+ - **mtu**
+
+ *isup* is a boolean indicating whether the NIC is up and running, *duplex*
+ can be either :const:`NIC_DUPLEX_FULL`, :const:`NIC_DUPLEX_HALF` or
+ :const:`NIC_DUPLEX_UNKNOWN`, *speed* is the NIC speed expressed in megabits
+ per second (Mbps); if it can't be determined (e.g. 'localhost') it is set to ``0``,
+ *mtu* is the maximum transmission unit expressed in bytes.
+ See also `examples/ifconfig.py <https://github.com/giampaolo/psutil/blob/master/examples/ifconfig.py>`__
+ for an example application.
+ Example:
+
+ >>> import psutil
+ >>> psutil.net_if_stats()
+ {'eth0': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_FULL: 2>, speed=100, mtu=1500),
+ 'lo': snicstats(isup=True, duplex=<NicDuplex.NIC_DUPLEX_UNKNOWN: 0>, speed=0, mtu=65536)}
+
+ *New in 3.0.0*
+
+
+Other system info
+-----------------
+
+.. function:: users()
+
+ Return users currently connected on the system as a list of namedtuples
+ including the following fields:
+
+ - **user**: the name of the user.
+ - **terminal**: the tty or pseudo-tty associated with the user, if any,
+ else ``None``.
+ - **host**: the host name associated with the entry, if any.
+ - **started**: the creation time as a floating point number expressed in
+ seconds since the epoch.
+
+ Example::
+
+ >>> import psutil
+ >>> psutil.users()
+ [suser(name='giampaolo', terminal='pts/2', host='localhost', started=1340737536.0),
+ suser(name='giampaolo', terminal='pts/3', host='localhost', started=1340737792.0)]
+
+.. function:: boot_time()
+
+ Return the system boot time expressed in seconds since the epoch.
+ Example:
+
+ .. code-block:: python
+
+ >>> import psutil, datetime
+ >>> psutil.boot_time()
+ 1389563460.0
+ >>> datetime.datetime.fromtimestamp(psutil.boot_time()).strftime("%Y-%m-%d %H:%M:%S")
+ '2014-01-12 22:51:00'
+
+Processes
+=========
+
+Functions
+---------
+
+.. function:: pids()
+
+ Return a list of currently running PIDs. To iterate over all processes
+ :func:`process_iter()` should be preferred.
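+
+ A minimal illustrative example (actual PIDs will differ):
+
+ >>> import psutil
+ >>> psutil.pids()
+ [1, 2, 3, 52, 53, 54, 261, ...]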
+
+.. function:: pid_exists(pid)
+
+ Check whether the given PID exists in the current process list. This is
+ faster than doing ``"pid in psutil.pids()"`` and should be preferred.
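+
+ Example (the PID below is purely illustrative):
+
+ >>> import psutil
+ >>> psutil.pid_exists(2353)
+ False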
+
+.. function:: process_iter()
+
+ Return an iterator yielding a :class:`Process` class instance for all running
+ processes on the local machine.
+ Every instance is only created once and then cached into an internal table
+ which is updated every time an element is yielded.
+ Cached :class:`Process` instances are checked for identity so that you're
+ safe in case a PID has been reused by another process, in which case the
+ cached instance is updated.
+ This should be preferred over :func:`psutil.pids()` for iterating over
+ processes.
+ Sorting order in which processes are returned is
+ based on their PID. Example usage::
+
+ import psutil
+
+ for proc in psutil.process_iter():
+ try:
+ pinfo = proc.as_dict(attrs=['pid', 'name'])
+ except psutil.NoSuchProcess:
+ pass
+ else:
+ print(pinfo)
+
+.. function:: wait_procs(procs, timeout=None, callback=None)
+
+ Convenience function which waits for a list of :class:`Process` instances to
+ terminate. Return a ``(gone, alive)`` tuple indicating which processes are
+ gone and which ones are still alive. The *gone* ones will have a new
+ *returncode* attribute indicating process exit status (it may be ``None``).
+ ``callback`` is a function which gets called every time a process terminates
+ (a :class:`Process` instance is passed as callback argument). The function
+ returns as soon as all processes terminate or when the timeout occurs. A
+ typical use case is:
+
+ - send SIGTERM to a list of processes
+ - give them some time to terminate
+ - send SIGKILL to those ones which are still alive
+
+ Example::
+
+ import psutil
+
+ def on_terminate(proc):
+ print("process {} terminated with exit code {}".format(proc, proc.returncode))
+
+ procs = [...] # a list of Process instances
+ for p in procs:
+ p.terminate()
+ gone, alive = psutil.wait_procs(procs, timeout=3, callback=on_terminate)
+ for p in alive:
+ p.kill()
+
+Exceptions
+----------
+
+.. class:: Error()
+
+ Base exception class. All other exceptions inherit from this one.
+
+.. class:: NoSuchProcess(pid, name=None, msg=None)
+
+ Raised by :class:`Process` class methods when no process with the given
+ *pid* is found in the current process list or when a process no longer
+ exists. "name" is the name the process had before disappearing
+ and gets set only if :meth:`Process.name()` was previously called.
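+
+ A typical defensive pattern (a sketch; assume *pid* is an integer obtained
+ elsewhere)::
+
+ import psutil
+
+ try:
+ name = psutil.Process(pid).name()
+ except psutil.NoSuchProcess:
+ name = None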
+
+.. class:: ZombieProcess(pid, name=None, ppid=None, msg=None)
+
+ This may be raised by :class:`Process` class methods when querying a zombie
+ process on UNIX (Windows doesn't have zombie processes). Depending on the
+ method called, the OS may or may not be able to retrieve the process
+ information.
+ Note: this is a subclass of :class:`NoSuchProcess` so if you're not
+ interested in retrieving zombies (e.g. when using :func:`process_iter()`)
+ you can ignore this exception and just catch :class:`NoSuchProcess`.
+
+ *New in 3.0.0*
+
+.. class:: AccessDenied(pid=None, name=None, msg=None)
+
+ Raised by :class:`Process` class methods when permission to perform an
+ action is denied. "name" is the name of the process (may be ``None``).
+
+.. class:: TimeoutExpired(seconds, pid=None, name=None, msg=None)
+
+ Raised by :meth:`Process.wait` if timeout expires and process is still
+ alive.
+
+Process class
+-------------
+
+.. class:: Process(pid=None)
+
+ Represents an OS process with the given *pid*. If *pid* is omitted, the
+ current process pid (`os.getpid() <http://docs.python.org/library/os.html#os.getpid>`__)
+ is used.
+ Raise :class:`NoSuchProcess` if *pid* does not exist.
+ When accessing methods of this class always be prepared to catch
+ :class:`NoSuchProcess` and :class:`AccessDenied` exceptions.
+ `hash() <http://docs.python.org/2/library/functions.html#hash>`__ builtin can
+ be used against instances of this class in order to identify a process
+ uniquely over time (the hash is determined by mixing process PID
+ and creation time). As such it can also be used with
+ `set()s <http://docs.python.org/2/library/stdtypes.html#types-set>`__.
+
+ .. warning::
+
+ the way this class is bound to a process is uniquely via its **PID**.
+ That means that if the :class:`Process` instance is old enough and
+ the PID has been reused by another process in the meantime, you might end up
+ interacting with the wrong process.
+ The only exceptions for which process identity is pre-emptively checked
+ (via PID + creation time) and guaranteed are for
+ :meth:`nice` (set),
+ :meth:`ionice` (set),
+ :meth:`cpu_affinity` (set),
+ :meth:`rlimit` (set),
+ :meth:`children`,
+ :meth:`parent`,
+ :meth:`suspend`,
+ :meth:`resume`,
+ :meth:`send_signal`,
+ :meth:`terminate`, and
+ :meth:`kill`
+ methods.
+ To prevent this problem for all other methods you can use
+ :meth:`is_running()` before querying the process (see the sketch below) or
+ use :func:`process_iter()` in case you're iterating over all processes.
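+
+ A sketch of the suggested guard (assume *p* is an existing
+ :class:`Process` instance)::
+
+ if p.is_running():
+ print(p.cpu_percent(interval=None))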
+
+ .. attribute:: pid
+
+ The process PID.
+
+ .. method:: ppid()
+
+ The process parent pid. On Windows the return value is cached after first
+ call.
+
+ .. method:: name()
+
+ The process name. The return value is cached after first call.
+
+ .. method:: exe()
+
+ The process executable as an absolute path.
+ On some systems this may also be an empty string.
+ The return value is cached after first call.
+
+ .. method:: cmdline()
+
+ The command line this process has been called with.
+
+ .. method:: create_time()
+
+ The process creation time as a floating point number expressed in seconds
+ since the epoch, in
+ `UTC <http://en.wikipedia.org/wiki/Coordinated_universal_time>`__.
+ The return value is cached after first call.
+
+ >>> import psutil, datetime
+ >>> p = psutil.Process()
+ >>> p.create_time()
+ 1307289803.47
+ >>> datetime.datetime.fromtimestamp(p.create_time()).strftime("%Y-%m-%d %H:%M:%S")
+ '2011-06-05 18:03:23'
+
+ .. method:: as_dict(attrs=None, ad_value=None)
+
+ Utility method returning process information as a hashable dictionary.
+ If *attrs* is specified it must be a list of strings reflecting available
+ :class:`Process` class's attribute names (e.g. ``['cpu_times', 'name']``)
+ else all public (read only) attributes are assumed. *ad_value* is the
+ value which gets assigned to a dict key in case :class:`AccessDenied`
+ or :class:`ZombieProcess` exception is raised when retrieving that
+ particular process information.
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.as_dict(attrs=['pid', 'name', 'username'])
+ {'username': 'giampaolo', 'pid': 12366, 'name': 'python'}
+
+ .. versionchanged:: 3.0.0 *ad_value* is used also when incurring into
+ :class:`ZombieProcess` exception, not only :class:`AccessDenied`
+
+ .. method:: parent()
+
+ Utility method which returns the parent process as a :class:`Process`
+ object pre-emptively checking whether PID has been reused. If no parent
+ PID is known return ``None``.
+
+ .. method:: status()
+
+ The current process status as a string. The returned string is one of the
+ :data:`psutil.STATUS_*<psutil.STATUS_RUNNING>` constants.
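+
+ Example (illustrative output):
+
+ >>> import psutil
+ >>> psutil.Process().status()
+ 'running'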
+
+ .. method:: cwd()
+
+ The process current working directory as an absolute path.
+
+ .. method:: username()
+
+ The name of the user that owns the process. On UNIX this is calculated by
+ using the real process uid.
+
+ .. method:: uids()
+
+ The **real**, **effective** and **saved** user ids of this process as a
+ namedtuple. This is the same as
+ `os.getresuid() <http://docs.python.org//library/os.html#os.getresuid>`__
+ but can be used for every process PID.
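+
+ Example (the ids shown are illustrative):
+
+ >>> import psutil
+ >>> psutil.Process().uids()
+ puids(real=1000, effective=1000, saved=1000)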
+
+ Availability: UNIX
+
+ .. method:: gids()
+
+ The **real**, **effective** and **saved** group ids of this process as a
+ namedtuple. This is the same as
+ `os.getresgid() <http://docs.python.org//library/os.html#os.getresgid>`__
+ but can be used for every process PID.
+
+ Availability: UNIX
+
+ .. method:: terminal()
+
+ The terminal associated with this process, if any, else ``None``. This is
+ similar to "tty" command but can be used for every process PID.
+
+ Availability: UNIX
+
+ .. method:: nice(value=None)
+
+ Get or set process
+ `niceness <http://blogs.techrepublic.com.com/opensource/?p=140>`__ (priority).
+ On UNIX this is a number which usually goes from ``-20`` to ``20``.
+ The higher the nice value, the lower the priority of the process.
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.nice(10) # set
+ >>> p.nice() # get
+ 10
+ >>>
+
+ Starting from `Python 3.3 <http://bugs.python.org/issue10784>`__ this
+ functionality is also available as
+ `os.getpriority() <http://docs.python.org/3/library/os.html#os.getpriority>`__
+ and
+ `os.setpriority() <http://docs.python.org/3/library/os.html#os.setpriority>`__
+ (UNIX only).
+
+ On Windows this is available as well by using
+ `GetPriorityClass <http://msdn.microsoft.com/en-us/library/ms683211(v=vs.85).aspx>`__
+ and `SetPriorityClass <http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx>`__
+ and *value* is one of the
+ :data:`psutil.*_PRIORITY_CLASS <psutil.ABOVE_NORMAL_PRIORITY_CLASS>`
+ constants.
+ Example which increases process priority on Windows:
+
+ >>> p.nice(psutil.HIGH_PRIORITY_CLASS)
+
+ .. method:: ionice(ioclass=None, value=None)
+
+ Get or set
+ `process I/O niceness <http://friedcpu.wordpress.com/2007/07/17/why-arent-you-using-ionice-yet/>`__ (priority).
+ On Linux *ioclass* is one of the
+ :data:`psutil.IOPRIO_CLASS_*<psutil.IOPRIO_CLASS_NONE>` constants.
+ *value* is a number which goes from ``0`` to ``7``. The higher the value,
+ the lower the I/O priority of the process. On Windows only *ioclass* is
+ used and it can be set to ``2`` (normal), ``1`` (low) or ``0`` (very low).
+ The example below sets IDLE priority class for the current process,
+ meaning it will only get I/O time when no other process needs the disk:
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.ionice(psutil.IOPRIO_CLASS_IDLE) # set
+ >>> p.ionice() # get
+ pionice(ioclass=<IOPriority.IOPRIO_CLASS_IDLE: 3>, value=0)
+ >>>
+
+ Availability: Linux and Windows > Vista
+
+ .. versionchanged:: 3.0.0 on >= Python 3.4 the returned ``ioclass``
+ constant is an `enum <https://docs.python.org/3/library/enum.html#module-enum>`__
+ instead of a plain integer.
+
+ .. method:: rlimit(resource, limits=None)
+
+ Get or set process resource limits (see
+ `man prlimit <http://linux.die.net/man/2/prlimit>`__). *resource* is one of
+ the :data:`psutil.RLIMIT_* <psutil.RLIMIT_INFINITY>` constants.
+ *limits* is a ``(soft, hard)`` tuple.
+ This is the same as `resource.getrlimit() <http://docs.python.org/library/resource.html#resource.getrlimit>`__
+ and `resource.setrlimit() <http://docs.python.org/library/resource.html#resource.setrlimit>`__
+      but can be used for every process PID (Linux only).
+ Example:
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> # process may open no more than 128 file descriptors
+ >>> p.rlimit(psutil.RLIMIT_NOFILE, (128, 128))
+ >>> # process may create files no bigger than 1024 bytes
+ >>> p.rlimit(psutil.RLIMIT_FSIZE, (1024, 1024))
+ >>> # get
+ >>> p.rlimit(psutil.RLIMIT_FSIZE)
+ (1024, 1024)
+ >>>
+
+ Availability: Linux
+
+ .. method:: io_counters()
+
+      Return process I/O statistics as a namedtuple including the number of
+      read and write operations performed by the process and the number of
+      bytes read and written. For Linux refer to the
+      `/proc filesystem documentation <https://www.kernel.org/doc/Documentation/filesystems/proc.txt>`__.
+      On BSD there's apparently no way to retrieve byte counters, hence ``-1``
+      is returned for the **read_bytes** and **write_bytes** fields. OSX and
+      Solaris are not supported.
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.io_counters()
+ pio(read_count=454556, write_count=3456, read_bytes=110592, write_bytes=0)
+
+ Availability: all platforms except OSX and Solaris
+
+ .. method:: num_ctx_switches()
+
+      The number of voluntary and involuntary context switches performed by
+      this process.
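+
+      Example (the counters shown are hypothetical):
+
+      >>> import psutil
+      >>> psutil.Process().num_ctx_switches()
+      pctxsw(voluntary=78, involuntary=19)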
+
+ .. method:: num_fds()
+
+ The number of file descriptors used by this process.
+
+ Availability: UNIX
+
+ .. method:: num_handles()
+
+ The number of handles used by this process.
+
+ Availability: Windows
+
+ .. method:: num_threads()
+
+ The number of threads currently used by this process.
+
+ .. method:: threads()
+
+      Return the threads opened by the process as a list of namedtuples
+      including thread id and thread CPU times (user/system).
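+
+      Example (the thread id and times shown are hypothetical):
+
+      >>> import psutil
+      >>> psutil.Process().threads()
+      [pthread(id=5234, user_time=22.46, system_time=9.18)]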
+
+ .. method:: cpu_times()
+
+      Return a tuple whose values are the process CPU **user** and **system**
+      times, i.e. the amount of time, expressed in seconds, that the process
+      has spent in
+      `user / system mode <http://stackoverflow.com/questions/556405/what-do-real-user-and-sys-mean-in-the-output-of-time1>`__.
+ This is similar to
+ `os.times() <http://docs.python.org//library/os.html#os.times>`__
+ but can be used for every process PID.
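+
+      Example (the times shown are hypothetical):
+
+      >>> import psutil
+      >>> psutil.Process().cpu_times()
+      pcputimes(user=0.06, system=0.03)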
+
+ .. method:: cpu_percent(interval=None)
+
+ Return a float representing the process CPU utilization as a percentage.
+      When *interval* is > ``0.0`` compares process times to system CPU times
+      elapsed before and after the interval (blocking). When *interval* is
+      ``0.0`` or ``None`` compares process times to system CPU times elapsed
+      since the last call, returning immediately. That means the first time
+      this is called it will return a meaningless ``0.0`` value which you are
+      supposed to ignore. In this case it is recommended, for accuracy, that
+      this function be called a second time with at least ``0.1`` seconds
+      between calls. Example:
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>>
+ >>> # blocking
+ >>> p.cpu_percent(interval=1)
+ 2.0
+ >>> # non-blocking (percentage since last call)
+ >>> p.cpu_percent(interval=None)
+ 2.9
+ >>>
+
+ .. note::
+ a percentage > 100 is legitimate as it can result from a process with
+ multiple threads running on different CPU cores.
+
+ .. warning::
+ the first time this method is called with interval = ``0.0`` or
+ ``None`` it will return a meaningless ``0.0`` value which you are
+ supposed to ignore.
+
+ .. method:: cpu_affinity(cpus=None)
+
+ Get or set process current
+ `CPU affinity <http://www.linuxjournal.com/article/6799?page=0,0>`__.
+      CPU affinity consists of telling the OS to run a certain process on a
+      limited set of CPUs only. The number of eligible CPUs can be obtained
+      with ``list(range(psutil.cpu_count()))``. On set, ``ValueError`` is
+      raised if an invalid CPU number is specified.
+
+ >>> import psutil
+ >>> psutil.cpu_count()
+ 4
+ >>> p = psutil.Process()
+ >>> p.cpu_affinity() # get
+ [0, 1, 2, 3]
+ >>> p.cpu_affinity([0]) # set; from now on, process will run on CPU #0 only
+ >>> p.cpu_affinity()
+ [0]
+ >>>
+ >>> # reset affinity against all CPUs
+ >>> all_cpus = list(range(psutil.cpu_count()))
+ >>> p.cpu_affinity(all_cpus)
+ >>>
+
+ Availability: Linux, Windows, BSD
+
+ .. versionchanged:: 2.2.0 added support for FreeBSD
+
+ .. method:: memory_info()
+
+ Return a tuple representing RSS (Resident Set Size) and VMS (Virtual
+ Memory Size) in bytes. On UNIX *rss* and *vms* are the same values shown
+ by ps. On Windows *rss* and *vms* refer to "Mem Usage" and "VM Size"
+ columns of taskmgr.exe. For more detailed memory stats use
+ :meth:`memory_info_ex`.
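+
+      Example (the values shown are hypothetical):
+
+      >>> import psutil
+      >>> psutil.Process().memory_info()
+      pmem(rss=15491072, vms=84025344)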
+
+ .. method:: memory_info_ex()
+
+ Return a namedtuple with variable fields depending on the platform
+ representing extended memory information about the process.
+ All numbers are expressed in bytes.
+
+ +--------+---------+-------+-------+--------------------+
+ | Linux | OSX | BSD | SunOS | Windows |
+ +========+=========+=======+=======+====================+
+ | rss | rss | rss | rss | num_page_faults |
+ +--------+---------+-------+-------+--------------------+
+ | vms | vms | vms | vms | peak_wset |
+ +--------+---------+-------+-------+--------------------+
+ | shared | pfaults | text | | wset |
+ +--------+---------+-------+-------+--------------------+
+ | text | pageins | data | | peak_paged_pool |
+ +--------+---------+-------+-------+--------------------+
+ | lib | | stack | | paged_pool |
+ +--------+---------+-------+-------+--------------------+
+ | data | | | | peak_nonpaged_pool |
+ +--------+---------+-------+-------+--------------------+
+ | dirty | | | | nonpaged_pool |
+ +--------+---------+-------+-------+--------------------+
+ | | | | | pagefile |
+ +--------+---------+-------+-------+--------------------+
+ | | | | | peak_pagefile |
+ +--------+---------+-------+-------+--------------------+
+ | | | | | private |
+ +--------+---------+-------+-------+--------------------+
+
+ Windows metrics are extracted from
+ `PROCESS_MEMORY_COUNTERS_EX <http://msdn.microsoft.com/en-us/library/windows/desktop/ms684874(v=vs.85).aspx>`__ structure.
+ Example on Linux:
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.memory_info_ex()
+ pextmem(rss=15491072, vms=84025344, shared=5206016, text=2555904, lib=0, data=9891840, dirty=0)
+
+ .. method:: memory_percent()
+
+ Compare physical system memory to process resident memory (RSS) and
+ calculate process memory utilization as a percentage.
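+
+      Example (the percentage shown is hypothetical):
+
+      >>> import psutil
+      >>> psutil.Process().memory_percent()
+      0.7823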
+
+ .. method:: memory_maps(grouped=True)
+
+      Return the process's mapped memory regions as a list of namedtuples
+      whose fields are variable depending on the platform. As such, portable
+      applications should rely on the namedtuple's ``path`` and ``rss``
+      fields only.
+ This method is useful to obtain a detailed representation of process
+ memory usage as explained
+ `here <http://bmaurer.blogspot.it/2006/03/memory-usage-with-smaps.html>`__.
+ If *grouped* is ``True`` the mapped regions with the same *path* are
+ grouped together and the different memory fields are summed. If *grouped*
+ is ``False`` every mapped region is shown as a single entity and the
+ namedtuple will also include the mapped region's address space (*addr*)
+ and permission set (*perms*).
+ See `examples/pmap.py <https://github.com/giampaolo/psutil/blob/master/examples/pmap.py>`__
+ for an example application.
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.memory_maps()
+      [pmmap_grouped(path='/lib/x86_64-linux-gnu/libutil-2.15.so', rss=16384, anonymous=8192, swap=0),
+       pmmap_grouped(path='/lib/x86_64-linux-gnu/libc-2.15.so', rss=6384, anonymous=15, swap=0),
+       pmmap_grouped(path='/lib/x86_64-linux-gnu/libcrypto.so.0.1', rss=34124, anonymous=1245, swap=0),
+ pmmap_grouped(path='[heap]', rss=54653, anonymous=8192, swap=0),
+ pmmap_grouped(path='[stack]', rss=1542, anonymous=166, swap=0),
+ ...]
+ >>>
+
+ .. method:: children(recursive=False)
+
+      Return the children of this process as a list of :class:`Process`
+      objects, pre-emptively checking whether the PID has been reused. If
+      *recursive* is ``True`` return all of the parent's descendants.
+ Example assuming *A == this process*:
+ ::
+
+        A ─┐
+           │
+           ├─ B (child) ─┐
+           │             └─ X (grandchild) ─┐
+           │                                └─ Y (great grandchild)
+           ├─ C (child)
+           └─ D (child)
+
+ >>> p.children()
+ B, C, D
+ >>> p.children(recursive=True)
+ B, X, Y, C, D
+
+ Note that in the example above if process X disappears process Y won't be
+ returned either as the reference to process A is lost.
+
+ .. method:: open_files()
+
+ Return regular files opened by process as a list of namedtuples including
+ the absolute file name and the file descriptor number (on Windows this is
+ always ``-1``). Example:
+
+ >>> import psutil
+ >>> f = open('file.ext', 'w')
+ >>> p = psutil.Process()
+ >>> p.open_files()
+ [popenfile(path='/home/giampaolo/svn/psutil/file.ext', fd=3)]
+
+ .. warning::
+ on Windows this is not fully reliable as due to some limitations of the
+ Windows API the underlying implementation may hang when retrieving
+ certain file handles.
+ In order to work around that psutil on Windows Vista (and higher) spawns
+ a thread and kills it if it's not responding after 100ms.
+         That implies that on Windows this method is not guaranteed to
+         enumerate all regular file handles (see the full discussion
+         `here <https://github.com/giampaolo/psutil/pull/597>`_).
+
+ .. warning::
+ on FreeBSD this method can return files with a 'null' path (see
+ `issue 595 <https://github.com/giampaolo/psutil/pull/595>`_).
+
+ .. versionchanged:: 3.1.0 no longer hangs on Windows.
+
+ .. method:: connections(kind="inet")
+
+ Return socket connections opened by process as a list of namedtuples.
+ To get system-wide connections use :func:`psutil.net_connections()`.
+ Every namedtuple provides 6 attributes:
+
+ - **fd**: the socket file descriptor. This can be passed to
+ `socket.fromfd() <http://docs.python.org/library/socket.html#socket.fromfd>`__
+ to obtain a usable socket object.
+ This is only available on UNIX; on Windows ``-1`` is always returned.
+ - **family**: the address family, either `AF_INET
+ <http://docs.python.org//library/socket.html#socket.AF_INET>`__,
+ `AF_INET6 <http://docs.python.org//library/socket.html#socket.AF_INET6>`__
+ or `AF_UNIX <http://docs.python.org//library/socket.html#socket.AF_UNIX>`__.
+ - **type**: the address type, either `SOCK_STREAM
+ <http://docs.python.org//library/socket.html#socket.SOCK_STREAM>`__ or
+ `SOCK_DGRAM
+ <http://docs.python.org//library/socket.html#socket.SOCK_DGRAM>`__.
+ - **laddr**: the local address as a ``(ip, port)`` tuple or a ``path``
+ in case of AF_UNIX sockets.
+ - **raddr**: the remote address as a ``(ip, port)`` tuple or an absolute
+ ``path`` in case of UNIX sockets.
+ When the remote endpoint is not connected you'll get an empty tuple
+ (AF_INET) or ``None`` (AF_UNIX).
+ On Linux AF_UNIX sockets will always have this set to ``None``.
+ - **status**: represents the status of a TCP connection. The return value
+ is one of the :data:`psutil.CONN_* <psutil.CONN_ESTABLISHED>` constants.
+ For UDP and UNIX sockets this is always going to be
+ :const:`psutil.CONN_NONE`.
+
+ The *kind* parameter is a string which filters for connections that fit the
+ following criteria:
+
+ .. table::
+
+ +----------------+-----------------------------------------------------+
+ | **Kind value** | **Connections using** |
+ +================+=====================================================+
+ | "inet" | IPv4 and IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "inet4" | IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "inet6" | IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "tcp" | TCP |
+ +----------------+-----------------------------------------------------+
+ | "tcp4" | TCP over IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "tcp6" | TCP over IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "udp" | UDP |
+ +----------------+-----------------------------------------------------+
+ | "udp4" | UDP over IPv4 |
+ +----------------+-----------------------------------------------------+
+ | "udp6" | UDP over IPv6 |
+ +----------------+-----------------------------------------------------+
+ | "unix" | UNIX socket (both UDP and TCP protocols) |
+ +----------------+-----------------------------------------------------+
+ | "all" | the sum of all the possible families and protocols |
+ +----------------+-----------------------------------------------------+
+
+ Example:
+
+ >>> import psutil
+ >>> p = psutil.Process(1694)
+ >>> p.name()
+ 'firefox'
+ >>> p.connections()
+ [pconn(fd=115, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 48776), raddr=('93.186.135.91', 80), status='ESTABLISHED'),
+ pconn(fd=117, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 43761), raddr=('72.14.234.100', 80), status='CLOSING'),
+ pconn(fd=119, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 60759), raddr=('72.14.234.104', 80), status='ESTABLISHED'),
+ pconn(fd=123, family=<AddressFamily.AF_INET: 2>, type=<SocketType.SOCK_STREAM: 1>, laddr=('10.0.0.1', 51314), raddr=('72.14.234.83', 443), status='SYN_SENT')]
+
+ .. method:: is_running()
+
+      Return whether this process is running in the current process list.
+      This is reliable also in case the process is gone and its PID has been
+      reused by another process, so it should be preferred over doing
+      ``psutil.pid_exists(p.pid)``.
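+
+      Example (using the current process, which is running by definition):
+
+      >>> import psutil
+      >>> p = psutil.Process()
+      >>> p.is_running()
+      True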
+
+ .. note::
+ this will return ``True`` also if the process is a zombie
+ (``p.status() == psutil.STATUS_ZOMBIE``).
+
+ .. method:: send_signal(signal)
+
+      Send a signal to the process (see
+      `signal module <http://docs.python.org//library/signal.html>`__
+      constants), pre-emptively checking whether the PID has been reused.
+ This is the same as ``os.kill(pid, sig)``.
+ On Windows only **SIGTERM** is valid and is treated as an alias for
+ :meth:`kill()`.
+
+ .. method:: suspend()
+
+      Suspend process execution with the **SIGSTOP** signal, pre-emptively
+      checking whether the PID has been reused.
+ On UNIX this is the same as ``os.kill(pid, signal.SIGSTOP)``.
+ On Windows this is done by suspending all process threads execution.
+
+ .. method:: resume()
+
+      Resume process execution with the **SIGCONT** signal, pre-emptively
+      checking whether the PID has been reused.
+ On UNIX this is the same as ``os.kill(pid, signal.SIGCONT)``.
+ On Windows this is done by resuming all process threads execution.
+
+ .. method:: terminate()
+
+      Terminate the process with the **SIGTERM** signal, pre-emptively
+      checking whether the PID has been reused.
+ On UNIX this is the same as ``os.kill(pid, signal.SIGTERM)``.
+ On Windows this is an alias for :meth:`kill`.
+
+ .. method:: kill()
+
+      Kill the current process by using the **SIGKILL** signal, pre-emptively
+      checking whether the PID has been reused.
+ On UNIX this is the same as ``os.kill(pid, signal.SIGKILL)``.
+ On Windows this is done by using
+ `TerminateProcess <http://msdn.microsoft.com/en-us/library/windows/desktop/ms686714(v=vs.85).aspx>`__.
+
+ .. method:: wait(timeout=None)
+
+      Wait for process termination and, if the process is a child of the
+      current one, also return the exit code, else ``None``. On Windows
+      there's no such limitation (the exit code is always returned). If the
+      process is already terminated, immediately return ``None`` instead of
+      raising :class:`NoSuchProcess`. If *timeout* is specified and the
+      process is still alive, raise a :class:`TimeoutExpired` exception. It
+      can also be used in a
+ non-blocking fashion by specifying ``timeout=0`` in which case it will
+ either return immediately or raise :class:`TimeoutExpired`.
+ To wait for multiple processes use :func:`psutil.wait_procs()`.
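+
+      A minimal sketch (the ``/bin/sleep`` path assumes a typical UNIX
+      system):
+
+      >>> import psutil, subprocess
+      >>> sub = subprocess.Popen(["/bin/sleep", "2"])
+      >>> p = psutil.Process(sub.pid)
+      >>> p.wait(timeout=5)
+      0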
+
+
+Popen class
+-----------
+
+.. class:: Popen(*args, **kwargs)
+
+ A more convenient interface to stdlib
+ `subprocess.Popen <http://docs.python.org/library/subprocess.html#subprocess.Popen>`__.
+   It starts a subprocess and deals with it exactly as when using
+ `subprocess.Popen <http://docs.python.org/library/subprocess.html#subprocess.Popen>`__
+ but in addition it also provides all the methods of
+ :class:`psutil.Process` class in a single interface.
+ For method names common to both classes such as
+ :meth:`send_signal() <psutil.Process.send_signal()>`,
+ :meth:`terminate() <psutil.Process.terminate()>` and
+ :meth:`kill() <psutil.Process.kill()>`
+ :class:`psutil.Process` implementation takes precedence.
+ For a complete documentation refer to
+ `subprocess module documentation <http://docs.python.org/library/subprocess.html>`__.
+
+ .. note::
+
+ Unlike `subprocess.Popen <http://docs.python.org/library/subprocess.html#subprocess.Popen>`__
+      this class pre-emptively checks whether the PID has been reused on
+ :meth:`send_signal() <psutil.Process.send_signal()>`,
+ :meth:`terminate() <psutil.Process.terminate()>` and
+ :meth:`kill() <psutil.Process.kill()>`
+ so that you can't accidentally terminate another process, fixing
+ http://bugs.python.org/issue6973.
+
+ >>> import psutil
+ >>> from subprocess import PIPE
+ >>>
+ >>> p = psutil.Popen(["/usr/bin/python", "-c", "print('hello')"], stdout=PIPE)
+ >>> p.name()
+ 'python'
+ >>> p.username()
+ 'giampaolo'
+ >>> p.communicate()
+ ('hello\n', None)
+ >>> p.wait(timeout=2)
+ 0
+ >>>
+
+Constants
+=========
+
+.. _const-pstatus:
+.. data:: STATUS_RUNNING
+ STATUS_SLEEPING
+ STATUS_DISK_SLEEP
+ STATUS_STOPPED
+ STATUS_TRACING_STOP
+ STATUS_ZOMBIE
+ STATUS_DEAD
+ STATUS_WAKE_KILL
+ STATUS_WAKING
+ STATUS_IDLE
+ STATUS_LOCKED
+ STATUS_WAITING
+
+ A set of strings representing the status of a process.
+ Returned by :meth:`psutil.Process.status()`.
+
+.. _const-conn:
+.. data:: CONN_ESTABLISHED
+ CONN_SYN_SENT
+ CONN_SYN_RECV
+ CONN_FIN_WAIT1
+ CONN_FIN_WAIT2
+ CONN_TIME_WAIT
+ CONN_CLOSE
+ CONN_CLOSE_WAIT
+ CONN_LAST_ACK
+ CONN_LISTEN
+ CONN_CLOSING
+ CONN_NONE
+ CONN_DELETE_TCB (Windows)
+ CONN_IDLE (Solaris)
+ CONN_BOUND (Solaris)
+
+ A set of strings representing the status of a TCP connection.
+ Returned by :meth:`psutil.Process.connections()` (`status` field).
+
+.. _const-prio:
+.. data:: ABOVE_NORMAL_PRIORITY_CLASS
+ BELOW_NORMAL_PRIORITY_CLASS
+ HIGH_PRIORITY_CLASS
+ IDLE_PRIORITY_CLASS
+ NORMAL_PRIORITY_CLASS
+ REALTIME_PRIORITY_CLASS
+
+ A set of integers representing the priority of a process on Windows (see
+ `MSDN documentation <http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx>`__).
+ They can be used in conjunction with
+ :meth:`psutil.Process.nice()` to get or set process priority.
+
+ Availability: Windows
+
+   .. versionchanged:: 3.0.0 on Python >= 3.4 these constants are
+      `enums <https://docs.python.org/3/library/enum.html#module-enum>`__
+      instead of plain integers.
+
+.. _const-ioprio:
+.. data:: IOPRIO_CLASS_NONE
+ IOPRIO_CLASS_RT
+ IOPRIO_CLASS_BE
+ IOPRIO_CLASS_IDLE
+
+ A set of integers representing the I/O priority of a process on Linux. They
+ can be used in conjunction with :meth:`psutil.Process.ionice()` to get or set
+ process I/O priority.
+   *IOPRIO_CLASS_NONE* and *IOPRIO_CLASS_BE* (best effort) are the defaults
+   for any process that hasn't set a specific I/O priority.
+ *IOPRIO_CLASS_RT* (real time) means the process is given first access to the
+ disk, regardless of what else is going on in the system.
+ *IOPRIO_CLASS_IDLE* means the process will get I/O time when no-one else
+ needs the disk.
+ For further information refer to manuals of
+ `ionice <http://linux.die.net/man/1/ionice>`__
+ command line utility or
+ `ioprio_get <http://linux.die.net/man/2/ioprio_get>`__
+ system call.
+
+ Availability: Linux
+
+   .. versionchanged:: 3.0.0 on Python >= 3.4 these constants are
+      `enums <https://docs.python.org/3/library/enum.html#module-enum>`__
+      instead of plain integers.
+
+.. _const-rlimit:
+.. data:: RLIMIT_INFINITY
+ RLIMIT_AS
+ RLIMIT_CORE
+ RLIMIT_CPU
+ RLIMIT_DATA
+ RLIMIT_FSIZE
+ RLIMIT_LOCKS
+ RLIMIT_MEMLOCK
+ RLIMIT_MSGQUEUE
+ RLIMIT_NICE
+ RLIMIT_NOFILE
+ RLIMIT_NPROC
+ RLIMIT_RSS
+ RLIMIT_RTPRIO
+ RLIMIT_RTTIME
+ RLIMIT_SIGPENDING
+ RLIMIT_STACK
+
+ Constants used for getting and setting process resource limits to be used in
+ conjunction with :meth:`psutil.Process.rlimit()`. See
+   `man prlimit <http://linux.die.net/man/2/prlimit>`__ for further information.
+
+ Availability: Linux
+
+.. _const-aflink:
+.. data:: AF_LINK
+
+ Constant which identifies a MAC address associated with a network interface.
+ To be used in conjunction with :func:`psutil.net_if_addrs()`.
+
+ *New in 3.0.0*
+
+.. _const-duplex:
+.. data:: NIC_DUPLEX_FULL
+ NIC_DUPLEX_HALF
+ NIC_DUPLEX_UNKNOWN
+
+   Constants which identify a NIC's (network interface card) duplex mode.
+   NIC_DUPLEX_FULL means the NIC is able to send and receive data
+   simultaneously; NIC_DUPLEX_HALF means the NIC can either send or receive
+   data at a time.
+ To be used in conjunction with :func:`psutil.net_if_stats()`.
+
+ *New in 3.0.0*
+
+Development guide
+=================
+
+If you plan on hacking on psutil (e.g. want to add a new feature or fix a bug)
+take a look at the
+`development guide <https://github.com/giampaolo/psutil/blob/master/DEVGUIDE.rst>`_.
diff --git a/python/psutil/docs/make.bat b/python/psutil/docs/make.bat
new file mode 100644
index 000000000..9bc67515c
--- /dev/null
+++ b/python/psutil/docs/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\psutil.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\psutil.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/python/psutil/docs/xxx b/python/psutil/docs/xxx
new file mode 100644
index 000000000..b78d53f2d
--- /dev/null
+++ b/python/psutil/docs/xxx
@@ -0,0 +1,11 @@
+cpu 1974613 1749 485728 6305758 80280 15 5924 0 0 0
+
+cpu0 519156 374 132999 5977865 72925 10 1458 0 0 0
+
+cpu1 524667 401 125931 108960 2110 4 2214 0 0 0
+
+cpu2 462286 520 117046 109514 2666 0 828 0 0 0
+
+cpu3 468502 453 109750 109418 2578 0 1424 0 0 0
+
+
diff --git a/python/psutil/examples/disk_usage.py b/python/psutil/examples/disk_usage.py
new file mode 100755
index 000000000..d8600a8c4
--- /dev/null
+++ b/python/psutil/examples/disk_usage.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+List all mounted disk partitions a-la "df -h" command.
+
+$ python examples/disk_usage.py
+Device Total Used Free Use % Type Mount
+/dev/sdb3 18.9G 14.7G 3.3G 77% ext4 /
+/dev/sda6 345.9G 83.8G 244.5G 24% ext4 /home
+/dev/sda1 296.0M 43.1M 252.9M 14% vfat /boot/efi
+/dev/sda2 600.0M 312.4M 287.6M 52% fuseblk /media/Recovery
+"""
+
+import sys
+import os
+import psutil
+
+
+def bytes2human(n):
+ # http://code.activestate.com/recipes/578019
+ # >>> bytes2human(10000)
+ # '9.8K'
+ # >>> bytes2human(100001221)
+ # '95.4M'
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.1f%s' % (value, s)
+ return "%sB" % n
+
+
+def main():
+ templ = "%-17s %8s %8s %8s %5s%% %9s %s"
+ print(templ % ("Device", "Total", "Used", "Free", "Use ", "Type",
+ "Mount"))
+ for part in psutil.disk_partitions(all=False):
+ if os.name == 'nt':
+ if 'cdrom' in part.opts or part.fstype == '':
+ # skip cd-rom drives with no disk in it; they may raise
+ # ENOENT, pop-up a Windows GUI error for a non-ready
+ # partition or just hang.
+ continue
+ usage = psutil.disk_usage(part.mountpoint)
+ print(templ % (
+ part.device,
+ bytes2human(usage.total),
+ bytes2human(usage.used),
+ bytes2human(usage.free),
+ int(usage.percent),
+ part.fstype,
+ part.mountpoint))
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/python/psutil/examples/free.py b/python/psutil/examples/free.py
new file mode 100755
index 000000000..913ca58a4
--- /dev/null
+++ b/python/psutil/examples/free.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'free' cmdline utility.
+
+$ python examples/free.py
+ total used free shared buffers cache
+Mem: 10125520 8625996 1499524 0 349500 3307836
+Swap: 0 0 0
+"""
+
+import psutil
+
+
+def main():
+ virt = psutil.virtual_memory()
+ swap = psutil.swap_memory()
+ templ = "%-7s %10s %10s %10s %10s %10s %10s"
+ print(templ % ('', 'total', 'used', 'free', 'shared', 'buffers', 'cache'))
+ print(templ % (
+ 'Mem:',
+ int(virt.total / 1024),
+ int(virt.used / 1024),
+ int(virt.free / 1024),
+ int(getattr(virt, 'shared', 0) / 1024),
+ int(getattr(virt, 'buffers', 0) / 1024),
+ int(getattr(virt, 'cached', 0) / 1024)))
+ print(templ % (
+ 'Swap:', int(swap.total / 1024),
+ int(swap.used / 1024),
+ int(swap.free / 1024),
+ '',
+ '',
+ ''))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/ifconfig.py b/python/psutil/examples/ifconfig.py
new file mode 100644
index 000000000..e7a436cc0
--- /dev/null
+++ b/python/psutil/examples/ifconfig.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'ifconfig' on UNIX.
+
+$ python examples/ifconfig.py
+lo (speed=0MB, duplex=?, mtu=65536, up=yes):
+ IPv4 address : 127.0.0.1
+ broadcast : 127.0.0.1
+ netmask : 255.0.0.0
+ IPv6 address : ::1
+ netmask : ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
+ MAC address : 00:00:00:00:00:00
+ broadcast : 00:00:00:00:00:00
+
+wlan0 (speed=0MB, duplex=?, mtu=1500, up=yes):
+ IPv4 address : 10.0.3.1
+ broadcast : 10.0.3.255
+ netmask : 255.255.255.0
+ IPv6 address : fe80::3005:adff:fe31:8698
+ netmask : ffff:ffff:ffff:ffff::
+ MAC address : 32:05:ad:31:86:98
+ broadcast : ff:ff:ff:ff:ff:ff
+
+eth0 (speed=100MB, duplex=full, mtu=1500, up=yes):
+ IPv4 address : 192.168.1.2
+ broadcast : 192.168.1.255
+ netmask : 255.255.255.0
+ IPv6 address : fe80::c685:8ff:fe45:641
+ netmask : ffff:ffff:ffff:ffff::
+ MAC address : c4:85:08:45:06:41
+ broadcast : ff:ff:ff:ff:ff:ff
+"""
+
+from __future__ import print_function
+import socket
+
+import psutil
+
+
+af_map = {
+ socket.AF_INET: 'IPv4',
+ socket.AF_INET6: 'IPv6',
+ psutil.AF_LINK: 'MAC',
+}
+
+duplex_map = {
+ psutil.NIC_DUPLEX_FULL: "full",
+ psutil.NIC_DUPLEX_HALF: "half",
+ psutil.NIC_DUPLEX_UNKNOWN: "?",
+}
+
+
+def main():
+ stats = psutil.net_if_stats()
+ for nic, addrs in psutil.net_if_addrs().items():
+ if nic in stats:
+ print("%s (speed=%sMB, duplex=%s, mtu=%s, up=%s):" % (
+ nic, stats[nic].speed, duplex_map[stats[nic].duplex],
+ stats[nic].mtu, "yes" if stats[nic].isup else "no"))
+ else:
+ print("%s:" % (nic))
+ for addr in addrs:
+ print(" %-8s" % af_map.get(addr.family, addr.family), end="")
+ print(" address : %s" % addr.address)
+ if addr.broadcast:
+ print(" broadcast : %s" % addr.broadcast)
+ if addr.netmask:
+ print(" netmask : %s" % addr.netmask)
+ print("")
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/iotop.py b/python/psutil/examples/iotop.py
new file mode 100755
index 000000000..16ac7fbf6
--- /dev/null
+++ b/python/psutil/examples/iotop.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of iotop (http://guichaz.free.fr/iotop/) showing real time
+disk I/O statistics.
+
+It works on Linux only (FreeBSD and OSX are missing support for IO
+counters).
+It doesn't work on Windows as the curses module is required.
+
+Example output:
+
+$ python examples/iotop.py
+Total DISK READ: 0.00 B/s | Total DISK WRITE: 472.00 K/s
+PID USER DISK READ DISK WRITE COMMAND
+13155 giampao 0.00 B/s 428.00 K/s /usr/bin/google-chrome-beta
+3260 giampao 0.00 B/s 0.00 B/s bash
+3779 giampao 0.00 B/s 0.00 B/s gnome-session --session=ubuntu
+3830 giampao 0.00 B/s 0.00 B/s /usr/bin/dbus-launch
+3831 giampao 0.00 B/s 0.00 B/s //bin/dbus-daemon --fork --print-pid 5
+3841 giampao 0.00 B/s 0.00 B/s /usr/lib/at-spi-bus-launcher
+3845 giampao 0.00 B/s 0.00 B/s /bin/dbus-daemon
+3848 giampao 0.00 B/s 0.00 B/s /usr/lib/at-spi2-core/at-spi2-registryd
+3862 giampao 0.00 B/s 0.00 B/s /usr/lib/gnome-settings-daemon
+
+Author: Giampaolo Rodola' <g.rodola@gmail.com>
+"""
+
+import atexit
+import time
+import sys
+try:
+ import curses
+except ImportError:
+ sys.exit('platform not supported')
+
+import psutil
+
+
+# --- curses stuff
+def tear_down():
+ win.keypad(0)
+ curses.nocbreak()
+ curses.echo()
+ curses.endwin()
+
+win = curses.initscr()
+atexit.register(tear_down)
+curses.endwin()
+lineno = 0
+
+
+def print_line(line, highlight=False):
+ """A thin wrapper around curses's addstr()."""
+ global lineno
+ try:
+ if highlight:
+ line += " " * (win.getmaxyx()[1] - len(line))
+ win.addstr(lineno, 0, line, curses.A_REVERSE)
+ else:
+ win.addstr(lineno, 0, line, 0)
+ except curses.error:
+ lineno = 0
+ win.refresh()
+ raise
+ else:
+ lineno += 1
+# --- /curses stuff
+
+
+def bytes2human(n):
+ """
+ >>> bytes2human(10000)
+ '9.8 K/s'
+ >>> bytes2human(100001221)
+ '95.4 M/s'
+ """
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.2f %s/s' % (value, s)
+ return '%.2f B/s' % (n)
+
+
+def poll(interval):
+    """Calculate IO usage by comparing IO statistics before and
+ after the interval.
+ Return a tuple including all currently running processes
+ sorted by IO activity and total disks I/O activity.
+ """
+ # first get a list of all processes and disk io counters
+ procs = [p for p in psutil.process_iter()]
+ for p in procs[:]:
+ try:
+ p._before = p.io_counters()
+ except psutil.Error:
+ procs.remove(p)
+ continue
+ disks_before = psutil.disk_io_counters()
+
+ # sleep some time
+ time.sleep(interval)
+
+ # then retrieve the same info again
+ for p in procs[:]:
+ try:
+ p._after = p.io_counters()
+ p._cmdline = ' '.join(p.cmdline())
+ if not p._cmdline:
+ p._cmdline = p.name()
+ p._username = p.username()
+ except (psutil.NoSuchProcess, psutil.ZombieProcess):
+ procs.remove(p)
+ disks_after = psutil.disk_io_counters()
+
+ # finally calculate results by comparing data before and
+ # after the interval
+ for p in procs:
+ p._read_per_sec = p._after.read_bytes - p._before.read_bytes
+ p._write_per_sec = p._after.write_bytes - p._before.write_bytes
+ p._total = p._read_per_sec + p._write_per_sec
+
+ disks_read_per_sec = disks_after.read_bytes - disks_before.read_bytes
+ disks_write_per_sec = disks_after.write_bytes - disks_before.write_bytes
+
+ # sort processes by total disk IO so that the more intensive
+ # ones get listed first
+ processes = sorted(procs, key=lambda p: p._total, reverse=True)
+
+ return (processes, disks_read_per_sec, disks_write_per_sec)
+
+
+def refresh_window(procs, disks_read, disks_write):
+ """Print results on screen by using curses."""
+ curses.endwin()
+ templ = "%-5s %-7s %11s %11s %s"
+ win.erase()
+
+ disks_tot = "Total DISK READ: %s | Total DISK WRITE: %s" \
+ % (bytes2human(disks_read), bytes2human(disks_write))
+ print_line(disks_tot)
+
+ header = templ % ("PID", "USER", "DISK READ", "DISK WRITE", "COMMAND")
+ print_line(header, highlight=True)
+
+ for p in procs:
+ line = templ % (
+ p.pid,
+ p._username[:7],
+ bytes2human(p._read_per_sec),
+ bytes2human(p._write_per_sec),
+ p._cmdline)
+ try:
+ print_line(line)
+ except curses.error:
+ break
+ win.refresh()
+
+
+def main():
+ try:
+ interval = 0
+ while True:
+ args = poll(interval)
+ refresh_window(*args)
+ interval = 1
+ except (KeyboardInterrupt, SystemExit):
+ pass
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/killall.py b/python/psutil/examples/killall.py
new file mode 100755
index 000000000..b548e7bc5
--- /dev/null
+++ b/python/psutil/examples/killall.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Kill a process by name.
+"""
+
+import os
+import sys
+import psutil
+
+
+def main():
+ if len(sys.argv) != 2:
+ sys.exit('usage: %s name' % __file__)
+ else:
+ NAME = sys.argv[1]
+
+ killed = []
+ for proc in psutil.process_iter():
+ if proc.name() == NAME and proc.pid != os.getpid():
+ proc.kill()
+ killed.append(proc.pid)
+ if not killed:
+ sys.exit('%s: no process found' % NAME)
+ else:
+ sys.exit(0)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/python/psutil/examples/meminfo.py b/python/psutil/examples/meminfo.py
new file mode 100755
index 000000000..c463a3de4
--- /dev/null
+++ b/python/psutil/examples/meminfo.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Print system memory information.
+
+$ python examples/meminfo.py
+MEMORY
+------
+Total : 9.7G
+Available : 4.9G
+Percent : 49.0
+Used : 8.2G
+Free : 1.4G
+Active : 5.6G
+Inactive : 2.1G
+Buffers : 341.2M
+Cached : 3.2G
+
+SWAP
+----
+Total : 0B
+Used : 0B
+Free : 0B
+Percent : 0.0
+Sin : 0B
+Sout : 0B
+"""
+
+import psutil
+
+
+def bytes2human(n):
+ # http://code.activestate.com/recipes/578019
+ # >>> bytes2human(10000)
+ # '9.8K'
+ # >>> bytes2human(100001221)
+ # '95.4M'
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.1f%s' % (value, s)
+ return "%sB" % n
+
+
+def pprint_ntuple(nt):
+ for name in nt._fields:
+ value = getattr(nt, name)
+ if name != 'percent':
+ value = bytes2human(value)
+ print('%-10s : %7s' % (name.capitalize(), value))
+
+
+def main():
+ print('MEMORY\n------')
+ pprint_ntuple(psutil.virtual_memory())
+ print('\nSWAP\n----')
+ pprint_ntuple(psutil.swap_memory())
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/netstat.py b/python/psutil/examples/netstat.py
new file mode 100755
index 000000000..884622e9e
--- /dev/null
+++ b/python/psutil/examples/netstat.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'netstat -antp' on Linux.
+
+$ python examples/netstat.py
+Proto Local address Remote address Status PID Program name
+tcp 127.0.0.1:48256 127.0.0.1:45884 ESTABLISHED 13646 chrome
+tcp 127.0.0.1:47073 127.0.0.1:45884 ESTABLISHED 13646 chrome
+tcp 127.0.0.1:47072 127.0.0.1:45884 ESTABLISHED 13646 chrome
+tcp 127.0.0.1:45884 - LISTEN 13651 GoogleTalkPlugi
+tcp 127.0.0.1:60948 - LISTEN 13651 GoogleTalkPlugi
+tcp 172.17.42.1:49102 127.0.0.1:19305 CLOSE_WAIT 13651 GoogleTalkPlugi
+tcp 172.17.42.1:55797 127.0.0.1:443 CLOSE_WAIT 13651 GoogleTalkPlugi
+...
+"""
+
+import socket
+from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
+
+import psutil
+
+
+AD = "-"
+AF_INET6 = getattr(socket, 'AF_INET6', object())
+proto_map = {
+ (AF_INET, SOCK_STREAM): 'tcp',
+ (AF_INET6, SOCK_STREAM): 'tcp6',
+ (AF_INET, SOCK_DGRAM): 'udp',
+ (AF_INET6, SOCK_DGRAM): 'udp6',
+}
+
+
+def main():
+ templ = "%-5s %-30s %-30s %-13s %-6s %s"
+ print(templ % (
+ "Proto", "Local address", "Remote address", "Status", "PID",
+ "Program name"))
+ proc_names = {}
+ for p in psutil.process_iter():
+ try:
+ proc_names[p.pid] = p.name()
+ except psutil.Error:
+ pass
+ for c in psutil.net_connections(kind='inet'):
+ laddr = "%s:%s" % (c.laddr)
+ raddr = ""
+ if c.raddr:
+ raddr = "%s:%s" % (c.raddr)
+ print(templ % (
+ proto_map[(c.family, c.type)],
+ laddr,
+ raddr or AD,
+ c.status,
+ c.pid or AD,
+ proc_names.get(c.pid, '?')[:15],
+ ))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/nettop.py b/python/psutil/examples/nettop.py
new file mode 100755
index 000000000..7a8343ee4
--- /dev/null
+++ b/python/psutil/examples/nettop.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+#
+# $Id: iotop.py 1160 2011-10-14 18:50:36Z g.rodola@gmail.com $
+#
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Shows real-time network statistics.
+
+Author: Giampaolo Rodola' <g.rodola@gmail.com>
+
+$ python examples/nettop.py
+-----------------------------------------------------------
+total bytes: sent: 1.49 G received: 4.82 G
+total packets: sent: 7338724 received: 8082712
+
+wlan0 TOTAL PER-SEC
+-----------------------------------------------------------
+bytes-sent 1.29 G 0.00 B/s
+bytes-recv 3.48 G 0.00 B/s
+pkts-sent 7221782 0
+pkts-recv 6753724 0
+
+eth1 TOTAL PER-SEC
+-----------------------------------------------------------
+bytes-sent 131.77 M 0.00 B/s
+bytes-recv 1.28 G 0.00 B/s
+pkts-sent 0 0
+pkts-recv 1214470 0
+"""
+
+import atexit
+import time
+import sys
+try:
+ import curses
+except ImportError:
+ sys.exit('platform not supported')
+
+import psutil
+
+
+# --- curses stuff
+def tear_down():
+ win.keypad(0)
+ curses.nocbreak()
+ curses.echo()
+ curses.endwin()
+
+win = curses.initscr()
+atexit.register(tear_down)
+curses.endwin()
+lineno = 0
+
+
+def print_line(line, highlight=False):
+ """A thin wrapper around curses's addstr()."""
+ global lineno
+ try:
+ if highlight:
+ line += " " * (win.getmaxyx()[1] - len(line))
+ win.addstr(lineno, 0, line, curses.A_REVERSE)
+ else:
+ win.addstr(lineno, 0, line, 0)
+ except curses.error:
+ lineno = 0
+ win.refresh()
+ raise
+ else:
+ lineno += 1
+# --- /curses stuff
+
+
+def bytes2human(n):
+ """
+ >>> bytes2human(10000)
+ '9.8 K'
+ >>> bytes2human(100001221)
+ '95.4 M'
+ """
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.2f %s' % (value, s)
+ return '%.2f B' % (n)
+
+
+def poll(interval):
+ """Retrieve raw stats within an interval window."""
+ tot_before = psutil.net_io_counters()
+ pnic_before = psutil.net_io_counters(pernic=True)
+ # sleep some time
+ time.sleep(interval)
+ tot_after = psutil.net_io_counters()
+ pnic_after = psutil.net_io_counters(pernic=True)
+ return (tot_before, tot_after, pnic_before, pnic_after)
+
+
+def refresh_window(tot_before, tot_after, pnic_before, pnic_after):
+ """Print stats on screen."""
+ global lineno
+
+ # totals
+ print_line("total bytes: sent: %-10s received: %s" % (
+ bytes2human(tot_after.bytes_sent),
+ bytes2human(tot_after.bytes_recv))
+ )
+ print_line("total packets: sent: %-10s received: %s" % (
+ tot_after.packets_sent, tot_after.packets_recv))
+
+ # per-network interface details: let's sort network interfaces so
+ # that the ones which generated more traffic are shown first
+ print_line("")
+ nic_names = list(pnic_after.keys())
+ nic_names.sort(key=lambda x: sum(pnic_after[x]), reverse=True)
+ for name in nic_names:
+ stats_before = pnic_before[name]
+ stats_after = pnic_after[name]
+ templ = "%-15s %15s %15s"
+ print_line(templ % (name, "TOTAL", "PER-SEC"), highlight=True)
+ print_line(templ % (
+ "bytes-sent",
+ bytes2human(stats_after.bytes_sent),
+ bytes2human(
+ stats_after.bytes_sent - stats_before.bytes_sent) + '/s',
+ ))
+ print_line(templ % (
+ "bytes-recv",
+ bytes2human(stats_after.bytes_recv),
+ bytes2human(
+ stats_after.bytes_recv - stats_before.bytes_recv) + '/s',
+ ))
+ print_line(templ % (
+ "pkts-sent",
+ stats_after.packets_sent,
+ stats_after.packets_sent - stats_before.packets_sent,
+ ))
+ print_line(templ % (
+ "pkts-recv",
+ stats_after.packets_recv,
+ stats_after.packets_recv - stats_before.packets_recv,
+ ))
+ print_line("")
+ win.refresh()
+ lineno = 0
+
+
+def main():
+ try:
+ interval = 0
+ while True:
+ args = poll(interval)
+ refresh_window(*args)
+ interval = 1
+ except (KeyboardInterrupt, SystemExit):
+ pass
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/pidof.py b/python/psutil/examples/pidof.py
new file mode 100755
index 000000000..8692a3152
--- /dev/null
+++ b/python/psutil/examples/pidof.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola', karthikrev. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""
+A clone of 'pidof' cmdline utility.
+$ pidof python
+1140 1138 1136 1134 1133 1129 1127 1125 1121 1120 1119
+"""
+
+from __future__ import print_function
+import psutil
+import sys
+
+
+def pidof(pgname):
+ pids = []
+ for proc in psutil.process_iter():
+ # search for matches in the process name and cmdline
+ try:
+ name = proc.name()
+ except psutil.Error:
+ pass
+ else:
+ if name == pgname:
+ pids.append(str(proc.pid))
+ continue
+
+ try:
+ cmdline = proc.cmdline()
+ except psutil.Error:
+ pass
+ else:
+ if cmdline and cmdline[0] == pgname:
+ pids.append(str(proc.pid))
+
+ return pids
+
+
+def main():
+ if len(sys.argv) != 2:
+ sys.exit('usage: %s pgname' % __file__)
+ else:
+ pgname = sys.argv[1]
+ pids = pidof(pgname)
+ if pids:
+ print(" ".join(pids))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/pmap.py b/python/psutil/examples/pmap.py
new file mode 100755
index 000000000..7593777ae
--- /dev/null
+++ b/python/psutil/examples/pmap.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'pmap' utility on Linux, 'vmmap' on OSX and 'procstat -v' on BSD.
+Report memory map of a process.
+
+$ python examples/pmap.py 32402
+pid=32402, name=hg
+Address RSS Mode Mapping
+0000000000400000 1200K r-xp /usr/bin/python2.7
+0000000000838000 4K r--p /usr/bin/python2.7
+0000000000839000 304K rw-p /usr/bin/python2.7
+00000000008ae000 68K rw-p [anon]
+000000000275e000 5396K rw-p [heap]
+00002b29bb1e0000 124K r-xp /lib/x86_64-linux-gnu/ld-2.17.so
+00002b29bb203000 8K rw-p [anon]
+00002b29bb220000 528K rw-p [anon]
+00002b29bb2d8000 768K rw-p [anon]
+00002b29bb402000 4K r--p /lib/x86_64-linux-gnu/ld-2.17.so
+00002b29bb403000 8K rw-p /lib/x86_64-linux-gnu/ld-2.17.so
+00002b29bb405000 60K r-xp /lib/x86_64-linux-gnu/libpthread-2.17.so
+00002b29bb41d000 0K ---p /lib/x86_64-linux-gnu/libpthread-2.17.so
+00007fff94be6000 48K rw-p [stack]
+00007fff94dd1000 4K r-xp [vdso]
+ffffffffff600000 0K r-xp [vsyscall]
+...
+"""
+
+import sys
+
+import psutil
+
+
+def main():
+ if len(sys.argv) != 2:
+ sys.exit('usage: pmap <pid>')
+ p = psutil.Process(int(sys.argv[1]))
+ print("pid=%s, name=%s" % (p.pid, p.name()))
+ templ = "%-16s %10s %-7s %s"
+ print(templ % ("Address", "RSS", "Mode", "Mapping"))
+ total_rss = 0
+ for m in p.memory_maps(grouped=False):
+ total_rss += m.rss
+ print(templ % (
+ m.addr.split('-')[0].zfill(16),
+ str(m.rss / 1024) + 'K',
+ m.perms,
+ m.path))
+ print("-" * 33)
+ print(templ % ("Total", str(total_rss / 1024) + 'K', '', ''))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/process_detail.py b/python/psutil/examples/process_detail.py
new file mode 100755
index 000000000..e20371aef
--- /dev/null
+++ b/python/psutil/examples/process_detail.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Print detailed information about a process.
+Author: Giampaolo Rodola' <g.rodola@gmail.com>
+
+$ python examples/process_detail.py
+pid 820
+name python
+exe /usr/bin/python2.7
+parent 29613 (bash)
+cmdline python examples/process_detail.py
+started            2014-04-27 03:41
+user giampaolo
+uids real=1000, effective=1000, saved=1000
+gids real=1000, effective=1000, saved=1000
+terminal /dev/pts/17
+cwd /ssd/svn/psutil
+memory 0.1% (resident=10.6M, virtual=58.5M)
+cpu 0.0% (user=0.09, system=0.0)
+status running
+niceness 0
+num threads 1
+I/O bytes-read=0B, bytes-written=0B
+open files
+running threads id=820, user-time=0.09, sys-time=0.0
+"""
+
+import datetime
+import os
+import socket
+import sys
+
+import psutil
+
+
+POSIX = os.name == 'posix'
+
+
+def convert_bytes(n):
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.1f%s' % (value, s)
+ return "%sB" % n
+
+
+def print_(a, b):
+ if sys.stdout.isatty() and POSIX:
+ fmt = '\x1b[1;32m%-17s\x1b[0m %s' % (a, b)
+ else:
+ fmt = '%-15s %s' % (a, b)
+ # python 2/3 compatibility layer
+ sys.stdout.write(fmt + '\n')
+ sys.stdout.flush()
+
+
+def run(pid):
+ ACCESS_DENIED = ''
+ try:
+ p = psutil.Process(pid)
+ pinfo = p.as_dict(ad_value=ACCESS_DENIED)
+ except psutil.NoSuchProcess as err:
+ sys.exit(str(err))
+
+ try:
+ parent = p.parent()
+ if parent:
+ parent = '(%s)' % parent.name()
+ else:
+ parent = ''
+ except psutil.Error:
+ parent = ''
+ if pinfo['create_time'] != ACCESS_DENIED:
+ started = datetime.datetime.fromtimestamp(
+ pinfo['create_time']).strftime('%Y-%m-%d %H:%M')
+ else:
+ started = ACCESS_DENIED
+ io = pinfo.get('io_counters', ACCESS_DENIED)
+ if pinfo['memory_info'] != ACCESS_DENIED:
+ mem = '%s%% (resident=%s, virtual=%s) ' % (
+ round(pinfo['memory_percent'], 1),
+ convert_bytes(pinfo['memory_info'].rss),
+ convert_bytes(pinfo['memory_info'].vms))
+ else:
+ mem = ACCESS_DENIED
+ children = p.children()
+
+ print_('pid', pinfo['pid'])
+ print_('name', pinfo['name'])
+ print_('exe', pinfo['exe'])
+ print_('parent', '%s %s' % (pinfo['ppid'], parent))
+ print_('cmdline', ' '.join(pinfo['cmdline']))
+ print_('started', started)
+ print_('user', pinfo['username'])
+    if POSIX and pinfo['uids']:
+ print_('uids', 'real=%s, effective=%s, saved=%s' % pinfo['uids'])
+ if POSIX and pinfo['gids']:
+ print_('gids', 'real=%s, effective=%s, saved=%s' % pinfo['gids'])
+ if POSIX:
+ print_('terminal', pinfo['terminal'] or '')
+ print_('cwd', pinfo['cwd'])
+ print_('memory', mem)
+ print_('cpu', '%s%% (user=%s, system=%s)' % (
+ pinfo['cpu_percent'],
+ getattr(pinfo['cpu_times'], 'user', '?'),
+ getattr(pinfo['cpu_times'], 'system', '?')))
+ print_('status', pinfo['status'])
+ print_('niceness', pinfo['nice'])
+ print_('num threads', pinfo['num_threads'])
+ if io != ACCESS_DENIED:
+ print_('I/O', 'bytes-read=%s, bytes-written=%s' % (
+ convert_bytes(io.read_bytes),
+ convert_bytes(io.write_bytes)))
+ if children:
+ print_('children', '')
+ for child in children:
+ print_('', 'pid=%s name=%s' % (child.pid, child.name()))
+
+ if pinfo['open_files'] != ACCESS_DENIED:
+ print_('open files', '')
+ for file in pinfo['open_files']:
+ print_('', 'fd=%s %s ' % (file.fd, file.path))
+
+ if pinfo['threads']:
+ print_('running threads', '')
+ for thread in pinfo['threads']:
+ print_('', 'id=%s, user-time=%s, sys-time=%s' % (
+ thread.id, thread.user_time, thread.system_time))
+ if pinfo['connections'] not in (ACCESS_DENIED, []):
+ print_('open connections', '')
+ for conn in pinfo['connections']:
+ if conn.type == socket.SOCK_STREAM:
+ type = 'TCP'
+ elif conn.type == socket.SOCK_DGRAM:
+ type = 'UDP'
+ else:
+ type = 'UNIX'
+ lip, lport = conn.laddr
+ if not conn.raddr:
+ rip, rport = '*', '*'
+ else:
+ rip, rport = conn.raddr
+ print_('', '%s:%s -> %s:%s type=%s status=%s' % (
+ lip, lport, rip, rport, type, conn.status))
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+ if len(argv) == 1:
+ sys.exit(run(os.getpid()))
+ elif len(argv) == 2:
+ sys.exit(run(int(argv[1])))
+ else:
+ sys.exit('usage: %s [pid]' % __file__)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/python/psutil/examples/ps.py b/python/psutil/examples/ps.py
new file mode 100644
index 000000000..2b67bd18f
--- /dev/null
+++ b/python/psutil/examples/ps.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'ps -aux' on UNIX.
+
+$ python examples/ps.py
+...
+"""
+
+import datetime
+import os
+import time
+
+import psutil
+
+
+def main():
+ today_day = datetime.date.today()
+ templ = "%-10s %5s %4s %4s %7s %7s %-13s %5s %7s %s"
+ attrs = ['pid', 'cpu_percent', 'memory_percent', 'name', 'cpu_times',
+ 'create_time', 'memory_info']
+ if os.name == 'posix':
+ attrs.append('uids')
+ attrs.append('terminal')
+ print(templ % ("USER", "PID", "%CPU", "%MEM", "VSZ", "RSS", "TTY",
+ "START", "TIME", "COMMAND"))
+ for p in psutil.process_iter():
+ try:
+ pinfo = p.as_dict(attrs, ad_value='')
+ except psutil.NoSuchProcess:
+ pass
+ else:
+ if pinfo['create_time']:
+ ctime = datetime.datetime.fromtimestamp(pinfo['create_time'])
+ if ctime.date() == today_day:
+ ctime = ctime.strftime("%H:%M")
+ else:
+ ctime = ctime.strftime("%b%d")
+ else:
+ ctime = ''
+ cputime = time.strftime("%M:%S",
+ time.localtime(sum(pinfo['cpu_times'])))
+ try:
+ user = p.username()
+ except KeyError:
+ if os.name == 'posix':
+ if pinfo['uids']:
+ user = str(pinfo['uids'].real)
+ else:
+ user = ''
+ else:
+ raise
+ except psutil.Error:
+ user = ''
+ if os.name == 'nt' and '\\' in user:
+ user = user.split('\\')[1]
+            # plain conditionals rather than the 'x and y or z' idiom,
+            # which wrongly falls back to '?' when y is 0
+            if pinfo['memory_info']:
+                vms = int(pinfo['memory_info'].vms / 1024)
+                rss = int(pinfo['memory_info'].rss / 1024)
+            else:
+                vms = rss = '?'
+            memp = (round(pinfo['memory_percent'], 1)
+                    if pinfo['memory_percent'] else '?')
+ print(templ % (
+ user[:10],
+ pinfo['pid'],
+ pinfo['cpu_percent'],
+ memp,
+ vms,
+ rss,
+ pinfo.get('terminal', '') or '?',
+ ctime,
+ cputime,
+ pinfo['name'].strip() or '?'))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/pstree.py b/python/psutil/examples/pstree.py
new file mode 100644
index 000000000..1bf8c9c04
--- /dev/null
+++ b/python/psutil/examples/pstree.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Similar to 'ps aux --forest' on Linux, prints the process list
+as a tree structure.
+
+$ python examples/pstree.py
+0 ?
+|- 1 init
+| |- 289 cgmanager
+| |- 616 upstart-socket-bridge
+| |- 628 rpcbind
+| |- 892 upstart-file-bridge
+| |- 907 dbus-daemon
+| |- 978 avahi-daemon
+| | `_ 979 avahi-daemon
+| |- 987 NetworkManager
+| | |- 2242 dnsmasq
+| | `_ 10699 dhclient
+| |- 993 polkitd
+| |- 1061 getty
+| |- 1066 su
+| | `_ 1190 salt-minion...
+...
+"""
+
+from __future__ import print_function
+import collections
+import sys
+
+import psutil
+
+
+def print_tree(parent, tree, indent=''):
+ try:
+ name = psutil.Process(parent).name()
+ except psutil.Error:
+ name = "?"
+ print(parent, name)
+ if parent not in tree:
+ return
+ children = tree[parent][:-1]
+ for child in children:
+ sys.stdout.write(indent + "|- ")
+ print_tree(child, tree, indent + "| ")
+ child = tree[parent][-1]
+ sys.stdout.write(indent + "`_ ")
+ print_tree(child, tree, indent + " ")
+
+
+def main():
+ # construct a dict where 'values' are all the processes
+ # having 'key' as their parent
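+    # e.g. (hypothetical PIDs):
+    #   tree == {0: [1], 1: [289, 616, 628], 616: [7312], ...}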
+ tree = collections.defaultdict(list)
+ for p in psutil.process_iter():
+ try:
+ tree[p.ppid()].append(p.pid)
+ except (psutil.NoSuchProcess, psutil.ZombieProcess):
+ pass
+ # on systems supporting PID 0, PID 0's parent is usually 0
+ if 0 in tree and 0 in tree[0]:
+ tree[0].remove(0)
+ print_tree(min(tree), tree)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/top.py b/python/psutil/examples/top.py
new file mode 100755
index 000000000..7aebef1d4
--- /dev/null
+++ b/python/psutil/examples/top.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of top / htop.
+
+Author: Giampaolo Rodola' <g.rodola@gmail.com>
+
+$ python examples/top.py
+ CPU0 [| ] 4.9%
+ CPU1 [||| ] 7.8%
+ CPU2 [ ] 2.0%
+ CPU3 [||||| ] 13.9%
+ Mem [||||||||||||||||||| ] 49.8% 4920M/9888M
+ Swap [ ] 0.0% 0M/0M
+ Processes: 287 (running=1 sleeping=286)
+ Load average: 0.34 0.54 0.46 Uptime: 3 days, 10:16:37
+
+PID USER NI VIRT RES CPU% MEM% TIME+ NAME
+------------------------------------------------------------
+989 giampaol 0 66M 12M 7.4 0.1 0:00.61 python
+2083 root 0 506M 159M 6.5 1.6 0:29.26 Xorg
+4503 giampaol 0 599M 25M 6.5 0.3 3:32.60 gnome-terminal
+3868 giampaol 0 358M 8M 2.8 0.1 23:12.60 pulseaudio
+3936 giampaol 0 1G 111M 2.8 1.1 33:41.67 compiz
+4401 giampaol 0 536M 141M 2.8 1.4 35:42.73 skype
+4047 giampaol 0 743M 76M 1.8 0.8 42:03.33 unity-panel-service
+13155 giampaol 0 1G 280M 1.8 2.8 41:57.34 chrome
+10 root 0 0B 0B 0.9 0.0 4:01.81 rcu_sched
+339 giampaol 0 1G 113M 0.9 1.1 8:15.73 chrome
+...
+"""
+
+from datetime import datetime, timedelta
+import atexit
+import os
+import time
+import sys
+try:
+ import curses
+except ImportError:
+ sys.exit('platform not supported')
+
+import psutil
+
+
+# --- curses stuff
+def tear_down():
+ win.keypad(0)
+ curses.nocbreak()
+ curses.echo()
+ curses.endwin()
+
+win = curses.initscr()
+atexit.register(tear_down)
+curses.endwin()
+lineno = 0
+
+
+def print_line(line, highlight=False):
+ """A thin wrapper around curses's addstr()."""
+ global lineno
+ try:
+ if highlight:
+ line += " " * (win.getmaxyx()[1] - len(line))
+ win.addstr(lineno, 0, line, curses.A_REVERSE)
+ else:
+ win.addstr(lineno, 0, line, 0)
+ except curses.error:
+ lineno = 0
+ win.refresh()
+ raise
+ else:
+ lineno += 1
+# --- /curses stuff
+
+
+def bytes2human(n):
+ """
+ >>> bytes2human(10000)
+ '9K'
+ >>> bytes2human(100001221)
+ '95M'
+ """
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i + 1) * 10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = int(float(n) / prefix[s])
+ return '%s%s' % (value, s)
+ return "%sB" % n
+
+
+def poll(interval):
+ # sleep some time
+ time.sleep(interval)
+ procs = []
+ procs_status = {}
+ for p in psutil.process_iter():
+ try:
+ p.dict = p.as_dict(['username', 'nice', 'memory_info',
+ 'memory_percent', 'cpu_percent',
+ 'cpu_times', 'name', 'status'])
+ try:
+ procs_status[p.dict['status']] += 1
+ except KeyError:
+ procs_status[p.dict['status']] = 1
+ except psutil.NoSuchProcess:
+ pass
+ else:
+ procs.append(p)
+
+ # return processes sorted by CPU percent usage
+ processes = sorted(procs, key=lambda p: p.dict['cpu_percent'],
+ reverse=True)
+ return (processes, procs_status)
+
+
+def print_header(procs_status, num_procs):
+ """Print system-related info, above the process list."""
+
+ def get_dashes(perc):
+ dashes = "|" * int((float(perc) / 10 * 4))
+ empty_dashes = " " * (40 - len(dashes))
+ return dashes, empty_dashes
+
+ # cpu usage
+ percs = psutil.cpu_percent(interval=0, percpu=True)
+ for cpu_num, perc in enumerate(percs):
+ dashes, empty_dashes = get_dashes(perc)
+ print_line(" CPU%-2s [%s%s] %5s%%" % (cpu_num, dashes, empty_dashes,
+ perc))
+ mem = psutil.virtual_memory()
+ dashes, empty_dashes = get_dashes(mem.percent)
+ used = mem.total - mem.available
+ line = " Mem [%s%s] %5s%% %6s/%s" % (
+ dashes, empty_dashes,
+ mem.percent,
+ str(int(used / 1024 / 1024)) + "M",
+ str(int(mem.total / 1024 / 1024)) + "M"
+ )
+ print_line(line)
+
+ # swap usage
+ swap = psutil.swap_memory()
+ dashes, empty_dashes = get_dashes(swap.percent)
+ line = " Swap [%s%s] %5s%% %6s/%s" % (
+ dashes, empty_dashes,
+ swap.percent,
+ str(int(swap.used / 1024 / 1024)) + "M",
+ str(int(swap.total / 1024 / 1024)) + "M"
+ )
+ print_line(line)
+
+ # processes number and status
+ st = []
+ for x, y in procs_status.items():
+ if y:
+ st.append("%s=%s" % (x, y))
+    st.sort(key=lambda x: x[:3] in ('run', 'sle'), reverse=True)
+ print_line(" Processes: %s (%s)" % (num_procs, ' '.join(st)))
+ # load average, uptime
+ uptime = datetime.now() - datetime.fromtimestamp(psutil.boot_time())
+ av1, av2, av3 = os.getloadavg()
+ line = " Load average: %.2f %.2f %.2f Uptime: %s" \
+ % (av1, av2, av3, str(uptime).split('.')[0])
+ print_line(line)
+
+
+def refresh_window(procs, procs_status):
+ """Print results on screen by using curses."""
+ curses.endwin()
+ templ = "%-6s %-8s %4s %5s %5s %6s %4s %9s %2s"
+ win.erase()
+ header = templ % ("PID", "USER", "NI", "VIRT", "RES", "CPU%", "MEM%",
+ "TIME+", "NAME")
+ print_header(procs_status, len(procs))
+ print_line("")
+ print_line(header, highlight=True)
+ for p in procs:
+ # TIME+ column shows process CPU cumulative time and it
+ # is expressed as: "mm:ss.ms"
+ if p.dict['cpu_times'] is not None:
+ ctime = timedelta(seconds=sum(p.dict['cpu_times']))
+ ctime = "%s:%s.%s" % (ctime.seconds // 60 % 60,
+ str((ctime.seconds % 60)).zfill(2),
+ str(ctime.microseconds)[:2])
+ else:
+ ctime = ''
+ if p.dict['memory_percent'] is not None:
+ p.dict['memory_percent'] = round(p.dict['memory_percent'], 1)
+ else:
+ p.dict['memory_percent'] = ''
+ if p.dict['cpu_percent'] is None:
+ p.dict['cpu_percent'] = ''
+ if p.dict['username']:
+ username = p.dict['username'][:8]
+ else:
+ username = ""
+ line = templ % (p.pid,
+ username,
+ p.dict['nice'],
+ bytes2human(getattr(p.dict['memory_info'], 'vms', 0)),
+ bytes2human(getattr(p.dict['memory_info'], 'rss', 0)),
+ p.dict['cpu_percent'],
+ p.dict['memory_percent'],
+ ctime,
+ p.dict['name'] or '',
+ )
+ try:
+ print_line(line)
+ except curses.error:
+ break
+ win.refresh()
+
+
+def main():
+ try:
+ interval = 0
+ while True:
+ args = poll(interval)
+ refresh_window(*args)
+ interval = 1
+ except (KeyboardInterrupt, SystemExit):
+ pass
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/examples/who.py b/python/psutil/examples/who.py
new file mode 100755
index 000000000..b382bebfa
--- /dev/null
+++ b/python/psutil/examples/who.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of 'who' command; print information about users who are
+currently logged in.
+
+$ python examples/who.py
+giampaolo tty7 2014-02-23 17:25 (:0)
+giampaolo pts/7 2014-02-24 18:25 (:192.168.1.56)
+giampaolo pts/8 2014-02-24 18:25 (:0)
+giampaolo pts/9 2014-02-27 01:32 (:0)
+"""
+
+from datetime import datetime
+
+import psutil
+
+
+def main():
+ users = psutil.users()
+ for user in users:
+ print("%-15s %-15s %s (%s)" % (
+ user.name,
+ user.terminal or '-',
+ datetime.fromtimestamp(user.started).strftime("%Y-%m-%d %H:%M"),
+ user.host))
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/make.bat b/python/psutil/make.bat
new file mode 100644
index 000000000..9c430101d
--- /dev/null
+++ b/python/psutil/make.bat
@@ -0,0 +1,201 @@
+@echo off
+
+rem ==========================================================================
+rem Shortcuts for various tasks, emulating UNIX "make" on Windows.
+rem It is primarily intended as a shortcut for compiling / installing
+rem psutil ("make.bat build", "make.bat install") and running tests
+rem ("make.bat test").
+rem
+rem This script is modeled after my Windows installation which uses:
+rem - Visual studio 2008 for Python 2.6, 2.7, 3.2
+rem - Visual studio 2010 for Python 3.3+
+rem ...therefore it might not work on your Windows installation.
+rem
+rem By default C:\Python27\python.exe is used.
+rem To compile for a specific Python version run:
+rem set PYTHON=C:\Python34\python.exe & make.bat build
+rem
+rem To use a different test script:
+rem set PYTHON=C:\Python34\python.exe & set TSCRIPT=foo.py & make.bat test
+rem ==========================================================================
+
+if "%PYTHON%" == "" (
+ set PYTHON=C:\Python27\python.exe
+)
+if "%TSCRIPT%" == "" (
+ set TSCRIPT=test\test_psutil.py
+)
+
+set PYTHON26=C:\Python26\python.exe
+set PYTHON27=C:\Python27\python.exe
+set PYTHON33=C:\Python33\python.exe
+set PYTHON34=C:\Python34\python.exe
+set PYTHON26-64=C:\Python26-64\python.exe
+set PYTHON27-64=C:\Python27-64\python.exe
+set PYTHON33-64=C:\Python33-64\python.exe
+set PYTHON34-64=C:\Python34-64\python.exe
+
+set ALL_PYTHONS=%PYTHON26% %PYTHON27% %PYTHON33% %PYTHON34% %PYTHON26-64% %PYTHON27-64% %PYTHON33-64% %PYTHON34-64%
+
+rem Needed to locate the .pypirc file and upload exes on PYPI.
+set HOME=%USERPROFILE%
+
+rem ==========================================================================
+
+if "%1" == "help" (
+ :help
+ echo Run `make ^<target^>` where ^<target^> is one of:
+ echo build compile without installing
+ echo build-all build exes + wheels
+ echo clean clean build files
+ echo flake8 run flake8
+ echo install compile and install
+ echo setup-dev-env install pip, pywin32, wheels, etc. for all python versions
+ echo test run tests
+ echo test-memleaks run memory leak tests
+ echo test-process run process related tests
+ echo test-system run system APIs related tests
+ echo uninstall uninstall
+ echo upload-all upload exes + wheels
+ goto :eof
+)
+
+if "%1" == "clean" (
+ for /r %%R in (__pycache__) do if exist %%R (rmdir /S /Q %%R)
+ for /r %%R in (*.pyc) do if exist %%R (del /s %%R)
+ for /r %%R in (*.pyd) do if exist %%R (del /s %%R)
+ for /r %%R in (*.orig) do if exist %%R (del /s %%R)
+ for /r %%R in (*.bak) do if exist %%R (del /s %%R)
+ for /r %%R in (*.rej) do if exist %%R (del /s %%R)
+ if exist psutil.egg-info (rmdir /S /Q psutil.egg-info)
+ if exist build (rmdir /S /Q build)
+ if exist dist (rmdir /S /Q dist)
+ goto :eof
+)
+
+if "%1" == "build" (
+ :build
+ "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat"
+ %PYTHON% setup.py build
+ if %errorlevel% neq 0 goto :error
+ rem copies *.pyd files in ./psutil directory in order to allow
+ rem "import psutil" when using the interactive interpreter from
+ rem within this directory.
+ %PYTHON% setup.py build_ext -i
+ if %errorlevel% neq 0 goto :error
+ goto :eof
+)
+
+if "%1" == "install" (
+ :install
+ call :build
+ %PYTHON% setup.py install
+ goto :eof
+)
+
+if "%1" == "uninstall" (
+ for %%A in ("%PYTHON%") do (
+ set folder=%%~dpA
+ )
+ for /F "delims=" %%i in ('dir /b %folder%\Lib\site-packages\*psutil*') do (
+ rmdir /S /Q %folder%\Lib\site-packages\%%i
+ )
+ goto :eof
+)
+
+if "%1" == "test" (
+ call :install
+ %PYTHON% %TSCRIPT%
+ goto :eof
+)
+
+if "%1" == "test-process" (
+ call :install
+ %PYTHON% -m unittest -v test.test_psutil.TestProcess
+ goto :eof
+)
+
+if "%1" == "test-system" (
+ call :install
+ %PYTHON% -m unittest -v test.test_psutil.TestSystem
+ goto :eof
+)
+
+if "%1" == "test-memleaks" (
+ call :install
+ %PYTHON% test\test_memory_leaks.py
+ goto :eof
+)
+
+if "%1" == "build-all" (
+ :build-all
+ "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat"
+ for %%P in (%ALL_PYTHONS%) do (
+ @echo ------------------------------------------------
+ @echo building exe for %%P
+ @echo ------------------------------------------------
+ %%P setup.py build bdist_wininst || goto :error
+ @echo ------------------------------------------------
+ @echo building wheel for %%P
+ @echo ------------------------------------------------
+ %%P setup.py build bdist_wheel || goto :error
+ )
+ echo OK
+ goto :eof
+)
+
+if "%1" == "upload-all" (
+ :upload-exes
+ "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat"
+ for %%P in (%ALL_PYTHONS%) do (
+ @echo ------------------------------------------------
+ @echo uploading exe for %%P
+ @echo ------------------------------------------------
+ %%P setup.py build bdist_wininst upload || goto :error
+ @echo ------------------------------------------------
+ @echo uploading wheel for %%P
+ @echo ------------------------------------------------
+ %%P setup.py build bdist_wheel upload || goto :error
+ )
+ echo OK
+ goto :eof
+)
+
+if "%1" == "setup-dev-env" (
+ :setup-env
+ @echo ------------------------------------------------
+ @echo downloading pip installer
+ @echo ------------------------------------------------
+ C:\python27\python.exe -c "import urllib2; r = urllib2.urlopen('https://raw.github.com/pypa/pip/master/contrib/get-pip.py'); open('get-pip.py', 'wb').write(r.read())"
+ for %%P in (%ALL_PYTHONS%) do (
+ @echo ------------------------------------------------
+ @echo installing pip for %%P
+ @echo ------------------------------------------------
+ %%P get-pip.py
+ )
+ for %%P in (%ALL_PYTHONS%) do (
+ @echo ------------------------------------------------
+ @echo installing deps for %%P
+ @echo ------------------------------------------------
+ rem mandatory / for unittests
+ %%P -m pip install unittest2 ipaddress mock wmi wheel pypiwin32 --upgrade
+ rem nice to have
+ %%P -m pip install ipdb pep8 pyflakes flake8 --upgrade
+ )
+ goto :eof
+)
+
+if "%1" == "flake8" (
+ :flake8
+ %PYTHON% -c "from flake8.main import main; main()"
+ goto :eof
+)
+
+goto :help
+
+:error
+ @echo ------------------------------------------------
+ @echo last command exited with error code %errorlevel%
+ @echo ------------------------------------------------
+ @exit /b %errorlevel%
+ goto :eof
diff --git a/python/psutil/psutil/__init__.py b/python/psutil/psutil/__init__.py
new file mode 100644
index 000000000..1444425b8
--- /dev/null
+++ b/python/psutil/psutil/__init__.py
@@ -0,0 +1,1887 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""psutil is a cross-platform library for retrieving information on
+running processes and system utilization (CPU, memory, disks, network)
+in Python.
+"""
+
+from __future__ import division
+
+import collections
+import errno
+import functools
+import os
+import signal
+import subprocess
+import sys
+import time
+try:
+ import pwd
+except ImportError:
+ pwd = None
+
+from . import _common
+from ._common import memoize
+from ._compat import callable, long
+from ._compat import PY3 as _PY3
+
+from ._common import (STATUS_RUNNING, # NOQA
+ STATUS_SLEEPING,
+ STATUS_DISK_SLEEP,
+ STATUS_STOPPED,
+ STATUS_TRACING_STOP,
+ STATUS_ZOMBIE,
+ STATUS_DEAD,
+ STATUS_WAKING,
+ STATUS_LOCKED,
+ STATUS_IDLE, # bsd
+ STATUS_WAITING) # bsd
+
+from ._common import (CONN_ESTABLISHED,
+ CONN_SYN_SENT,
+ CONN_SYN_RECV,
+ CONN_FIN_WAIT1,
+ CONN_FIN_WAIT2,
+ CONN_TIME_WAIT,
+ CONN_CLOSE,
+ CONN_CLOSE_WAIT,
+ CONN_LAST_ACK,
+ CONN_LISTEN,
+ CONN_CLOSING,
+ CONN_NONE)
+
+from ._common import (NIC_DUPLEX_FULL, # NOQA
+ NIC_DUPLEX_HALF,
+ NIC_DUPLEX_UNKNOWN)
+
+if sys.platform.startswith("linux"):
+ from . import _pslinux as _psplatform
+
+ from ._pslinux import (IOPRIO_CLASS_NONE, # NOQA
+ IOPRIO_CLASS_RT,
+ IOPRIO_CLASS_BE,
+ IOPRIO_CLASS_IDLE)
+ # Linux >= 2.6.36
+ if _psplatform.HAS_PRLIMIT:
+ from ._psutil_linux import (RLIM_INFINITY, # NOQA
+ RLIMIT_AS,
+ RLIMIT_CORE,
+ RLIMIT_CPU,
+ RLIMIT_DATA,
+ RLIMIT_FSIZE,
+ RLIMIT_LOCKS,
+ RLIMIT_MEMLOCK,
+ RLIMIT_NOFILE,
+ RLIMIT_NPROC,
+ RLIMIT_RSS,
+ RLIMIT_STACK)
+ # Kinda ugly but considerably faster than using hasattr() and
+ # setattr() against the module object (we are at import time:
+ # speed matters).
+ from . import _psutil_linux
+ try:
+ RLIMIT_MSGQUEUE = _psutil_linux.RLIMIT_MSGQUEUE
+ except AttributeError:
+ pass
+ try:
+ RLIMIT_NICE = _psutil_linux.RLIMIT_NICE
+ except AttributeError:
+ pass
+ try:
+ RLIMIT_RTPRIO = _psutil_linux.RLIMIT_RTPRIO
+ except AttributeError:
+ pass
+ try:
+ RLIMIT_RTTIME = _psutil_linux.RLIMIT_RTTIME
+ except AttributeError:
+ pass
+ try:
+ RLIMIT_SIGPENDING = _psutil_linux.RLIMIT_SIGPENDING
+ except AttributeError:
+ pass
+ del _psutil_linux
+
+elif sys.platform.startswith("win32"):
+ from . import _pswindows as _psplatform
+ from ._psutil_windows import (ABOVE_NORMAL_PRIORITY_CLASS, # NOQA
+ BELOW_NORMAL_PRIORITY_CLASS,
+ HIGH_PRIORITY_CLASS,
+ IDLE_PRIORITY_CLASS,
+ NORMAL_PRIORITY_CLASS,
+ REALTIME_PRIORITY_CLASS)
+ from ._pswindows import CONN_DELETE_TCB # NOQA
+
+elif sys.platform.startswith("darwin"):
+ from . import _psosx as _psplatform
+
+elif sys.platform.startswith("freebsd"):
+ from . import _psbsd as _psplatform
+
+elif sys.platform.startswith("sunos"):
+ from . import _pssunos as _psplatform
+ from ._pssunos import (CONN_IDLE, # NOQA
+ CONN_BOUND)
+
+else: # pragma: no cover
+ raise NotImplementedError('platform %s is not supported' % sys.platform)
+
+
+__all__ = [
+ # exceptions
+ "Error", "NoSuchProcess", "ZombieProcess", "AccessDenied",
+ "TimeoutExpired",
+ # constants
+ "version_info", "__version__",
+ "STATUS_RUNNING", "STATUS_IDLE", "STATUS_SLEEPING", "STATUS_DISK_SLEEP",
+ "STATUS_STOPPED", "STATUS_TRACING_STOP", "STATUS_ZOMBIE", "STATUS_DEAD",
+ "STATUS_WAKING", "STATUS_LOCKED", "STATUS_WAITING", "STATUS_LOCKED",
+ "CONN_ESTABLISHED", "CONN_SYN_SENT", "CONN_SYN_RECV", "CONN_FIN_WAIT1",
+ "CONN_FIN_WAIT2", "CONN_TIME_WAIT", "CONN_CLOSE", "CONN_CLOSE_WAIT",
+ "CONN_LAST_ACK", "CONN_LISTEN", "CONN_CLOSING", "CONN_NONE",
+ "AF_LINK",
+ "NIC_DUPLEX_FULL", "NIC_DUPLEX_HALF", "NIC_DUPLEX_UNKNOWN",
+ # classes
+ "Process", "Popen",
+ # functions
+ "pid_exists", "pids", "process_iter", "wait_procs", # proc
+ "virtual_memory", "swap_memory", # memory
+ "cpu_times", "cpu_percent", "cpu_times_percent", "cpu_count", # cpu
+ "net_io_counters", "net_connections", "net_if_addrs", # network
+ "net_if_stats",
+ "disk_io_counters", "disk_partitions", "disk_usage", # disk
+ "users", "boot_time", # others
+]
+__all__.extend(_psplatform.__extra__all__)
+__author__ = "Giampaolo Rodola'"
+__version__ = "3.1.1"
+version_info = tuple([int(num) for num in __version__.split('.')])
+AF_LINK = _psplatform.AF_LINK
+_TOTAL_PHYMEM = None
+_POSIX = os.name == 'posix'
+_WINDOWS = os.name == 'nt'
+_timer = getattr(time, 'monotonic', time.time)
+
+
+# Sanity check in case the user messed up with psutil installation
+# or did something weird with sys.path. In this case we might end
+# up importing a python module using a C extension module which
+# was compiled for a different version of psutil.
+# We want to prevent that by failing sooner rather than later.
+# See: https://github.com/giampaolo/psutil/issues/564
+if (int(__version__.replace('.', '')) !=
+ getattr(_psplatform.cext, 'version', None)):
+ msg = "version conflict: %r C extension module was built for another " \
+ "version of psutil (different than %s)" % (_psplatform.cext.__file__,
+ __version__)
+ raise ImportError(msg)
+
+
+# =====================================================================
+# --- exceptions
+# =====================================================================
+
+class Error(Exception):
+ """Base exception class. All other psutil exceptions inherit
+ from this one.
+ """
+
+ def __init__(self, msg=""):
+ self.msg = msg
+
+ def __repr__(self):
+ ret = "%s.%s %s" % (self.__class__.__module__,
+ self.__class__.__name__, self.msg)
+ return ret.strip()
+
+ __str__ = __repr__
+
+
+class NoSuchProcess(Error):
+ """Exception raised when a process with a certain PID doesn't
+    exist or no longer exists.
+ """
+
+ def __init__(self, pid, name=None, msg=None):
+ Error.__init__(self, msg)
+ self.pid = pid
+ self.name = name
+ self.msg = msg
+ if msg is None:
+ if name:
+ details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
+ else:
+ details = "(pid=%s)" % self.pid
+ self.msg = "process no longer exists " + details
+
+
+class ZombieProcess(NoSuchProcess):
+ """Exception raised when querying a zombie process. This is
+ raised on OSX, BSD and Solaris only, and not always: depending
+ on the query the OS may be able to succeed anyway.
+    On Linux all zombie processes are queryable (hence this is never
+ raised). Windows doesn't have zombie processes.
+ """
+
+ def __init__(self, pid, name=None, ppid=None, msg=None):
+ Error.__init__(self, msg)
+ self.pid = pid
+ self.ppid = ppid
+ self.name = name
+ self.msg = msg
+ if msg is None:
+ if name and ppid:
+ details = "(pid=%s, name=%s, ppid=%s)" % (
+ self.pid, repr(self.name), self.ppid)
+ elif name:
+ details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
+ else:
+ details = "(pid=%s)" % self.pid
+ self.msg = "process still exists but it's a zombie " + details
+
+
+class AccessDenied(Error):
+ """Exception raised when permission to perform an action is denied."""
+
+ def __init__(self, pid=None, name=None, msg=None):
+ Error.__init__(self, msg)
+ self.pid = pid
+ self.name = name
+ self.msg = msg
+ if msg is None:
+ if (pid is not None) and (name is not None):
+ self.msg = "(pid=%s, name=%s)" % (pid, repr(name))
+ elif (pid is not None):
+ self.msg = "(pid=%s)" % self.pid
+ else:
+ self.msg = ""
+
+
+class TimeoutExpired(Error):
+ """Raised on Process.wait(timeout) if timeout expires and process
+ is still alive.
+ """
+
+ def __init__(self, seconds, pid=None, name=None):
+ Error.__init__(self, "timeout after %s seconds" % seconds)
+ self.seconds = seconds
+ self.pid = pid
+ self.name = name
+ if (pid is not None) and (name is not None):
+ self.msg += " (pid=%s, name=%s)" % (pid, repr(name))
+ elif (pid is not None):
+ self.msg += " (pid=%s)" % self.pid
+
+
+# push exception classes into platform specific module namespace
+_psplatform.NoSuchProcess = NoSuchProcess
+_psplatform.ZombieProcess = ZombieProcess
+_psplatform.AccessDenied = AccessDenied
+_psplatform.TimeoutExpired = TimeoutExpired
+
+
+# =====================================================================
+# --- Process class
+# =====================================================================
+
+
+def _assert_pid_not_reused(fun):
+ """Decorator which raises NoSuchProcess in case a process is no
+ longer running or its PID has been reused.
+ """
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ if not self.is_running():
+ raise NoSuchProcess(self.pid, self._name)
+ return fun(self, *args, **kwargs)
+ return wrapper
+
+
+class Process(object):
+ """Represents an OS process with the given PID.
+ If PID is omitted current process PID (os.getpid()) is used.
+ Raise NoSuchProcess if PID does not exist.
+
+    Note that most of the methods of this class do not make sure
+    that the PID of the process being queried has not been reused.
+    That means you might end up retrieving information referring
+    to another process in case the original one this instance
+    refers to is gone in the meantime.
+
+ The only exceptions for which process identity is pre-emptively
+ checked and guaranteed are:
+
+ - parent()
+ - children()
+ - nice() (set)
+ - ionice() (set)
+ - rlimit() (set)
+ - cpu_affinity (set)
+ - suspend()
+ - resume()
+ - send_signal()
+ - terminate()
+ - kill()
+
+ To prevent this problem for all other methods you can:
+ - use is_running() before querying the process
+ - if you're continuously iterating over a set of Process
+ instances use process_iter() which pre-emptively checks
+ process identity for every yielded instance
+ """
+
+ def __init__(self, pid=None):
+ self._init(pid)
+
+ def _init(self, pid, _ignore_nsp=False):
+ if pid is None:
+ pid = os.getpid()
+ else:
+ if not _PY3 and not isinstance(pid, (int, long)):
+ raise TypeError('pid must be an integer (got %r)' % pid)
+ if pid < 0:
+ raise ValueError('pid must be a positive integer (got %s)'
+ % pid)
+ self._pid = pid
+ self._name = None
+ self._exe = None
+ self._create_time = None
+ self._gone = False
+ self._hash = None
+ # used for caching on Windows only (on POSIX ppid may change)
+ self._ppid = None
+ # platform-specific modules define an _psplatform.Process
+ # implementation class
+ self._proc = _psplatform.Process(pid)
+ self._last_sys_cpu_times = None
+ self._last_proc_cpu_times = None
+ # cache creation time for later use in is_running() method
+ try:
+ self.create_time()
+ except AccessDenied:
+ # we should never get here as AFAIK we're able to get
+ # process creation time on all platforms even as a
+ # limited user
+ pass
+ except ZombieProcess:
+ # Let's consider a zombie process as legitimate as
+            # technically it's still alive (it can be queried,
+ # although not always, and it's returned by pids()).
+ pass
+ except NoSuchProcess:
+ if not _ignore_nsp:
+ msg = 'no process found with pid %s' % pid
+ raise NoSuchProcess(pid, None, msg)
+ else:
+ self._gone = True
+        # This pair is supposed to identify a Process instance
+        # unambiguously over time (the PID alone is not enough as
+ # it might refer to a process whose PID has been reused).
+ # This will be used later in __eq__() and is_running().
+ self._ident = (self.pid, self._create_time)
+
+ def __str__(self):
+ try:
+ pid = self.pid
+ name = repr(self.name())
+ except ZombieProcess:
+ details = "(pid=%s (zombie))" % self.pid
+ except NoSuchProcess:
+ details = "(pid=%s (terminated))" % self.pid
+ except AccessDenied:
+ details = "(pid=%s)" % (self.pid)
+ else:
+ details = "(pid=%s, name=%s)" % (pid, name)
+ return "%s.%s%s" % (self.__class__.__module__,
+ self.__class__.__name__, details)
+
+ def __repr__(self):
+ return "<%s at %s>" % (self.__str__(), id(self))
+
+ def __eq__(self, other):
+ # Test for equality with another Process object based
+ # on PID and creation time.
+ if not isinstance(other, Process):
+ return NotImplemented
+ return self._ident == other._ident
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ if self._hash is None:
+ self._hash = hash(self._ident)
+ return self._hash
+
+ # --- utility methods
+
+ def as_dict(self, attrs=None, ad_value=None):
+ """Utility method returning process information as a
+ hashable dictionary.
+
+ If 'attrs' is specified it must be a list of strings
+ reflecting available Process class' attribute names
+ (e.g. ['cpu_times', 'name']) else all public (read
+ only) attributes are assumed.
+
+ 'ad_value' is the value which gets assigned in case
+ AccessDenied or ZombieProcess exception is raised when
+ retrieving that particular process information.
+ """
+ excluded_names = set(
+ ['send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait',
+ 'is_running', 'as_dict', 'parent', 'children', 'rlimit'])
+ retdict = dict()
+ ls = set(attrs or [x for x in dir(self)])
+ for name in ls:
+ if name.startswith('_'):
+ continue
+ if name in excluded_names:
+ continue
+ try:
+ attr = getattr(self, name)
+ if callable(attr):
+ ret = attr()
+ else:
+ ret = attr
+ except (AccessDenied, ZombieProcess):
+ ret = ad_value
+ except NotImplementedError:
+ # in case of not implemented functionality (may happen
+ # on old or exotic systems) we want to crash only if
+ # the user explicitly asked for that particular attr
+ if attrs:
+ raise
+ continue
+ retdict[name] = ret
+ return retdict
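+
+    # Illustrative usage sketch (PID/name values are hypothetical):
+    #
+    # >>> import psutil
+    # >>> psutil.Process().as_dict(attrs=['pid', 'name', 'status'])
+    # {'pid': 4903, 'name': 'python', 'status': 'running'}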
+
+ def parent(self):
+ """Return the parent process as a Process object pre-emptively
+ checking whether PID has been reused.
+ If no parent is known return None.
+ """
+ ppid = self.ppid()
+ if ppid is not None:
+ ctime = self.create_time()
+ try:
+ parent = Process(ppid)
+ if parent.create_time() <= ctime:
+ return parent
+ # ...else ppid has been reused by another process
+ except NoSuchProcess:
+ pass
+
+ def is_running(self):
+ """Return whether this process is running.
+ It also checks if PID has been reused by another process in
+ which case return False.
+ """
+ if self._gone:
+ return False
+ try:
+ # Checking if PID is alive is not enough as the PID might
+ # have been reused by another process: we also want to
+ # check process identity.
+            # Process identity / uniqueness over time is guaranteed by
+ # (PID + creation time) and that is verified in __eq__.
+ return self == Process(self.pid)
+ except NoSuchProcess:
+ self._gone = True
+ return False
+
+ # --- actual API
+
+ @property
+ def pid(self):
+ """The process PID."""
+ return self._pid
+
+ def ppid(self):
+ """The process parent PID.
+ On Windows the return value is cached after first call.
+ """
+ # On POSIX we don't want to cache the ppid as it may unexpectedly
+ # change to 1 (init) in case this process turns into a zombie:
+ # https://github.com/giampaolo/psutil/issues/321
+ # http://stackoverflow.com/questions/356722/
+
+ # XXX should we check creation time here rather than in
+ # Process.parent()?
+ if _POSIX:
+ return self._proc.ppid()
+ else:
+ self._ppid = self._ppid or self._proc.ppid()
+ return self._ppid
+
+ def name(self):
+ """The process name. The return value is cached after first call."""
+ if self._name is None:
+ name = self._proc.name()
+ if _POSIX and len(name) >= 15:
+ # On UNIX the name gets truncated to the first 15 characters.
+ # If it matches the first part of the cmdline we return that
+                # one instead because it's usually more descriptive.
+ # Examples are "gnome-keyring-d" vs. "gnome-keyring-daemon".
+ try:
+ cmdline = self.cmdline()
+ except AccessDenied:
+ pass
+ else:
+ if cmdline:
+ extended_name = os.path.basename(cmdline[0])
+ if extended_name.startswith(name):
+ name = extended_name
+ self._proc._name = name
+ self._name = name
+ return self._name
+
+ def exe(self):
+ """The process executable as an absolute path.
+ May also be an empty string.
+ The return value is cached after first call.
+ """
+ def guess_it(fallback):
+ # try to guess exe from cmdline[0] in absence of a native
+ # exe representation
+ cmdline = self.cmdline()
+ if cmdline and hasattr(os, 'access') and hasattr(os, 'X_OK'):
+ exe = cmdline[0] # the possible exe
+ # Attempt to guess only in case of an absolute path.
+ # It is not safe otherwise as the process might have
+ # changed cwd.
+ if (os.path.isabs(exe) and
+ os.path.isfile(exe) and
+ os.access(exe, os.X_OK)):
+ return exe
+ if isinstance(fallback, AccessDenied):
+ raise fallback
+ return fallback
+
+ if self._exe is None:
+ try:
+ exe = self._proc.exe()
+ except AccessDenied as err:
+ return guess_it(fallback=err)
+ else:
+ if not exe:
+ # underlying implementation can legitimately return an
+ # empty string; if that's the case we don't want to
+ # raise AD while guessing from the cmdline
+ try:
+ exe = guess_it(fallback=exe)
+ except AccessDenied:
+ pass
+ self._exe = exe
+ return self._exe
+
+ def cmdline(self):
+ """The command line this process has been called with."""
+ return self._proc.cmdline()
+
+ def status(self):
+ """The process current status as a STATUS_* constant."""
+ try:
+ return self._proc.status()
+ except ZombieProcess:
+ return STATUS_ZOMBIE
+
+ def username(self):
+ """The name of the user that owns the process.
+ On UNIX this is calculated by using *real* process uid.
+ """
+ if _POSIX:
+ if pwd is None:
+ # might happen if python was installed from sources
+ raise ImportError(
+ "requires pwd module shipped with standard python")
+ real_uid = self.uids().real
+ try:
+ return pwd.getpwuid(real_uid).pw_name
+ except KeyError:
+ # the uid can't be resolved by the system
+ return str(real_uid)
+ else:
+ return self._proc.username()
+
+ def create_time(self):
+ """The process creation time as a floating point number
+ expressed in seconds since the epoch, in UTC.
+ The return value is cached after first call.
+ """
+ if self._create_time is None:
+ self._create_time = self._proc.create_time()
+ return self._create_time
+
+ def cwd(self):
+ """Process current working directory as an absolute path."""
+ return self._proc.cwd()
+
+ def nice(self, value=None):
+ """Get or set process niceness (priority)."""
+ if value is None:
+ return self._proc.nice_get()
+ else:
+ if not self.is_running():
+ raise NoSuchProcess(self.pid, self._name)
+ self._proc.nice_set(value)
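+
+    # Illustrative usage sketch (niceness values are POSIX examples):
+    #
+    # >>> p = psutil.Process()
+    # >>> p.nice()      # get
+    # 0
+    # >>> p.nice(10)    # set (lower priority)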
+
+ if _POSIX:
+
+ def uids(self):
+ """Return process UIDs as a (real, effective, saved)
+ namedtuple.
+ """
+ return self._proc.uids()
+
+ def gids(self):
+ """Return process GIDs as a (real, effective, saved)
+ namedtuple.
+ """
+ return self._proc.gids()
+
+ def terminal(self):
+ """The terminal associated with this process, if any,
+ else None.
+ """
+ return self._proc.terminal()
+
+ def num_fds(self):
+ """Return the number of file descriptors opened by this
+ process (POSIX only).
+ """
+ return self._proc.num_fds()
+
+ # Linux, BSD and Windows only
+ if hasattr(_psplatform.Process, "io_counters"):
+
+ def io_counters(self):
+ """Return process I/O statistics as a
+ (read_count, write_count, read_bytes, write_bytes)
+ namedtuple.
+ Those are the number of read/write calls performed and the
+ amount of bytes read and written by the process.
+ """
+ return self._proc.io_counters()
+
+ # Linux and Windows >= Vista only
+ if hasattr(_psplatform.Process, "ionice_get"):
+
+ def ionice(self, ioclass=None, value=None):
+ """Get or set process I/O niceness (priority).
+
+ On Linux 'ioclass' is one of the IOPRIO_CLASS_* constants.
+ 'value' is a number which goes from 0 to 7. The higher the
+ value, the lower the I/O priority of the process.
+
+ On Windows only 'ioclass' is used and it can be set to 2
+ (normal), 1 (low) or 0 (very low).
+
+ Available on Linux and Windows > Vista only.
+ """
+ if ioclass is None:
+ if value is not None:
+ raise ValueError("'ioclass' argument must be specified")
+ return self._proc.ionice_get()
+ else:
+ return self._proc.ionice_set(ioclass, value)
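+
+        # Illustrative usage sketch (Linux, using the IOPRIO_CLASS_*
+        # constants exported above; returned values are examples):
+        #
+        # >>> p = psutil.Process()
+        # >>> p.ionice(psutil.IOPRIO_CLASS_IDLE)  # set
+        # >>> p.ionice()                          # get
+        # pionice(ioclass=3, value=0)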
+
+ # Linux only
+ if hasattr(_psplatform.Process, "rlimit"):
+
+ def rlimit(self, resource, limits=None):
+ """Get or set process resource limits as a (soft, hard)
+ tuple.
+
+ 'resource' is one of the RLIMIT_* constants.
+ 'limits' is supposed to be a (soft, hard) tuple.
+
+ See "man prlimit" for further info.
+ Available on Linux only.
+ """
+ if limits is None:
+ return self._proc.rlimit(resource)
+ else:
+ return self._proc.rlimit(resource, limits)
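+
+        # Illustrative usage sketch (Linux; limit numbers are examples):
+        #
+        # >>> p = psutil.Process()
+        # >>> p.rlimit(psutil.RLIMIT_NOFILE, (128, 128))  # set (soft, hard)
+        # >>> p.rlimit(psutil.RLIMIT_NOFILE)              # get
+        # (128, 128)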
+
+ # Windows, Linux and BSD only
+ if hasattr(_psplatform.Process, "cpu_affinity_get"):
+
+ def cpu_affinity(self, cpus=None):
+ """Get or set process CPU affinity.
+ If specified 'cpus' must be a list of CPUs for which you
+ want to set the affinity (e.g. [0, 1]).
+ (Windows, Linux and BSD only).
+ """
+ # Automatically remove duplicates both on get and
+ # set (for get it's not really necessary, it's
+ # just for extra safety).
+ if cpus is None:
+ return list(set(self._proc.cpu_affinity_get()))
+ else:
+ self._proc.cpu_affinity_set(list(set(cpus)))
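+
+        # Illustrative usage sketch (CPU numbers depend on the machine):
+        #
+        # >>> p = psutil.Process()
+        # >>> p.cpu_affinity()        # get
+        # [0, 1, 2, 3]
+        # >>> p.cpu_affinity([0, 1])  # restrict to the first two CPUs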
+
+ if _WINDOWS:
+
+ def num_handles(self):
+ """Return the number of handles opened by this process
+ (Windows only).
+ """
+ return self._proc.num_handles()
+
+ def num_ctx_switches(self):
+ """Return the number of voluntary and involuntary context
+ switches performed by this process.
+ """
+ return self._proc.num_ctx_switches()
+
+ def num_threads(self):
+ """Return the number of threads used by this process."""
+ return self._proc.num_threads()
+
+ def threads(self):
+ """Return threads opened by process as a list of
+ (id, user_time, system_time) namedtuples representing
+ thread id and thread CPU times (user/system).
+ """
+ return self._proc.threads()
+
+ @_assert_pid_not_reused
+ def children(self, recursive=False):
+ """Return the children of this process as a list of Process
+ instances, pre-emptively checking whether PID has been reused.
+        If recursive is True return all descendants (children,
+        grandchildren and so on).
+
+ Example (A == this process):
+
+         A ─┐
+            │
+            ├─ B (child) ─┐
+            │             └─ X (grandchild) ─┐
+            │                                └─ Y (great grandchild)
+            ├─ C (child)
+            └─ D (child)
+
+ >>> import psutil
+ >>> p = psutil.Process()
+ >>> p.children()
+ B, C, D
+ >>> p.children(recursive=True)
+ B, X, Y, C, D
+
+ Note that in the example above if process X disappears
+ process Y won't be listed as the reference to process A
+ is lost.
+ """
+ if hasattr(_psplatform, 'ppid_map'):
+ # Windows only: obtain a {pid:ppid, ...} dict for all running
+ # processes in one shot (faster).
+ ppid_map = _psplatform.ppid_map()
+ else:
+ ppid_map = None
+
+ ret = []
+ if not recursive:
+ if ppid_map is None:
+ # 'slow' version, common to all platforms except Windows
+ for p in process_iter():
+ try:
+ if p.ppid() == self.pid:
+ # if child happens to be older than its parent
+ # (self) it means child's PID has been reused
+ if self.create_time() <= p.create_time():
+ ret.append(p)
+ except (NoSuchProcess, ZombieProcess):
+ pass
+ else:
+ # Windows only (faster)
+ for pid, ppid in ppid_map.items():
+ if ppid == self.pid:
+ try:
+ child = Process(pid)
+ # if child happens to be older than its parent
+ # (self) it means child's PID has been reused
+ if self.create_time() <= child.create_time():
+ ret.append(child)
+ except (NoSuchProcess, ZombieProcess):
+ pass
+ else:
+ # construct a dict where 'values' are all the processes
+ # having 'key' as their parent
+ table = collections.defaultdict(list)
+ if ppid_map is None:
+ for p in process_iter():
+ try:
+ table[p.ppid()].append(p)
+ except (NoSuchProcess, ZombieProcess):
+ pass
+ else:
+ for pid, ppid in ppid_map.items():
+ try:
+ p = Process(pid)
+ table[ppid].append(p)
+ except (NoSuchProcess, ZombieProcess):
+ pass
+ # At this point we have a mapping table where table[self.pid]
+ # are the current process' children.
+ # Below, we look for all descendants recursively, similarly
+ # to a recursive function call.
+ checkpids = [self.pid]
+ for pid in checkpids:
+ for child in table[pid]:
+ try:
+ # if child happens to be older than its parent
+ # (self) it means child's PID has been reused
+ intime = self.create_time() <= child.create_time()
+ except (NoSuchProcess, ZombieProcess):
+ pass
+ else:
+ if intime:
+ ret.append(child)
+ if child.pid not in checkpids:
+ checkpids.append(child.pid)
+ return ret
+
+ def cpu_percent(self, interval=None):
+ """Return a float representing the current process CPU
+ utilization as a percentage.
+
+ When interval is 0.0 or None (default) compares process times
+ to system CPU times elapsed since last call, returning
+ immediately (non-blocking). That means that the first time
+        this is called it will return a meaningless 0.0 value.
+
+ When interval is > 0.0 compares process times to system CPU
+ times elapsed before and after the interval (blocking).
+
+        In this case it is recommended for accuracy that this function
+ be called with at least 0.1 seconds between calls.
+
+ Examples:
+
+ >>> import psutil
+ >>> p = psutil.Process(os.getpid())
+ >>> # blocking
+ >>> p.cpu_percent(interval=1)
+ 2.0
+ >>> # non-blocking (percentage since last call)
+ >>> p.cpu_percent(interval=None)
+ 2.9
+ >>>
+ """
+ blocking = interval is not None and interval > 0.0
+ num_cpus = cpu_count()
+ if _POSIX:
+ def timer():
+ return _timer() * num_cpus
+ else:
+ def timer():
+ return sum(cpu_times())
+ if blocking:
+ st1 = timer()
+ pt1 = self._proc.cpu_times()
+ time.sleep(interval)
+ st2 = timer()
+ pt2 = self._proc.cpu_times()
+ else:
+ st1 = self._last_sys_cpu_times
+ pt1 = self._last_proc_cpu_times
+ st2 = timer()
+ pt2 = self._proc.cpu_times()
+ if st1 is None or pt1 is None:
+ self._last_sys_cpu_times = st2
+ self._last_proc_cpu_times = pt2
+ return 0.0
+
+ delta_proc = (pt2.user - pt1.user) + (pt2.system - pt1.system)
+ delta_time = st2 - st1
+ # reset values for next call in case of interval == None
+ self._last_sys_cpu_times = st2
+ self._last_proc_cpu_times = pt2
+
+ try:
+ # The utilization split between all CPUs.
+ # Note: a percentage > 100 is legitimate as it can result
+ # from a process with multiple threads running on different
+ # CPU cores, see:
+ # http://stackoverflow.com/questions/1032357
+ # https://github.com/giampaolo/psutil/issues/474
+ overall_percent = ((delta_proc / delta_time) * 100) * num_cpus
+ except ZeroDivisionError:
+ # interval was too low
+ return 0.0
+ else:
+ return round(overall_percent, 1)
+
+ def cpu_times(self):
+ """Return a (user, system) namedtuple representing the
+ accumulated process time, in seconds.
+ This is the same as os.times() but per-process.
+ """
+ return self._proc.cpu_times()
+
+ def memory_info(self):
+ """Return a tuple representing RSS (Resident Set Size) and VMS
+ (Virtual Memory Size) in bytes.
+
+ On UNIX RSS and VMS are the same values shown by 'ps'.
+
+ On Windows RSS and VMS refer to "Mem Usage" and "VM Size"
+ columns of taskmgr.exe.
+ """
+ return self._proc.memory_info()
+
+ def memory_info_ex(self):
+ """Return a namedtuple with variable fields depending on the
+ platform representing extended memory information about
+ this process. All numbers are expressed in bytes.
+ """
+ return self._proc.memory_info_ex()
+
+ def memory_percent(self):
+ """Compare physical system memory to process resident memory
+ (RSS) and calculate process memory utilization as a percentage.
+ """
+ rss = self._proc.memory_info()[0]
+ # use cached value if available
+ total_phymem = _TOTAL_PHYMEM or virtual_memory().total
+ try:
+ return (rss / float(total_phymem)) * 100
+ except ZeroDivisionError:
+ return 0.0
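+
+    # Worked example of the formula above (hypothetical numbers):
+    # a process with an rss of 200 MB on a machine with 8192 MB of
+    # physical memory yields (200 / 8192) * 100 ~= 2.4%.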
+
+ def memory_maps(self, grouped=True):
+ """Return process' mapped memory regions as a list of namedtuples
+ whose fields are variable depending on the platform.
+
+ If 'grouped' is True the mapped regions with the same 'path'
+ are grouped together and the different memory fields are summed.
+
+ If 'grouped' is False every mapped region is shown as a single
+ entity and the namedtuple will also include the mapped region's
+ address space ('addr') and permission set ('perms').
+ """
+ it = self._proc.memory_maps()
+ if grouped:
+ d = {}
+ for tupl in it:
+ path = tupl[2]
+ nums = tupl[3:]
+ try:
+ d[path] = map(lambda x, y: x + y, d[path], nums)
+ except KeyError:
+ d[path] = nums
+ nt = _psplatform.pmmap_grouped
+ return [nt(path, *d[path]) for path in d] # NOQA
+ else:
+ nt = _psplatform.pmmap_ext
+ return [nt(*x) for x in it]
+
+ def open_files(self):
+ """Return files opened by process as a list of
+ (path, fd) namedtuples including the absolute file name
+ and file descriptor number.
+ """
+ return self._proc.open_files()
+
+ def connections(self, kind='inet'):
+ """Return connections opened by process as a list of
+ (fd, family, type, laddr, raddr, status) namedtuples.
+ The 'kind' parameter filters for connections that match the
+ following criteria:
+
+ Kind Value Connections using
+ inet IPv4 and IPv6
+ inet4 IPv4
+ inet6 IPv6
+ tcp TCP
+ tcp4 TCP over IPv4
+ tcp6 TCP over IPv6
+ udp UDP
+ udp4 UDP over IPv4
+ udp6 UDP over IPv6
+ unix UNIX socket (both UDP and TCP protocols)
+ all the sum of all the possible families and protocols
+ """
+ return self._proc.connections(kind)
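+
+    # Illustrative usage sketch (fd and address values are hypothetical):
+    #
+    # >>> p = psutil.Process()
+    # >>> p.connections(kind='tcp')
+    # [pconn(fd=10, family=2, type=1, laddr=('127.0.0.1', 51314),
+    #        raddr=('72.14.234.100', 80), status='ESTABLISHED')]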
+
+ if _POSIX:
+ def _send_signal(self, sig):
+ if self.pid == 0:
+ # see "man 2 kill"
+ raise ValueError(
+ "preventing sending signal to process with PID 0 as it "
+ "would affect every process in the process group of the "
+ "calling process (os.getpid()) instead of PID 0")
+ try:
+ os.kill(self.pid, sig)
+ except OSError as err:
+ if err.errno == errno.ESRCH:
+ self._gone = True
+ raise NoSuchProcess(self.pid, self._name)
+ if err.errno == errno.EPERM:
+ raise AccessDenied(self.pid, self._name)
+ raise
+
+ @_assert_pid_not_reused
+ def send_signal(self, sig):
+ """Send a signal to process pre-emptively checking whether
+        PID has been reused (see signal module constants).
+ On Windows only SIGTERM is valid and is treated as an alias
+ for kill().
+ """
+ if _POSIX:
+ self._send_signal(sig)
+ else:
+ if sig == signal.SIGTERM:
+ self._proc.kill()
+ else:
+ raise ValueError("only SIGTERM is supported on Windows")
+
+ @_assert_pid_not_reused
+ def suspend(self):
+ """Suspend process execution with SIGSTOP pre-emptively checking
+ whether PID has been reused.
+        On Windows this has the effect of suspending all process threads.
+ """
+ if _POSIX:
+ self._send_signal(signal.SIGSTOP)
+ else:
+ self._proc.suspend()
+
+ @_assert_pid_not_reused
+ def resume(self):
+ """Resume process execution with SIGCONT pre-emptively checking
+ whether PID has been reused.
+ On Windows this has the effect of resuming all process threads.
+ """
+ if _POSIX:
+ self._send_signal(signal.SIGCONT)
+ else:
+ self._proc.resume()
+
+ @_assert_pid_not_reused
+ def terminate(self):
+ """Terminate the process with SIGTERM pre-emptively checking
+ whether PID has been reused.
+ On Windows this is an alias for kill().
+ """
+ if _POSIX:
+ self._send_signal(signal.SIGTERM)
+ else:
+ self._proc.kill()
+
+ @_assert_pid_not_reused
+ def kill(self):
+ """Kill the current process with SIGKILL pre-emptively checking
+ whether PID has been reused.
+ """
+ if _POSIX:
+ self._send_signal(signal.SIGKILL)
+ else:
+ self._proc.kill()
+
+ def wait(self, timeout=None):
+ """Wait for process to terminate and, if process is a children
+ of os.getpid(), also return its exit code, else None.
+
+ If the process is already terminated immediately return None
+ instead of raising NoSuchProcess.
+
+ If timeout (in seconds) is specified and process is still alive
+ raise TimeoutExpired.
+
+ To wait for multiple Process(es) use psutil.wait_procs().
+ """
+ if timeout is not None and not timeout >= 0:
+ raise ValueError("timeout must be a positive integer")
+ return self._proc.wait(timeout)
+
+
+# =====================================================================
+# --- Popen class
+# =====================================================================
+
+
+class Popen(Process):
+ """A more convenient interface to stdlib subprocess module.
+ It starts a sub process and deals with it exactly as when using
+ subprocess.Popen class but in addition also provides all the
+ properties and methods of psutil.Process class as a unified
+ interface:
+
+ >>> import psutil
+ >>> from subprocess import PIPE
+ >>> p = psutil.Popen(["python", "-c", "print 'hi'"], stdout=PIPE)
+ >>> p.name()
+ 'python'
+ >>> p.uids()
+ user(real=1000, effective=1000, saved=1000)
+ >>> p.username()
+ 'giampaolo'
+ >>> p.communicate()
+ ('hi\n', None)
+ >>> p.terminate()
+ >>> p.wait(timeout=2)
+ 0
+ >>>
+
+ For method names common to both classes such as kill(), terminate()
+ and wait(), psutil.Process implementation takes precedence.
+
+    Unlike subprocess.Popen this class pre-emptively checks whether PID
+ has been reused on send_signal(), terminate() and kill() so that
+ you don't accidentally terminate another process, fixing
+ http://bugs.python.org/issue6973.
+
+ For a complete documentation refer to:
+ http://docs.python.org/library/subprocess.html
+ """
+
+ def __init__(self, *args, **kwargs):
+ # Explicitly avoid to raise NoSuchProcess in case the process
+ # spawned by subprocess.Popen terminates too quickly, see:
+ # https://github.com/giampaolo/psutil/issues/193
+ self.__subproc = subprocess.Popen(*args, **kwargs)
+ self._init(self.__subproc.pid, _ignore_nsp=True)
+
+ def __dir__(self):
+ return sorted(set(dir(Popen) + dir(subprocess.Popen)))
+
+ def __getattribute__(self, name):
+ try:
+ return object.__getattribute__(self, name)
+ except AttributeError:
+ try:
+ return object.__getattribute__(self.__subproc, name)
+ except AttributeError:
+ raise AttributeError("%s instance has no attribute '%s'"
+ % (self.__class__.__name__, name))
+
+ def wait(self, timeout=None):
+ if self.__subproc.returncode is not None:
+ return self.__subproc.returncode
+ ret = super(Popen, self).wait(timeout)
+ self.__subproc.returncode = ret
+ return ret
+
+
+# =====================================================================
+# --- system processes related functions
+# =====================================================================
+
+
+def pids():
+ """Return a list of current running PIDs."""
+ return _psplatform.pids()
+
+
+def pid_exists(pid):
+ """Return True if given PID exists in the current process list.
+ This is faster than doing "pid in psutil.pids()" and
+ should be preferred.
+ """
+ if pid < 0:
+ return False
+ elif pid == 0 and _POSIX:
+ # On POSIX we use os.kill() to determine PID existence.
+ # According to "man 2 kill" PID 0 has a special meaning
+ # though: it refers to <<every process in the process
+        # group of the calling process>> and that is not what we want
+ # to do here.
+ return pid in pids()
+ else:
+ return _psplatform.pid_exists(pid)
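+
+# Illustrative usage sketch:
+#
+# >>> psutil.pid_exists(os.getpid())
+# True
+# >>> psutil.pid_exists(-1)   # negative PIDs never exist
+# False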
+
+
+_pmap = {}
+
+
+def process_iter():
+ """Return a generator yielding a Process instance for all
+ running processes.
+
+ Every new Process instance is only created once and then cached
+ into an internal table which is updated every time this is used.
+
+ Cached Process instances are checked for identity so that you're
+ safe in case a PID has been reused by another process, in which
+ case the cached instance is updated.
+
+ The sorting order in which processes are yielded is based on
+ their PIDs.
+ """
+ def add(pid):
+ proc = Process(pid)
+ _pmap[proc.pid] = proc
+ return proc
+
+ def remove(pid):
+ _pmap.pop(pid, None)
+
+ a = set(pids())
+ b = set(_pmap.keys())
+ new_pids = a - b
+ gone_pids = b - a
+
+ for pid in gone_pids:
+ remove(pid)
+ for pid, proc in sorted(list(_pmap.items()) +
+ list(dict.fromkeys(new_pids).items())):
+ try:
+ if proc is None: # new process
+ yield add(pid)
+ else:
+ # use is_running() to check whether PID has been reused by
+ # another process in which case yield a new Process instance
+ if proc.is_running():
+ yield proc
+ else:
+ yield add(pid)
+ except NoSuchProcess:
+ remove(pid)
+ except AccessDenied:
+ # Process creation time can't be determined hence there's
+ # no way to tell whether the pid of the cached process
+ # has been reused. Just return the cached version.
+ yield proc
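+
+# Illustrative usage sketch (output depends on the running processes):
+#
+# >>> for proc in psutil.process_iter():
+# ...     print(proc.pid, proc.name())
+# 1 init
+# 2 kthreadd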
+
+
+def wait_procs(procs, timeout=None, callback=None):
+ """Convenience function which waits for a list of processes to
+ terminate.
+
+ Return a (gone, alive) tuple indicating which processes
+ are gone and which ones are still alive.
+
+ The gone ones will have a new 'returncode' attribute indicating
+ process exit status (may be None).
+
+ 'callback' is a function which gets called every time a process
+ terminates (a Process instance is passed as callback argument).
+
+ Function will return as soon as all processes terminate or when
+ timeout occurs.
+
+ Typical use case is:
+
+ - send SIGTERM to a list of processes
+ - give them some time to terminate
+ - send SIGKILL to those ones which are still alive
+
+ Example:
+
+ >>> def on_terminate(proc):
+ ... print("process {} terminated".format(proc))
+ ...
+ >>> for p in procs:
+ ... p.terminate()
+ ...
+ >>> gone, alive = wait_procs(procs, timeout=3, callback=on_terminate)
+ >>> for p in alive:
+ ... p.kill()
+ """
+ def check_gone(proc, timeout):
+ try:
+ returncode = proc.wait(timeout=timeout)
+ except TimeoutExpired:
+ pass
+ else:
+ if returncode is not None or not proc.is_running():
+ proc.returncode = returncode
+ gone.add(proc)
+ if callback is not None:
+ callback(proc)
+
+ if timeout is not None and not timeout >= 0:
+ msg = "timeout must be a positive integer, got %s" % timeout
+ raise ValueError(msg)
+ gone = set()
+ alive = set(procs)
+ if callback is not None and not callable(callback):
+ raise TypeError("callback %r is not a callable" % callable)
+ if timeout is not None:
+ deadline = _timer() + timeout
+
+ while alive:
+ if timeout is not None and timeout <= 0:
+ break
+ for proc in alive:
+ # Make sure that every complete iteration (all processes)
+ # will last max 1 sec.
+ # We do this because we don't want to wait too long on a
+ # single process: in case it terminates too late other
+ # processes may disappear in the meantime and their PID
+ # reused.
+ max_timeout = 1.0 / len(alive)
+ if timeout is not None:
+ timeout = min((deadline - _timer()), max_timeout)
+ if timeout <= 0:
+ break
+ check_gone(proc, timeout)
+ else:
+ check_gone(proc, max_timeout)
+ alive = alive - gone
+
+ if alive:
+ # Last attempt over processes survived so far.
+ # timeout == 0 won't make this function wait any further.
+ for proc in alive:
+ check_gone(proc, 0)
+ alive = alive - gone
+
+ return (list(gone), list(alive))
+
+
+# =====================================================================
+# --- CPU related functions
+# =====================================================================
+
+
+@memoize
+def cpu_count(logical=True):
+ """Return the number of logical CPUs in the system (same as
+ os.cpu_count() in Python 3.4).
+
+ If logical is False return the number of physical cores only
+ (e.g. hyper thread CPUs are excluded).
+
+ Return None if undetermined.
+
+ The return value is cached after first call.
+ If desired cache can be cleared like this:
+
+ >>> psutil.cpu_count.cache_clear()
+ """
+ if logical:
+ return _psplatform.cpu_count_logical()
+ else:
+ return _psplatform.cpu_count_physical()
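+
+# Illustrative usage sketch (counts are machine-dependent):
+#
+# >>> psutil.cpu_count()               # logical, hyper-threads included
+# 4
+# >>> psutil.cpu_count(logical=False)  # physical cores only
+# 2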
+
+
+def cpu_times(percpu=False):
+ """Return system-wide CPU times as a namedtuple.
+ Every CPU time represents the seconds the CPU has spent in the given mode.
+ The namedtuple's fields availability varies depending on the platform:
+ - user
+ - system
+ - idle
+ - nice (UNIX)
+ - iowait (Linux)
+ - irq (Linux, FreeBSD)
+ - softirq (Linux)
+ - steal (Linux >= 2.6.11)
+ - guest (Linux >= 2.6.24)
+ - guest_nice (Linux >= 3.2.0)
+
+ When percpu is True return a list of namedtuples for each CPU.
+ First element of the list refers to first CPU, second element
+ to second CPU and so on.
+ The order of the list is consistent across calls.
+ """
+ if not percpu:
+ return _psplatform.cpu_times()
+ else:
+ return _psplatform.per_cpu_times()
+
+
+_last_cpu_times = cpu_times()
+_last_per_cpu_times = cpu_times(percpu=True)
+
+
+def cpu_percent(interval=None, percpu=False):
+ """Return a float representing the current system-wide CPU
+ utilization as a percentage.
+
+ When interval is > 0.0 compares system CPU times elapsed before
+ and after the interval (blocking).
+
+ When interval is 0.0 or None compares system CPU times elapsed
+ since last call or module import, returning immediately (non
+ blocking). That means the first time this is called it will
+ return a meaningless 0.0 value which you should ignore.
+    In this case it is recommended for accuracy that this function be
+ called with at least 0.1 seconds between calls.
+
+ When percpu is True returns a list of floats representing the
+ utilization as a percentage for each CPU.
+ First element of the list refers to first CPU, second element
+ to second CPU and so on.
+ The order of the list is consistent across calls.
+
+ Examples:
+
+ >>> # blocking, system-wide
+ >>> psutil.cpu_percent(interval=1)
+ 2.0
+ >>>
+ >>> # blocking, per-cpu
+ >>> psutil.cpu_percent(interval=1, percpu=True)
+ [2.0, 1.0]
+ >>>
+ >>> # non-blocking (percentage since last call)
+ >>> psutil.cpu_percent(interval=None)
+ 2.9
+ >>>
+ """
+ global _last_cpu_times
+ global _last_per_cpu_times
+ blocking = interval is not None and interval > 0.0
+
+ def calculate(t1, t2):
+ t1_all = sum(t1)
+ t1_busy = t1_all - t1.idle
+
+ t2_all = sum(t2)
+ t2_busy = t2_all - t2.idle
+
+ # this usually indicates a float precision issue
+ if t2_busy <= t1_busy:
+ return 0.0
+
+ busy_delta = t2_busy - t1_busy
+ all_delta = t2_all - t1_all
+ busy_perc = (busy_delta / all_delta) * 100
+ return round(busy_perc, 1)
+
+ # system-wide usage
+ if not percpu:
+ if blocking:
+ t1 = cpu_times()
+ time.sleep(interval)
+ else:
+ t1 = _last_cpu_times
+ _last_cpu_times = cpu_times()
+ return calculate(t1, _last_cpu_times)
+ # per-cpu usage
+ else:
+ ret = []
+ if blocking:
+ tot1 = cpu_times(percpu=True)
+ time.sleep(interval)
+ else:
+ tot1 = _last_per_cpu_times
+ _last_per_cpu_times = cpu_times(percpu=True)
+ for t1, t2 in zip(tot1, _last_per_cpu_times):
+ ret.append(calculate(t1, t2))
+ return ret
+
+
+# Use separate global vars for cpu_times_percent() so that it's
+# independent from cpu_percent() and they can both be used within
+# the same program.
+_last_cpu_times_2 = _last_cpu_times
+_last_per_cpu_times_2 = _last_per_cpu_times
+
+
+def cpu_times_percent(interval=None, percpu=False):
+ """Same as cpu_percent() but provides utilization percentages
+ for each specific CPU time as is returned by cpu_times().
+ For instance, on Linux we'll get:
+
+ >>> cpu_times_percent()
+ cpupercent(user=4.8, nice=0.0, system=4.8, idle=90.5, iowait=0.0,
+ irq=0.0, softirq=0.0, steal=0.0, guest=0.0, guest_nice=0.0)
+ >>>
+
+ interval and percpu arguments have the same meaning as in
+ cpu_percent().
+ """
+ global _last_cpu_times_2
+ global _last_per_cpu_times_2
+ blocking = interval is not None and interval > 0.0
+
+ def calculate(t1, t2):
+ nums = []
+ all_delta = sum(t2) - sum(t1)
+ for field in t1._fields:
+ field_delta = getattr(t2, field) - getattr(t1, field)
+ try:
+ field_perc = (100 * field_delta) / all_delta
+ except ZeroDivisionError:
+ field_perc = 0.0
+ field_perc = round(field_perc, 1)
+ # CPU times are always supposed to increase over time
+ # or at least remain the same and that's because time
+ # cannot go backwards.
+ # Surprisingly sometimes this might not be the case (at
+ # least on Windows and Linux), see:
+ # https://github.com/giampaolo/psutil/issues/392
+ # https://github.com/giampaolo/psutil/issues/645
+ # I really don't know what to do about that except
+ # forcing the value to 0 or 100.
+ if field_perc > 100.0:
+ field_perc = 100.0
+ # `<=` because `-0.0 == 0.0` evaluates to True
+ elif field_perc <= 0.0:
+ field_perc = 0.0
+ nums.append(field_perc)
+ return _psplatform.scputimes(*nums)
+
+ # system-wide usage
+ if not percpu:
+ if blocking:
+ t1 = cpu_times()
+ time.sleep(interval)
+ else:
+ t1 = _last_cpu_times_2
+ _last_cpu_times_2 = cpu_times()
+ return calculate(t1, _last_cpu_times_2)
+ # per-cpu usage
+ else:
+ ret = []
+ if blocking:
+ tot1 = cpu_times(percpu=True)
+ time.sleep(interval)
+ else:
+ tot1 = _last_per_cpu_times_2
+ _last_per_cpu_times_2 = cpu_times(percpu=True)
+ for t1, t2 in zip(tot1, _last_per_cpu_times_2):
+ ret.append(calculate(t1, t2))
+ return ret
+
+
+# =====================================================================
+# --- system memory related functions
+# =====================================================================
+
+
+def virtual_memory():
+ """Return statistics about system memory usage as a namedtuple
+ including the following fields, expressed in bytes:
+
+ - total:
+ total physical memory available.
+
+ - available:
+ the actual amount of memory that can be given instantly to
+ processes that request more memory; this is calculated by
+ summing different memory values depending on the platform
+ (e.g. free + buffers + cached on Linux) and it is meant to
+ be used to monitor actual memory usage in a cross-platform
+ fashion.
+
+ - percent:
+ the percentage usage calculated as (total - available) / total * 100
+
+ - used:
+ memory used, calculated differently depending on the platform and
+ designed for informational purposes only:
+ OSX: active + inactive + wired
+ BSD: active + wired + cached
+ LINUX: total - free
+
+ - free:
+ memory not being used at all (zeroed) that is readily available;
+ note that this doesn't reflect the actual memory available
+ (use 'available' instead)
+
+ Platform-specific fields:
+
+ - active (UNIX):
+ memory currently in use or very recently used, and so it is in RAM.
+
+ - inactive (UNIX):
+ memory that is marked as not used.
+
+ - buffers (BSD, Linux):
+ cache for things like file system metadata.
+
+ - cached (BSD, OSX):
+ cache for various things.
+
+ - wired (OSX, BSD):
+ memory that is marked to always stay in RAM. It is never moved to disk.
+
+ - shared (BSD):
+ memory that may be simultaneously accessed by multiple processes.
+
+ The sum of 'used' and 'available' does not necessarily equal total.
+ On Windows 'available' and 'free' are the same.
+ """
+ global _TOTAL_PHYMEM
+ ret = _psplatform.virtual_memory()
+ # cached for later use in Process.memory_percent()
+ _TOTAL_PHYMEM = ret.total
+ return ret
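+
+
+# Usage sketch (values are illustrative):
+#
+# >>> mem = psutil.virtual_memory()
+# >>> mem.total, mem.available, mem.percent
+# (8589934592, 4887994368, 43.1)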
+
+
+def swap_memory():
+ """Return system swap memory statistics as a namedtuple including
+ the following fields:
+
+ - total: total swap memory in bytes
+ - used: used swap memory in bytes
+ - free: free swap memory in bytes
+ - percent: the percentage usage
+ - sin: no. of bytes the system has swapped in from disk (cumulative)
+ - sout: no. of bytes the system has swapped out from disk (cumulative)
+
+ 'sin' and 'sout' on Windows are meaningless and always set to 0.
+ """
+ return _psplatform.swap_memory()
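+
+
+# Usage sketch (values are illustrative):
+#
+# >>> swap = psutil.swap_memory()
+# >>> swap.total, swap.used, swap.percent
+# (2147479552, 39108608, 1.8)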
+
+
+# =====================================================================
+# --- disks/partitions related functions
+# =====================================================================
+
+
+def disk_usage(path):
+ """Return disk usage statistics about the given path as a namedtuple
+ including total, used and free space expressed in bytes plus the
+ percentage usage.
+ """
+ return _psplatform.disk_usage(path)
+
+
+def disk_partitions(all=False):
+ """Return mounted partitions as a list of
+ (device, mountpoint, fstype, opts) namedtuple.
+ 'opts' field is a raw string separated by commas indicating mount
+ options which may vary depending on the platform.
+
+ If "all" parameter is False return physical devices only and ignore
+ all others.
+ """
+ return _psplatform.disk_partitions(all)
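+
+
+# Usage sketch combining disk_partitions() and disk_usage()
+# (output is illustrative):
+#
+# >>> for part in psutil.disk_partitions():
+# ...     usage = psutil.disk_usage(part.mountpoint)
+# ...     print("%s %s %s%%" % (part.device, part.fstype, usage.percent))
+# ...
+# /dev/sda1 ext4 51.3%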
+
+
+def disk_io_counters(perdisk=False):
+ """Return system disk I/O statistics as a namedtuple including
+ the following fields:
+
+ - read_count: number of reads
+ - write_count: number of writes
+ - read_bytes: number of bytes read
+ - write_bytes: number of bytes written
+ - read_time: time spent reading from disk (in milliseconds)
+ - write_time: time spent writing to disk (in milliseconds)
+
+ If perdisk is True return the same information for every
+ physical disk installed on the system as a dictionary
+ with partition names as the keys and the namedtuple
+ described above as the values.
+
+ On recent Windows versions the 'diskperf -y' command may need to
+ be executed first, otherwise this function won't find any disk.
+ """
+ rawdict = _psplatform.disk_io_counters()
+ if not rawdict:
+ raise RuntimeError("couldn't find any physical disk")
+ if perdisk:
+ for disk, fields in rawdict.items():
+ rawdict[disk] = _common.sdiskio(*fields)
+ return rawdict
+ else:
+ return _common.sdiskio(*[sum(x) for x in zip(*rawdict.values())])
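+
+
+# Usage sketch (values are illustrative):
+#
+# >>> psutil.disk_io_counters()
+# sdiskio(read_count=8141, write_count=2431, read_bytes=290203,
+#         write_bytes=537676, read_time=5868, write_time=94922)
+# >>> sorted(psutil.disk_io_counters(perdisk=True))
+# ['sda1', 'sda2']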
+
+
+# =====================================================================
+# --- network related functions
+# =====================================================================
+
+
+def net_io_counters(pernic=False):
+ """Return network I/O statistics as a namedtuple including
+ the following fields:
+
+ - bytes_sent: number of bytes sent
+ - bytes_recv: number of bytes received
+ - packets_sent: number of packets sent
+ - packets_recv: number of packets received
+ - errin: total number of errors while receiving
+ - errout: total number of errors while sending
+ - dropin: total number of incoming packets which were dropped
+ - dropout: total number of outgoing packets which were dropped
+ (always 0 on OSX and BSD)
+
+ If pernic is True return the same information for every
+ network interface installed on the system as a dictionary
+ with network interface names as the keys and the namedtuple
+ described above as the values.
+ """
+ rawdict = _psplatform.net_io_counters()
+ if not rawdict:
+ raise RuntimeError("couldn't find any network interface")
+ if pernic:
+ for nic, fields in rawdict.items():
+ rawdict[nic] = _common.snetio(*fields)
+ return rawdict
+ else:
+ return _common.snetio(*[sum(x) for x in zip(*rawdict.values())])
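+
+
+# Usage sketch (values are illustrative):
+#
+# >>> psutil.net_io_counters()
+# snetio(bytes_sent=14508483, bytes_recv=62749361, packets_sent=84311,
+#        packets_recv=94888, errin=0, errout=0, dropin=0, dropout=0)
+# >>> sorted(psutil.net_io_counters(pernic=True))
+# ['eth0', 'lo']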
+
+
+def net_connections(kind='inet'):
+ """Return system-wide connections as a list of
+ (fd, family, type, laddr, raddr, status, pid) namedtuples.
+ In case of limited privileges 'fd' and 'pid' may be set to -1
+ and None respectively.
+ The 'kind' parameter filters for connections that fit the
+ following criteria:
+
+ Kind Value Connections using
+ inet IPv4 and IPv6
+ inet4 IPv4
+ inet6 IPv6
+ tcp TCP
+ tcp4 TCP over IPv4
+ tcp6 TCP over IPv6
+ udp UDP
+ udp4 UDP over IPv4
+ udp6 UDP over IPv6
+ unix UNIX socket (both UDP and TCP protocols)
+ all the sum of all the possible families and protocols
+
+ On OSX this function requires root privileges.
+ """
+ return _psplatform.net_connections(kind)
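+
+
+# Usage sketch: local addresses of listening TCP sockets
+# (output is illustrative):
+#
+# >>> [c.laddr for c in psutil.net_connections(kind='tcp')
+# ...  if c.status == psutil.CONN_LISTEN]
+# [('0.0.0.0', 22), ('127.0.0.1', 631)]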
+
+
+def net_if_addrs():
+ """Return the addresses associated to each NIC (network interface
+ card) installed on the system as a dictionary whose keys are the
+ NIC names and value is a list of namedtuples for each address
+ assigned to the NIC. Each namedtuple includes 4 fields:
+
+ - family
+ - address
+ - netmask
+ - broadcast
+
+ 'family' can be either socket.AF_INET, socket.AF_INET6 or
+ psutil.AF_LINK, which refers to a MAC address.
+ 'address' is the primary address, 'netmask' and 'broadcast'
+ may be None.
+ Note: you can have more than one address of the same family
+ associated with each interface.
+ """
+ has_enums = sys.version_info >= (3, 4)
+ if has_enums:
+ import socket
+ rawlist = _psplatform.net_if_addrs()
+ rawlist.sort(key=lambda x: x[1]) # sort by family
+ ret = collections.defaultdict(list)
+ for name, fam, addr, mask, broadcast in rawlist:
+ if has_enums:
+ try:
+ fam = socket.AddressFamily(fam)
+ except ValueError:
+ if os.name == 'nt' and fam == -1:
+ fam = _psplatform.AF_LINK
+ elif (hasattr(_psplatform, "AF_LINK") and
+ _psplatform.AF_LINK == fam):
+ # Linux defines AF_LINK as an alias for AF_PACKET.
+ # We re-set the family here so that repr(family)
+ # will show AF_LINK rather than AF_PACKET
+ fam = _psplatform.AF_LINK
+ ret[name].append(_common.snic(fam, addr, mask, broadcast))
+ return dict(ret)
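+
+
+# Usage sketch (interface names and addresses are illustrative):
+#
+# >>> for nic, addrs in psutil.net_if_addrs().items():
+# ...     for addr in addrs:
+# ...         print("%s %s %s" % (nic, addr.family, addr.address))
+# ...
+# lo AddressFamily.AF_INET 127.0.0.1
+# eth0 AddressFamily.AF_INET 192.168.1.5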
+
+
+def net_if_stats():
+ """Return information about each NIC (network interface card)
+ installed on the system as a dictionary whose keys are the
+ NIC names and whose values are namedtuples with the following fields:
+
+ - isup: whether the interface is up (bool)
+ - duplex: can be either NIC_DUPLEX_FULL, NIC_DUPLEX_HALF or
+ NIC_DUPLEX_UNKNOWN
+ - speed: the NIC speed expressed in megabits per second (Mbit/s);
+ if it can't be determined (e.g. 'localhost') it will be set to 0.
+ - mtu: the maximum transmission unit expressed in bytes.
+ """
+ return _psplatform.net_if_stats()
+
+
+# =====================================================================
+# --- other system related functions
+# =====================================================================
+
+
+def boot_time():
+ """Return the system boot time expressed in seconds since the epoch."""
+ # Note: we are not caching this because it is subject to
+ # system clock updates.
+ return _psplatform.boot_time()
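+
+
+# Usage sketch: convert the epoch timestamp into a datetime
+# (output is illustrative):
+#
+# >>> import datetime
+# >>> datetime.datetime.fromtimestamp(psutil.boot_time())
+# datetime.datetime(2015, 6, 3, 8, 12, 41)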
+
+
+def users():
+ """Return users currently connected on the system as a list of
+ namedtuples including the following fields:
+
+ - user: the name of the user
+ - terminal: the tty or pseudo-tty associated with the user, if any.
+ - host: the host name associated with the entry, if any.
+ - started: the creation time as a floating point number expressed in
+ seconds since the epoch.
+ """
+ return _psplatform.users()
+
+
+def test():
+ """List info of all currently running processes emulating ps aux
+ output.
+ """
+ import datetime
+
+ today_day = datetime.date.today()
+ templ = "%-10s %5s %4s %4s %7s %7s %-13s %5s %7s %s"
+ attrs = ['pid', 'cpu_percent', 'memory_percent', 'name', 'cpu_times',
+ 'create_time', 'memory_info']
+ if _POSIX:
+ attrs.append('uids')
+ attrs.append('terminal')
+ print(templ % ("USER", "PID", "%CPU", "%MEM", "VSZ", "RSS", "TTY",
+ "START", "TIME", "COMMAND"))
+ for p in process_iter():
+ try:
+ pinfo = p.as_dict(attrs, ad_value='')
+ except NoSuchProcess:
+ pass
+ else:
+ if pinfo['create_time']:
+ ctime = datetime.datetime.fromtimestamp(pinfo['create_time'])
+ if ctime.date() == today_day:
+ ctime = ctime.strftime("%H:%M")
+ else:
+ ctime = ctime.strftime("%b%d")
+ else:
+ ctime = ''
+ cputime = time.strftime("%M:%S",
+ time.localtime(sum(pinfo['cpu_times'])))
+ try:
+ user = p.username()
+ except Error:
+ user = ''
+ if _WINDOWS and '\\' in user:
+ user = user.split('\\')[1]
+ vms = pinfo['memory_info'] and \
+ int(pinfo['memory_info'].vms / 1024) or '?'
+ rss = pinfo['memory_info'] and \
+ int(pinfo['memory_info'].rss / 1024) or '?'
+ memp = pinfo['memory_percent'] and \
+ round(pinfo['memory_percent'], 1) or '?'
+ print(templ % (
+ user[:10],
+ pinfo['pid'],
+ pinfo['cpu_percent'],
+ memp,
+ vms,
+ rss,
+ pinfo.get('terminal', '') or '?',
+ ctime,
+ cputime,
+ pinfo['name'].strip() or '?'))
+
+
+del memoize, division
+if sys.version_info < (3, 0):
+ del num
+
+if __name__ == "__main__":
+ test()
diff --git a/python/psutil/psutil/_common.py b/python/psutil/psutil/_common.py
new file mode 100644
index 000000000..e9acf595d
--- /dev/null
+++ b/python/psutil/psutil/_common.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common objects shared by all _ps* modules."""
+
+from __future__ import division
+import errno
+import functools
+import os
+import socket
+import stat
+import sys
+from collections import namedtuple
+from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+if sys.version_info >= (3, 4):
+ import enum
+else:
+ enum = None
+
+
+# --- constants
+
+AF_INET6 = getattr(socket, 'AF_INET6', None)
+AF_UNIX = getattr(socket, 'AF_UNIX', None)
+
+STATUS_RUNNING = "running"
+STATUS_SLEEPING = "sleeping"
+STATUS_DISK_SLEEP = "disk-sleep"
+STATUS_STOPPED = "stopped"
+STATUS_TRACING_STOP = "tracing-stop"
+STATUS_ZOMBIE = "zombie"
+STATUS_DEAD = "dead"
+STATUS_WAKE_KILL = "wake-kill"
+STATUS_WAKING = "waking"
+STATUS_IDLE = "idle" # BSD
+STATUS_LOCKED = "locked" # BSD
+STATUS_WAITING = "waiting" # BSD
+
+CONN_ESTABLISHED = "ESTABLISHED"
+CONN_SYN_SENT = "SYN_SENT"
+CONN_SYN_RECV = "SYN_RECV"
+CONN_FIN_WAIT1 = "FIN_WAIT1"
+CONN_FIN_WAIT2 = "FIN_WAIT2"
+CONN_TIME_WAIT = "TIME_WAIT"
+CONN_CLOSE = "CLOSE"
+CONN_CLOSE_WAIT = "CLOSE_WAIT"
+CONN_LAST_ACK = "LAST_ACK"
+CONN_LISTEN = "LISTEN"
+CONN_CLOSING = "CLOSING"
+CONN_NONE = "NONE"
+
+if enum is None:
+ NIC_DUPLEX_FULL = 2
+ NIC_DUPLEX_HALF = 1
+ NIC_DUPLEX_UNKNOWN = 0
+else:
+ class NicDuplex(enum.IntEnum):
+ NIC_DUPLEX_FULL = 2
+ NIC_DUPLEX_HALF = 1
+ NIC_DUPLEX_UNKNOWN = 0
+
+ globals().update(NicDuplex.__members__)
+
+
+# --- functions
+
+def usage_percent(used, total, _round=None):
+ """Calculate percentage usage of 'used' against 'total'."""
+ try:
+ ret = (used / total) * 100
+ except ZeroDivisionError:
+ ret = 0
+ if _round is not None:
+ return round(ret, _round)
+ else:
+ return ret
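+
+
+# Worked example for usage_percent() (illustrative):
+#
+# >>> usage_percent(1024, 4096, _round=1)
+# 25.0
+# >>> usage_percent(1, 0)  # total == 0 yields 0 instead of raising
+# 0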
+
+
+def memoize(fun):
+ """A simple memoize decorator for functions supporting (hashable)
+ positional arguments.
+ It also provides a cache_clear() function for clearing the cache:
+
+ >>> @memoize
+ ... def foo():
+ ... return 1
+ ...
+ >>> foo()
+ 1
+ >>> foo.cache_clear()
+ >>>
+ """
+ @functools.wraps(fun)
+ def wrapper(*args, **kwargs):
+ key = (args, frozenset(sorted(kwargs.items())))
+ lock.acquire()
+ try:
+ try:
+ return cache[key]
+ except KeyError:
+ ret = cache[key] = fun(*args, **kwargs)
+ finally:
+ lock.release()
+ return ret
+
+ def cache_clear():
+ """Clear cache."""
+ lock.acquire()
+ try:
+ cache.clear()
+ finally:
+ lock.release()
+
+ lock = threading.RLock()
+ cache = {}
+ wrapper.cache_clear = cache_clear
+ return wrapper
+
+
+def isfile_strict(path):
+ """Same as os.path.isfile() but does not swallow EACCES / EPERM
+ exceptions, see:
+ http://mail.python.org/pipermail/python-dev/2012-June/120787.html
+ """
+ try:
+ st = os.stat(path)
+ except OSError as err:
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise
+ return False
+ else:
+ return stat.S_ISREG(st.st_mode)
+
+
+def sockfam_to_enum(num):
+ """Convert a numeric socket family value to an IntEnum member.
+ If it's not a known member, return the numeric value itself.
+ """
+ if enum is None:
+ return num
+ try:
+ return socket.AddressFamily(num)
+ except (ValueError, AttributeError):
+ return num
+
+
+def socktype_to_enum(num):
+ """Convert a numeric socket type value to an IntEnum member.
+ If it's not a known member, return the numeric value itself.
+ """
+ if enum is None:
+ return num
+ try:
+ return socket.AddressType(num)
+ except (ValueError, AttributeError):
+ return num
+
+
+# --- Process.connections() 'kind' parameter mapping
+
+conn_tmap = {
+ "all": ([AF_INET, AF_INET6, AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
+ "tcp": ([AF_INET, AF_INET6], [SOCK_STREAM]),
+ "tcp4": ([AF_INET], [SOCK_STREAM]),
+ "udp": ([AF_INET, AF_INET6], [SOCK_DGRAM]),
+ "udp4": ([AF_INET], [SOCK_DGRAM]),
+ "inet": ([AF_INET, AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
+ "inet4": ([AF_INET], [SOCK_STREAM, SOCK_DGRAM]),
+ "inet6": ([AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
+}
+
+if AF_INET6 is not None:
+ conn_tmap.update({
+ "tcp6": ([AF_INET6], [SOCK_STREAM]),
+ "udp6": ([AF_INET6], [SOCK_DGRAM]),
+ })
+
+if AF_UNIX is not None:
+ conn_tmap.update({
+ "unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
+ })
+
+del AF_INET, AF_INET6, AF_UNIX, SOCK_STREAM, SOCK_DGRAM
+
+
+# --- namedtuples for psutil.* system-related functions
+
+# psutil.swap_memory()
+sswap = namedtuple('sswap', ['total', 'used', 'free', 'percent', 'sin',
+ 'sout'])
+# psutil.disk_usage()
+sdiskusage = namedtuple('sdiskusage', ['total', 'used', 'free', 'percent'])
+# psutil.disk_io_counters()
+sdiskio = namedtuple('sdiskio', ['read_count', 'write_count',
+ 'read_bytes', 'write_bytes',
+ 'read_time', 'write_time'])
+# psutil.disk_partitions()
+sdiskpart = namedtuple('sdiskpart', ['device', 'mountpoint', 'fstype', 'opts'])
+# psutil.net_io_counters()
+snetio = namedtuple('snetio', ['bytes_sent', 'bytes_recv',
+ 'packets_sent', 'packets_recv',
+ 'errin', 'errout',
+ 'dropin', 'dropout'])
+# psutil.users()
+suser = namedtuple('suser', ['name', 'terminal', 'host', 'started'])
+# psutil.net_connections()
+sconn = namedtuple('sconn', ['fd', 'family', 'type', 'laddr', 'raddr',
+ 'status', 'pid'])
+# psutil.net_if_addrs()
+snic = namedtuple('snic', ['family', 'address', 'netmask', 'broadcast'])
+# psutil.net_if_stats()
+snicstats = namedtuple('snicstats', ['isup', 'duplex', 'speed', 'mtu'])
+
+
+# --- namedtuples for psutil.Process methods
+
+# psutil.Process.memory_info()
+pmem = namedtuple('pmem', ['rss', 'vms'])
+# psutil.Process.cpu_times()
+pcputimes = namedtuple('pcputimes', ['user', 'system'])
+# psutil.Process.open_files()
+popenfile = namedtuple('popenfile', ['path', 'fd'])
+# psutil.Process.threads()
+pthread = namedtuple('pthread', ['id', 'user_time', 'system_time'])
+# psutil.Process.uids()
+puids = namedtuple('puids', ['real', 'effective', 'saved'])
+# psutil.Process.gids()
+pgids = namedtuple('pgids', ['real', 'effective', 'saved'])
+# psutil.Process.io_counters()
+pio = namedtuple('pio', ['read_count', 'write_count',
+ 'read_bytes', 'write_bytes'])
+# psutil.Process.ionice()
+pionice = namedtuple('pionice', ['ioclass', 'value'])
+# psutil.Process.ctx_switches()
+pctxsw = namedtuple('pctxsw', ['voluntary', 'involuntary'])
+# psutil.Process.connections()
+pconn = namedtuple('pconn', ['fd', 'family', 'type', 'laddr', 'raddr',
+ 'status'])
diff --git a/python/psutil/psutil/_compat.py b/python/psutil/psutil/_compat.py
new file mode 100644
index 000000000..38744a84a
--- /dev/null
+++ b/python/psutil/psutil/_compat.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module which provides compatibility with older Python versions."""
+
+import collections
+import functools
+import sys
+
+__all__ = ["PY3", "long", "xrange", "unicode", "callable", "lru_cache"]
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ long = int
+ xrange = range
+ unicode = str
+
+ def u(s):
+ return s
+else:
+ long = long
+ xrange = xrange
+ unicode = unicode
+
+ def u(s):
+ return unicode(s, "unicode_escape")
+
+
+# removed in 3.0, reintroduced in 3.2
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+# --- stdlib additions
+
+
+# py 3.2 functools.lru_cache
+# Taken from: http://code.activestate.com/recipes/578078
+# Credit: Raymond Hettinger
+try:
+ from functools import lru_cache
+except ImportError:
+ try:
+ from threading import RLock
+ except ImportError:
+ from dummy_threading import RLock
+
+ _CacheInfo = collections.namedtuple(
+ "CacheInfo", ["hits", "misses", "maxsize", "currsize"])
+
+ class _HashedSeq(list):
+ __slots__ = 'hashvalue'
+
+ def __init__(self, tup, hash=hash):
+ self[:] = tup
+ self.hashvalue = hash(tup)
+
+ def __hash__(self):
+ return self.hashvalue
+
+ def _make_key(args, kwds, typed,
+ kwd_mark=(object(), ),
+ fasttypes=set((int, str, frozenset, type(None))),
+ sorted=sorted, tuple=tuple, type=type, len=len):
+ key = args
+ if kwds:
+ sorted_items = sorted(kwds.items())
+ key += kwd_mark
+ for item in sorted_items:
+ key += item
+ if typed:
+ key += tuple(type(v) for v in args)
+ if kwds:
+ key += tuple(type(v) for k, v in sorted_items)
+ elif len(key) == 1 and type(key[0]) in fasttypes:
+ return key[0]
+ return _HashedSeq(key)
+
+ def lru_cache(maxsize=100, typed=False):
+ """Least-recently-used cache decorator, see:
+ http://docs.python.org/3/library/functools.html#functools.lru_cache
+ """
+ def decorating_function(user_function):
+ cache = dict()
+ stats = [0, 0]
+ HITS, MISSES = 0, 1
+ make_key = _make_key
+ cache_get = cache.get
+ _len = len
+ lock = RLock()
+ root = []
+ root[:] = [root, root, None, None]
+ nonlocal_root = [root]
+ PREV, NEXT, KEY, RESULT = 0, 1, 2, 3
+ if maxsize == 0:
+ def wrapper(*args, **kwds):
+ result = user_function(*args, **kwds)
+ stats[MISSES] += 1
+ return result
+ elif maxsize is None:
+ def wrapper(*args, **kwds):
+ key = make_key(args, kwds, typed)
+ result = cache_get(key, root)
+ if result is not root:
+ stats[HITS] += 1
+ return result
+ result = user_function(*args, **kwds)
+ cache[key] = result
+ stats[MISSES] += 1
+ return result
+ else:
+ def wrapper(*args, **kwds):
+ if kwds or typed:
+ key = make_key(args, kwds, typed)
+ else:
+ key = args
+ lock.acquire()
+ try:
+ link = cache_get(key)
+ if link is not None:
+ root, = nonlocal_root
+ link_prev, link_next, key, result = link
+ link_prev[NEXT] = link_next
+ link_next[PREV] = link_prev
+ last = root[PREV]
+ last[NEXT] = root[PREV] = link
+ link[PREV] = last
+ link[NEXT] = root
+ stats[HITS] += 1
+ return result
+ finally:
+ lock.release()
+ result = user_function(*args, **kwds)
+ lock.acquire()
+ try:
+ root, = nonlocal_root
+ if key in cache:
+ pass
+ elif _len(cache) >= maxsize:
+ oldroot = root
+ oldroot[KEY] = key
+ oldroot[RESULT] = result
+ root = nonlocal_root[0] = oldroot[NEXT]
+ oldkey = root[KEY]
+ root[KEY] = root[RESULT] = None
+ del cache[oldkey]
+ cache[key] = oldroot
+ else:
+ last = root[PREV]
+ link = [last, root, key, result]
+ last[NEXT] = root[PREV] = cache[key] = link
+ stats[MISSES] += 1
+ finally:
+ lock.release()
+ return result
+
+ def cache_info():
+ """Report cache statistics"""
+ lock.acquire()
+ try:
+ return _CacheInfo(stats[HITS], stats[MISSES], maxsize,
+ len(cache))
+ finally:
+ lock.release()
+
+ def cache_clear():
+ """Clear the cache and cache statistics"""
+ lock.acquire()
+ try:
+ cache.clear()
+ root = nonlocal_root[0]
+ root[:] = [root, root, None, None]
+ stats[:] = [0, 0]
+ finally:
+ lock.release()
+
+ wrapper.__wrapped__ = user_function
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ return functools.update_wrapper(wrapper, user_function)
+
+ return decorating_function
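+
+
+# Usage sketch for lru_cache (same semantics whether the stdlib version
+# or the fallback above is used; output is illustrative):
+#
+# >>> @lru_cache(maxsize=None)
+# ... def fib(n):
+# ...     return n if n < 2 else fib(n - 1) + fib(n - 2)
+# ...
+# >>> fib(10)
+# 55
+# >>> fib.cache_info()
+# CacheInfo(hits=8, misses=11, maxsize=None, currsize=11)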
diff --git a/python/psutil/psutil/_psbsd.py b/python/psutil/psutil/_psbsd.py
new file mode 100644
index 000000000..db54a02e1
--- /dev/null
+++ b/python/psutil/psutil/_psbsd.py
@@ -0,0 +1,455 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""FreeBSD platform implementation."""
+
+import errno
+import functools
+import os
+import xml.etree.ElementTree as ET
+from collections import namedtuple
+
+from . import _common
+from . import _psposix
+from . import _psutil_bsd as cext
+from . import _psutil_posix as cext_posix
+from ._common import conn_tmap, usage_percent, sockfam_to_enum
+from ._common import socktype_to_enum
+
+
+__extra__all__ = []
+
+# --- constants
+
+PROC_STATUSES = {
+ cext.SSTOP: _common.STATUS_STOPPED,
+ cext.SSLEEP: _common.STATUS_SLEEPING,
+ cext.SRUN: _common.STATUS_RUNNING,
+ cext.SIDL: _common.STATUS_IDLE,
+ cext.SWAIT: _common.STATUS_WAITING,
+ cext.SLOCK: _common.STATUS_LOCKED,
+ cext.SZOMB: _common.STATUS_ZOMBIE,
+}
+
+TCP_STATUSES = {
+ cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED,
+ cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT,
+ cext.TCPS_SYN_RECEIVED: _common.CONN_SYN_RECV,
+ cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1,
+ cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2,
+ cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT,
+ cext.TCPS_CLOSED: _common.CONN_CLOSE,
+ cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
+ cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK,
+ cext.TCPS_LISTEN: _common.CONN_LISTEN,
+ cext.TCPS_CLOSING: _common.CONN_CLOSING,
+ cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
+}
+
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+AF_LINK = cext_posix.AF_LINK
+
+# extend base mem ntuple with BSD-specific memory metrics
+svmem = namedtuple(
+ 'svmem', ['total', 'available', 'percent', 'used', 'free',
+ 'active', 'inactive', 'buffers', 'cached', 'shared', 'wired'])
+scputimes = namedtuple(
+ 'scputimes', ['user', 'nice', 'system', 'idle', 'irq'])
+pextmem = namedtuple('pextmem', ['rss', 'vms', 'text', 'data', 'stack'])
+pmmap_grouped = namedtuple(
+ 'pmmap_grouped', 'path rss private ref_count shadow_count')
+pmmap_ext = namedtuple(
+ 'pmmap_ext', 'addr perms path rss private ref_count shadow_count')
+
+# set later from __init__.py
+NoSuchProcess = None
+ZombieProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+
+def virtual_memory():
+ """System virtual memory as a namedtuple."""
+ mem = cext.virtual_mem()
+ total, free, active, inactive, wired, cached, buffers, shared = mem
+ avail = inactive + cached + free
+ used = active + wired + cached
+ percent = usage_percent((total - avail), total, _round=1)
+ return svmem(total, avail, percent, used, free,
+ active, inactive, buffers, cached, shared, wired)
+
+
+def swap_memory():
+ """System swap memory as (total, used, free, sin, sout) namedtuple."""
+ total, used, free, sin, sout = [x * PAGESIZE for x in cext.swap_mem()]
+ percent = usage_percent(used, total, _round=1)
+ return _common.sswap(total, used, free, percent, sin, sout)
+
+
+def cpu_times():
+ """Return system per-CPU times as a namedtuple"""
+ user, nice, system, idle, irq = cext.cpu_times()
+ return scputimes(user, nice, system, idle, irq)
+
+
+if hasattr(cext, "per_cpu_times"):
+ def per_cpu_times():
+ """Return system CPU times as a namedtuple"""
+ ret = []
+ for cpu_t in cext.per_cpu_times():
+ user, nice, system, idle, irq = cpu_t
+ item = scputimes(user, nice, system, idle, irq)
+ ret.append(item)
+ return ret
+else:
+ # XXX
+ # Ok, this is very dirty.
+ # On FreeBSD < 8 we cannot gather per-cpu information, see:
+ # https://github.com/giampaolo/psutil/issues/226
+ # If num cpus > 1, on first call we return single cpu times to avoid a
+ # crash at psutil import time.
+ # Next calls will fail with NotImplementedError
+ def per_cpu_times():
+ if cpu_count_logical() == 1:
+ return [cpu_times()]
+ if per_cpu_times.__called__:
+ raise NotImplementedError("supported only starting from FreeBSD 8")
+ per_cpu_times.__called__ = True
+ return [cpu_times()]
+
+ per_cpu_times.__called__ = False
+
+
+def cpu_count_logical():
+ """Return the number of logical CPUs in the system."""
+ return cext.cpu_count_logical()
+
+
+def cpu_count_physical():
+ """Return the number of physical CPUs in the system."""
+ # From the C module we'll get an XML string similar to this:
+ # http://manpages.ubuntu.com/manpages/precise/man4/smp.4freebsd.html
+ # We may get None in case "sysctl kern.sched.topology_spec"
+ # is not supported on this BSD version, in which case we'll mimic
+ # os.cpu_count() and return None.
+ ret = None
+ s = cext.cpu_count_phys()
+ if s is not None:
+ # get rid of padding chars appended at the end of the string
+ index = s.rfind("</groups>")
+ if index != -1:
+ s = s[:index + 9]
+ root = ET.fromstring(s)
+ try:
+ ret = len(root.findall('group/children/group/cpu')) or None
+ finally:
+ # needed otherwise it will memleak
+ root.clear()
+ if not ret:
+ # If logical CPUs are 1 it's obvious we'll have only 1
+ # physical CPU.
+ if cpu_count_logical() == 1:
+ return 1
+ return ret
+
+
+def boot_time():
+ """The system boot time expressed in seconds since the epoch."""
+ return cext.boot_time()
+
+
+def disk_partitions(all=False):
+ retlist = []
+ partitions = cext.disk_partitions()
+ for partition in partitions:
+ device, mountpoint, fstype, opts = partition
+ if device == 'none':
+ device = ''
+ if not all:
+ if not os.path.isabs(device) or not os.path.exists(device):
+ continue
+ ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
+ retlist.append(ntuple)
+ return retlist
+
+
+def users():
+ retlist = []
+ rawlist = cext.users()
+ for item in rawlist:
+ user, tty, hostname, tstamp = item
+ if tty == '~':
+ continue # reboot or shutdown
+ nt = _common.suser(user, tty or None, hostname, tstamp)
+ retlist.append(nt)
+ return retlist
+
+
+def net_connections(kind):
+ if kind not in _common.conn_tmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in conn_tmap])))
+ families, types = conn_tmap[kind]
+ ret = set()
+ rawlist = cext.net_connections()
+ for item in rawlist:
+ fd, fam, type, laddr, raddr, status, pid = item
+ # TODO: apply filter at C level
+ if fam in families and type in types:
+ try:
+ status = TCP_STATUSES[status]
+ except KeyError:
+ # XXX: Not sure why this happens. I saw this occurring
+ # with IPv6 sockets opened by 'vim'. Those sockets
+ # have a very short lifetime so maybe the kernel
+ # can't initialize their status?
+ status = TCP_STATUSES[cext.PSUTIL_CONN_NONE]
+ fam = sockfam_to_enum(fam)
+ type = socktype_to_enum(type)
+ nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid)
+ ret.add(nt)
+ return list(ret)
+
+
+def net_if_stats():
+ """Get NIC stats (isup, duplex, speed, mtu)."""
+ names = net_io_counters().keys()
+ ret = {}
+ for name in names:
+ isup, duplex, speed, mtu = cext_posix.net_if_stats(name)
+ if hasattr(_common, 'NicDuplex'):
+ duplex = _common.NicDuplex(duplex)
+ ret[name] = _common.snicstats(isup, duplex, speed, mtu)
+ return ret
+
+
+pids = cext.pids
+pid_exists = _psposix.pid_exists
+disk_usage = _psposix.disk_usage
+net_io_counters = cext.net_io_counters
+disk_io_counters = cext.disk_io_counters
+net_if_addrs = cext_posix.net_if_addrs
+
+
+def wrap_exceptions(fun):
+ """Decorator which translates bare OSError exceptions into
+ NoSuchProcess and AccessDenied.
+ """
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except OSError as err:
+ # support for private module import
+ if (NoSuchProcess is None or AccessDenied is None or
+ ZombieProcess is None):
+ raise
+ if err.errno == errno.ESRCH:
+ if not pid_exists(self.pid):
+ raise NoSuchProcess(self.pid, self._name)
+ else:
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise AccessDenied(self.pid, self._name)
+ raise
+ return wrapper
+
+
+class Process(object):
+ """Wrapper class around underlying C implementation."""
+
+ __slots__ = ["pid", "_name", "_ppid"]
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._name = None
+ self._ppid = None
+
+ @wrap_exceptions
+ def name(self):
+ return cext.proc_name(self.pid)
+
+ @wrap_exceptions
+ def exe(self):
+ return cext.proc_exe(self.pid)
+
+ @wrap_exceptions
+ def cmdline(self):
+ return cext.proc_cmdline(self.pid)
+
+ @wrap_exceptions
+ def terminal(self):
+ tty_nr = cext.proc_tty_nr(self.pid)
+ tmap = _psposix._get_terminal_map()
+ try:
+ return tmap[tty_nr]
+ except KeyError:
+ return None
+
+ @wrap_exceptions
+ def ppid(self):
+ return cext.proc_ppid(self.pid)
+
+ @wrap_exceptions
+ def uids(self):
+ real, effective, saved = cext.proc_uids(self.pid)
+ return _common.puids(real, effective, saved)
+
+ @wrap_exceptions
+ def gids(self):
+ real, effective, saved = cext.proc_gids(self.pid)
+ return _common.pgids(real, effective, saved)
+
+ @wrap_exceptions
+ def cpu_times(self):
+ user, system = cext.proc_cpu_times(self.pid)
+ return _common.pcputimes(user, system)
+
+ @wrap_exceptions
+ def memory_info(self):
+ rss, vms = cext.proc_memory_info(self.pid)[:2]
+ return _common.pmem(rss, vms)
+
+ @wrap_exceptions
+ def memory_info_ex(self):
+ return pextmem(*cext.proc_memory_info(self.pid))
+
+ @wrap_exceptions
+ def create_time(self):
+ return cext.proc_create_time(self.pid)
+
+ @wrap_exceptions
+ def num_threads(self):
+ return cext.proc_num_threads(self.pid)
+
+ @wrap_exceptions
+ def num_ctx_switches(self):
+ return _common.pctxsw(*cext.proc_num_ctx_switches(self.pid))
+
+ @wrap_exceptions
+ def threads(self):
+ rawlist = cext.proc_threads(self.pid)
+ retlist = []
+ for thread_id, utime, stime in rawlist:
+ ntuple = _common.pthread(thread_id, utime, stime)
+ retlist.append(ntuple)
+ return retlist
+
+ @wrap_exceptions
+ def connections(self, kind='inet'):
+ if kind not in conn_tmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in conn_tmap])))
+ families, types = conn_tmap[kind]
+ rawlist = cext.proc_connections(self.pid, families, types)
+ ret = []
+ for item in rawlist:
+ fd, fam, type, laddr, raddr, status = item
+ fam = sockfam_to_enum(fam)
+ type = socktype_to_enum(type)
+ status = TCP_STATUSES[status]
+ nt = _common.pconn(fd, fam, type, laddr, raddr, status)
+ ret.append(nt)
+ return ret
+
+ @wrap_exceptions
+ def wait(self, timeout=None):
+ try:
+ return _psposix.wait_pid(self.pid, timeout)
+ except _psposix.TimeoutExpired:
+ # support for private module import
+ if TimeoutExpired is None:
+ raise
+ raise TimeoutExpired(timeout, self.pid, self._name)
+
+ @wrap_exceptions
+ def nice_get(self):
+ return cext_posix.getpriority(self.pid)
+
+ @wrap_exceptions
+ def nice_set(self, value):
+ return cext_posix.setpriority(self.pid, value)
+
+ @wrap_exceptions
+ def status(self):
+ code = cext.proc_status(self.pid)
+ if code in PROC_STATUSES:
+ return PROC_STATUSES[code]
+ # XXX is this legit? will we even ever get here?
+ return "?"
+
+ @wrap_exceptions
+ def io_counters(self):
+ rc, wc, rb, wb = cext.proc_io_counters(self.pid)
+ return _common.pio(rc, wc, rb, wb)
+
+ nt_mmap_grouped = namedtuple(
+ 'mmap', 'path rss private ref_count shadow_count')
+ nt_mmap_ext = namedtuple(
+ 'mmap', 'addr perms path rss private ref_count shadow_count')
+
+ # FreeBSD < 8 does not support functions based on kinfo_getfile()
+ # and kinfo_getvmmap()
+ if hasattr(cext, 'proc_open_files'):
+
+ @wrap_exceptions
+ def open_files(self):
+ """Return files opened by process as a list of namedtuples."""
+ rawlist = cext.proc_open_files(self.pid)
+ return [_common.popenfile(path, fd) for path, fd in rawlist]
+
+ @wrap_exceptions
+ def cwd(self):
+ """Return process current working directory."""
+ # sometimes we get an empty string, in which case we turn
+ # it into None
+ return cext.proc_cwd(self.pid) or None
+
+ @wrap_exceptions
+ def memory_maps(self):
+ return cext.proc_memory_maps(self.pid)
+
+ @wrap_exceptions
+ def num_fds(self):
+ """Return the number of file descriptors opened by this process."""
+ return cext.proc_num_fds(self.pid)
+
+ else:
+ def _not_implemented(self):
+ raise NotImplementedError("supported only starting from FreeBSD 8")
+
+ open_files = _not_implemented
+ cwd = _not_implemented
+ memory_maps = _not_implemented
+ num_fds = _not_implemented
+
+ @wrap_exceptions
+ def cpu_affinity_get(self):
+ return cext.proc_cpu_affinity_get(self.pid)
+
+ @wrap_exceptions
+ def cpu_affinity_set(self, cpus):
+ # Pre-emptively check if CPUs are valid because the C
+ # function has a weird behavior in case of invalid CPUs,
+ # see: https://github.com/giampaolo/psutil/issues/586
+ allcpus = tuple(range(len(per_cpu_times())))
+ for cpu in cpus:
+ if cpu not in allcpus:
+ raise ValueError("invalid CPU #%i (choose between %s)"
+ % (cpu, allcpus))
+ try:
+ cext.proc_cpu_affinity_set(self.pid, cpus)
+ except OSError as err:
+ # 'man cpuset_setaffinity' about EDEADLK:
+ # <<the call would leave a thread without a valid CPU to run
+ # on because the set does not overlap with the thread's
+ # anonymous mask>>
+ if err.errno in (errno.EINVAL, errno.EDEADLK):
+ for cpu in cpus:
+ if cpu not in allcpus:
+ raise ValueError("invalid CPU #%i (choose between %s)"
+ % (cpu, allcpus))
+ raise
diff --git a/python/psutil/psutil/_pslinux.py b/python/psutil/psutil/_pslinux.py
new file mode 100644
index 000000000..7eb25f519
--- /dev/null
+++ b/python/psutil/psutil/_pslinux.py
@@ -0,0 +1,1206 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Linux platform implementation."""
+
+from __future__ import division
+
+import base64
+import errno
+import functools
+import os
+import re
+import socket
+import struct
+import sys
+import warnings
+from collections import namedtuple, defaultdict
+
+from . import _common
+from . import _psposix
+from . import _psutil_linux as cext
+from . import _psutil_posix as cext_posix
+from ._common import isfile_strict, usage_percent
+from ._common import NIC_DUPLEX_FULL, NIC_DUPLEX_HALF, NIC_DUPLEX_UNKNOWN
+from ._compat import PY3, long
+
+if sys.version_info >= (3, 4):
+ import enum
+else:
+ enum = None
+
+
+__extra__all__ = [
+ # io prio constants
+ "IOPRIO_CLASS_NONE", "IOPRIO_CLASS_RT", "IOPRIO_CLASS_BE",
+ "IOPRIO_CLASS_IDLE",
+ # connection status constants
+ "CONN_ESTABLISHED", "CONN_SYN_SENT", "CONN_SYN_RECV", "CONN_FIN_WAIT1",
+ "CONN_FIN_WAIT2", "CONN_TIME_WAIT", "CONN_CLOSE", "CONN_CLOSE_WAIT",
+ "CONN_LAST_ACK", "CONN_LISTEN", "CONN_CLOSING", ]
+
+# --- constants
+
+HAS_PRLIMIT = hasattr(cext, "linux_prlimit")
+
+# RLIMIT_* constants, not guaranteed to be present on all kernels
+if HAS_PRLIMIT:
+ for name in dir(cext):
+ if name.startswith('RLIM'):
+ __extra__all__.append(name)
+
+# Number of clock ticks per second
+CLOCK_TICKS = os.sysconf("SC_CLK_TCK")
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+BOOT_TIME = None # set later
+DEFAULT_ENCODING = sys.getdefaultencoding()
+if enum is None:
+ AF_LINK = socket.AF_PACKET
+else:
+ AddressFamily = enum.IntEnum('AddressFamily',
+ {'AF_LINK': socket.AF_PACKET})
+ AF_LINK = AddressFamily.AF_LINK
+
+# ioprio_* constants http://linux.die.net/man/2/ioprio_get
+if enum is None:
+ IOPRIO_CLASS_NONE = 0
+ IOPRIO_CLASS_RT = 1
+ IOPRIO_CLASS_BE = 2
+ IOPRIO_CLASS_IDLE = 3
+else:
+ class IOPriority(enum.IntEnum):
+ IOPRIO_CLASS_NONE = 0
+ IOPRIO_CLASS_RT = 1
+ IOPRIO_CLASS_BE = 2
+ IOPRIO_CLASS_IDLE = 3
+
+ globals().update(IOPriority.__members__)
+
+# taken from /fs/proc/array.c
+PROC_STATUSES = {
+ "R": _common.STATUS_RUNNING,
+ "S": _common.STATUS_SLEEPING,
+ "D": _common.STATUS_DISK_SLEEP,
+ "T": _common.STATUS_STOPPED,
+ "t": _common.STATUS_TRACING_STOP,
+ "Z": _common.STATUS_ZOMBIE,
+ "X": _common.STATUS_DEAD,
+ "x": _common.STATUS_DEAD,
+ "K": _common.STATUS_WAKE_KILL,
+ "W": _common.STATUS_WAKING
+}
+
+# http://students.mimuw.edu.pl/lxr/source/include/net/tcp_states.h
+TCP_STATUSES = {
+ "01": _common.CONN_ESTABLISHED,
+ "02": _common.CONN_SYN_SENT,
+ "03": _common.CONN_SYN_RECV,
+ "04": _common.CONN_FIN_WAIT1,
+ "05": _common.CONN_FIN_WAIT2,
+ "06": _common.CONN_TIME_WAIT,
+ "07": _common.CONN_CLOSE,
+ "08": _common.CONN_CLOSE_WAIT,
+ "09": _common.CONN_LAST_ACK,
+ "0A": _common.CONN_LISTEN,
+ "0B": _common.CONN_CLOSING
+}
+
+# set later from __init__.py
+NoSuchProcess = None
+ZombieProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+
+# --- named tuples
+
+def _get_cputimes_fields():
+ """Return a namedtuple of variable fields depending on the
+ CPU times available on this Linux kernel version which may be:
+ (user, nice, system, idle, iowait, irq, softirq, [steal, [guest,
+ [guest_nice]]])
+ """
+ with open('/proc/stat', 'rb') as f:
+ values = f.readline().split()[1:]
+ fields = ['user', 'nice', 'system', 'idle', 'iowait', 'irq', 'softirq']
+ vlen = len(values)
+ if vlen >= 8:
+ # Linux >= 2.6.11
+ fields.append('steal')
+ if vlen >= 9:
+ # Linux >= 2.6.24
+ fields.append('guest')
+ if vlen >= 10:
+ # Linux >= 3.2.0
+ fields.append('guest_nice')
+ return fields
+
+
+scputimes = namedtuple('scputimes', _get_cputimes_fields())
+
+svmem = namedtuple(
+ 'svmem', ['total', 'available', 'percent', 'used', 'free',
+ 'active', 'inactive', 'buffers', 'cached'])
+
+pextmem = namedtuple('pextmem', 'rss vms shared text lib data dirty')
+
+pmmap_grouped = namedtuple(
+ 'pmmap_grouped', ['path', 'rss', 'size', 'pss', 'shared_clean',
+ 'shared_dirty', 'private_clean', 'private_dirty',
+ 'referenced', 'anonymous', 'swap'])
+
+pmmap_ext = namedtuple(
+ 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
+
+
+# --- system memory
+
+def virtual_memory():
+ total, free, buffers, shared, _, _ = cext.linux_sysinfo()
+ cached = active = inactive = None
+ with open('/proc/meminfo', 'rb') as f:
+ for line in f:
+ if line.startswith(b"Cached:"):
+ cached = int(line.split()[1]) * 1024
+ elif line.startswith(b"Active:"):
+ active = int(line.split()[1]) * 1024
+ elif line.startswith(b"Inactive:"):
+ inactive = int(line.split()[1]) * 1024
+ if (cached is not None and
+ active is not None and
+ inactive is not None):
+ break
+ else:
+ # we might get here when dealing with exotic Linux flavors, see:
+ # https://github.com/giampaolo/psutil/issues/313
+ msg = "'cached', 'active' and 'inactive' memory stats couldn't " \
+ "be determined and were set to 0"
+ warnings.warn(msg, RuntimeWarning)
+ cached = active = inactive = 0
+ avail = free + buffers + cached
+ used = total - free
+ percent = usage_percent((total - avail), total, _round=1)
+ return svmem(total, avail, percent, used, free,
+ active, inactive, buffers, cached)
+
+
+def swap_memory():
+ _, _, _, _, total, free = cext.linux_sysinfo()
+ used = total - free
+ percent = usage_percent(used, total, _round=1)
+ # get pgin/pgouts
+ with open("/proc/vmstat", "rb") as f:
+ sin = sout = None
+ for line in f:
+ # values are expressed in 4 kilo bytes, we want bytes instead
+ if line.startswith(b'pswpin'):
+ sin = int(line.split(b' ')[1]) * 4 * 1024
+ elif line.startswith(b'pswpout'):
+ sout = int(line.split(b' ')[1]) * 4 * 1024
+ if sin is not None and sout is not None:
+ break
+ else:
+ # we might get here when dealing with exotic Linux flavors, see:
+ # https://github.com/giampaolo/psutil/issues/313
+ msg = "'sin' and 'sout' swap memory stats couldn't " \
+ "be determined and were set to 0"
+ warnings.warn(msg, RuntimeWarning)
+ sin = sout = 0
+ return _common.sswap(total, used, free, percent, sin, sout)
+
+
+# --- CPUs
+
+def cpu_times():
+ """Return a named tuple representing the following system-wide
+ CPU times:
+ (user, nice, system, idle, iowait, irq, softirq [steal, [guest,
+ [guest_nice]]])
+ Last 3 fields may not be available on all Linux kernel versions.
+ """
+ with open('/proc/stat', 'rb') as f:
+ values = f.readline().split()
+ fields = values[1:len(scputimes._fields) + 1]
+ fields = [float(x) / CLOCK_TICKS for x in fields]
+ return scputimes(*fields)
+
+
+def per_cpu_times():
+ """Return a list of namedtuple representing the CPU times
+ for every CPU available on the system.
+ """
+ cpus = []
+ with open('/proc/stat', 'rb') as f:
+ # get rid of the first line which refers to system wide CPU stats
+ f.readline()
+ for line in f:
+ if line.startswith(b'cpu'):
+ values = line.split()
+ fields = values[1:len(scputimes._fields) + 1]
+ fields = [float(x) / CLOCK_TICKS for x in fields]
+ entry = scputimes(*fields)
+ cpus.append(entry)
+ return cpus
+
+
+def cpu_count_logical():
+ """Return the number of logical CPUs in the system."""
+ try:
+ return os.sysconf("SC_NPROCESSORS_ONLN")
+ except ValueError:
+ # as a second fallback we try to parse /proc/cpuinfo
+ num = 0
+ with open('/proc/cpuinfo', 'rb') as f:
+ for line in f:
+ if line.lower().startswith(b'processor'):
+ num += 1
+
+ # unknown format (e.g. armel/sparc architectures), see:
+ # https://github.com/giampaolo/psutil/issues/200
+ # try to parse /proc/stat as a last resort
+ if num == 0:
+ search = re.compile(r'cpu\d')
+ with open('/proc/stat', 'rt') as f:
+ for line in f:
+ line = line.split(' ')[0]
+ if search.match(line):
+ num += 1
+
+ if num == 0:
+ # mimic os.cpu_count()
+ return None
+ return num
+
+
+def cpu_count_physical():
+ """Return the number of physical cores in the system."""
+ mapping = {}
+ current_info = {}
+ with open('/proc/cpuinfo', 'rb') as f:
+ for line in f:
+ line = line.strip().lower()
+ if not line:
+ # new section
+ if (b'physical id' in current_info and
+ b'cpu cores' in current_info):
+ mapping[current_info[b'physical id']] = \
+ current_info[b'cpu cores']
+ current_info = {}
+ else:
+ # ongoing section
+ if (line.startswith(b'physical id') or
+ line.startswith(b'cpu cores')):
+ key, value = line.split(b'\t:', 1)
+ current_info[key] = int(value)
+
+ # mimic os.cpu_count()
+ return sum(mapping.values()) or None
+
+
+# --- other system functions
+
+def users():
+ """Return currently connected users as a list of namedtuples."""
+ retlist = []
+ rawlist = cext.users()
+ for item in rawlist:
+ user, tty, hostname, tstamp, user_process = item
+ # note: the underlying C function includes entries about
+ # system boot, run level and others. We might want
+ # to use them in the future.
+ if not user_process:
+ continue
+ if hostname == ':0.0' or hostname == ':0':
+ hostname = 'localhost'
+ nt = _common.suser(user, tty or None, hostname, tstamp)
+ retlist.append(nt)
+ return retlist
+
+
+def boot_time():
+ """Return the system boot time expressed in seconds since the epoch."""
+ global BOOT_TIME
+ with open('/proc/stat', 'rb') as f:
+ for line in f:
+ if line.startswith(b'btime'):
+ ret = float(line.strip().split()[1])
+ BOOT_TIME = ret
+ return ret
+ raise RuntimeError("line 'btime' not found in /proc/stat")
+
+
+# --- processes
+
+def pids():
+ """Returns a list of PIDs currently running on the system."""
+ return [int(x) for x in os.listdir(b'/proc') if x.isdigit()]
+
+
+def pid_exists(pid):
+ """Check For the existence of a unix pid."""
+ return _psposix.pid_exists(pid)
+
+
+# --- network
+
+class Connections:
+ """A wrapper on top of /proc/net/* files, retrieving per-process
+ and system-wide open connections (TCP, UDP, UNIX) similarly to
+ "netstat -an".
+
+ Note: in case of UNIX sockets we're only able to determine the
+ local endpoint/path, not the remote one it's connected to.
+ According to [1] it would be possible, but not easily.
+
+ [1] http://serverfault.com/a/417946
+ """
+
+ def __init__(self):
+ tcp4 = ("tcp", socket.AF_INET, socket.SOCK_STREAM)
+ tcp6 = ("tcp6", socket.AF_INET6, socket.SOCK_STREAM)
+ udp4 = ("udp", socket.AF_INET, socket.SOCK_DGRAM)
+ udp6 = ("udp6", socket.AF_INET6, socket.SOCK_DGRAM)
+ unix = ("unix", socket.AF_UNIX, None)
+ self.tmap = {
+ "all": (tcp4, tcp6, udp4, udp6, unix),
+ "tcp": (tcp4, tcp6),
+ "tcp4": (tcp4,),
+ "tcp6": (tcp6,),
+ "udp": (udp4, udp6),
+ "udp4": (udp4,),
+ "udp6": (udp6,),
+ "unix": (unix,),
+ "inet": (tcp4, tcp6, udp4, udp6),
+ "inet4": (tcp4, udp4),
+ "inet6": (tcp6, udp6),
+ }
+
+ def get_proc_inodes(self, pid):
+ inodes = defaultdict(list)
+ for fd in os.listdir("/proc/%s/fd" % pid):
+ try:
+ inode = os.readlink("/proc/%s/fd/%s" % (pid, fd))
+ except OSError as err:
+ # ENOENT == file which is gone in the meantime;
+ # os.stat('/proc/%s' % self.pid) will be done later
+ # to force NSP (if it's the case)
+ if err.errno in (errno.ENOENT, errno.ESRCH):
+ continue
+ elif err.errno == errno.EINVAL:
+ # not a link
+ continue
+ else:
+ raise
+ else:
+ if inode.startswith('socket:['):
+ # the process is using a socket
+ inode = inode[8:][:-1]
+ inodes[inode].append((pid, int(fd)))
+ return inodes
+
+ def get_all_inodes(self):
+ inodes = {}
+ for pid in pids():
+ try:
+ inodes.update(self.get_proc_inodes(pid))
+ except OSError as err:
+ # os.listdir() will raise a lot of "access denied"
+ # exceptions for unprivileged users; that's fine, as
+ # we'll just end up returning a connection with PID
+ # and fd set to None anyway.
+ # Both netstat -an and lsof do the same, so it's
+ # unlikely we can do any better.
+ # ENOENT just means a PID disappeared on us.
+ if err.errno not in (
+ errno.ENOENT, errno.ESRCH, errno.EPERM, errno.EACCES):
+ raise
+ return inodes
+
+ def decode_address(self, addr, family):
+ """Accept an "ip:port" address as displayed in /proc/net/*
+ and convert it into a human readable form, like:
+
+ "0500000A:0016" -> ("10.0.0.5", 22)
+ "0000000000000000FFFF00000100007F:9E49" -> ("::ffff:127.0.0.1", 40521)
+
+ The IP address portion is a little- or big-endian four-byte
+ hexadecimal number, depending on the host byte order; on
+ little-endian machines the least significant byte comes
+ first, so we need to reverse the byte order to convert it
+ to an IP address.
+ The port is represented as a two-byte hexadecimal number.
+
+ Reference:
+ http://linuxdevcenter.com/pub/a/linux/2000/11/16/LinuxAdmin.html
+ """
+ ip, port = addr.split(':')
+ port = int(port, 16)
+ # this usually refers to a local socket in listen mode with
+ # no end-points connected
+ if not port:
+ return ()
+ if PY3:
+ ip = ip.encode('ascii')
+ if family == socket.AF_INET:
+ # see: https://github.com/giampaolo/psutil/issues/201
+ if sys.byteorder == 'little':
+ ip = socket.inet_ntop(family, base64.b16decode(ip)[::-1])
+ else:
+ ip = socket.inet_ntop(family, base64.b16decode(ip))
+ else: # IPv6
+ # old version - let's keep it, just in case...
+ # ip = ip.decode('hex')
+ # return socket.inet_ntop(socket.AF_INET6,
+ # ''.join(ip[i:i+4][::-1] for i in xrange(0, 16, 4)))
+ ip = base64.b16decode(ip)
+ # see: https://github.com/giampaolo/psutil/issues/201
+ if sys.byteorder == 'little':
+ ip = socket.inet_ntop(
+ socket.AF_INET6,
+ struct.pack('>4I', *struct.unpack('<4I', ip)))
+ else:
+ ip = socket.inet_ntop(
+ socket.AF_INET6,
+ struct.pack('<4I', *struct.unpack('<4I', ip)))
+ return (ip, port)
+
+ def process_inet(self, file, family, type_, inodes, filter_pid=None):
+ """Parse /proc/net/tcp* and /proc/net/udp* files."""
+ if file.endswith('6') and not os.path.exists(file):
+ # IPv6 not supported
+ return
+ with open(file, 'rt') as f:
+ f.readline() # skip the first line
+ for line in f:
+ try:
+ _, laddr, raddr, status, _, _, _, _, _, inode = \
+ line.split()[:10]
+ except ValueError:
+ raise RuntimeError(
+ "error while parsing %s; malformed line %r" % (
+ file, line))
+ if inode in inodes:
+ # # We assume inet sockets are unique, so we error
+ # # out if there are multiple references to the
+ # # same inode. We won't do this for UNIX sockets.
+ # if len(inodes[inode]) > 1 and family != socket.AF_UNIX:
+ # raise ValueError("ambiguos inode with multiple "
+ # "PIDs references")
+ pid, fd = inodes[inode][0]
+ else:
+ pid, fd = None, -1
+ if filter_pid is not None and filter_pid != pid:
+ continue
+ else:
+ if type_ == socket.SOCK_STREAM:
+ status = TCP_STATUSES[status]
+ else:
+ status = _common.CONN_NONE
+ laddr = self.decode_address(laddr, family)
+ raddr = self.decode_address(raddr, family)
+ yield (fd, family, type_, laddr, raddr, status, pid)
+
+ def process_unix(self, file, family, inodes, filter_pid=None):
+ """Parse /proc/net/unix files."""
+ with open(file, 'rt') as f:
+ f.readline() # skip the first line
+ for line in f:
+ tokens = line.split()
+ try:
+ _, _, _, _, type_, _, inode = tokens[0:7]
+ except ValueError:
+ raise RuntimeError(
+ "error while parsing %s; malformed line %r" % (
+ file, line))
+ if inode in inodes:
+ # With UNIX sockets we can have a single inode
+ # referencing many file descriptors.
+ pairs = inodes[inode]
+ else:
+ pairs = [(None, -1)]
+ for pid, fd in pairs:
+ if filter_pid is not None and filter_pid != pid:
+ continue
+ else:
+ if len(tokens) == 8:
+ path = tokens[-1]
+ else:
+ path = ""
+ type_ = int(type_)
+ raddr = None
+ status = _common.CONN_NONE
+ yield (fd, family, type_, path, raddr, status, pid)
+
+ def retrieve(self, kind, pid=None):
+ if kind not in self.tmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in self.tmap])))
+ if pid is not None:
+ inodes = self.get_proc_inodes(pid)
+ if not inodes:
+ # no connections for this process
+ return []
+ else:
+ inodes = self.get_all_inodes()
+ ret = set()
+ for f, family, type_ in self.tmap[kind]:
+ if family in (socket.AF_INET, socket.AF_INET6):
+ ls = self.process_inet(
+ "/proc/net/%s" % f, family, type_, inodes, filter_pid=pid)
+ else:
+ ls = self.process_unix(
+ "/proc/net/%s" % f, family, inodes, filter_pid=pid)
+ for fd, family, type_, laddr, raddr, status, bound_pid in ls:
+ if pid:
+ conn = _common.pconn(fd, family, type_, laddr, raddr,
+ status)
+ else:
+ conn = _common.sconn(fd, family, type_, laddr, raddr,
+ status, bound_pid)
+ ret.add(conn)
+ return list(ret)
+
+
+_connections = Connections()
+
+
+def net_connections(kind='inet'):
+ """Return system-wide open connections."""
+ return _connections.retrieve(kind)
+
+
+def net_io_counters():
+ """Return network I/O statistics for every network interface
+ installed on the system as a dict of raw tuples.
+ """
+ with open("/proc/net/dev", "rt") as f:
+ lines = f.readlines()
+ retdict = {}
+ for line in lines[2:]:
+ colon = line.rfind(':')
+ assert colon > 0, repr(line)
+ name = line[:colon].strip()
+ fields = line[colon + 1:].strip().split()
+ bytes_recv = int(fields[0])
+ packets_recv = int(fields[1])
+ errin = int(fields[2])
+ dropin = int(fields[3])
+ bytes_sent = int(fields[8])
+ packets_sent = int(fields[9])
+ errout = int(fields[10])
+ dropout = int(fields[11])
+ retdict[name] = (bytes_sent, bytes_recv, packets_sent, packets_recv,
+ errin, errout, dropin, dropout)
+ return retdict
+
+
+def net_if_stats():
+ """Get NIC stats (isup, duplex, speed, mtu)."""
+ duplex_map = {cext.DUPLEX_FULL: NIC_DUPLEX_FULL,
+ cext.DUPLEX_HALF: NIC_DUPLEX_HALF,
+ cext.DUPLEX_UNKNOWN: NIC_DUPLEX_UNKNOWN}
+ names = net_io_counters().keys()
+ ret = {}
+ for name in names:
+ isup, duplex, speed, mtu = cext.net_if_stats(name)
+ duplex = duplex_map[duplex]
+ ret[name] = _common.snicstats(isup, duplex, speed, mtu)
+ return ret
+
+
+net_if_addrs = cext_posix.net_if_addrs
+
+
+# --- disks
+
+def disk_io_counters():
+ """Return disk I/O statistics for every disk installed on the
+ system as a dict of raw tuples.
+ """
+ # man iostat states that sectors are equivalent to blocks and
+ # have a size of 512 bytes since 2.4 kernels. This value is
+ # needed to calculate the amount of disk I/O in bytes.
+ SECTOR_SIZE = 512
+
+ # determine partitions we want to look for
+ partitions = []
+ with open("/proc/partitions", "rt") as f:
+ lines = f.readlines()[2:]
+ for line in reversed(lines):
+ _, _, _, name = line.split()
+ if name[-1].isdigit():
+ # we're dealing with a partition (e.g. 'sda1'); 'sda' will
+ # also be around but we want to omit it
+ partitions.append(name)
+ else:
+ if not partitions or not partitions[-1].startswith(name):
+ # we're dealing with a disk entity for which no
+ # partitions have been defined (e.g. 'sda' but
+ # 'sda1' was not around), see:
+ # https://github.com/giampaolo/psutil/issues/338
+ partitions.append(name)
+ #
+ retdict = {}
+ with open("/proc/diskstats", "rt") as f:
+ lines = f.readlines()
+ for line in lines:
+ # http://www.mjmwired.net/kernel/Documentation/iostats.txt
+ fields = line.split()
+ if len(fields) > 7:
+ _, _, name, reads, _, rbytes, rtime, writes, _, wbytes, wtime = \
+ fields[:11]
+ else:
+ # from kernel 2.6.0 to 2.6.25
+ _, _, name, reads, rbytes, writes, wbytes = fields
+ rtime, wtime = 0, 0
+ if name in partitions:
+ rbytes = int(rbytes) * SECTOR_SIZE
+ wbytes = int(wbytes) * SECTOR_SIZE
+ reads = int(reads)
+ writes = int(writes)
+ rtime = int(rtime)
+ wtime = int(wtime)
+ retdict[name] = (reads, writes, rbytes, wbytes, rtime, wtime)
+ return retdict
+
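+# Example (illustrative; 'sda1' is an assumed partition name): byte
+# counts in the tuples above are already multiplied by SECTOR_SIZE.
+#
+#     reads, writes, rbytes, wbytes, rtime, wtime = \
+#         disk_io_counters()['sda1']
+#     print("read %.1f MB" % (rbytes / 1024.0 / 1024.0))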
+
+def disk_partitions(all=False):
+ """Return mounted disk partitions as a list of namedtuples"""
+ fstypes = set()
+ with open("/proc/filesystems", "r") as f:
+ for line in f:
+ line = line.strip()
+ if not line.startswith("nodev"):
+ fstypes.add(line.strip())
+ else:
+ # ignore all lines starting with "nodev" except "nodev zfs"
+ fstype = line.split("\t")[1]
+ if fstype == "zfs":
+ fstypes.add("zfs")
+
+ retlist = []
+ partitions = cext.disk_partitions()
+ for partition in partitions:
+ device, mountpoint, fstype, opts = partition
+ if device == 'none':
+ device = ''
+ if not all:
+ if device == '' or fstype not in fstypes:
+ continue
+ ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
+ retlist.append(ntuple)
+ return retlist
+
+
+disk_usage = _psposix.disk_usage
+
+
+# --- decorators
+
+def wrap_exceptions(fun):
+ """Decorator which translates bare OSError and IOError exceptions
+ into NoSuchProcess and AccessDenied.
+ """
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except EnvironmentError as err:
+ # support for private module import
+ if NoSuchProcess is None or AccessDenied is None:
+ raise
+ # ENOENT (no such file or directory) gets raised on open().
+ # ESRCH (no such process) can get raised on read() if
+ # process is gone in meantime.
+ if err.errno in (errno.ENOENT, errno.ESRCH):
+ raise NoSuchProcess(self.pid, self._name)
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise AccessDenied(self.pid, self._name)
+ raise
+ return wrapper
+
+
+def wrap_exceptions_w_zombie(fun):
+ """Same as above but also handles zombies."""
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ try:
+ return wrap_exceptions(fun)(self)
+ except NoSuchProcess:
+ if not pid_exists(self.pid):
+ raise
+ else:
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ return wrapper
+
+
+class Process(object):
+ """Linux process implementation."""
+
+ __slots__ = ["pid", "_name", "_ppid"]
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._name = None
+ self._ppid = None
+
+ @wrap_exceptions
+ def name(self):
+ fname = "/proc/%s/stat" % self.pid
+ kw = dict(encoding=DEFAULT_ENCODING) if PY3 else dict()
+ with open(fname, "rt", **kw) as f:
+ data = f.read()
+ # XXX - gets changed later and probably needs refactoring
+ return data[data.find('(') + 1:data.rfind(')')]
+
+ def exe(self):
+ try:
+ exe = os.readlink("/proc/%s/exe" % self.pid)
+ except OSError as err:
+ if err.errno in (errno.ENOENT, errno.ESRCH):
+ # no such file error; might be raised also if the
+ # path actually exists for system processes with
+ # low pids (about 0-20)
+ if os.path.lexists("/proc/%s" % self.pid):
+ return ""
+ else:
+ if not pid_exists(self.pid):
+ raise NoSuchProcess(self.pid, self._name)
+ else:
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise AccessDenied(self.pid, self._name)
+ raise
+
+ # readlink() might return paths containing null bytes ('\x00').
+ # Certain names have ' (deleted)' appended. Usually this is
+ # bogus as the file actually exists. Either way that's not
+ # important as we don't want to discriminate executables which
+ # have been deleted.
+ exe = exe.split('\x00')[0]
+ if exe.endswith(' (deleted)') and not os.path.exists(exe):
+ exe = exe[:-10]
+ return exe
+
+ @wrap_exceptions
+ def cmdline(self):
+ fname = "/proc/%s/cmdline" % self.pid
+ kw = dict(encoding=DEFAULT_ENCODING) if PY3 else dict()
+ with open(fname, "rt", **kw) as f:
+ data = f.read()
+ if data.endswith('\x00'):
+ data = data[:-1]
+ return [x for x in data.split('\x00')]
+
+ @wrap_exceptions
+ def terminal(self):
+ tmap = _psposix._get_terminal_map()
+ with open("/proc/%s/stat" % self.pid, 'rb') as f:
+ tty_nr = int(f.read().split(b' ')[6])
+ try:
+ return tmap[tty_nr]
+ except KeyError:
+ return None
+
+ if os.path.exists('/proc/%s/io' % os.getpid()):
+ @wrap_exceptions
+ def io_counters(self):
+ fname = "/proc/%s/io" % self.pid
+ with open(fname, 'rb') as f:
+ rcount = wcount = rbytes = wbytes = None
+ for line in f:
+ if rcount is None and line.startswith(b"syscr"):
+ rcount = int(line.split()[1])
+ elif wcount is None and line.startswith(b"syscw"):
+ wcount = int(line.split()[1])
+ elif rbytes is None and line.startswith(b"read_bytes"):
+ rbytes = int(line.split()[1])
+ elif wbytes is None and line.startswith(b"write_bytes"):
+ wbytes = int(line.split()[1])
+ for x in (rcount, wcount, rbytes, wbytes):
+ if x is None:
+ raise NotImplementedError(
+ "couldn't read all necessary info from %r" % fname)
+ return _common.pio(rcount, wcount, rbytes, wbytes)
+ else:
+ def io_counters(self):
+ raise NotImplementedError("couldn't find /proc/%s/io (kernel "
+ "too old?)" % self.pid)
+
+ @wrap_exceptions
+ def cpu_times(self):
+ with open("/proc/%s/stat" % self.pid, 'rb') as f:
+ st = f.read().strip()
+ # ignore the first two values ("pid (exe)")
+ st = st[st.find(b')') + 2:]
+ values = st.split(b' ')
+ utime = float(values[11]) / CLOCK_TICKS
+ stime = float(values[12]) / CLOCK_TICKS
+ return _common.pcputimes(utime, stime)
+
+ @wrap_exceptions
+ def wait(self, timeout=None):
+ try:
+ return _psposix.wait_pid(self.pid, timeout)
+ except _psposix.TimeoutExpired:
+ # support for private module import
+ if TimeoutExpired is None:
+ raise
+ raise TimeoutExpired(timeout, self.pid, self._name)
+
+ @wrap_exceptions
+ def create_time(self):
+ with open("/proc/%s/stat" % self.pid, 'rb') as f:
+ st = f.read().strip()
+ # ignore the first two values ("pid (exe)")
+ st = st[st.rfind(b')') + 2:]
+ values = st.split(b' ')
+ # According to documentation, starttime is in field 21 and the
+ # unit is jiffies (clock ticks).
+ # We first divide it for clock ticks and then add uptime returning
+ # seconds since the epoch, in UTC.
+ # Also use cached value if available.
+ bt = BOOT_TIME or boot_time()
+ return (float(values[19]) / CLOCK_TICKS) + bt
+
+ @wrap_exceptions
+ def memory_info(self):
+ with open("/proc/%s/statm" % self.pid, 'rb') as f:
+ vms, rss = f.readline().split()[:2]
+ return _common.pmem(int(rss) * PAGESIZE,
+ int(vms) * PAGESIZE)
+
+ @wrap_exceptions
+ def memory_info_ex(self):
+ # ============================================================
+ # | FIELD | DESCRIPTION | AKA | TOP |
+ # ============================================================
+ # | rss | resident set size | | RES |
+ # | vms | total program size | size | VIRT |
+ # | shared | shared pages (from shared mappings) | | SHR |
+ # | text | text ('code') | trs | CODE |
+ # | lib | library (unused in Linux 2.6) | lrs | |
+ # | data | data + stack | drs | DATA |
+ # | dirty | dirty pages (unused in Linux 2.6) | dt | |
+ # ============================================================
+ with open("/proc/%s/statm" % self.pid, "rb") as f:
+ vms, rss, shared, text, lib, data, dirty = \
+ [int(x) * PAGESIZE for x in f.readline().split()[:7]]
+ return pextmem(rss, vms, shared, text, lib, data, dirty)
+
+ if os.path.exists('/proc/%s/smaps' % os.getpid()):
+
+ @wrap_exceptions
+ def memory_maps(self):
+ """Return process's mapped memory regions as a list of named tuples.
+ Fields are explained in 'man proc'; here is an updated (Apr 2012)
+ version: http://goo.gl/fmebo
+ """
+ with open("/proc/%s/smaps" % self.pid, "rt") as f:
+ first_line = f.readline()
+ current_block = [first_line]
+
+ def get_blocks():
+ data = {}
+ for line in f:
+ fields = line.split(None, 5)
+ if not fields[0].endswith(':'):
+ # new block section
+ yield (current_block.pop(), data)
+ current_block.append(line)
+ else:
+ try:
+ data[fields[0]] = int(fields[1]) * 1024
+ except ValueError:
+ if fields[0].startswith('VmFlags:'):
+ # see issue #369
+ continue
+ else:
+ raise ValueError("don't know how to inte"
+ "rpret line %r" % line)
+ yield (current_block.pop(), data)
+
+ ls = []
+ if first_line: # smaps file can be empty
+ for header, data in get_blocks():
+ hfields = header.split(None, 5)
+ try:
+ addr, perms, offset, dev, inode, path = hfields
+ except ValueError:
+ addr, perms, offset, dev, inode, path = \
+ hfields + ['']
+ if not path:
+ path = '[anon]'
+ else:
+ path = path.strip()
+ ls.append((
+ addr, perms, path,
+ data['Rss:'],
+ data.get('Size:', 0),
+ data.get('Pss:', 0),
+ data.get('Shared_Clean:', 0),
+ data.get('Shared_Dirty:', 0),
+ data.get('Private_Clean:', 0),
+ data.get('Private_Dirty:', 0),
+ data.get('Referenced:', 0),
+ data.get('Anonymous:', 0),
+ data.get('Swap:', 0)
+ ))
+ return ls
+
+ else:
+ def memory_maps(self):
+ msg = "couldn't find /proc/%s/smaps; kernel < 2.6.14 or " \
+ "CONFIG_MMU kernel configuration option is not enabled" \
+ % self.pid
+ raise NotImplementedError(msg)
+
+ @wrap_exceptions_w_zombie
+ def cwd(self):
+ # readlink() might return paths containing null bytes causing
+ # problems when used with other fs-related functions (os.*,
+ # open(), ...)
+ path = os.readlink("/proc/%s/cwd" % self.pid)
+ return path.replace('\x00', '')
+
+ @wrap_exceptions
+ def num_ctx_switches(self):
+ vol = unvol = None
+ with open("/proc/%s/status" % self.pid, "rb") as f:
+ for line in f:
+ if line.startswith(b"voluntary_ctxt_switches"):
+ vol = int(line.split()[1])
+ elif line.startswith(b"nonvoluntary_ctxt_switches"):
+ unvol = int(line.split()[1])
+ if vol is not None and unvol is not None:
+ return _common.pctxsw(vol, unvol)
+ raise NotImplementedError(
+ "'voluntary_ctxt_switches' and 'nonvoluntary_ctxt_switches'"
+ "fields were not found in /proc/%s/status; the kernel is "
+ "probably older than 2.6.23" % self.pid)
+
+ @wrap_exceptions
+ def num_threads(self):
+ with open("/proc/%s/status" % self.pid, "rb") as f:
+ for line in f:
+ if line.startswith(b"Threads:"):
+ return int(line.split()[1])
+ raise NotImplementedError("line not found")
+
+ @wrap_exceptions
+ def threads(self):
+ thread_ids = os.listdir("/proc/%s/task" % self.pid)
+ thread_ids.sort()
+ retlist = []
+ hit_enoent = False
+ for thread_id in thread_ids:
+ fname = "/proc/%s/task/%s/stat" % (self.pid, thread_id)
+ try:
+ with open(fname, 'rb') as f:
+ st = f.read().strip()
+ except IOError as err:
+ if err.errno == errno.ENOENT:
+ # no such file or directory; it means thread
+ # disappeared on us
+ hit_enoent = True
+ continue
+ raise
+ # ignore the first two values ("pid (exe)")
+ st = st[st.find(b')') + 2:]
+ values = st.split(b' ')
+ utime = float(values[11]) / CLOCK_TICKS
+ stime = float(values[12]) / CLOCK_TICKS
+ ntuple = _common.pthread(int(thread_id), utime, stime)
+ retlist.append(ntuple)
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return retlist
+
+ @wrap_exceptions
+ def nice_get(self):
+ # with open('/proc/%s/stat' % self.pid, 'r') as f:
+ # data = f.read()
+ # return int(data.split()[18])
+
+ # Use C implementation
+ return cext_posix.getpriority(self.pid)
+
+ @wrap_exceptions
+ def nice_set(self, value):
+ return cext_posix.setpriority(self.pid, value)
+
+ @wrap_exceptions
+ def cpu_affinity_get(self):
+ return cext.proc_cpu_affinity_get(self.pid)
+
+ @wrap_exceptions
+ def cpu_affinity_set(self, cpus):
+ try:
+ cext.proc_cpu_affinity_set(self.pid, cpus)
+ except OSError as err:
+ if err.errno == errno.EINVAL:
+ allcpus = tuple(range(len(per_cpu_times())))
+ for cpu in cpus:
+ if cpu not in allcpus:
+ raise ValueError("invalid CPU #%i (choose between %s)"
+ % (cpu, allcpus))
+ raise
+
+ # only starting from kernel 2.6.13
+ if hasattr(cext, "proc_ioprio_get"):
+
+ @wrap_exceptions
+ def ionice_get(self):
+ ioclass, value = cext.proc_ioprio_get(self.pid)
+ if enum is not None:
+ ioclass = IOPriority(ioclass)
+ return _common.pionice(ioclass, value)
+
+ @wrap_exceptions
+ def ionice_set(self, ioclass, value):
+ if value is not None:
+ if not PY3 and not isinstance(value, (int, long)):
+ msg = "value argument is not an integer (gor %r)" % value
+ raise TypeError(msg)
+ if not 0 <= value <= 8:
+ raise ValueError(
+ "value argument range expected is between 0 and 8")
+
+ if ioclass in (IOPRIO_CLASS_NONE, None):
+ if value:
+ msg = "can't specify value with IOPRIO_CLASS_NONE " \
+ "(got %r)" % value
+ raise ValueError(msg)
+ ioclass = IOPRIO_CLASS_NONE
+ value = 0
+ elif ioclass == IOPRIO_CLASS_IDLE:
+ if value:
+ msg = "can't specify value with IOPRIO_CLASS_IDLE " \
+ "(got %r)" % value
+ raise ValueError(msg)
+ value = 0
+ elif ioclass in (IOPRIO_CLASS_RT, IOPRIO_CLASS_BE):
+ if value is None:
+ # TODO: add comment explaining why this is 4 (?)
+ value = 4
+ else:
+            # otherwise we would get OSError(EINVAL)
+ raise ValueError("invalid ioclass argument %r" % ioclass)
+
+ return cext.proc_ioprio_set(self.pid, ioclass, value)
+
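+        # Usage sketch (illustrative) for the two methods above:
+        #
+        #     p = Process(os.getpid())
+        #     p.ionice_set(IOPRIO_CLASS_BE, 4)
+        #     ioclass, value = p.ionice_get()
+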
+ if HAS_PRLIMIT:
+ @wrap_exceptions
+ def rlimit(self, resource, limits=None):
+ # If pid is 0 prlimit() applies to the calling process and
+ # we don't want that. We should never get here though as
+ # PID 0 is not supported on Linux.
+ if self.pid == 0:
+ raise ValueError("can't use prlimit() against PID 0 process")
+ try:
+ if limits is None:
+ # get
+ return cext.linux_prlimit(self.pid, resource)
+ else:
+ # set
+ if len(limits) != 2:
+ raise ValueError(
+ "second argument must be a (soft, hard) tuple, "
+ "got %s" % repr(limits))
+ soft, hard = limits
+ cext.linux_prlimit(self.pid, resource, soft, hard)
+ except OSError as err:
+ if err.errno == errno.ENOSYS and pid_exists(self.pid):
+ # I saw this happening on Travis:
+ # https://travis-ci.org/giampaolo/psutil/jobs/51368273
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ else:
+ raise
+
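+    # Usage sketch (illustrative; stdlib resource.RLIMIT_* constants
+    # follow the same kernel numbering expected by prlimit()):
+    #
+    #     soft, hard = p.rlimit(resource.RLIMIT_NOFILE)
+    #     p.rlimit(resource.RLIMIT_NOFILE, (1024, hard))
+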
+ @wrap_exceptions
+ def status(self):
+ with open("/proc/%s/status" % self.pid, 'rb') as f:
+ for line in f:
+ if line.startswith(b"State:"):
+ letter = line.split()[1]
+ if PY3:
+ letter = letter.decode()
+ # XXX is '?' legit? (we're not supposed to return
+ # it anyway)
+ return PROC_STATUSES.get(letter, '?')
+
+ @wrap_exceptions
+ def open_files(self):
+ retlist = []
+ files = os.listdir("/proc/%s/fd" % self.pid)
+ hit_enoent = False
+ for fd in files:
+ file = "/proc/%s/fd/%s" % (self.pid, fd)
+ try:
+ file = os.readlink(file)
+ except OSError as err:
+ # ENOENT == file which is gone in the meantime
+ if err.errno in (errno.ENOENT, errno.ESRCH):
+ hit_enoent = True
+ continue
+ elif err.errno == errno.EINVAL:
+ # not a link
+ continue
+ else:
+ raise
+ else:
+ # If file is not an absolute path there's no way
+ # to tell whether it's a regular file or not,
+ # so we skip it. A regular file is always supposed
+ # to be absolutized though.
+ if file.startswith('/') and isfile_strict(file):
+ ntuple = _common.popenfile(file, int(fd))
+ retlist.append(ntuple)
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return retlist
+
+ @wrap_exceptions
+ def connections(self, kind='inet'):
+ ret = _connections.retrieve(kind, self.pid)
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return ret
+
+ @wrap_exceptions
+ def num_fds(self):
+ return len(os.listdir("/proc/%s/fd" % self.pid))
+
+ @wrap_exceptions
+ def ppid(self):
+ fpath = "/proc/%s/status" % self.pid
+ with open(fpath, 'rb') as f:
+ for line in f:
+ if line.startswith(b"PPid:"):
+ # PPid: nnnn
+ return int(line.split()[1])
+ raise NotImplementedError("line 'PPid' not found in %s" % fpath)
+
+ @wrap_exceptions
+ def uids(self):
+ fpath = "/proc/%s/status" % self.pid
+ with open(fpath, 'rb') as f:
+ for line in f:
+ if line.startswith(b'Uid:'):
+ _, real, effective, saved, fs = line.split()
+ return _common.puids(int(real), int(effective), int(saved))
+ raise NotImplementedError("line 'Uid' not found in %s" % fpath)
+
+ @wrap_exceptions
+ def gids(self):
+ fpath = "/proc/%s/status" % self.pid
+ with open(fpath, 'rb') as f:
+ for line in f:
+ if line.startswith(b'Gid:'):
+ _, real, effective, saved, fs = line.split()
+ return _common.pgids(int(real), int(effective), int(saved))
+ raise NotImplementedError("line 'Gid' not found in %s" % fpath)
diff --git a/python/psutil/psutil/_psosx.py b/python/psutil/psutil/_psosx.py
new file mode 100644
index 000000000..41875fe40
--- /dev/null
+++ b/python/psutil/psutil/_psosx.py
@@ -0,0 +1,363 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""OSX platform implementation."""
+
+import errno
+import functools
+import os
+from collections import namedtuple
+
+from . import _common
+from . import _psposix
+from . import _psutil_osx as cext
+from . import _psutil_posix as cext_posix
+from ._common import conn_tmap, usage_percent, isfile_strict
+from ._common import sockfam_to_enum, socktype_to_enum
+
+
+__extra__all__ = []
+
+# --- constants
+
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+AF_LINK = cext_posix.AF_LINK
+
+# http://students.mimuw.edu.pl/lxr/source/include/net/tcp_states.h
+TCP_STATUSES = {
+ cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED,
+ cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT,
+ cext.TCPS_SYN_RECEIVED: _common.CONN_SYN_RECV,
+ cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1,
+ cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2,
+ cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT,
+ cext.TCPS_CLOSED: _common.CONN_CLOSE,
+ cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
+ cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK,
+ cext.TCPS_LISTEN: _common.CONN_LISTEN,
+ cext.TCPS_CLOSING: _common.CONN_CLOSING,
+ cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
+}
+
+PROC_STATUSES = {
+ cext.SIDL: _common.STATUS_IDLE,
+ cext.SRUN: _common.STATUS_RUNNING,
+ cext.SSLEEP: _common.STATUS_SLEEPING,
+ cext.SSTOP: _common.STATUS_STOPPED,
+ cext.SZOMB: _common.STATUS_ZOMBIE,
+}
+
+scputimes = namedtuple('scputimes', ['user', 'nice', 'system', 'idle'])
+
+svmem = namedtuple(
+ 'svmem', ['total', 'available', 'percent', 'used', 'free',
+ 'active', 'inactive', 'wired'])
+
+pextmem = namedtuple('pextmem', ['rss', 'vms', 'pfaults', 'pageins'])
+
+pmmap_grouped = namedtuple(
+ 'pmmap_grouped',
+ 'path rss private swapped dirtied ref_count shadow_depth')
+
+pmmap_ext = namedtuple(
+ 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
+
+# set later from __init__.py
+NoSuchProcess = None
+ZombieProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+
+# --- functions
+
+def virtual_memory():
+ """System virtual memory as a namedtuple."""
+ total, active, inactive, wired, free = cext.virtual_mem()
+ avail = inactive + free
+ used = active + inactive + wired
+ percent = usage_percent((total - avail), total, _round=1)
+ return svmem(total, avail, percent, used, free,
+ active, inactive, wired)
+
+
+def swap_memory():
+ """Swap system memory as a (total, used, free, sin, sout) tuple."""
+ total, used, free, sin, sout = cext.swap_mem()
+ percent = usage_percent(used, total, _round=1)
+ return _common.sswap(total, used, free, percent, sin, sout)
+
+
+def cpu_times():
+ """Return system CPU times as a namedtuple."""
+ user, nice, system, idle = cext.cpu_times()
+ return scputimes(user, nice, system, idle)
+
+
+def per_cpu_times():
+ """Return system CPU times as a named tuple"""
+ ret = []
+ for cpu_t in cext.per_cpu_times():
+ user, nice, system, idle = cpu_t
+ item = scputimes(user, nice, system, idle)
+ ret.append(item)
+ return ret
+
+
+def cpu_count_logical():
+ """Return the number of logical CPUs in the system."""
+ return cext.cpu_count_logical()
+
+
+def cpu_count_physical():
+ """Return the number of physical CPUs in the system."""
+ return cext.cpu_count_phys()
+
+
+def boot_time():
+ """The system boot time expressed in seconds since the epoch."""
+ return cext.boot_time()
+
+
+def disk_partitions(all=False):
+ retlist = []
+ partitions = cext.disk_partitions()
+ for partition in partitions:
+ device, mountpoint, fstype, opts = partition
+ if device == 'none':
+ device = ''
+ if not all:
+ if not os.path.isabs(device) or not os.path.exists(device):
+ continue
+ ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
+ retlist.append(ntuple)
+ return retlist
+
+
+def users():
+ retlist = []
+ rawlist = cext.users()
+ for item in rawlist:
+ user, tty, hostname, tstamp = item
+ if tty == '~':
+ continue # reboot or shutdown
+ if not tstamp:
+ continue
+ nt = _common.suser(user, tty or None, hostname or None, tstamp)
+ retlist.append(nt)
+ return retlist
+
+
+def net_connections(kind='inet'):
+ # Note: on OSX this will fail with AccessDenied unless
+ # the process is owned by root.
+ ret = []
+ for pid in pids():
+ try:
+ cons = Process(pid).connections(kind)
+ except NoSuchProcess:
+ continue
+ else:
+ if cons:
+ for c in cons:
+ c = list(c) + [pid]
+ ret.append(_common.sconn(*c))
+ return ret
+
+
+def net_if_stats():
+ """Get NIC stats (isup, duplex, speed, mtu)."""
+ names = net_io_counters().keys()
+ ret = {}
+ for name in names:
+ isup, duplex, speed, mtu = cext_posix.net_if_stats(name)
+ if hasattr(_common, 'NicDuplex'):
+ duplex = _common.NicDuplex(duplex)
+ ret[name] = _common.snicstats(isup, duplex, speed, mtu)
+ return ret
+
+
+pids = cext.pids
+pid_exists = _psposix.pid_exists
+disk_usage = _psposix.disk_usage
+net_io_counters = cext.net_io_counters
+disk_io_counters = cext.disk_io_counters
+net_if_addrs = cext_posix.net_if_addrs
+
+
+def wrap_exceptions(fun):
+ """Decorator which translates bare OSError exceptions into
+ NoSuchProcess and AccessDenied.
+ """
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except OSError as err:
+ # support for private module import
+ if (NoSuchProcess is None or AccessDenied is None or
+ ZombieProcess is None):
+ raise
+ if err.errno == errno.ESRCH:
+ if not pid_exists(self.pid):
+ raise NoSuchProcess(self.pid, self._name)
+ else:
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise AccessDenied(self.pid, self._name)
+ raise
+ return wrapper
+
+
+class Process(object):
+ """Wrapper class around underlying C implementation."""
+
+ __slots__ = ["pid", "_name", "_ppid"]
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._name = None
+ self._ppid = None
+
+ @wrap_exceptions
+ def name(self):
+ return cext.proc_name(self.pid)
+
+ @wrap_exceptions
+ def exe(self):
+ return cext.proc_exe(self.pid)
+
+ @wrap_exceptions
+ def cmdline(self):
+ if not pid_exists(self.pid):
+ raise NoSuchProcess(self.pid, self._name)
+ return cext.proc_cmdline(self.pid)
+
+ @wrap_exceptions
+ def ppid(self):
+ return cext.proc_ppid(self.pid)
+
+ @wrap_exceptions
+ def cwd(self):
+ return cext.proc_cwd(self.pid)
+
+ @wrap_exceptions
+ def uids(self):
+ real, effective, saved = cext.proc_uids(self.pid)
+ return _common.puids(real, effective, saved)
+
+ @wrap_exceptions
+ def gids(self):
+ real, effective, saved = cext.proc_gids(self.pid)
+ return _common.pgids(real, effective, saved)
+
+ @wrap_exceptions
+ def terminal(self):
+ tty_nr = cext.proc_tty_nr(self.pid)
+ tmap = _psposix._get_terminal_map()
+ try:
+ return tmap[tty_nr]
+ except KeyError:
+ return None
+
+ @wrap_exceptions
+ def memory_info(self):
+ rss, vms = cext.proc_memory_info(self.pid)[:2]
+ return _common.pmem(rss, vms)
+
+ @wrap_exceptions
+ def memory_info_ex(self):
+ rss, vms, pfaults, pageins = cext.proc_memory_info(self.pid)
+ return pextmem(rss, vms, pfaults * PAGESIZE, pageins * PAGESIZE)
+
+ @wrap_exceptions
+ def cpu_times(self):
+ user, system = cext.proc_cpu_times(self.pid)
+ return _common.pcputimes(user, system)
+
+ @wrap_exceptions
+ def create_time(self):
+ return cext.proc_create_time(self.pid)
+
+ @wrap_exceptions
+ def num_ctx_switches(self):
+ return _common.pctxsw(*cext.proc_num_ctx_switches(self.pid))
+
+ @wrap_exceptions
+ def num_threads(self):
+ return cext.proc_num_threads(self.pid)
+
+ @wrap_exceptions
+ def open_files(self):
+ if self.pid == 0:
+ return []
+ files = []
+ rawlist = cext.proc_open_files(self.pid)
+ for path, fd in rawlist:
+ if isfile_strict(path):
+ ntuple = _common.popenfile(path, fd)
+ files.append(ntuple)
+ return files
+
+ @wrap_exceptions
+ def connections(self, kind='inet'):
+ if kind not in conn_tmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in conn_tmap])))
+ families, types = conn_tmap[kind]
+ rawlist = cext.proc_connections(self.pid, families, types)
+ ret = []
+ for item in rawlist:
+ fd, fam, type, laddr, raddr, status = item
+ status = TCP_STATUSES[status]
+ fam = sockfam_to_enum(fam)
+ type = socktype_to_enum(type)
+ nt = _common.pconn(fd, fam, type, laddr, raddr, status)
+ ret.append(nt)
+ return ret
+
+ @wrap_exceptions
+ def num_fds(self):
+ if self.pid == 0:
+ return 0
+ return cext.proc_num_fds(self.pid)
+
+ @wrap_exceptions
+ def wait(self, timeout=None):
+ try:
+ return _psposix.wait_pid(self.pid, timeout)
+ except _psposix.TimeoutExpired:
+ # support for private module import
+ if TimeoutExpired is None:
+ raise
+ raise TimeoutExpired(timeout, self.pid, self._name)
+
+ @wrap_exceptions
+ def nice_get(self):
+ return cext_posix.getpriority(self.pid)
+
+ @wrap_exceptions
+ def nice_set(self, value):
+ return cext_posix.setpriority(self.pid, value)
+
+ @wrap_exceptions
+ def status(self):
+ code = cext.proc_status(self.pid)
+ # XXX is '?' legit? (we're not supposed to return it anyway)
+ return PROC_STATUSES.get(code, '?')
+
+ @wrap_exceptions
+ def threads(self):
+ rawlist = cext.proc_threads(self.pid)
+ retlist = []
+ for thread_id, utime, stime in rawlist:
+ ntuple = _common.pthread(thread_id, utime, stime)
+ retlist.append(ntuple)
+ return retlist
+
+ @wrap_exceptions
+ def memory_maps(self):
+ return cext.proc_memory_maps(self.pid)
diff --git a/python/psutil/psutil/_psposix.py b/python/psutil/psutil/_psposix.py
new file mode 100644
index 000000000..5bb16a386
--- /dev/null
+++ b/python/psutil/psutil/_psposix.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Routines common to all posix systems."""
+
+import errno
+import glob
+import os
+import sys
+import time
+
+from ._common import sdiskusage, usage_percent, memoize
+from ._compat import PY3, unicode
+
+
+class TimeoutExpired(Exception):
+ pass
+
+
+def pid_exists(pid):
+ """Check whether pid exists in the current process table."""
+ if pid == 0:
+ # According to "man 2 kill" PID 0 has a special meaning:
+ # it refers to <<every process in the process group of the
+ # calling process>> so we don't want to go any further.
+ # If we get here it means this UNIX platform *does* have
+ # a process with id 0.
+ return True
+ try:
+ os.kill(pid, 0)
+ except OSError as err:
+ if err.errno == errno.ESRCH:
+ # ESRCH == No such process
+ return False
+ elif err.errno == errno.EPERM:
+ # EPERM clearly means there's a process to deny access to
+ return True
+ else:
+ # According to "man 2 kill" possible error values are
+ # (EINVAL, EPERM, ESRCH) therefore we should never get
+ # here. If we do let's be explicit in considering this
+ # an error.
+ raise err
+ else:
+ return True
+
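+# Note: os.kill(pid, 0) above sends no signal; it only performs the
+# existence and permission checks, which is why EPERM still proves the
+# PID exists. Minimal check (illustrative):
+#
+#     assert pid_exists(os.getpid())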
+
+def wait_pid(pid, timeout=None):
+ """Wait for process with pid 'pid' to terminate and return its
+ exit status code as an integer.
+
+    If pid is not a child of os.getpid() (the current process) just
+    wait until the process disappears and return None.
+
+ If pid does not exist at all return None immediately.
+
+    Raise TimeoutExpired if the timeout expires.
+ """
+ def check_timeout(delay):
+ if timeout is not None:
+ if timer() >= stop_at:
+ raise TimeoutExpired()
+ time.sleep(delay)
+ return min(delay * 2, 0.04)
+
+ timer = getattr(time, 'monotonic', time.time)
+ if timeout is not None:
+ def waitcall():
+ return os.waitpid(pid, os.WNOHANG)
+ stop_at = timer() + timeout
+ else:
+ def waitcall():
+ return os.waitpid(pid, 0)
+
+ delay = 0.0001
+ while True:
+ try:
+ retpid, status = waitcall()
+ except OSError as err:
+ if err.errno == errno.EINTR:
+ delay = check_timeout(delay)
+ continue
+ elif err.errno == errno.ECHILD:
+ # This has two meanings:
+ # - pid is not a child of os.getpid() in which case
+ # we keep polling until it's gone
+ # - pid never existed in the first place
+ # In both cases we'll eventually return None as we
+ # can't determine its exit status code.
+ while True:
+ if pid_exists(pid):
+ delay = check_timeout(delay)
+ else:
+ return
+ else:
+ raise
+ else:
+ if retpid == 0:
+ # WNOHANG was used, pid is still running
+ delay = check_timeout(delay)
+ continue
+ # process exited due to a signal; return the integer of
+ # that signal
+ if os.WIFSIGNALED(status):
+ return os.WTERMSIG(status)
+ # process exited using exit(2) system call; return the
+ # integer exit(2) system call has been called with
+ elif os.WIFEXITED(status):
+ return os.WEXITSTATUS(status)
+ else:
+ # should never happen
+ raise RuntimeError("unknown process exit status")
+
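+# Usage sketch (illustrative): reaping a spawned child with a timeout.
+#
+#     import subprocess
+#     p = subprocess.Popen(['sleep', '10'])
+#     try:
+#         exit_code = wait_pid(p.pid, timeout=1)
+#     except TimeoutExpired:
+#         p.kill()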
+
+def disk_usage(path):
+ """Return disk usage associated with path."""
+ try:
+ st = os.statvfs(path)
+ except UnicodeEncodeError:
+ if not PY3 and isinstance(path, unicode):
+ # this is a bug with os.statvfs() and unicode on
+ # Python 2, see:
+ # - https://github.com/giampaolo/psutil/issues/416
+ # - http://bugs.python.org/issue18695
+ try:
+ path = path.encode(sys.getfilesystemencoding())
+ except UnicodeEncodeError:
+ pass
+ st = os.statvfs(path)
+ else:
+ raise
+ free = (st.f_bavail * st.f_frsize)
+ total = (st.f_blocks * st.f_frsize)
+ used = (st.f_blocks - st.f_bfree) * st.f_frsize
+ percent = usage_percent(used, total, _round=1)
+    # NB: the percentage is about 5% lower than what is shown by df
+    # due to reserved blocks that we are currently not considering:
+ # http://goo.gl/sWGbH
+ return sdiskusage(total, used, free, percent)
+
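+# Example (illustrative):
+#
+#     usage = disk_usage('/')
+#     print(usage.percent)  # may be ~5% lower than df(1), see above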
+
+@memoize
+def _get_terminal_map():
+ ret = {}
+ ls = glob.glob('/dev/tty*') + glob.glob('/dev/pts/*')
+ for name in ls:
+ assert name not in ret
+ try:
+ ret[os.stat(name).st_rdev] = name
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ raise
+ return ret
diff --git a/python/psutil/psutil/_pssunos.py b/python/psutil/psutil/_pssunos.py
new file mode 100644
index 000000000..bc35a718c
--- /dev/null
+++ b/python/psutil/psutil/_pssunos.py
@@ -0,0 +1,553 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sun OS Solaris platform implementation."""
+
+import errno
+import os
+import socket
+import subprocess
+import sys
+from collections import namedtuple
+
+from . import _common
+from . import _psposix
+from . import _psutil_posix as cext_posix
+from . import _psutil_sunos as cext
+from ._common import isfile_strict, socktype_to_enum, sockfam_to_enum
+from ._common import usage_percent
+from ._compat import PY3
+
+
+__extra__all__ = ["CONN_IDLE", "CONN_BOUND"]
+
+PAGE_SIZE = os.sysconf('SC_PAGE_SIZE')
+AF_LINK = cext_posix.AF_LINK
+
+CONN_IDLE = "IDLE"
+CONN_BOUND = "BOUND"
+
+PROC_STATUSES = {
+ cext.SSLEEP: _common.STATUS_SLEEPING,
+ cext.SRUN: _common.STATUS_RUNNING,
+ cext.SZOMB: _common.STATUS_ZOMBIE,
+ cext.SSTOP: _common.STATUS_STOPPED,
+ cext.SIDL: _common.STATUS_IDLE,
+ cext.SONPROC: _common.STATUS_RUNNING, # same as run
+ cext.SWAIT: _common.STATUS_WAITING,
+}
+
+TCP_STATUSES = {
+ cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED,
+ cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT,
+ cext.TCPS_SYN_RCVD: _common.CONN_SYN_RECV,
+ cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1,
+ cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2,
+ cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT,
+ cext.TCPS_CLOSED: _common.CONN_CLOSE,
+ cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
+ cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK,
+ cext.TCPS_LISTEN: _common.CONN_LISTEN,
+ cext.TCPS_CLOSING: _common.CONN_CLOSING,
+ cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
+ cext.TCPS_IDLE: CONN_IDLE, # sunos specific
+ cext.TCPS_BOUND: CONN_BOUND, # sunos specific
+}
+
+scputimes = namedtuple('scputimes', ['user', 'system', 'idle', 'iowait'])
+svmem = namedtuple('svmem', ['total', 'available', 'percent', 'used', 'free'])
+pextmem = namedtuple('pextmem', ['rss', 'vms'])
+pmmap_grouped = namedtuple('pmmap_grouped', ['path', 'rss', 'anon', 'locked'])
+pmmap_ext = namedtuple(
+ 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
+
+# set later from __init__.py
+NoSuchProcess = None
+ZombieProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+# --- functions
+
+disk_io_counters = cext.disk_io_counters
+net_io_counters = cext.net_io_counters
+disk_usage = _psposix.disk_usage
+net_if_addrs = cext_posix.net_if_addrs
+
+
+def virtual_memory():
+ # we could have done this with kstat, but imho this is good enough
+ total = os.sysconf('SC_PHYS_PAGES') * PAGE_SIZE
+ # note: there's no difference on Solaris
+ free = avail = os.sysconf('SC_AVPHYS_PAGES') * PAGE_SIZE
+ used = total - free
+ percent = usage_percent(used, total, _round=1)
+ return svmem(total, avail, percent, used, free)
+
+
+def swap_memory():
+ sin, sout = cext.swap_mem()
+ # XXX
+ # we are supposed to get total/free by doing so:
+ # http://cvs.opensolaris.org/source/xref/onnv/onnv-gate/
+ # usr/src/cmd/swap/swap.c
+ # ...nevertheless I can't manage to obtain the same numbers as 'swap'
+ # cmdline utility, so let's parse its output (sigh!)
+ p = subprocess.Popen(['/usr/bin/env', 'PATH=/usr/sbin:/sbin:%s' %
+ os.environ['PATH'], 'swap', '-l', '-k'],
+ stdout=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if PY3:
+ stdout = stdout.decode(sys.stdout.encoding)
+ if p.returncode != 0:
+ raise RuntimeError("'swap -l -k' failed (retcode=%s)" % p.returncode)
+
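+    # Each remaining line is expected to look like (illustrative):
+    #   /dev/zvol/dsk/rpool/swap 303,1 8 4194296K 4194296K
+    # i.e. the last two columns are total and free KiB, 'K'-suffixed.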
+ lines = stdout.strip().split('\n')[1:]
+ if not lines:
+ raise RuntimeError('no swap device(s) configured')
+ total = free = 0
+ for line in lines:
+ line = line.split()
+ t, f = line[-2:]
+ t = t.replace('K', '')
+ f = f.replace('K', '')
+ total += int(int(t) * 1024)
+ free += int(int(f) * 1024)
+ used = total - free
+ percent = usage_percent(used, total, _round=1)
+ return _common.sswap(total, used, free, percent,
+ sin * PAGE_SIZE, sout * PAGE_SIZE)
+
+
+def pids():
+ """Returns a list of PIDs currently running on the system."""
+ return [int(x) for x in os.listdir('/proc') if x.isdigit()]
+
+
+def pid_exists(pid):
+ """Check for the existence of a unix pid."""
+ return _psposix.pid_exists(pid)
+
+
+def cpu_times():
+ """Return system-wide CPU times as a named tuple"""
+ ret = cext.per_cpu_times()
+ return scputimes(*[sum(x) for x in zip(*ret)])
+
+
+def per_cpu_times():
+ """Return system per-CPU times as a list of named tuples"""
+ ret = cext.per_cpu_times()
+ return [scputimes(*x) for x in ret]
+
+
+def cpu_count_logical():
+ """Return the number of logical CPUs in the system."""
+ try:
+ return os.sysconf("SC_NPROCESSORS_ONLN")
+ except ValueError:
+ # mimic os.cpu_count() behavior
+ return None
+
+
+def cpu_count_physical():
+ """Return the number of physical CPUs in the system."""
+ return cext.cpu_count_phys()
+
+
+def boot_time():
+ """The system boot time expressed in seconds since the epoch."""
+ return cext.boot_time()
+
+
+def users():
+ """Return currently connected users as a list of namedtuples."""
+ retlist = []
+ rawlist = cext.users()
+ localhost = (':0.0', ':0')
+ for item in rawlist:
+ user, tty, hostname, tstamp, user_process = item
+ # note: the underlying C function includes entries about
+ # system boot, run level and others. We might want
+ # to use them in the future.
+ if not user_process:
+ continue
+ if hostname in localhost:
+ hostname = 'localhost'
+ nt = _common.suser(user, tty, hostname, tstamp)
+ retlist.append(nt)
+ return retlist
+
+
+def disk_partitions(all=False):
+ """Return system disk partitions."""
+ # TODO - the filtering logic should be better checked so that
+ # it tries to reflect 'df' as much as possible
+ retlist = []
+ partitions = cext.disk_partitions()
+ for partition in partitions:
+ device, mountpoint, fstype, opts = partition
+ if device == 'none':
+ device = ''
+ if not all:
+            # Unlike, say, Linux, we don't have a list of
+ # common fs types so the best we can do, AFAIK, is to
+ # filter by filesystem having a total size > 0.
+ if not disk_usage(mountpoint).total:
+ continue
+ ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
+ retlist.append(ntuple)
+ return retlist
+
+
+def net_connections(kind, _pid=-1):
+ """Return socket connections. If pid == -1 return system-wide
+ connections (as opposed to connections opened by one process only).
+ Only INET sockets are returned (UNIX are not).
+ """
+ cmap = _common.conn_tmap.copy()
+ if _pid == -1:
+ cmap.pop('unix', 0)
+ if kind not in cmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in cmap])))
+ families, types = _common.conn_tmap[kind]
+ rawlist = cext.net_connections(_pid, families, types)
+ ret = set()
+ for item in rawlist:
+ fd, fam, type_, laddr, raddr, status, pid = item
+ if fam not in families:
+ continue
+ if type_ not in types:
+ continue
+ status = TCP_STATUSES[status]
+ fam = sockfam_to_enum(fam)
+ type_ = socktype_to_enum(type_)
+ if _pid == -1:
+ nt = _common.sconn(fd, fam, type_, laddr, raddr, status, pid)
+ else:
+ nt = _common.pconn(fd, fam, type_, laddr, raddr, status)
+ ret.add(nt)
+ return list(ret)
+
+
+def net_if_stats():
+ """Get NIC stats (isup, duplex, speed, mtu)."""
+ ret = cext.net_if_stats()
+ for name, items in ret.items():
+ isup, duplex, speed, mtu = items
+ if hasattr(_common, 'NicDuplex'):
+ duplex = _common.NicDuplex(duplex)
+ ret[name] = _common.snicstats(isup, duplex, speed, mtu)
+ return ret
+
+
+def wrap_exceptions(fun):
+ """Call callable into a try/except clause and translate ENOENT,
+ EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
+ """
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except EnvironmentError as err:
+ # support for private module import
+ if (NoSuchProcess is None or AccessDenied is None or
+ ZombieProcess is None):
+ raise
+ # ENOENT (no such file or directory) gets raised on open().
+ # ESRCH (no such process) can get raised on read() if
+ # process is gone in meantime.
+ if err.errno in (errno.ENOENT, errno.ESRCH):
+ if not pid_exists(self.pid):
+ raise NoSuchProcess(self.pid, self._name)
+ else:
+ raise ZombieProcess(self.pid, self._name, self._ppid)
+ if err.errno in (errno.EPERM, errno.EACCES):
+ raise AccessDenied(self.pid, self._name)
+ raise
+ return wrapper
+
+
+class Process(object):
+ """Wrapper class around underlying C implementation."""
+
+ __slots__ = ["pid", "_name", "_ppid"]
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._name = None
+ self._ppid = None
+
+ @wrap_exceptions
+ def name(self):
+ # note: max len == 15
+ return cext.proc_name_and_args(self.pid)[0]
+
+ @wrap_exceptions
+ def exe(self):
+        # Will be guessed later from cmdline but we want to explicitly
+        # invoke cmdline() here in order to get an AccessDenied
+        # exception if the user does not have enough privileges.
+ self.cmdline()
+ return ""
+
+ @wrap_exceptions
+ def cmdline(self):
+ return cext.proc_name_and_args(self.pid)[1].split(' ')
+
+ @wrap_exceptions
+ def create_time(self):
+ return cext.proc_basic_info(self.pid)[3]
+
+ @wrap_exceptions
+ def num_threads(self):
+ return cext.proc_basic_info(self.pid)[5]
+
+ @wrap_exceptions
+ def nice_get(self):
+        # For some reason getpriority(3) returns ESRCH (no such process)
+ # for certain low-pid processes, no matter what (even as root).
+ # The process actually exists though, as it has a name,
+ # creation time, etc.
+ # The best thing we can do here appears to be raising AD.
+ # Note: tested on Solaris 11; on Open Solaris 5 everything is
+ # fine.
+ try:
+ return cext_posix.getpriority(self.pid)
+ except EnvironmentError as err:
+ # 48 is 'operation not supported' but errno does not expose
+ # it. It occurs for low system pids.
+ if err.errno in (errno.ENOENT, errno.ESRCH, 48):
+ if pid_exists(self.pid):
+ raise AccessDenied(self.pid, self._name)
+ raise
+
+ @wrap_exceptions
+ def nice_set(self, value):
+ if self.pid in (2, 3):
+            # Special case PIDs: internally setpriority(3) returns ESRCH
+ # (no such process), no matter what.
+ # The process actually exists though, as it has a name,
+ # creation time, etc.
+ raise AccessDenied(self.pid, self._name)
+ return cext_posix.setpriority(self.pid, value)
+
+ @wrap_exceptions
+ def ppid(self):
+ return cext.proc_basic_info(self.pid)[0]
+
+ @wrap_exceptions
+ def uids(self):
+ real, effective, saved, _, _, _ = cext.proc_cred(self.pid)
+ return _common.puids(real, effective, saved)
+
+ @wrap_exceptions
+ def gids(self):
+ _, _, _, real, effective, saved = cext.proc_cred(self.pid)
+        return _common.pgids(real, effective, saved)
+
+ @wrap_exceptions
+ def cpu_times(self):
+ user, system = cext.proc_cpu_times(self.pid)
+ return _common.pcputimes(user, system)
+
+ @wrap_exceptions
+ def terminal(self):
+ hit_enoent = False
+        tty = cext.proc_basic_info(self.pid)[0]
+ if tty != cext.PRNODEV:
+ for x in (0, 1, 2, 255):
+ try:
+ return os.readlink('/proc/%d/path/%d' % (self.pid, x))
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ hit_enoent = True
+ continue
+ raise
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+
+ @wrap_exceptions
+ def cwd(self):
+ # /proc/PID/path/cwd may not be resolved by readlink() even if
+ # it exists (ls shows it). If that's the case and the process
+ # is still alive return None (we can return None also on BSD).
+ # Reference: http://goo.gl/55XgO
+ try:
+ return os.readlink("/proc/%s/path/cwd" % self.pid)
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ os.stat("/proc/%s" % self.pid)
+ return None
+ raise
+
+ @wrap_exceptions
+ def memory_info(self):
+ ret = cext.proc_basic_info(self.pid)
+ rss, vms = ret[1] * 1024, ret[2] * 1024
+ return _common.pmem(rss, vms)
+
+ # it seems Solaris uses rss and vms only
+ memory_info_ex = memory_info
+
+ @wrap_exceptions
+ def status(self):
+ code = cext.proc_basic_info(self.pid)[6]
+ # XXX is '?' legit? (we're not supposed to return it anyway)
+ return PROC_STATUSES.get(code, '?')
+
+ @wrap_exceptions
+ def threads(self):
+ ret = []
+ tids = os.listdir('/proc/%d/lwp' % self.pid)
+ hit_enoent = False
+ for tid in tids:
+ tid = int(tid)
+ try:
+ utime, stime = cext.query_process_thread(
+ self.pid, tid)
+ except EnvironmentError as err:
+ # ENOENT == thread gone in meantime
+ if err.errno == errno.ENOENT:
+ hit_enoent = True
+ continue
+ raise
+ else:
+ nt = _common.pthread(tid, utime, stime)
+ ret.append(nt)
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return ret
+
+ @wrap_exceptions
+ def open_files(self):
+ retlist = []
+ hit_enoent = False
+ pathdir = '/proc/%d/path' % self.pid
+ for fd in os.listdir('/proc/%d/fd' % self.pid):
+ path = os.path.join(pathdir, fd)
+ if os.path.islink(path):
+ try:
+ file = os.readlink(path)
+ except OSError as err:
+ # ENOENT == file which is gone in the meantime
+ if err.errno == errno.ENOENT:
+ hit_enoent = True
+ continue
+ raise
+ else:
+ if isfile_strict(file):
+ retlist.append(_common.popenfile(file, int(fd)))
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return retlist
+
+ def _get_unix_sockets(self, pid):
+ """Get UNIX sockets used by process by parsing 'pfiles' output."""
+ # TODO: rewrite this in C (...but the damn netstat source code
+ # does not include this part! Argh!!)
+ cmd = "pfiles %s" % pid
+ p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if PY3:
+ stdout, stderr = [x.decode(sys.stdout.encoding)
+ for x in (stdout, stderr)]
+ if p.returncode != 0:
+ if 'permission denied' in stderr.lower():
+ raise AccessDenied(self.pid, self._name)
+ if 'no such process' in stderr.lower():
+ raise NoSuchProcess(self.pid, self._name)
+ raise RuntimeError("%r command error\n%s" % (cmd, stderr))
+
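+        # Relevant 'pfiles' output fragment (illustrative):
+        #   4: S_IFSOCK mode:0666 dev:523,0 ino:48639 uid:0 gid:0 size:0
+        #        O_RDWR
+        #          SOCK_STREAM
+        #          SO_SNDBUF(16384),SO_RCVBUF(5120)
+        #          sockname: AF_UNIX /var/run/dbus/system_bus_socket
+        # i.e. the socket type appears two lines above its sockname line.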
+ lines = stdout.split('\n')[2:]
+ for i, line in enumerate(lines):
+ line = line.lstrip()
+ if line.startswith('sockname: AF_UNIX'):
+ path = line.split(' ', 2)[2]
+ type = lines[i - 2].strip()
+ if type == 'SOCK_STREAM':
+ type = socket.SOCK_STREAM
+ elif type == 'SOCK_DGRAM':
+ type = socket.SOCK_DGRAM
+ else:
+ type = -1
+ yield (-1, socket.AF_UNIX, type, path, "", _common.CONN_NONE)
+
+ @wrap_exceptions
+ def connections(self, kind='inet'):
+ ret = net_connections(kind, _pid=self.pid)
+ # The underlying C implementation retrieves all OS connections
+ # and filters them by PID. At this point we can't tell whether
+        # an empty list means there were no connections for the process
+        # or the process is no longer active, so we force NSP in case
+        # the PID is no longer there.
+ if not ret:
+ os.stat('/proc/%s' % self.pid) # will raise NSP if process is gone
+
+ # UNIX sockets
+ if kind in ('all', 'unix'):
+ ret.extend([_common.pconn(*conn) for conn in
+ self._get_unix_sockets(self.pid)])
+ return ret
+
+ nt_mmap_grouped = namedtuple('mmap', 'path rss anon locked')
+ nt_mmap_ext = namedtuple('mmap', 'addr perms path rss anon locked')
+
+ @wrap_exceptions
+ def memory_maps(self):
+ def toaddr(start, end):
+ return '%s-%s' % (hex(start)[2:].strip('L'),
+ hex(end)[2:].strip('L'))
+
+ retlist = []
+ rawlist = cext.proc_memory_maps(self.pid)
+ hit_enoent = False
+ for item in rawlist:
+ addr, addrsize, perm, name, rss, anon, locked = item
+ addr = toaddr(addr, addrsize)
+ if not name.startswith('['):
+ try:
+ name = os.readlink('/proc/%s/path/%s' % (self.pid, name))
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ # sometimes the link may not be resolved by
+ # readlink() even if it exists (ls shows it).
+ # If that's the case we just return the
+ # unresolved link path.
+                        # This seems to be an inconsistency with /proc similar
+ # to: http://goo.gl/55XgO
+ name = '/proc/%s/path/%s' % (self.pid, name)
+ hit_enoent = True
+ else:
+ raise
+ retlist.append((addr, perm, name, rss, anon, locked))
+ if hit_enoent:
+ # raise NSP if the process disappeared on us
+ os.stat('/proc/%s' % self.pid)
+ return retlist
+
+ @wrap_exceptions
+ def num_fds(self):
+ return len(os.listdir("/proc/%s/fd" % self.pid))
+
+ @wrap_exceptions
+ def num_ctx_switches(self):
+ return _common.pctxsw(*cext.proc_num_ctx_switches(self.pid))
+
+ @wrap_exceptions
+ def wait(self, timeout=None):
+ try:
+ return _psposix.wait_pid(self.pid, timeout)
+ except _psposix.TimeoutExpired:
+ # support for private module import
+ if TimeoutExpired is None:
+ raise
+ raise TimeoutExpired(timeout, self.pid, self._name)
diff --git a/python/psutil/psutil/_psutil_bsd.c b/python/psutil/psutil/_psutil_bsd.c
new file mode 100644
index 000000000..7b6e56173
--- /dev/null
+++ b/python/psutil/psutil/_psutil_bsd.c
@@ -0,0 +1,2296 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * FreeBSD platform-specific module methods for _psutil_bsd
+ */
+
+
+#include <Python.h>
+#include <assert.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <signal.h>
+#include <fcntl.h>
+#include <paths.h>
+#include <sys/types.h>
+#include <sys/sysctl.h>
+#include <sys/param.h>
+#include <sys/user.h>
+#include <sys/proc.h>
+#include <sys/file.h>
+#include <sys/cpuset.h>
+#include <net/route.h>
+
+#include <sys/socket.h>
+#include <sys/socketvar.h> // for struct xsocket
+#include <sys/un.h>
+#include <sys/unpcb.h>
+#include <sys/sockio.h>
+// for xinpcb struct
+#include <netinet/in.h>
+#include <netinet/in_systm.h>
+#include <netinet/ip.h>
+#include <netinet/in_pcb.h>
+#include <netinet/tcp_var.h> // for struct xtcpcb
+#include <netinet/tcp_fsm.h> // for TCP connection states
+#include <arpa/inet.h> // for inet_ntop()
+
+#if __FreeBSD_version < 900000
+#include <utmp.h> // system users
+#else
+#include <utmpx.h>
+#endif
+#include <devstat.h> // get io counters
+#include <sys/vmmeter.h> // needed for vmtotal struct
+#include <libutil.h> // process open files, shared libs (kinfo_getvmmap)
+#include <sys/mount.h>
+
+#include <net/if.h> // net io counters
+#include <net/if_dl.h>
+#include <net/route.h>
+#include <net/if_media.h>
+
+#include <netinet/in.h> // process open files/connections
+#include <sys/un.h>
+
+#include "_psutil_bsd.h"
+#include "_psutil_common.h"
+#include "arch/bsd/process_info.h"
+
+
+// convert a timeval struct to a double
+#define TV2DOUBLE(t) ((t).tv_sec + (t).tv_usec / 1000000.0)
+
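+// e.g. (illustrative) a timeval of {2, 500000} converts to 2.5 seconds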
+
+/*
+ * Utility function which fills a kinfo_proc struct based on process pid
+ */
+static int
+psutil_kinfo_proc(const pid_t pid, struct kinfo_proc *proc)
+{
+ int mib[4];
+ size_t size;
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PID;
+ mib[3] = pid;
+
+ size = sizeof(struct kinfo_proc);
+
+ if (sysctl((int *)mib, 4, proc, &size, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return -1;
+ }
+
+ // sysctl stores 0 in the size if we can't find the process information.
+ if (size == 0) {
+ NoSuchProcess();
+ return -1;
+ }
+ return 0;
+}
+
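+/*
+ * Usage sketch (illustrative):
+ *
+ *     struct kinfo_proc kp;
+ *     if (psutil_kinfo_proc(pid, &kp) == -1)
+ *         return NULL;  // Python exception is already set
+ *     // ... then read kp.ki_comm, kp.ki_ppid, etc.
+ */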
+
+/*
+ * Set exception to AccessDenied if pid exists else NoSuchProcess.
+ */
+void
+psutil_raise_ad_or_nsp(long pid) {
+ if (psutil_pid_exists(pid) == 0)
+ NoSuchProcess();
+ else
+ AccessDenied();
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+ kinfo_proc *proclist = NULL;
+ kinfo_proc *orig_address = NULL;
+ size_t num_processes;
+ size_t idx;
+ PyObject *retlist = PyList_New(0);
+ PyObject *pid = NULL;
+
+ if (retlist == NULL)
+ return NULL;
+ if (psutil_get_proc_list(&proclist, &num_processes) != 0) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to retrieve process list.");
+ goto error;
+ }
+
+ if (num_processes > 0) {
+ orig_address = proclist; // save so we can free it after we're done
+ for (idx = 0; idx < num_processes; idx++) {
+ pid = Py_BuildValue("i", proclist->ki_pid);
+ if (!pid)
+ goto error;
+ if (PyList_Append(retlist, pid))
+ goto error;
+ Py_DECREF(pid);
+ proclist++;
+ }
+ free(orig_address);
+ }
+
+ return retlist;
+
+error:
+ Py_XDECREF(pid);
+ Py_DECREF(retlist);
+ if (orig_address != NULL)
+ free(orig_address);
+ return NULL;
+}
+
+
+/*
+ * Return a Python float indicating the system boot time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+ // fetch sysctl "kern.boottime"
+ static int request[2] = { CTL_KERN, KERN_BOOTTIME };
+ struct timeval boottime;
+ size_t len = sizeof(boottime);
+
+ if (sysctl(request, 2, &boottime, &len, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ return Py_BuildValue("d", (double)boottime.tv_sec);
+}
+
+
+/*
+ * Return process name from kinfo_proc as a Python string.
+ */
+static PyObject *
+psutil_proc_name(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("s", kp.ki_comm);
+}
+
+
+/*
+ * Return the pathname of the process executable.
+ * Thanks to Robert N. M. Watson:
+ * http://fxr.googlebit.com/source/usr.bin/procstat/procstat_bin.c?v=8-CURRENT
+ */
+static PyObject *
+psutil_proc_exe(PyObject *self, PyObject *args)
+{
+ long pid;
+ char pathname[PATH_MAX];
+ int error;
+ int mib[4];
+ size_t size;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PATHNAME;
+ mib[3] = pid;
+
+ size = sizeof(pathname);
+ error = sysctl(mib, 4, pathname, &size, NULL, 0);
+ if (error == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ if (size == 0 || strlen(pathname) == 0) {
+ if (psutil_pid_exists(pid) == 0)
+ return NoSuchProcess();
+ else
+ strcpy(pathname, "");
+ }
+ return Py_BuildValue("s", pathname);
+}
+
+
+/*
+ * Return process cmdline as a Python list of cmdline arguments.
+ */
+static PyObject *
+psutil_proc_cmdline(PyObject *self, PyObject *args)
+{
+ long pid;
+ PyObject *arglist = NULL;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ // get the commandline, defined in arch/bsd/process_info.c
+ arglist = psutil_get_arg_list(pid);
+
+ // psutil_get_arg_list() returns NULL only if psutil_cmd_args
+ // failed with ESRCH (no process with that PID)
+ if (NULL == arglist)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ return Py_BuildValue("N", arglist);
+}
+
+
+/*
+ * Return process parent pid from kinfo_proc as a Python integer.
+ */
+static PyObject *
+psutil_proc_ppid(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("l", (long)kp.ki_ppid);
+}
+
+
+/*
+ * Return process status as a Python integer.
+ */
+static PyObject *
+psutil_proc_status(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("i", (int)kp.ki_stat);
+}
+
+
+/*
+ * Return process real, effective and saved user ids from kinfo_proc
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_uids(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("lll",
+ (long)kp.ki_ruid,
+ (long)kp.ki_uid,
+ (long)kp.ki_svuid);
+}
+
+
+/*
+ * Return process real, effective and saved group ids from kinfo_proc
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_gids(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("lll",
+ (long)kp.ki_rgid,
+ (long)kp.ki_groups[0],
+                         (long)kp.ki_svgid);
+}
+
+
+/*
+ * Return the tty (terminal) device number the process is attached to
+ * from kinfo_proc as a Python integer.
+ */
+static PyObject *
+psutil_proc_tty_nr(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("i", kp.ki_tdev);
+}
+
+
+/*
+ * Return the number of context switches performed by process as a tuple.
+ */
+static PyObject *
+psutil_proc_num_ctx_switches(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("(ll)",
+ kp.ki_rusage.ru_nvcsw,
+ kp.ki_rusage.ru_nivcsw);
+}
+
+
+/*
+ * Return number of threads used by process as a Python integer.
+ */
+static PyObject *
+psutil_proc_num_threads(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("l", (long)kp.ki_numthreads);
+}
+
+
+/*
+ * Retrieves all threads used by process returning a list of tuples
+ * including thread id, user time and system time.
+ * Thanks to Robert N. M. Watson:
+ * http://fxr.googlebit.com/source/usr.bin/procstat/
+ * procstat_threads.c?v=8-CURRENT
+ */
+static PyObject *
+psutil_proc_threads(PyObject *self, PyObject *args)
+{
+ long pid;
+ int mib[4];
+ struct kinfo_proc *kip = NULL;
+ struct kinfo_proc *kipp = NULL;
+ int error;
+ unsigned int i;
+ size_t size;
+ PyObject *retList = PyList_New(0);
+ PyObject *pyTuple = NULL;
+
+ if (retList == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+
+ // we need to re-query for thread information, so don't use *kipp
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PID | KERN_PROC_INC_THREAD;
+ mib[3] = pid;
+
+ size = 0;
+ error = sysctl(mib, 4, NULL, &size, NULL, 0);
+ if (error == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ if (size == 0) {
+ NoSuchProcess();
+ goto error;
+ }
+
+ kip = malloc(size);
+ if (kip == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ error = sysctl(mib, 4, kip, &size, NULL, 0);
+ if (error == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ if (size == 0) {
+ NoSuchProcess();
+ goto error;
+ }
+
+ for (i = 0; i < size / sizeof(*kipp); i++) {
+ kipp = &kip[i];
+ pyTuple = Py_BuildValue("Idd",
+ kipp->ki_tid,
+ TV2DOUBLE(kipp->ki_rusage.ru_utime),
+ TV2DOUBLE(kipp->ki_rusage.ru_stime));
+ if (pyTuple == NULL)
+ goto error;
+ if (PyList_Append(retList, pyTuple))
+ goto error;
+ Py_DECREF(pyTuple);
+ }
+ free(kip);
+ return retList;
+
+error:
+ Py_XDECREF(pyTuple);
+ Py_DECREF(retList);
+ if (kip != NULL)
+ free(kip);
+ return NULL;
+}
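+/*
+ * TV2DOUBLE, used above, is assumed to be defined earlier in this file
+ * (outside this hunk) as the usual struct timeval -> seconds
+ * conversion, roughly:
+ *
+ *     #define TV2DOUBLE(t) ((t).tv_sec + (t).tv_usec / 1000000.0)
+ */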
+
+
+/*
+ * Return a Python tuple (user_time, kernel_time)
+ */
+static PyObject *
+psutil_proc_cpu_times(PyObject *self, PyObject *args)
+{
+ long pid;
+ double user_t, sys_t;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ // convert from microseconds to seconds
+ user_t = TV2DOUBLE(kp.ki_rusage.ru_utime);
+ sys_t = TV2DOUBLE(kp.ki_rusage.ru_stime);
+ return Py_BuildValue("(dd)", user_t, sys_t);
+}
+
+
+/*
+ * Return the number of logical CPUs in the system.
+ * XXX this could be shared with OSX
+ */
+static PyObject *
+psutil_cpu_count_logical(PyObject *self, PyObject *args)
+{
+ int mib[2];
+ int ncpu;
+ size_t len;
+
+ mib[0] = CTL_HW;
+ mib[1] = HW_NCPU;
+ len = sizeof(ncpu);
+
+ if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
+ Py_RETURN_NONE; // mimic os.cpu_count()
+ else
+ return Py_BuildValue("i", ncpu);
+}
+
+
+/*
+ * Return an XML string from which we'll determine the number of
+ * physical CPU cores in the system.
+ */
+static PyObject *
+psutil_cpu_count_phys(PyObject *self, PyObject *args)
+{
+ void *topology = NULL;
+ size_t size = 0;
+ PyObject *py_str;
+
+ if (sysctlbyname("kern.sched.topology_spec", NULL, &size, NULL, 0))
+ goto error;
+
+ topology = malloc(size);
+ if (!topology) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ if (sysctlbyname("kern.sched.topology_spec", topology, &size, NULL, 0))
+ goto error;
+
+ py_str = Py_BuildValue("s", topology);
+ free(topology);
+ return py_str;
+
+error:
+ if (topology != NULL)
+ free(topology);
+ Py_RETURN_NONE;
+}
+
+
+/*
+ * Return a Python float indicating the process create time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_proc_create_time(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("d", TV2DOUBLE(kp.ki_start));
+}
+
+
+/*
+ * Return process I/O counters as a Python tuple
+ * (read_count, write_count, read_bytes, write_bytes).
+ */
+static PyObject *
+psutil_proc_io_counters(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+    // there's apparently no way to determine the byte counts, hence return -1.
+ return Py_BuildValue("(llll)",
+ kp.ki_rusage.ru_inblock,
+ kp.ki_rusage.ru_oublock,
+ -1,
+ -1);
+}
+
+
+/*
+ * Return extended memory info for a process as a Python tuple.
+ */
+static PyObject *
+psutil_proc_memory_info(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("(lllll)",
+ ptoa(kp.ki_rssize), // rss
+ (long)kp.ki_size, // vms
+ ptoa(kp.ki_tsize), // text
+ ptoa(kp.ki_dsize), // data
+ ptoa(kp.ki_ssize)); // stack
+}
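+/*
+ * ptoa() above comes from the kernel headers and turns a page count
+ * into bytes; conceptually (assuming the usual FreeBSD definition):
+ *
+ *     bytes = (unsigned long)npages << PAGE_SHIFT;
+ */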
+
+
+/*
+ * Return virtual memory usage statistics.
+ */
+static PyObject *
+psutil_virtual_mem(PyObject *self, PyObject *args)
+{
+ unsigned int total, active, inactive, wired, cached, free;
+ size_t size = sizeof(total);
+ struct vmtotal vm;
+ int mib[] = {CTL_VM, VM_METER};
+ long pagesize = getpagesize();
+#if __FreeBSD_version > 702101
+ long buffers;
+#else
+ int buffers;
+#endif
+ size_t buffers_size = sizeof(buffers);
+
+ if (sysctlbyname("vm.stats.vm.v_page_count", &total, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vm.stats.vm.v_active_count", &active, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vm.stats.vm.v_inactive_count",
+ &inactive, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vm.stats.vm.v_wire_count", &wired, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vm.stats.vm.v_cache_count", &cached, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vm.stats.vm.v_free_count", &free, &size, NULL, 0))
+ goto error;
+ if (sysctlbyname("vfs.bufspace", &buffers, &buffers_size, NULL, 0))
+ goto error;
+
+ size = sizeof(vm);
+ if (sysctl(mib, 2, &vm, &size, NULL, 0) != 0)
+ goto error;
+
+ return Py_BuildValue("KKKKKKKK",
+ (unsigned long long) total * pagesize,
+ (unsigned long long) free * pagesize,
+ (unsigned long long) active * pagesize,
+ (unsigned long long) inactive * pagesize,
+ (unsigned long long) wired * pagesize,
+ (unsigned long long) cached * pagesize,
+ (unsigned long long) buffers,
+ (unsigned long long) (vm.t_vmshr + vm.t_rmshr) * pagesize // shared
+ );
+
+error:
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+}
+
+
+#ifndef _PATH_DEVNULL
+#define _PATH_DEVNULL "/dev/null"
+#endif
+
+/*
+ * Return swap memory stats (see 'swapinfo' cmdline tool)
+ */
+static PyObject *
+psutil_swap_mem(PyObject *self, PyObject *args)
+{
+ kvm_t *kd;
+ struct kvm_swap kvmsw[1];
+ unsigned int swapin, swapout, nodein, nodeout;
+ size_t size = sizeof(unsigned int);
+
+ kd = kvm_open(NULL, _PATH_DEVNULL, NULL, O_RDONLY, "kvm_open failed");
+ if (kd == NULL) {
+ PyErr_SetString(PyExc_RuntimeError, "kvm_open failed");
+ return NULL;
+ }
+
+ if (kvm_getswapinfo(kd, kvmsw, 1, 0) < 0) {
+ kvm_close(kd);
+ PyErr_SetString(PyExc_RuntimeError, "kvm_getswapinfo failed");
+ return NULL;
+ }
+
+ kvm_close(kd);
+
+ if (sysctlbyname("vm.stats.vm.v_swapin", &swapin, &size, NULL, 0) == -1)
+ goto sbn_error;
+ if (sysctlbyname("vm.stats.vm.v_swapout", &swapout, &size, NULL, 0) == -1)
+ goto sbn_error;
+ if (sysctlbyname("vm.stats.vm.v_vnodein", &nodein, &size, NULL, 0) == -1)
+ goto sbn_error;
+ if (sysctlbyname("vm.stats.vm.v_vnodeout", &nodeout, &size, NULL, 0) == -1)
+ goto sbn_error;
+
+ return Py_BuildValue("(iiiII)",
+ kvmsw[0].ksw_total, // total
+ kvmsw[0].ksw_used, // used
+ kvmsw[0].ksw_total - kvmsw[0].ksw_used, // free
+ swapin + swapout, // swap in
+ nodein + nodeout); // swap out
+
+sbn_error:
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+}
+
+
+/*
+ * Return a Python tuple representing user, kernel and idle CPU times
+ */
+static PyObject *
+psutil_cpu_times(PyObject *self, PyObject *args)
+{
+ long cpu_time[CPUSTATES];
+ size_t size;
+
+ size = sizeof(cpu_time);
+
+ if (sysctlbyname("kern.cp_time", &cpu_time, &size, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+
+ return Py_BuildValue("(ddddd)",
+ (double)cpu_time[CP_USER] / CLOCKS_PER_SEC,
+ (double)cpu_time[CP_NICE] / CLOCKS_PER_SEC,
+ (double)cpu_time[CP_SYS] / CLOCKS_PER_SEC,
+ (double)cpu_time[CP_IDLE] / CLOCKS_PER_SEC,
+ (double)cpu_time[CP_INTR] / CLOCKS_PER_SEC
+ );
+}
+
+
+/*
+ * XXX
+ * These functions are available on FreeBSD 8 only.
+ * In the upper Python layer we do various tricks to avoid crashing
+ * and/or to provide alternatives where possible.
+ */
+
+
+#if defined(__FreeBSD_version) && __FreeBSD_version >= 800000
+/*
+ * Return files opened by process as a list of (path, fd) tuples.
+ * TODO: this is broken as it may report empty paths. 'procstat'
+ * utility has the same problem see:
+ * https://github.com/giampaolo/psutil/issues/595
+ */
+static PyObject *
+psutil_proc_open_files(PyObject *self, PyObject *args)
+{
+ long pid;
+ int i, cnt;
+ struct kinfo_file *freep = NULL;
+ struct kinfo_file *kif;
+ struct kinfo_proc kipp;
+ PyObject *retList = PyList_New(0);
+ PyObject *tuple = NULL;
+
+ if (retList == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+ if (psutil_kinfo_proc(pid, &kipp) == -1)
+ goto error;
+
+ freep = kinfo_getfile(pid, &cnt);
+ if (freep == NULL) {
+ psutil_raise_ad_or_nsp(pid);
+ goto error;
+ }
+
+ for (i = 0; i < cnt; i++) {
+ kif = &freep[i];
+ if ((kif->kf_type == KF_TYPE_VNODE) &&
+ (kif->kf_vnode_type == KF_VTYPE_VREG))
+ {
+ tuple = Py_BuildValue("(si)", kif->kf_path, kif->kf_fd);
+ if (tuple == NULL)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ }
+ free(freep);
+ return retList;
+
+error:
+ Py_XDECREF(tuple);
+ Py_DECREF(retList);
+ if (freep != NULL)
+ free(freep);
+ return NULL;
+}
+
+
+/*
+ * Return the number of file descriptors opened by process.
+ */
+static PyObject *
+psutil_proc_num_fds(PyObject *self, PyObject *args)
+{
+ long pid;
+ int cnt;
+
+ struct kinfo_file *freep;
+ struct kinfo_proc kipp;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_kinfo_proc(pid, &kipp) == -1)
+ return NULL;
+
+ freep = kinfo_getfile(pid, &cnt);
+ if (freep == NULL) {
+ psutil_raise_ad_or_nsp(pid);
+ return NULL;
+ }
+ free(freep);
+
+ return Py_BuildValue("i", cnt);
+}
+
+
+/*
+ * Return process current working directory.
+ */
+static PyObject *
+psutil_proc_cwd(PyObject *self, PyObject *args)
+{
+ long pid;
+ PyObject *path = NULL;
+ struct kinfo_file *freep = NULL;
+ struct kinfo_file *kif;
+ struct kinfo_proc kipp;
+
+ int i, cnt;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+ if (psutil_kinfo_proc(pid, &kipp) == -1)
+ goto error;
+
+ freep = kinfo_getfile(pid, &cnt);
+ if (freep == NULL) {
+ psutil_raise_ad_or_nsp(pid);
+ goto error;
+ }
+
+ for (i = 0; i < cnt; i++) {
+ kif = &freep[i];
+ if (kif->kf_fd == KF_FD_TYPE_CWD) {
+ path = Py_BuildValue("s", kif->kf_path);
+ if (!path)
+ goto error;
+ break;
+ }
+ }
+ /*
+ * For lower pids it seems we can't retrieve any information
+ * (lsof can't do it either). Since this happens even
+ * as root we return an empty string instead of AccessDenied.
+ */
+ if (path == NULL)
+ path = Py_BuildValue("s", "");
+ free(freep);
+ return path;
+
+error:
+ Py_XDECREF(path);
+ if (freep != NULL)
+ free(freep);
+ return NULL;
+}
+
+
+// The tcplist fetching and walking is borrowed from netstat/inet.c.
+static char *
+psutil_fetch_tcplist(void)
+{
+ char *buf;
+ size_t len;
+
+ for (;;) {
+ if (sysctlbyname("net.inet.tcp.pcblist", NULL, &len, NULL, 0) < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ buf = malloc(len);
+ if (buf == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ if (sysctlbyname("net.inet.tcp.pcblist", buf, &len, NULL, 0) < 0) {
+ free(buf);
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ return buf;
+ }
+}
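+/*
+ * The function above uses the classic two-step sysctl size probe: the
+ * first call with a NULL buffer only reports the required length, the
+ * second call copies the data out. A minimal sketch of the idiom:
+ *
+ *     size_t len = 0;
+ *     sysctlbyname("net.inet.tcp.pcblist", NULL, &len, NULL, 0); // probe
+ *     char *buf = malloc(len);
+ *     sysctlbyname("net.inet.tcp.pcblist", buf, &len, NULL, 0);  // fetch
+ */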
+
+static int
+psutil_sockaddr_port(int family, struct sockaddr_storage *ss)
+{
+ struct sockaddr_in6 *sin6;
+ struct sockaddr_in *sin;
+
+ if (family == AF_INET) {
+ sin = (struct sockaddr_in *)ss;
+ return (sin->sin_port);
+ }
+ else {
+ sin6 = (struct sockaddr_in6 *)ss;
+ return (sin6->sin6_port);
+ }
+}
+
+static void *
+psutil_sockaddr_addr(int family, struct sockaddr_storage *ss)
+{
+ struct sockaddr_in6 *sin6;
+ struct sockaddr_in *sin;
+
+ if (family == AF_INET) {
+ sin = (struct sockaddr_in *)ss;
+ return (&sin->sin_addr);
+ }
+ else {
+ sin6 = (struct sockaddr_in6 *)ss;
+ return (&sin6->sin6_addr);
+ }
+}
+
+static socklen_t
+psutil_sockaddr_addrlen(int family)
+{
+ if (family == AF_INET)
+ return (sizeof(struct in_addr));
+ else
+ return (sizeof(struct in6_addr));
+}
+
+static int
+psutil_sockaddr_matches(int family, int port, void *pcb_addr,
+ struct sockaddr_storage *ss)
+{
+ if (psutil_sockaddr_port(family, ss) != port)
+ return (0);
+ return (memcmp(psutil_sockaddr_addr(family, ss), pcb_addr,
+ psutil_sockaddr_addrlen(family)) == 0);
+}
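+/*
+ * These helpers pair a kinfo_file socket with its pcb: a match
+ * requires the same port and the same raw address bytes on the local
+ * side and, for connected sockets, on the peer side as well; see
+ * psutil_search_tcplist() below.
+ */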
+
+static struct tcpcb *
+psutil_search_tcplist(char *buf, struct kinfo_file *kif)
+{
+ struct tcpcb *tp;
+ struct inpcb *inp;
+ struct xinpgen *xig, *oxig;
+ struct xsocket *so;
+
+ oxig = xig = (struct xinpgen *)buf;
+ for (xig = (struct xinpgen *)((char *)xig + xig->xig_len);
+ xig->xig_len > sizeof(struct xinpgen);
+ xig = (struct xinpgen *)((char *)xig + xig->xig_len)) {
+ tp = &((struct xtcpcb *)xig)->xt_tp;
+ inp = &((struct xtcpcb *)xig)->xt_inp;
+ so = &((struct xtcpcb *)xig)->xt_socket;
+
+ if (so->so_type != kif->kf_sock_type ||
+ so->xso_family != kif->kf_sock_domain ||
+ so->xso_protocol != kif->kf_sock_protocol)
+ continue;
+
+ if (kif->kf_sock_domain == AF_INET) {
+ if (!psutil_sockaddr_matches(
+ AF_INET, inp->inp_lport, &inp->inp_laddr,
+ &kif->kf_sa_local))
+ continue;
+ if (!psutil_sockaddr_matches(
+ AF_INET, inp->inp_fport, &inp->inp_faddr,
+ &kif->kf_sa_peer))
+ continue;
+ } else {
+ if (!psutil_sockaddr_matches(
+ AF_INET6, inp->inp_lport, &inp->in6p_laddr,
+ &kif->kf_sa_local))
+ continue;
+ if (!psutil_sockaddr_matches(
+ AF_INET6, inp->inp_fport, &inp->in6p_faddr,
+ &kif->kf_sa_peer))
+ continue;
+ }
+
+ return (tp);
+ }
+ return NULL;
+}
+
+
+// a signaler for connections without an actual status
+static int PSUTIL_CONN_NONE = 128;
+
+/*
+ * Return connections opened by process.
+ */
+static PyObject *
+psutil_proc_connections(PyObject *self, PyObject *args)
+{
+ long pid;
+ int i, cnt;
+
+ struct kinfo_file *freep = NULL;
+ struct kinfo_file *kif;
+ char *tcplist = NULL;
+ struct tcpcb *tcp;
+
+ PyObject *retList = PyList_New(0);
+ PyObject *tuple = NULL;
+ PyObject *laddr = NULL;
+ PyObject *raddr = NULL;
+ PyObject *af_filter = NULL;
+ PyObject *type_filter = NULL;
+ PyObject *_family = NULL;
+ PyObject *_type = NULL;
+
+ if (retList == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "lOO", &pid, &af_filter, &type_filter))
+ goto error;
+ if (!PySequence_Check(af_filter) || !PySequence_Check(type_filter)) {
+ PyErr_SetString(PyExc_TypeError, "arg 2 or 3 is not a sequence");
+ goto error;
+ }
+
+ freep = kinfo_getfile(pid, &cnt);
+ if (freep == NULL) {
+ psutil_raise_ad_or_nsp(pid);
+ goto error;
+ }
+
+ tcplist = psutil_fetch_tcplist();
+ if (tcplist == NULL) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (i = 0; i < cnt; i++) {
+ int lport, rport, state;
+ char lip[200], rip[200];
+ char path[PATH_MAX];
+ int inseq;
+ tuple = NULL;
+ laddr = NULL;
+ raddr = NULL;
+
+ kif = &freep[i];
+ if (kif->kf_type == KF_TYPE_SOCKET) {
+ // apply filters
+ _family = PyLong_FromLong((long)kif->kf_sock_domain);
+ inseq = PySequence_Contains(af_filter, _family);
+ Py_DECREF(_family);
+ if (inseq == 0)
+ continue;
+ _type = PyLong_FromLong((long)kif->kf_sock_type);
+ inseq = PySequence_Contains(type_filter, _type);
+ Py_DECREF(_type);
+ if (inseq == 0)
+ continue;
+ // IPv4 / IPv6 socket
+ if ((kif->kf_sock_domain == AF_INET) ||
+ (kif->kf_sock_domain == AF_INET6)) {
+ // fill status
+ state = PSUTIL_CONN_NONE;
+ if (kif->kf_sock_type == SOCK_STREAM) {
+ tcp = psutil_search_tcplist(tcplist, kif);
+ if (tcp != NULL)
+ state = (int)tcp->t_state;
+ }
+
+ // build addr and port
+ inet_ntop(
+ kif->kf_sock_domain,
+ psutil_sockaddr_addr(kif->kf_sock_domain,
+ &kif->kf_sa_local),
+ lip,
+ sizeof(lip));
+ inet_ntop(
+ kif->kf_sock_domain,
+ psutil_sockaddr_addr(kif->kf_sock_domain,
+ &kif->kf_sa_peer),
+ rip,
+ sizeof(rip));
+ lport = htons(psutil_sockaddr_port(kif->kf_sock_domain,
+ &kif->kf_sa_local));
+ rport = htons(psutil_sockaddr_port(kif->kf_sock_domain,
+ &kif->kf_sa_peer));
+
+ // construct python tuple/list
+ laddr = Py_BuildValue("(si)", lip, lport);
+ if (!laddr)
+ goto error;
+ if (rport != 0)
+ raddr = Py_BuildValue("(si)", rip, rport);
+ else
+ raddr = Py_BuildValue("()");
+ if (!raddr)
+ goto error;
+ tuple = Py_BuildValue("(iiiNNi)",
+ kif->kf_fd,
+ kif->kf_sock_domain,
+ kif->kf_sock_type,
+ laddr,
+ raddr,
+ state);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ // UNIX socket
+ else if (kif->kf_sock_domain == AF_UNIX) {
+ struct sockaddr_un *sun;
+
+ sun = (struct sockaddr_un *)&kif->kf_sa_local;
+ snprintf(
+ path, sizeof(path), "%.*s",
+ (int)(sun->sun_len - (sizeof(*sun) - sizeof(sun->sun_path))),
+ sun->sun_path);
+
+ tuple = Py_BuildValue("(iiisOi)",
+ kif->kf_fd,
+ kif->kf_sock_domain,
+ kif->kf_sock_type,
+ path,
+ Py_None,
+ PSUTIL_CONN_NONE);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ Py_INCREF(Py_None);
+ }
+ }
+ }
+ free(freep);
+ free(tcplist);
+ return retList;
+
+error:
+ Py_XDECREF(tuple);
+ Py_XDECREF(laddr);
+ Py_XDECREF(raddr);
+ Py_DECREF(retList);
+ if (freep != NULL)
+ free(freep);
+ if (tcplist != NULL)
+ free(tcplist);
+ return NULL;
+}
+
+
+/*
+ * Return a Python list of tuples representing per-CPU times.
+ */
+static PyObject *
+psutil_per_cpu_times(PyObject *self, PyObject *args)
+{
+ static int maxcpus;
+ int mib[2];
+ int ncpu;
+ size_t len;
+ size_t size;
+ int i;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_cputime = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ // retrieve maxcpus value
+ size = sizeof(maxcpus);
+ if (sysctlbyname("kern.smp.maxcpus", &maxcpus, &size, NULL, 0) < 0) {
+ Py_DECREF(py_retlist);
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ long cpu_time[maxcpus][CPUSTATES];
+
+ // retrieve the number of cpus
+ mib[0] = CTL_HW;
+ mib[1] = HW_NCPU;
+ len = sizeof(ncpu);
+ if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ // per-cpu info
+ size = sizeof(cpu_time);
+ if (sysctlbyname("kern.cp_times", &cpu_time, &size, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (i = 0; i < ncpu; i++) {
+ py_cputime = Py_BuildValue(
+ "(ddddd)",
+ (double)cpu_time[i][CP_USER] / CLOCKS_PER_SEC,
+ (double)cpu_time[i][CP_NICE] / CLOCKS_PER_SEC,
+ (double)cpu_time[i][CP_SYS] / CLOCKS_PER_SEC,
+ (double)cpu_time[i][CP_IDLE] / CLOCKS_PER_SEC,
+ (double)cpu_time[i][CP_INTR] / CLOCKS_PER_SEC);
+ if (!py_cputime)
+ goto error;
+ if (PyList_Append(py_retlist, py_cputime))
+ goto error;
+ Py_DECREF(py_cputime);
+ }
+
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_cputime);
+ Py_DECREF(py_retlist);
+ return NULL;
+}
+
+
+// remove spaces from string
+void remove_spaces(char *str) {
+ char *p1 = str;
+ char *p2 = str;
+ do
+ while (*p2 == ' ')
+ p2++;
+ while ((*p1++ = *p2++));
+}
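+/*
+ * Usage sketch: remove_spaces() compacts a string in place, e.g.
+ *
+ *     char s[] = "  0x1000-  0x2000";
+ *     remove_spaces(s);   // s becomes "0x1000-0x2000"
+ *
+ * Below it strips the padding that sprintf()'s '*' field width
+ * inserts into the formatted address range.
+ */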
+
+
+/*
+ * Return a list of tuples, one for each of the process's memory maps.
+ * 'procstat' cmdline utility has been used as an example.
+ */
+static PyObject *
+psutil_proc_memory_maps(PyObject *self, PyObject *args)
+{
+ long pid;
+ int ptrwidth;
+ int i, cnt;
+ char addr[1000];
+ char perms[4];
+ const char *path;
+ struct kinfo_proc kp;
+ struct kinfo_vmentry *freep = NULL;
+ struct kinfo_vmentry *kve;
+ ptrwidth = 2 * sizeof(void *);
+ PyObject *pytuple = NULL;
+ PyObject *retlist = PyList_New(0);
+
+ if (retlist == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+ if (psutil_kinfo_proc(pid, &kp) == -1)
+ goto error;
+
+ freep = kinfo_getvmmap(pid, &cnt);
+ if (freep == NULL) {
+ psutil_raise_ad_or_nsp(pid);
+ goto error;
+ }
+ for (i = 0; i < cnt; i++) {
+ pytuple = NULL;
+ kve = &freep[i];
+ addr[0] = '\0';
+ perms[0] = '\0';
+ sprintf(addr, "%#*jx-%#*jx", ptrwidth, (uintmax_t)kve->kve_start,
+ ptrwidth, (uintmax_t)kve->kve_end);
+ remove_spaces(addr);
+ strlcat(perms, kve->kve_protection & KVME_PROT_READ ? "r" : "-",
+ sizeof(perms));
+ strlcat(perms, kve->kve_protection & KVME_PROT_WRITE ? "w" : "-",
+ sizeof(perms));
+ strlcat(perms, kve->kve_protection & KVME_PROT_EXEC ? "x" : "-",
+ sizeof(perms));
+
+ if (strlen(kve->kve_path) == 0) {
+ switch (kve->kve_type) {
+ case KVME_TYPE_NONE:
+ path = "[none]";
+ break;
+ case KVME_TYPE_DEFAULT:
+ path = "[default]";
+ break;
+ case KVME_TYPE_VNODE:
+ path = "[vnode]";
+ break;
+ case KVME_TYPE_SWAP:
+ path = "[swap]";
+ break;
+ case KVME_TYPE_DEVICE:
+ path = "[device]";
+ break;
+ case KVME_TYPE_PHYS:
+ path = "[phys]";
+ break;
+ case KVME_TYPE_DEAD:
+ path = "[dead]";
+ break;
+ case KVME_TYPE_SG:
+ path = "[sg]";
+ break;
+ case KVME_TYPE_UNKNOWN:
+ path = "[unknown]";
+ break;
+ default:
+ path = "[?]";
+ break;
+ }
+ }
+ else {
+ path = kve->kve_path;
+ }
+
+ pytuple = Py_BuildValue("sssiiii",
+ addr, // "start-end" address
+ perms, // "rwx" permissions
+ path, // path
+ kve->kve_resident, // rss
+ kve->kve_private_resident, // private
+ kve->kve_ref_count, // ref count
+ kve->kve_shadow_count); // shadow count
+ if (!pytuple)
+ goto error;
+ if (PyList_Append(retlist, pytuple))
+ goto error;
+ Py_DECREF(pytuple);
+ }
+ free(freep);
+ return retlist;
+
+error:
+ Py_XDECREF(pytuple);
+ Py_DECREF(retlist);
+ if (freep != NULL)
+ free(freep);
+ return NULL;
+}
+#endif
+
+
+/*
+ * Return a list of tuples including device, mount point and fs type
+ * for all partitions mounted on the system.
+ */
+static PyObject *
+psutil_disk_partitions(PyObject *self, PyObject *args)
+{
+ int num;
+ int i;
+ long len;
+ uint64_t flags;
+ char opts[200];
+ struct statfs *fs = NULL;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ // get the number of mount points
+ Py_BEGIN_ALLOW_THREADS
+ num = getfsstat(NULL, 0, MNT_NOWAIT);
+ Py_END_ALLOW_THREADS
+ if (num == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ len = sizeof(*fs) * num;
+ fs = malloc(len);
+ if (fs == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ Py_BEGIN_ALLOW_THREADS
+ num = getfsstat(fs, len, MNT_NOWAIT);
+ Py_END_ALLOW_THREADS
+ if (num == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (i = 0; i < num; i++) {
+ py_tuple = NULL;
+ opts[0] = 0;
+ flags = fs[i].f_flags;
+
+ // see sys/mount.h
+ if (flags & MNT_RDONLY)
+ strlcat(opts, "ro", sizeof(opts));
+ else
+ strlcat(opts, "rw", sizeof(opts));
+ if (flags & MNT_SYNCHRONOUS)
+ strlcat(opts, ",sync", sizeof(opts));
+ if (flags & MNT_NOEXEC)
+ strlcat(opts, ",noexec", sizeof(opts));
+ if (flags & MNT_NOSUID)
+ strlcat(opts, ",nosuid", sizeof(opts));
+ if (flags & MNT_UNION)
+ strlcat(opts, ",union", sizeof(opts));
+ if (flags & MNT_ASYNC)
+ strlcat(opts, ",async", sizeof(opts));
+ if (flags & MNT_SUIDDIR)
+ strlcat(opts, ",suiddir", sizeof(opts));
+ if (flags & MNT_SOFTDEP)
+ strlcat(opts, ",softdep", sizeof(opts));
+ if (flags & MNT_NOSYMFOLLOW)
+ strlcat(opts, ",nosymfollow", sizeof(opts));
+ if (flags & MNT_GJOURNAL)
+ strlcat(opts, ",gjournal", sizeof(opts));
+ if (flags & MNT_MULTILABEL)
+ strlcat(opts, ",multilabel", sizeof(opts));
+ if (flags & MNT_ACLS)
+ strlcat(opts, ",acls", sizeof(opts));
+ if (flags & MNT_NOATIME)
+ strlcat(opts, ",noatime", sizeof(opts));
+ if (flags & MNT_NOCLUSTERR)
+ strlcat(opts, ",noclusterr", sizeof(opts));
+ if (flags & MNT_NOCLUSTERW)
+ strlcat(opts, ",noclusterw", sizeof(opts));
+ if (flags & MNT_NFS4ACLS)
+ strlcat(opts, ",nfs4acls", sizeof(opts));
+
+ py_tuple = Py_BuildValue("(ssss)",
+ fs[i].f_mntfromname, // device
+ fs[i].f_mntonname, // mount point
+ fs[i].f_fstypename, // fs type
+ opts); // options
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+
+ free(fs);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_retlist);
+ if (fs != NULL)
+ free(fs);
+ return NULL;
+}
+
+
+/*
+ * Return a Python list of named tuples with overall network I/O information
+ */
+static PyObject *
+psutil_net_io_counters(PyObject *self, PyObject *args)
+{
+ char *buf = NULL, *lim, *next;
+ struct if_msghdr *ifm;
+ int mib[6];
+ size_t len;
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_ifc_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ mib[0] = CTL_NET; // networking subsystem
+ mib[1] = PF_ROUTE; // type of information
+ mib[2] = 0; // protocol (IPPROTO_xxx)
+ mib[3] = 0; // address family
+ mib[4] = NET_RT_IFLIST; // operation
+ mib[5] = 0;
+
+ if (sysctl(mib, 6, NULL, &len, NULL, 0) < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ buf = malloc(len);
+ if (buf == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (sysctl(mib, 6, buf, &len, NULL, 0) < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ lim = buf + len;
+
+ for (next = buf; next < lim; ) {
+ py_ifc_info = NULL;
+ ifm = (struct if_msghdr *)next;
+ next += ifm->ifm_msglen;
+
+ if (ifm->ifm_type == RTM_IFINFO) {
+ struct if_msghdr *if2m = (struct if_msghdr *)ifm;
+ struct sockaddr_dl *sdl = (struct sockaddr_dl *)(if2m + 1);
+ char ifc_name[32];
+
+ strncpy(ifc_name, sdl->sdl_data, sdl->sdl_nlen);
+ ifc_name[sdl->sdl_nlen] = 0;
+ // XXX: ignore usbus interfaces:
+ // http://lists.freebsd.org/pipermail/freebsd-current/
+ // 2011-October/028752.html
+ // 'ifconfig -a' doesn't show them, nor do we.
+ if (strncmp(ifc_name, "usbus", 5) == 0)
+ continue;
+
+ py_ifc_info = Py_BuildValue("(kkkkkkki)",
+ if2m->ifm_data.ifi_obytes,
+ if2m->ifm_data.ifi_ibytes,
+ if2m->ifm_data.ifi_opackets,
+ if2m->ifm_data.ifi_ipackets,
+ if2m->ifm_data.ifi_ierrors,
+ if2m->ifm_data.ifi_oerrors,
+ if2m->ifm_data.ifi_iqdrops,
+ 0); // dropout not supported
+ if (!py_ifc_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, ifc_name, py_ifc_info))
+ goto error;
+ Py_DECREF(py_ifc_info);
+ }
+ else {
+ continue;
+ }
+ }
+
+ free(buf);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_ifc_info);
+ Py_DECREF(py_retdict);
+ if (buf != NULL)
+ free(buf);
+ return NULL;
+}
+
+
+/*
+ * Return a Python dict of tuples for disk I/O information
+ */
+static PyObject *
+psutil_disk_io_counters(PyObject *self, PyObject *args)
+{
+ int i;
+ struct statinfo stats;
+
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_disk_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ if (devstat_checkversion(NULL) < 0) {
+ PyErr_Format(PyExc_RuntimeError, "devstat_checkversion() failed");
+ goto error;
+ }
+
+ stats.dinfo = (struct devinfo *)malloc(sizeof(struct devinfo));
+ if (stats.dinfo == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ bzero(stats.dinfo, sizeof(struct devinfo));
+
+ if (devstat_getdevs(NULL, &stats) == -1) {
+ PyErr_Format(PyExc_RuntimeError, "devstat_getdevs() failed");
+ goto error;
+ }
+
+ for (i = 0; i < stats.dinfo->numdevs; i++) {
+ py_disk_info = NULL;
+ struct devstat current;
+ char disk_name[128];
+ current = stats.dinfo->devices[i];
+ snprintf(disk_name, sizeof(disk_name), "%s%d",
+ current.device_name,
+ current.unit_number);
+
+ py_disk_info = Py_BuildValue(
+ "(KKKKLL)",
+ current.operations[DEVSTAT_READ], // no reads
+ current.operations[DEVSTAT_WRITE], // no writes
+ current.bytes[DEVSTAT_READ], // bytes read
+ current.bytes[DEVSTAT_WRITE], // bytes written
+ (long long)devstat_compute_etime(
+ &current.duration[DEVSTAT_READ], NULL), // r time
+ (long long)devstat_compute_etime(
+ &current.duration[DEVSTAT_WRITE], NULL)); // w time
+ if (!py_disk_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, disk_name, py_disk_info))
+ goto error;
+ Py_DECREF(py_disk_info);
+ }
+
+ if (stats.dinfo->mem_ptr)
+ free(stats.dinfo->mem_ptr);
+ free(stats.dinfo);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_disk_info);
+ Py_DECREF(py_retdict);
+ if (stats.dinfo != NULL)
+ free(stats.dinfo);
+ return NULL;
+}
+
+
+/*
+ * Return currently connected users as a list of tuples.
+ */
+static PyObject *
+psutil_users(PyObject *self, PyObject *args)
+{
+ PyObject *ret_list = PyList_New(0);
+ PyObject *tuple = NULL;
+
+ if (ret_list == NULL)
+ return NULL;
+
+#if __FreeBSD_version < 900000
+ struct utmp ut;
+ FILE *fp;
+
+ fp = fopen(_PATH_UTMP, "r");
+ if (fp == NULL) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ while (fread(&ut, sizeof(ut), 1, fp) == 1) {
+ if (*ut.ut_name == '\0')
+ continue;
+ tuple = Py_BuildValue(
+ "(sssf)",
+ ut.ut_name, // username
+ ut.ut_line, // tty
+ ut.ut_host, // hostname
+ (float)ut.ut_time); // start time
+ if (!tuple) {
+ fclose(fp);
+ goto error;
+ }
+ if (PyList_Append(ret_list, tuple)) {
+ fclose(fp);
+ goto error;
+ }
+ Py_DECREF(tuple);
+ }
+
+ fclose(fp);
+#else
+ struct utmpx *utx;
+
+ while ((utx = getutxent()) != NULL) {
+ if (utx->ut_type != USER_PROCESS)
+ continue;
+ tuple = Py_BuildValue(
+ "(sssf)",
+ utx->ut_user, // username
+ utx->ut_line, // tty
+ utx->ut_host, // hostname
+ (float)utx->ut_tv.tv_sec // start time
+ );
+
+ if (!tuple) {
+ endutxent();
+ goto error;
+ }
+ if (PyList_Append(ret_list, tuple)) {
+ endutxent();
+ goto error;
+ }
+ Py_DECREF(tuple);
+ }
+
+ endutxent();
+#endif
+ return ret_list;
+
+error:
+ Py_XDECREF(tuple);
+ Py_DECREF(ret_list);
+ return NULL;
+}
+
+
+
+/*
+ * System-wide open connections.
+ */
+
+#define HASHSIZE 1009
+static struct xfile *psutil_xfiles;
+static int psutil_nxfiles;
+
+int
+psutil_populate_xfiles()
+{
+ size_t len;
+
+ if ((psutil_xfiles = malloc(len = sizeof *psutil_xfiles)) == NULL) {
+ PyErr_NoMemory();
+ return 0;
+ }
+ while (sysctlbyname("kern.file", psutil_xfiles, &len, 0, 0) == -1) {
+ if (errno != ENOMEM) {
+            PyErr_SetFromErrno(PyExc_OSError);
+ return 0;
+ }
+ len *= 2;
+ if ((psutil_xfiles = realloc(psutil_xfiles, len)) == NULL) {
+ PyErr_NoMemory();
+ return 0;
+ }
+ }
+ if (len > 0 && psutil_xfiles->xf_size != sizeof *psutil_xfiles) {
+ PyErr_Format(PyExc_RuntimeError, "struct xfile size mismatch");
+ return 0;
+ }
+ psutil_nxfiles = len / sizeof *psutil_xfiles;
+ return 1;
+}
+
+int
+psutil_get_pid_from_sock(int sock_hash)
+{
+ struct xfile *xf;
+ int hash, n;
+ for (xf = psutil_xfiles, n = 0; n < psutil_nxfiles; ++n, ++xf) {
+ if (xf->xf_data == NULL)
+ continue;
+ hash = (int)((uintptr_t)xf->xf_data % HASHSIZE);
+ if (sock_hash == hash)
+ return xf->xf_pid;
+ }
+ return -1;
+}
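+/*
+ * The lookup above pairs sockets with their owning processes by
+ * hashing the kernel address of the socket on both sides: kern.file
+ * exposes it as xf_data (along with xf_pid), the pcb lists as xso_so.
+ * The caller's side, as done in psutil_gather_inet() below:
+ *
+ *     hash = (int)((uintptr_t)so->xso_so % HASHSIZE);
+ *     pid = psutil_get_pid_from_sock(hash);  // -1 when no owner found
+ */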
+
+
+// Reference:
+// https://gitorious.org/freebsd/freebsd/source/
+// f1d6f4778d2044502209708bc167c05f9aa48615:usr.bin/sockstat/sockstat.c
+int psutil_gather_inet(int proto, PyObject *py_retlist)
+{
+ struct xinpgen *xig, *exig;
+ struct xinpcb *xip;
+ struct xtcpcb *xtp;
+ struct inpcb *inp;
+ struct xsocket *so;
+ const char *varname = NULL;
+ size_t len, bufsize;
+ void *buf;
+ int hash;
+ int retry;
+ int type;
+
+ PyObject *tuple = NULL;
+ PyObject *laddr = NULL;
+ PyObject *raddr = NULL;
+
+ switch (proto) {
+ case IPPROTO_TCP:
+ varname = "net.inet.tcp.pcblist";
+ type = SOCK_STREAM;
+ break;
+ case IPPROTO_UDP:
+ varname = "net.inet.udp.pcblist";
+ type = SOCK_DGRAM;
+ break;
+ }
+
+ buf = NULL;
+ bufsize = 8192;
+ retry = 5;
+ do {
+ for (;;) {
+ buf = realloc(buf, bufsize);
+            if (buf == NULL) {
+                PyErr_NoMemory();
+                goto error;
+            }
+ len = bufsize;
+ if (sysctlbyname(varname, buf, &len, NULL, 0) == 0)
+ break;
+ if (errno != ENOMEM) {
+                PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ bufsize *= 2;
+ }
+ xig = (struct xinpgen *)buf;
+ exig = (struct xinpgen *)(void *)((char *)buf + len - sizeof *exig);
+ if (xig->xig_len != sizeof *xig || exig->xig_len != sizeof *exig) {
+ PyErr_Format(PyExc_RuntimeError, "struct xinpgen size mismatch");
+ goto error;
+ }
+ } while (xig->xig_gen != exig->xig_gen && retry--);
+
+
+ for (;;) {
+ int lport, rport, pid, status, family;
+
+ xig = (struct xinpgen *)(void *)((char *)xig + xig->xig_len);
+ if (xig >= exig)
+ break;
+
+ switch (proto) {
+ case IPPROTO_TCP:
+ xtp = (struct xtcpcb *)xig;
+ if (xtp->xt_len != sizeof *xtp) {
+ PyErr_Format(PyExc_RuntimeError,
+ "struct xtcpcb size mismatch");
+ goto error;
+ }
+ inp = &xtp->xt_inp;
+ so = &xtp->xt_socket;
+ status = xtp->xt_tp.t_state;
+ break;
+ case IPPROTO_UDP:
+ xip = (struct xinpcb *)xig;
+ if (xip->xi_len != sizeof *xip) {
+ PyErr_Format(PyExc_RuntimeError,
+ "struct xinpcb size mismatch");
+ goto error;
+ }
+ inp = &xip->xi_inp;
+ so = &xip->xi_socket;
+ status = PSUTIL_CONN_NONE;
+ break;
+ default:
+ PyErr_Format(PyExc_RuntimeError, "invalid proto");
+ goto error;
+ }
+
+ char lip[200], rip[200];
+
+ hash = (int)((uintptr_t)so->xso_so % HASHSIZE);
+ pid = psutil_get_pid_from_sock(hash);
+ if (pid < 0)
+ continue;
+ lport = ntohs(inp->inp_lport);
+ rport = ntohs(inp->inp_fport);
+
+ if (inp->inp_vflag & INP_IPV4) {
+ family = AF_INET;
+ inet_ntop(AF_INET, &inp->inp_laddr.s_addr, lip, sizeof(lip));
+ inet_ntop(AF_INET, &inp->inp_faddr.s_addr, rip, sizeof(rip));
+ }
+ else if (inp->inp_vflag & INP_IPV6) {
+ family = AF_INET6;
+ inet_ntop(AF_INET6, &inp->in6p_laddr.s6_addr, lip, sizeof(lip));
+ inet_ntop(AF_INET6, &inp->in6p_faddr.s6_addr, rip, sizeof(rip));
+ }
+
+ // construct python tuple/list
+ laddr = Py_BuildValue("(si)", lip, lport);
+ if (!laddr)
+ goto error;
+ if (rport != 0)
+ raddr = Py_BuildValue("(si)", rip, rport);
+ else
+ raddr = Py_BuildValue("()");
+ if (!raddr)
+ goto error;
+ tuple = Py_BuildValue("(iiiNNii)", -1, family, type, laddr, raddr,
+ status, pid);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(py_retlist, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+
+ free(buf);
+ return 1;
+
+error:
+ Py_XDECREF(tuple);
+ Py_XDECREF(laddr);
+ Py_XDECREF(raddr);
+ free(buf);
+ return 0;
+}
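+/*
+ * Note on the do/while retry above: the kernel bumps xig_gen whenever
+ * the pcb list changes, so the snapshot is re-read a bounded number of
+ * times (retry = 5) until the generation count at the head matches the
+ * one at the tail, i.e. the list did not mutate while being copied out.
+ */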
+
+
+int psutil_gather_unix(int proto, PyObject *py_retlist)
+{
+ struct xunpgen *xug, *exug;
+ struct xunpcb *xup;
+ const char *varname = NULL;
+ const char *protoname = NULL;
+ size_t len;
+ size_t bufsize;
+ void *buf;
+ int hash;
+ int retry;
+ int pid;
+ struct sockaddr_un *sun;
+ char path[PATH_MAX];
+
+ PyObject *tuple = NULL;
+ PyObject *laddr = NULL;
+ PyObject *raddr = NULL;
+
+ switch (proto) {
+ case SOCK_STREAM:
+ varname = "net.local.stream.pcblist";
+ protoname = "stream";
+ break;
+ case SOCK_DGRAM:
+ varname = "net.local.dgram.pcblist";
+ protoname = "dgram";
+ break;
+ }
+
+ buf = NULL;
+ bufsize = 8192;
+ retry = 5;
+
+ do {
+ for (;;) {
+ buf = realloc(buf, bufsize);
+ if (buf == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ len = bufsize;
+ if (sysctlbyname(varname, buf, &len, NULL, 0) == 0)
+ break;
+ if (errno != ENOMEM) {
+                PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ bufsize *= 2;
+ }
+ xug = (struct xunpgen *)buf;
+ exug = (struct xunpgen *)(void *)
+ ((char *)buf + len - sizeof *exug);
+ if (xug->xug_len != sizeof *xug || exug->xug_len != sizeof *exug) {
+ PyErr_Format(PyExc_RuntimeError, "struct xinpgen size mismatch");
+ goto error;
+ }
+ } while (xug->xug_gen != exug->xug_gen && retry--);
+
+ for (;;) {
+ xug = (struct xunpgen *)(void *)((char *)xug + xug->xug_len);
+ if (xug >= exug)
+ break;
+ xup = (struct xunpcb *)xug;
+ if (xup->xu_len != sizeof *xup)
+ goto error;
+
+ hash = (int)((uintptr_t) xup->xu_socket.xso_so % HASHSIZE);
+ pid = psutil_get_pid_from_sock(hash);
+ if (pid < 0)
+ continue;
+
+ sun = (struct sockaddr_un *)&xup->xu_addr;
+ snprintf(path, sizeof(path), "%.*s",
+ (int)(sun->sun_len - (sizeof(*sun) - sizeof(sun->sun_path))),
+ sun->sun_path);
+
+ tuple = Py_BuildValue("(iiisOii)", -1, AF_UNIX, proto, path, Py_None,
+ PSUTIL_CONN_NONE, pid);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(py_retlist, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ Py_INCREF(Py_None);
+ }
+
+ free(buf);
+ return 1;
+
+error:
+ Py_XDECREF(tuple);
+ Py_XDECREF(laddr);
+ Py_XDECREF(raddr);
+ free(buf);
+ return 0;
+}
+
+
+/*
+ * Return system-wide open connections.
+ */
+static PyObject*
+psutil_net_connections(PyObject* self, PyObject* args)
+{
+ PyObject *py_retlist = PyList_New(0);
+
+ if (py_retlist == NULL)
+ return NULL;
+ if (psutil_populate_xfiles() != 1)
+ goto error;
+ if (psutil_gather_inet(IPPROTO_TCP, py_retlist) == 0)
+ goto error;
+ if (psutil_gather_inet(IPPROTO_UDP, py_retlist) == 0)
+ goto error;
+ if (psutil_gather_unix(SOCK_STREAM, py_retlist) == 0)
+ goto error;
+ if (psutil_gather_unix(SOCK_DGRAM, py_retlist) == 0)
+ goto error;
+
+ free(psutil_xfiles);
+ return py_retlist;
+
+error:
+ Py_DECREF(py_retlist);
+ free(psutil_xfiles);
+ return NULL;
+}
+
+
+/*
+ * Get process CPU affinity.
+ * Reference: http://sources.freebsd.org/RELENG_9/src/usr.bin/cpuset/cpuset.c
+ */
+static PyObject*
+psutil_proc_cpu_affinity_get(PyObject* self, PyObject* args)
+{
+ long pid;
+ int ret;
+ int i;
+ cpuset_t mask;
+ PyObject* py_retlist;
+ PyObject* py_cpu_num;
+
+ if (!PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ ret = cpuset_getaffinity(CPU_LEVEL_WHICH, CPU_WHICH_PID, pid,
+ sizeof(mask), &mask);
+ if (ret != 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+
+ py_retlist = PyList_New(0);
+ if (py_retlist == NULL)
+ return NULL;
+
+ for (i = 0; i < CPU_SETSIZE; i++) {
+ if (CPU_ISSET(i, &mask)) {
+ py_cpu_num = Py_BuildValue("i", i);
+ if (py_cpu_num == NULL)
+ goto error;
+ if (PyList_Append(py_retlist, py_cpu_num))
+ goto error;
+ }
+ }
+
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_cpu_num);
+ Py_DECREF(py_retlist);
+ return NULL;
+}
+
+
+/*
+ * Set process CPU affinity.
+ * Reference: http://sources.freebsd.org/RELENG_9/src/usr.bin/cpuset/cpuset.c
+ */
+static PyObject *
+psutil_proc_cpu_affinity_set(PyObject *self, PyObject *args)
+{
+ long pid;
+ int i;
+ int seq_len;
+ int ret;
+ cpuset_t cpu_set;
+ PyObject *py_cpu_set;
+ PyObject *py_cpu_seq = NULL;
+
+ if (!PyArg_ParseTuple(args, "lO", &pid, &py_cpu_set))
+ return NULL;
+
+ py_cpu_seq = PySequence_Fast(py_cpu_set, "expected a sequence or integer");
+ if (!py_cpu_seq)
+ return NULL;
+ seq_len = PySequence_Fast_GET_SIZE(py_cpu_seq);
+
+ // calculate the mask
+ CPU_ZERO(&cpu_set);
+ for (i = 0; i < seq_len; i++) {
+ PyObject *item = PySequence_Fast_GET_ITEM(py_cpu_seq, i);
+#if PY_MAJOR_VERSION >= 3
+ long value = PyLong_AsLong(item);
+#else
+ long value = PyInt_AsLong(item);
+#endif
+ if (value == -1 && PyErr_Occurred())
+ goto error;
+ CPU_SET(value, &cpu_set);
+ }
+
+ // set affinity
+ ret = cpuset_setaffinity(CPU_LEVEL_WHICH, CPU_WHICH_PID, pid,
+ sizeof(cpu_set), &cpu_set);
+ if (ret != 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ Py_DECREF(py_cpu_seq);
+ Py_RETURN_NONE;
+
+error:
+ if (py_cpu_seq != NULL)
+ Py_DECREF(py_cpu_seq);
+ return NULL;
+}
+
+
+/*
+ * define the psutil C module methods and initialize the module.
+ */
+static PyMethodDef
+PsutilMethods[] =
+{
+ // --- per-process functions
+
+ {"proc_name", psutil_proc_name, METH_VARARGS,
+ "Return process name"},
+ {"proc_connections", psutil_proc_connections, METH_VARARGS,
+ "Return connections opened by process"},
+ {"proc_exe", psutil_proc_exe, METH_VARARGS,
+ "Return process pathname executable"},
+ {"proc_cmdline", psutil_proc_cmdline, METH_VARARGS,
+ "Return process cmdline as a list of cmdline arguments"},
+ {"proc_ppid", psutil_proc_ppid, METH_VARARGS,
+ "Return process ppid as an integer"},
+ {"proc_uids", psutil_proc_uids, METH_VARARGS,
+ "Return process real effective and saved user ids as a Python tuple"},
+ {"proc_gids", psutil_proc_gids, METH_VARARGS,
+ "Return process real effective and saved group ids as a Python tuple"},
+ {"proc_cpu_times", psutil_proc_cpu_times, METH_VARARGS,
+ "Return tuple of user/kern time for the given PID"},
+ {"proc_create_time", psutil_proc_create_time, METH_VARARGS,
+ "Return a float indicating the process create time expressed in "
+ "seconds since the epoch"},
+ {"proc_memory_info", psutil_proc_memory_info, METH_VARARGS,
+ "Return extended memory info for a process as a Python tuple."},
+ {"proc_num_threads", psutil_proc_num_threads, METH_VARARGS,
+ "Return number of threads used by process"},
+ {"proc_num_ctx_switches", psutil_proc_num_ctx_switches, METH_VARARGS,
+ "Return the number of context switches performed by process"},
+ {"proc_threads", psutil_proc_threads, METH_VARARGS,
+ "Return process threads"},
+ {"proc_status", psutil_proc_status, METH_VARARGS,
+ "Return process status as an integer"},
+ {"proc_io_counters", psutil_proc_io_counters, METH_VARARGS,
+ "Return process IO counters"},
+ {"proc_tty_nr", psutil_proc_tty_nr, METH_VARARGS,
+ "Return process tty (terminal) number"},
+ {"proc_cpu_affinity_get", psutil_proc_cpu_affinity_get, METH_VARARGS,
+ "Return process CPU affinity."},
+ {"proc_cpu_affinity_set", psutil_proc_cpu_affinity_set, METH_VARARGS,
+ "Set process CPU affinity."},
+#if defined(__FreeBSD_version) && __FreeBSD_version >= 800000
+ {"proc_open_files", psutil_proc_open_files, METH_VARARGS,
+ "Return files opened by process as a list of (path, fd) tuples"},
+ {"proc_cwd", psutil_proc_cwd, METH_VARARGS,
+ "Return process current working directory."},
+ {"proc_memory_maps", psutil_proc_memory_maps, METH_VARARGS,
+ "Return a list of tuples for every process's memory map"},
+ {"proc_num_fds", psutil_proc_num_fds, METH_VARARGS,
+ "Return the number of file descriptors opened by this process"},
+#endif
+
+ // --- system-related functions
+
+ {"pids", psutil_pids, METH_VARARGS,
+ "Returns a list of PIDs currently running on the system"},
+ {"cpu_count_logical", psutil_cpu_count_logical, METH_VARARGS,
+ "Return number of logical CPUs on the system"},
+ {"cpu_count_phys", psutil_cpu_count_phys, METH_VARARGS,
+ "Return an XML string to determine the number physical CPUs."},
+ {"virtual_mem", psutil_virtual_mem, METH_VARARGS,
+ "Return system virtual memory usage statistics"},
+ {"swap_mem", psutil_swap_mem, METH_VARARGS,
+ "Return swap mem stats"},
+ {"cpu_times", psutil_cpu_times, METH_VARARGS,
+ "Return system cpu times as a tuple (user, system, nice, idle, irc)"},
+#if defined(__FreeBSD_version) && __FreeBSD_version >= 800000
+ {"per_cpu_times", psutil_per_cpu_times, METH_VARARGS,
+ "Return system per-cpu times as a list of tuples"},
+#endif
+ {"boot_time", psutil_boot_time, METH_VARARGS,
+ "Return the system boot time expressed in seconds since the epoch."},
+ {"disk_partitions", psutil_disk_partitions, METH_VARARGS,
+ "Return a list of tuples including device, mount point and "
+ "fs type for all partitions mounted on the system."},
+ {"net_io_counters", psutil_net_io_counters, METH_VARARGS,
+ "Return dict of tuples of networks I/O information."},
+ {"disk_io_counters", psutil_disk_io_counters, METH_VARARGS,
+ "Return a Python dict of tuples for disk I/O information"},
+ {"users", psutil_users, METH_VARARGS,
+ "Return currently connected users as a list of tuples"},
+ {"net_connections", psutil_net_connections, METH_VARARGS,
+ "Return system-wide open connections."},
+
+ {NULL, NULL, 0, NULL}
+};
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+static struct module_state _state;
+#define GETSTATE(m) (&_state)
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int
+psutil_bsd_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int
+psutil_bsd_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+static struct PyModuleDef
+ moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_bsd",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_bsd_traverse,
+ psutil_bsd_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_bsd(void)
+
+#else
+#define INITERROR return
+
+void init_psutil_bsd(void)
+#endif
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_bsd", PsutilMethods);
+#endif
+ PyModule_AddIntConstant(module, "version", PSUTIL_VERSION);
+
+ // process status constants
+ PyModule_AddIntConstant(module, "SSTOP", SSTOP);
+ PyModule_AddIntConstant(module, "SSLEEP", SSLEEP);
+ PyModule_AddIntConstant(module, "SRUN", SRUN);
+ PyModule_AddIntConstant(module, "SIDL", SIDL);
+ PyModule_AddIntConstant(module, "SWAIT", SWAIT);
+ PyModule_AddIntConstant(module, "SLOCK", SLOCK);
+ PyModule_AddIntConstant(module, "SZOMB", SZOMB);
+ // connection status constants
+ PyModule_AddIntConstant(module, "TCPS_CLOSED", TCPS_CLOSED);
+ PyModule_AddIntConstant(module, "TCPS_CLOSING", TCPS_CLOSING);
+ PyModule_AddIntConstant(module, "TCPS_CLOSE_WAIT", TCPS_CLOSE_WAIT);
+ PyModule_AddIntConstant(module, "TCPS_LISTEN", TCPS_LISTEN);
+ PyModule_AddIntConstant(module, "TCPS_ESTABLISHED", TCPS_ESTABLISHED);
+ PyModule_AddIntConstant(module, "TCPS_SYN_SENT", TCPS_SYN_SENT);
+ PyModule_AddIntConstant(module, "TCPS_SYN_RECEIVED", TCPS_SYN_RECEIVED);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_1", TCPS_FIN_WAIT_1);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_2", TCPS_FIN_WAIT_2);
+ PyModule_AddIntConstant(module, "TCPS_LAST_ACK", TCPS_LAST_ACK);
+ PyModule_AddIntConstant(module, "TCPS_TIME_WAIT", TCPS_TIME_WAIT);
+ PyModule_AddIntConstant(module, "PSUTIL_CONN_NONE", PSUTIL_CONN_NONE);
+
+ if (module == NULL)
+ INITERROR;
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_bsd.h b/python/psutil/psutil/_psutil_bsd.h
new file mode 100644
index 000000000..803957dac
--- /dev/null
+++ b/python/psutil/psutil/_psutil_bsd.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+// --- per-process functions
+
+static PyObject* psutil_proc_cmdline(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_connections(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_create_time(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_exe(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_gids(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_maps(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_name(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_ctx_switches(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_fds(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_ppid(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_status(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_tty_nr(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_uids(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_set(PyObject* self, PyObject* args);
+
+#if defined(__FreeBSD_version) && __FreeBSD_version >= 800000
+static PyObject* psutil_proc_open_files(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cwd(PyObject* self, PyObject* args);
+#endif
+
+// --- system-related functions
+
+static PyObject* psutil_boot_time(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_logical(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_phys(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_net_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_pids(PyObject* self, PyObject* args);
+static PyObject* psutil_swap_mem(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_virtual_mem(PyObject* self, PyObject* args);
+
+#if defined(__FreeBSD_version) && __FreeBSD_version >= 800000
+static PyObject* psutil_per_cpu_times(PyObject* self, PyObject* args);
+#endif
diff --git a/python/psutil/psutil/_psutil_common.c b/python/psutil/psutil/_psutil_common.c
new file mode 100644
index 000000000..1c530d4df
--- /dev/null
+++ b/python/psutil/psutil/_psutil_common.c
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Routines common to all platforms.
+ */
+
+#include <Python.h>
+
+
+/*
+ * Set OSError(errno=ESRCH, strerror="No such process") Python exception.
+ */
+PyObject *
+NoSuchProcess(void) {
+ PyObject *exc;
+ char *msg = strerror(ESRCH);
+ exc = PyObject_CallFunction(PyExc_OSError, "(is)", ESRCH, msg);
+ PyErr_SetObject(PyExc_OSError, exc);
+ Py_XDECREF(exc);
+ return NULL;
+}
+
+
+/*
+ * Set OSError(errno=EACCES, strerror="Permission denied") Python exception.
+ */
+PyObject *
+AccessDenied(void) {
+ PyObject *exc;
+ char *msg = strerror(EACCES);
+ exc = PyObject_CallFunction(PyExc_OSError, "(is)", EACCES, msg);
+ PyErr_SetObject(PyExc_OSError, exc);
+ Py_XDECREF(exc);
+ return NULL;
+}
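+/*
+ * Both helpers return NULL after setting the exception, so the typical
+ * call site in the platform modules is a one-liner, e.g.:
+ *
+ *     if (size == 0)
+ *         return NoSuchProcess();  // raises OSError(ESRCH) in Python
+ */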
diff --git a/python/psutil/psutil/_psutil_common.h b/python/psutil/psutil/_psutil_common.h
new file mode 100644
index 000000000..43021a72d
--- /dev/null
+++ b/python/psutil/psutil/_psutil_common.h
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+PyObject* AccessDenied(void);
+PyObject* NoSuchProcess(void);
diff --git a/python/psutil/psutil/_psutil_linux.c b/python/psutil/psutil/_psutil_linux.c
new file mode 100644
index 000000000..a3bf5643c
--- /dev/null
+++ b/python/psutil/psutil/_psutil_linux.c
@@ -0,0 +1,689 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Linux-specific functions.
+ */
+
+#ifndef _GNU_SOURCE
+ #define _GNU_SOURCE 1
+#endif
+#include <Python.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <mntent.h>
+#include <features.h>
+#include <utmp.h>
+#include <sched.h>
+#include <linux/version.h>
+#include <sys/syscall.h>
+#include <sys/sysinfo.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <linux/sockios.h>
+#include <linux/if.h>
+#include <linux/ethtool.h>
+
+#include "_psutil_linux.h"
+
+/* The minimum number of CPUs allocated in a cpu_set_t */
+static const int NCPUS_START = sizeof(unsigned long) * CHAR_BIT;
+
+// Linux >= 2.6.13
+#define PSUTIL_HAVE_IOPRIO defined(__NR_ioprio_get) && defined(__NR_ioprio_set)
+
+// Linux >= 2.6.36 (supposedly) and glibc >= 2.13
+#define PSUTIL_HAVE_PRLIMIT \
+ (LINUX_VERSION_CODE >= KERNEL_VERSION(2, 6, 36)) && \
+ (__GLIBC__ >= 2 && __GLIBC_MINOR__ >= 13) && \
+ defined(__NR_prlimit64)
+
+#if PSUTIL_HAVE_PRLIMIT
+ #define _FILE_OFFSET_BITS 64
+ #include <time.h>
+ #include <sys/resource.h>
+#endif
+
+
+#if PSUTIL_HAVE_IOPRIO
+enum {
+ IOPRIO_WHO_PROCESS = 1,
+};
+
+// May happen on old RedHat versions, see:
+// https://github.com/giampaolo/psutil/issues/607
+#ifndef DUPLEX_UNKNOWN
+ #define DUPLEX_UNKNOWN 0xff
+#endif
+
+static inline int
+ioprio_get(int which, int who)
+{
+ return syscall(__NR_ioprio_get, which, who);
+}
+
+static inline int
+ioprio_set(int which, int who, int ioprio)
+{
+ return syscall(__NR_ioprio_set, which, who, ioprio);
+}
+
+#define IOPRIO_CLASS_SHIFT 13
+#define IOPRIO_PRIO_MASK ((1UL << IOPRIO_CLASS_SHIFT) - 1)
+
+#define IOPRIO_PRIO_CLASS(mask) ((mask) >> IOPRIO_CLASS_SHIFT)
+#define IOPRIO_PRIO_DATA(mask) ((mask) & IOPRIO_PRIO_MASK)
+#define IOPRIO_PRIO_VALUE(class, data) (((class) << IOPRIO_CLASS_SHIFT) | data)
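+/*
+ * Example of the packing: IOPRIO_PRIO_VALUE(2, 4) yields
+ * (2 << 13) | 4 == 0x4004, which IOPRIO_PRIO_CLASS() and
+ * IOPRIO_PRIO_DATA() split back into 2 and 4 respectively.
+ */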
+
+
+/*
+ * Return a (ioclass, iodata) Python tuple representing process I/O priority.
+ */
+static PyObject *
+psutil_proc_ioprio_get(PyObject *self, PyObject *args)
+{
+ long pid;
+ int ioprio, ioclass, iodata;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ ioprio = ioprio_get(IOPRIO_WHO_PROCESS, pid);
+ if (ioprio == -1)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ ioclass = IOPRIO_PRIO_CLASS(ioprio);
+ iodata = IOPRIO_PRIO_DATA(ioprio);
+ return Py_BuildValue("ii", ioclass, iodata);
+}
+
+
+/*
+ * A wrapper around ioprio_set(); sets process I/O priority.
+ * ioclass can be either IOPRIO_CLASS_RT, IOPRIO_CLASS_BE, IOPRIO_CLASS_IDLE
+ * or 0. iodata goes from 0 to 7 depending on ioclass specified.
+ */
+static PyObject *
+psutil_proc_ioprio_set(PyObject *self, PyObject *args)
+{
+ long pid;
+ int ioprio, ioclass, iodata;
+ int retval;
+
+ if (! PyArg_ParseTuple(args, "lii", &pid, &ioclass, &iodata))
+ return NULL;
+ ioprio = IOPRIO_PRIO_VALUE(ioclass, iodata);
+ retval = ioprio_set(IOPRIO_WHO_PROCESS, pid, ioprio);
+ if (retval == -1)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ Py_RETURN_NONE;
+}
+#endif
+
+
+#if PSUTIL_HAVE_PRLIMIT
+/*
+ * A wrapper around prlimit(2); gets/sets process resource limits.
+ * It handles both get and set; in the set case the extra 'soft'
+ * and 'hard' args must be provided.
+ */
+static PyObject *
+psutil_linux_prlimit(PyObject *self, PyObject *args)
+{
+ long pid;
+ int ret, resource;
+ struct rlimit old, new;
+ struct rlimit *newp = NULL;
+ PyObject *soft = NULL;
+ PyObject *hard = NULL;
+
+ if (! PyArg_ParseTuple(args, "li|OO", &pid, &resource, &soft, &hard))
+ return NULL;
+
+ // get
+ if (soft == NULL && hard == NULL) {
+ ret = prlimit(pid, resource, NULL, &old);
+ if (ret == -1)
+ return PyErr_SetFromErrno(PyExc_OSError);
+#if defined(PSUTIL_HAVE_LONG_LONG)
+ if (sizeof(old.rlim_cur) > sizeof(long)) {
+ return Py_BuildValue("LL",
+ (PY_LONG_LONG)old.rlim_cur,
+ (PY_LONG_LONG)old.rlim_max);
+ }
+#endif
+ return Py_BuildValue("ll", (long)old.rlim_cur, (long)old.rlim_max);
+ }
+
+ // set
+ else {
+#if defined(PSUTIL_HAVE_LARGEFILE_SUPPORT)
+ new.rlim_cur = PyLong_AsLongLong(soft);
+ if (new.rlim_cur == (rlim_t) - 1 && PyErr_Occurred())
+ return NULL;
+ new.rlim_max = PyLong_AsLongLong(hard);
+ if (new.rlim_max == (rlim_t) - 1 && PyErr_Occurred())
+ return NULL;
+#else
+ new.rlim_cur = PyLong_AsLong(soft);
+ if (new.rlim_cur == (rlim_t) - 1 && PyErr_Occurred())
+ return NULL;
+ new.rlim_max = PyLong_AsLong(hard);
+ if (new.rlim_max == (rlim_t) - 1 && PyErr_Occurred())
+ return NULL;
+#endif
+ newp = &new;
+ ret = prlimit(pid, resource, newp, &old);
+ if (ret == -1)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ Py_RETURN_NONE;
+ }
+}
+#endif
+
+
+/*
+ * Return disk mounted partitions as a list of tuples including device,
+ * mount point and filesystem type
+ */
+static PyObject *
+psutil_disk_partitions(PyObject *self, PyObject *args)
+{
+ FILE *file = NULL;
+ struct mntent *entry;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ // MOUNTED constant comes from mntent.h and it's == '/etc/mtab'
+ Py_BEGIN_ALLOW_THREADS
+ file = setmntent(MOUNTED, "r");
+ Py_END_ALLOW_THREADS
+    if (file == NULL) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, MOUNTED);
+ goto error;
+ }
+
+    while ((entry = getmntent(file)) != NULL) {
+ py_tuple = Py_BuildValue("(ssss)",
+ entry->mnt_fsname, // device
+ entry->mnt_dir, // mount point
+ entry->mnt_type, // fs type
+ entry->mnt_opts); // options
+ if (! py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ endmntent(file);
+ return py_retlist;
+
+error:
+ if (file != NULL)
+ endmntent(file);
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_retlist);
+ return NULL;
+}
+
+
+/*
+ * A wrapper around sysinfo(), return system memory usage statistics.
+ */
+static PyObject *
+psutil_linux_sysinfo(PyObject *self, PyObject *args)
+{
+ struct sysinfo info;
+
+ if (sysinfo(&info) != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ // note: boot time might also be determined from here
+ return Py_BuildValue(
+ "(KKKKKK)",
+ (unsigned long long)info.totalram * info.mem_unit, // total
+ (unsigned long long)info.freeram * info.mem_unit, // free
+ (unsigned long long)info.bufferram * info.mem_unit, // buffer
+ (unsigned long long)info.sharedram * info.mem_unit, // shared
+ (unsigned long long)info.totalswap * info.mem_unit, // swap tot
+ (unsigned long long)info.freeswap * info.mem_unit); // swap free
+}
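+/*
+ * sysinfo(2) reports memory in multiples of mem_unit bytes, which is
+ * why every field above is scaled by info.mem_unit before being handed
+ * to Python.
+ */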
+
+
+/*
+ * Return process CPU affinity as a Python list
+ * The dual implementation exists because of:
+ * https://github.com/giampaolo/psutil/issues/536
+ */
+
+#ifdef CPU_ALLOC
+
+static PyObject *
+psutil_proc_cpu_affinity_get(PyObject *self, PyObject *args)
+{
+ int cpu, ncpus, count, cpucount_s;
+ long pid;
+ size_t setsize;
+ cpu_set_t *mask = NULL;
+ PyObject *res = NULL;
+
+ if (!PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ ncpus = NCPUS_START;
+ while (1) {
+ setsize = CPU_ALLOC_SIZE(ncpus);
+ mask = CPU_ALLOC(ncpus);
+ if (mask == NULL)
+ return PyErr_NoMemory();
+ if (sched_getaffinity(pid, setsize, mask) == 0)
+ break;
+ CPU_FREE(mask);
+ if (errno != EINVAL)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ if (ncpus > INT_MAX / 2) {
+ PyErr_SetString(PyExc_OverflowError, "could not allocate "
+ "a large enough CPU set");
+ return NULL;
+ }
+ ncpus = ncpus * 2;
+ }
+
+ res = PyList_New(0);
+ if (res == NULL)
+ goto error;
+
+ cpucount_s = CPU_COUNT_S(setsize, mask);
+ for (cpu = 0, count = cpucount_s; count; cpu++) {
+ if (CPU_ISSET_S(cpu, setsize, mask)) {
+#if PY_MAJOR_VERSION >= 3
+ PyObject *cpu_num = PyLong_FromLong(cpu);
+#else
+ PyObject *cpu_num = PyInt_FromLong(cpu);
+#endif
+ if (cpu_num == NULL)
+ goto error;
+ if (PyList_Append(res, cpu_num)) {
+ Py_DECREF(cpu_num);
+ goto error;
+ }
+ Py_DECREF(cpu_num);
+ --count;
+ }
+ }
+ CPU_FREE(mask);
+ return res;
+
+error:
+ if (mask)
+ CPU_FREE(mask);
+ Py_XDECREF(res);
+ return NULL;
+}
+#else
+
+
+/*
+ * Alternative implementation in case CPU_ALLOC is not defined.
+ */
+static PyObject *
+psutil_proc_cpu_affinity_get(PyObject *self, PyObject *args)
+{
+ cpu_set_t cpuset;
+ unsigned int len = sizeof(cpu_set_t);
+ long pid;
+ int i;
+ PyObject* py_retlist = NULL;
+ PyObject *py_cpu_num = NULL;
+
+ if (!PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ CPU_ZERO(&cpuset);
+ if (sched_getaffinity(pid, len, &cpuset) < 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+
+ py_retlist = PyList_New(0);
+ if (py_retlist == NULL)
+ goto error;
+ for (i = 0; i < CPU_SETSIZE; ++i) {
+ if (CPU_ISSET(i, &cpuset)) {
+ py_cpu_num = Py_BuildValue("i", i);
+ if (py_cpu_num == NULL)
+ goto error;
+ if (PyList_Append(py_retlist, py_cpu_num))
+ goto error;
+ Py_DECREF(py_cpu_num);
+ }
+ }
+
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_cpu_num);
+ Py_DECREF(py_retlist);
+ return NULL;
+}
+#endif
+
+/*
+ * Set process CPU affinity; expects a sequence of CPU numbers
+ */
+static PyObject *
+psutil_proc_cpu_affinity_set(PyObject *self, PyObject *args)
+{
+ cpu_set_t cpu_set;
+ size_t len;
+ long pid;
+ int i, seq_len;
+ PyObject *py_cpu_set;
+ PyObject *py_cpu_seq = NULL;
+
+ if (!PyArg_ParseTuple(args, "lO", &pid, &py_cpu_set))
+ return NULL;
+
+ if (!PySequence_Check(py_cpu_set)) {
+ PyErr_Format(PyExc_TypeError, "sequence argument expected, got %s",
+ Py_TYPE(py_cpu_set)->tp_name);
+ goto error;
+ }
+
+ py_cpu_seq = PySequence_Fast(py_cpu_set, "expected a sequence or integer");
+ if (!py_cpu_seq)
+ goto error;
+ seq_len = PySequence_Fast_GET_SIZE(py_cpu_seq);
+ CPU_ZERO(&cpu_set);
+ for (i = 0; i < seq_len; i++) {
+ PyObject *item = PySequence_Fast_GET_ITEM(py_cpu_seq, i);
+#if PY_MAJOR_VERSION >= 3
+ long value = PyLong_AsLong(item);
+#else
+ long value = PyInt_AsLong(item);
+#endif
+ if (value == -1 && PyErr_Occurred())
+ goto error;
+        // reject out-of-range values: CPU_SET() with an invalid index
+        // is undefined behavior
+        if (value < 0 || value >= CPU_SETSIZE) {
+            PyErr_Format(PyExc_ValueError, "invalid CPU number: %ld", value);
+            goto error;
+        }
+        CPU_SET(value, &cpu_set);
+ }
+
+ len = sizeof(cpu_set);
+ if (sched_setaffinity(pid, len, &cpu_set)) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ Py_DECREF(py_cpu_seq);
+ Py_RETURN_NONE;
+
+error:
+ if (py_cpu_seq != NULL)
+ Py_DECREF(py_cpu_seq);
+ return NULL;
+}
+
+
+/*
+ * Return currently connected users as a list of tuples.
+ */
+static PyObject *
+psutil_users(PyObject *self, PyObject *args)
+{
+ PyObject *ret_list = PyList_New(0);
+ PyObject *tuple = NULL;
+ PyObject *user_proc = NULL;
+ struct utmp *ut;
+
+ if (ret_list == NULL)
+ return NULL;
+ setutent();
+ while (NULL != (ut = getutent())) {
+ tuple = NULL;
+ user_proc = NULL;
+ if (ut->ut_type == USER_PROCESS)
+ user_proc = Py_True;
+ else
+ user_proc = Py_False;
+ tuple = Py_BuildValue(
+ "(sssfO)",
+ ut->ut_user, // username
+ ut->ut_line, // tty
+ ut->ut_host, // hostname
+ (float)ut->ut_tv.tv_sec, // tstamp
+ user_proc // (bool) user process
+ );
+ if (! tuple)
+ goto error;
+ if (PyList_Append(ret_list, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ endutent();
+ return ret_list;
+
+error:
+ Py_XDECREF(tuple);
+    // user_proc is a borrowed reference to Py_True/Py_False; never decref it
+ Py_DECREF(ret_list);
+ endutent();
+ return NULL;
+}
+
+
+/*
+ * Return stats about a particular network
+ * interface. References:
+ * https://github.com/dpaleino/wicd/blob/master/wicd/backends/be-ioctl.py
+ * http://www.i-scream.org/libstatgrab/
+ */
+static PyObject*
+psutil_net_if_stats(PyObject* self, PyObject* args)
+{
+ char *nic_name;
+ int sock = 0;
+ int ret;
+ int duplex;
+ int speed;
+ int mtu;
+ struct ifreq ifr;
+ struct ethtool_cmd ethcmd;
+ PyObject *py_is_up = NULL;
+ PyObject *py_ret = NULL;
+
+ if (! PyArg_ParseTuple(args, "s", &nic_name))
+ return NULL;
+
+ sock = socket(AF_INET, SOCK_DGRAM, 0);
+ if (sock == -1)
+ goto error;
+    strncpy(ifr.ifr_name, nic_name, sizeof(ifr.ifr_name));
+    // strncpy() does not NUL-terminate if nic_name fills the buffer
+    ifr.ifr_name[sizeof(ifr.ifr_name) - 1] = '\0';
+
+ // is up?
+ ret = ioctl(sock, SIOCGIFFLAGS, &ifr);
+ if (ret == -1)
+ goto error;
+ if ((ifr.ifr_flags & IFF_UP) != 0)
+ py_is_up = Py_True;
+ else
+ py_is_up = Py_False;
+ Py_INCREF(py_is_up);
+
+ // MTU
+ ret = ioctl(sock, SIOCGIFMTU, &ifr);
+ if (ret == -1)
+ goto error;
+ mtu = ifr.ifr_mtu;
+
+ // duplex and speed
+ memset(&ethcmd, 0, sizeof ethcmd);
+ ethcmd.cmd = ETHTOOL_GSET;
+ ifr.ifr_data = (caddr_t)&ethcmd;
+ ret = ioctl(sock, SIOCETHTOOL, &ifr);
+
+ if (ret != -1) {
+ duplex = ethcmd.duplex;
+ speed = ethcmd.speed;
+ }
+ else {
+ if (errno == EOPNOTSUPP) {
+ // we typically get here in case of wi-fi cards
+ duplex = DUPLEX_UNKNOWN;
+ speed = 0;
+ }
+ else {
+ goto error;
+ }
+ }
+
+ close(sock);
+ py_ret = Py_BuildValue("[Oiii]", py_is_up, duplex, speed, mtu);
+ if (!py_ret)
+ goto error;
+ Py_DECREF(py_is_up);
+ return py_ret;
+
+error:
+    // set the exception first, before close() can clobber errno
+    PyErr_SetFromErrno(PyExc_OSError);
+    Py_XDECREF(py_is_up);
+    if (sock != 0)
+        close(sock);
+    return NULL;
+}
+
+
+/*
+ * Define the psutil C module methods and initialize the module.
+ */
+static PyMethodDef
+PsutilMethods[] =
+{
+ // --- per-process functions
+
+#if PSUTIL_HAVE_IOPRIO
+ {"proc_ioprio_get", psutil_proc_ioprio_get, METH_VARARGS,
+ "Get process I/O priority"},
+ {"proc_ioprio_set", psutil_proc_ioprio_set, METH_VARARGS,
+ "Set process I/O priority"},
+#endif
+ {"proc_cpu_affinity_get", psutil_proc_cpu_affinity_get, METH_VARARGS,
+ "Return process CPU affinity as a Python long (the bitmask)."},
+ {"proc_cpu_affinity_set", psutil_proc_cpu_affinity_set, METH_VARARGS,
+ "Set process CPU affinity; expects a bitmask."},
+
+ // --- system related functions
+
+ {"disk_partitions", psutil_disk_partitions, METH_VARARGS,
+ "Return disk mounted partitions as a list of tuples including "
+ "device, mount point and filesystem type"},
+ {"users", psutil_users, METH_VARARGS,
+ "Return currently connected users as a list of tuples"},
+ {"net_if_stats", psutil_net_if_stats, METH_VARARGS,
+ "Return NIC stats (isup, duplex, speed, mtu)"},
+
+ // --- linux specific
+
+ {"linux_sysinfo", psutil_linux_sysinfo, METH_VARARGS,
+ "A wrapper around sysinfo(), return system memory usage statistics"},
+#if PSUTIL_HAVE_PRLIMIT
+ {"linux_prlimit", psutil_linux_prlimit, METH_VARARGS,
+ "Get or set process resource limits."},
+#endif
+
+
+ {NULL, NULL, 0, NULL}
+};
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+#define GETSTATE(m) (&_state)
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int
+psutil_linux_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int
+psutil_linux_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_linux",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_linux_traverse,
+ psutil_linux_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_linux(void)
+
+#else
+#define INITERROR return
+
+void init_psutil_linux(void)
+#endif
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_linux", PsutilMethods);
+#endif
+
+    // bail out early: the PyModule_AddIntConstant() calls below would
+    // dereference a NULL module
+    if (module == NULL)
+        INITERROR;
+
+    PyModule_AddIntConstant(module, "version", PSUTIL_VERSION);
+#if PSUTIL_HAVE_PRLIMIT
+ PyModule_AddIntConstant(module, "RLIM_INFINITY", RLIM_INFINITY);
+ PyModule_AddIntConstant(module, "RLIMIT_AS", RLIMIT_AS);
+ PyModule_AddIntConstant(module, "RLIMIT_CORE", RLIMIT_CORE);
+ PyModule_AddIntConstant(module, "RLIMIT_CPU", RLIMIT_CPU);
+ PyModule_AddIntConstant(module, "RLIMIT_DATA", RLIMIT_DATA);
+ PyModule_AddIntConstant(module, "RLIMIT_FSIZE", RLIMIT_FSIZE);
+ PyModule_AddIntConstant(module, "RLIMIT_LOCKS", RLIMIT_LOCKS);
+ PyModule_AddIntConstant(module, "RLIMIT_MEMLOCK", RLIMIT_MEMLOCK);
+ PyModule_AddIntConstant(module, "RLIMIT_NOFILE", RLIMIT_NOFILE);
+ PyModule_AddIntConstant(module, "RLIMIT_NPROC", RLIMIT_NPROC);
+ PyModule_AddIntConstant(module, "RLIMIT_RSS", RLIMIT_RSS);
+ PyModule_AddIntConstant(module, "RLIMIT_STACK", RLIMIT_STACK);
+#ifdef RLIMIT_MSGQUEUE
+ PyModule_AddIntConstant(module, "RLIMIT_MSGQUEUE", RLIMIT_MSGQUEUE);
+#endif
+#ifdef RLIMIT_NICE
+ PyModule_AddIntConstant(module, "RLIMIT_NICE", RLIMIT_NICE);
+#endif
+#ifdef RLIMIT_RTPRIO
+ PyModule_AddIntConstant(module, "RLIMIT_RTPRIO", RLIMIT_RTPRIO);
+#endif
+#ifdef RLIMIT_RTTIME
+ PyModule_AddIntConstant(module, "RLIMIT_RTTIME", RLIMIT_RTTIME);
+#endif
+#ifdef RLIMIT_SIGPENDING
+ PyModule_AddIntConstant(module, "RLIMIT_SIGPENDING", RLIMIT_SIGPENDING);
+#endif
+#endif
+ PyModule_AddIntConstant(module, "DUPLEX_HALF", DUPLEX_HALF);
+ PyModule_AddIntConstant(module, "DUPLEX_FULL", DUPLEX_FULL);
+ PyModule_AddIntConstant(module, "DUPLEX_UNKNOWN", DUPLEX_UNKNOWN);
+
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_linux.h b/python/psutil/psutil/_psutil_linux.h
new file mode 100644
index 000000000..ec6a33871
--- /dev/null
+++ b/python/psutil/psutil/_psutil_linux.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+// process
+
+static PyObject* psutil_proc_cpu_affinity_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_set(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_ioprio_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_ioprio_set(PyObject* self, PyObject* args);
+
+// system
+
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_linux_sysinfo(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_net_if_stats(PyObject* self, PyObject* args);
diff --git a/python/psutil/psutil/_psutil_osx.c b/python/psutil/psutil/_psutil_osx.c
new file mode 100644
index 000000000..3ebf8ff27
--- /dev/null
+++ b/python/psutil/psutil/_psutil_osx.c
@@ -0,0 +1,1808 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * OS X platform-specific module methods for _psutil_osx
+ */
+
+#include <Python.h>
+#include <assert.h>
+#include <errno.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <utmpx.h>
+#include <sys/sysctl.h>
+#include <sys/vmmeter.h>
+#include <libproc.h>
+#include <sys/proc_info.h>
+#include <netinet/tcp_fsm.h>
+#include <arpa/inet.h>
+#include <net/if_dl.h>
+#include <pwd.h>
+
+#include <mach/mach.h>
+#include <mach/task.h>
+#include <mach/mach_init.h>
+#include <mach/host_info.h>
+#include <mach/mach_host.h>
+#include <mach/mach_traps.h>
+#include <mach/mach_vm.h>
+#include <mach/shared_region.h>
+
+#include <mach-o/loader.h>
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <IOKit/IOKitLib.h>
+#include <IOKit/storage/IOBlockStorageDriver.h>
+#include <IOKit/storage/IOMedia.h>
+#include <IOKit/IOBSD.h>
+
+#include "_psutil_osx.h"
+#include "_psutil_common.h"
+#include "arch/osx/process_info.h"
+
+
+/*
+ * A wrapper around host_statistics() invoked with HOST_VM_INFO.
+ */
+int
+psutil_sys_vminfo(vm_statistics_data_t *vmstat)
+{
+ kern_return_t ret;
+ mach_msg_type_number_t count = sizeof(*vmstat) / sizeof(integer_t);
+ mach_port_t mport = mach_host_self();
+
+ ret = host_statistics(mport, HOST_VM_INFO, (host_info_t)vmstat, &count);
+    if (ret != KERN_SUCCESS) {
+        PyErr_Format(PyExc_RuntimeError,
+                     "host_statistics() failed: %s", mach_error_string(ret));
+        // don't leak the host port on the error path
+        mach_port_deallocate(mach_task_self(), mport);
+        return 0;
+    }
+ mach_port_deallocate(mach_task_self(), mport);
+ return 1;
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+ kinfo_proc *proclist = NULL;
+ kinfo_proc *orig_address = NULL;
+ size_t num_processes;
+ size_t idx;
+ PyObject *pid = NULL;
+ PyObject *retlist = PyList_New(0);
+
+ if (retlist == NULL)
+ return NULL;
+
+ if (psutil_get_proc_list(&proclist, &num_processes) != 0) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "failed to retrieve process list.");
+ goto error;
+ }
+
+ if (num_processes > 0) {
+ // save the address of proclist so we can free it later
+ orig_address = proclist;
+ for (idx = 0; idx < num_processes; idx++) {
+ pid = Py_BuildValue("i", proclist->kp_proc.p_pid);
+ if (!pid)
+ goto error;
+ if (PyList_Append(retlist, pid))
+ goto error;
+ Py_DECREF(pid);
+ proclist++;
+ }
+ free(orig_address);
+ }
+ return retlist;
+
+error:
+ Py_XDECREF(pid);
+ Py_DECREF(retlist);
+ if (orig_address != NULL)
+ free(orig_address);
+ return NULL;
+}
+
+
+/*
+ * Return process name from kinfo_proc as a Python string.
+ */
+static PyObject *
+psutil_proc_name(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("s", kp.kp_proc.p_comm);
+}
+
+
+/*
+ * Return process current working directory.
+ */
+static PyObject *
+psutil_proc_cwd(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct proc_vnodepathinfo pathinfo;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ if (! psutil_proc_pidinfo(pid, PROC_PIDVNODEPATHINFO, &pathinfo,
+ sizeof(pathinfo)))
+ {
+ return NULL;
+ }
+ return Py_BuildValue("s", pathinfo.pvi_cdir.vip_path);
+}
+
+
+/*
+ * Return path of the process executable.
+ */
+static PyObject *
+psutil_proc_exe(PyObject *self, PyObject *args)
+{
+ long pid;
+ char buf[PATH_MAX];
+ int ret;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ ret = proc_pidpath(pid, &buf, sizeof(buf));
+ if (ret == 0) {
+ if (! psutil_pid_exists(pid))
+ return NoSuchProcess();
+ else
+ return AccessDenied();
+ }
+ return Py_BuildValue("s", buf);
+}
+
+
+/*
+ * Return process cmdline as a Python list of cmdline arguments.
+ */
+static PyObject *
+psutil_proc_cmdline(PyObject *self, PyObject *args)
+{
+ long pid;
+ PyObject *arglist = NULL;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ // get the commandline, defined in arch/osx/process_info.c
+ arglist = psutil_get_arg_list(pid);
+ return arglist;
+}
+
+
+/*
+ * Return process parent pid from kinfo_proc as a Python integer.
+ */
+static PyObject *
+psutil_proc_ppid(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("l", (long)kp.kp_eproc.e_ppid);
+}
+
+
+/*
+ * Return process real, effective and saved user ids from kinfo_proc
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_uids(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("lll",
+ (long)kp.kp_eproc.e_pcred.p_ruid,
+ (long)kp.kp_eproc.e_ucred.cr_uid,
+ (long)kp.kp_eproc.e_pcred.p_svuid);
+}
+
+
+/*
+ * Return process real, effective and saved group ids from kinfo_proc
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_gids(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("lll",
+ (long)kp.kp_eproc.e_pcred.p_rgid,
+ (long)kp.kp_eproc.e_ucred.cr_groups[0],
+ (long)kp.kp_eproc.e_pcred.p_svgid);
+}
+
+
+/*
+ * Return process controlling terminal number as an integer.
+ */
+static PyObject *
+psutil_proc_tty_nr(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("i", kp.kp_eproc.e_tdev);
+}
+
+
+/*
+ * Return a list of tuples for the process's memory maps.
+ * 'procstat' cmdline utility has been used as an example.
+ */
+static PyObject *
+psutil_proc_memory_maps(PyObject *self, PyObject *args)
+{
+ char buf[PATH_MAX];
+ char addr_str[34];
+ char perms[8];
+ int pagesize = getpagesize();
+ long pid;
+ kern_return_t err = KERN_SUCCESS;
+ mach_port_t task = MACH_PORT_NULL;
+ uint32_t depth = 1;
+ vm_address_t address = 0;
+ vm_size_t size = 0;
+
+ PyObject *py_tuple = NULL;
+ PyObject *py_list = PyList_New(0);
+
+ if (py_list == NULL)
+ return NULL;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+
+ err = task_for_pid(mach_task_self(), pid, &task);
+
+ if (err != KERN_SUCCESS) {
+ if (! psutil_pid_exists(pid)) {
+ NoSuchProcess();
+ }
+ else {
+ // pid exists, so return AccessDenied error since task_for_pid()
+ // failed
+ AccessDenied();
+ }
+ goto error;
+ }
+
+ while (1) {
+ py_tuple = NULL;
+ struct vm_region_submap_info_64 info;
+ mach_msg_type_number_t count = VM_REGION_SUBMAP_INFO_COUNT_64;
+
+ err = vm_region_recurse_64(task, &address, &size, &depth,
+ (vm_region_info_64_t)&info, &count);
+ if (err == KERN_INVALID_ADDRESS)
+ break;
+ if (info.is_submap) {
+ depth++;
+ }
+ else {
+ // Free/Reset the char[]s to avoid weird paths
+ memset(buf, 0, sizeof(buf));
+ memset(addr_str, 0, sizeof(addr_str));
+ memset(perms, 0, sizeof(perms));
+
+ sprintf(addr_str, "%016lx-%016lx", address, address + size);
+ sprintf(perms, "%c%c%c/%c%c%c",
+ (info.protection & VM_PROT_READ) ? 'r' : '-',
+ (info.protection & VM_PROT_WRITE) ? 'w' : '-',
+ (info.protection & VM_PROT_EXECUTE) ? 'x' : '-',
+ (info.max_protection & VM_PROT_READ) ? 'r' : '-',
+ (info.max_protection & VM_PROT_WRITE) ? 'w' : '-',
+ (info.max_protection & VM_PROT_EXECUTE) ? 'x' : '-');
+
+ err = proc_regionfilename(pid, address, buf, sizeof(buf));
+
+ if (info.share_mode == SM_COW && info.ref_count == 1) {
+ // Treat single reference SM_COW as SM_PRIVATE
+ info.share_mode = SM_PRIVATE;
+ }
+
+ if (strlen(buf) == 0) {
+ switch (info.share_mode) {
+ // case SM_LARGE_PAGE:
+ // Treat SM_LARGE_PAGE the same as SM_PRIVATE
+ // since they are not shareable and are wired.
+ case SM_COW:
+ strcpy(buf, "[cow]");
+ break;
+ case SM_PRIVATE:
+ strcpy(buf, "[prv]");
+ break;
+ case SM_EMPTY:
+ strcpy(buf, "[nul]");
+ break;
+ case SM_SHARED:
+ case SM_TRUESHARED:
+ strcpy(buf, "[shm]");
+ break;
+ case SM_PRIVATE_ALIASED:
+ strcpy(buf, "[ali]");
+ break;
+ case SM_SHARED_ALIASED:
+ strcpy(buf, "[s/a]");
+ break;
+ default:
+ strcpy(buf, "[???]");
+ }
+ }
+
+ py_tuple = Py_BuildValue(
+ "sssIIIIIH",
+ addr_str, // "start-end"address
+ perms, // "rwx" permissions
+ buf, // path
+ info.pages_resident * pagesize, // rss
+ info.pages_shared_now_private * pagesize, // private
+ info.pages_swapped_out * pagesize, // swapped
+ info.pages_dirtied * pagesize, // dirtied
+ info.ref_count, // ref count
+ info.shadow_depth // shadow depth
+ );
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_list, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+
+ // increment address for the next map/file
+ address += size;
+ }
+
+ if (task != MACH_PORT_NULL)
+ mach_port_deallocate(mach_task_self(), task);
+
+ return py_list;
+
+error:
+ if (task != MACH_PORT_NULL)
+ mach_port_deallocate(mach_task_self(), task);
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_list);
+ return NULL;
+}
+
+
+/*
+ * Return the number of logical CPUs in the system.
+ * XXX this could be shared with BSD.
+ */
+static PyObject *
+psutil_cpu_count_logical(PyObject *self, PyObject *args)
+{
+ int mib[2];
+ int ncpu;
+ size_t len;
+ mib[0] = CTL_HW;
+ mib[1] = HW_NCPU;
+ len = sizeof(ncpu);
+
+ if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
+ Py_RETURN_NONE; // mimic os.cpu_count()
+ else
+ return Py_BuildValue("i", ncpu);
+}
+
+
+/*
+ * Return the number of physical CPUs in the system.
+ */
+static PyObject *
+psutil_cpu_count_phys(PyObject *self, PyObject *args)
+{
+ int num;
+ size_t size = sizeof(int);
+
+ if (sysctlbyname("hw.physicalcpu", &num, &size, NULL, 0))
+ Py_RETURN_NONE; // mimic os.cpu_count()
+ else
+ return Py_BuildValue("i", num);
+}
+
+
+#define TV2DOUBLE(t) ((t).tv_sec + (t).tv_usec / 1000000.0)
+
+/*
+ * Return a Python tuple (user_time, kernel_time)
+ */
+static PyObject *
+psutil_proc_cpu_times(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct proc_taskinfo pti;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_pidinfo(pid, PROC_PIDTASKINFO, &pti, sizeof(pti)))
+ return NULL;
+ return Py_BuildValue("(dd)",
+ (float)pti.pti_total_user / 1000000000.0,
+ (float)pti.pti_total_system / 1000000000.0);
+}
+
+
+/*
+ * Return a Python float indicating the process create time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_proc_create_time(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("d", TV2DOUBLE(kp.kp_proc.p_starttime));
+}
+
+
+/*
+ * Return extended memory info about a process.
+ */
+static PyObject *
+psutil_proc_memory_info(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct proc_taskinfo pti;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_pidinfo(pid, PROC_PIDTASKINFO, &pti, sizeof(pti)))
+ return NULL;
+ // Note: determining other memory stats on OSX is a mess:
+ // http://www.opensource.apple.com/source/top/top-67/libtop.c?txt
+ // I just give up...
+ // struct proc_regioninfo pri;
+ // psutil_proc_pidinfo(pid, PROC_PIDREGIONINFO, &pri, sizeof(pri))
+ return Py_BuildValue(
+ "(KKkk)",
+ pti.pti_resident_size, // resident memory size (rss)
+ pti.pti_virtual_size, // virtual memory size (vms)
+ pti.pti_faults, // number of page faults (pages)
+ pti.pti_pageins // number of actual pageins (pages)
+ );
+}
+
+
+/*
+ * Return number of threads used by process as a Python integer.
+ */
+static PyObject *
+psutil_proc_num_threads(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct proc_taskinfo pti;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_pidinfo(pid, PROC_PIDTASKINFO, &pti, sizeof(pti)))
+ return NULL;
+ return Py_BuildValue("k", pti.pti_threadnum);
+}
+
+
+/*
+ * Return the number of context switches performed by process.
+ */
+static PyObject *
+psutil_proc_num_ctx_switches(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct proc_taskinfo pti;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_pidinfo(pid, PROC_PIDTASKINFO, &pti, sizeof(pti)))
+ return NULL;
+    // the involuntary value seems not to be available;
+    // pti.pti_csw probably refers to the sum of the two (getrusage()
+    // numbers seem to confirm this theory).
+ return Py_BuildValue("ki", pti.pti_csw, 0);
+}
+
+
+/*
+ * Return system virtual memory stats
+ */
+static PyObject *
+psutil_virtual_mem(PyObject *self, PyObject *args)
+{
+
+ int mib[2];
+ uint64_t total;
+ size_t len = sizeof(total);
+ vm_statistics_data_t vm;
+ int pagesize = getpagesize();
+ // physical mem
+ mib[0] = CTL_HW;
+ mib[1] = HW_MEMSIZE;
+
+ if (sysctl(mib, 2, &total, &len, NULL, 0)) {
+ if (errno != 0)
+ PyErr_SetFromErrno(PyExc_OSError);
+ else
+ PyErr_Format(PyExc_RuntimeError, "sysctl(HW_MEMSIZE) failed");
+ return NULL;
+ }
+
+ // vm
+ if (!psutil_sys_vminfo(&vm))
+ return NULL;
+
+ return Py_BuildValue(
+ "KKKKK",
+ total,
+ (unsigned long long) vm.active_count * pagesize,
+ (unsigned long long) vm.inactive_count * pagesize,
+ (unsigned long long) vm.wire_count * pagesize,
+ (unsigned long long) vm.free_count * pagesize
+ );
+}
+
+
+/*
+ * Return stats about swap memory.
+ */
+static PyObject *
+psutil_swap_mem(PyObject *self, PyObject *args)
+{
+ int mib[2];
+ size_t size;
+ struct xsw_usage totals;
+ vm_statistics_data_t vmstat;
+ int pagesize = getpagesize();
+
+ mib[0] = CTL_VM;
+ mib[1] = VM_SWAPUSAGE;
+ size = sizeof(totals);
+ if (sysctl(mib, 2, &totals, &size, NULL, 0) == -1) {
+ if (errno != 0)
+ PyErr_SetFromErrno(PyExc_OSError);
+ else
+ PyErr_Format(PyExc_RuntimeError, "sysctl(VM_SWAPUSAGE) failed");
+ return NULL;
+ }
+ if (!psutil_sys_vminfo(&vmstat))
+ return NULL;
+
+ return Py_BuildValue(
+ "LLLKK",
+ totals.xsu_total,
+ totals.xsu_used,
+ totals.xsu_avail,
+ (unsigned long long)vmstat.pageins * pagesize,
+ (unsigned long long)vmstat.pageouts * pagesize);
+}
+
+
+/*
+ * Return a Python tuple representing user, kernel and idle CPU times
+ */
+static PyObject *
+psutil_cpu_times(PyObject *self, PyObject *args)
+{
+ mach_msg_type_number_t count = HOST_CPU_LOAD_INFO_COUNT;
+ kern_return_t error;
+ host_cpu_load_info_data_t r_load;
+
+ mach_port_t host_port = mach_host_self();
+ error = host_statistics(host_port, HOST_CPU_LOAD_INFO,
+ (host_info_t)&r_load, &count);
+    if (error != KERN_SUCCESS) {
+        // don't leak the host port on the error path
+        mach_port_deallocate(mach_task_self(), host_port);
+        return PyErr_Format(PyExc_RuntimeError,
+                            "Error in host_statistics(): %s",
+                            mach_error_string(error));
+    }
+ mach_port_deallocate(mach_task_self(), host_port);
+
+ return Py_BuildValue(
+ "(dddd)",
+ (double)r_load.cpu_ticks[CPU_STATE_USER] / CLK_TCK,
+ (double)r_load.cpu_ticks[CPU_STATE_NICE] / CLK_TCK,
+ (double)r_load.cpu_ticks[CPU_STATE_SYSTEM] / CLK_TCK,
+ (double)r_load.cpu_ticks[CPU_STATE_IDLE] / CLK_TCK
+ );
+}
+
+
+/*
+ * Return a Python list of tuples representing per-cpu times
+ */
+static PyObject *
+psutil_per_cpu_times(PyObject *self, PyObject *args)
+{
+ natural_t cpu_count;
+ processor_info_array_t info_array;
+ mach_msg_type_number_t info_count;
+ kern_return_t error;
+ processor_cpu_load_info_data_t *cpu_load_info = NULL;
+ int i, ret;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_cputime = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ mach_port_t host_port = mach_host_self();
+ error = host_processor_info(host_port, PROCESSOR_CPU_LOAD_INFO,
+ &cpu_count, &info_array, &info_count);
+    if (error != KERN_SUCCESS) {
+        PyErr_Format(PyExc_RuntimeError, "Error in host_processor_info(): %s",
+                     mach_error_string(error));
+        // don't leak the host port on the error path
+        mach_port_deallocate(mach_task_self(), host_port);
+        goto error;
+    }
+ mach_port_deallocate(mach_task_self(), host_port);
+
+ cpu_load_info = (processor_cpu_load_info_data_t *) info_array;
+
+ for (i = 0; i < cpu_count; i++) {
+ py_cputime = Py_BuildValue(
+ "(dddd)",
+ (double)cpu_load_info[i].cpu_ticks[CPU_STATE_USER] / CLK_TCK,
+ (double)cpu_load_info[i].cpu_ticks[CPU_STATE_NICE] / CLK_TCK,
+ (double)cpu_load_info[i].cpu_ticks[CPU_STATE_SYSTEM] / CLK_TCK,
+ (double)cpu_load_info[i].cpu_ticks[CPU_STATE_IDLE] / CLK_TCK
+ );
+ if (!py_cputime)
+ goto error;
+ if (PyList_Append(py_retlist, py_cputime))
+ goto error;
+ Py_DECREF(py_cputime);
+ }
+
+ ret = vm_deallocate(mach_task_self(), (vm_address_t)info_array,
+ info_count * sizeof(int));
+ if (ret != KERN_SUCCESS)
+ PyErr_WarnEx(PyExc_RuntimeWarning, "vm_deallocate() failed", 2);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_cputime);
+ Py_DECREF(py_retlist);
+ if (cpu_load_info != NULL) {
+ ret = vm_deallocate(mach_task_self(), (vm_address_t)info_array,
+ info_count * sizeof(int));
+ if (ret != KERN_SUCCESS)
+ PyErr_WarnEx(PyExc_RuntimeWarning, "vm_deallocate() failed", 2);
+ }
+ return NULL;
+}
+
+
+/*
+ * Return a Python float indicating the system boot time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+ // fetch sysctl "kern.boottime"
+ static int request[2] = { CTL_KERN, KERN_BOOTTIME };
+ struct timeval result;
+ size_t result_len = sizeof result;
+ time_t boot_time = 0;
+
+ if (sysctl(request, 2, &result, &result_len, NULL, 0) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ boot_time = result.tv_sec;
+ return Py_BuildValue("f", (float)boot_time);
+}
+
+
+/*
+ * Return a list of tuples including device, mount point and fs type
+ * for all partitions mounted on the system.
+ */
+static PyObject *
+psutil_disk_partitions(PyObject *self, PyObject *args)
+{
+ int num;
+ int i;
+ long len;
+ uint64_t flags;
+ char opts[400];
+ struct statfs *fs = NULL;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ // get the number of mount points
+ Py_BEGIN_ALLOW_THREADS
+ num = getfsstat(NULL, 0, MNT_NOWAIT);
+ Py_END_ALLOW_THREADS
+ if (num == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ len = sizeof(*fs) * num;
+ fs = malloc(len);
+ if (fs == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ Py_BEGIN_ALLOW_THREADS
+ num = getfsstat(fs, len, MNT_NOWAIT);
+ Py_END_ALLOW_THREADS
+ if (num == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (i = 0; i < num; i++) {
+ opts[0] = 0;
+ flags = fs[i].f_flags;
+
+ // see sys/mount.h
+ if (flags & MNT_RDONLY)
+ strlcat(opts, "ro", sizeof(opts));
+ else
+ strlcat(opts, "rw", sizeof(opts));
+ if (flags & MNT_SYNCHRONOUS)
+ strlcat(opts, ",sync", sizeof(opts));
+ if (flags & MNT_NOEXEC)
+ strlcat(opts, ",noexec", sizeof(opts));
+ if (flags & MNT_NOSUID)
+ strlcat(opts, ",nosuid", sizeof(opts));
+ if (flags & MNT_UNION)
+ strlcat(opts, ",union", sizeof(opts));
+ if (flags & MNT_ASYNC)
+ strlcat(opts, ",async", sizeof(opts));
+ if (flags & MNT_EXPORTED)
+ strlcat(opts, ",exported", sizeof(opts));
+ if (flags & MNT_QUARANTINE)
+ strlcat(opts, ",quarantine", sizeof(opts));
+ if (flags & MNT_LOCAL)
+ strlcat(opts, ",local", sizeof(opts));
+ if (flags & MNT_QUOTA)
+ strlcat(opts, ",quota", sizeof(opts));
+ if (flags & MNT_ROOTFS)
+ strlcat(opts, ",rootfs", sizeof(opts));
+ if (flags & MNT_DOVOLFS)
+ strlcat(opts, ",dovolfs", sizeof(opts));
+ if (flags & MNT_DONTBROWSE)
+ strlcat(opts, ",dontbrowse", sizeof(opts));
+ if (flags & MNT_IGNORE_OWNERSHIP)
+ strlcat(opts, ",ignore-ownership", sizeof(opts));
+ if (flags & MNT_AUTOMOUNTED)
+ strlcat(opts, ",automounted", sizeof(opts));
+ if (flags & MNT_JOURNALED)
+ strlcat(opts, ",journaled", sizeof(opts));
+ if (flags & MNT_NOUSERXATTR)
+ strlcat(opts, ",nouserxattr", sizeof(opts));
+ if (flags & MNT_DEFWRITE)
+ strlcat(opts, ",defwrite", sizeof(opts));
+ if (flags & MNT_MULTILABEL)
+ strlcat(opts, ",multilabel", sizeof(opts));
+ if (flags & MNT_NOATIME)
+ strlcat(opts, ",noatime", sizeof(opts));
+ if (flags & MNT_UPDATE)
+ strlcat(opts, ",update", sizeof(opts));
+ if (flags & MNT_RELOAD)
+ strlcat(opts, ",reload", sizeof(opts));
+ if (flags & MNT_FORCE)
+ strlcat(opts, ",force", sizeof(opts));
+ if (flags & MNT_CMDFLAGS)
+ strlcat(opts, ",cmdflags", sizeof(opts));
+
+ py_tuple = Py_BuildValue(
+ "(ssss)", fs[i].f_mntfromname, // device
+ fs[i].f_mntonname, // mount point
+ fs[i].f_fstypename, // fs type
+ opts); // options
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+
+ free(fs);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_retlist);
+ if (fs != NULL)
+ free(fs);
+ return NULL;
+}
+
+
+/*
+ * Return process status as a Python integer.
+ */
+static PyObject *
+psutil_proc_status(PyObject *self, PyObject *args)
+{
+ long pid;
+ struct kinfo_proc kp;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (psutil_get_kinfo_proc(pid, &kp) == -1)
+ return NULL;
+ return Py_BuildValue("i", (int)kp.kp_proc.p_stat);
+}
+
+
+/*
+ * Return process threads
+ */
+static PyObject *
+psutil_proc_threads(PyObject *self, PyObject *args)
+{
+ long pid;
+ int err, j, ret;
+ kern_return_t kr;
+ unsigned int info_count = TASK_BASIC_INFO_COUNT;
+ mach_port_t task = MACH_PORT_NULL;
+ struct task_basic_info tasks_info;
+ thread_act_port_array_t thread_list = NULL;
+ thread_info_data_t thinfo_basic;
+ thread_basic_info_t basic_info_th;
+ mach_msg_type_number_t thread_count, thread_info_count;
+
+ PyObject *retList = PyList_New(0);
+ PyObject *pyTuple = NULL;
+
+ if (retList == NULL)
+ return NULL;
+
+ // the argument passed should be a process id
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+
+ // task_for_pid() requires special privileges
+ err = task_for_pid(mach_task_self(), pid, &task);
+ if (err != KERN_SUCCESS) {
+ if (! psutil_pid_exists(pid))
+ NoSuchProcess();
+ else
+ AccessDenied();
+ goto error;
+ }
+
+ info_count = TASK_BASIC_INFO_COUNT;
+ err = task_info(task, TASK_BASIC_INFO, (task_info_t)&tasks_info,
+ &info_count);
+ if (err != KERN_SUCCESS) {
+        // KERN_INVALID_ARGUMENT (4) here means access was denied
+        if (err == KERN_INVALID_ARGUMENT) {
+ AccessDenied();
+ }
+ else {
+ // otherwise throw a runtime error with appropriate error code
+ PyErr_Format(PyExc_RuntimeError,
+ "task_info(TASK_BASIC_INFO) failed");
+ }
+ goto error;
+ }
+
+ err = task_threads(task, &thread_list, &thread_count);
+ if (err != KERN_SUCCESS) {
+ PyErr_Format(PyExc_RuntimeError, "task_threads() failed");
+ goto error;
+ }
+
+ for (j = 0; j < thread_count; j++) {
+ pyTuple = NULL;
+ thread_info_count = THREAD_INFO_MAX;
+ kr = thread_info(thread_list[j], THREAD_BASIC_INFO,
+ (thread_info_t)thinfo_basic, &thread_info_count);
+ if (kr != KERN_SUCCESS) {
+ PyErr_Format(PyExc_RuntimeError,
+ "thread_info() with flag THREAD_BASIC_INFO failed");
+ goto error;
+ }
+
+ basic_info_th = (thread_basic_info_t)thinfo_basic;
+ pyTuple = Py_BuildValue(
+ "Iff",
+ j + 1,
+ (float)basic_info_th->user_time.microseconds / 1000000.0,
+ (float)basic_info_th->system_time.microseconds / 1000000.0
+ );
+ if (!pyTuple)
+ goto error;
+ if (PyList_Append(retList, pyTuple))
+ goto error;
+ Py_DECREF(pyTuple);
+ }
+
+ ret = vm_deallocate(task, (vm_address_t)thread_list,
+ thread_count * sizeof(int));
+ if (ret != KERN_SUCCESS)
+ PyErr_WarnEx(PyExc_RuntimeWarning, "vm_deallocate() failed", 2);
+
+ mach_port_deallocate(mach_task_self(), task);
+
+ return retList;
+
+error:
+ if (task != MACH_PORT_NULL)
+ mach_port_deallocate(mach_task_self(), task);
+ Py_XDECREF(pyTuple);
+ Py_DECREF(retList);
+ if (thread_list != NULL) {
+ ret = vm_deallocate(task, (vm_address_t)thread_list,
+ thread_count * sizeof(int));
+ if (ret != KERN_SUCCESS)
+ PyErr_WarnEx(PyExc_RuntimeWarning, "vm_deallocate() failed", 2);
+ }
+ return NULL;
+}
+
+
+/*
+ * Return process open files as a Python tuple.
+ * References:
+ * - lsof source code: http://goo.gl/SYW79 and http://goo.gl/m78fd
+ * - /usr/include/sys/proc_info.h
+ */
+static PyObject *
+psutil_proc_open_files(PyObject *self, PyObject *args)
+{
+ long pid;
+ int pidinfo_result;
+ int iterations;
+ int i;
+ int nb;
+
+ struct proc_fdinfo *fds_pointer = NULL;
+ struct proc_fdinfo *fdp_pointer;
+ struct vnode_fdinfowithpath vi;
+
+ PyObject *retList = PyList_New(0);
+ PyObject *tuple = NULL;
+
+ if (retList == NULL)
+ return NULL;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, NULL, 0);
+ if (pidinfo_result <= 0) {
+        // may be ignored later if errno != 0
+ PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDLISTFDS) failed");
+ goto error;
+ }
+
+ fds_pointer = malloc(pidinfo_result);
+ if (fds_pointer == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, fds_pointer,
+ pidinfo_result);
+ if (pidinfo_result <= 0) {
+        // may be ignored later if errno != 0
+ PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDLISTFDS) failed");
+ goto error;
+ }
+
+ iterations = (pidinfo_result / PROC_PIDLISTFD_SIZE);
+
+ for (i = 0; i < iterations; i++) {
+ tuple = NULL;
+ fdp_pointer = &fds_pointer[i];
+
+ if (fdp_pointer->proc_fdtype == PROX_FDTYPE_VNODE)
+ {
+ nb = proc_pidfdinfo(pid,
+ fdp_pointer->proc_fd,
+ PROC_PIDFDVNODEPATHINFO,
+ &vi,
+ sizeof(vi));
+
+ // --- errors checking
+ if (nb <= 0) {
+ if ((errno == ENOENT) || (errno == EBADF)) {
+ // no such file or directory or bad file descriptor;
+ // let's assume the file has been closed or removed
+ continue;
+ }
+                // may be ignored later if errno != 0
+ PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDFDVNODEPATHINFO) failed");
+ goto error;
+ }
+ if (nb < sizeof(vi)) {
+ PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDFDVNODEPATHINFO) failed "
+ "(buffer mismatch)");
+ goto error;
+ }
+ // --- /errors checking
+
+ // --- construct python list
+ tuple = Py_BuildValue("(si)",
+ vi.pvip.vip_path,
+ (int)fdp_pointer->proc_fd);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ // --- /construct python list
+ }
+ }
+
+ free(fds_pointer);
+ return retList;
+
+error:
+ Py_XDECREF(tuple);
+ Py_DECREF(retList);
+ if (fds_pointer != NULL)
+ free(fds_pointer);
+ if (errno != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ else if (! psutil_pid_exists(pid))
+ return NoSuchProcess();
+ else
+ return NULL; // exception has already been set earlier
+}
+
+
+// a sentinel value for connections without an actual status
+static int PSUTIL_CONN_NONE = 128;
+
+/*
+ * Return process TCP and UDP connections as a list of tuples.
+ * References:
+ * - lsof source code: http://goo.gl/SYW79 and http://goo.gl/wNrC0
+ * - /usr/include/sys/proc_info.h
+ */
+static PyObject *
+psutil_proc_connections(PyObject *self, PyObject *args)
+{
+ long pid;
+ int pidinfo_result;
+ int iterations;
+ int i;
+ int nb;
+
+ struct proc_fdinfo *fds_pointer = NULL;
+ struct proc_fdinfo *fdp_pointer;
+ struct socket_fdinfo si;
+
+ PyObject *retList = PyList_New(0);
+ PyObject *tuple = NULL;
+ PyObject *laddr = NULL;
+ PyObject *raddr = NULL;
+ PyObject *af_filter = NULL;
+ PyObject *type_filter = NULL;
+
+ if (retList == NULL)
+ return NULL;
+
+ if (! PyArg_ParseTuple(args, "lOO", &pid, &af_filter, &type_filter))
+ goto error;
+
+ if (!PySequence_Check(af_filter) || !PySequence_Check(type_filter)) {
+ PyErr_SetString(PyExc_TypeError, "arg 2 or 3 is not a sequence");
+ goto error;
+ }
+
+ if (pid == 0)
+ return retList;
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, NULL, 0);
+ if (pidinfo_result <= 0)
+ goto error;
+
+ fds_pointer = malloc(pidinfo_result);
+ if (fds_pointer == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, fds_pointer,
+ pidinfo_result);
+
+ if (pidinfo_result <= 0)
+ goto error;
+ iterations = (pidinfo_result / PROC_PIDLISTFD_SIZE);
+
+ for (i = 0; i < iterations; i++) {
+ tuple = NULL;
+ laddr = NULL;
+ raddr = NULL;
+ errno = 0;
+ fdp_pointer = &fds_pointer[i];
+
+ if (fdp_pointer->proc_fdtype == PROX_FDTYPE_SOCKET)
+ {
+ nb = proc_pidfdinfo(pid, fdp_pointer->proc_fd,
+ PROC_PIDFDSOCKETINFO, &si, sizeof(si));
+
+ // --- errors checking
+ if (nb <= 0) {
+ if (errno == EBADF) {
+ // let's assume socket has been closed
+ continue;
+ }
+ if (errno != 0)
+ PyErr_SetFromErrno(PyExc_OSError);
+ else
+ PyErr_Format(
+ PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDFDVNODEPATHINFO) failed");
+ goto error;
+ }
+ if (nb < sizeof(si)) {
+ PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDFDVNODEPATHINFO) failed "
+ "(buffer mismatch)");
+ goto error;
+ }
+ // --- /errors checking
+
+ //
+ int fd, family, type, lport, rport, state;
+ char lip[200], rip[200];
+ int inseq;
+ PyObject *_family;
+ PyObject *_type;
+
+ fd = (int)fdp_pointer->proc_fd;
+ family = si.psi.soi_family;
+ type = si.psi.soi_type;
+
+ // apply filters
+ _family = PyLong_FromLong((long)family);
+ inseq = PySequence_Contains(af_filter, _family);
+ Py_DECREF(_family);
+ if (inseq == 0)
+ continue;
+ _type = PyLong_FromLong((long)type);
+ inseq = PySequence_Contains(type_filter, _type);
+ Py_DECREF(_type);
+ if (inseq == 0)
+ continue;
+
+ if (errno != 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ if ((family == AF_INET) || (family == AF_INET6)) {
+ if (family == AF_INET) {
+ inet_ntop(AF_INET,
+ &si.psi.soi_proto.pri_tcp.tcpsi_ini. \
+ insi_laddr.ina_46.i46a_addr4,
+ lip,
+ sizeof(lip));
+ inet_ntop(AF_INET,
+ &si.psi.soi_proto.pri_tcp.tcpsi_ini.insi_faddr. \
+ ina_46.i46a_addr4,
+ rip,
+ sizeof(rip));
+ }
+ else {
+ inet_ntop(AF_INET6,
+ &si.psi.soi_proto.pri_tcp.tcpsi_ini. \
+ insi_laddr.ina_6,
+ lip, sizeof(lip));
+ inet_ntop(AF_INET6,
+ &si.psi.soi_proto.pri_tcp.tcpsi_ini. \
+ insi_faddr.ina_6,
+ rip, sizeof(rip));
+ }
+
+ // check for inet_ntop failures
+ if (errno != 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ lport = ntohs(si.psi.soi_proto.pri_tcp.tcpsi_ini.insi_lport);
+ rport = ntohs(si.psi.soi_proto.pri_tcp.tcpsi_ini.insi_fport);
+ if (type == SOCK_STREAM)
+ state = (int)si.psi.soi_proto.pri_tcp.tcpsi_state;
+ else
+ state = PSUTIL_CONN_NONE;
+
+ laddr = Py_BuildValue("(si)", lip, lport);
+ if (!laddr)
+ goto error;
+ if (rport != 0)
+ raddr = Py_BuildValue("(si)", rip, rport);
+ else
+ raddr = Py_BuildValue("()");
+ if (!raddr)
+ goto error;
+
+ // construct the python list
+ tuple = Py_BuildValue("(iiiNNi)", fd, family, type, laddr,
+ raddr, state);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ else if (family == AF_UNIX) {
+ // construct the python list
+ tuple = Py_BuildValue(
+ "(iiissi)",
+ fd, family, type,
+ si.psi.soi_proto.pri_un.unsi_addr.ua_sun.sun_path,
+ si.psi.soi_proto.pri_un.unsi_caddr.ua_sun.sun_path,
+ PSUTIL_CONN_NONE);
+ if (!tuple)
+ goto error;
+ if (PyList_Append(retList, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ }
+ }
+
+ free(fds_pointer);
+ return retList;
+
+error:
+ Py_XDECREF(tuple);
+ Py_XDECREF(laddr);
+ Py_XDECREF(raddr);
+ Py_DECREF(retList);
+
+ if (fds_pointer != NULL)
+ free(fds_pointer);
+ if (errno != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ else if (! psutil_pid_exists(pid))
+ return NoSuchProcess();
+ else
+ return PyErr_Format(PyExc_RuntimeError,
+ "proc_pidinfo(PROC_PIDLISTFDS) failed");
+}
+
+
+/*
+ * Return number of file descriptors opened by process.
+ */
+static PyObject *
+psutil_proc_num_fds(PyObject *self, PyObject *args)
+{
+ long pid;
+ int pidinfo_result;
+ int num;
+ struct proc_fdinfo *fds_pointer;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, NULL, 0);
+ if (pidinfo_result <= 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+
+ fds_pointer = malloc(pidinfo_result);
+ if (fds_pointer == NULL)
+ return PyErr_NoMemory();
+ pidinfo_result = proc_pidinfo(pid, PROC_PIDLISTFDS, 0, fds_pointer,
+ pidinfo_result);
+ if (pidinfo_result <= 0) {
+ free(fds_pointer);
+ return PyErr_SetFromErrno(PyExc_OSError);
+ }
+
+ num = (pidinfo_result / PROC_PIDLISTFD_SIZE);
+ free(fds_pointer);
+ return Py_BuildValue("i", num);
+}
+
+
+/*
+ * Return a Python list of named tuples with overall network I/O information
+ */
+static PyObject *
+psutil_net_io_counters(PyObject *self, PyObject *args)
+{
+ char *buf = NULL, *lim, *next;
+ struct if_msghdr *ifm;
+ int mib[6];
+ size_t len;
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_ifc_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+
+ mib[0] = CTL_NET; // networking subsystem
+ mib[1] = PF_ROUTE; // type of information
+ mib[2] = 0; // protocol (IPPROTO_xxx)
+ mib[3] = 0; // address family
+ mib[4] = NET_RT_IFLIST2; // operation
+ mib[5] = 0;
+
+ if (sysctl(mib, 6, NULL, &len, NULL, 0) < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ buf = malloc(len);
+ if (buf == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (sysctl(mib, 6, buf, &len, NULL, 0) < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ lim = buf + len;
+
+ for (next = buf; next < lim; ) {
+ ifm = (struct if_msghdr *)next;
+ next += ifm->ifm_msglen;
+
+ if (ifm->ifm_type == RTM_IFINFO2) {
+ py_ifc_info = NULL;
+ struct if_msghdr2 *if2m = (struct if_msghdr2 *)ifm;
+ struct sockaddr_dl *sdl = (struct sockaddr_dl *)(if2m + 1);
+ char ifc_name[32];
+
+ strncpy(ifc_name, sdl->sdl_data, sdl->sdl_nlen);
+ ifc_name[sdl->sdl_nlen] = 0;
+
+ py_ifc_info = Py_BuildValue(
+ "(KKKKKKKi)",
+ if2m->ifm_data.ifi_obytes,
+ if2m->ifm_data.ifi_ibytes,
+ if2m->ifm_data.ifi_opackets,
+ if2m->ifm_data.ifi_ipackets,
+ if2m->ifm_data.ifi_ierrors,
+ if2m->ifm_data.ifi_oerrors,
+ if2m->ifm_data.ifi_iqdrops,
+ 0); // dropout not supported
+
+ if (!py_ifc_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, ifc_name, py_ifc_info))
+ goto error;
+ Py_DECREF(py_ifc_info);
+        }
+        // messages other than RTM_IFINFO2 are skipped
+    }
+
+ free(buf);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_ifc_info);
+ Py_DECREF(py_retdict);
+ if (buf != NULL)
+ free(buf);
+ return NULL;
+}
+
+
+/*
+ * Return a Python dict of tuples for disk I/O information
+ */
+static PyObject *
+psutil_disk_io_counters(PyObject *self, PyObject *args)
+{
+ CFDictionaryRef parent_dict;
+ CFDictionaryRef props_dict;
+ CFDictionaryRef stats_dict;
+ io_registry_entry_t parent;
+ io_registry_entry_t disk;
+ io_iterator_t disk_list;
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_disk_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+
+ // Get list of disks
+ if (IOServiceGetMatchingServices(kIOMasterPortDefault,
+ IOServiceMatching(kIOMediaClass),
+ &disk_list) != kIOReturnSuccess) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "unable to get the list of disks.");
+ goto error;
+ }
+
+ // Iterate over disks
+ while ((disk = IOIteratorNext(disk_list)) != 0) {
+ py_disk_info = NULL;
+ parent_dict = NULL;
+ props_dict = NULL;
+ stats_dict = NULL;
+
+ if (IORegistryEntryGetParentEntry(disk, kIOServicePlane, &parent)
+ != kIOReturnSuccess) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "unable to get the disk's parent.");
+ IOObjectRelease(disk);
+ goto error;
+ }
+
+ if (IOObjectConformsTo(parent, "IOBlockStorageDriver")) {
+ if (IORegistryEntryCreateCFProperties(
+ disk,
+ (CFMutableDictionaryRef *) &parent_dict,
+ kCFAllocatorDefault,
+ kNilOptions
+ ) != kIOReturnSuccess)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "unable to get the parent's properties.");
+ IOObjectRelease(disk);
+ IOObjectRelease(parent);
+ goto error;
+ }
+
+ if (IORegistryEntryCreateCFProperties(
+ parent,
+ (CFMutableDictionaryRef *) &props_dict,
+ kCFAllocatorDefault,
+ kNilOptions
+ ) != kIOReturnSuccess)
+ {
+ PyErr_SetString(PyExc_RuntimeError,
+ "unable to get the disk properties.");
+                // props_dict was not created on this failure path;
+                // release parent_dict instead
+                CFRelease(parent_dict);
+ IOObjectRelease(disk);
+ IOObjectRelease(parent);
+ goto error;
+ }
+
+ const int kMaxDiskNameSize = 64;
+ CFStringRef disk_name_ref = (CFStringRef)CFDictionaryGetValue(
+ parent_dict, CFSTR(kIOBSDNameKey));
+ char disk_name[kMaxDiskNameSize];
+
+ CFStringGetCString(disk_name_ref,
+ disk_name,
+ kMaxDiskNameSize,
+ CFStringGetSystemEncoding());
+
+ stats_dict = (CFDictionaryRef)CFDictionaryGetValue(
+ props_dict, CFSTR(kIOBlockStorageDriverStatisticsKey));
+
+            if (stats_dict == NULL) {
+                PyErr_SetString(PyExc_RuntimeError,
+                                "Unable to get disk stats.");
+                // release everything acquired for this disk before bailing
+                CFRelease(parent_dict);
+                CFRelease(props_dict);
+                IOObjectRelease(disk);
+                IOObjectRelease(parent);
+                goto error;
+            }
+
+ CFNumberRef number;
+ int64_t reads = 0;
+ int64_t writes = 0;
+ int64_t read_bytes = 0;
+ int64_t write_bytes = 0;
+ int64_t read_time = 0;
+ int64_t write_time = 0;
+
+ // Get disk reads/writes
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsReadsKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &reads);
+ }
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsWritesKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &writes);
+ }
+
+ // Get disk bytes read/written
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsBytesReadKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &read_bytes);
+ }
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsBytesWrittenKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &write_bytes);
+ }
+
+ // Get disk time spent reading/writing (nanoseconds)
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsTotalReadTimeKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &read_time);
+ }
+ if ((number = (CFNumberRef)CFDictionaryGetValue(
+ stats_dict,
+ CFSTR(kIOBlockStorageDriverStatisticsTotalWriteTimeKey))))
+ {
+ CFNumberGetValue(number, kCFNumberSInt64Type, &write_time);
+ }
+
+ // Read/Write time on OS X comes back in nanoseconds and in psutil
+ // we've standardized on milliseconds so do the conversion.
+ py_disk_info = Py_BuildValue(
+ "(KKKKKK)",
+ reads,
+ writes,
+ read_bytes,
+ write_bytes,
+ read_time / 1000 / 1000,
+ write_time / 1000 / 1000);
+ if (!py_disk_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, disk_name, py_disk_info))
+ goto error;
+ Py_DECREF(py_disk_info);
+
+ CFRelease(parent_dict);
+ IOObjectRelease(parent);
+ CFRelease(props_dict);
+ IOObjectRelease(disk);
+ }
+ }
+
+    IOObjectRelease(disk_list);
+
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_disk_info);
+ Py_DECREF(py_retdict);
+ return NULL;
+}
+
+
+/*
+ * Return currently connected users as a list of tuples.
+ */
+static PyObject *
+psutil_users(PyObject *self, PyObject *args)
+{
+ struct utmpx *utx;
+ PyObject *ret_list = PyList_New(0);
+ PyObject *tuple = NULL;
+
+ if (ret_list == NULL)
+ return NULL;
+ while ((utx = getutxent()) != NULL) {
+ if (utx->ut_type != USER_PROCESS)
+ continue;
+ tuple = Py_BuildValue(
+ "(sssf)",
+ utx->ut_user, // username
+ utx->ut_line, // tty
+ utx->ut_host, // hostname
+ (float)utx->ut_tv.tv_sec // start time
+ );
+ if (!tuple) {
+ endutxent();
+ goto error;
+ }
+ if (PyList_Append(ret_list, tuple)) {
+ endutxent();
+ goto error;
+ }
+ Py_DECREF(tuple);
+ }
+
+ endutxent();
+ return ret_list;
+
+error:
+ Py_XDECREF(tuple);
+ Py_DECREF(ret_list);
+ return NULL;
+}
+
+
+/*
+ * Define the psutil C module methods and initialize the module.
+ */
+static PyMethodDef
+PsutilMethods[] =
+{
+ // --- per-process functions
+
+ {"proc_name", psutil_proc_name, METH_VARARGS,
+ "Return process name"},
+ {"proc_cmdline", psutil_proc_cmdline, METH_VARARGS,
+ "Return process cmdline as a list of cmdline arguments"},
+ {"proc_exe", psutil_proc_exe, METH_VARARGS,
+ "Return path of the process executable"},
+ {"proc_cwd", psutil_proc_cwd, METH_VARARGS,
+ "Return process current working directory."},
+ {"proc_ppid", psutil_proc_ppid, METH_VARARGS,
+ "Return process ppid as an integer"},
+ {"proc_uids", psutil_proc_uids, METH_VARARGS,
+ "Return process real user id as an integer"},
+ {"proc_gids", psutil_proc_gids, METH_VARARGS,
+ "Return process real group id as an integer"},
+ {"proc_cpu_times", psutil_proc_cpu_times, METH_VARARGS,
+ "Return tuple of user/kern time for the given PID"},
+ {"proc_create_time", psutil_proc_create_time, METH_VARARGS,
+ "Return a float indicating the process create time expressed in "
+ "seconds since the epoch"},
+ {"proc_memory_info", psutil_proc_memory_info, METH_VARARGS,
+ "Return memory information about a process"},
+ {"proc_num_threads", psutil_proc_num_threads, METH_VARARGS,
+ "Return number of threads used by process"},
+ {"proc_status", psutil_proc_status, METH_VARARGS,
+ "Return process status as an integer"},
+ {"proc_threads", psutil_proc_threads, METH_VARARGS,
+ "Return process threads as a list of tuples"},
+ {"proc_open_files", psutil_proc_open_files, METH_VARARGS,
+ "Return files opened by process as a list of tuples"},
+ {"proc_num_fds", psutil_proc_num_fds, METH_VARARGS,
+ "Return the number of fds opened by process."},
+ {"proc_num_ctx_switches", psutil_proc_num_ctx_switches, METH_VARARGS,
+ "Return the number of context switches performed by process"},
+ {"proc_connections", psutil_proc_connections, METH_VARARGS,
+ "Get process TCP and UDP connections as a list of tuples"},
+ {"proc_tty_nr", psutil_proc_tty_nr, METH_VARARGS,
+ "Return process tty number as an integer"},
+ {"proc_memory_maps", psutil_proc_memory_maps, METH_VARARGS,
+ "Return a list of tuples for every process's memory map"},
+
+ // --- system-related functions
+
+ {"pids", psutil_pids, METH_VARARGS,
+ "Returns a list of PIDs currently running on the system"},
+ {"cpu_count_logical", psutil_cpu_count_logical, METH_VARARGS,
+ "Return number of logical CPUs on the system"},
+ {"cpu_count_phys", psutil_cpu_count_phys, METH_VARARGS,
+ "Return number of physical CPUs on the system"},
+ {"virtual_mem", psutil_virtual_mem, METH_VARARGS,
+ "Return system virtual memory stats"},
+ {"swap_mem", psutil_swap_mem, METH_VARARGS,
+ "Return stats about swap memory, in bytes"},
+ {"cpu_times", psutil_cpu_times, METH_VARARGS,
+ "Return system cpu times as a tuple (user, system, nice, idle, irc)"},
+ {"per_cpu_times", psutil_per_cpu_times, METH_VARARGS,
+ "Return system per-cpu times as a list of tuples"},
+ {"boot_time", psutil_boot_time, METH_VARARGS,
+ "Return the system boot time expressed in seconds since the epoch."},
+ {"disk_partitions", psutil_disk_partitions, METH_VARARGS,
+ "Return a list of tuples including device, mount point and "
+ "fs type for all partitions mounted on the system."},
+ {"net_io_counters", psutil_net_io_counters, METH_VARARGS,
+ "Return dict of tuples of networks I/O information."},
+ {"disk_io_counters", psutil_disk_io_counters, METH_VARARGS,
+ "Return dict of tuples of disks I/O information."},
+ {"users", psutil_users, METH_VARARGS,
+ "Return currently connected users as a list of tuples"},
+
+ {NULL, NULL, 0, NULL}
+};
+
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+#define GETSTATE(m) (&_state)
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int
+psutil_osx_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int
+psutil_osx_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_osx",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_osx_traverse,
+ psutil_osx_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_osx(void)
+
+#else
+#define INITERROR return
+
+void
+init_psutil_osx(void)
+#endif
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_osx", PsutilMethods);
+#endif
+
+    // bail out early: the PyModule_AddIntConstant() calls below would
+    // dereference a NULL module
+    if (module == NULL)
+        INITERROR;
+
+    PyModule_AddIntConstant(module, "version", PSUTIL_VERSION);
+ // process status constants, defined in:
+ // http://fxr.watson.org/fxr/source/bsd/sys/proc.h?v=xnu-792.6.70#L149
+ PyModule_AddIntConstant(module, "SIDL", SIDL);
+ PyModule_AddIntConstant(module, "SRUN", SRUN);
+ PyModule_AddIntConstant(module, "SSLEEP", SSLEEP);
+ PyModule_AddIntConstant(module, "SSTOP", SSTOP);
+ PyModule_AddIntConstant(module, "SZOMB", SZOMB);
+ // connection status constants
+ PyModule_AddIntConstant(module, "TCPS_CLOSED", TCPS_CLOSED);
+ PyModule_AddIntConstant(module, "TCPS_CLOSING", TCPS_CLOSING);
+ PyModule_AddIntConstant(module, "TCPS_CLOSE_WAIT", TCPS_CLOSE_WAIT);
+ PyModule_AddIntConstant(module, "TCPS_LISTEN", TCPS_LISTEN);
+ PyModule_AddIntConstant(module, "TCPS_ESTABLISHED", TCPS_ESTABLISHED);
+ PyModule_AddIntConstant(module, "TCPS_SYN_SENT", TCPS_SYN_SENT);
+ PyModule_AddIntConstant(module, "TCPS_SYN_RECEIVED", TCPS_SYN_RECEIVED);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_1", TCPS_FIN_WAIT_1);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_2", TCPS_FIN_WAIT_2);
+ PyModule_AddIntConstant(module, "TCPS_LAST_ACK", TCPS_LAST_ACK);
+ PyModule_AddIntConstant(module, "TCPS_TIME_WAIT", TCPS_TIME_WAIT);
+ PyModule_AddIntConstant(module, "PSUTIL_CONN_NONE", PSUTIL_CONN_NONE);
+
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_osx.h b/python/psutil/psutil/_psutil_osx.h
new file mode 100644
index 000000000..907a8e537
--- /dev/null
+++ b/python/psutil/psutil/_psutil_osx.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+// --- per-process functions
+static PyObject* psutil_proc_cmdline(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_connections(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_create_time(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cwd(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_exe(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_gids(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_maps(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_name(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_fds(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_open_files(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_ppid(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_status(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_tty_nr(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_uids(PyObject* self, PyObject* args);
+
+// --- system-related functions
+static PyObject* psutil_boot_time(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_logical(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_phys(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_net_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_per_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_pids(PyObject* self, PyObject* args);
+static PyObject* psutil_swap_mem(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_virtual_mem(PyObject* self, PyObject* args);
diff --git a/python/psutil/psutil/_psutil_posix.c b/python/psutil/psutil/_psutil_posix.c
new file mode 100644
index 000000000..183dab0e1
--- /dev/null
+++ b/python/psutil/psutil/_psutil_posix.c
@@ -0,0 +1,531 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Functions specific to all POSIX compliant platforms.
+ */
+
+#include <Python.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <sys/resource.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <ifaddrs.h>
+
+#ifdef __linux
+#include <netdb.h>
+#include <linux/if_packet.h>
+#endif // end linux
+
+#if defined(__FreeBSD__) || defined(__APPLE__)
+#include <netdb.h>
+#include <netinet/in.h>
+#include <net/if_dl.h>
+#endif
+
+#if defined(__sun)
+#include <netdb.h>
+#endif
+
+#include "_psutil_posix.h"
+
+
+/*
+ * Given a PID return process priority as a Python integer.
+ */
+static PyObject *
+psutil_posix_getpriority(PyObject *self, PyObject *args)
+{
+ long pid;
+ int priority;
+ errno = 0;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ priority = getpriority(PRIO_PROCESS, pid);
+ if (errno != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ return Py_BuildValue("i", priority);
+}
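+
+// For illustration: getpriority() can legitimately return -1 as a
+// valid priority, so the reset-errno-then-check idiom above is the
+// only reliable way to detect failure, e.g. (sketch):
+//
+// errno = 0;
+// int prio = getpriority(PRIO_PROCESS, pid);
+// if (errno != 0)
+// ... // a real error, not a priority of -1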
+
+
+/*
+ * Given a PID and a value change process priority.
+ */
+static PyObject *
+psutil_posix_setpriority(PyObject *self, PyObject *args)
+{
+ long pid;
+ int priority;
+ int retval;
+
+ if (! PyArg_ParseTuple(args, "li", &pid, &priority))
+ return NULL;
+ retval = setpriority(PRIO_PROCESS, pid, priority);
+ if (retval == -1)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ Py_RETURN_NONE;
+}
+
+
+/*
+ * Translate a sockaddr struct into a Python string.
+ * Return None if the address family is not AF_INET, AF_INET6,
+ * AF_PACKET (Linux) or AF_LINK (OSX / FreeBSD).
+ */
+static PyObject *
+psutil_convert_ipaddr(struct sockaddr *addr, int family)
+{
+ char buf[NI_MAXHOST];
+ int err;
+ int addrlen;
+ size_t n;
+ size_t len = 0;
+ const char *data = NULL;
+ char *ptr;
+
+ if (addr == NULL) {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+ else if (family == AF_INET || family == AF_INET6) {
+ if (family == AF_INET)
+ addrlen = sizeof(struct sockaddr_in);
+ else
+ addrlen = sizeof(struct sockaddr_in6);
+ err = getnameinfo(addr, addrlen, buf, sizeof(buf), NULL, 0,
+ NI_NUMERICHOST);
+ if (err != 0) {
+ // XXX we get here on FreeBSD when processing 'lo' / AF_INET6
+ // broadcast. Not sure what to do other than returning None.
+ // ifconfig does not show anything BTW.
+ //PyErr_Format(PyExc_RuntimeError, gai_strerror(err));
+ //return NULL;
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+ else {
+ return Py_BuildValue("s", buf);
+ }
+ }
+#ifdef __linux
+ else if (family == AF_PACKET) {
+ struct sockaddr_ll *lladdr = (struct sockaddr_ll *)addr;
+ len = lladdr->sll_halen;
+ data = (const char *)lladdr->sll_addr;
+ }
+#endif
+#if defined(__FreeBSD__) || defined(__APPLE__)
+ else if (addr->sa_family == AF_LINK) {
+ // Note: prior to Python 3.4 the socket module does not expose
+ // AF_LINK, so we handle it here.
+ struct sockaddr_dl *dladdr = (struct sockaddr_dl *)addr;
+ len = dladdr->sdl_alen;
+ data = LLADDR(dladdr);
+ }
+#endif
+ else {
+ // unknown family
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+
+ // AF_PACKET or AF_LINK
+ if (len > 0) {
+ ptr = buf;
+ for (n = 0; n < len; ++n) {
+ sprintf(ptr, "%02x:", data[n] & 0xff);
+ ptr += 3;
+ }
+ *--ptr = '\0';
+ return Py_BuildValue("s", buf);
+ }
+ else {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+}
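+
+// For illustration: with a 6-byte link-level address each sprintf()
+// above emits three chars ("%02x" plus ':') and the final
+// "*--ptr = '\0'" overwrites the trailing colon. A standalone sketch
+// with hypothetical data:
+//
+// const unsigned char mac[6] = {0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff};
+// char out[19], *q = out;
+// int i;
+// for (i = 0; i < 6; i++, q += 3)
+// sprintf(q, "%02x:", mac[i]);
+// *--q = '\0'; // out == "aa:bb:cc:dd:ee:ff"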
+
+
+/*
+ * Return NIC information, a la ifconfig, as a list of tuples.
+ * TODO: on Solaris we won't get any MAC address.
+ */
+static PyObject*
+psutil_net_if_addrs(PyObject* self, PyObject* args)
+{
+ struct ifaddrs *ifaddr = NULL, *ifa;
+ int family;
+
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+ PyObject *py_address = NULL;
+ PyObject *py_netmask = NULL;
+ PyObject *py_broadcast = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+ if (getifaddrs(&ifaddr) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (ifa = ifaddr; ifa != NULL; ifa = ifa->ifa_next) {
+ if (!ifa->ifa_addr)
+ continue;
+ family = ifa->ifa_addr->sa_family;
+ py_address = psutil_convert_ipaddr(ifa->ifa_addr, family);
+ // If the primary address can't be determined just skip it.
+ // I've never seen this happen on Linux but I did on FreeBSD.
+ if (py_address == Py_None)
+ continue;
+ if (py_address == NULL)
+ goto error;
+ py_netmask = psutil_convert_ipaddr(ifa->ifa_netmask, family);
+ if (py_netmask == NULL)
+ goto error;
+#ifdef __linux
+ py_broadcast = psutil_convert_ipaddr(ifa->ifa_ifu.ifu_broadaddr, family);
+#else
+ py_broadcast = psutil_convert_ipaddr(ifa->ifa_broadaddr, family);
+#endif
+ if (py_broadcast == NULL)
+ goto error;
+ py_tuple = Py_BuildValue(
+ "(siOOO)",
+ ifa->ifa_name,
+ family,
+ py_address,
+ py_netmask,
+ py_broadcast
+ );
+
+ if (! py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ Py_DECREF(py_address);
+ Py_DECREF(py_netmask);
+ Py_DECREF(py_broadcast);
+ }
+
+ freeifaddrs(ifaddr);
+ return py_retlist;
+
+error:
+ if (ifaddr != NULL)
+ freeifaddrs(ifaddr);
+ Py_DECREF(py_retlist);
+ Py_XDECREF(py_tuple);
+ Py_XDECREF(py_address);
+ Py_XDECREF(py_netmask);
+ Py_XDECREF(py_broadcast);
+ return NULL;
+}
+
+
+/*
+ * net_if_stats() implementation. This is here because it is common
+ * to both OSX and FreeBSD and I didn't know where else to put it.
+ */
+#if defined(__FreeBSD__) || defined(__APPLE__)
+
+#include <sys/sockio.h>
+#include <net/if_media.h>
+#include <net/if.h>
+
+int psutil_get_nic_speed(int ifm_active) {
+ // Determine NIC speed. Taken from:
+ // http://www.i-scream.org/libstatgrab/
+ // Assuming only ETHER devices
+ switch(IFM_TYPE(ifm_active)) {
+ case IFM_ETHER:
+ switch(IFM_SUBTYPE(ifm_active)) {
+#if defined(IFM_HPNA_1) && ((!defined(IFM_10G_LR)) \
+ || (IFM_10G_LR != IFM_HPNA_1))
+ // HomePNA 1.0 (1Mb/s)
+ case(IFM_HPNA_1):
+ return 1;
+#endif
+ // 10 Mbit
+ case(IFM_10_T): // 10BaseT - RJ45
+ case(IFM_10_2): // 10Base2 - Thinnet
+ case(IFM_10_5): // 10Base5 - AUI
+ case(IFM_10_STP): // 10BaseT over shielded TP
+ case(IFM_10_FL): // 10baseFL - Fiber
+ return 10;
+ // 100 Mbit
+ case(IFM_100_TX): // 100BaseTX - RJ45
+ case(IFM_100_FX): // 100BaseFX - Fiber
+ case(IFM_100_T4): // 100BaseT4 - 4 pair cat 3
+ case(IFM_100_VG): // 100VG-AnyLAN
+ case(IFM_100_T2): // 100BaseT2
+ return 100;
+ // 1000 Mbit
+ case(IFM_1000_SX): // 1000BaseSX - multi-mode fiber
+ case(IFM_1000_LX): // 1000baseLX - single-mode fiber
+ case(IFM_1000_CX): // 1000baseCX - 150ohm STP
+#if defined(IFM_1000_TX) && !defined(OPENBSD)
+ // FreeBSD 4 and others (but NOT OpenBSD)?
+ case(IFM_1000_TX):
+#endif
+#ifdef IFM_1000_FX
+ case(IFM_1000_FX):
+#endif
+#ifdef IFM_1000_T
+ case(IFM_1000_T):
+#endif
+ return 1000;
+#if defined(IFM_10G_SR) || defined(IFM_10G_LR) || defined(IFM_10G_CX4) \
+ || defined(IFM_10G_T)
+#ifdef IFM_10G_SR
+ case(IFM_10G_SR):
+#endif
+#ifdef IFM_10G_LR
+ case(IFM_10G_LR):
+#endif
+#ifdef IFM_10G_CX4
+ case(IFM_10G_CX4):
+#endif
+#ifdef IFM_10G_TWINAX
+ case(IFM_10G_TWINAX):
+#endif
+#ifdef IFM_10G_TWINAX_LONG
+ case(IFM_10G_TWINAX_LONG):
+#endif
+#ifdef IFM_10G_T
+ case(IFM_10G_T):
+#endif
+ return 10000;
+#endif
+#if defined(IFM_2500_SX)
+#ifdef IFM_2500_SX
+ case(IFM_2500_SX):
+#endif
+ return 2500;
+#endif // any 2.5GBit stuff...
+ // We don't know what it is
+ default:
+ return 0;
+ }
+ break;
+
+#ifdef IFM_TOKEN
+ case IFM_TOKEN:
+ switch(IFM_SUBTYPE(ifm_active)) {
+ case IFM_TOK_STP4: // Shielded twisted pair 4m - DB9
+ case IFM_TOK_UTP4: // Unshielded twisted pair 4m - RJ45
+ return 4;
+ case IFM_TOK_STP16: // Shielded twisted pair 16m - DB9
+ case IFM_TOK_UTP16: // Unshielded twisted pair 16m - RJ45
+ return 16;
+#if defined(IFM_TOK_STP100) || defined(IFM_TOK_UTP100)
+#ifdef IFM_TOK_STP100
+ case IFM_TOK_STP100: // Shielded twisted pair 100m - DB9
+#endif
+#ifdef IFM_TOK_UTP100
+ case IFM_TOK_UTP100: // Unshielded twisted pair 100m - RJ45
+#endif
+ return 100;
+#endif
+ // We don't know what it is
+ default:
+ return 0;
+ }
+ break;
+#endif
+
+#ifdef IFM_FDDI
+ case IFM_FDDI:
+ switch(IFM_SUBTYPE(ifm_active)) {
+ // We don't know what it is
+ default:
+ return 0;
+ }
+ break;
+#endif
+ case IFM_IEEE80211:
+ switch(IFM_SUBTYPE(ifm_active)) {
+ case IFM_IEEE80211_FH1: // Frequency Hopping 1Mbps
+ case IFM_IEEE80211_DS1: // Direct Sequence 1Mbps
+ return 1;
+ case IFM_IEEE80211_FH2: // Frequency Hopping 2Mbps
+ case IFM_IEEE80211_DS2: // Direct Sequence 2Mbps
+ return 2;
+ case IFM_IEEE80211_DS5: // Direct Sequence 5Mbps
+ return 5;
+ case IFM_IEEE80211_DS11: // Direct Sequence 11Mbps
+ return 11;
+ case IFM_IEEE80211_DS22: // Direct Sequence 22Mbps
+ return 22;
+ // We don't know what it is
+ default:
+ return 0;
+ }
+ break;
+
+ default:
+ return 0;
+ }
+}
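+
+// For illustration, assuming a gigabit ethernet NIC whose active media
+// word satisfies IFM_TYPE() == IFM_ETHER and IFM_SUBTYPE() == IFM_1000_T,
+// the switch above returns 1000 (Mbit/s); unknown media fall back to 0:
+//
+// struct ifmediareq m; // filled by ioctl(sock, SIOCGIFMEDIA, &m)
+// int mbits = psutil_get_nic_speed(m.ifm_active); // e.g. 1000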
+
+
+/*
+ * Return stats about a particular network interface.
+ * References:
+ * http://www.i-scream.org/libstatgrab/
+ */
+static PyObject *
+psutil_net_if_stats(PyObject *self, PyObject *args)
+{
+ char *nic_name;
+ int sock = 0;
+ int ret;
+ int duplex;
+ int speed;
+ int mtu;
+ struct ifreq ifr;
+ struct ifmediareq ifmed;
+
+ PyObject *py_is_up = NULL;
+ PyObject *py_ret = NULL;
+
+ if (! PyArg_ParseTuple(args, "s", &nic_name))
+ return NULL;
+
+ sock = socket(AF_INET, SOCK_DGRAM, 0);
+ if (sock == -1)
+ goto error;
+ strncpy(ifr.ifr_name, nic_name, sizeof(ifr.ifr_name));
+
+ // is up?
+ ret = ioctl(sock, SIOCGIFFLAGS, &ifr);
+ if (ret == -1)
+ goto error;
+ if ((ifr.ifr_flags & IFF_UP) != 0)
+ py_is_up = Py_True;
+ else
+ py_is_up = Py_False;
+ Py_INCREF(py_is_up);
+
+ // MTU
+ ret = ioctl(sock, SIOCGIFMTU, &ifr);
+ if (ret == -1)
+ goto error;
+ mtu = ifr.ifr_mtu;
+
+ // speed / duplex
+ memset(&ifmed, 0, sizeof(struct ifmediareq));
+ strlcpy(ifmed.ifm_name, nic_name, sizeof(ifmed.ifm_name));
+ ret = ioctl(sock, SIOCGIFMEDIA, (caddr_t)&ifmed);
+ if (ret == -1) {
+ speed = 0;
+ duplex = 0;
+ }
+ else {
+ speed = psutil_get_nic_speed(ifmed.ifm_active);
+ if ((ifmed.ifm_active | IFM_FDX) == ifmed.ifm_active)
+ duplex = 2;
+ else if ((ifmed.ifm_active | IFM_HDX) == ifmed.ifm_active)
+ duplex = 1;
+ else
+ duplex = 0;
+ }
+
+ close(sock);
+
+ py_ret = Py_BuildValue("[Oiii]", py_is_up, duplex, speed, mtu);
+ Py_DECREF(py_is_up);
+ return py_ret;
+
+error:
+ Py_XDECREF(py_is_up);
+ if (sock != 0)
+ close(sock);
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+}
+#endif // net_if_stats() implementation
+
+
+/*
+ * define the psutil C module methods and initialize the module.
+ */
+static PyMethodDef
+PsutilMethods[] =
+{
+ {"getpriority", psutil_posix_getpriority, METH_VARARGS,
+ "Return process priority"},
+ {"setpriority", psutil_posix_setpriority, METH_VARARGS,
+ "Set process priority"},
+ {"net_if_addrs", psutil_net_if_addrs, METH_VARARGS,
+ "Retrieve NICs information"},
+#if defined(__FreeBSD__) || defined(__APPLE__)
+ {"net_if_stats", psutil_net_if_stats, METH_VARARGS,
+ "Return NIC stats."},
+#endif
+ {NULL, NULL, 0, NULL}
+};
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+#define GETSTATE(m) (&_state)
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int
+psutil_posix_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int
+psutil_posix_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_posix",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_posix_traverse,
+ psutil_posix_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_posix(void)
+
+#else
+#define INITERROR return
+
+void init_psutil_posix(void)
+#endif
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_posix", PsutilMethods);
+#endif
+
+#if defined(__FreeBSD__) || defined(__APPLE__) || defined(__sun)
+ PyModule_AddIntConstant(module, "AF_LINK", AF_LINK);
+#endif
+
+ if (module == NULL)
+ INITERROR;
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_posix.h b/python/psutil/psutil/_psutil_posix.h
new file mode 100644
index 000000000..bbe6fc5ad
--- /dev/null
+++ b/python/psutil/psutil/_psutil_posix.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+static PyObject* psutil_net_if_addrs(PyObject* self, PyObject* args);
+static PyObject* psutil_posix_getpriority(PyObject* self, PyObject* args);
+static PyObject* psutil_posix_setpriority(PyObject* self, PyObject* args);
+
+#if defined(__FreeBSD__) || defined(__APPLE__)
+static PyObject* psutil_net_if_stats(PyObject* self, PyObject* args);
+#endif
diff --git a/python/psutil/psutil/_psutil_sunos.c b/python/psutil/psutil/_psutil_sunos.c
new file mode 100644
index 000000000..0cb6978f2
--- /dev/null
+++ b/python/psutil/psutil/_psutil_sunos.c
@@ -0,0 +1,1389 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Functions specific to Sun OS Solaris platforms.
+ *
+ * Thanks to Justin Venus, who originally wrote a considerable part of
+ * this in Cython, which I later translated into C.
+ */
+
+
+#include <Python.h>
+
+// fix for "Cannot use procfs in the large file compilation environment"
+// error, see:
+// http://sourceware.org/ml/gdb-patches/2010-11/msg00336.html
+#undef _FILE_OFFSET_BITS
+#define _STRUCTURED_PROC 1
+
+// fix compilation issue on SunOS 5.10, see:
+// https://github.com/giampaolo/psutil/issues/421
+#define NEW_MIB_COMPLIANT
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/proc.h>
+#include <sys/swap.h>
+#include <sys/sysinfo.h>
+#include <sys/mntent.h> // for MNTTAB
+#include <sys/mnttab.h>
+#include <sys/procfs.h>
+#include <sys/sockio.h>
+#include <sys/socket.h>
+#include <fcntl.h>
+#include <utmpx.h>
+#include <kstat.h>
+#include <sys/ioctl.h>
+#include <sys/tihdr.h>
+#include <stropts.h>
+#include <inet/tcp.h>
+#include <arpa/inet.h>
+#include <net/if.h>
+
+#include "_psutil_sunos.h"
+
+
+#define TV2DOUBLE(t) (((t).tv_nsec * 0.000000001) + (t).tv_sec)
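+
+// For illustration: TV2DOUBLE() folds a timestruc_t into fractional
+// seconds, e.g. {tv_sec = 2, tv_nsec = 500000000} becomes 2.5:
+//
+// timestruc_t t = {2, 500000000};
+// double secs = TV2DOUBLE(t); // 2.5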
+
+/*
+ * Read a file's content and fill a C structure with it.
+ */
+int
+psutil_file_to_struct(char *path, void *fstruct, size_t size)
+{
+ int fd;
+ ssize_t nbytes;
+ fd = open(path, O_RDONLY);
+ if (fd == -1) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ return 0;
+ }
+ nbytes = read(fd, fstruct, size);
+ if (nbytes <= 0) {
+ close(fd);
+ PyErr_SetFromErrno(PyExc_OSError);
+ return 0;
+ }
+ if (nbytes != size) {
+ close(fd);
+ PyErr_SetString(PyExc_RuntimeError, "structure size mismatch");
+ return 0;
+ }
+ close(fd);
+ return nbytes;
+}
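+
+/*
+ * For illustration, a typical caller (see psutil_proc_basic_info()
+ * below) reads a fixed-size procfs structure like this:
+ *
+ * psinfo_t info;
+ * char path[100];
+ * sprintf(path, "/proc/%i/psinfo", pid);
+ * if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ * return NULL; // Python exception already set by the helper
+ */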
+
+
+/*
+ * Return process ppid, rss, vms, ctime, nice, nthreads, status and tty
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_basic_info(PyObject *self, PyObject *args)
+{
+ int pid;
+ char path[100];
+ psinfo_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/psinfo", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ return Py_BuildValue("ikkdiiik",
+ info.pr_ppid, // parent pid
+ info.pr_rssize, // rss
+ info.pr_size, // vms
+ TV2DOUBLE(info.pr_start), // create time
+ info.pr_lwp.pr_nice, // nice
+ info.pr_nlwp, // no. of threads
+ info.pr_lwp.pr_state, // status code
+ info.pr_ttydev // tty nr
+ );
+}
+
+
+/*
+ * Return process name and args as a Python tuple.
+ */
+static PyObject *
+psutil_proc_name_and_args(PyObject *self, PyObject *args)
+{
+ int pid;
+ char path[100];
+ psinfo_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/psinfo", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ return Py_BuildValue("ss", info.pr_fname, info.pr_psargs);
+}
+
+
+/*
+ * Return process user and system CPU times as a Python tuple.
+ */
+static PyObject *
+psutil_proc_cpu_times(PyObject *self, PyObject *args)
+{
+ int pid;
+ char path[100];
+ pstatus_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/status", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ // results are more precise than os.times()
+ return Py_BuildValue("dd",
+ TV2DOUBLE(info.pr_utime),
+ TV2DOUBLE(info.pr_stime));
+}
+
+
+/*
+ * Return process uids/gids as a Python tuple.
+ */
+static PyObject *
+psutil_proc_cred(PyObject *self, PyObject *args)
+{
+ int pid;
+ char path[100];
+ prcred_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/cred", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ return Py_BuildValue("iiiiii",
+ info.pr_ruid, info.pr_euid, info.pr_suid,
+ info.pr_rgid, info.pr_egid, info.pr_sgid);
+}
+
+
+/*
+ * Return the number of voluntary and involuntary context switches
+ * performed by the process as a Python tuple.
+ */
+static PyObject *
+psutil_proc_num_ctx_switches(PyObject *self, PyObject *args)
+{
+ int pid;
+ char path[100];
+ prusage_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/usage", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ return Py_BuildValue("kk", info.pr_vctx, info.pr_ictx);
+}
+
+
+/*
+ * Process IO counters.
+ *
+ * Commented out and left here as a reminder. Apparently we cannot
+ * retrieve process IO stats because:
+ * - 'pr_ioch' is a sum of chars read and written, with no distinction
+ * - 'pr_inblk' and 'pr_oublk', which should be the number of bytes
+ * read and written, hardly increase and according to:
+ * http://www.brendangregg.com/Perf/paper_diskubyp1.pdf
+ * ...they should be meaningless anyway.
+ *
+static PyObject*
+proc_io_counters(PyObject* self, PyObject* args)
+{
+ int pid;
+ char path[100];
+ prusage_t info;
+
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ return NULL;
+ sprintf(path, "/proc/%i/usage", pid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+
+ // On Solaris we only have 'pr_ioch' which accounts for bytes read
+ // *and* written.
+ // 'pr_inblk' and 'pr_oublk' should be expressed in blocks of
+ // 8KB according to:
+ // http://www.brendangregg.com/Perf/paper_diskubyp1.pdf (pag. 8)
+ return Py_BuildValue("kkkk",
+ info.pr_ioch,
+ info.pr_ioch,
+ info.pr_inblk,
+ info.pr_oublk);
+}
+ */
+
+
+/*
+ * Return information about a given process thread.
+ */
+static PyObject *
+psutil_proc_query_thread(PyObject *self, PyObject *args)
+{
+ int pid, tid;
+ char path[100];
+ lwpstatus_t info;
+
+ if (! PyArg_ParseTuple(args, "ii", &pid, &tid))
+ return NULL;
+ sprintf(path, "/proc/%i/lwp/%i/lwpstatus", pid, tid);
+ if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+ return NULL;
+ return Py_BuildValue("dd",
+ TV2DOUBLE(info.pr_utime),
+ TV2DOUBLE(info.pr_stime));
+}
+
+
+/*
+ * Return information about system virtual memory.
+ */
+static PyObject *
+psutil_swap_mem(PyObject *self, PyObject *args)
+{
+// XXX (arghhh!)
+// total/free swap mem: commented out because for some reason I can't
+// manage to get the same results shown by "swap -l", even though the
+// code below is exactly the same as:
+// http://cvs.opensolaris.org/source/xref/onnv/onnv-gate/usr/src/
+// cmd/swap/swap.c
+// We're going to parse "swap -l" output from Python (sigh!)
+
+/*
+ struct swaptable *st;
+ struct swapent *swapent;
+ int i;
+ struct stat64 statbuf;
+ char *path;
+ char fullpath[MAXPATHLEN+1];
+ int num;
+
+ if ((num = swapctl(SC_GETNSWP, NULL)) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+ if (num == 0) {
+ PyErr_SetString(PyExc_RuntimeError, "no swap devices configured");
+ return NULL;
+ }
+ if ((st = malloc(num * sizeof(swapent_t) + sizeof (int))) == NULL) {
+ PyErr_SetString(PyExc_RuntimeError, "malloc failed");
+ return NULL;
+ }
+ if ((path = malloc(num * MAXPATHLEN)) == NULL) {
+ PyErr_SetString(PyExc_RuntimeError, "malloc failed");
+ return NULL;
+ }
+ swapent = st->swt_ent;
+ for (i = 0; i < num; i++, swapent++) {
+ swapent->ste_path = path;
+ path += MAXPATHLEN;
+ }
+ st->swt_n = num;
+ if ((num = swapctl(SC_LIST, st)) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+
+ swapent = st->swt_ent;
+ long t = 0, f = 0;
+ for (i = 0; i < num; i++, swapent++) {
+ int diskblks_per_page =(int)(sysconf(_SC_PAGESIZE) >> DEV_BSHIFT);
+ t += (long)swapent->ste_pages;
+ f += (long)swapent->ste_free;
+ }
+
+ free(st);
+ return Py_BuildValue("(kk)", t, f);
+*/
+
+ kstat_ctl_t *kc;
+ kstat_t *k;
+ cpu_stat_t *cpu;
+ int cpu_count = 0;
+ int flag = 0;
+ uint_t sin = 0;
+ uint_t sout = 0;
+
+ kc = kstat_open();
+ if (kc == NULL)
+ return PyErr_SetFromErrno(PyExc_OSError);
+
+ k = kc->kc_chain;
+ while (k != NULL) {
+ if ((strncmp(k->ks_name, "cpu_stat", 8) == 0) && \
+ (kstat_read(kc, k, NULL) != -1) )
+ {
+ flag = 1;
+ cpu = (cpu_stat_t *) k->ks_data;
+ sin += cpu->cpu_vminfo.pgswapin; // num pages swapped in
+ sout += cpu->cpu_vminfo.pgswapout; // num pages swapped out
+ }
+ cpu_count += 1;
+ k = k->ks_next;
+ }
+ kstat_close(kc);
+ if (!flag) {
+ PyErr_SetString(PyExc_RuntimeError, "no swap device was found");
+ return NULL;
+ }
+ return Py_BuildValue("(II)", sin, sout);
+}
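+
+/*
+ * For illustration, the kstat traversal idiom used above (and by the
+ * other kstat-based functions in this file) boils down to:
+ *
+ * kstat_ctl_t *kc = kstat_open();
+ * kstat_t *k;
+ * for (k = kc->kc_chain; k != NULL; k = k->ks_next)
+ * if (kstat_read(kc, k, NULL) != -1)
+ * ; // inspect k->ks_module / k->ks_name / k->ks_data
+ * kstat_close(kc);
+ */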
+
+
+/*
+ * Return users currently connected on the system.
+ */
+static PyObject *
+psutil_users(PyObject *self, PyObject *args)
+{
+ struct utmpx *ut;
+ PyObject *ret_list = PyList_New(0);
+ PyObject *tuple = NULL;
+ PyObject *user_proc = NULL;
+
+ if (ret_list == NULL)
+ return NULL;
+
+ while (NULL != (ut = getutxent())) {
+ if (ut->ut_type == USER_PROCESS)
+ user_proc = Py_True;
+ else
+ user_proc = Py_False;
+ tuple = Py_BuildValue(
+ "(sssfO)",
+ ut->ut_user, // username
+ ut->ut_line, // tty
+ ut->ut_host, // hostname
+ (float)ut->ut_tv.tv_sec, // tstamp
+ user_proc); // (bool) user process
+ if (tuple == NULL)
+ goto error;
+ if (PyList_Append(ret_list, tuple))
+ goto error;
+ Py_DECREF(tuple);
+ }
+ endutxent();
+
+ return ret_list;
+
+error:
+ Py_XDECREF(tuple);
+ Py_DECREF(ret_list);
+ if (ut != NULL)
+ endutxent();
+ return NULL;
+}
+
+
+/*
+ * Return disk mounted partitions as a list of tuples including device,
+ * mount point and filesystem type.
+ */
+static PyObject *
+psutil_disk_partitions(PyObject *self, PyObject *args)
+{
+ FILE *file;
+ struct mnttab mt;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ file = fopen(MNTTAB, "rb");
+ if (file == NULL) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ while (getmntent(file, &mt) == 0) {
+ py_tuple = Py_BuildValue(
+ "(ssss)",
+ mt.mnt_special, // device
+ mt.mnt_mountp, // mount point
+ mt.mnt_fstype, // fs type
+ mt.mnt_mntopts); // options
+ if (py_tuple == NULL)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+
+ }
+ fclose(file);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_retlist);
+ if (file != NULL)
+ fclose(file);
+ return NULL;
+}
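+
+/*
+ * For illustration, a hypothetical MNTTAB row such as
+ * /dev/dsk/c0t0d0s0 / ufs rw,intr,largefiles ...
+ * comes out of the loop above as the Python tuple
+ * ("/dev/dsk/c0t0d0s0", "/", "ufs", "rw,intr,largefiles").
+ */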
+
+
+/*
+ * Return system-wide CPU times.
+ */
+static PyObject *
+psutil_per_cpu_times(PyObject *self, PyObject *args)
+{
+ kstat_ctl_t *kc;
+ kstat_t *ksp;
+ cpu_stat_t cs;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_cputime = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ kc = kstat_open();
+ if (kc == NULL) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ for (ksp = kc->kc_chain; ksp != NULL; ksp = ksp->ks_next) {
+ if (strcmp(ksp->ks_module, "cpu_stat") == 0) {
+ if (kstat_read(kc, ksp, &cs) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ py_cputime = Py_BuildValue("ffff",
+ (float)cs.cpu_sysinfo.cpu[CPU_USER],
+ (float)cs.cpu_sysinfo.cpu[CPU_KERNEL],
+ (float)cs.cpu_sysinfo.cpu[CPU_IDLE],
+ (float)cs.cpu_sysinfo.cpu[CPU_WAIT]);
+ if (py_cputime == NULL)
+ goto error;
+ if (PyList_Append(py_retlist, py_cputime))
+ goto error;
+ Py_DECREF(py_cputime);
+ py_cputime = NULL;
+ }
+ }
+
+ kstat_close(kc);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_cputime);
+ Py_DECREF(py_retlist);
+ if (kc != NULL)
+ kstat_close(kc);
+ return NULL;
+}
+
+
+/*
+ * Return disk IO statistics.
+ */
+static PyObject *
+psutil_disk_io_counters(PyObject *self, PyObject *args)
+{
+ kstat_ctl_t *kc;
+ kstat_t *ksp;
+ kstat_io_t kio;
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_disk_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ kc = kstat_open();
+ if (kc == NULL) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ ksp = kc->kc_chain;
+ while (ksp != NULL) {
+ if (ksp->ks_type == KSTAT_TYPE_IO) {
+ if (strcmp(ksp->ks_class, "disk") == 0) {
+ if (kstat_read(kc, ksp, &kio) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ py_disk_info = Py_BuildValue(
+ "(IIKKLL)",
+ kio.reads,
+ kio.writes,
+ kio.nread,
+ kio.nwritten,
+ kio.rtime / 1000 / 1000, // from nano to milli secs
+ kio.wtime / 1000 / 1000 // from nano to milli secs
+ );
+ if (!py_disk_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, ksp->ks_name,
+ py_disk_info))
+ goto error;
+ Py_DECREF(py_disk_info);
+ }
+ }
+ ksp = ksp->ks_next;
+ }
+ kstat_close(kc);
+
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_disk_info);
+ Py_DECREF(py_retdict);
+ if (kc != NULL)
+ kstat_close(kc);
+ return NULL;
+}
+
+
+/*
+ * Return process memory mappings.
+ */
+static PyObject *
+psutil_proc_memory_maps(PyObject *self, PyObject *args)
+{
+ int pid;
+ int fd = -1;
+ char path[100];
+ char perms[10];
+ char *name;
+ struct stat st;
+ pstatus_t status;
+
+ prxmap_t *xmap = NULL, *p;
+ off_t size;
+ size_t nread;
+ int nmap;
+ uintptr_t pr_addr_sz;
+ uintptr_t stk_base_sz, brk_base_sz;
+
+ PyObject *pytuple = NULL;
+ PyObject *py_retlist = PyList_New(0);
+
+ if (py_retlist == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "i", &pid))
+ goto error;
+
+ sprintf(path, "/proc/%i/status", pid);
+ if (! psutil_file_to_struct(path, (void *)&status, sizeof(status)))
+ goto error;
+
+ sprintf(path, "/proc/%i/xmap", pid);
+ if (stat(path, &st) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ size = st.st_size;
+
+ fd = open(path, O_RDONLY);
+ if (fd == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ xmap = (prxmap_t *)malloc(size);
+ if (xmap == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ nread = pread(fd, xmap, size, 0);
+ nmap = nread / sizeof(prxmap_t);
+ p = xmap;
+
+ while (nmap) {
+ nmap -= 1;
+ if (p == NULL) {
+ p += 1;
+ continue;
+ }
+
+ perms[0] = '\0';
+ pr_addr_sz = p->pr_vaddr + p->pr_size;
+
+ // perms
+ sprintf(perms, "%c%c%c%c%c%c", p->pr_mflags & MA_READ ? 'r' : '-',
+ p->pr_mflags & MA_WRITE ? 'w' : '-',
+ p->pr_mflags & MA_EXEC ? 'x' : '-',
+ p->pr_mflags & MA_SHARED ? 's' : '-',
+ p->pr_mflags & MA_NORESERVE ? 'R' : '-',
+ p->pr_mflags & MA_RESERVED1 ? '*' : ' ');
+
+ // name
+ if (strlen(p->pr_mapname) > 0) {
+ name = p->pr_mapname;
+ }
+ else {
+ if ((p->pr_mflags & MA_ISM) || (p->pr_mflags & MA_SHM)) {
+ name = "[shmid]";
+ }
+ else {
+ stk_base_sz = status.pr_stkbase + status.pr_stksize;
+ brk_base_sz = status.pr_brkbase + status.pr_brksize;
+
+ if ((pr_addr_sz > status.pr_stkbase) &&
+ (p->pr_vaddr < stk_base_sz)) {
+ name = "[stack]";
+ }
+ else if ((p->pr_mflags & MA_ANON) && \
+ (pr_addr_sz > status.pr_brkbase) && \
+ (p->pr_vaddr < brk_base_sz)) {
+ name = "[heap]";
+ }
+ else {
+ name = "[anon]";
+ }
+ }
+ }
+
+ pytuple = Py_BuildValue("iisslll",
+ p->pr_vaddr,
+ pr_addr_sz,
+ perms,
+ name,
+ (long)p->pr_rss * p->pr_pagesize,
+ (long)p->pr_anon * p->pr_pagesize,
+ (long)p->pr_locked * p->pr_pagesize);
+ if (!pytuple)
+ goto error;
+ if (PyList_Append(py_retlist, pytuple))
+ goto error;
+ Py_DECREF(pytuple);
+
+ // increment pointer
+ p += 1;
+ }
+
+ close(fd);
+ free(xmap);
+ return py_retlist;
+
+error:
+ if (fd != -1)
+ close(fd);
+ Py_XDECREF(pytuple);
+ Py_DECREF(py_retlist);
+ if (xmap != NULL)
+ free(xmap);
+ return NULL;
+}
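+
+/*
+ * For illustration (addresses hypothetical): a private, readable and
+ * writable anonymous mapping comes out of the loop above as
+ * (0xff3f0000, 0xff3f2000, "rw--- ", "[anon]", rss, anon, locked)
+ * where the last three fields are byte counts (pages * pagesize).
+ */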
+
+
+/*
+ * Return a list of tuples for network I/O statistics.
+ */
+static PyObject *
+psutil_net_io_counters(PyObject *self, PyObject *args)
+{
+ kstat_ctl_t *kc = NULL;
+ kstat_t *ksp;
+ kstat_named_t *rbytes, *wbytes, *rpkts, *wpkts, *ierrs, *oerrs;
+
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_ifc_info = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ kc = kstat_open();
+ if (kc == NULL)
+ goto error;
+
+ ksp = kc->kc_chain;
+ while (ksp != NULL) {
+ if (ksp->ks_type != KSTAT_TYPE_NAMED)
+ goto next;
+ if (strcmp(ksp->ks_class, "net") != 0)
+ goto next;
+ /*
+ // XXX "lo" (localhost) interface makes kstat_data_lookup() fail
+ // (maybe because "ifconfig -a" says it's a virtual interface?).
+ if ((strcmp(ksp->ks_module, "link") != 0) &&
+ (strcmp(ksp->ks_module, "lo") != 0)) {
+ goto skip;
+ */
+ if ((strcmp(ksp->ks_module, "link") != 0))
+ goto next;
+
+ if (kstat_read(kc, ksp, NULL) == -1) {
+ errno = 0;
+ goto next; // a bare 'continue' here would never advance ksp
+ }
+
+ rbytes = (kstat_named_t *)kstat_data_lookup(ksp, "rbytes");
+ wbytes = (kstat_named_t *)kstat_data_lookup(ksp, "obytes");
+ rpkts = (kstat_named_t *)kstat_data_lookup(ksp, "ipackets");
+ wpkts = (kstat_named_t *)kstat_data_lookup(ksp, "opackets");
+ ierrs = (kstat_named_t *)kstat_data_lookup(ksp, "ierrors");
+ oerrs = (kstat_named_t *)kstat_data_lookup(ksp, "oerrors");
+
+ if ((rbytes == NULL) || (wbytes == NULL) || (rpkts == NULL) ||
+ (wpkts == NULL) || (ierrs == NULL) || (oerrs == NULL))
+ {
+ PyErr_SetString(PyExc_RuntimeError, "kstat_data_lookup() failed");
+ goto error;
+ }
+
+#if defined(_INT64_TYPE)
+ py_ifc_info = Py_BuildValue("(KKKKkkii)",
+ wbytes->value.ui64,
+ rbytes->value.ui64,
+ wpkts->value.ui64,
+ rpkts->value.ui64,
+ ierrs->value.ui32,
+ oerrs->value.ui32,
+#else
+ py_ifc_info = Py_BuildValue("(kkkkkkii)",
+ wbytes->value.ui32,
+ rbytes->value.ui32,
+ wpkts->value.ui32,
+ rpkts->value.ui32,
+ ierrs->value.ui32,
+ oerrs->value.ui32,
+#endif
+ 0, // dropin not supported
+ 0 // dropout not supported
+ );
+ if (!py_ifc_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, ksp->ks_name, py_ifc_info))
+ goto error;
+ Py_DECREF(py_ifc_info);
+ goto next;
+
+next:
+ ksp = ksp->ks_next;
+ }
+
+ kstat_close(kc);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_ifc_info);
+ Py_DECREF(py_retdict);
+ if (kc != NULL)
+ kstat_close(kc);
+ return NULL;
+}
+
+
+#ifndef EXPER_IP_AND_ALL_IRES
+#define EXPER_IP_AND_ALL_IRES (1024+4)
+#endif
+
+// a signaler for connections without an actual status
+static int PSUTIL_CONN_NONE = 128;
+
+/*
+ * Return TCP and UDP connections opened by process.
+ * UNIX sockets are excluded.
+ *
+ * Thanks to:
+ * https://github.com/DavidGriffith/finx/blob/master/
+ * nxsensor-3.5.0-1/src/sysdeps/solaris.c
+ * ...and:
+ * https://hg.java.net/hg/solaris~on-src/file/tip/usr/src/cmd/
+ * cmd-inet/usr.bin/netstat/netstat.c
+ */
+static PyObject *
+psutil_net_connections(PyObject *self, PyObject *args)
+{
+ long pid;
+ int sd = 0;
+ mib2_tcpConnEntry_t *tp = NULL;
+ mib2_udpEntry_t *ude;
+#if defined(AF_INET6)
+ mib2_tcp6ConnEntry_t *tp6;
+ mib2_udp6Entry_t *ude6;
+#endif
+ char buf[512];
+ int i, flags, getcode, num_ent, state;
+ char lip[200], rip[200];
+ int lport, rport;
+ int processed_pid;
+ int databuf_init = 0;
+ struct strbuf ctlbuf, databuf;
+ struct T_optmgmt_req *tor = (struct T_optmgmt_req *)buf;
+ struct T_optmgmt_ack *toa = (struct T_optmgmt_ack *)buf;
+ struct T_error_ack *tea = (struct T_error_ack *)buf;
+ struct opthdr *mibhdr;
+
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+ PyObject *py_laddr = NULL;
+ PyObject *py_raddr = NULL;
+ PyObject *af_filter = NULL;
+ PyObject *type_filter = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "lOO", &pid, &af_filter, &type_filter))
+ goto error;
+ if (!PySequence_Check(af_filter) || !PySequence_Check(type_filter)) {
+ PyErr_SetString(PyExc_TypeError, "arg 2 or 3 is not a sequence");
+ goto error;
+ }
+
+ sd = open("/dev/arp", O_RDWR);
+ if (sd == -1) {
+ PyErr_SetFromErrnoWithFilename(PyExc_OSError, "/dev/arp");
+ goto error;
+ }
+
+ /*
+ XXX - These 2 are used in ifconfig.c but they seem unnecessary
+ ret = ioctl(sd, I_PUSH, "tcp");
+ if (ret == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ ret = ioctl(sd, I_PUSH, "udp");
+ if (ret == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+ */
+
+ // OK, this mess is basically copied and pasted from nxsensor project
+ // which copied and pasted it from netstat source code, mibget()
+ // function. Also see:
+ // http://stackoverflow.com/questions/8723598/
+ tor->PRIM_type = T_SVR4_OPTMGMT_REQ;
+ tor->OPT_offset = sizeof (struct T_optmgmt_req);
+ tor->OPT_length = sizeof (struct opthdr);
+ tor->MGMT_flags = T_CURRENT;
+ mibhdr = (struct opthdr *)&tor[1];
+ mibhdr->level = EXPER_IP_AND_ALL_IRES;
+ mibhdr->name = 0;
+ mibhdr->len = 0;
+
+ ctlbuf.buf = buf;
+ ctlbuf.len = tor->OPT_offset + tor->OPT_length;
+ flags = 0; // request to be sent in non-priority
+
+ if (putmsg(sd, &ctlbuf, (struct strbuf *)0, flags) == -1) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ mibhdr = (struct opthdr *)&toa[1];
+ ctlbuf.maxlen = sizeof (buf);
+
+ for (;;) {
+ flags = 0;
+ getcode = getmsg(sd, &ctlbuf, (struct strbuf *)0, &flags);
+
+ if (getcode != MOREDATA ||
+ ctlbuf.len < sizeof (struct T_optmgmt_ack) ||
+ toa->PRIM_type != T_OPTMGMT_ACK ||
+ toa->MGMT_flags != T_SUCCESS)
+ {
+ break;
+ }
+ if (ctlbuf.len >= sizeof (struct T_error_ack) &&
+ tea->PRIM_type == T_ERROR_ACK)
+ {
+ PyErr_SetString(PyExc_RuntimeError, "ERROR_ACK");
+ goto error;
+ }
+ if (getcode == 0 &&
+ ctlbuf.len >= sizeof (struct T_optmgmt_ack) &&
+ toa->PRIM_type == T_OPTMGMT_ACK &&
+ toa->MGMT_flags == T_SUCCESS)
+ {
+ PyErr_SetString(PyExc_RuntimeError, "ERROR_T_OPTMGMT_ACK");
+ goto error;
+ }
+
+ databuf.maxlen = mibhdr->len;
+ databuf.len = 0;
+ databuf.buf = (char *)malloc((int)mibhdr->len);
+ if (!databuf.buf) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ databuf_init = 1;
+
+ flags = 0;
+ getcode = getmsg(sd, (struct strbuf *)0, &databuf, &flags);
+ if (getcode < 0) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ // TCPv4
+ if (mibhdr->level == MIB2_TCP && mibhdr->name == MIB2_TCP_13) {
+ tp = (mib2_tcpConnEntry_t *)databuf.buf;
+ num_ent = mibhdr->len / sizeof(mib2_tcpConnEntry_t);
+ for (i = 0; i < num_ent; i++, tp++) {
+ processed_pid = tp->tcpConnCreationProcess;
+ if (pid != -1 && processed_pid != pid)
+ continue;
+ // construct local/remote addresses
+ inet_ntop(AF_INET, &tp->tcpConnLocalAddress, lip, sizeof(lip));
+ inet_ntop(AF_INET, &tp->tcpConnRemAddress, rip, sizeof(rip));
+ lport = tp->tcpConnLocalPort;
+ rport = tp->tcpConnRemPort;
+
+ // construct Python tuple/list
+ py_laddr = Py_BuildValue("(si)", lip, lport);
+ if (!py_laddr)
+ goto error;
+ if (rport != 0)
+ py_raddr = Py_BuildValue("(si)", rip, rport);
+ else {
+ py_raddr = Py_BuildValue("()");
+ }
+ if (!py_raddr)
+ goto error;
+ state = tp->tcpConnEntryInfo.ce_state;
+
+ // add item
+ py_tuple = Py_BuildValue("(iiiNNiI)", -1, AF_INET, SOCK_STREAM,
+ py_laddr, py_raddr, state,
+ processed_pid);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ }
+#if defined(AF_INET6)
+ // TCPv6
+ else if (mibhdr->level == MIB2_TCP6 && mibhdr->name == MIB2_TCP6_CONN)
+ {
+ tp6 = (mib2_tcp6ConnEntry_t *)databuf.buf;
+ num_ent = mibhdr->len / sizeof(mib2_tcp6ConnEntry_t);
+
+ for (i = 0; i < num_ent; i++, tp6++) {
+ processed_pid = tp6->tcp6ConnCreationProcess;
+ if (pid != -1 && processed_pid != pid)
+ continue;
+ // construct local/remote addresses
+ inet_ntop(AF_INET6, &tp6->tcp6ConnLocalAddress, lip, sizeof(lip));
+ inet_ntop(AF_INET6, &tp6->tcp6ConnRemAddress, rip, sizeof(rip));
+ lport = tp6->tcp6ConnLocalPort;
+ rport = tp6->tcp6ConnRemPort;
+
+ // construct Python tuple/list
+ py_laddr = Py_BuildValue("(si)", lip, lport);
+ if (!py_laddr)
+ goto error;
+ if (rport != 0)
+ py_raddr = Py_BuildValue("(si)", rip, rport);
+ else
+ py_raddr = Py_BuildValue("()");
+ if (!py_raddr)
+ goto error;
+ state = tp6->tcp6ConnEntryInfo.ce_state;
+
+ // add item
+ py_tuple = Py_BuildValue("(iiiNNiI)", -1, AF_INET6, SOCK_STREAM,
+ py_laddr, py_raddr, state, processed_pid);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ }
+#endif
+ // UDPv4
+ else if (mibhdr->level == MIB2_UDP || mibhdr->level == MIB2_UDP_ENTRY) {
+ ude = (mib2_udpEntry_t *)databuf.buf;
+ num_ent = mibhdr->len / sizeof(mib2_udpEntry_t);
+ for (i = 0; i < num_ent; i++, ude++) {
+ processed_pid = ude->udpCreationProcess;
+ if (pid != -1 && processed_pid != pid)
+ continue;
+ // XXX Very ugly hack! It seems we get here only the first
+ // time we bump into a UDPv4 socket. PID is a very high
+ // number (clearly impossible) and the address does not
+ // belong to any valid interface. Not sure what else
+ // to do other than skipping.
+ if (processed_pid > 131072)
+ continue;
+ inet_ntop(AF_INET, &ude->udpLocalAddress, lip, sizeof(lip));
+ lport = ude->udpLocalPort;
+ py_laddr = Py_BuildValue("(si)", lip, lport);
+ if (!py_laddr)
+ goto error;
+ py_raddr = Py_BuildValue("()");
+ if (!py_raddr)
+ goto error;
+ py_tuple = Py_BuildValue("(iiiNNiI)", -1, AF_INET, SOCK_DGRAM,
+ py_laddr, py_raddr, PSUTIL_CONN_NONE,
+ processed_pid);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ }
+#if defined(AF_INET6)
+ // UDPv6
+ else if (mibhdr->level == MIB2_UDP6 ||
+ mibhdr->level == MIB2_UDP6_ENTRY)
+ {
+ ude6 = (mib2_udp6Entry_t *)databuf.buf;
+ num_ent = mibhdr->len / sizeof(mib2_udp6Entry_t);
+ for (i = 0; i < num_ent; i++, ude6++) {
+ processed_pid = ude6->udp6CreationProcess;
+ if (pid != -1 && processed_pid != pid)
+ continue;
+ inet_ntop(AF_INET6, &ude6->udp6LocalAddress, lip, sizeof(lip));
+ lport = ude6->udp6LocalPort;
+ py_laddr = Py_BuildValue("(si)", lip, lport);
+ if (!py_laddr)
+ goto error;
+ py_raddr = Py_BuildValue("()");
+ if (!py_raddr)
+ goto error;
+ py_tuple = Py_BuildValue("(iiiNNiI)", -1, AF_INET6, SOCK_DGRAM,
+ py_laddr, py_raddr, PSUTIL_CONN_NONE,
+ processed_pid);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ }
+#endif
+ free(databuf.buf);
+ }
+
+ close(sd);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_tuple);
+ Py_XDECREF(py_laddr);
+ Py_XDECREF(py_raddr);
+ Py_DECREF(py_retlist);
+ if (databuf_init == 1)
+ free(databuf.buf);
+ if (sd != 0)
+ close(sd);
+ return NULL;
+}
+
+
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+ float boot_time = 0.0;
+ struct utmpx *ut;
+
+ while (NULL != (ut = getutxent())) {
+ if (ut->ut_type == BOOT_TIME) {
+ boot_time = (float)ut->ut_tv.tv_sec;
+ break;
+ }
+ }
+ endutxent();
+ if (boot_time != 0.0) {
+ return Py_BuildValue("f", boot_time);
+ }
+ else {
+ PyErr_SetString(PyExc_RuntimeError, "can't determine boot time");
+ return NULL;
+ }
+}
+
+
+/*
+ * Return the number of physical CPU cores on the system.
+ */
+static PyObject *
+psutil_cpu_count_phys(PyObject *self, PyObject *args)
+{
+ kstat_ctl_t *kc;
+ kstat_t *ksp;
+ int ncpus = 0;
+
+ kc = kstat_open();
+ if (kc == NULL)
+ goto error;
+ ksp = kstat_lookup(kc, "cpu_info", -1, NULL);
+ if (ksp == NULL)
+ goto error;
+
+ for (ksp = kc->kc_chain; ksp; ksp = ksp->ks_next) {
+ if (strcmp(ksp->ks_module, "cpu_info") != 0)
+ continue;
+ if (kstat_read(kc, ksp, NULL) == -1)
+ goto error;
+ ncpus += 1;
+ }
+
+ kstat_close(kc);
+ if (ncpus > 0)
+ return Py_BuildValue("i", ncpus);
+ else
+ goto error;
+
+error:
+ // mimic os.cpu_count()
+ if (kc != NULL)
+ kstat_close(kc);
+ Py_RETURN_NONE;
+}
+
+
+/*
+ * Return stats about a particular network
+ * interface. References:
+ * https://github.com/dpaleino/wicd/blob/master/wicd/backends/be-ioctl.py
+ * http://www.i-scream.org/libstatgrab/
+ */
+static PyObject*
+psutil_net_if_stats(PyObject* self, PyObject* args)
+{
+ kstat_ctl_t *kc = NULL;
+ kstat_t *ksp;
+ kstat_named_t *knp;
+ int ret;
+ int sock = 0;
+ int duplex;
+ int speed;
+
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_ifc_info = NULL;
+ PyObject *py_is_up = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ kc = kstat_open();
+ if (kc == NULL)
+ goto error;
+ sock = socket(AF_INET, SOCK_DGRAM, 0);
+ if (sock == -1)
+ goto error;
+
+ for (ksp = kc->kc_chain; ksp; ksp = ksp->ks_next) {
+ if (strcmp(ksp->ks_class, "net") == 0) {
+ struct ifreq ifr;
+
+ kstat_read(kc, ksp, NULL);
+ if (ksp->ks_type != KSTAT_TYPE_NAMED)
+ continue;
+
+ strncpy(ifr.ifr_name, ksp->ks_name, sizeof(ifr.ifr_name));
+ ret = ioctl(sock, SIOCGIFFLAGS, &ifr);
+ if (ret == -1)
+ continue; // not a network interface
+
+ // is up?
+ if ((ifr.ifr_flags & IFF_UP) != 0) {
+ if ((knp = kstat_data_lookup(ksp, "link_up")) != NULL) {
+ if (knp->value.ui32 != 0u)
+ py_is_up = Py_True;
+ else
+ py_is_up = Py_False;
+ }
+ else {
+ py_is_up = Py_True;
+ }
+ }
+ else {
+ py_is_up = Py_False;
+ }
+ Py_INCREF(py_is_up);
+
+ // duplex
+ duplex = 0; // unknown
+ if ((knp = kstat_data_lookup(ksp, "link_duplex")) != NULL) {
+ if (knp->value.ui32 == 1)
+ duplex = 1; // half
+ else if (knp->value.ui32 == 2)
+ duplex = 2; // full
+ }
+
+ // speed
+ if ((knp = kstat_data_lookup(ksp, "ifspeed")) != NULL)
+ // expressed in bits per sec; we want megabits per sec
+ speed = (int)(knp->value.ui64 / 1000000);
+ else
+ speed = 0;
+
+ // mtu
+ ret = ioctl(sock, SIOCGIFMTU, &ifr);
+ if (ret == -1)
+ goto error;
+
+ py_ifc_info = Py_BuildValue("(Oiii)", py_is_up, duplex, speed,
+ ifr.ifr_mtu);
+ if (!py_ifc_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, ksp->ks_name, py_ifc_info))
+ goto error;
+ Py_DECREF(py_ifc_info);
+ }
+ }
+
+ close(sock);
+ kstat_close(kc);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_is_up);
+ Py_XDECREF(py_ifc_info);
+ Py_DECREF(py_retdict);
+ if (sock != 0)
+ close(sock);
+ if (kc != NULL)
+ kstat_close(kc);
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+}
+
+
+/*
+ * define the psutil C module methods and initialize the module.
+ */
+static PyMethodDef
+PsutilMethods[] =
+{
+ // --- process-related functions
+ {"proc_basic_info", psutil_proc_basic_info, METH_VARARGS,
+ "Return process ppid, rss, vms, ctime, nice, nthreads, status and tty"},
+ {"proc_name_and_args", psutil_proc_name_and_args, METH_VARARGS,
+ "Return process name and args."},
+ {"proc_cpu_times", psutil_proc_cpu_times, METH_VARARGS,
+ "Return process user and system CPU times."},
+ {"proc_cred", psutil_proc_cred, METH_VARARGS,
+ "Return process uids/gids."},
+ {"query_process_thread", psutil_proc_query_thread, METH_VARARGS,
+ "Return info about a process thread"},
+ {"proc_memory_maps", psutil_proc_memory_maps, METH_VARARGS,
+ "Return process memory mappings"},
+ {"proc_num_ctx_switches", psutil_proc_num_ctx_switches, METH_VARARGS,
+ "Return the number of context switches performed by process"},
+
+ // --- system-related functions
+ {"swap_mem", psutil_swap_mem, METH_VARARGS,
+ "Return information about system swap memory."},
+ {"users", psutil_users, METH_VARARGS,
+ "Return currently connected users."},
+ {"disk_partitions", psutil_disk_partitions, METH_VARARGS,
+ "Return disk partitions."},
+ {"per_cpu_times", psutil_per_cpu_times, METH_VARARGS,
+ "Return system per-CPU times."},
+ {"disk_io_counters", psutil_disk_io_counters, METH_VARARGS,
+ "Return a Python dict of tuples for disk I/O statistics."},
+ {"net_io_counters", psutil_net_io_counters, METH_VARARGS,
+ "Return a Python dict of tuples for network I/O statistics."},
+ {"boot_time", psutil_boot_time, METH_VARARGS,
+ "Return system boot time in seconds since the EPOCH."},
+ {"cpu_count_phys", psutil_cpu_count_phys, METH_VARARGS,
+ "Return the number of physical CPUs on the system."},
+ {"net_connections", psutil_net_connections, METH_VARARGS,
+ "Return TCP and UDP syste-wide open connections."},
+ {"net_if_stats", psutil_net_if_stats, METH_VARARGS,
+ "Return NIC stats (isup, duplex, speed, mtu)"},
+
+ {NULL, NULL, 0, NULL}
+};
+
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+#define GETSTATE(m) (&_state)
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int
+psutil_sunos_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int
+psutil_sunos_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_sunos",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_sunos_traverse,
+ psutil_sunos_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_sunos(void)
+
+#else
+#define INITERROR return
+
+void init_psutil_sunos(void)
+#endif
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_sunos", PsutilMethods);
+#endif
+ PyModule_AddIntConstant(module, "version", PSUTIL_VERSION);
+
+ PyModule_AddIntConstant(module, "SSLEEP", SSLEEP);
+ PyModule_AddIntConstant(module, "SRUN", SRUN);
+ PyModule_AddIntConstant(module, "SZOMB", SZOMB);
+ PyModule_AddIntConstant(module, "SSTOP", SSTOP);
+ PyModule_AddIntConstant(module, "SIDL", SIDL);
+ PyModule_AddIntConstant(module, "SONPROC", SONPROC);
+ PyModule_AddIntConstant(module, "SWAIT", SWAIT);
+
+ PyModule_AddIntConstant(module, "PRNODEV", PRNODEV); // for process tty
+
+ PyModule_AddIntConstant(module, "TCPS_CLOSED", TCPS_CLOSED);
+ PyModule_AddIntConstant(module, "TCPS_CLOSING", TCPS_CLOSING);
+ PyModule_AddIntConstant(module, "TCPS_CLOSE_WAIT", TCPS_CLOSE_WAIT);
+ PyModule_AddIntConstant(module, "TCPS_LISTEN", TCPS_LISTEN);
+ PyModule_AddIntConstant(module, "TCPS_ESTABLISHED", TCPS_ESTABLISHED);
+ PyModule_AddIntConstant(module, "TCPS_SYN_SENT", TCPS_SYN_SENT);
+ PyModule_AddIntConstant(module, "TCPS_SYN_RCVD", TCPS_SYN_RCVD);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_1", TCPS_FIN_WAIT_1);
+ PyModule_AddIntConstant(module, "TCPS_FIN_WAIT_2", TCPS_FIN_WAIT_2);
+ PyModule_AddIntConstant(module, "TCPS_LAST_ACK", TCPS_LAST_ACK);
+ PyModule_AddIntConstant(module, "TCPS_TIME_WAIT", TCPS_TIME_WAIT);
+ // sunos specific
+ PyModule_AddIntConstant(module, "TCPS_IDLE", TCPS_IDLE);
+ // sunos specific
+ PyModule_AddIntConstant(module, "TCPS_BOUND", TCPS_BOUND);
+ PyModule_AddIntConstant(module, "PSUTIL_CONN_NONE", PSUTIL_CONN_NONE);
+
+ if (module == NULL)
+ INITERROR;
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_sunos.h b/python/psutil/psutil/_psutil_sunos.h
new file mode 100644
index 000000000..f93dbfe0f
--- /dev/null
+++ b/python/psutil/psutil/_psutil_sunos.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+// processes
+static PyObject* psutil_proc_basic_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cred(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_maps(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_name_and_args(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_ctx_switches(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_query_thread(PyObject* self, PyObject* args);
+
+// system
+static PyObject* psutil_boot_time(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_phys(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_net_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_per_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_swap_mem(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_net_connections(PyObject* self, PyObject* args);
+static PyObject* psutil_net_if_stats(PyObject* self, PyObject* args);
diff --git a/python/psutil/psutil/_psutil_windows.c b/python/psutil/psutil/_psutil_windows.c
new file mode 100644
index 000000000..3e0f7a7cd
--- /dev/null
+++ b/python/psutil/psutil/_psutil_windows.c
@@ -0,0 +1,3405 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Windows platform-specific module methods for _psutil_windows
+ */
+
+// Fixes clash between winsock2.h and windows.h
+#define WIN32_LEAN_AND_MEAN
+
+#include <Python.h>
+#include <windows.h>
+#include <Psapi.h>
+#include <time.h>
+#include <lm.h>
+#include <WinIoCtl.h>
+#include <tchar.h>
+#include <tlhelp32.h>
+#include <winsock2.h>
+#include <iphlpapi.h>
+#include <wtsapi32.h>
+#include <ws2tcpip.h>
+
+// Link with Iphlpapi.lib
+#pragma comment(lib, "IPHLPAPI.lib")
+
+#include "_psutil_windows.h"
+#include "_psutil_common.h"
+#include "arch/windows/security.h"
+#include "arch/windows/process_info.h"
+#include "arch/windows/process_handles.h"
+#include "arch/windows/ntextapi.h"
+#include "arch/windows/inet_ntop.h"
+
+#ifdef __MINGW32__
+#include "arch/windows/glpi.h"
+#endif
+
+
+/*
+ * ============================================================================
+ * Utilities
+ * ============================================================================
+ */
+
+ // a flag for connections without an actual status
+static int PSUTIL_CONN_NONE = 128;
+
+#define MALLOC(x) HeapAlloc(GetProcessHeap(), 0, (x))
+#define FREE(x) HeapFree(GetProcessHeap(), 0, (x))
+#define LO_T ((float)1e-7)
+#define HI_T (LO_T*4294967296.0)
+#define BYTESWAP_USHORT(x) ((((USHORT)(x) << 8) | ((USHORT)(x) >> 8)) & 0xffff)
+#ifndef AF_INET6
+#define AF_INET6 23
+#endif
+#define _psutil_conn_decref_objs() \
+ Py_DECREF(_AF_INET); \
+ Py_DECREF(_AF_INET6);\
+ Py_DECREF(_SOCK_STREAM);\
+ Py_DECREF(_SOCK_DGRAM);
+
+typedef BOOL (WINAPI *LPFN_GLPI)
+ (PSYSTEM_LOGICAL_PROCESSOR_INFORMATION, PDWORD);
+
+// fix for mingw32, see
+// https://github.com/giampaolo/psutil/issues/351#c2
+typedef struct _DISK_PERFORMANCE_WIN_2008 {
+ LARGE_INTEGER BytesRead;
+ LARGE_INTEGER BytesWritten;
+ LARGE_INTEGER ReadTime;
+ LARGE_INTEGER WriteTime;
+ LARGE_INTEGER IdleTime;
+ DWORD ReadCount;
+ DWORD WriteCount;
+ DWORD QueueDepth;
+ DWORD SplitCount;
+ LARGE_INTEGER QueryTime;
+ DWORD StorageDeviceNumber;
+ WCHAR StorageManagerName[8];
+} DISK_PERFORMANCE_WIN_2008;
+
+// --- network connections mingw32 support
+#ifndef _IPRTRMIB_H
+typedef struct _MIB_TCP6ROW_OWNER_PID {
+ UCHAR ucLocalAddr[16];
+ DWORD dwLocalScopeId;
+ DWORD dwLocalPort;
+ UCHAR ucRemoteAddr[16];
+ DWORD dwRemoteScopeId;
+ DWORD dwRemotePort;
+ DWORD dwState;
+ DWORD dwOwningPid;
+} MIB_TCP6ROW_OWNER_PID, *PMIB_TCP6ROW_OWNER_PID;
+
+typedef struct _MIB_TCP6TABLE_OWNER_PID {
+ DWORD dwNumEntries;
+ MIB_TCP6ROW_OWNER_PID table[ANY_SIZE];
+} MIB_TCP6TABLE_OWNER_PID, *PMIB_TCP6TABLE_OWNER_PID;
+#endif
+
+#ifndef __IPHLPAPI_H__
+typedef struct in6_addr {
+ union {
+ UCHAR Byte[16];
+ USHORT Word[8];
+ } u;
+} IN6_ADDR, *PIN6_ADDR, FAR *LPIN6_ADDR;
+
+typedef enum _UDP_TABLE_CLASS {
+ UDP_TABLE_BASIC,
+ UDP_TABLE_OWNER_PID,
+ UDP_TABLE_OWNER_MODULE
+} UDP_TABLE_CLASS, *PUDP_TABLE_CLASS;
+
+typedef struct _MIB_UDPROW_OWNER_PID {
+ DWORD dwLocalAddr;
+ DWORD dwLocalPort;
+ DWORD dwOwningPid;
+} MIB_UDPROW_OWNER_PID, *PMIB_UDPROW_OWNER_PID;
+
+typedef struct _MIB_UDPTABLE_OWNER_PID {
+ DWORD dwNumEntries;
+ MIB_UDPROW_OWNER_PID table[ANY_SIZE];
+} MIB_UDPTABLE_OWNER_PID, *PMIB_UDPTABLE_OWNER_PID;
+#endif
+
+typedef struct _MIB_UDP6ROW_OWNER_PID {
+ UCHAR ucLocalAddr[16];
+ DWORD dwLocalScopeId;
+ DWORD dwLocalPort;
+ DWORD dwOwningPid;
+} MIB_UDP6ROW_OWNER_PID, *PMIB_UDP6ROW_OWNER_PID;
+
+typedef struct _MIB_UDP6TABLE_OWNER_PID {
+ DWORD dwNumEntries;
+ MIB_UDP6ROW_OWNER_PID table[ANY_SIZE];
+} MIB_UDP6TABLE_OWNER_PID, *PMIB_UDP6TABLE_OWNER_PID;
+
+
+PIP_ADAPTER_ADDRESSES
+psutil_get_nic_addresses() {
+ // allocate a 15 KB buffer to start with
+ int outBufLen = 15000;
+ DWORD dwRetVal = 0;
+ ULONG attempts = 0;
+ PIP_ADAPTER_ADDRESSES pAddresses = NULL;
+
+ do {
+ pAddresses = (IP_ADAPTER_ADDRESSES *) malloc(outBufLen);
+ if (pAddresses == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ dwRetVal = GetAdaptersAddresses(AF_UNSPEC, 0, NULL, pAddresses,
+ &outBufLen);
+ if (dwRetVal == ERROR_BUFFER_OVERFLOW) {
+ free(pAddresses);
+ pAddresses = NULL;
+ }
+ else {
+ break;
+ }
+
+ attempts++;
+ } while ((dwRetVal == ERROR_BUFFER_OVERFLOW) && (attempts < 3));
+
+ if (dwRetVal != NO_ERROR) {
+ PyErr_SetString(PyExc_RuntimeError, "GetAdaptersAddresses() failed.");
+ return NULL;
+ }
+
+ return pAddresses;
+}
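+
+// For illustration: callers walk the returned singly-linked list and
+// are responsible for free()-ing it, e.g. (sketch):
+//
+// PIP_ADAPTER_ADDRESSES addrs = psutil_get_nic_addresses();
+// PIP_ADAPTER_ADDRESSES a;
+// if (addrs == NULL)
+// return NULL; // Python exception already set
+// for (a = addrs; a != NULL; a = a->Next)
+// ; // inspect a->AdapterName, a->FirstUnicastAddress, ...
+// free(addrs);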
+
+
+/*
+ * ============================================================================
+ * Public Python API
+ * ============================================================================
+ */
+
+
+/*
+ * Return a Python float representing the system boot time expressed
+ * in seconds since the epoch.
+ */
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+ double uptime;
+ time_t pt;
+ FILETIME fileTime;
+ long long ll;
+
+ GetSystemTimeAsFileTime(&fileTime);
+
+ /*
+ HUGE thanks to:
+ http://johnstewien.spaces.live.com/blog/cns!E6885DB5CEBABBC8!831.entry
+
+ This code converts the FILETIME structure to the 32-bit Unix
+ time_t value.
+ A time_t is a 32-bit value holding the number of seconds since
+ January 1, 1970, while a FILETIME is a 64-bit count of
+ 100-nanosecond periods since January 1, 1601. Convert by
+ subtracting the number of 100-nanosecond periods between
+ 01-01-1601 and 01-01-1970, then divide by 1e+7 to get to the
+ same base granularity.
+ */
+ ll = (((LONGLONG)(fileTime.dwHighDateTime)) << 32) \
+ + fileTime.dwLowDateTime;
+ pt = (time_t)((ll - 116444736000000000ull) / 10000000ull);
+
+ // XXX - By using GetTickCount() time will wrap around to zero if the
+ // system is run continuously for 49.7 days.
+ uptime = GetTickCount() / 1000.00f;
+ return Py_BuildValue("d", (double)pt - uptime);
+}
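+
+// For illustration: a FILETIME of exactly 116444736000000000 (the
+// number of 100ns intervals between 1601-01-01 and 1970-01-01) maps
+// to time_t 0, since (ll - 116444736000000000ull) / 10000000ull == 0;
+// one second later ll == 116444736010000000 and the result is 1.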
+
+
+/*
+ * Return 1 if PID exists in the current process list, else 0.
+ */
+static PyObject *
+psutil_pid_exists(PyObject *self, PyObject *args)
+{
+ long pid;
+ int status;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ status = psutil_pid_is_running(pid);
+ if (-1 == status)
+ return NULL; // exception raised in psutil_pid_is_running()
+ return PyBool_FromLong(status);
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+ DWORD *proclist = NULL;
+ DWORD numberOfReturnedPIDs;
+ DWORD i;
+ PyObject *pid = NULL;
+ PyObject *retlist = PyList_New(0);
+
+ if (retlist == NULL)
+ return NULL;
+ proclist = psutil_get_pids(&numberOfReturnedPIDs);
+ if (proclist == NULL)
+ goto error;
+
+ for (i = 0; i < numberOfReturnedPIDs; i++) {
+ pid = Py_BuildValue("I", proclist[i]);
+ if (!pid)
+ goto error;
+ if (PyList_Append(retlist, pid))
+ goto error;
+ Py_DECREF(pid);
+ }
+
+ // free C array allocated for PIDs
+ free(proclist);
+ return retlist;
+
+error:
+ Py_XDECREF(pid);
+ Py_DECREF(retlist);
+ if (proclist != NULL)
+ free(proclist);
+ return NULL;
+}
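+
+/*
+ * Note on the success/error paths above: this is the reference-counting
+ * idiom used by most functions in this module. Once PyList_Append()
+ * succeeds the list holds its own reference, so the temporary item is
+ * Py_DECREF'ed right away; on the error path the current item is
+ * Py_XDECREF'ed, the container is Py_DECREF'ed and any C buffers are
+ * freed, so every resource is released exactly once.
+ */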
+
+
+/*
+ * Kill a process given its PID.
+ */
+static PyObject *
+psutil_proc_kill(PyObject *self, PyObject *args)
+{
+ HANDLE hProcess;
+ long pid;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (pid == 0)
+ return AccessDenied();
+
+ hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid);
+ if (hProcess == NULL) {
+ if (GetLastError() == ERROR_INVALID_PARAMETER) {
+ // see https://github.com/giampaolo/psutil/issues/24
+ NoSuchProcess();
+ }
+ else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ return NULL;
+ }
+
+ // kill the process
+ if (! TerminateProcess(hProcess, 0)) {
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hProcess);
+ return NULL;
+ }
+
+ CloseHandle(hProcess);
+ Py_RETURN_NONE;
+}
+
+
+/*
+ * Wait for process to terminate and return its exit code.
+ */
+static PyObject *
+psutil_proc_wait(PyObject *self, PyObject *args)
+{
+ HANDLE hProcess;
+ DWORD ExitCode;
+ DWORD retVal;
+ long pid;
+ long timeout;
+
+ if (! PyArg_ParseTuple(args, "ll", &pid, &timeout))
+ return NULL;
+ if (pid == 0)
+ return AccessDenied();
+
+ hProcess = OpenProcess(SYNCHRONIZE | PROCESS_QUERY_INFORMATION,
+ FALSE, pid);
+ if (hProcess == NULL) {
+ if (GetLastError() == ERROR_INVALID_PARAMETER) {
+ // no such process; we do not want to raise NSP but
+ // return None instead.
+ Py_RETURN_NONE;
+ }
+ else {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ }
+
+ // wait until the process has terminated
+ Py_BEGIN_ALLOW_THREADS
+ retVal = WaitForSingleObject(hProcess, timeout);
+ Py_END_ALLOW_THREADS
+
+ if (retVal == WAIT_FAILED) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(GetLastError());
+ }
+ if (retVal == WAIT_TIMEOUT) {
+ CloseHandle(hProcess);
+ return Py_BuildValue("l", WAIT_TIMEOUT);
+ }
+
+ // get the exit code; note: the subprocess module (erroneously?)
+ // uses what is returned by WaitForSingleObject()
+ if (GetExitCodeProcess(hProcess, &ExitCode) == 0) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(GetLastError());
+ }
+ CloseHandle(hProcess);
+#if PY_MAJOR_VERSION >= 3
+ return PyLong_FromLong((long) ExitCode);
+#else
+ return PyInt_FromLong((long) ExitCode);
+#endif
+}
+
+
+/*
+ * Return a Python tuple (user_time, kernel_time)
+ */
+static PyObject *
+psutil_proc_cpu_times(PyObject *self, PyObject *args)
+{
+ long pid;
+ HANDLE hProcess;
+ FILETIME ftCreate, ftExit, ftKernel, ftUser;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL)
+ return NULL;
+ if (! GetProcessTimes(hProcess, &ftCreate, &ftExit, &ftKernel, &ftUser)) {
+ CloseHandle(hProcess);
+ if (GetLastError() == ERROR_ACCESS_DENIED) {
+ // usually means the process has died so we throw a NoSuchProcess
+ // here
+ return NoSuchProcess();
+ }
+ else {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ }
+
+ CloseHandle(hProcess);
+
+ /*
+ * User and kernel times are represented as a FILETIME structure
+ * which contains a 64-bit value representing the number of
+ * 100-nanosecond intervals since January 1, 1601 (UTC):
+ * http://msdn.microsoft.com/en-us/library/ms724284(VS.85).aspx
+ * To convert it into a float representing the seconds that the
+ * process has executed in user/kernel mode I borrowed the code
+ * below from Python's Modules/posixmodule.c
+ */
+ return Py_BuildValue(
+ "(dd)",
+ (double)(ftUser.dwHighDateTime * 429.4967296 + \
+ ftUser.dwLowDateTime * 1e-7),
+ (double)(ftKernel.dwHighDateTime * 429.4967296 + \
+ ftKernel.dwLowDateTime * 1e-7)
+ );
+}
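+
+/*
+ * About the 429.4967296 constant used above: the high DWORD of a
+ * FILETIME counts units of 2^32 * 100 ns, and 2^32 * 1e-7 s equals
+ * 429.4967296 s, hence:
+ * seconds = dwHighDateTime * 429.4967296 + dwLowDateTime * 1e-7
+ */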
+
+
+/*
+ * Return a Python float indicating the process create time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_proc_create_time(PyObject *self, PyObject *args)
+{
+ long pid;
+ long long unix_time;
+ DWORD exitCode;
+ HANDLE hProcess;
+ BOOL ret;
+ FILETIME ftCreate, ftExit, ftKernel, ftUser;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ // special case for PIDs 0 and 4, return system boot time
+ if (0 == pid || 4 == pid)
+ return psutil_boot_time(NULL, NULL);
+
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL)
+ return NULL;
+ if (! GetProcessTimes(hProcess, &ftCreate, &ftExit, &ftKernel, &ftUser)) {
+ CloseHandle(hProcess);
+ if (GetLastError() == ERROR_ACCESS_DENIED) {
+ // usually means the process has died so we throw a
+ // NoSuchProcess here
+ return NoSuchProcess();
+ }
+ else {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ }
+
+ // Make sure the process is not gone as OpenProcess alone seems to be
+ // unreliable in doing so (it seems a previous call to p.wait() makes
+ // it unreliable).
+ // This check is important as creation time is used to make sure the
+ // process is still running.
+ ret = GetExitCodeProcess(hProcess, &exitCode);
+ CloseHandle(hProcess);
+ if (ret != 0) {
+ if (exitCode != STILL_ACTIVE)
+ return NoSuchProcess();
+ }
+ else {
+ // Ignore access denied as it means the process is still alive.
+ // For all other errors, we want an exception.
+ if (GetLastError() != ERROR_ACCESS_DENIED) {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ }
+
+ /*
+ Convert the FILETIME structure to a Unix time.
+ It's the best I could find by googling and borrowing code here and there.
+ The time returned has a precision of 1 second.
+ */
+ unix_time = ((LONGLONG)ftCreate.dwHighDateTime) << 32;
+ unix_time += ftCreate.dwLowDateTime - 116444736000000000LL;
+ unix_time /= 10000000;
+ return Py_BuildValue("d", (double)unix_time);
+}
+
+
+
+/*
+ * Return the number of logical CPUs.
+ */
+static PyObject *
+psutil_cpu_count_logical(PyObject *self, PyObject *args)
+{
+ SYSTEM_INFO system_info;
+ system_info.dwNumberOfProcessors = 0;
+
+ GetSystemInfo(&system_info);
+ if (system_info.dwNumberOfProcessors == 0)
+ Py_RETURN_NONE; // mimic os.cpu_count()
+ else
+ return Py_BuildValue("I", system_info.dwNumberOfProcessors);
+}
+
+
+/*
+ * Return the number of physical CPU cores.
+ */
+static PyObject *
+psutil_cpu_count_phys(PyObject *self, PyObject *args)
+{
+ LPFN_GLPI glpi;
+ DWORD rc;
+ PSYSTEM_LOGICAL_PROCESSOR_INFORMATION buffer = NULL;
+ PSYSTEM_LOGICAL_PROCESSOR_INFORMATION ptr = NULL;
+ DWORD length = 0;
+ DWORD offset = 0;
+ int ncpus = 0;
+
+ glpi = (LPFN_GLPI)GetProcAddress(GetModuleHandle(TEXT("kernel32")),
+ "GetLogicalProcessorInformation");
+ if (glpi == NULL)
+ goto return_none;
+
+ while (1) {
+ rc = glpi(buffer, &length);
+ if (rc == FALSE) {
+ if (GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+ if (buffer)
+ free(buffer);
+ buffer = (PSYSTEM_LOGICAL_PROCESSOR_INFORMATION)malloc(
+ length);
+ if (NULL == buffer) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ }
+ else {
+ goto return_none;
+ }
+ }
+ else {
+ break;
+ }
+ }
+
+ ptr = buffer;
+ while (offset + sizeof(SYSTEM_LOGICAL_PROCESSOR_INFORMATION) <= length) {
+ if (ptr->Relationship == RelationProcessorCore)
+ ncpus += 1;
+ offset += sizeof(SYSTEM_LOGICAL_PROCESSOR_INFORMATION);
+ ptr++;
+ }
+
+ free(buffer);
+ if (ncpus == 0)
+ goto return_none;
+ else
+ return Py_BuildValue("i", ncpus);
+
+return_none:
+ // mimic os.cpu_count()
+ if (buffer != NULL)
+ free(buffer);
+ Py_RETURN_NONE;
+}
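+
+/*
+ * The loop above follows the usual Win32 two-call pattern: call the API
+ * with a NULL buffer to learn the required size (signalled via
+ * ERROR_INSUFFICIENT_BUFFER), allocate that much memory, then call
+ * again with the properly sized buffer.
+ */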
+
+
+/*
+ * Return process cmdline as a Python list of cmdline arguments.
+ */
+static PyObject *
+psutil_proc_cmdline(PyObject *self, PyObject *args) {
+ long pid;
+ int pid_return;
+ PyObject *arglist;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if ((pid == 0) || (pid == 4))
+ return Py_BuildValue("[]");
+
+ pid_return = psutil_pid_is_running(pid);
+ if (pid_return == 0)
+ return NoSuchProcess();
+ if (pid_return == -1)
+ return NULL;
+
+ // XXX the assumption below probably needs to go away
+
+ // Any of several ReadProcessMemory calls etc. may fail without
+ // indicating a real problem, so we ignore errors and just live
+ // without a command line.
+ arglist = psutil_get_arg_list(pid);
+ if ( NULL == arglist ) {
+ // carry on anyway, clear any exceptions too
+ PyErr_Clear();
+ return Py_BuildValue("[]");
+ }
+
+ return arglist;
+}
+
+
+/*
+ * Return process executable path.
+ */
+static PyObject *
+psutil_proc_exe(PyObject *self, PyObject *args) {
+ long pid;
+ HANDLE hProcess;
+ wchar_t exe[MAX_PATH];
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ hProcess = psutil_handle_from_pid_waccess(pid, PROCESS_QUERY_INFORMATION);
+ if (NULL == hProcess)
+ return NULL;
+ if (GetProcessImageFileNameW(hProcess, exe, MAX_PATH) == 0) {
+ CloseHandle(hProcess);
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ CloseHandle(hProcess);
+ return Py_BuildValue("u", exe);
+}
+
+
+/*
+ * Return process base name.
+ * Note: psutil_proc_exe() is attempted first because it's faster,
+ * but it raises AccessDenied for processes owned by other users,
+ * in which case we fall back on this.
+ */
+static PyObject *
+psutil_proc_name(PyObject *self, PyObject *args) {
+ long pid;
+ int ok;
+ PROCESSENTRY32 pentry;
+ HANDLE hSnapShot;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ hSnapShot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, pid);
+ if (hSnapShot == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ pentry.dwSize = sizeof(PROCESSENTRY32);
+ ok = Process32First(hSnapShot, &pentry);
+ if (! ok) {
+ CloseHandle(hSnapShot);
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ while (ok) {
+ if (pentry.th32ProcessID == pid) {
+ CloseHandle(hSnapShot);
+ return Py_BuildValue("s", pentry.szExeFile);
+ }
+ ok = Process32Next(hSnapShot, &pentry);
+ }
+
+ CloseHandle(hSnapShot);
+ NoSuchProcess();
+ return NULL;
+}
+
+
+/*
+ * Return process memory information as a Python tuple.
+ */
+static PyObject *
+psutil_proc_memory_info(PyObject *self, PyObject *args)
+{
+ HANDLE hProcess;
+ DWORD pid;
+#if (_WIN32_WINNT >= 0x0501) // Windows XP with SP2
+ PROCESS_MEMORY_COUNTERS_EX cnt;
+#else
+ PROCESS_MEMORY_COUNTERS cnt;
+#endif
+ SIZE_T private = 0;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ hProcess = psutil_handle_from_pid(pid);
+ if (NULL == hProcess)
+ return NULL;
+
+ if (! GetProcessMemoryInfo(hProcess, (PPROCESS_MEMORY_COUNTERS)&cnt,
+ sizeof(cnt))) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(0);
+ }
+
+#if (_WIN32_WINNT >= 0x0501) // Windows XP with SP2
+ private = cnt.PrivateUsage;
+#endif
+
+ CloseHandle(hProcess);
+
+ // PROCESS_MEMORY_COUNTERS values are defined as SIZE_T which on 64bits
+ // is an (unsigned long long) and on 32bits is an (unsigned int).
+ // "_WIN64" is defined if we're running a 64bit Python interpreter not
+ // exclusively if the *system* is 64bit.
+#if defined(_WIN64)
+ return Py_BuildValue(
+ "(kKKKKKKKKK)",
+ cnt.PageFaultCount, // unsigned long
+ (unsigned long long)cnt.PeakWorkingSetSize,
+ (unsigned long long)cnt.WorkingSetSize,
+ (unsigned long long)cnt.QuotaPeakPagedPoolUsage,
+ (unsigned long long)cnt.QuotaPagedPoolUsage,
+ (unsigned long long)cnt.QuotaPeakNonPagedPoolUsage,
+ (unsigned long long)cnt.QuotaNonPagedPoolUsage,
+ (unsigned long long)cnt.PagefileUsage,
+ (unsigned long long)cnt.PeakPagefileUsage,
+ (unsigned long long)private);
+#else
+ return Py_BuildValue(
+ "(kIIIIIIIII)",
+ cnt.PageFaultCount, // unsigned long
+ (unsigned int)cnt.PeakWorkingSetSize,
+ (unsigned int)cnt.WorkingSetSize,
+ (unsigned int)cnt.QuotaPeakPagedPoolUsage,
+ (unsigned int)cnt.QuotaPagedPoolUsage,
+ (unsigned int)cnt.QuotaPeakNonPagedPoolUsage,
+ (unsigned int)cnt.QuotaNonPagedPoolUsage,
+ (unsigned int)cnt.PagefileUsage,
+ (unsigned int)cnt.PeakPagefileUsage,
+ (unsigned int)private);
+#endif
+}
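+
+/*
+ * Py_BuildValue() format characters used above: "k" builds an unsigned
+ * long, "K" an unsigned long long and "I" an unsigned int, matching
+ * SIZE_T's width on 64-bit and 32-bit builds respectively.
+ */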
+
+
+/*
+ * Alternative implementation of the function above; it avoids ACCESS
+ * DENIED errors by reading the data out of the system process list.
+ */
+static PyObject *
+psutil_proc_memory_info_2(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ PSYSTEM_PROCESS_INFORMATION process;
+ PVOID buffer;
+ SIZE_T private;
+ unsigned long pfault_count;
+
+#if defined(_WIN64)
+ unsigned long long m1, m2, m3, m4, m5, m6, m7, m8;
+#else
+ unsigned int m1, m2, m3, m4, m5, m6, m7, m8;
+#endif
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_get_proc_info(pid, &process, &buffer))
+ return NULL;
+
+#if (_WIN32_WINNT >= 0x0501) // Windows XP with SP2
+ private = process->PrivatePageCount;
+#else
+ private = 0;
+#endif
+ pfault_count = process->PageFaultCount;
+
+ m1 = process->PeakWorkingSetSize;
+ m2 = process->WorkingSetSize;
+ m3 = process->QuotaPeakPagedPoolUsage;
+ m4 = process->QuotaPagedPoolUsage;
+ m5 = process->QuotaPeakNonPagedPoolUsage;
+ m6 = process->QuotaNonPagedPoolUsage;
+ m7 = process->PagefileUsage;
+ m8 = process->PeakPagefileUsage;
+
+ free(buffer);
+
+ // SYSTEM_PROCESS_INFORMATION values are defined as SIZE_T which on 64
+ // bits is an (unsigned long long) and on 32bits is an (unsigned int).
+ // "_WIN64" is defined if we're running a 64bit Python interpreter not
+ // exclusively if the *system* is 64bit.
+#if defined(_WIN64)
+ return Py_BuildValue("(kKKKKKKKKK)",
+#else
+ return Py_BuildValue("(kIIIIIIIII)",
+#endif
+ pfault_count, m1, m2, m3, m4, m5, m6, m7, m8, private);
+}
+
+
+/*
+ * Return a Python integer indicating the total amount of physical memory
+ * in bytes.
+ */
+static PyObject *
+psutil_virtual_mem(PyObject *self, PyObject *args)
+{
+ MEMORYSTATUSEX memInfo;
+ memInfo.dwLength = sizeof(MEMORYSTATUSEX);
+
+ if (! GlobalMemoryStatusEx(&memInfo))
+ return PyErr_SetFromWindowsErr(0);
+ return Py_BuildValue("(LLLLLL)",
+ memInfo.ullTotalPhys, // total
+ memInfo.ullAvailPhys, // avail
+ memInfo.ullTotalPageFile, // total page file
+ memInfo.ullAvailPageFile, // avail page file
+ memInfo.ullTotalVirtual, // total virtual
+ memInfo.ullAvailVirtual); // avail virtual
+}
+
+
+/*
+ * Retrieves system CPU timing information as a (user, system, idle)
+ * tuple. On a multiprocessor system, the values returned are the
+ * sum of the designated times across all processors.
+ */
+static PyObject *
+psutil_cpu_times(PyObject *self, PyObject *args)
+{
+ float idle, kernel, user, system;
+ FILETIME idle_time, kernel_time, user_time;
+
+ if (!GetSystemTimes(&idle_time, &kernel_time, &user_time))
+ return PyErr_SetFromWindowsErr(0);
+
+ idle = (float)((HI_T * idle_time.dwHighDateTime) + \
+ (LO_T * idle_time.dwLowDateTime));
+ user = (float)((HI_T * user_time.dwHighDateTime) + \
+ (LO_T * user_time.dwLowDateTime));
+ kernel = (float)((HI_T * kernel_time.dwHighDateTime) + \
+ (LO_T * kernel_time.dwLowDateTime));
+
+ // Kernel time includes idle time.
+ // We return only busy kernel time subtracting idle time from
+ // kernel time.
+ system = (kernel - idle);
+ return Py_BuildValue("(fff)", user, system, idle);
+}
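+
+/*
+ * HI_T and LO_T (assumed to be defined earlier in this file) are the
+ * seconds-per-unit factors for the two halves of a 100-nanosecond
+ * counter: LO_T = 1e-7 (one 100 ns tick) and HI_T = 2^32 * 1e-7 =
+ * 429.4967296 (one tick of the high DWORD).
+ */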
+
+
+/*
+ * Same as above but for all system CPUs.
+ */
+static PyObject *
+psutil_per_cpu_times(PyObject *self, PyObject *args)
+{
+ float idle, kernel, user;
+ typedef DWORD (_stdcall * NTQSI_PROC) (int, PVOID, ULONG, PULONG);
+ NTQSI_PROC NtQuerySystemInformation;
+ HINSTANCE hNtDll;
+ SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION *sppi = NULL;
+ SYSTEM_INFO si;
+ UINT i;
+ PyObject *arg = NULL;
+ PyObject *retlist = PyList_New(0);
+
+ if (retlist == NULL)
+ return NULL;
+
+ // dynamic linking is mandatory to use NtQuerySystemInformation
+ hNtDll = LoadLibrary(TEXT("ntdll.dll"));
+ if (hNtDll != NULL) {
+ // gets NtQuerySystemInformation address
+ NtQuerySystemInformation = (NTQSI_PROC)GetProcAddress(
+ hNtDll, "NtQuerySystemInformation");
+
+ if (NtQuerySystemInformation != NULL)
+ {
+ // retrieves the number of processors
+ GetSystemInfo(&si);
+
+ // allocates an array of SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION
+ // structures, one per processor
+ sppi = (SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION *) \
+ malloc(si.dwNumberOfProcessors * \
+ sizeof(SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION));
+ if (sppi != NULL)
+ {
+ // gets CPU time information
+ if (0 == NtQuerySystemInformation(
+ SystemProcessorPerformanceInformation,
+ sppi,
+ si.dwNumberOfProcessors * sizeof
+ (SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION),
+ NULL)
+ )
+ {
+ // computes system global times summing each
+ // processor value
+ idle = user = kernel = 0;
+ for (i = 0; i < si.dwNumberOfProcessors; i++) {
+ arg = NULL;
+ user = (float)((HI_T * sppi[i].UserTime.HighPart) +
+ (LO_T * sppi[i].UserTime.LowPart));
+ idle = (float)((HI_T * sppi[i].IdleTime.HighPart) +
+ (LO_T * sppi[i].IdleTime.LowPart));
+ kernel = (float)((HI_T * sppi[i].KernelTime.HighPart) +
+ (LO_T * sppi[i].KernelTime.LowPart));
+ // kernel time includes idle time on windows
+ // we return only busy kernel time subtracting
+ // idle time from kernel time
+ arg = Py_BuildValue("(ddd)",
+ user,
+ kernel - idle,
+ idle);
+ if (!arg)
+ goto error;
+ if (PyList_Append(retlist, arg))
+ goto error;
+ Py_DECREF(arg);
+ }
+ free(sppi);
+ FreeLibrary(hNtDll);
+ return retlist;
+
+ } // END NtQuerySystemInformation
+ } // END malloc SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION
+ } // END GetProcAddress
+ } // END LoadLibrary
+ goto error;
+
+error:
+ Py_XDECREF(arg);
+ Py_DECREF(retlist);
+ if (sppi)
+ free(sppi);
+ if (hNtDll)
+ FreeLibrary(hNtDll);
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+}
+
+
+/*
+ * Return process current working directory as a Python string.
+ */
+
+static PyObject *
+psutil_proc_cwd(PyObject *self, PyObject *args)
+{
+ long pid;
+ HANDLE processHandle = NULL;
+ PVOID pebAddress;
+ PVOID rtlUserProcParamsAddress;
+ UNICODE_STRING currentDirectory;
+ WCHAR *currentDirectoryContent = NULL;
+ PyObject *returnPyObj = NULL;
+ PyObject *cwd_from_wchar = NULL;
+ PyObject *cwd = NULL;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ processHandle = psutil_handle_from_pid(pid);
+ if (processHandle == NULL)
+ return NULL;
+
+ pebAddress = psutil_get_peb_address(processHandle);
+
+ // get the address of ProcessParameters
+#ifdef _WIN64
+ if (!ReadProcessMemory(processHandle, (PCHAR)pebAddress + 32,
+ &rtlUserProcParamsAddress, sizeof(PVOID), NULL))
+#else
+ if (!ReadProcessMemory(processHandle, (PCHAR)pebAddress + 0x10,
+ &rtlUserProcParamsAddress, sizeof(PVOID), NULL))
+#endif
+ {
+ CloseHandle(processHandle);
+ if (GetLastError() == ERROR_PARTIAL_COPY) {
+ // this occurs quite often with system processes
+ return AccessDenied();
+ }
+ else {
+ return PyErr_SetFromWindowsErr(0);
+ }
+ }
+
+ // Read the currentDirectory UNICODE_STRING structure.
+ // 0x24 refers to "CurrentDirectoryPath" of RTL_USER_PROCESS_PARAMETERS
+ // structure, see:
+ // http://wj32.wordpress.com/2009/01/24/
+ // howto-get-the-command-line-of-processes/
+#ifdef _WIN64
+ if (!ReadProcessMemory(processHandle, (PCHAR)rtlUserProcParamsAddress + 56,
+ &currentDirectory, sizeof(currentDirectory), NULL))
+#else
+ if (!ReadProcessMemory(processHandle,
+ (PCHAR)rtlUserProcParamsAddress + 0x24,
+ &currentDirectory, sizeof(currentDirectory), NULL))
+#endif
+ {
+ CloseHandle(processHandle);
+ if (GetLastError() == ERROR_PARTIAL_COPY) {
+ // this occurs quite often with system processes
+ return AccessDenied();
+ }
+ else {
+ return PyErr_SetFromWindowsErr(0);
+ }
+ }
+
+ // allocate memory to hold cwd
+ currentDirectoryContent = (WCHAR *)malloc(currentDirectory.Length + 1);
+ if (currentDirectoryContent == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ // read cwd
+ if (!ReadProcessMemory(processHandle, currentDirectory.Buffer,
+ currentDirectoryContent, currentDirectory.Length,
+ NULL))
+ {
+ if (GetLastError() == ERROR_PARTIAL_COPY) {
+ // this occurs quite often with system processes
+ AccessDenied();
+ }
+ else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ goto error;
+ }
+
+ // null-terminate the string to prevent wcslen from returning an
+ // incorrect length; the length specifier is in characters, but
+ // currentDirectory.Length is in bytes
+ currentDirectoryContent[(currentDirectory.Length / sizeof(WCHAR))] = '\0';
+
+ // convert wchar array to a Python unicode string, and then to UTF8
+ cwd_from_wchar = PyUnicode_FromWideChar(currentDirectoryContent,
+ wcslen(currentDirectoryContent));
+ if (cwd_from_wchar == NULL)
+ goto error;
+
+#if PY_MAJOR_VERSION >= 3
+ cwd = PyUnicode_FromObject(cwd_from_wchar);
+#else
+ cwd = PyUnicode_AsUTF8String(cwd_from_wchar);
+#endif
+ if (cwd == NULL)
+ goto error;
+
+ // decrement the reference count on our temp unicode str to avoid
+ // mem leak
+ returnPyObj = Py_BuildValue("N", cwd);
+ if (!returnPyObj)
+ goto error;
+
+ Py_DECREF(cwd_from_wchar);
+
+ CloseHandle(processHandle);
+ free(currentDirectoryContent);
+ return returnPyObj;
+
+error:
+ Py_XDECREF(cwd_from_wchar);
+ Py_XDECREF(cwd);
+ Py_XDECREF(returnPyObj);
+ if (currentDirectoryContent != NULL)
+ free(currentDirectoryContent);
+ if (processHandle != NULL)
+ CloseHandle(processHandle);
+ return NULL;
+}
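+
+/*
+ * The raw offsets used above exist because the PEB and
+ * RTL_USER_PROCESS_PARAMETERS layouts are not part of the public
+ * headers, so the fields are read at fixed offsets:
+ *   PEB->ProcessParameters:        0x10 (32-bit) / 32 (64-bit)
+ *   Params->CurrentDirectoryPath:  0x24 (32-bit) / 56 (64-bit)
+ */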
+
+
+/*
+ * Suspend or resume a process.
+ */
+int
+psutil_proc_suspend_or_resume(DWORD pid, int suspend)
+{
+ // a huge thanks to http://www.codeproject.com/KB/threads/pausep.aspx
+ HANDLE hThreadSnap = NULL;
+ THREADENTRY32 te32 = {0};
+
+ if (pid == 0) {
+ AccessDenied();
+ return FALSE;
+ }
+
+ hThreadSnap = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0);
+ if (hThreadSnap == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErr(0);
+ return FALSE;
+ }
+
+ // Fill in the size of the structure before using it
+ te32.dwSize = sizeof(THREADENTRY32);
+
+ if (! Thread32First(hThreadSnap, &te32)) {
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hThreadSnap);
+ return FALSE;
+ }
+
+ // Walk the thread snapshot to find all threads of the process.
+ // If the thread belongs to the process, add its information
+ // to the display list.
+ do
+ {
+ if (te32.th32OwnerProcessID == pid)
+ {
+ HANDLE hThread = OpenThread(THREAD_SUSPEND_RESUME, FALSE,
+ te32.th32ThreadID);
+ if (hThread == NULL) {
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hThreadSnap);
+ return FALSE;
+ }
+ if (suspend == 1)
+ {
+ if (SuspendThread(hThread) == (DWORD) - 1) {
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hThread);
+ CloseHandle(hThreadSnap);
+ return FALSE;
+ }
+ }
+ else
+ {
+ if (ResumeThread(hThread) == (DWORD) - 1) {
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hThread);
+ CloseHandle(hThreadSnap);
+ return FALSE;
+ }
+ }
+ CloseHandle(hThread);
+ }
+ } while (Thread32Next(hThreadSnap, &te32));
+
+ CloseHandle(hThreadSnap);
+ return TRUE;
+}
+
+
+static PyObject *
+psutil_proc_suspend(PyObject *self, PyObject *args)
+{
+ long pid;
+ int suspend = 1;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_suspend_or_resume(pid, suspend))
+ return NULL;
+ Py_RETURN_NONE;
+}
+
+
+static PyObject *
+psutil_proc_resume(PyObject *self, PyObject *args)
+{
+ long pid;
+ int suspend = 0;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_proc_suspend_or_resume(pid, suspend))
+ return NULL;
+ Py_RETURN_NONE;
+}
+
+
+static PyObject *
+psutil_proc_threads(PyObject *self, PyObject *args)
+{
+ HANDLE hThread = NULL;
+ THREADENTRY32 te32 = {0};
+ long pid;
+ int pid_return;
+ int rc;
+ FILETIME ftDummy, ftKernel, ftUser;
+ PyObject *retList = PyList_New(0);
+ PyObject *pyTuple = NULL;
+ HANDLE hThreadSnap = NULL;
+
+ if (retList == NULL)
+ return NULL;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ goto error;
+ if (pid == 0) {
+ // raise AccessDenied instead of returning 0, as procexp is able
+ // to retrieve useful information somehow
+ AccessDenied();
+ goto error;
+ }
+
+ pid_return = psutil_pid_is_running(pid);
+ if (pid_return == 0) {
+ NoSuchProcess();
+ goto error;
+ }
+ if (pid_return == -1)
+ goto error;
+
+ hThreadSnap = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0);
+ if (hThreadSnap == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ // Fill in the size of the structure before using it
+ te32.dwSize = sizeof(THREADENTRY32);
+
+ if (! Thread32First(hThreadSnap, &te32)) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ // Walk the thread snapshot to find all threads of the process.
+ // If the thread belongs to the process, increase the counter.
+ do {
+ if (te32.th32OwnerProcessID == pid) {
+ pyTuple = NULL;
+ hThread = NULL;
+ hThread = OpenThread(THREAD_QUERY_INFORMATION,
+ FALSE, te32.th32ThreadID);
+ if (hThread == NULL) {
+ // thread has disappeared on us
+ continue;
+ }
+
+ rc = GetThreadTimes(hThread, &ftDummy, &ftDummy, &ftKernel,
+ &ftUser);
+ if (rc == 0) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ /*
+ * User and kernel times are represented as a FILETIME structure
+ * which contains a 64-bit value representing the number of
+ * 100-nanosecond intervals since January 1, 1601 (UTC):
+ * http://msdn.microsoft.com/en-us/library/ms724284(VS.85).aspx
+ * To convert it into a float representing the seconds that the
+ * process has executed in user/kernel mode I borrowed the code
+ * below from Python's Modules/posixmodule.c
+ */
+ pyTuple = Py_BuildValue(
+ "kdd",
+ te32.th32ThreadID,
+ (double)(ftUser.dwHighDateTime * 429.4967296 + \
+ ftUser.dwLowDateTime * 1e-7),
+ (double)(ftKernel.dwHighDateTime * 429.4967296 + \
+ ftKernel.dwLowDateTime * 1e-7));
+ if (!pyTuple)
+ goto error;
+ if (PyList_Append(retList, pyTuple))
+ goto error;
+ Py_DECREF(pyTuple);
+
+ CloseHandle(hThread);
+ }
+ } while (Thread32Next(hThreadSnap, &te32));
+
+ CloseHandle(hThreadSnap);
+ return retList;
+
+error:
+ Py_XDECREF(pyTuple);
+ Py_DECREF(retList);
+ if (hThread != NULL)
+ CloseHandle(hThread);
+ if (hThreadSnap != NULL)
+ CloseHandle(hThreadSnap);
+ return NULL;
+}
+
+
+static PyObject *
+psutil_proc_open_files(PyObject *self, PyObject *args)
+{
+ long pid;
+ HANDLE processHandle;
+ DWORD access = PROCESS_DUP_HANDLE | PROCESS_QUERY_INFORMATION;
+ PyObject *filesList;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ processHandle = psutil_handle_from_pid_waccess(pid, access);
+ if (processHandle == NULL)
+ return NULL;
+ filesList = psutil_get_open_files(pid, processHandle);
+ CloseHandle(processHandle);
+ if (filesList == NULL)
+ return PyErr_SetFromWindowsErr(0);
+ return filesList;
+}
+
+
+/*
+ Accept a filename's drive in native format like "\Device\HarddiskVolume1\"
+ and return the corresponding drive letter (e.g. "C:").
+ If no match is found return an empty string.
+*/
+static PyObject *
+psutil_win32_QueryDosDevice(PyObject *self, PyObject *args)
+{
+ LPCTSTR lpDevicePath;
+ TCHAR d = TEXT('A');
+ TCHAR szBuff[5];
+
+ if (!PyArg_ParseTuple(args, "s", &lpDevicePath))
+ return NULL;
+
+ while (d <= TEXT('Z')) {
+ TCHAR szDeviceName[3] = {d, TEXT(':'), TEXT('\0')};
+ TCHAR szTarget[512] = {0};
+ if (QueryDosDevice(szDeviceName, szTarget, 511) != 0) {
+ if (_tcscmp(lpDevicePath, szTarget) == 0) {
+ _stprintf_s(szBuff, _countof(szBuff), TEXT("%c:"), d);
+ return Py_BuildValue("s", szBuff);
+ }
+ }
+ d++;
+ }
+ return Py_BuildValue("s", "");
+}
+
+
+/*
+ * Return process username as a "DOMAIN//USERNAME" string.
+ */
+static PyObject *
+psutil_proc_username(PyObject *self, PyObject *args)
+{
+ long pid;
+ HANDLE processHandle;
+ HANDLE tokenHandle;
+ PTOKEN_USER user;
+ ULONG bufferSize;
+ PTSTR name;
+ ULONG nameSize;
+ PTSTR domainName;
+ ULONG domainNameSize;
+ SID_NAME_USE nameUse;
+ PTSTR fullName;
+ PyObject *returnObject;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ processHandle = psutil_handle_from_pid_waccess(
+ pid, PROCESS_QUERY_INFORMATION);
+ if (processHandle == NULL)
+ return NULL;
+
+ if (!OpenProcessToken(processHandle, TOKEN_QUERY, &tokenHandle)) {
+ CloseHandle(processHandle);
+ return PyErr_SetFromWindowsErr(0);
+ }
+
+ CloseHandle(processHandle);
+
+ // Get the user SID.
+
+ bufferSize = 0x100;
+ user = malloc(bufferSize);
+ if (user == NULL)
+ return PyErr_NoMemory();
+
+ if (!GetTokenInformation(tokenHandle, TokenUser, user, bufferSize,
+ &bufferSize))
+ {
+ free(user);
+ user = malloc(bufferSize);
+ if (user == NULL) {
+ CloseHandle(tokenHandle);
+ return PyErr_NoMemory();
+ }
+ if (!GetTokenInformation(tokenHandle, TokenUser, user, bufferSize,
+ &bufferSize))
+ {
+ free(user);
+ CloseHandle(tokenHandle);
+ return PyErr_SetFromWindowsErr(0);
+ }
+ }
+
+ CloseHandle(tokenHandle);
+
+ // resolve the SID to a name
+ nameSize = 0x100;
+ domainNameSize = 0x100;
+
+ name = malloc(nameSize * sizeof(TCHAR));
+ if (name == NULL) {
+ free(user);
+ return PyErr_NoMemory();
+ }
+ domainName = malloc(domainNameSize * sizeof(TCHAR));
+ if (domainName == NULL) {
+ free(name);
+ free(user);
+ return PyErr_NoMemory();
+ }
+
+ if (!LookupAccountSid(NULL, user->User.Sid, name, &nameSize, domainName,
+ &domainNameSize, &nameUse))
+ {
+ free(name);
+ free(domainName);
+ name = malloc(nameSize * sizeof(TCHAR));
+ if (name == NULL) {
+ free(user);
+ return PyErr_NoMemory();
+ }
+ domainName = malloc(domainNameSize * sizeof(TCHAR));
+ if (domainName == NULL) {
+ free(name);
+ free(user);
+ return PyErr_NoMemory();
+ }
+ if (!LookupAccountSid(NULL, user->User.Sid, name, &nameSize,
+ domainName, &domainNameSize, &nameUse))
+ {
+ free(name);
+ free(domainName);
+ free(user);
+
+ return PyErr_SetFromWindowsErr(0);
+ }
+ }
+
+ nameSize = _tcslen(name);
+ domainNameSize = _tcslen(domainName);
+
+ // build the full username string
+ fullName = malloc((domainNameSize + 1 + nameSize + 1) * sizeof(TCHAR));
+ if (fullName == NULL) {
+ free(name);
+ free(domainName);
+ free(user);
+ return PyErr_NoMemory();
+ }
+ memcpy(fullName, domainName, domainNameSize);
+ fullName[domainNameSize] = '\\';
+ memcpy(&fullName[domainNameSize + 1], name, nameSize);
+ fullName[domainNameSize + 1 + nameSize] = '\0';
+
+ returnObject = PyUnicode_Decode(
+ fullName, _tcslen(fullName), Py_FileSystemDefaultEncoding, "replace");
+
+ free(fullName);
+ free(name);
+ free(domainName);
+ free(user);
+
+ return returnObject;
+}
+
+
+/*
+ * Return a list of network connections opened by a process
+ */
+static PyObject *
+psutil_net_connections(PyObject *self, PyObject *args)
+{
+ static long null_address[4] = { 0, 0, 0, 0 };
+
+ unsigned long pid;
+ PyObject *connectionsList;
+ PyObject *connectionTuple = NULL;
+ PyObject *af_filter = NULL;
+ PyObject *type_filter = NULL;
+
+ PyObject *_AF_INET = PyLong_FromLong((long)AF_INET);
+ PyObject *_AF_INET6 = PyLong_FromLong((long)AF_INET6);
+ PyObject *_SOCK_STREAM = PyLong_FromLong((long)SOCK_STREAM);
+ PyObject *_SOCK_DGRAM = PyLong_FromLong((long)SOCK_DGRAM);
+
+ typedef PSTR (NTAPI * _RtlIpv4AddressToStringA)(struct in_addr *, PSTR);
+ _RtlIpv4AddressToStringA rtlIpv4AddressToStringA;
+ typedef PSTR (NTAPI * _RtlIpv6AddressToStringA)(struct in6_addr *, PSTR);
+ _RtlIpv6AddressToStringA rtlIpv6AddressToStringA;
+ typedef DWORD (WINAPI * _GetExtendedTcpTable)(PVOID, PDWORD, BOOL, ULONG,
+ TCP_TABLE_CLASS, ULONG);
+ _GetExtendedTcpTable getExtendedTcpTable;
+ typedef DWORD (WINAPI * _GetExtendedUdpTable)(PVOID, PDWORD, BOOL, ULONG,
+ UDP_TABLE_CLASS, ULONG);
+ _GetExtendedUdpTable getExtendedUdpTable;
+ PVOID table = NULL;
+ DWORD tableSize;
+ PMIB_TCPTABLE_OWNER_PID tcp4Table;
+ PMIB_UDPTABLE_OWNER_PID udp4Table;
+ PMIB_TCP6TABLE_OWNER_PID tcp6Table;
+ PMIB_UDP6TABLE_OWNER_PID udp6Table;
+ ULONG i;
+ CHAR addressBufferLocal[65];
+ PyObject *addressTupleLocal = NULL;
+ CHAR addressBufferRemote[65];
+ PyObject *addressTupleRemote = NULL;
+
+ if (! PyArg_ParseTuple(args, "lOO", &pid, &af_filter, &type_filter)) {
+ _psutil_conn_decref_objs();
+ return NULL;
+ }
+
+ if (!PySequence_Check(af_filter) || !PySequence_Check(type_filter)) {
+ _psutil_conn_decref_objs();
+ PyErr_SetString(PyExc_TypeError, "arg 2 or 3 is not a sequence");
+ return NULL;
+ }
+
+ if (pid != -1) {
+ if (psutil_pid_is_running(pid) == 0) {
+ _psutil_conn_decref_objs();
+ return NoSuchProcess();
+ }
+ }
+
+ // Import some functions.
+ {
+ HMODULE ntdll;
+ HMODULE iphlpapi;
+
+ ntdll = LoadLibrary(TEXT("ntdll.dll"));
+ rtlIpv4AddressToStringA = (_RtlIpv4AddressToStringA)GetProcAddress(
+ ntdll, "RtlIpv4AddressToStringA");
+ rtlIpv6AddressToStringA = (_RtlIpv6AddressToStringA)GetProcAddress(
+ ntdll, "RtlIpv6AddressToStringA");
+ /* TODO: Check these two function pointers */
+
+ iphlpapi = LoadLibrary(TEXT("iphlpapi.dll"));
+ getExtendedTcpTable = (_GetExtendedTcpTable)GetProcAddress(iphlpapi,
+ "GetExtendedTcpTable");
+ getExtendedUdpTable = (_GetExtendedUdpTable)GetProcAddress(iphlpapi,
+ "GetExtendedUdpTable");
+ FreeLibrary(ntdll);
+ FreeLibrary(iphlpapi);
+ }
+
+ if ((getExtendedTcpTable == NULL) || (getExtendedUdpTable == NULL)) {
+ PyErr_SetString(PyExc_NotImplementedError,
+ "feature not supported on this Windows version");
+ _psutil_conn_decref_objs();
+ return NULL;
+ }
+
+ connectionsList = PyList_New(0);
+ if (connectionsList == NULL) {
+ _psutil_conn_decref_objs();
+ return NULL;
+ }
+
+ // TCP IPv4
+
+ if ((PySequence_Contains(af_filter, _AF_INET) == 1) &&
+ (PySequence_Contains(type_filter, _SOCK_STREAM) == 1))
+ {
+ table = NULL;
+ connectionTuple = NULL;
+ addressTupleLocal = NULL;
+ addressTupleRemote = NULL;
+ tableSize = 0;
+ getExtendedTcpTable(NULL, &tableSize, FALSE, AF_INET,
+ TCP_TABLE_OWNER_PID_ALL, 0);
+
+ table = malloc(tableSize);
+ if (table == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (getExtendedTcpTable(table, &tableSize, FALSE, AF_INET,
+ TCP_TABLE_OWNER_PID_ALL, 0) == 0)
+ {
+ tcp4Table = table;
+
+ for (i = 0; i < tcp4Table->dwNumEntries; i++)
+ {
+ if (pid != -1) {
+ if (tcp4Table->table[i].dwOwningPid != pid) {
+ continue;
+ }
+ }
+
+ if (tcp4Table->table[i].dwLocalAddr != 0 ||
+ tcp4Table->table[i].dwLocalPort != 0)
+ {
+ struct in_addr addr;
+
+ addr.S_un.S_addr = tcp4Table->table[i].dwLocalAddr;
+ rtlIpv4AddressToStringA(&addr, addressBufferLocal);
+ addressTupleLocal = Py_BuildValue(
+ "(si)",
+ addressBufferLocal,
+ BYTESWAP_USHORT(tcp4Table->table[i].dwLocalPort));
+ }
+ else {
+ addressTupleLocal = PyTuple_New(0);
+ }
+
+ if (addressTupleLocal == NULL)
+ goto error;
+
+ // On Windows <= XP, remote addr is filled even if socket
+ // is in LISTEN mode in which case we just ignore it.
+ if ((tcp4Table->table[i].dwRemoteAddr != 0 ||
+ tcp4Table->table[i].dwRemotePort != 0) &&
+ (tcp4Table->table[i].dwState != MIB_TCP_STATE_LISTEN))
+ {
+ struct in_addr addr;
+
+ addr.S_un.S_addr = tcp4Table->table[i].dwRemoteAddr;
+ rtlIpv4AddressToStringA(&addr, addressBufferRemote);
+ addressTupleRemote = Py_BuildValue(
+ "(si)",
+ addressBufferRemote,
+ BYTESWAP_USHORT(tcp4Table->table[i].dwRemotePort));
+ }
+ else
+ {
+ addressTupleRemote = PyTuple_New(0);
+ }
+
+ if (addressTupleRemote == NULL)
+ goto error;
+
+ connectionTuple = Py_BuildValue(
+ "(iiiNNiI)",
+ -1,
+ AF_INET,
+ SOCK_STREAM,
+ addressTupleLocal,
+ addressTupleRemote,
+ tcp4Table->table[i].dwState,
+ tcp4Table->table[i].dwOwningPid);
+ if (!connectionTuple)
+ goto error;
+ if (PyList_Append(connectionsList, connectionTuple))
+ goto error;
+ Py_DECREF(connectionTuple);
+ }
+ }
+
+ free(table);
+ }
+
+ // TCP IPv6
+
+ if ((PySequence_Contains(af_filter, _AF_INET6) == 1) &&
+ (PySequence_Contains(type_filter, _SOCK_STREAM) == 1))
+ {
+ table = NULL;
+ connectionTuple = NULL;
+ addressTupleLocal = NULL;
+ addressTupleRemote = NULL;
+ tableSize = 0;
+ getExtendedTcpTable(NULL, &tableSize, FALSE, AF_INET6,
+ TCP_TABLE_OWNER_PID_ALL, 0);
+
+ table = malloc(tableSize);
+ if (table == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (getExtendedTcpTable(table, &tableSize, FALSE, AF_INET6,
+ TCP_TABLE_OWNER_PID_ALL, 0) == 0)
+ {
+ tcp6Table = table;
+
+ for (i = 0; i < tcp6Table->dwNumEntries; i++)
+ {
+ if (pid != -1) {
+ if (tcp6Table->table[i].dwOwningPid != pid) {
+ continue;
+ }
+ }
+
+ if (memcmp(tcp6Table->table[i].ucLocalAddr, null_address, 16)
+ != 0 || tcp6Table->table[i].dwLocalPort != 0)
+ {
+ struct in6_addr addr;
+
+ memcpy(&addr, tcp6Table->table[i].ucLocalAddr, 16);
+ rtlIpv6AddressToStringA(&addr, addressBufferLocal);
+ addressTupleLocal = Py_BuildValue(
+ "(si)",
+ addressBufferLocal,
+ BYTESWAP_USHORT(tcp6Table->table[i].dwLocalPort));
+ }
+ else
+ {
+ addressTupleLocal = PyTuple_New(0);
+ }
+
+ if (addressTupleLocal == NULL)
+ goto error;
+
+ // On Windows <= XP, remote addr is filled even if socket
+ // is in LISTEN mode in which case we just ignore it.
+ if ((memcmp(tcp6Table->table[i].ucRemoteAddr, null_address, 16)
+ != 0 ||
+ tcp6Table->table[i].dwRemotePort != 0) &&
+ (tcp6Table->table[i].dwState != MIB_TCP_STATE_LISTEN))
+ {
+ struct in6_addr addr;
+
+ memcpy(&addr, tcp6Table->table[i].ucRemoteAddr, 16);
+ rtlIpv6AddressToStringA(&addr, addressBufferRemote);
+ addressTupleRemote = Py_BuildValue(
+ "(si)",
+ addressBufferRemote,
+ BYTESWAP_USHORT(tcp6Table->table[i].dwRemotePort));
+ }
+ else
+ {
+ addressTupleRemote = PyTuple_New(0);
+ }
+
+ if (addressTupleRemote == NULL)
+ goto error;
+
+ connectionTuple = Py_BuildValue(
+ "(iiiNNiI)",
+ -1,
+ AF_INET6,
+ SOCK_STREAM,
+ addressTupleLocal,
+ addressTupleRemote,
+ tcp6Table->table[i].dwState,
+ tcp6Table->table[i].dwOwningPid);
+ if (!connectionTuple)
+ goto error;
+ if (PyList_Append(connectionsList, connectionTuple))
+ goto error;
+ Py_DECREF(connectionTuple);
+ }
+ }
+
+ free(table);
+ }
+
+ // UDP IPv4
+
+ if ((PySequence_Contains(af_filter, _AF_INET) == 1) &&
+ (PySequence_Contains(type_filter, _SOCK_DGRAM) == 1))
+ {
+ table = NULL;
+ connectionTuple = NULL;
+ addressTupleLocal = NULL;
+ addressTupleRemote = NULL;
+ tableSize = 0;
+ getExtendedUdpTable(NULL, &tableSize, FALSE, AF_INET,
+ UDP_TABLE_OWNER_PID, 0);
+
+ table = malloc(tableSize);
+ if (table == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (getExtendedUdpTable(table, &tableSize, FALSE, AF_INET,
+ UDP_TABLE_OWNER_PID, 0) == 0)
+ {
+ udp4Table = table;
+
+ for (i = 0; i < udp4Table->dwNumEntries; i++)
+ {
+ if (pid != -1) {
+ if (udp4Table->table[i].dwOwningPid != pid) {
+ continue;
+ }
+ }
+
+ if (udp4Table->table[i].dwLocalAddr != 0 ||
+ udp4Table->table[i].dwLocalPort != 0)
+ {
+ struct in_addr addr;
+
+ addr.S_un.S_addr = udp4Table->table[i].dwLocalAddr;
+ rtlIpv4AddressToStringA(&addr, addressBufferLocal);
+ addressTupleLocal = Py_BuildValue(
+ "(si)",
+ addressBufferLocal,
+ BYTESWAP_USHORT(udp4Table->table[i].dwLocalPort));
+ }
+ else {
+ addressTupleLocal = PyTuple_New(0);
+ }
+
+ if (addressTupleLocal == NULL)
+ goto error;
+
+ connectionTuple = Py_BuildValue(
+ "(iiiNNiI)",
+ -1,
+ AF_INET,
+ SOCK_DGRAM,
+ addressTupleLocal,
+ PyTuple_New(0),
+ PSUTIL_CONN_NONE,
+ udp4Table->table[i].dwOwningPid);
+ if (!connectionTuple)
+ goto error;
+ if (PyList_Append(connectionsList, connectionTuple))
+ goto error;
+ Py_DECREF(connectionTuple);
+ }
+ }
+
+ free(table);
+ }
+
+ // UDP IPv6
+
+ if ((PySequence_Contains(af_filter, _AF_INET6) == 1) &&
+ (PySequence_Contains(type_filter, _SOCK_DGRAM) == 1))
+ {
+ table = NULL;
+ connectionTuple = NULL;
+ addressTupleLocal = NULL;
+ addressTupleRemote = NULL;
+ tableSize = 0;
+ getExtendedUdpTable(NULL, &tableSize, FALSE,
+ AF_INET6, UDP_TABLE_OWNER_PID, 0);
+
+ table = malloc(tableSize);
+ if (table == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ if (getExtendedUdpTable(table, &tableSize, FALSE, AF_INET6,
+ UDP_TABLE_OWNER_PID, 0) == 0)
+ {
+ udp6Table = table;
+
+ for (i = 0; i < udp6Table->dwNumEntries; i++)
+ {
+ if (pid != -1) {
+ if (udp6Table->table[i].dwOwningPid != pid) {
+ continue;
+ }
+ }
+
+ if (memcmp(udp6Table->table[i].ucLocalAddr, null_address, 16)
+ != 0 || udp6Table->table[i].dwLocalPort != 0)
+ {
+ struct in6_addr addr;
+
+ memcpy(&addr, udp6Table->table[i].ucLocalAddr, 16);
+ rtlIpv6AddressToStringA(&addr, addressBufferLocal);
+ addressTupleLocal = Py_BuildValue(
+ "(si)",
+ addressBufferLocal,
+ BYTESWAP_USHORT(udp6Table->table[i].dwLocalPort));
+ }
+ else {
+ addressTupleLocal = PyTuple_New(0);
+ }
+
+ if (addressTupleLocal == NULL)
+ goto error;
+
+ connectionTuple = Py_BuildValue(
+ "(iiiNNiI)",
+ -1,
+ AF_INET6,
+ SOCK_DGRAM,
+ addressTupleLocal,
+ PyTuple_New(0),
+ PSUTIL_CONN_NONE,
+ udp6Table->table[i].dwOwningPid);
+ if (!connectionTuple)
+ goto error;
+ if (PyList_Append(connectionsList, connectionTuple))
+ goto error;
+ Py_DECREF(connectionTuple);
+ }
+ }
+
+ free(table);
+ }
+
+ _psutil_conn_decref_objs();
+ return connectionsList;
+
+error:
+ _psutil_conn_decref_objs();
+ Py_XDECREF(connectionTuple);
+ Py_XDECREF(addressTupleLocal);
+ Py_XDECREF(addressTupleRemote);
+ Py_DECREF(connectionsList);
+ if (table != NULL)
+ free(table);
+ return NULL;
+}
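+
+/*
+ * Note on BYTESWAP_USHORT() used above (assumed to be defined earlier
+ * in this file): the dwLocalPort/dwRemotePort fields hold the port in
+ * network byte order in their low 16 bits, so on a little-endian
+ * machine port 80 (0x0050) is read back as 0x5000 and must be
+ * byte-swapped before being exposed to Python.
+ */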
+
+
+/*
+ * Get process priority as a Python integer.
+ */
+static PyObject *
+psutil_proc_priority_get(PyObject *self, PyObject *args)
+{
+ long pid;
+ DWORD priority;
+ HANDLE hProcess;
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+
+ priority = GetPriorityClass(hProcess);
+ CloseHandle(hProcess);
+ if (priority == 0) {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ return Py_BuildValue("i", priority);
+}
+
+
+/*
+ * Set process priority.
+ */
+static PyObject *
+psutil_proc_priority_set(PyObject *self, PyObject *args)
+{
+ long pid;
+ int priority;
+ int retval;
+ HANDLE hProcess;
+ DWORD dwDesiredAccess = \
+ PROCESS_QUERY_INFORMATION | PROCESS_SET_INFORMATION;
+ if (! PyArg_ParseTuple(args, "li", &pid, &priority)) {
+ return NULL;
+ }
+
+ hProcess = psutil_handle_from_pid_waccess(pid, dwDesiredAccess);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+
+ retval = SetPriorityClass(hProcess, priority);
+ CloseHandle(hProcess);
+ if (retval == 0) {
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+
+#if (_WIN32_WINNT >= 0x0600) // Windows Vista
+/*
+ * Get process IO priority as a Python integer.
+ */
+static PyObject *
+psutil_proc_io_priority_get(PyObject *self, PyObject *args)
+{
+ long pid;
+ HANDLE hProcess;
+ ULONG IoPriority;
+
+ _NtQueryInformationProcess NtQueryInformationProcess =
+ (_NtQueryInformationProcess)GetProcAddress(
+ GetModuleHandleA("ntdll.dll"), "NtQueryInformationProcess");
+
+ if (NtQueryInformationProcess == NULL) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "couldn't get NtQueryInformationProcess");
+ return NULL;
+ }
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+
+ NtQueryInformationProcess(
+ hProcess,
+ ProcessIoPriority,
+ &IoPriority,
+ sizeof(ULONG),
+ NULL
+ );
+ CloseHandle(hProcess);
+ return Py_BuildValue("i", IoPriority);
+}
+
+
+/*
+ * Set process IO priority.
+ */
+static PyObject *
+psutil_proc_io_priority_set(PyObject *self, PyObject *args)
+{
+ long pid;
+ int prio;
+ HANDLE hProcess;
+
+ _NtSetInformationProcess NtSetInformationProcess =
+ (_NtSetInformationProcess)GetProcAddress(
+ GetModuleHandleA("ntdll.dll"), "NtSetInformationProcess");
+
+ if (NtSetInformationProcess == NULL) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "couldn't get NtSetInformationProcess");
+ return NULL;
+ }
+
+ if (! PyArg_ParseTuple(args, "li", &pid, &prio)) {
+ return NULL;
+ }
+ hProcess = psutil_handle_from_pid_waccess(pid, PROCESS_ALL_ACCESS);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+
+ NtSetInformationProcess(
+ hProcess,
+ ProcessIoPriority,
+ (PVOID)&prio,
+ sizeof(prio)
+ );
+
+ CloseHandle(hProcess);
+ Py_RETURN_NONE;
+}
+#endif
+
+
+/*
+ * Return a Python tuple referencing process I/O counters.
+ */
+static PyObject *
+psutil_proc_io_counters(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ HANDLE hProcess;
+ IO_COUNTERS IoCounters;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ hProcess = psutil_handle_from_pid(pid);
+ if (NULL == hProcess) {
+ return NULL;
+ }
+ if (! GetProcessIoCounters(hProcess, &IoCounters)) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(0);
+ }
+ CloseHandle(hProcess);
+ return Py_BuildValue("(KKKK)",
+ IoCounters.ReadOperationCount,
+ IoCounters.WriteOperationCount,
+ IoCounters.ReadTransferCount,
+ IoCounters.WriteTransferCount);
+}
+
+
+/*
+ * Return process CPU affinity as a bitmask
+ */
+static PyObject *
+psutil_proc_cpu_affinity_get(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ HANDLE hProcess;
+ DWORD_PTR proc_mask;
+ DWORD_PTR system_mask;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+ if (GetProcessAffinityMask(hProcess, &proc_mask, &system_mask) == 0) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(0);
+ }
+
+ CloseHandle(hProcess);
+#ifdef _WIN64
+ return Py_BuildValue("K", (unsigned long long)proc_mask);
+#else
+ return Py_BuildValue("k", (unsigned long)proc_mask);
+#endif
+}
+
+
+/*
+ * Set process CPU affinity
+ */
+static PyObject *
+psutil_proc_cpu_affinity_set(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ HANDLE hProcess;
+ DWORD dwDesiredAccess = \
+ PROCESS_QUERY_INFORMATION | PROCESS_SET_INFORMATION;
+ DWORD_PTR mask;
+
+#ifdef _WIN64
+ if (! PyArg_ParseTuple(args, "lK", &pid, &mask))
+#else
+ if (! PyArg_ParseTuple(args, "lk", &pid, &mask))
+#endif
+ {
+ return NULL;
+ }
+ hProcess = psutil_handle_from_pid_waccess(pid, dwDesiredAccess);
+ if (hProcess == NULL) {
+ return NULL;
+ }
+
+ if (SetProcessAffinityMask(hProcess, mask) == 0) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(0);
+ }
+
+ CloseHandle(hProcess);
+ Py_RETURN_NONE;
+}
+
+
+/*
+ * Return True only if all of the process threads are in a waiting
+ * (suspended) state.
+ */
+static PyObject *
+psutil_proc_is_suspended(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ ULONG i;
+ PSYSTEM_PROCESS_INFORMATION process;
+ PVOID buffer;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_get_proc_info(pid, &process, &buffer)) {
+ return NULL;
+ }
+ for (i = 0; i < process->NumberOfThreads; i++) {
+ if (process->Threads[i].ThreadState != Waiting ||
+ process->Threads[i].WaitReason != Suspended)
+ {
+ free(buffer);
+ Py_RETURN_FALSE;
+ }
+ }
+ free(buffer);
+ Py_RETURN_TRUE;
+}
+
+
+/*
+ * Return path's disk total and free as a Python tuple.
+ */
+static PyObject *
+psutil_disk_usage(PyObject *self, PyObject *args)
+{
+ BOOL retval;
+ ULARGE_INTEGER _, total, free;
+ char *path;
+
+ if (PyArg_ParseTuple(args, "u", &path)) {
+ Py_BEGIN_ALLOW_THREADS
+ retval = GetDiskFreeSpaceExW((LPCWSTR)path, &_, &total, &free);
+ Py_END_ALLOW_THREADS
+ goto return_;
+ }
+
+ // on Python 2 we also want to accept plain byte strings
+ // in addition to Unicode
+#if PY_MAJOR_VERSION <= 2
+ PyErr_Clear(); // drop the argument parsing error
+ if (PyArg_ParseTuple(args, "s", &path)) {
+ Py_BEGIN_ALLOW_THREADS
+ retval = GetDiskFreeSpaceEx(path, &_, &total, &free);
+ Py_END_ALLOW_THREADS
+ goto return_;
+ }
+#endif
+
+ return NULL;
+
+return_:
+ if (retval == 0)
+ return PyErr_SetFromWindowsErr(0);
+ else
+ return Py_BuildValue("(LL)", total.QuadPart, free.QuadPart);
+}
+
+
+/*
+ * Return a Python list of named tuples with overall network I/O information
+ */
+static PyObject *
+psutil_net_io_counters(PyObject *self, PyObject *args)
+{
+ char ifname[MAX_PATH];
+ DWORD dwRetVal = 0;
+ MIB_IFROW *pIfRow = NULL;
+ PIP_ADAPTER_ADDRESSES pAddresses = NULL;
+ PIP_ADAPTER_ADDRESSES pCurrAddresses = NULL;
+
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_nic_info = NULL;
+ PyObject *py_nic_name = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+ pAddresses = psutil_get_nic_addresses();
+ if (pAddresses == NULL)
+ goto error;
+ pCurrAddresses = pAddresses;
+
+ while (pCurrAddresses) {
+ py_nic_name = NULL;
+ py_nic_info = NULL;
+ pIfRow = (MIB_IFROW *) malloc(sizeof(MIB_IFROW));
+
+ if (pIfRow == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ pIfRow->dwIndex = pCurrAddresses->IfIndex;
+ dwRetVal = GetIfEntry(pIfRow);
+ if (dwRetVal != NO_ERROR) {
+ PyErr_SetString(PyExc_RuntimeError, "GetIfEntry() failed.");
+ goto error;
+ }
+
+ py_nic_info = Py_BuildValue("(kkkkkkkk)",
+ pIfRow->dwOutOctets,
+ pIfRow->dwInOctets,
+ pIfRow->dwOutUcastPkts,
+ pIfRow->dwInUcastPkts,
+ pIfRow->dwInErrors,
+ pIfRow->dwOutErrors,
+ pIfRow->dwInDiscards,
+ pIfRow->dwOutDiscards);
+ if (!py_nic_info)
+ goto error;
+
+ sprintf_s(ifname, MAX_PATH, "%wS", pCurrAddresses->FriendlyName);
+ py_nic_name = PyUnicode_Decode(
+ ifname, _tcslen(ifname), Py_FileSystemDefaultEncoding, "replace");
+
+ if (py_nic_name == NULL)
+ goto error;
+ if (PyDict_SetItem(py_retdict, py_nic_name, py_nic_info))
+ goto error;
+ Py_XDECREF(py_nic_name);
+ Py_XDECREF(py_nic_info);
+
+ free(pIfRow);
+ pCurrAddresses = pCurrAddresses->Next;
+ }
+
+ free(pAddresses);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_nic_name);
+ Py_XDECREF(py_nic_info);
+ Py_DECREF(py_retdict);
+ if (pAddresses != NULL)
+ free(pAddresses);
+ if (pIfRow != NULL)
+ free(pIfRow);
+ return NULL;
+}
+
+
+/*
+ * Return a Python dict of tuples for disk I/O information
+ */
+static PyObject *
+psutil_disk_io_counters(PyObject *self, PyObject *args)
+{
+ DISK_PERFORMANCE_WIN_2008 diskPerformance;
+ DWORD dwSize;
+ HANDLE hDevice = NULL;
+ char szDevice[MAX_PATH];
+ char szDeviceDisplay[MAX_PATH];
+ int devNum;
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_disk_info = NULL;
+ if (py_retdict == NULL) {
+ return NULL;
+ }
+
+ // Apparently there's no way to figure out how many times we have
+ // to iterate in order to find valid drives.
+ // Let's assume 32, which is higher than 26, the number of letters
+ // in the alphabet (from A:\ to Z:\).
+ for (devNum = 0; devNum <= 32; ++devNum) {
+ py_disk_info = NULL;
+ sprintf_s(szDevice, MAX_PATH, "\\\\.\\PhysicalDrive%d", devNum);
+ hDevice = CreateFile(szDevice, 0, FILE_SHARE_READ | FILE_SHARE_WRITE,
+ NULL, OPEN_EXISTING, 0, NULL);
+
+ if (hDevice == INVALID_HANDLE_VALUE) {
+ continue;
+ }
+ if (DeviceIoControl(hDevice, IOCTL_DISK_PERFORMANCE, NULL, 0,
+ &diskPerformance, sizeof(diskPerformance),
+ &dwSize, NULL))
+ {
+ sprintf_s(szDeviceDisplay, MAX_PATH, "PhysicalDrive%d", devNum);
+ py_disk_info = Py_BuildValue(
+ "(IILLKK)",
+ diskPerformance.ReadCount,
+ diskPerformance.WriteCount,
+ diskPerformance.BytesRead,
+ diskPerformance.BytesWritten,
+ (unsigned long long)(diskPerformance.ReadTime.QuadPart * 10) / 1000,
+ (unsigned long long)(diskPerformance.WriteTime.QuadPart * 10) / 1000);
+ if (!py_disk_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, szDeviceDisplay,
+ py_disk_info))
+ {
+ goto error;
+ }
+ Py_XDECREF(py_disk_info);
+ }
+ else {
+ // XXX we might get here with ERROR_INSUFFICIENT_BUFFER when
+ // compiling with mingw32; not sure what to do.
+ // return PyErr_SetFromWindowsErr(0);
+ ;
+ }
+
+ CloseHandle(hDevice);
+ }
+
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_disk_info);
+ Py_DECREF(py_retdict);
+ if (hDevice != NULL)
+ CloseHandle(hDevice);
+ return NULL;
+}
+
+
+static char *psutil_get_drive_type(int type)
+{
+ switch (type) {
+ case DRIVE_FIXED:
+ return "fixed";
+ case DRIVE_CDROM:
+ return "cdrom";
+ case DRIVE_REMOVABLE:
+ return "removable";
+ case DRIVE_UNKNOWN:
+ return "unknown";
+ case DRIVE_NO_ROOT_DIR:
+ return "unmounted";
+ case DRIVE_REMOTE:
+ return "remote";
+ case DRIVE_RAMDISK:
+ return "ramdisk";
+ default:
+ return "?";
+ }
+}
+
+
+#ifndef _ARRAYSIZE
+#define _ARRAYSIZE(a) (sizeof(a)/sizeof(a[0]))
+#endif
+
+/*
+ * Return disk partitions as a list of tuples such as
+ * (drive_letter, drive_letter, type, "")
+ */
+static PyObject *
+psutil_disk_partitions(PyObject *self, PyObject *args)
+{
+ DWORD num_bytes;
+ char drive_strings[255];
+ char *drive_letter = drive_strings;
+ int all;
+ int type;
+ int ret;
+ char opts[20];
+ char fs_type[MAX_PATH + 1] = { 0 };
+ DWORD pflags = 0;
+ PyObject *py_all;
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_retlist == NULL) {
+ return NULL;
+ }
+
+ // avoid displaying a message box in case something goes wrong
+ // see https://github.com/giampaolo/psutil/issues/264
+ SetErrorMode(SEM_FAILCRITICALERRORS);
+
+ if (! PyArg_ParseTuple(args, "O", &py_all)) {
+ goto error;
+ }
+ all = PyObject_IsTrue(py_all);
+
+ Py_BEGIN_ALLOW_THREADS
+ num_bytes = GetLogicalDriveStrings(254, drive_letter);
+ Py_END_ALLOW_THREADS
+
+ if (num_bytes == 0) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ while (*drive_letter != 0) {
+ py_tuple = NULL;
+ opts[0] = 0;
+ fs_type[0] = 0;
+
+ Py_BEGIN_ALLOW_THREADS
+ type = GetDriveType(drive_letter);
+ Py_END_ALLOW_THREADS
+
+ // by default we only show hard drives and cd-roms
+ if (all == 0) {
+ if ((type == DRIVE_UNKNOWN) ||
+ (type == DRIVE_NO_ROOT_DIR) ||
+ (type == DRIVE_REMOTE) ||
+ (type == DRIVE_RAMDISK)) {
+ goto next;
+ }
+ // floppy disk: skip it by default as it introduces a
+ // considerable slowdown.
+ if ((type == DRIVE_REMOVABLE) &&
+ (strcmp(drive_letter, "A:\\") == 0)) {
+ goto next;
+ }
+ }
+
+ ret = GetVolumeInformation(
+ (LPCTSTR)drive_letter, NULL, _ARRAYSIZE(drive_letter),
+ NULL, NULL, &pflags, (LPTSTR)fs_type, _ARRAYSIZE(fs_type));
+ if (ret == 0) {
+ // We might get here in case of a floppy hard drive, in
+ // which case the error is (21, "device not ready").
+ // Let's pretend it didn't happen as we already have
+ // the drive name and type ('removable').
+ strcat_s(opts, _countof(opts), "");
+ SetLastError(0);
+ }
+ else {
+ if (pflags & FILE_READ_ONLY_VOLUME) {
+ strcat_s(opts, _countof(opts), "ro");
+ }
+ else {
+ strcat_s(opts, _countof(opts), "rw");
+ }
+ if (pflags & FILE_VOLUME_IS_COMPRESSED) {
+ strcat_s(opts, _countof(opts), ",compressed");
+ }
+ }
+
+ if (strlen(opts) > 0) {
+ strcat_s(opts, _countof(opts), ",");
+ }
+ strcat_s(opts, _countof(opts), psutil_get_drive_type(type));
+
+ py_tuple = Py_BuildValue(
+ "(ssss)",
+ drive_letter,
+ drive_letter,
+ fs_type, // either FAT, FAT32, NTFS, HPFS, CDFS, UDF or NWFS
+ opts);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+
+next:
+ drive_letter = strchr(drive_letter, 0) + 1;
+ }
+
+ SetErrorMode(0);
+ return py_retlist;
+
+error:
+ SetErrorMode(0);
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_retlist);
+ return NULL;
+}
+
+/*
+ * Return currently connected users as a Python list of tuples.
+ */
+static PyObject *
+psutil_users(PyObject *self, PyObject *args)
+{
+ HANDLE hServer = NULL;
+ LPTSTR buffer_user = NULL;
+ LPTSTR buffer_addr = NULL;
+ PWTS_SESSION_INFO sessions = NULL;
+ DWORD count;
+ DWORD i;
+ DWORD sessionId;
+ DWORD bytes;
+ PWTS_CLIENT_ADDRESS address;
+ char address_str[50];
+ long long unix_time;
+
+ PWINSTATIONQUERYINFORMATIONW WinStationQueryInformationW;
+ WINSTATION_INFO station_info;
+ HINSTANCE hInstWinSta = NULL;
+ ULONG returnLen;
+
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+ PyObject *py_address = NULL;
+ PyObject *py_buffer_user_encoded = NULL;
+
+ if (py_retlist == NULL) {
+ return NULL;
+ }
+
+ hInstWinSta = LoadLibraryA("winsta.dll");
+ WinStationQueryInformationW = (PWINSTATIONQUERYINFORMATIONW) \
+ GetProcAddress(hInstWinSta, "WinStationQueryInformationW");
+
+ hServer = WTSOpenServer('\0');
+ if (hServer == NULL) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ if (WTSEnumerateSessions(hServer, 0, 1, &sessions, &count) == 0) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ for (i = 0; i < count; i++) {
+ py_address = NULL;
+ py_tuple = NULL;
+ sessionId = sessions[i].SessionId;
+ if (buffer_user != NULL) {
+ WTSFreeMemory(buffer_user);
+ }
+ if (buffer_addr != NULL) {
+ WTSFreeMemory(buffer_addr);
+ }
+
+ buffer_user = NULL;
+ buffer_addr = NULL;
+
+ // username
+ bytes = 0;
+ if (WTSQuerySessionInformation(hServer, sessionId, WTSUserName,
+ &buffer_user, &bytes) == 0) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+ if (bytes == 1) {
+ continue;
+ }
+
+ // address
+ bytes = 0;
+ if (WTSQuerySessionInformation(hServer, sessionId, WTSClientAddress,
+ &buffer_addr, &bytes) == 0) {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ address = (PWTS_CLIENT_ADDRESS)buffer_addr;
+ if (address->AddressFamily == 0) { // AF_INET
+ sprintf_s(address_str,
+ _countof(address_str),
+ "%u.%u.%u.%u",
+ address->Address[0],
+ address->Address[1],
+ address->Address[2],
+ address->Address[3]);
+ py_address = Py_BuildValue("s", address_str);
+ if (!py_address)
+ goto error;
+ }
+ else {
+ // take a new reference as it will be released via Py_XDECREF later
+ Py_INCREF(Py_None);
+ py_address = Py_None;
+ }
+
+ // login time
+ if (!WinStationQueryInformationW(hServer,
+ sessionId,
+ WinStationInformation,
+ &station_info,
+ sizeof(station_info),
+ &returnLen))
+ {
+ goto error;
+ }
+
+ unix_time = ((LONGLONG)station_info.ConnectTime.dwHighDateTime) << 32;
+ unix_time += \
+ station_info.ConnectTime.dwLowDateTime - 116444736000000000LL;
+ unix_time /= 10000000;
+
+ py_buffer_user_encoded = PyUnicode_Decode(
+ buffer_user, _tcslen(buffer_user), Py_FileSystemDefaultEncoding,
+ "replace");
+ if (py_buffer_user_encoded == NULL)
+ goto error;
+ py_tuple = Py_BuildValue("OOd", py_buffer_user_encoded, py_address,
+ (double)unix_time);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_XDECREF(py_buffer_user_encoded);
+ Py_XDECREF(py_address);
+ Py_XDECREF(py_tuple);
+ }
+
+ WTSCloseServer(hServer);
+ WTSFreeMemory(sessions);
+ WTSFreeMemory(buffer_user);
+ WTSFreeMemory(buffer_addr);
+ FreeLibrary(hInstWinSta);
+ return py_retlist;
+
+error:
+ Py_XDECREF(py_buffer_user_encoded);
+ Py_XDECREF(py_tuple);
+ Py_XDECREF(py_address);
+ Py_DECREF(py_retlist);
+
+ if (hInstWinSta != NULL) {
+ FreeLibrary(hInstWinSta);
+ }
+ if (hServer != NULL) {
+ WTSCloseServer(hServer);
+ }
+ if (sessions != NULL) {
+ WTSFreeMemory(sessions);
+ }
+ if (buffer_user != NULL) {
+ WTSFreeMemory(buffer_user);
+ }
+ if (buffer_addr != NULL) {
+ WTSFreeMemory(buffer_addr);
+ }
+ return NULL;
+}
+
+
+/*
+ * Return the number of handles opened by process.
+ */
+static PyObject *
+psutil_proc_num_handles(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ HANDLE hProcess;
+ DWORD handleCount;
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ hProcess = psutil_handle_from_pid(pid);
+ if (NULL == hProcess) {
+ return NULL;
+ }
+ if (! GetProcessHandleCount(hProcess, &handleCount)) {
+ CloseHandle(hProcess);
+ return PyErr_SetFromWindowsErr(0);
+ }
+ CloseHandle(hProcess);
+ return Py_BuildValue("k", handleCount);
+}
+
+
+/*
+ * Get various process information by using NtQuerySystemInformation.
+ * We use this as a fallback when faster functions fail with access
+ * denied. This is slower because it iterates over all processes.
+ * Returned tuple includes the following process info:
+ *
+ * - num_threads
+ * - ctx_switches
+ * - num_handles (fallback)
+ * - user/kernel times (fallback)
+ * - create time (fallback)
+ * - io counters (fallback)
+ */
+static PyObject *
+psutil_proc_info(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ PSYSTEM_PROCESS_INFORMATION process;
+ PVOID buffer;
+ ULONG num_handles;
+ ULONG i;
+ ULONG ctx_switches = 0;
+ double user_time;
+ double kernel_time;
+ long long create_time;
+ int num_threads;
+ LONGLONG io_rcount, io_wcount, io_rbytes, io_wbytes;
+
+
+ if (! PyArg_ParseTuple(args, "l", &pid))
+ return NULL;
+ if (! psutil_get_proc_info(pid, &process, &buffer))
+ return NULL;
+
+ num_handles = process->HandleCount;
+ for (i = 0; i < process->NumberOfThreads; i++)
+ ctx_switches += process->Threads[i].ContextSwitches;
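+ // UserTime/KernelTime are in 100ns units; each HighPart unit is
+ // 2^32 * 100ns == 429.4967296 seconds.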
+ user_time = (double)process->UserTime.HighPart * 429.4967296 + \
+ (double)process->UserTime.LowPart * 1e-7;
+ kernel_time = (double)process->KernelTime.HighPart * 429.4967296 + \
+ (double)process->KernelTime.LowPart * 1e-7;
+ // Convert the FILETIME-format LARGE_INTEGER to a Unix timestamp.
+ // The time returned has a precision of 1 second.
+ if (0 == pid || 4 == pid) {
+ // the python module will translate this into BOOT_TIME later
+ create_time = 0;
+ }
+ else {
+ create_time = ((LONGLONG)process->CreateTime.HighPart) << 32;
+ create_time += process->CreateTime.LowPart - 116444736000000000LL;
+ create_time /= 10000000;
+ }
+ num_threads = (int)process->NumberOfThreads;
+ io_rcount = process->ReadOperationCount.QuadPart;
+ io_wcount = process->WriteOperationCount.QuadPart;
+ io_rbytes = process->ReadTransferCount.QuadPart;
+ io_wbytes = process->WriteTransferCount.QuadPart;
+ free(buffer);
+
+ return Py_BuildValue(
+ "kkdddiKKKK",
+ num_handles,
+ ctx_switches,
+ user_time,
+ kernel_time,
+ (double)create_time,
+ num_threads,
+ io_rcount,
+ io_wcount,
+ io_rbytes,
+ io_wbytes
+ );
+}
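+
+
+/*
+ * Editorial sketch (not part of psutil): the FILETIME -> Unix epoch
+ * conversion used in psutil_proc_info() above, isolated.
+ * 116444736000000000 is the number of 100ns intervals between the
+ * FILETIME epoch (1601-01-01) and the Unix epoch (1970-01-01);
+ * dividing by 10^7 converts 100ns units to seconds.
+ */
+static long long
+psutil_filetime_to_unix_sketch(FILETIME ft)
+{
+ long long t = ((LONGLONG)ft.dwHighDateTime) << 32;
+ t += ft.dwLowDateTime; // full 64-bit count of 100ns intervals
+ t -= 116444736000000000LL; // rebase 1601 -> 1970
+ return t / 10000000; // 100ns units -> seconds
+}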
+
+
+static char *get_region_protection_string(ULONG protection)
+{
+ switch (protection & 0xff) {
+ case PAGE_NOACCESS:
+ return "";
+ case PAGE_READONLY:
+ return "r";
+ case PAGE_READWRITE:
+ return "rw";
+ case PAGE_WRITECOPY:
+ return "wc";
+ case PAGE_EXECUTE:
+ return "x";
+ case PAGE_EXECUTE_READ:
+ return "xr";
+ case PAGE_EXECUTE_READWRITE:
+ return "xrw";
+ case PAGE_EXECUTE_WRITECOPY:
+ return "xwc";
+ default:
+ return "?";
+ }
+}
+
+
+/*
+ * Return a list of process's memory mappings.
+ */
+static PyObject *
+psutil_proc_memory_maps(PyObject *self, PyObject *args)
+{
+ DWORD pid;
+ HANDLE hProcess = NULL;
+ MEMORY_BASIC_INFORMATION basicInfo;
+ PVOID baseAddress;
+ PVOID previousAllocationBase;
+ CHAR mappedFileName[MAX_PATH];
+ SYSTEM_INFO system_info;
+ LPVOID maxAddr;
+ PyObject *py_list = PyList_New(0);
+ PyObject *py_tuple = NULL;
+
+ if (py_list == NULL) {
+ return NULL;
+ }
+ if (! PyArg_ParseTuple(args, "l", &pid)) {
+ goto error;
+ }
+ hProcess = psutil_handle_from_pid(pid);
+ if (NULL == hProcess) {
+ goto error;
+ }
+
+ GetSystemInfo(&system_info);
+ maxAddr = system_info.lpMaximumApplicationAddress;
+ baseAddress = NULL;
+ previousAllocationBase = NULL;
+
+ while (VirtualQueryEx(hProcess, baseAddress, &basicInfo,
+ sizeof(MEMORY_BASIC_INFORMATION)))
+ {
+ py_tuple = NULL;
+ if (baseAddress > maxAddr) {
+ break;
+ }
+ if (GetMappedFileNameA(hProcess, baseAddress, mappedFileName,
+ sizeof(mappedFileName)))
+ {
+ py_tuple = Py_BuildValue(
+ "(kssI)",
+ (unsigned long)baseAddress,
+ get_region_protection_string(basicInfo.Protect),
+ mappedFileName,
+ basicInfo.RegionSize);
+ if (!py_tuple)
+ goto error;
+ if (PyList_Append(py_list, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ }
+ previousAllocationBase = basicInfo.AllocationBase;
+ baseAddress = (PCHAR)baseAddress + basicInfo.RegionSize;
+ }
+
+ CloseHandle(hProcess);
+ return py_list;
+
+error:
+ Py_XDECREF(py_tuple);
+ Py_DECREF(py_list);
+ if (hProcess != NULL)
+ CloseHandle(hProcess);
+ return NULL;
+}
+
+
+/*
+ * Return a {pid:ppid, ...} dict for all running processes.
+ */
+static PyObject *
+psutil_ppid_map(PyObject *self, PyObject *args)
+{
+ PyObject *pid = NULL;
+ PyObject *ppid = NULL;
+ PyObject *py_retdict = PyDict_New();
+ HANDLE handle = NULL;
+ PROCESSENTRY32 pe = {0};
+ pe.dwSize = sizeof(PROCESSENTRY32);
+
+ if (py_retdict == NULL)
+ return NULL;
+ handle = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
+ if (handle == INVALID_HANDLE_VALUE) {
+ PyErr_SetFromWindowsErr(0);
+ Py_DECREF(py_retdict);
+ return NULL;
+ }
+
+ if (Process32First(handle, &pe)) {
+ do {
+ pid = Py_BuildValue("I", pe.th32ProcessID);
+ if (pid == NULL)
+ goto error;
+ ppid = Py_BuildValue("I", pe.th32ParentProcessID);
+ if (ppid == NULL)
+ goto error;
+ if (PyDict_SetItem(py_retdict, pid, ppid))
+ goto error;
+ Py_DECREF(pid);
+ Py_DECREF(ppid);
+ } while (Process32Next(handle, &pe));
+ }
+
+ CloseHandle(handle);
+ return py_retdict;
+
+error:
+ Py_XDECREF(pid);
+ Py_XDECREF(ppid);
+ Py_DECREF(py_retdict);
+ CloseHandle(handle);
+ return NULL;
+}
+
+
+/*
+ * Return NICs addresses.
+ */
+
+static PyObject *
+psutil_net_if_addrs(PyObject *self, PyObject *args)
+{
+ unsigned int i = 0;
+ ULONG family;
+ PCTSTR intRet;
+ char *ptr;
+ char buff[100];
+ char ifname[MAX_PATH];
+ DWORD bufflen = 100;
+ PIP_ADAPTER_ADDRESSES pAddresses = NULL;
+ PIP_ADAPTER_ADDRESSES pCurrAddresses = NULL;
+ PIP_ADAPTER_UNICAST_ADDRESS pUnicast = NULL;
+
+ PyObject *py_retlist = PyList_New(0);
+ PyObject *py_tuple = NULL;
+ PyObject *py_address = NULL;
+ PyObject *py_mac_address = NULL;
+
+ if (py_retlist == NULL)
+ return NULL;
+
+ pAddresses = psutil_get_nic_addresses();
+ if (pAddresses == NULL)
+ goto error;
+ pCurrAddresses = pAddresses;
+
+ while (pCurrAddresses) {
+ pUnicast = pCurrAddresses->FirstUnicastAddress;
+ sprintf_s(ifname, MAX_PATH, "%wS", pCurrAddresses->FriendlyName);
+
+ // MAC address
+ if (pCurrAddresses->PhysicalAddressLength != 0) {
+ ptr = buff;
+ *ptr = '\0';
+ for (i = 0; i < (int) pCurrAddresses->PhysicalAddressLength; i++) {
+ if (i == (pCurrAddresses->PhysicalAddressLength - 1)) {
+ sprintf_s(ptr, _countof(buff), "%.2X\n",
+ (int)pCurrAddresses->PhysicalAddress[i]);
+ }
+ else {
+ sprintf_s(ptr, _countof(buff), "%.2X-",
+ (int)pCurrAddresses->PhysicalAddress[i]);
+ }
+ ptr += 3;
+ }
+ *--ptr = '\0';
+
+#if PY_MAJOR_VERSION >= 3
+ py_mac_address = PyUnicode_FromString(buff);
+#else
+ py_mac_address = PyString_FromString(buff);
+#endif
+ if (py_mac_address == NULL)
+ goto error;
+
+ // Py_BuildValue's "O" format takes its own reference to Py_None
+ py_tuple = Py_BuildValue(
+ "(siOOO)",
+ ifname,
+ -1, // this will be converted later to AF_LINK
+ py_mac_address,
+ Py_None,
+ Py_None
+ );
+ if (! py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ Py_DECREF(py_mac_address);
+ }
+
+ // find out the IP address associated with the NIC
+ if (pUnicast != NULL) {
+ for (i = 0; pUnicast != NULL; i++) {
+ family = pUnicast->Address.lpSockaddr->sa_family;
+ if (family == AF_INET) {
+ struct sockaddr_in *sa_in = (struct sockaddr_in *)
+ pUnicast->Address.lpSockaddr;
+ intRet = inet_ntop(AF_INET, &(sa_in->sin_addr), buff,
+ bufflen);
+ }
+ else if (family == AF_INET6) {
+ struct sockaddr_in6 *sa_in6 = (struct sockaddr_in6 *)
+ pUnicast->Address.lpSockaddr;
+ intRet = inet_ntop(AF_INET6, &(sa_in6->sin6_addr),
+ buff, bufflen);
+ }
+ else {
+ // we should never get here
+ pUnicast = pUnicast->Next;
+ continue;
+ }
+
+ if (intRet == NULL) {
+ PyErr_SetFromWindowsErr(GetLastError());
+ goto error;
+ }
+#if PY_MAJOR_VERSION >= 3
+ py_address = PyUnicode_FromString(buff);
+#else
+ py_address = PyString_FromString(buff);
+#endif
+ if (py_address == NULL)
+ goto error;
+
+ // Py_BuildValue's "O" format takes its own reference to Py_None
+ py_tuple = Py_BuildValue(
+ "(siOOO)",
+ ifname,
+ family,
+ py_address,
+ Py_None,
+ Py_None
+ );
+
+ if (! py_tuple)
+ goto error;
+ if (PyList_Append(py_retlist, py_tuple))
+ goto error;
+ Py_DECREF(py_tuple);
+ Py_DECREF(py_address);
+
+ pUnicast = pUnicast->Next;
+ }
+ }
+
+ pCurrAddresses = pCurrAddresses->Next;
+ }
+
+ free(pAddresses);
+ return py_retlist;
+
+error:
+ if (pAddresses)
+ free(pAddresses);
+ Py_DECREF(py_retlist);
+ Py_XDECREF(py_tuple);
+ Py_XDECREF(py_address);
+ return NULL;
+}
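+
+
+/*
+ * Editorial sketch (not part of psutil): the MAC-formatting loop from
+ * psutil_net_if_addrs() above, isolated and slightly simplified. Each
+ * byte becomes two hex digits plus a '-' separator (3 chars per byte);
+ * the trailing separator is overwritten with a NUL. The output buffer
+ * must hold at least (len * 3 + 1) bytes.
+ */
+static void
+psutil_format_mac_sketch(const unsigned char *mac, int len, char *out)
+{
+ char *ptr = out;
+ int i;
+
+ if (len <= 0) {
+ out[0] = '\0';
+ return;
+ }
+ for (i = 0; i < len; i++) {
+ sprintf(ptr, "%.2X-", (int)mac[i]);
+ ptr += 3; // step over "XX-"; next sprintf overwrites the NUL
+ }
+ *--ptr = '\0'; // drop the trailing '-'
+}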
+
+
+/*
+ * Provides stats about NIC interfaces installed on the system.
+ * TODO: get 'duplex' (currently it's hard-coded to '2', aka
+ * 'full duplex')
+ */
+static PyObject *
+psutil_net_if_stats(PyObject *self, PyObject *args)
+{
+ int i;
+ DWORD dwSize = 0;
+ DWORD dwRetVal = 0;
+ MIB_IFTABLE *pIfTable;
+ MIB_IFROW *pIfRow;
+ PIP_ADAPTER_ADDRESSES pAddresses = NULL;
+ PIP_ADAPTER_ADDRESSES pCurrAddresses = NULL;
+ char friendly_name[MAX_PATH];
+ char descr[MAX_PATH];
+ int ifname_found;
+
+ PyObject *py_retdict = PyDict_New();
+ PyObject *py_ifc_info = NULL;
+ PyObject *py_is_up = NULL;
+
+ if (py_retdict == NULL)
+ return NULL;
+
+ pAddresses = psutil_get_nic_addresses();
+ if (pAddresses == NULL)
+ goto error;
+
+ pIfTable = (MIB_IFTABLE *) malloc(sizeof (MIB_IFTABLE));
+ if (pIfTable == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ dwSize = sizeof(MIB_IFTABLE);
+ if (GetIfTable(pIfTable, &dwSize, FALSE) == ERROR_INSUFFICIENT_BUFFER) {
+ free(pIfTable);
+ pIfTable = (MIB_IFTABLE *) malloc(dwSize);
+ if (pIfTable == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ }
+ // Make a second call to GetIfTable to get the actual
+ // data we want.
+ if ((dwRetVal = GetIfTable(pIfTable, &dwSize, FALSE)) != NO_ERROR) {
+ PyErr_SetString(PyExc_RuntimeError, "GetIfTable() failed");
+ goto error;
+ }
+
+ for (i = 0; i < (int) pIfTable->dwNumEntries; i++) {
+ pIfRow = (MIB_IFROW *) & pIfTable->table[i];
+
+ // GetIfTable is not able to give us NICs with "friendly names"
+ // so we determine them via GetAdaptersAddresses(), which provides
+ // friendly names *and* descriptions, and find the ones that match.
+ ifname_found = 0;
+ pCurrAddresses = pAddresses;
+ while (pCurrAddresses) {
+ sprintf_s(descr, MAX_PATH, "%wS", pCurrAddresses->Description);
+ if (lstrcmp(descr, pIfRow->bDescr) == 0) {
+ sprintf_s(friendly_name, MAX_PATH, "%wS", pCurrAddresses->FriendlyName);
+ ifname_found = 1;
+ break;
+ }
+ pCurrAddresses = pCurrAddresses->Next;
+ }
+ if (ifname_found == 0) {
+ // Name not found means GetAdaptersAddresses() doesn't list
+ // this NIC, only GetIfTable, meaning it's not really a NIC
+ // interface so we skip it.
+ continue;
+ }
+
+ // is up?
+ if((pIfRow->dwOperStatus == MIB_IF_OPER_STATUS_CONNECTED ||
+ pIfRow->dwOperStatus == MIB_IF_OPER_STATUS_OPERATIONAL) &&
+ pIfRow->dwAdminStatus == 1 ) {
+ py_is_up = Py_True;
+ }
+ else {
+ py_is_up = Py_False;
+ }
+ Py_INCREF(py_is_up);
+
+ py_ifc_info = Py_BuildValue(
+ "(Oikk)",
+ py_is_up,
+ 2, // there's no way to know duplex so let's assume 'full'
+ pIfRow->dwSpeed / 1000000, // dwSpeed is in bits/sec; convert to Mbit/s
+ pIfRow->dwMtu
+ );
+ if (!py_ifc_info)
+ goto error;
+ if (PyDict_SetItemString(py_retdict, friendly_name, py_ifc_info))
+ goto error;
+ Py_DECREF(py_ifc_info);
+ }
+
+ free(pIfTable);
+ free(pAddresses);
+ return py_retdict;
+
+error:
+ Py_XDECREF(py_is_up);
+ Py_XDECREF(py_ifc_info);
+ Py_DECREF(py_retdict);
+ if (pIfTable != NULL)
+ free(pIfTable);
+ if (pAddresses != NULL)
+ free(pAddresses);
+ return NULL;
+}
+
+
+// ------------------------ Python init ---------------------------
+
+static PyMethodDef
+PsutilMethods[] =
+{
+ // --- per-process functions
+
+ {"proc_cmdline", psutil_proc_cmdline, METH_VARARGS,
+ "Return process cmdline as a list of cmdline arguments"},
+ {"proc_exe", psutil_proc_exe, METH_VARARGS,
+ "Return path of the process executable"},
+ {"proc_name", psutil_proc_name, METH_VARARGS,
+ "Return process name"},
+ {"proc_kill", psutil_proc_kill, METH_VARARGS,
+ "Kill the process identified by the given PID"},
+ {"proc_cpu_times", psutil_proc_cpu_times, METH_VARARGS,
+ "Return tuple of user/kern time for the given PID"},
+ {"proc_create_time", psutil_proc_create_time, METH_VARARGS,
+ "Return a float indicating the process create time expressed in "
+ "seconds since the epoch"},
+ {"proc_memory_info", psutil_proc_memory_info, METH_VARARGS,
+ "Return a tuple of process memory information"},
+ {"proc_memory_info_2", psutil_proc_memory_info_2, METH_VARARGS,
+ "Alternate implementation"},
+ {"proc_cwd", psutil_proc_cwd, METH_VARARGS,
+ "Return process current working directory"},
+ {"proc_suspend", psutil_proc_suspend, METH_VARARGS,
+ "Suspend a process"},
+ {"proc_resume", psutil_proc_resume, METH_VARARGS,
+ "Resume a process"},
+ {"proc_open_files", psutil_proc_open_files, METH_VARARGS,
+ "Return files opened by process"},
+ {"proc_username", psutil_proc_username, METH_VARARGS,
+ "Return the username of a process"},
+ {"proc_threads", psutil_proc_threads, METH_VARARGS,
+ "Return process threads information as a list of tuple"},
+ {"proc_wait", psutil_proc_wait, METH_VARARGS,
+ "Wait for process to terminate and return its exit code."},
+ {"proc_priority_get", psutil_proc_priority_get, METH_VARARGS,
+ "Return process priority."},
+ {"proc_priority_set", psutil_proc_priority_set, METH_VARARGS,
+ "Set process priority."},
+#if (_WIN32_WINNT >= 0x0600) // Windows Vista
+ {"proc_io_priority_get", psutil_proc_io_priority_get, METH_VARARGS,
+ "Return process IO priority."},
+ {"proc_io_priority_set", psutil_proc_io_priority_set, METH_VARARGS,
+ "Set process IO priority."},
+#endif
+ {"proc_cpu_affinity_get", psutil_proc_cpu_affinity_get, METH_VARARGS,
+ "Return process CPU affinity as a bitmask."},
+ {"proc_cpu_affinity_set", psutil_proc_cpu_affinity_set, METH_VARARGS,
+ "Set process CPU affinity."},
+ {"proc_io_counters", psutil_proc_io_counters, METH_VARARGS,
+ "Get process I/O counters."},
+ {"proc_is_suspended", psutil_proc_is_suspended, METH_VARARGS,
+ "Return True if one of the process threads is in a suspended state"},
+ {"proc_num_handles", psutil_proc_num_handles, METH_VARARGS,
+ "Return the number of handles opened by process."},
+ {"proc_memory_maps", psutil_proc_memory_maps, METH_VARARGS,
+ "Return a list of process's memory mappings"},
+
+ // --- alternative pinfo interface
+ {"proc_info", psutil_proc_info, METH_VARARGS,
+ "Various process information"},
+
+ // --- system-related functions
+ {"pids", psutil_pids, METH_VARARGS,
+ "Returns a list of PIDs currently running on the system"},
+ {"ppid_map", psutil_ppid_map, METH_VARARGS,
+ "Return a {pid:ppid, ...} dict for all running processes"},
+ {"pid_exists", psutil_pid_exists, METH_VARARGS,
+ "Determine if the process exists in the current process list."},
+ {"cpu_count_logical", psutil_cpu_count_logical, METH_VARARGS,
+ "Returns the number of logical CPUs on the system"},
+ {"cpu_count_phys", psutil_cpu_count_phys, METH_VARARGS,
+ "Returns the number of physical CPUs on the system"},
+ {"boot_time", psutil_boot_time, METH_VARARGS,
+ "Return the system boot time expressed in seconds since the epoch."},
+ {"virtual_mem", psutil_virtual_mem, METH_VARARGS,
+ "Return the total amount of physical memory, in bytes"},
+ {"cpu_times", psutil_cpu_times, METH_VARARGS,
+ "Return system cpu times as a list"},
+ {"per_cpu_times", psutil_per_cpu_times, METH_VARARGS,
+ "Return system per-cpu times as a list of tuples"},
+ {"disk_usage", psutil_disk_usage, METH_VARARGS,
+ "Return path's disk total and free as a Python tuple."},
+ {"net_io_counters", psutil_net_io_counters, METH_VARARGS,
+ "Return dict of tuples of networks I/O information."},
+ {"disk_io_counters", psutil_disk_io_counters, METH_VARARGS,
+ "Return dict of tuples of disks I/O information."},
+ {"users", psutil_users, METH_VARARGS,
+ "Return a list of currently connected users."},
+ {"disk_partitions", psutil_disk_partitions, METH_VARARGS,
+ "Return disk partitions."},
+ {"net_connections", psutil_net_connections, METH_VARARGS,
+ "Return system-wide connections"},
+ {"net_if_addrs", psutil_net_if_addrs, METH_VARARGS,
+ "Return NICs addresses."},
+ {"net_if_stats", psutil_net_if_stats, METH_VARARGS,
+ "Return NICs stats."},
+
+ // --- windows API bindings
+ {"win32_QueryDosDevice", psutil_win32_QueryDosDevice, METH_VARARGS,
+ "QueryDosDevice binding"},
+
+ {NULL, NULL, 0, NULL}
+};
+
+
+struct module_state {
+ PyObject *error;
+};
+
+#if PY_MAJOR_VERSION >= 3
+#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m))
+#else
+#define GETSTATE(m) (&_state)
+static struct module_state _state;
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static int psutil_windows_traverse(PyObject *m, visitproc visit, void *arg) {
+ Py_VISIT(GETSTATE(m)->error);
+ return 0;
+}
+
+static int psutil_windows_clear(PyObject *m) {
+ Py_CLEAR(GETSTATE(m)->error);
+ return 0;
+}
+
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "psutil_windows",
+ NULL,
+ sizeof(struct module_state),
+ PsutilMethods,
+ NULL,
+ psutil_windows_traverse,
+ psutil_windows_clear,
+ NULL
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC PyInit__psutil_windows(void)
+
+#else
+#define INITERROR return
+void init_psutil_windows(void)
+#endif
+{
+ struct module_state *st = NULL;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+ PyObject *module = Py_InitModule("_psutil_windows", PsutilMethods);
+#endif
+
+ if (module == NULL) {
+ INITERROR;
+ }
+
+ st = GETSTATE(module);
+ st->error = PyErr_NewException("_psutil_windows.Error", NULL, NULL);
+ if (st->error == NULL) {
+ Py_DECREF(module);
+ INITERROR;
+ }
+
+ PyModule_AddIntConstant(module, "version", PSUTIL_VERSION);
+
+ // process status constants
+ // http://msdn.microsoft.com/en-us/library/ms683211(v=vs.85).aspx
+ PyModule_AddIntConstant(
+ module, "ABOVE_NORMAL_PRIORITY_CLASS", ABOVE_NORMAL_PRIORITY_CLASS);
+ PyModule_AddIntConstant(
+ module, "BELOW_NORMAL_PRIORITY_CLASS", BELOW_NORMAL_PRIORITY_CLASS);
+ PyModule_AddIntConstant(
+ module, "HIGH_PRIORITY_CLASS", HIGH_PRIORITY_CLASS);
+ PyModule_AddIntConstant(
+ module, "IDLE_PRIORITY_CLASS", IDLE_PRIORITY_CLASS);
+ PyModule_AddIntConstant(
+ module, "NORMAL_PRIORITY_CLASS", NORMAL_PRIORITY_CLASS);
+ PyModule_AddIntConstant(
+ module, "REALTIME_PRIORITY_CLASS", REALTIME_PRIORITY_CLASS);
+ // connection status constants
+ // http://msdn.microsoft.com/en-us/library/cc669305.aspx
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_CLOSED", MIB_TCP_STATE_CLOSED);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_CLOSING", MIB_TCP_STATE_CLOSING);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_CLOSE_WAIT", MIB_TCP_STATE_CLOSE_WAIT);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_LISTEN", MIB_TCP_STATE_LISTEN);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_ESTAB", MIB_TCP_STATE_ESTAB);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_SYN_SENT", MIB_TCP_STATE_SYN_SENT);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_SYN_RCVD", MIB_TCP_STATE_SYN_RCVD);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_FIN_WAIT1", MIB_TCP_STATE_FIN_WAIT1);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_FIN_WAIT2", MIB_TCP_STATE_FIN_WAIT2);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_LAST_ACK", MIB_TCP_STATE_LAST_ACK);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_TIME_WAIT", MIB_TCP_STATE_TIME_WAIT);
+ PyModule_AddIntConstant(
+ module, "MIB_TCP_STATE_DELETE_TCB", MIB_TCP_STATE_DELETE_TCB);
+ PyModule_AddIntConstant(
+ module, "PSUTIL_CONN_NONE", PSUTIL_CONN_NONE);
+ // ...for internal use in _psutil_windows.py
+ PyModule_AddIntConstant(
+ module, "INFINITE", INFINITE);
+ PyModule_AddIntConstant(
+ module, "ERROR_ACCESS_DENIED", ERROR_ACCESS_DENIED);
+
+ // set SeDebug for the current process
+ psutil_set_se_debug();
+
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
diff --git a/python/psutil/psutil/_psutil_windows.h b/python/psutil/psutil/_psutil_windows.h
new file mode 100644
index 000000000..c77f64e9c
--- /dev/null
+++ b/python/psutil/psutil/_psutil_windows.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+#include <windows.h>
+
+// --- per-process functions
+
+static PyObject* psutil_proc_cmdline(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_set(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_create_time(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cwd(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_exe(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_is_suspended(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_kill(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_maps(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_name(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_handles(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_open_files(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_priority_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_priority_set(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_resume(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_suspend(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_username(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_wait(PyObject* self, PyObject* args);
+
+#if (PSUTIL_WINVER >= 0x0600) // Windows Vista
+static PyObject* psutil_proc_io_priority_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_priority_set(PyObject* self, PyObject* args);
+#endif
+
+// --- system-related functions
+
+static PyObject* psutil_boot_time(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_logical(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_phys(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_usage(PyObject* self, PyObject* args);
+static PyObject* psutil_net_connections(PyObject* self, PyObject* args);
+static PyObject* psutil_net_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_per_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_pid_exists(PyObject* self, PyObject* args);
+static PyObject* psutil_pids(PyObject* self, PyObject* args);
+static PyObject* psutil_ppid_map(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_virtual_mem(PyObject* self, PyObject* args);
+static PyObject* psutil_net_if_addrs(PyObject* self, PyObject* args);
+static PyObject* psutil_net_if_stats(PyObject* self, PyObject* args);
+
+// --- windows API bindings
+
+static PyObject* psutil_win32_QueryDosDevice(PyObject* self, PyObject* args);
+
+// --- internal
+
+int psutil_proc_suspend_or_resume(DWORD pid, int suspend);
diff --git a/python/psutil/psutil/_pswindows.py b/python/psutil/psutil/_pswindows.py
new file mode 100644
index 000000000..2d8babb19
--- /dev/null
+++ b/python/psutil/psutil/_pswindows.py
@@ -0,0 +1,548 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Windows platform implementation."""
+
+import errno
+import functools
+import os
+import sys
+from collections import namedtuple
+
+from . import _common
+from . import _psutil_windows as cext
+from ._common import conn_tmap, usage_percent, isfile_strict
+from ._common import sockfam_to_enum, socktype_to_enum
+from ._compat import PY3, xrange, lru_cache, long
+from ._psutil_windows import (ABOVE_NORMAL_PRIORITY_CLASS,
+ BELOW_NORMAL_PRIORITY_CLASS,
+ HIGH_PRIORITY_CLASS,
+ IDLE_PRIORITY_CLASS,
+ NORMAL_PRIORITY_CLASS,
+ REALTIME_PRIORITY_CLASS)
+
+if sys.version_info >= (3, 4):
+ import enum
+else:
+ enum = None
+
+# process priority constants, re-exported via __init__.py:
+# http://msdn.microsoft.com/en-us/library/ms686219(v=vs.85).aspx
+__extra__all__ = ["ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
+ "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
+ "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
+ "CONN_DELETE_TCB",
+ "AF_LINK",
+ ]
+
+# --- module level constants (get pushed up to the psutil module)
+
+CONN_DELETE_TCB = "DELETE_TCB"
+WAIT_TIMEOUT = 0x00000102 # 258 in decimal
+ACCESS_DENIED_SET = frozenset([errno.EPERM, errno.EACCES,
+ cext.ERROR_ACCESS_DENIED])
+if enum is None:
+ AF_LINK = -1
+else:
+ AddressFamily = enum.IntEnum('AddressFamily', {'AF_LINK': -1})
+ AF_LINK = AddressFamily.AF_LINK
+
+TCP_STATUSES = {
+ cext.MIB_TCP_STATE_ESTAB: _common.CONN_ESTABLISHED,
+ cext.MIB_TCP_STATE_SYN_SENT: _common.CONN_SYN_SENT,
+ cext.MIB_TCP_STATE_SYN_RCVD: _common.CONN_SYN_RECV,
+ cext.MIB_TCP_STATE_FIN_WAIT1: _common.CONN_FIN_WAIT1,
+ cext.MIB_TCP_STATE_FIN_WAIT2: _common.CONN_FIN_WAIT2,
+ cext.MIB_TCP_STATE_TIME_WAIT: _common.CONN_TIME_WAIT,
+ cext.MIB_TCP_STATE_CLOSED: _common.CONN_CLOSE,
+ cext.MIB_TCP_STATE_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
+ cext.MIB_TCP_STATE_LAST_ACK: _common.CONN_LAST_ACK,
+ cext.MIB_TCP_STATE_LISTEN: _common.CONN_LISTEN,
+ cext.MIB_TCP_STATE_CLOSING: _common.CONN_CLOSING,
+ cext.MIB_TCP_STATE_DELETE_TCB: CONN_DELETE_TCB,
+ cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
+}
+
+if enum is not None:
+ class Priority(enum.IntEnum):
+ ABOVE_NORMAL_PRIORITY_CLASS = ABOVE_NORMAL_PRIORITY_CLASS
+ BELOW_NORMAL_PRIORITY_CLASS = BELOW_NORMAL_PRIORITY_CLASS
+ HIGH_PRIORITY_CLASS = HIGH_PRIORITY_CLASS
+ IDLE_PRIORITY_CLASS = IDLE_PRIORITY_CLASS
+ NORMAL_PRIORITY_CLASS = NORMAL_PRIORITY_CLASS
+ REALTIME_PRIORITY_CLASS = REALTIME_PRIORITY_CLASS
+
+ globals().update(Priority.__members__)
+
+scputimes = namedtuple('scputimes', ['user', 'system', 'idle'])
+svmem = namedtuple('svmem', ['total', 'available', 'percent', 'used', 'free'])
+pextmem = namedtuple(
+ 'pextmem', ['num_page_faults', 'peak_wset', 'wset', 'peak_paged_pool',
+ 'paged_pool', 'peak_nonpaged_pool', 'nonpaged_pool',
+ 'pagefile', 'peak_pagefile', 'private'])
+pmmap_grouped = namedtuple('pmmap_grouped', ['path', 'rss'])
+pmmap_ext = namedtuple(
+ 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
+ntpinfo = namedtuple(
+ 'ntpinfo', ['num_handles', 'ctx_switches', 'user_time', 'kernel_time',
+ 'create_time', 'num_threads', 'io_rcount', 'io_wcount',
+ 'io_rbytes', 'io_wbytes'])
+
+# set later from __init__.py
+NoSuchProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+
+@lru_cache(maxsize=512)
+def _win32_QueryDosDevice(s):
+ return cext.win32_QueryDosDevice(s)
+
+
+def _convert_raw_path(s):
+ # convert paths using native DOS format like:
+ # "\Device\HarddiskVolume1\Windows\systemew\file.txt"
+ # into: "C:\Windows\systemew\file.txt"
+ if PY3 and not isinstance(s, str):
+ s = s.decode('utf8')
+ rawdrive = '\\'.join(s.split('\\')[:3])
+ driveletter = _win32_QueryDosDevice(rawdrive)
+ return os.path.join(driveletter, s[len(rawdrive):])
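+
+
+# Editorial sketch (not part of psutil): how the split above behaves,
+# assuming "\Device\HarddiskVolume1" maps to "C:" (the real mapping is
+# obtained via QueryDosDevice() and differs per system):
+#
+#     s = "\\Device\\HarddiskVolume1\\Windows\\notepad.exe"
+#     rawdrive = '\\'.join(s.split('\\')[:3])  # "\Device\HarddiskVolume1"
+#     os.path.join("C:", s[len(rawdrive):])    # "C:\Windows\notepad.exe"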
+
+
+# --- public functions
+
+
+def virtual_memory():
+ """System virtual memory as a namedtuple."""
+ mem = cext.virtual_mem()
+ totphys, availphys, totpagef, availpagef, totvirt, freevirt = mem
+ total = totphys
+ avail = availphys
+ free = availphys
+ used = total - avail
+ percent = usage_percent((total - avail), total, _round=1)
+ return svmem(total, avail, percent, used, free)
+
+
+def swap_memory():
+ """Swap system memory as a (total, used, free, sin, sout) tuple."""
+ mem = cext.virtual_mem()
+ total = mem[2]
+ free = mem[3]
+ used = total - free
+ percent = usage_percent(used, total, _round=1)
+ return _common.sswap(total, used, free, percent, 0, 0)
+
+
+def disk_usage(path):
+ """Return disk usage associated with path."""
+ try:
+ total, free = cext.disk_usage(path)
+ except WindowsError:
+ if not os.path.exists(path):
+ msg = "No such file or directory: '%s'" % path
+ raise OSError(errno.ENOENT, msg)
+ raise
+ used = total - free
+ percent = usage_percent(used, total, _round=1)
+ return _common.sdiskusage(total, used, free, percent)
+
+
+def disk_partitions(all):
+ """Return disk partitions."""
+ rawlist = cext.disk_partitions(all)
+ return [_common.sdiskpart(*x) for x in rawlist]
+
+
+def cpu_times():
+ """Return system CPU times as a named tuple."""
+ user, system, idle = cext.cpu_times()
+ return scputimes(user, system, idle)
+
+
+def per_cpu_times():
+ """Return system per-CPU times as a list of named tuples."""
+ ret = []
+ for cpu_t in cext.per_cpu_times():
+ user, system, idle = cpu_t
+ item = scputimes(user, system, idle)
+ ret.append(item)
+ return ret
+
+
+def cpu_count_logical():
+ """Return the number of logical CPUs in the system."""
+ return cext.cpu_count_logical()
+
+
+def cpu_count_physical():
+ """Return the number of physical CPUs in the system."""
+ return cext.cpu_count_phys()
+
+
+def boot_time():
+ """The system boot time expressed in seconds since the epoch."""
+ return cext.boot_time()
+
+
+def net_connections(kind, _pid=-1):
+ """Return socket connections. If pid == -1 return system-wide
+ connections (as opposed to connections opened by one process only).
+ """
+ if kind not in conn_tmap:
+ raise ValueError("invalid %r kind argument; choose between %s"
+ % (kind, ', '.join([repr(x) for x in conn_tmap])))
+ families, types = conn_tmap[kind]
+ rawlist = cext.net_connections(_pid, families, types)
+ ret = set()
+ for item in rawlist:
+ fd, fam, type, laddr, raddr, status, pid = item
+ status = TCP_STATUSES[status]
+ fam = sockfam_to_enum(fam)
+ type = socktype_to_enum(type)
+ if _pid == -1:
+ nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid)
+ else:
+ nt = _common.pconn(fd, fam, type, laddr, raddr, status)
+ ret.add(nt)
+ return list(ret)
+
+
+def net_if_stats():
+ ret = cext.net_if_stats()
+ for name, items in ret.items():
+ isup, duplex, speed, mtu = items
+ if hasattr(_common, 'NicDuplex'):
+ duplex = _common.NicDuplex(duplex)
+ ret[name] = _common.snicstats(isup, duplex, speed, mtu)
+ return ret
+
+
+def users():
+ """Return currently connected users as a list of namedtuples."""
+ retlist = []
+ rawlist = cext.users()
+ for item in rawlist:
+ user, hostname, tstamp = item
+ nt = _common.suser(user, None, hostname, tstamp)
+ retlist.append(nt)
+ return retlist
+
+
+pids = cext.pids
+pid_exists = cext.pid_exists
+net_io_counters = cext.net_io_counters
+disk_io_counters = cext.disk_io_counters
+ppid_map = cext.ppid_map # not meant to be public
+net_if_addrs = cext.net_if_addrs
+
+
+def wrap_exceptions(fun):
+ """Decorator which translates bare OSError and WindowsError
+ exceptions into NoSuchProcess and AccessDenied.
+ """
+ @functools.wraps(fun)
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except OSError as err:
+ # support for private module import
+ if NoSuchProcess is None or AccessDenied is None:
+ raise
+ if err.errno in ACCESS_DENIED_SET:
+ raise AccessDenied(self.pid, self._name)
+ if err.errno == errno.ESRCH:
+ raise NoSuchProcess(self.pid, self._name)
+ raise
+ return wrapper
+
+
+class Process(object):
+ """Wrapper class around underlying C implementation."""
+
+ __slots__ = ["pid", "_name", "_ppid"]
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._name = None
+ self._ppid = None
+
+ @wrap_exceptions
+ def name(self):
+ """Return process name, which on Windows is always the final
+ part of the executable.
+ """
+ # This is how PIDs 0 and 4 are always represented in taskmgr
+ # and process-hacker.
+ if self.pid == 0:
+ return "System Idle Process"
+ elif self.pid == 4:
+ return "System"
+ else:
+ try:
+ # Note: this will fail with AD for most PIDs owned
+ # by another user but it's faster.
+ return os.path.basename(self.exe())
+ except AccessDenied:
+ return cext.proc_name(self.pid)
+
+ @wrap_exceptions
+ def exe(self):
+ # Note: os.path.exists(path) may return False even if the file
+ # is there, see:
+ # http://stackoverflow.com/questions/3112546/os-path-exists-lies
+
+ # see https://github.com/giampaolo/psutil/issues/414
+ # see https://github.com/giampaolo/psutil/issues/528
+ if self.pid in (0, 4):
+ raise AccessDenied(self.pid, self._name)
+ return _convert_raw_path(cext.proc_exe(self.pid))
+
+ @wrap_exceptions
+ def cmdline(self):
+ return cext.proc_cmdline(self.pid)
+
+ def ppid(self):
+ try:
+ return ppid_map()[self.pid]
+ except KeyError:
+ raise NoSuchProcess(self.pid, self._name)
+
+ def _get_raw_meminfo(self):
+ try:
+ return cext.proc_memory_info(self.pid)
+ except OSError as err:
+ if err.errno in ACCESS_DENIED_SET:
+ # TODO: the C ext can probably be refactored in order
+ # to get this from cext.proc_info()
+ return cext.proc_memory_info_2(self.pid)
+ raise
+
+ @wrap_exceptions
+ def memory_info(self):
+ # on Windows RSS == WorkingSetSize and VMS == PagefileUsage
+ # fields of PROCESS_MEMORY_COUNTERS struct:
+ # http://msdn.microsoft.com/en-us/library/windows/desktop/
+ # ms684877(v=vs.85).aspx
+ t = self._get_raw_meminfo()
+ return _common.pmem(t[2], t[7])
+
+ @wrap_exceptions
+ def memory_info_ex(self):
+ return pextmem(*self._get_raw_meminfo())
+
+ def memory_maps(self):
+ try:
+ raw = cext.proc_memory_maps(self.pid)
+ except OSError as err:
+ # XXX - can't use wrap_exceptions decorator as we're
+ # returning a generator; probably needs refactoring.
+ if err.errno in ACCESS_DENIED_SET:
+ raise AccessDenied(self.pid, self._name)
+ if err.errno == errno.ESRCH:
+ raise NoSuchProcess(self.pid, self._name)
+ raise
+ else:
+ for addr, perm, path, rss in raw:
+ path = _convert_raw_path(path)
+ addr = hex(addr)
+ yield (addr, perm, path, rss)
+
+ @wrap_exceptions
+ def kill(self):
+ return cext.proc_kill(self.pid)
+
+ @wrap_exceptions
+ def wait(self, timeout=None):
+ if timeout is None:
+ timeout = cext.INFINITE
+ else:
+ # WaitForSingleObject() expects time in milliseconds
+ timeout = int(timeout * 1000)
+ ret = cext.proc_wait(self.pid, timeout)
+ if ret == WAIT_TIMEOUT:
+ # support for private module import
+ if TimeoutExpired is None:
+ raise RuntimeError("timeout expired")
+ raise TimeoutExpired(timeout, self.pid, self._name)
+ return ret
+
+ @wrap_exceptions
+ def username(self):
+ if self.pid in (0, 4):
+ return 'NT AUTHORITY\\SYSTEM'
+ return cext.proc_username(self.pid)
+
+ @wrap_exceptions
+ def create_time(self):
+ # special case for kernel process PIDs; return system boot time
+ if self.pid in (0, 4):
+ return boot_time()
+ try:
+ return cext.proc_create_time(self.pid)
+ except OSError as err:
+ if err.errno in ACCESS_DENIED_SET:
+ return ntpinfo(*cext.proc_info(self.pid)).create_time
+ raise
+
+ @wrap_exceptions
+ def num_threads(self):
+ return ntpinfo(*cext.proc_info(self.pid)).num_threads
+
+ @wrap_exceptions
+ def threads(self):
+ rawlist = cext.proc_threads(self.pid)
+ retlist = []
+ for thread_id, utime, stime in rawlist:
+ ntuple = _common.pthread(thread_id, utime, stime)
+ retlist.append(ntuple)
+ return retlist
+
+ @wrap_exceptions
+ def cpu_times(self):
+ try:
+ ret = cext.proc_cpu_times(self.pid)
+ except OSError as err:
+ if err.errno in ACCESS_DENIED_SET:
+ nt = ntpinfo(*cext.proc_info(self.pid))
+ ret = (nt.user_time, nt.kernel_time)
+ else:
+ raise
+ return _common.pcputimes(*ret)
+
+ @wrap_exceptions
+ def suspend(self):
+ return cext.proc_suspend(self.pid)
+
+ @wrap_exceptions
+ def resume(self):
+ return cext.proc_resume(self.pid)
+
+ @wrap_exceptions
+ def cwd(self):
+ if self.pid in (0, 4):
+ raise AccessDenied(self.pid, self._name)
+ # return a normalized pathname since the native C function appends
+ # "\\" at the and of the path
+ path = cext.proc_cwd(self.pid)
+ return os.path.normpath(path)
+
+ @wrap_exceptions
+ def open_files(self):
+ if self.pid in (0, 4):
+ return []
+ retlist = []
+ # Filenames come in native DOS format like:
+ # "\Device\HarddiskVolume1\Windows\system32\file.txt"
+ # Convert the first part into the corresponding drive letter
+ # (e.g. "C:\") by using Windows' QueryDosDevice()
+ raw_file_names = cext.proc_open_files(self.pid)
+ for _file in raw_file_names:
+ _file = _convert_raw_path(_file)
+ if isfile_strict(_file) and _file not in retlist:
+ ntuple = _common.popenfile(_file, -1)
+ retlist.append(ntuple)
+ return retlist
+
+ @wrap_exceptions
+ def connections(self, kind='inet'):
+ return net_connections(kind, _pid=self.pid)
+
+ @wrap_exceptions
+ def nice_get(self):
+ value = cext.proc_priority_get(self.pid)
+ if enum is not None:
+ value = Priority(value)
+ return value
+
+ @wrap_exceptions
+ def nice_set(self, value):
+ return cext.proc_priority_set(self.pid, value)
+
+ # available on Windows >= Vista
+ if hasattr(cext, "proc_io_priority_get"):
+ @wrap_exceptions
+ def ionice_get(self):
+ return cext.proc_io_priority_get(self.pid)
+
+ @wrap_exceptions
+ def ionice_set(self, value, _):
+ if _:
+ raise TypeError("set_proc_ionice() on Windows takes only "
+ "1 argument (2 given)")
+ if value not in (2, 1, 0):
+ raise ValueError("value must be 2 (normal), 1 (low) or 0 "
+ "(very low); got %r" % value)
+ return cext.proc_io_priority_set(self.pid, value)
+
+ @wrap_exceptions
+ def io_counters(self):
+ try:
+ ret = cext.proc_io_counters(self.pid)
+ except OSError as err:
+ if err.errno in ACCESS_DENIED_SET:
+ nt = ntpinfo(*cext.proc_info(self.pid))
+ ret = (nt.io_rcount, nt.io_wcount, nt.io_rbytes, nt.io_wbytes)
+ else:
+ raise
+ return _common.pio(*ret)
+
+ @wrap_exceptions
+ def status(self):
+ suspended = cext.proc_is_suspended(self.pid)
+ if suspended:
+ return _common.STATUS_STOPPED
+ else:
+ return _common.STATUS_RUNNING
+
+ @wrap_exceptions
+ def cpu_affinity_get(self):
+ def from_bitmask(x):
+ return [i for i in xrange(64) if (1 << i) & x]
+ bitmask = cext.proc_cpu_affinity_get(self.pid)
+ return from_bitmask(bitmask)
+
+ @wrap_exceptions
+ def cpu_affinity_set(self, value):
+ def to_bitmask(l):
+ if not l:
+ raise ValueError("invalid argument %r" % l)
+ out = 0
+ for b in l:
+ out |= 2 ** b
+ return out
+
+ # SetProcessAffinityMask() states that ERROR_INVALID_PARAMETER
+ # is returned for an invalid CPU but this seems not to be true,
+ # therefore we check CPU validity beforehand.
+ allcpus = list(range(len(per_cpu_times())))
+ for cpu in value:
+ if cpu not in allcpus:
+ if not isinstance(cpu, (int, long)):
+ raise TypeError(
+ "invalid CPU %r; an integer is required" % cpu)
+ else:
+ raise ValueError("invalid CPU %r" % cpu)
+
+ bitmask = to_bitmask(value)
+ cext.proc_cpu_affinity_set(self.pid, bitmask)
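+
+ # Editorial sketch (not part of psutil): the bitmask round-trip used
+ # by cpu_affinity_get()/cpu_affinity_set() above, e.g. for CPUs 0
+ # and 2:
+ #
+ #     to_bitmask([0, 2]) == 0b101 == 5
+ #     from_bitmask(5) == [0, 2]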
+
+ @wrap_exceptions
+ def num_handles(self):
+ try:
+ return cext.proc_num_handles(self.pid)
+ except OSError as err:
+ if err.errno in ACCESS_DENIED_SET:
+ return ntpinfo(*cext.proc_info(self.pid)).num_handles
+ raise
+
+ @wrap_exceptions
+ def num_ctx_switches(self):
+ ctx_switches = ntpinfo(*cext.proc_info(self.pid)).ctx_switches
+ # only voluntary ctx switches are supported
+ return _common.pctxsw(ctx_switches, 0)
diff --git a/python/psutil/psutil/arch/bsd/process_info.c b/python/psutil/psutil/arch/bsd/process_info.c
new file mode 100644
index 000000000..4d7392406
--- /dev/null
+++ b/python/psutil/psutil/arch/bsd/process_info.c
@@ -0,0 +1,265 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Helper functions related to fetching process information.
+ * Used by _psutil_bsd module methods.
+ */
+
+
+#include <Python.h>
+#include <assert.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/sysctl.h>
+#include <sys/param.h>
+#include <sys/user.h>
+#include <sys/proc.h>
+#include <signal.h>
+
+#include "process_info.h"
+
+
+/*
+ * Returns a list of all BSD processes on the system. This routine
+ * allocates the list and puts it in *procList and a count of the
+ * number of entries in *procCount. You are responsible for freeing
+ * this list (use "free" from System framework).
+ * On success, the function returns 0.
+ * On error, the function returns a BSD errno value.
+ */
+int
+psutil_get_proc_list(struct kinfo_proc **procList, size_t *procCount)
+{
+ int err;
+ struct kinfo_proc *result;
+ int done;
+ static const int name[] = { CTL_KERN, KERN_PROC, KERN_PROC_PROC, 0 };
+ // Declaring name as const requires us to cast it when passing it to
+ // sysctl because the prototype doesn't include the const modifier.
+ size_t length;
+
+ assert( procList != NULL);
+ assert(*procList == NULL);
+ assert(procCount != NULL);
+
+ *procCount = 0;
+
+ /*
+ * We start by calling sysctl with result == NULL and length == 0.
+ * That will succeed, and set length to the appropriate length.
+ * We then allocate a buffer of that size and call sysctl again
+ * with that buffer. If that succeeds, we're done. If that fails
+ * with ENOMEM, we have to throw away our buffer and loop. Note
+ * that the loop causes us to call sysctl with NULL again; this
+ * is necessary because the ENOMEM failure case sets length to
+ * the amount of data returned, not the amount of data that
+ * could have been returned.
+ */
+ result = NULL;
+ done = 0;
+ do {
+ assert(result == NULL);
+ // Call sysctl with a NULL buffer.
+ length = 0;
+ err = sysctl((int *)name, (sizeof(name) / sizeof(*name)) - 1,
+ NULL, &length, NULL, 0);
+ if (err == -1)
+ err = errno;
+
+ // Allocate an appropriately sized buffer based on the results
+ // from the previous call.
+ if (err == 0) {
+ result = malloc(length);
+ if (result == NULL)
+ err = ENOMEM;
+ }
+
+ // Call sysctl again with the new buffer. If we get an ENOMEM
+ // error, toss away our buffer and start again.
+ if (err == 0) {
+ err = sysctl((int *) name, (sizeof(name) / sizeof(*name)) - 1,
+ result, &length, NULL, 0);
+ if (err == -1)
+ err = errno;
+ if (err == 0) {
+ done = 1;
+ }
+ else if (err == ENOMEM) {
+ assert(result != NULL);
+ free(result);
+ result = NULL;
+ err = 0;
+ }
+ }
+ } while (err == 0 && ! done);
+
+ // Clean up and establish post conditions.
+ if (err != 0 && result != NULL) {
+ free(result);
+ result = NULL;
+ }
+
+ *procList = result;
+ *procCount = length / sizeof(struct kinfo_proc);
+
+ assert((err == 0) == (*procList != NULL));
+ return err;
+}
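+
+
+/*
+ * Editorial usage sketch (not part of psutil): the call pattern expected
+ * by psutil_get_proc_list() above, assuming FreeBSD's ki_pid field:
+ *
+ *     struct kinfo_proc *procs = NULL;
+ *     size_t count = 0;
+ *     size_t i;
+ *
+ *     if (psutil_get_proc_list(&procs, &count) == 0) {
+ *         for (i = 0; i < count; i++)
+ *             printf("%d\n", procs[i].ki_pid);
+ *         free(procs);
+ *     }
+ */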
+
+
+char
+*psutil_get_cmd_path(long pid, size_t *pathsize)
+{
+ int mib[4];
+ char *path;
+ size_t size = 0;
+
+ /*
+ * Make a sysctl() call to get the raw argument space of the process.
+ */
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PATHNAME;
+ mib[3] = pid;
+
+ // call with a null buffer first to determine if we need a buffer
+ if (sysctl(mib, 4, NULL, &size, NULL, 0) == -1)
+ return NULL;
+
+ path = malloc(size);
+ if (path == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ *pathsize = size;
+ if (sysctl(mib, 4, path, &size, NULL, 0) == -1) {
+ free(path);
+ return NULL; // Insufficient privileges
+ }
+
+ return path;
+}
+
+
+/*
+ * XXX no longer used; it probably makes sense to remove it.
+ * Borrowed from psi Python System Information project
+ *
+ * Get command arguments and environment variables.
+ *
+ * Based on code from ps.
+ *
+ * Returns:
+ * the arguments as a malloc()'ed buffer (the caller must free it)
+ * and sets *argsize to its length; NULL on failure or insufficient
+ * privileges.
+ */
+char
+*psutil_get_cmd_args(long pid, size_t *argsize)
+{
+ int mib[4], argmax;
+ size_t size = sizeof(argmax);
+ char *procargs = NULL;
+
+ // Get the maximum process arguments size.
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_ARGMAX;
+
+ size = sizeof(argmax);
+ if (sysctl(mib, 2, &argmax, &size, NULL, 0) == -1)
+ return NULL;
+
+ // Allocate space for the arguments.
+ procargs = (char *)malloc(argmax);
+ if (procargs == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ /*
+ * Make a sysctl() call to get the raw argument space of the process.
+ */
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_ARGS;
+ mib[3] = pid;
+
+ size = argmax;
+ if (sysctl(mib, 4, procargs, &size, NULL, 0) == -1) {
+ free(procargs);
+ return NULL; // Insufficient privileges
+ }
+
+ // return string and set the length of arguments
+ *argsize = size;
+ return procargs;
+}
+
+
+// returns the command line as a python list object
+PyObject *
+psutil_get_arg_list(long pid)
+{
+ char *argstr = NULL;
+ int pos = 0;
+ size_t argsize = 0;
+ PyObject *retlist = Py_BuildValue("[]");
+ PyObject *item = NULL;
+
+ if (pid < 0)
+ return retlist;
+ argstr = psutil_get_cmd_args(pid, &argsize);
+ if (argstr == NULL)
+ goto error;
+
+ // args are returned as a flattened string with \0 separators between
+ // arguments; add each string to the list, then step forward to the
+ // next separator
+ if (argsize > 0) {
+ while (pos < argsize) {
+ item = Py_BuildValue("s", &argstr[pos]);
+ if (!item)
+ goto error;
+ if (PyList_Append(retlist, item))
+ goto error;
+ Py_DECREF(item);
+ pos = pos + strlen(&argstr[pos]) + 1;
+ }
+ }
+
+ free(argstr);
+ return retlist;
+
+error:
+ Py_XDECREF(item);
+ Py_DECREF(retlist);
+ if (argstr != NULL)
+ free(argstr);
+ return NULL;
+}
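+
+
+/*
+ * Editorial note (not part of psutil): the buffer parsed by
+ * psutil_get_arg_list() above is a flat, NUL-separated string as
+ * returned by the KERN_PROC_ARGS sysctl, e.g.:
+ *
+ *     "arg0\0arg1\0arg2\0"
+ *
+ * which the loop walks with pos += strlen(&argstr[pos]) + 1.
+ */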
+
+
+/*
+ * Return 1 if PID exists in the current process list, else 0.
+ */
+int
+psutil_pid_exists(long pid)
+{
+ int kill_ret;
+
+ if (pid < 0)
+ return 0;
+ // if kill returns success or permission denied we know it's a valid PID
+ kill_ret = kill(pid, 0);
+ if ((0 == kill_ret) || (EPERM == errno))
+ return 1;
+ // otherwise return 0 for PID not found
+ return 0;
+}
+
diff --git a/python/psutil/psutil/arch/bsd/process_info.h b/python/psutil/psutil/arch/bsd/process_info.h
new file mode 100644
index 000000000..858bd88a5
--- /dev/null
+++ b/python/psutil/psutil/arch/bsd/process_info.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+typedef struct kinfo_proc kinfo_proc;
+
+char *psutil_get_cmd_args(long pid, size_t *argsize);
+char *psutil_get_cmd_path(long pid, size_t *pathsize);
+int psutil_get_proc_list(struct kinfo_proc **procList, size_t *procCount);
+int psutil_pid_exists(long pid);
+PyObject* psutil_get_arg_list(long pid);
diff --git a/python/psutil/psutil/arch/osx/process_info.c b/python/psutil/psutil/arch/osx/process_info.c
new file mode 100644
index 000000000..b6dd5bb93
--- /dev/null
+++ b/python/psutil/psutil/arch/osx/process_info.c
@@ -0,0 +1,281 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Helper functions related to fetching process information.
+ * Used by _psutil_osx module methods.
+ */
+
+
+#include <Python.h>
+#include <assert.h>
+#include <errno.h>
+#include <limits.h> // for INT_MAX
+#include <stdbool.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <signal.h>
+#include <sys/sysctl.h>
+#include <libproc.h>
+
+#include "process_info.h"
+#include "../../_psutil_common.h"
+
+
+/*
+ * Return 1 if PID exists in the current process list, else 0.
+ */
+int
+psutil_pid_exists(long pid)
+{
+ int kill_ret;
+
+ // save some time if it's an invalid PID
+ if (pid < 0)
+ return 0;
+ // if kill returns success or permission denied we know it's a valid PID
+ kill_ret = kill(pid, 0);
+ if ((0 == kill_ret) || (EPERM == errno))
+ return 1;
+
+ // otherwise return 0 for PID not found
+ return 0;
+}
+
+
+/*
+ * Returns a list of all BSD processes on the system. This routine
+ * allocates the list and puts it in *procList and a count of the
+ * number of entries in *procCount. You are responsible for freeing
+ * this list (use "free" from System framework).
+ * On success, the function returns 0.
+ * On error, the function returns a BSD errno value.
+ */
+int
+psutil_get_proc_list(kinfo_proc **procList, size_t *procCount)
+{
+ // Declaring mib as const requires use of a cast since the
+ // sysctl prototype doesn't include the const modifier.
+ static const int mib3[3] = { CTL_KERN, KERN_PROC, KERN_PROC_ALL };
+ size_t size, size2;
+ void *ptr;
+ int err, lim = 8; // some limit
+
+ assert( procList != NULL);
+ assert(*procList == NULL);
+ assert(procCount != NULL);
+
+ *procCount = 0;
+
+ /*
+ * We start by calling sysctl with ptr == NULL and size == 0.
+ * That will succeed, and set size to the appropriate length.
+ * We then allocate a buffer of at least that size and call
+ * sysctl with that buffer. If that succeeds, we're done.
+ * If that call fails with ENOMEM, we throw the buffer away
+ * and try again.
+ * Note that the loop calls sysctl with NULL again. This is
+ * necessary because the ENOMEM failure case sets size to
+ * the amount of data returned, not the amount of data that
+ * could have been returned.
+ */
+ while (lim-- > 0) {
+ size = 0;
+ if (sysctl((int *)mib3, 3, NULL, &size, NULL, 0) == -1)
+ return errno;
+ size2 = size + (size >> 3); // add some
+ if (size2 > size) {
+ ptr = malloc(size2);
+ if (ptr == NULL)
+ ptr = malloc(size);
+ else
+ size = size2;
+ }
+ else {
+ ptr = malloc(size);
+ }
+ if (ptr == NULL)
+ return ENOMEM;
+
+ if (sysctl((int *)mib3, 3, ptr, &size, NULL, 0) == -1) {
+ err = errno;
+ free(ptr);
+ if (err != ENOMEM)
+ return err;
+ }
+ else {
+ *procList = (kinfo_proc *)ptr;
+ *procCount = size / sizeof(kinfo_proc);
+ return 0;
+ }
+ }
+ return ENOMEM;
+}
+
+
+// Read the maximum argument size for processes
+int
+psutil_get_argmax()
+{
+ int argmax;
+ int mib[] = { CTL_KERN, KERN_ARGMAX };
+ size_t size = sizeof(argmax);
+
+ if (sysctl(mib, 2, &argmax, &size, NULL, 0) == 0)
+ return argmax;
+ return 0;
+}
+
+
+// return process args as a python list
+PyObject *
+psutil_get_arg_list(long pid)
+{
+ int mib[3];
+ int nargs;
+ int len;
+ char *procargs = NULL;
+ char *arg_ptr;
+ char *arg_end;
+ char *curr_arg;
+ size_t argmax;
+ PyObject *arg = NULL;
+ PyObject *arglist = NULL;
+
+ // special case for PID 0 (kernel_task) where cmdline cannot be fetched
+ if (pid == 0)
+ return Py_BuildValue("[]");
+
+ // read argmax and allocate memory for argument space.
+ argmax = psutil_get_argmax();
+ if (! argmax) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ procargs = (char *)malloc(argmax);
+ if (NULL == procargs) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ goto error;
+ }
+
+ // read argument space
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROCARGS2;
+ mib[2] = pid;
+ if (sysctl(mib, 3, procargs, &argmax, NULL, 0) < 0) {
+ if (EINVAL == errno) {
+ // EINVAL == access denied OR nonexistent PID
+ if (psutil_pid_exists(pid))
+ AccessDenied();
+ else
+ NoSuchProcess();
+ }
+ goto error;
+ }
+
+ arg_end = &procargs[argmax];
+ // copy the number of arguments to nargs
+ memcpy(&nargs, procargs, sizeof(nargs));
+
+ arg_ptr = procargs + sizeof(nargs);
+ len = strlen(arg_ptr);
+ arg_ptr += len + 1;
+
+ if (arg_ptr == arg_end) {
+ free(procargs);
+ return Py_BuildValue("[]");
+ }
+
+ // skip ahead to the first argument
+ for (; arg_ptr < arg_end; arg_ptr++) {
+ if (*arg_ptr != '\0')
+ break;
+ }
+
+ // iterate through arguments
+ curr_arg = arg_ptr;
+ arglist = Py_BuildValue("[]");
+ if (!arglist)
+ goto error;
+ while (arg_ptr < arg_end && nargs > 0) {
+ if (*arg_ptr++ == '\0') {
+ arg = Py_BuildValue("s", curr_arg);
+ if (!arg)
+ goto error;
+ if (PyList_Append(arglist, arg))
+ goto error;
+ Py_DECREF(arg);
+ // iterate to next arg and decrement # of args
+ curr_arg = arg_ptr;
+ nargs--;
+ }
+ }
+
+ free(procargs);
+ return arglist;
+
+error:
+ Py_XDECREF(arg);
+ Py_XDECREF(arglist);
+ if (procargs != NULL)
+ free(procargs);
+ return NULL;
+}
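+
+
+/*
+ * Editorial note (not part of psutil): the KERN_PROCARGS2 buffer parsed
+ * above has the following (observed, not formally documented) layout:
+ *
+ *     [int nargs][exec_path\0][\0 padding ...][arg0\0][arg1\0]...[environ]
+ *
+ * The code reads nargs, skips the executable path and the padding NULs,
+ * then collects exactly nargs NUL-terminated argument strings.
+ */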
+
+
+int
+psutil_get_kinfo_proc(pid_t pid, struct kinfo_proc *kp)
+{
+ int mib[4];
+ size_t len;
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PID;
+ mib[3] = pid;
+
+ // fetch the info with sysctl()
+ len = sizeof(struct kinfo_proc);
+
+ // now read the data from sysctl
+ if (sysctl(mib, 4, kp, &len, NULL, 0) == -1) {
+ // raise an exception and throw errno as the error
+ PyErr_SetFromErrno(PyExc_OSError);
+ return -1;
+ }
+
+ // sysctl succeeded but len is zero: the process has gone away
+ if (len == 0) {
+ NoSuchProcess();
+ return -1;
+ }
+ return 0;
+}
+
+
+/*
+ * A thin wrapper around proc_pidinfo()
+ */
+int
+psutil_proc_pidinfo(long pid, int flavor, void *pti, int size)
+{
+ int ret = proc_pidinfo((int)pid, flavor, 0, pti, size);
+ if (ret == 0) {
+ if (! psutil_pid_exists(pid)) {
+ NoSuchProcess();
+ return 0;
+ }
+ else {
+ AccessDenied();
+ return 0;
+ }
+ }
+ else if (ret != size) {
+ AccessDenied();
+ return 0;
+ }
+ else {
+ return 1;
+ }
+}
diff --git a/python/psutil/psutil/arch/osx/process_info.h b/python/psutil/psutil/arch/osx/process_info.h
new file mode 100644
index 000000000..c89c8570e
--- /dev/null
+++ b/python/psutil/psutil/arch/osx/process_info.h
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <Python.h>
+
+typedef struct kinfo_proc kinfo_proc;
+
+int psutil_get_argmax(void);
+int psutil_get_kinfo_proc(pid_t pid, struct kinfo_proc *kp);
+int psutil_get_proc_list(kinfo_proc **procList, size_t *procCount);
+int psutil_pid_exists(long pid);
+int psutil_proc_pidinfo(long pid, int flavor, void *pti, int size);
+PyObject* psutil_get_arg_list(long pid);
diff --git a/python/psutil/psutil/arch/windows/glpi.h b/python/psutil/psutil/arch/windows/glpi.h
new file mode 100644
index 000000000..6f9848373
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/glpi.h
@@ -0,0 +1,41 @@
+// mingw headers are missing this
+
+typedef enum _LOGICAL_PROCESSOR_RELATIONSHIP {
+ RelationProcessorCore,
+ RelationNumaNode,
+ RelationCache,
+ RelationProcessorPackage,
+ RelationGroup,
+ RelationAll=0xffff
+} LOGICAL_PROCESSOR_RELATIONSHIP;
+
+typedef enum _PROCESSOR_CACHE_TYPE {
+ CacheUnified,CacheInstruction,CacheData,CacheTrace
+} PROCESSOR_CACHE_TYPE;
+
+typedef struct _CACHE_DESCRIPTOR {
+ BYTE Level;
+ BYTE Associativity;
+ WORD LineSize;
+ DWORD Size;
+ PROCESSOR_CACHE_TYPE Type;
+} CACHE_DESCRIPTOR,*PCACHE_DESCRIPTOR;
+
+typedef struct _SYSTEM_LOGICAL_PROCESSOR_INFORMATION {
+ ULONG_PTR ProcessorMask;
+ LOGICAL_PROCESSOR_RELATIONSHIP Relationship;
+ union {
+ struct {
+ BYTE Flags;
+ } ProcessorCore;
+ struct {
+ DWORD NodeNumber;
+ } NumaNode;
+ CACHE_DESCRIPTOR Cache;
+ ULONGLONG Reserved[2];
+ };
+} SYSTEM_LOGICAL_PROCESSOR_INFORMATION,*PSYSTEM_LOGICAL_PROCESSOR_INFORMATION;
+
+WINBASEAPI WINBOOL WINAPI
+GetLogicalProcessorInformation(PSYSTEM_LOGICAL_PROCESSOR_INFORMATION Buffer,
+ PDWORD ReturnedLength); \ No newline at end of file
diff --git a/python/psutil/psutil/arch/windows/inet_ntop.c b/python/psutil/psutil/arch/windows/inet_ntop.c
new file mode 100644
index 000000000..b9fffd1c1
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/inet_ntop.c
@@ -0,0 +1,41 @@
+#include "inet_ntop.h"
+
+// From: https://memset.wordpress.com/2010/10/09/inet_ntop-for-win32/
+PCSTR
+WSAAPI
+inet_ntop(
+ __in INT Family,
+ __in PVOID pAddr,
+ __out_ecount(StringBufSize) PSTR pStringBuf,
+ __in size_t StringBufSize
+ )
+{
+ DWORD dwAddressLength = 0;
+ struct sockaddr_storage srcaddr;
+ struct sockaddr_in *srcaddr4 = (struct sockaddr_in*) &srcaddr;
+ struct sockaddr_in6 *srcaddr6 = (struct sockaddr_in6*) &srcaddr;
+
+ memset(&srcaddr, 0, sizeof(struct sockaddr_storage));
+ srcaddr.ss_family = Family;
+
+ if (Family == AF_INET)
+ {
+ dwAddressLength = sizeof(struct sockaddr_in);
+ memcpy(&(srcaddr4->sin_addr), pAddr, sizeof(struct in_addr));
+ } else if (Family == AF_INET6)
+ {
+ dwAddressLength = sizeof(struct sockaddr_in6);
+ memcpy(&(srcaddr6->sin6_addr), pAddr, sizeof(struct in6_addr));
+ } else {
+ return NULL;
+ }
+
+ if (WSAAddressToString((LPSOCKADDR) &srcaddr,
+ dwAddressLength,
+ 0,
+ pStringBuf,
+ (LPDWORD) &StringBufSize) != 0) {
+ return NULL;
+ }
+ return pStringBuf;
+} \ No newline at end of file
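Usage sketch (illustrative, not part of the patch): the wrapper delegates to
WSAAddressToString(), so Winsock must already be initialized with WSAStartup().

    #include <winsock2.h>
    #include <ws2tcpip.h>
    #include <stdio.h>

    // Format 127.0.0.1 into a string buffer; assumes WSAStartup() was called.
    static void
    demo_inet_ntop(void)
    {
        struct in_addr addr;
        char buf[INET6_ADDRSTRLEN];

        addr.s_addr = htonl(0x7f000001);  // 127.0.0.1
        if (inet_ntop(AF_INET, &addr, buf, sizeof(buf)) != NULL)
            printf("%s\n", buf);
    }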
diff --git a/python/psutil/psutil/arch/windows/inet_ntop.h b/python/psutil/psutil/arch/windows/inet_ntop.h
new file mode 100644
index 000000000..0d97e28c8
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/inet_ntop.h
@@ -0,0 +1,10 @@
+#include <ws2tcpip.h>
+
+PCSTR
+WSAAPI
+inet_ntop(
+ __in INT Family,
+ __in PVOID pAddr,
+ __out_ecount(StringBufSize) PSTR pStringBuf,
+ __in size_t StringBufSize
+); \ No newline at end of file
diff --git a/python/psutil/psutil/arch/windows/ntextapi.h b/python/psutil/psutil/arch/windows/ntextapi.h
new file mode 100644
index 000000000..d10432a3e
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/ntextapi.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+#if !defined(__NTEXTAPI_H__)
+#define __NTEXTAPI_H__
+#include <winternl.h>
+
+typedef enum _KTHREAD_STATE {
+ Initialized,
+ Ready,
+ Running,
+ Standby,
+ Terminated,
+ Waiting,
+ Transition,
+ DeferredReady,
+ GateWait,
+ MaximumThreadState
+} KTHREAD_STATE, *PKTHREAD_STATE;
+
+typedef enum _KWAIT_REASON {
+ Executive = 0,
+ FreePage = 1,
+ PageIn = 2,
+ PoolAllocation = 3,
+ DelayExecution = 4,
+ Suspended = 5,
+ UserRequest = 6,
+ WrExecutive = 7,
+ WrFreePage = 8,
+ WrPageIn = 9,
+ WrPoolAllocation = 10,
+ WrDelayExecution = 11,
+ WrSuspended = 12,
+ WrUserRequest = 13,
+ WrEventPair = 14,
+ WrQueue = 15,
+ WrLpcReceive = 16,
+ WrLpcReply = 17,
+ WrVirtualMemory = 18,
+ WrPageOut = 19,
+ WrRendezvous = 20,
+ Spare2 = 21,
+ Spare3 = 22,
+ Spare4 = 23,
+ Spare5 = 24,
+ WrCalloutStack = 25,
+ WrKernel = 26,
+ WrResource = 27,
+ WrPushLock = 28,
+ WrMutex = 29,
+ WrQuantumEnd = 30,
+ WrDispatchInt = 31,
+ WrPreempted = 32,
+ WrYieldExecution = 33,
+ WrFastMutex = 34,
+ WrGuardedMutex = 35,
+ WrRundown = 36,
+ MaximumWaitReason = 37
+} KWAIT_REASON, *PKWAIT_REASON;
+
+typedef struct _CLIENT_ID {
+ HANDLE UniqueProcess;
+ HANDLE UniqueThread;
+} CLIENT_ID, *PCLIENT_ID;
+
+typedef struct _SYSTEM_THREAD_INFORMATION {
+ LARGE_INTEGER KernelTime;
+ LARGE_INTEGER UserTime;
+ LARGE_INTEGER CreateTime;
+ ULONG WaitTime;
+ PVOID StartAddress;
+ CLIENT_ID ClientId;
+ LONG Priority;
+ LONG BasePriority;
+ ULONG ContextSwitches;
+ ULONG ThreadState;
+ KWAIT_REASON WaitReason;
+} SYSTEM_THREAD_INFORMATION, *PSYSTEM_THREAD_INFORMATION;
+
+typedef struct _TEB *PTEB;
+
+// private
+typedef struct _SYSTEM_EXTENDED_THREAD_INFORMATION {
+ SYSTEM_THREAD_INFORMATION ThreadInfo;
+ PVOID StackBase;
+ PVOID StackLimit;
+ PVOID Win32StartAddress;
+ PTEB TebBase;
+ ULONG_PTR Reserved2;
+ ULONG_PTR Reserved3;
+ ULONG_PTR Reserved4;
+} SYSTEM_EXTENDED_THREAD_INFORMATION, *PSYSTEM_EXTENDED_THREAD_INFORMATION;
+
+typedef struct _SYSTEM_PROCESS_INFORMATION2 {
+ ULONG NextEntryOffset;
+ ULONG NumberOfThreads;
+ LARGE_INTEGER SpareLi1;
+ LARGE_INTEGER SpareLi2;
+ LARGE_INTEGER SpareLi3;
+ LARGE_INTEGER CreateTime;
+ LARGE_INTEGER UserTime;
+ LARGE_INTEGER KernelTime;
+ UNICODE_STRING ImageName;
+ LONG BasePriority;
+ HANDLE UniqueProcessId;
+ HANDLE InheritedFromUniqueProcessId;
+ ULONG HandleCount;
+ ULONG SessionId;
+ ULONG_PTR PageDirectoryBase;
+ SIZE_T PeakVirtualSize;
+ SIZE_T VirtualSize;
+ DWORD PageFaultCount;
+ SIZE_T PeakWorkingSetSize;
+ SIZE_T WorkingSetSize;
+ SIZE_T QuotaPeakPagedPoolUsage;
+ SIZE_T QuotaPagedPoolUsage;
+ SIZE_T QuotaPeakNonPagedPoolUsage;
+ SIZE_T QuotaNonPagedPoolUsage;
+ SIZE_T PagefileUsage;
+ SIZE_T PeakPagefileUsage;
+ SIZE_T PrivatePageCount;
+ LARGE_INTEGER ReadOperationCount;
+ LARGE_INTEGER WriteOperationCount;
+ LARGE_INTEGER OtherOperationCount;
+ LARGE_INTEGER ReadTransferCount;
+ LARGE_INTEGER WriteTransferCount;
+ LARGE_INTEGER OtherTransferCount;
+ SYSTEM_THREAD_INFORMATION Threads[1];
+} SYSTEM_PROCESS_INFORMATION2, *PSYSTEM_PROCESS_INFORMATION2;
+
+#define SYSTEM_PROCESS_INFORMATION SYSTEM_PROCESS_INFORMATION2
+#define PSYSTEM_PROCESS_INFORMATION PSYSTEM_PROCESS_INFORMATION2
+
+
+// ================================================
+// psutil.users() support
+// ================================================
+
+typedef struct _WINSTATION_INFO {
+ BYTE Reserved1[72];
+ ULONG SessionId;
+ BYTE Reserved2[4];
+ FILETIME ConnectTime;
+ FILETIME DisconnectTime;
+ FILETIME LastInputTime;
+ FILETIME LoginTime;
+ BYTE Reserved3[1096];
+ FILETIME CurrentTime;
+} WINSTATION_INFO, *PWINSTATION_INFO;
+
+typedef BOOLEAN (WINAPI * PWINSTATIONQUERYINFORMATIONW)
+ (HANDLE,ULONG,WINSTATIONINFOCLASS,PVOID,ULONG,PULONG);
+
+
+/*
+ * NtQueryInformationProcess code taken from
+ * http://wj32.wordpress.com/2009/01/24/howto-get-the-command-line-of-processes/
+ * typedefs needed to compile against ntdll functions not exposed in the API
+ */
+typedef LONG NTSTATUS;
+
+typedef NTSTATUS (NTAPI *_NtQueryInformationProcess)(
+ HANDLE ProcessHandle,
+ DWORD ProcessInformationClass,
+ PVOID ProcessInformation,
+ DWORD ProcessInformationLength,
+ PDWORD ReturnLength
+);
+
+typedef NTSTATUS (NTAPI *_NtSetInformationProcess)(
+ HANDLE ProcessHandle,
+ DWORD ProcessInformationClass,
+ PVOID ProcessInformation,
+ DWORD ProcessInformationLength
+);
+
+
+typedef enum _PROCESSINFOCLASS2 {
+ _ProcessBasicInformation,
+ ProcessQuotaLimits,
+ ProcessIoCounters,
+ ProcessVmCounters,
+ ProcessTimes,
+ ProcessBasePriority,
+ ProcessRaisePriority,
+ ProcessDebugPort,
+ ProcessExceptionPort,
+ ProcessAccessToken,
+ ProcessLdtInformation,
+ ProcessLdtSize,
+ ProcessDefaultHardErrorMode,
+ ProcessIoPortHandlers,
+ ProcessPooledUsageAndLimits,
+ ProcessWorkingSetWatch,
+ ProcessUserModeIOPL,
+ ProcessEnableAlignmentFaultFixup,
+ ProcessPriorityClass,
+ ProcessWx86Information,
+ ProcessHandleCount,
+ ProcessAffinityMask,
+ ProcessPriorityBoost,
+ ProcessDeviceMap,
+ ProcessSessionInformation,
+ ProcessForegroundInformation,
+ _ProcessWow64Information,
+ /* added after XP+ */
+ ProcessImageFileName,
+ ProcessLUIDDeviceMapsEnabled,
+ ProcessBreakOnTermination,
+ ProcessDebugObjectHandle,
+ ProcessDebugFlags,
+ ProcessHandleTracing,
+ ProcessIoPriority,
+ ProcessExecuteFlags,
+ ProcessResourceManagement,
+ ProcessCookie,
+ ProcessImageInformation,
+ MaxProcessInfoClass
+} PROCESSINFOCLASS2;
+
+#define PROCESSINFOCLASS PROCESSINFOCLASS2
+#define ProcessBasicInformation _ProcessBasicInformation
+#define ProcessWow64Information _ProcessWow64Information
+
+#endif // __NTEXTAPI_H__
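A short sketch of how the _NtQueryInformationProcess typedef above is meant
to be used (illustrative, not part of the patch): resolve the undocumented
symbol from ntdll at runtime rather than linking against it.

    #include <windows.h>

    // Returns NULL if ntdll.dll or the symbol cannot be resolved.
    static _NtQueryInformationProcess
    resolve_nt_query_information_process(void)
    {
        HMODULE ntdll = GetModuleHandleA("ntdll.dll");
        if (ntdll == NULL)
            return NULL;
        return (_NtQueryInformationProcess)
            GetProcAddress(ntdll, "NtQueryInformationProcess");
    }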
diff --git a/python/psutil/psutil/arch/windows/process_handles.c b/python/psutil/psutil/arch/windows/process_handles.c
new file mode 100644
index 000000000..b3f480af5
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/process_handles.c
@@ -0,0 +1,533 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ */
+#include "process_handles.h"
+
+static _NtQuerySystemInformation __NtQuerySystemInformation = NULL;
+static _NtQueryObject __NtQueryObject = NULL;
+
+CRITICAL_SECTION g_cs;
+BOOL g_initialized = FALSE;
+NTSTATUS g_status;
+HANDLE g_hFile = NULL;
+HANDLE g_hEvtStart = NULL;
+HANDLE g_hEvtFinish = NULL;
+HANDLE g_hThread = NULL;
+PUNICODE_STRING g_pNameBuffer = NULL;
+ULONG g_dwSize = 0;
+ULONG g_dwLength = 0;
+PVOID g_fiber = NULL;
+
+
+PVOID
+GetLibraryProcAddress(PSTR LibraryName, PSTR ProcName)
+{
+ return GetProcAddress(GetModuleHandleA(LibraryName), ProcName);
+}
+
+PyObject *
+psutil_get_open_files(long dwPid, HANDLE hProcess)
+{
+ OSVERSIONINFO osvi;
+
+ ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
+ osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+ GetVersionEx(&osvi);
+
+ // Threaded version only works for Vista+
+ if (osvi.dwMajorVersion >= 6)
+ return psutil_get_open_files_ntqueryobject(dwPid, hProcess);
+ else
+ return psutil_get_open_files_getmappedfilename(dwPid, hProcess);
+}
+
+VOID
+psutil_get_open_files_init(BOOL threaded)
+{
+ if (g_initialized == TRUE)
+ return;
+
+ // Resolve the Windows API calls
+ __NtQuerySystemInformation =
+ GetLibraryProcAddress("ntdll.dll", "NtQuerySystemInformation");
+ __NtQueryObject = GetLibraryProcAddress("ntdll.dll", "NtQueryObject");
+
+ // Create events for signalling work between threads
+ if (threaded == TRUE) {
+ g_hEvtStart = CreateEvent(NULL, FALSE, FALSE, NULL);
+ g_hEvtFinish = CreateEvent(NULL, FALSE, FALSE, NULL);
+ InitializeCriticalSection(&g_cs);
+ }
+
+ g_initialized = TRUE;
+}
+
+PyObject *
+psutil_get_open_files_ntqueryobject(long dwPid, HANDLE hProcess)
+{
+ NTSTATUS status;
+ PSYSTEM_HANDLE_INFORMATION_EX pHandleInfo = NULL;
+ DWORD dwInfoSize = 0x10000;
+ DWORD dwRet = 0;
+ PSYSTEM_HANDLE_TABLE_ENTRY_INFO_EX hHandle = NULL;
+ DWORD i = 0;
+ BOOLEAN error = FALSE;
+ PyObject* pyListFiles = NULL;
+ PyObject* pyFilePath = NULL;
+ DWORD dwWait = 0;
+
+ if (g_initialized == FALSE)
+ psutil_get_open_files_init(TRUE);
+
+ // Due to the use of global variables, ensure only one call
+ // to psutil_get_open_files() runs at a time
+ EnterCriticalSection(&g_cs);
+
+ if (__NtQuerySystemInformation == NULL ||
+ __NtQueryObject == NULL ||
+ g_hEvtStart == NULL ||
+ g_hEvtFinish == NULL)
+
+ {
+ PyErr_SetFromWindowsErr(0);
+ error = TRUE;
+ goto cleanup;
+ }
+
+ // PyList_New sets an exception on failure, so just bail out
+ pyListFiles = PyList_New(0);
+ if (pyListFiles == NULL) {
+ error = TRUE;
+ goto cleanup;
+ }
+
+ do {
+ if (pHandleInfo != NULL) {
+ HeapFree(GetProcessHeap(), 0, pHandleInfo);
+ pHandleInfo = NULL;
+ }
+
+ // NtQuerySystemInformation won't give us the correct buffer size,
+ // so we guess by doubling the buffer size.
+ dwInfoSize *= 2;
+ pHandleInfo = HeapAlloc(GetProcessHeap(),
+ HEAP_ZERO_MEMORY,
+ dwInfoSize);
+
+ if (pHandleInfo == NULL) {
+ PyErr_NoMemory();
+ error = TRUE;
+ goto cleanup;
+ }
+ } while ((status = __NtQuerySystemInformation(
+ SystemExtendedHandleInformation,
+ pHandleInfo,
+ dwInfoSize,
+ &dwRet)) == STATUS_INFO_LENGTH_MISMATCH);
+
+ // NtQuerySystemInformation stopped giving us STATUS_INFO_LENGTH_MISMATCH
+ if (!NT_SUCCESS(status)) {
+ PyErr_SetFromWindowsErr(HRESULT_FROM_NT(status));
+ error = TRUE;
+ goto cleanup;
+ }
+
+ for (i = 0; i < pHandleInfo->NumberOfHandles; i++) {
+ hHandle = &pHandleInfo->Handles[i];
+
+ // Check if this hHandle belongs to the PID the user specified.
+ if (hHandle->UniqueProcessId != (HANDLE)dwPid ||
+ hHandle->ObjectTypeIndex != HANDLE_TYPE_FILE)
+ goto loop_cleanup;
+
+ if (!DuplicateHandle(hProcess,
+ hHandle->HandleValue,
+ GetCurrentProcess(),
+ &g_hFile,
+ 0,
+ TRUE,
+ DUPLICATE_SAME_ACCESS))
+ {
+ /*
+ printf("[%d] DuplicateHandle (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ goto loop_cleanup;
+ }
+
+ // Initial guess: a buffer of (MAX_PATH + 1) wide characters
+ g_dwLength = (MAX_PATH+1) * sizeof(WCHAR);
+
+ do {
+ // Release any previously allocated buffer
+ if (g_pNameBuffer != NULL) {
+ HeapFree(GetProcessHeap(), 0, g_pNameBuffer);
+ g_pNameBuffer = NULL;
+ g_dwSize = 0;
+ }
+
+ // NtQueryObject puts the required buffer size in g_dwLength.
+ // A WinXP edge case can leave g_dwLength == 0; just skip this handle
+ if (g_dwLength == 0)
+ goto loop_cleanup;
+
+ g_dwSize = g_dwLength;
+ if (g_dwSize > 0) {
+ g_pNameBuffer = HeapAlloc(GetProcessHeap(),
+ HEAP_ZERO_MEMORY,
+ g_dwSize);
+
+ if (g_pNameBuffer == NULL)
+ goto loop_cleanup;
+ }
+
+ dwWait = psutil_NtQueryObject();
+
+ // If the call did not complete in time, skip this handle
+ if (dwWait != WAIT_OBJECT_0)
+ goto loop_cleanup;
+
+ } while (g_status == STATUS_INFO_LENGTH_MISMATCH);
+
+ // NtQueryObject stopped returning STATUS_INFO_LENGTH_MISMATCH
+ if (!NT_SUCCESS(g_status))
+ goto loop_cleanup;
+
+ // Convert to PyUnicode and append it to the return list
+ if (g_pNameBuffer->Length > 0) {
+ /*
+ printf("[%d] Filename (%#x) %#d bytes: %S\n",
+ dwPid,
+ hHandle->HandleValue,
+ g_pNameBuffer->Length,
+ g_pNameBuffer->Buffer);
+ */
+
+ pyFilePath = PyUnicode_FromWideChar(g_pNameBuffer->Buffer,
+ g_pNameBuffer->Length/2);
+ if (pyFilePath == NULL) {
+ /*
+ printf("[%d] PyUnicode_FromWideChar (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ error = TRUE;
+ goto loop_cleanup;
+ }
+
+ if (PyList_Append(pyListFiles, pyFilePath)) {
+ /*
+ printf("[%d] PyList_Append (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ error = TRUE;
+ goto loop_cleanup;
+ }
+ }
+
+loop_cleanup:
+ Py_XDECREF(pyFilePath);
+ pyFilePath = NULL;
+
+ if (g_pNameBuffer != NULL)
+ HeapFree(GetProcessHeap(), 0, g_pNameBuffer);
+ g_pNameBuffer = NULL;
+ g_dwSize = 0;
+ g_dwLength = 0;
+
+ if (g_hFile != NULL)
+ CloseHandle(g_hFile);
+ g_hFile = NULL;
+ }
+
+cleanup:
+ if (g_pNameBuffer != NULL)
+ HeapFree(GetProcessHeap(), 0, g_pNameBuffer);
+ g_pNameBuffer = NULL;
+ g_dwSize = 0;
+ g_dwLength = 0;
+
+ if (g_hFile != NULL)
+ CloseHandle(g_hFile);
+ g_hFile = NULL;
+
+ if (pHandleInfo != NULL)
+ HeapFree(GetProcessHeap(), 0, pHandleInfo);
+ pHandleInfo = NULL;
+
+ if (error) {
+ Py_XDECREF(pyListFiles);
+ pyListFiles = NULL;
+ }
+
+ LeaveCriticalSection(&g_cs);
+
+ return pyListFiles;
+}
+
+DWORD
+psutil_NtQueryObject()
+{
+ DWORD dwWait = 0;
+
+ if (g_hThread == NULL)
+ g_hThread = CreateThread(NULL,
+ 0,
+ (LPTHREAD_START_ROUTINE)psutil_NtQueryObjectThread,
+ NULL,
+ 0,
+ NULL);
+ if (g_hThread == NULL)
+ return GetLastError();
+
+ // Signal the worker thread to start
+ SetEvent(g_hEvtStart);
+
+ // Wait for the worker thread to finish
+ dwWait = WaitForSingleObject(g_hEvtFinish, NTQO_TIMEOUT);
+
+ // If the thread hangs, kill it and clean up
+ if (dwWait == WAIT_TIMEOUT) {
+ SuspendThread(g_hThread);
+ TerminateThread(g_hThread, 1);
+ WaitForSingleObject(g_hThread, INFINITE);
+ CloseHandle(g_hThread);
+
+ // Cleanup Fiber
+ if (g_fiber != NULL)
+ DeleteFiber(g_fiber);
+ g_fiber = NULL;
+
+ g_hThread = NULL;
+ }
+
+ return dwWait;
+}
+
+void
+psutil_NtQueryObjectThread()
+{
+ // Prevent the thread stack from leaking when this
+ // thread gets terminated due to NtQueryObject hanging
+ g_fiber = ConvertThreadToFiber(NULL);
+
+ // Loop infinitely waiting for work
+ while (TRUE) {
+ WaitForSingleObject(g_hEvtStart, INFINITE);
+
+ g_status = __NtQueryObject(g_hFile,
+ ObjectNameInformation,
+ g_pNameBuffer,
+ g_dwSize,
+ &g_dwLength);
+ SetEvent(g_hEvtFinish);
+ }
+}
+
+PyObject *
+psutil_get_open_files_getmappedfilename(long dwPid, HANDLE hProcess)
+{
+ NTSTATUS status;
+ PSYSTEM_HANDLE_INFORMATION_EX pHandleInfo = NULL;
+ DWORD dwInfoSize = 0x10000;
+ DWORD dwRet = 0;
+ PSYSTEM_HANDLE_TABLE_ENTRY_INFO_EX hHandle = NULL;
+ HANDLE hFile = NULL;
+ HANDLE hMap = NULL;
+ DWORD i = 0;
+ BOOLEAN error = FALSE;
+ PyObject* pyListFiles = NULL;
+ PyObject* pyFilePath = NULL;
+ ULONG dwSize = 0;
+ LPVOID pMem = NULL;
+ TCHAR pszFilename[MAX_PATH+1];
+
+ if (g_initialized == FALSE)
+ psutil_get_open_files_init(FALSE);
+
+ if (__NtQuerySystemInformation == NULL || __NtQueryObject == NULL) {
+ PyErr_SetFromWindowsErr(0);
+ error = TRUE;
+ goto cleanup;
+ }
+
+ // PyList_New sets an exception on failure, so just bail out
+ pyListFiles = PyList_New(0);
+ if (pyListFiles == NULL) {
+ error = TRUE;
+ goto cleanup;
+ }
+
+ do {
+ if (pHandleInfo != NULL) {
+ HeapFree(GetProcessHeap(), 0, pHandleInfo);
+ pHandleInfo = NULL;
+ }
+
+ // NtQuerySystemInformation won't give us the correct buffer size,
+ // so we guess by doubling the buffer size.
+ dwInfoSize *= 2;
+ pHandleInfo = HeapAlloc(GetProcessHeap(),
+ HEAP_ZERO_MEMORY,
+ dwInfoSize);
+
+ if (pHandleInfo == NULL) {
+ PyErr_NoMemory();
+ error = TRUE;
+ goto cleanup;
+ }
+ } while ((status = __NtQuerySystemInformation(
+ SystemExtendedHandleInformation,
+ pHandleInfo,
+ dwInfoSize,
+ &dwRet)) == STATUS_INFO_LENGTH_MISMATCH);
+
+ // NtQuerySystemInformation stopped giving us STATUS_INFO_LENGTH_MISMATCH
+ if (!NT_SUCCESS(status)) {
+ PyErr_SetFromWindowsErr(HRESULT_FROM_NT(status));
+ error = TRUE;
+ goto cleanup;
+ }
+
+ for (i = 0; i < pHandleInfo->NumberOfHandles; i++) {
+ hHandle = &pHandleInfo->Handles[i];
+
+ // Check if this hHandle belongs to the PID the user specified.
+ if (hHandle->UniqueProcessId != (HANDLE)dwPid ||
+ hHandle->ObjectTypeIndex != HANDLE_TYPE_FILE)
+ goto loop_cleanup;
+
+ if (!DuplicateHandle(hProcess,
+ hHandle->HandleValue,
+ GetCurrentProcess(),
+ &hFile,
+ 0,
+ TRUE,
+ DUPLICATE_SAME_ACCESS))
+ {
+ /*
+ printf("[%d] DuplicateHandle (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ goto loop_cleanup;
+ }
+
+ hMap = CreateFileMapping(hFile, NULL, PAGE_READONLY, 0, 0, NULL);
+ if (hMap == NULL) {
+ /*
+ printf("[%d] CreateFileMapping (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ goto loop_cleanup;
+ }
+
+ pMem = MapViewOfFile(hMap, FILE_MAP_READ, 0, 0, 1);
+
+ if (pMem == NULL) {
+ /*
+ printf("[%d] MapViewOfFile (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ goto loop_cleanup;
+ }
+
+ dwSize = GetMappedFileName(GetCurrentProcess(), pMem, pszFilename, MAX_PATH);
+ if (dwSize == 0) {
+ /*
+ printf("[%d] GetMappedFileName (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ goto loop_cleanup;
+ }
+
+ pszFilename[dwSize] = '\0';
+ /*
+ printf("[%d] Filename (%#x) %#d bytes: %S\n",
+ dwPid,
+ hHandle->HandleValue,
+ dwSize,
+ pszFilename);
+ */
+
+ pyFilePath = PyUnicode_FromWideChar(pszFilename, dwSize);
+ if (pyFilePath == NULL) {
+ /*
+ printf("[%d] PyUnicode_FromStringAndSize (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ error = TRUE;
+ goto loop_cleanup;
+ }
+
+ if (PyList_Append(pyListFiles, pyFilePath)) {
+ /*
+ printf("[%d] PyList_Append (%#x): %#x \n",
+ dwPid,
+ hHandle->HandleValue,
+ GetLastError());
+ */
+ error = TRUE;
+ goto loop_cleanup;
+ }
+
+loop_cleanup:
+ Py_XDECREF(pyFilePath);
+ pyFilePath = NULL;
+
+ if (pMem != NULL)
+ UnmapViewOfFile(pMem);
+ pMem = NULL;
+
+ if (hMap != NULL)
+ CloseHandle(hMap);
+ hMap = NULL;
+
+ if (hFile != NULL)
+ CloseHandle(hFile);
+ hFile = NULL;
+
+ dwSize = 0;
+ }
+
+cleanup:
+ if (pMem != NULL)
+ UnmapViewOfFile(pMem);
+ pMem = NULL;
+
+ if (hMap != NULL)
+ CloseHandle(hMap);
+ hMap = NULL;
+
+ if (hFile != NULL)
+ CloseHandle(hFile);
+ hFile = NULL;
+
+ if (pHandleInfo != NULL)
+ HeapFree(GetProcessHeap(), 0, pHandleInfo);
+ pHandleInfo = NULL;
+
+ if (error) {
+ Py_XDECREF(pyListFiles);
+ pyListFiles = NULL;
+ }
+
+ return pyListFiles;
+}
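A caller sketch (illustrative only; the method name is hypothetical): a module
method would open the target process with PROCESS_DUP_HANDLE (the access right
the DuplicateHandle() calls above need) and hand the handle to
psutil_get_open_files(), which returns NULL with an exception set on error.

    static PyObject *
    demo_proc_open_files(PyObject *self, PyObject *args)
    {
        long pid;
        HANDLE hProcess;
        PyObject *ret;

        if (!PyArg_ParseTuple(args, "l", &pid))
            return NULL;
        hProcess = OpenProcess(PROCESS_DUP_HANDLE, FALSE, (DWORD)pid);
        if (hProcess == NULL)
            return PyErr_SetFromWindowsErr(0);
        ret = psutil_get_open_files(pid, hProcess);
        CloseHandle(hProcess);
        return ret;
    }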
diff --git a/python/psutil/psutil/arch/windows/process_handles.h b/python/psutil/psutil/arch/windows/process_handles.h
new file mode 100644
index 000000000..4cf4023ec
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/process_handles.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifndef __PROCESS_HANDLES_H__
+#define __PROCESS_HANDLES_H__
+
+#ifndef UNICODE
+#define UNICODE
+#endif
+
+#include <Python.h>
+#include <stdio.h>
+#include <windows.h>
+#include <strsafe.h>
+#include <winternl.h>
+#include <psapi.h>
+
+
+#ifndef NT_SUCCESS
+#define NT_SUCCESS(x) ((x) >= 0)
+#endif
+
+#define STATUS_INFO_LENGTH_MISMATCH 0xc0000004
+#define ObjectBasicInformation 0
+#define ObjectNameInformation 1
+#define ObjectTypeInformation 2
+#define HANDLE_TYPE_FILE 28
+#define NTQO_TIMEOUT 100
+
+typedef NTSTATUS (NTAPI *_NtQuerySystemInformation)(
+ ULONG SystemInformationClass,
+ PVOID SystemInformation,
+ ULONG SystemInformationLength,
+ PULONG ReturnLength
+);
+
+typedef NTSTATUS (NTAPI *_NtQueryObject)(
+ HANDLE ObjectHandle,
+ ULONG ObjectInformationClass,
+ PVOID ObjectInformation,
+ ULONG ObjectInformationLength,
+ PULONG ReturnLength
+);
+
+// Undocumented SYSTEM_INFORMATION_CLASS value: SystemExtendedHandleInformation
+static const SYSTEM_INFORMATION_CLASS SystemExtendedHandleInformation = (SYSTEM_INFORMATION_CLASS)64;
+
+typedef struct _SYSTEM_HANDLE_TABLE_ENTRY_INFO_EX
+{
+ PVOID Object;
+ HANDLE UniqueProcessId;
+ HANDLE HandleValue;
+ ULONG GrantedAccess;
+ USHORT CreatorBackTraceIndex;
+ USHORT ObjectTypeIndex;
+ ULONG HandleAttributes;
+ ULONG Reserved;
+} SYSTEM_HANDLE_TABLE_ENTRY_INFO_EX, *PSYSTEM_HANDLE_TABLE_ENTRY_INFO_EX;
+
+typedef struct _SYSTEM_HANDLE_INFORMATION_EX
+{
+ ULONG_PTR NumberOfHandles;
+ ULONG_PTR Reserved;
+ SYSTEM_HANDLE_TABLE_ENTRY_INFO_EX Handles[1];
+} SYSTEM_HANDLE_INFORMATION_EX, *PSYSTEM_HANDLE_INFORMATION_EX;
+
+typedef enum _POOL_TYPE {
+ NonPagedPool,
+ PagedPool,
+ NonPagedPoolMustSucceed,
+ DontUseThisType,
+ NonPagedPoolCacheAligned,
+ PagedPoolCacheAligned,
+ NonPagedPoolCacheAlignedMustS
+} POOL_TYPE, *PPOOL_TYPE;
+
+typedef struct _OBJECT_TYPE_INFORMATION {
+ UNICODE_STRING Name;
+ ULONG TotalNumberOfObjects;
+ ULONG TotalNumberOfHandles;
+ ULONG TotalPagedPoolUsage;
+ ULONG TotalNonPagedPoolUsage;
+ ULONG TotalNamePoolUsage;
+ ULONG TotalHandleTableUsage;
+ ULONG HighWaterNumberOfObjects;
+ ULONG HighWaterNumberOfHandles;
+ ULONG HighWaterPagedPoolUsage;
+ ULONG HighWaterNonPagedPoolUsage;
+ ULONG HighWaterNamePoolUsage;
+ ULONG HighWaterHandleTableUsage;
+ ULONG InvalidAttributes;
+ GENERIC_MAPPING GenericMapping;
+ ULONG ValidAccess;
+ BOOLEAN SecurityRequired;
+ BOOLEAN MaintainHandleCount;
+ USHORT MaintainTypeList;
+ POOL_TYPE PoolType;
+ ULONG PagedPoolUsage;
+ ULONG NonPagedPoolUsage;
+} OBJECT_TYPE_INFORMATION, *POBJECT_TYPE_INFORMATION;
+
+PVOID GetLibraryProcAddress(PSTR LibraryName, PSTR ProcName);
+VOID psutil_get_open_files_init(BOOL threaded);
+PyObject* psutil_get_open_files(long pid, HANDLE processHandle);
+PyObject* psutil_get_open_files_ntqueryobject(long dwPid, HANDLE hProcess);
+PyObject* psutil_get_open_files_getmappedfilename(long dwPid, HANDLE hProcess);
+DWORD psutil_NtQueryObject(void);
+void psutil_NtQueryObjectThread(void);
+
+#endif // __PROCESS_HANDLES_H__
diff --git a/python/psutil/psutil/arch/windows/process_info.c b/python/psutil/psutil/arch/windows/process_info.c
new file mode 100644
index 000000000..a59cce47a
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/process_info.c
@@ -0,0 +1,435 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Helper functions related to fetching process information. Used by
+ * _psutil_windows module methods.
+ */
+
+#include <Python.h>
+#include <windows.h>
+#include <Psapi.h>
+#include <tlhelp32.h>
+
+#include "security.h"
+#include "process_info.h"
+#include "ntextapi.h"
+#include "../../_psutil_common.h"
+
+
+/*
+ * A wrapper around OpenProcess setting NSP exception if process
+ * no longer exists.
+ * "pid" is the process pid, "dwDesiredAccess" is the first argument
+ * exptected by OpenProcess.
+ * Return a process handle or NULL.
+ */
+HANDLE
+psutil_handle_from_pid_waccess(DWORD pid, DWORD dwDesiredAccess)
+{
+ HANDLE hProcess;
+ DWORD processExitCode = 0;
+
+ if (pid == 0) {
+ // otherwise we'd get NoSuchProcess
+ return AccessDenied();
+ }
+
+ hProcess = OpenProcess(dwDesiredAccess, FALSE, pid);
+ if (hProcess == NULL) {
+ if (GetLastError() == ERROR_INVALID_PARAMETER)
+ NoSuchProcess();
+ else
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+
+ // make sure the process is running
+ GetExitCodeProcess(hProcess, &processExitCode);
+ if (processExitCode == 0) {
+ NoSuchProcess();
+ CloseHandle(hProcess);
+ return NULL;
+ }
+ return hProcess;
+}
+
+
+/*
+ * Same as psutil_handle_from_pid_waccess but implicitly uses
+ * PROCESS_QUERY_INFORMATION | PROCESS_VM_READ as dwDesiredAccess
+ * parameter for OpenProcess.
+ */
+HANDLE
+psutil_handle_from_pid(DWORD pid) {
+ DWORD dwDesiredAccess = PROCESS_QUERY_INFORMATION | PROCESS_VM_READ;
+ return psutil_handle_from_pid_waccess(pid, dwDesiredAccess);
+}
+
+
+// fetch the PEB base address from NtQueryInformationProcess()
+PVOID
+psutil_get_peb_address(HANDLE ProcessHandle)
+{
+ _NtQueryInformationProcess NtQueryInformationProcess =
+ (_NtQueryInformationProcess)GetProcAddress(
+ GetModuleHandleA("ntdll.dll"), "NtQueryInformationProcess");
+ PROCESS_BASIC_INFORMATION pbi;
+
+ NtQueryInformationProcess(ProcessHandle, 0, &pbi, sizeof(pbi), NULL);
+ return pbi.PebBaseAddress;
+}
+
+
+DWORD *
+psutil_get_pids(DWORD *numberOfReturnedPIDs) {
+ // Win32 SDK says the only way to know if our process array
+ // wasn't large enough is to check the returned size and make
+ // sure that it doesn't match the size of the array.
+ // If it does, we allocate a larger array and try again
+
+ // Stores the actual array
+ DWORD *procArray = NULL;
+ DWORD procArrayByteSz;
+ int procArraySz = 0;
+
+ // Stores the byte size of the returned array from enumprocesses
+ DWORD enumReturnSz = 0;
+
+ do {
+ procArraySz += 1024;
+ free(procArray);
+ procArrayByteSz = procArraySz * sizeof(DWORD);
+ procArray = malloc(procArrayByteSz);
+ if (procArray == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ if (! EnumProcesses(procArray, procArrayByteSz, &enumReturnSz)) {
+ free(procArray);
+ PyErr_SetFromWindowsErr(0);
+ return NULL;
+ }
+ } while (enumReturnSz == procArraySz * sizeof(DWORD));
+
+ // The number of elements is the returned size / size of each element
+ *numberOfReturnedPIDs = enumReturnSz / sizeof(DWORD);
+
+ return procArray;
+}
+
+
+int
+psutil_pid_is_running(DWORD pid)
+{
+ HANDLE hProcess;
+ DWORD exitCode;
+
+ // Special case for PID 0 System Idle Process
+ if (pid == 0)
+ return 1;
+ if (pid < 0)
+ return 0;
+
+ hProcess = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
+ FALSE, pid);
+ if (NULL == hProcess) {
+ // invalid parameter is no such process
+ if (GetLastError() == ERROR_INVALID_PARAMETER) {
+ CloseHandle(hProcess);
+ return 0;
+ }
+
+ // access denied obviously means there's a process to deny access to...
+ if (GetLastError() == ERROR_ACCESS_DENIED) {
+ CloseHandle(hProcess);
+ return 1;
+ }
+
+ CloseHandle(hProcess);
+ PyErr_SetFromWindowsErr(0);
+ return -1;
+ }
+
+ if (GetExitCodeProcess(hProcess, &exitCode)) {
+ CloseHandle(hProcess);
+ return (exitCode == STILL_ACTIVE);
+ }
+
+ // access denied means there's a process there so we'll assume
+ // it's running
+ if (GetLastError() == ERROR_ACCESS_DENIED) {
+ CloseHandle(hProcess);
+ return 1;
+ }
+
+ PyErr_SetFromWindowsErr(0);
+ CloseHandle(hProcess);
+ return -1;
+}
+
+
+int
+psutil_pid_in_proclist(DWORD pid)
+{
+ DWORD *proclist = NULL;
+ DWORD numberOfReturnedPIDs;
+ DWORD i;
+
+ proclist = psutil_get_pids(&numberOfReturnedPIDs);
+ if (proclist == NULL)
+ return -1;
+ for (i = 0; i < numberOfReturnedPIDs; i++) {
+ if (pid == proclist[i]) {
+ free(proclist);
+ return 1;
+ }
+ }
+
+ free(proclist);
+ return 0;
+}
+
+
+// Check the exit code from a process handle. Also returns FALSE on error.
+// XXX - not used anymore
+int
+handlep_is_running(HANDLE hProcess)
+{
+ DWORD dwCode;
+
+ if (NULL == hProcess)
+ return 0;
+ if (GetExitCodeProcess(hProcess, &dwCode)) {
+ if (dwCode == STILL_ACTIVE)
+ return 1;
+ }
+ return 0;
+}
+
+
+/*
+ * returns a Python list representing the arguments for the process
+ * with given pid or NULL on error.
+ */
+PyObject *
+psutil_get_arg_list(long pid)
+{
+ int nArgs, i;
+ LPWSTR *szArglist = NULL;
+ HANDLE hProcess = NULL;
+ PVOID pebAddress;
+ PVOID rtlUserProcParamsAddress;
+ UNICODE_STRING commandLine;
+ WCHAR *commandLineContents = NULL;
+ PyObject *arg = NULL;
+ PyObject *arg_from_wchar = NULL;
+ PyObject *argList = NULL;
+
+ hProcess = psutil_handle_from_pid(pid);
+ if (hProcess == NULL)
+ return NULL;
+ pebAddress = psutil_get_peb_address(hProcess);
+
+ // get the address of ProcessParameters
+#ifdef _WIN64
+ if (!ReadProcessMemory(hProcess, (PCHAR)pebAddress + 32,
+ &rtlUserProcParamsAddress, sizeof(PVOID), NULL))
+#else
+ if (!ReadProcessMemory(hProcess, (PCHAR)pebAddress + 0x10,
+ &rtlUserProcParamsAddress, sizeof(PVOID), NULL))
+#endif
+ {
+ ////printf("Could not read the address of ProcessParameters!\n");
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ // read the CommandLine UNICODE_STRING structure
+#ifdef _WIN64
+ if (!ReadProcessMemory(hProcess, (PCHAR)rtlUserProcParamsAddress + 112,
+ &commandLine, sizeof(commandLine), NULL))
+#else
+ if (!ReadProcessMemory(hProcess, (PCHAR)rtlUserProcParamsAddress + 0x40,
+ &commandLine, sizeof(commandLine), NULL))
+#endif
+ {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+
+ // allocate memory to hold the command line
+ commandLineContents = (WCHAR *)malloc(commandLine.Length + 1);
+ if (commandLineContents == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ // read the command line
+ if (!ReadProcessMemory(hProcess, commandLine.Buffer,
+ commandLineContents, commandLine.Length, NULL))
+ {
+ PyErr_SetFromWindowsErr(0);
+ goto error;
+ }
+
+ // Null-terminate the string to prevent wcslen from returning an
+ // incorrect length: the length specifier is in characters, but
+ // commandLine.Length is in bytes.
+ commandLineContents[(commandLine.Length / sizeof(WCHAR))] = '\0';
+
+ // attempt to parse the command line using the Win32 API, falling
+ // back on the raw cmdline string otherwise
+ szArglist = CommandLineToArgvW(commandLineContents, &nArgs);
+ if (NULL == szArglist) {
+ // failed to parse arglist
+ // encode as a UTF8 Python string object from WCHAR string
+ arg_from_wchar = PyUnicode_FromWideChar(commandLineContents,
+ commandLine.Length / 2);
+ if (arg_from_wchar == NULL)
+ goto error;
+#if PY_MAJOR_VERSION >= 3
+ argList = Py_BuildValue("N", PyUnicode_AsUTF8String(arg_from_wchar));
+#else
+ argList = Py_BuildValue("N", PyUnicode_FromObject(arg_from_wchar));
+#endif
+ if (!argList)
+ goto error;
+ }
+ else {
+ // arglist parsed as an array of wide-char strings, so convert
+ // each to a Python string object and add it to the arg list
+ argList = Py_BuildValue("[]");
+ if (argList == NULL)
+ goto error;
+ for (i = 0; i < nArgs; i++) {
+ arg_from_wchar = NULL;
+ arg = NULL;
+ arg_from_wchar = PyUnicode_FromWideChar(szArglist[i],
+ wcslen(szArglist[i]));
+ if (arg_from_wchar == NULL)
+ goto error;
+#if PY_MAJOR_VERSION >= 3
+ arg = PyUnicode_FromObject(arg_from_wchar);
+#else
+ arg = PyUnicode_AsUTF8String(arg_from_wchar);
+#endif
+ if (arg == NULL)
+ goto error;
+ Py_XDECREF(arg_from_wchar);
+ if (PyList_Append(argList, arg))
+ goto error;
+ Py_XDECREF(arg);
+ }
+ }
+
+ if (szArglist != NULL)
+ LocalFree(szArglist);
+ free(commandLineContents);
+ CloseHandle(hProcess);
+ return argList;
+
+error:
+ Py_XDECREF(arg);
+ Py_XDECREF(arg_from_wchar);
+ Py_XDECREF(argList);
+ if (hProcess != NULL)
+ CloseHandle(hProcess);
+ if (commandLineContents != NULL)
+ free(commandLineContents);
+ if (szArglist != NULL)
+ LocalFree(szArglist);
+ return NULL;
+}
+
+
+#define PH_FIRST_PROCESS(Processes) ((PSYSTEM_PROCESS_INFORMATION)(Processes))
+#define PH_NEXT_PROCESS(Process) ( \
+ ((PSYSTEM_PROCESS_INFORMATION)(Process))->NextEntryOffset ? \
+ (PSYSTEM_PROCESS_INFORMATION)((PCHAR)(Process) + \
+ ((PSYSTEM_PROCESS_INFORMATION)(Process))->NextEntryOffset) : \
+ NULL)
+
+const int STATUS_INFO_LENGTH_MISMATCH = 0xC0000004;
+const int STATUS_BUFFER_TOO_SMALL = 0xC0000023L;
+
+/*
+ * Given a process PID and a PSYSTEM_PROCESS_INFORMATION structure
+ * fills the structure with various process information by using
+ * NtQuerySystemInformation.
+ * We use this as a fallback when faster functions fail with access
+ * denied. This is slower because it iterates over all processes.
+ * On success return 1, else 0 with Python exception already set.
+ */
+int
+psutil_get_proc_info(DWORD pid, PSYSTEM_PROCESS_INFORMATION *retProcess,
+ PVOID *retBuffer)
+{
+ static ULONG initialBufferSize = 0x4000;
+ NTSTATUS status;
+ PVOID buffer;
+ ULONG bufferSize;
+ PSYSTEM_PROCESS_INFORMATION process;
+
+ // get NtQuerySystemInformation
+ typedef DWORD (_stdcall * NTQSI_PROC) (int, PVOID, ULONG, PULONG);
+ NTQSI_PROC NtQuerySystemInformation;
+ HINSTANCE hNtDll;
+ hNtDll = LoadLibrary(TEXT("ntdll.dll"));
+ NtQuerySystemInformation = (NTQSI_PROC)GetProcAddress(
+ hNtDll, "NtQuerySystemInformation");
+
+ bufferSize = initialBufferSize;
+ buffer = malloc(bufferSize);
+ if (buffer == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+
+ while (TRUE) {
+ status = NtQuerySystemInformation(SystemProcessInformation, buffer,
+ bufferSize, &bufferSize);
+
+ if (status == STATUS_BUFFER_TOO_SMALL ||
+ status == STATUS_INFO_LENGTH_MISMATCH)
+ {
+ free(buffer);
+ buffer = malloc(bufferSize);
+ if (buffer == NULL) {
+ PyErr_NoMemory();
+ goto error;
+ }
+ }
+ else {
+ break;
+ }
+ }
+
+ if (status != 0) {
+ PyErr_Format(PyExc_RuntimeError, "NtQuerySystemInformation() failed");
+ goto error;
+ }
+
+ if (bufferSize <= 0x20000)
+ initialBufferSize = bufferSize;
+
+ process = PH_FIRST_PROCESS(buffer);
+ do {
+ if (process->UniqueProcessId == (HANDLE)pid) {
+ *retProcess = process;
+ *retBuffer = buffer;
+ return 1;
+ }
+ } while ( (process = PH_NEXT_PROCESS(process)) );
+
+ NoSuchProcess();
+ goto error;
+
+error:
+ FreeLibrary(hNtDll);
+ if (buffer != NULL)
+ free(buffer);
+ return 0;
+}
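Caller sketch for the fallback above (illustrative, not part of the patch):
on success the caller owns the returned buffer and must free() it once it is
done with the PSYSTEM_PROCESS_INFORMATION entry pointing into it.

    // Fetch the thread count for a pid; returns -1 with the Python
    // exception already set by psutil_get_proc_info() on failure.
    static int
    get_num_threads(DWORD pid, ULONG *num_threads)
    {
        PSYSTEM_PROCESS_INFORMATION process;
        PVOID buffer;

        if (!psutil_get_proc_info(pid, &process, &buffer))
            return -1;
        *num_threads = process->NumberOfThreads;
        free(buffer);
        return 0;
    }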
diff --git a/python/psutil/psutil/arch/windows/process_info.h b/python/psutil/psutil/arch/windows/process_info.h
new file mode 100644
index 000000000..a44c4aced
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/process_info.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#if !defined(__PROCESS_INFO_H)
+#define __PROCESS_INFO_H
+
+#include <Python.h>
+#include <windows.h>
+#include "security.h"
+#include "ntextapi.h"
+
+DWORD* psutil_get_pids(DWORD *numberOfReturnedPIDs);
+HANDLE psutil_handle_from_pid(DWORD pid);
+HANDLE psutil_handle_from_pid_waccess(DWORD pid, DWORD dwDesiredAccess);
+int psutil_handlep_is_running(HANDLE hProcess);
+int psutil_pid_in_proclist(DWORD pid);
+int psutil_pid_is_running(DWORD pid);
+PVOID psutil_get_peb_address(HANDLE ProcessHandle);
+PyObject* psutil_get_arg_list(long pid);
+int psutil_get_proc_info(DWORD pid, PSYSTEM_PROCESS_INFORMATION *retProcess,
+ PVOID *retBuffer);
+
+#endif
diff --git a/python/psutil/psutil/arch/windows/security.c b/python/psutil/psutil/arch/windows/security.c
new file mode 100644
index 000000000..3aabffd0c
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/security.c
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Security related functions for Windows platform (Set privileges such as
+ * SeDebug), as well as security helper functions.
+ */
+
+#include <windows.h>
+#include <Python.h>
+
+
+/*
+ * Convert a process handle to a process token handle.
+ */
+HANDLE
+psutil_token_from_handle(HANDLE hProcess) {
+ HANDLE hToken = NULL;
+
+ if (! OpenProcessToken(hProcess, TOKEN_QUERY, &hToken))
+ return PyErr_SetFromWindowsErr(0);
+ return hToken;
+}
+
+
+/*
+ * http://www.ddj.com/windows/184405986
+ *
+ * There's a way to determine whether we're running under the Local System
+ * account. However (you guessed it), we have to call more Win32 functions to
+ * determine this. Backing up through the code listing, we need to make another
+ * call to GetTokenInformation, but instead of passing through the TOKEN_USER
+ * constant, we pass through the TOKEN_PRIVILEGES constant. This value returns
+ * an array of privileges that the account has in the environment. Iterating
+ * through the array, we call the function LookupPrivilegeName looking for the
+ * string "SeTcbPrivilege". If the function returns this string, then this
+ * account has Local System privileges.
+ */
+int
+psutil_has_system_privilege(HANDLE hProcess) {
+ DWORD i;
+ DWORD dwSize = 0;
+ DWORD dwRetval = 0;
+ TCHAR privName[256];
+ DWORD dwNameSize = 256;
+ // PTOKEN_PRIVILEGES tp = NULL;
+ BYTE *pBuffer = NULL;
+ TOKEN_PRIVILEGES *tp = NULL;
+ HANDLE hToken = psutil_token_from_handle(hProcess);
+
+ if (NULL == hToken)
+ return -1;
+ // call GetTokenInformation first to get the buffer size
+ if (! GetTokenInformation(hToken, TokenPrivileges, NULL, 0, &dwSize)) {
+ dwRetval = GetLastError();
+ // if it failed for a reason other than the buffer, bail out
+ if (dwRetval != ERROR_INSUFFICIENT_BUFFER ) {
+ PyErr_SetFromWindowsErr(dwRetval);
+ return 0;
+ }
+ }
+
+ // allocate buffer and call GetTokenInformation again
+ // tp = (PTOKEN_PRIVILEGES) GlobalAlloc(GPTR, dwSize);
+ pBuffer = (BYTE *) malloc(dwSize);
+ if (pBuffer == NULL) {
+ PyErr_NoMemory();
+ return -1;
+ }
+
+ if (! GetTokenInformation(hToken, TokenPrivileges, pBuffer,
+ dwSize, &dwSize))
+ {
+ PyErr_SetFromWindowsErr(0);
+ free(pBuffer);
+ return -1;
+ }
+
+ // convert the BYTE buffer to a TOKEN_PRIVILEGES struct pointer
+ tp = (TOKEN_PRIVILEGES *)pBuffer;
+
+ // check all the privileges looking for SeTcbPrivilege
+ for (i = 0; i < tp->PrivilegeCount; i++) {
+ // reset the buffer contents and the buffer size
+ strcpy(privName, "");
+ dwNameSize = sizeof(privName) / sizeof(TCHAR);
+ if (! LookupPrivilegeName(NULL,
+ &tp->Privileges[i].Luid,
+ (LPTSTR)privName,
+ &dwNameSize))
+ {
+ PyErr_SetFromWindowsErr(0);
+ free(pBuffer);
+ return -1;
+ }
+
+ // if we find the SeTcbPrivilege then it's a LocalSystem process
+ if (! lstrcmpi(privName, TEXT("SeTcbPrivilege"))) {
+ free(pBuffer);
+ return 1;
+ }
+ }
+
+ free(pBuffer);
+ return 0;
+}
+
+
+BOOL
+psutil_set_privilege(HANDLE hToken, LPCTSTR Privilege, BOOL bEnablePrivilege)
+{
+ TOKEN_PRIVILEGES tp;
+ LUID luid;
+ TOKEN_PRIVILEGES tpPrevious;
+ DWORD cbPrevious = sizeof(TOKEN_PRIVILEGES);
+
+ if (!LookupPrivilegeValue( NULL, Privilege, &luid )) return FALSE;
+
+ // first pass. get current privilege setting
+ tp.PrivilegeCount = 1;
+ tp.Privileges[0].Luid = luid;
+ tp.Privileges[0].Attributes = 0;
+
+ AdjustTokenPrivileges(
+ hToken,
+ FALSE,
+ &tp,
+ sizeof(TOKEN_PRIVILEGES),
+ &tpPrevious,
+ &cbPrevious
+ );
+
+ if (GetLastError() != ERROR_SUCCESS) return FALSE;
+
+ // second pass. set privilege based on previous setting
+ tpPrevious.PrivilegeCount = 1;
+ tpPrevious.Privileges[0].Luid = luid;
+
+ if (bEnablePrivilege)
+ tpPrevious.Privileges[0].Attributes |= (SE_PRIVILEGE_ENABLED);
+ else
+ tpPrevious.Privileges[0].Attributes ^=
+ (SE_PRIVILEGE_ENABLED & tpPrevious.Privileges[0].Attributes);
+
+ AdjustTokenPrivileges(
+ hToken,
+ FALSE,
+ &tpPrevious,
+ cbPrevious,
+ NULL,
+ NULL
+ );
+
+ if (GetLastError() != ERROR_SUCCESS) return FALSE;
+
+ return TRUE;
+}
+
+
+int
+psutil_set_se_debug()
+{
+ HANDLE hToken = NULL; // initialized so the error paths can safely CloseHandle() it
+ if (! OpenThreadToken(GetCurrentThread(),
+ TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY,
+ FALSE,
+ &hToken)
+ ) {
+ if (GetLastError() == ERROR_NO_TOKEN) {
+ if (!ImpersonateSelf(SecurityImpersonation)) {
+ CloseHandle(hToken);
+ return 0;
+ }
+ if (!OpenThreadToken(GetCurrentThread(),
+ TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY,
+ FALSE,
+ &hToken)
+ ) {
+ RevertToSelf();
+ CloseHandle(hToken);
+ return 0;
+ }
+ }
+ }
+
+ // enable SeDebugPrivilege (open any process)
+ if (! psutil_set_privilege(hToken, SE_DEBUG_NAME, TRUE)) {
+ RevertToSelf();
+ CloseHandle(hToken);
+ return 0;
+ }
+
+ RevertToSelf();
+ CloseHandle(hToken);
+ return 1;
+}
+
+
+int
+psutil_unset_se_debug()
+{
+ HANDLE hToken;
+ if (! OpenThreadToken(GetCurrentThread(),
+ TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY,
+ FALSE,
+ &hToken)
+ ) {
+ if (GetLastError() == ERROR_NO_TOKEN) {
+ if (! ImpersonateSelf(SecurityImpersonation))
+ return 0;
+ if (!OpenThreadToken(GetCurrentThread(),
+ TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY,
+ FALSE,
+ &hToken))
+ {
+ return 0;
+ }
+ }
+ }
+
+ // now disable SeDebug
+ if (! psutil_set_privilege(hToken, SE_DEBUG_NAME, FALSE))
+ return 0;
+
+ CloseHandle(hToken);
+ return 1;
+}
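The intended call pattern (a sketch assuming the caller tolerates a
best-effort privilege bump; not part of the patch): enable SeDebug, do the
privileged work, then drop it again.

    // Open an otherwise-protected process; returns NULL on failure.
    static HANDLE
    open_with_se_debug(DWORD pid)
    {
        HANDLE h;

        psutil_set_se_debug();   // best effort; returns 0 on failure
        h = OpenProcess(PROCESS_QUERY_INFORMATION, FALSE, pid);
        psutil_unset_se_debug();
        return h;
    }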
diff --git a/python/psutil/psutil/arch/windows/security.h b/python/psutil/psutil/arch/windows/security.h
new file mode 100644
index 000000000..aa8a22ad1
--- /dev/null
+++ b/python/psutil/psutil/arch/windows/security.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Security related functions for Windows platform (Set privileges such as
+ * SeDebug), as well as security helper functions.
+ */
+
+#include <windows.h>
+
+BOOL psutil_set_privilege(HANDLE hToken, LPCTSTR Privilege, BOOL bEnablePrivilege);
+HANDLE psutil_token_from_handle(HANDLE hProcess);
+int psutil_has_system_privilege(HANDLE hProcess);
+int psutil_set_se_debug();
+int psutil_unset_se_debug();
+
diff --git a/python/psutil/setup.cfg b/python/psutil/setup.cfg
new file mode 100644
index 000000000..861a9f554
--- /dev/null
+++ b/python/psutil/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/psutil/setup.py b/python/psutil/setup.py
new file mode 100644
index 000000000..4c42548ef
--- /dev/null
+++ b/python/psutil/setup.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""psutil is a cross-platform library for retrieving information on
+running processes and system utilization (CPU, memory, disks, network)
+in Python.
+"""
+
+import os
+import sys
+try:
+ from setuptools import setup, Extension
+except ImportError:
+ from distutils.core import setup, Extension
+
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_version():
+ INIT = os.path.join(HERE, 'psutil/__init__.py')
+ with open(INIT, 'r') as f:
+ for line in f:
+ if line.startswith('__version__'):
+ ret = eval(line.strip().split(' = ')[1])
+ assert ret.count('.') == 2, ret
+ for num in ret.split('.'):
+ assert num.isdigit(), ret
+ return ret
+ else:
+ raise ValueError("couldn't find version string")
+
+
+def get_description():
+ README = os.path.join(HERE, 'README.rst')
+ with open(README, 'r') as f:
+ return f.read()
+
+
+VERSION = get_version()
+VERSION_MACRO = ('PSUTIL_VERSION', int(VERSION.replace('.', '')))
+
+
+# POSIX
+if os.name == 'posix':
+ libraries = []
+ if sys.platform.startswith("sunos"):
+ libraries.append('socket')
+
+ posix_extension = Extension(
+ 'psutil._psutil_posix',
+ sources=['psutil/_psutil_posix.c'],
+ libraries=libraries,
+ )
+# Windows
+if sys.platform.startswith("win32"):
+
+ def get_winver():
+ maj, min = sys.getwindowsversion()[0:2]
+ return '0x0%s' % ((maj * 100) + min)
+
+ extensions = [Extension(
+ 'psutil._psutil_windows',
+ sources=[
+ 'psutil/_psutil_windows.c',
+ 'psutil/_psutil_common.c',
+ 'psutil/arch/windows/process_info.c',
+ 'psutil/arch/windows/process_handles.c',
+ 'psutil/arch/windows/security.c',
+ 'psutil/arch/windows/inet_ntop.c',
+ ],
+ define_macros=[
+ VERSION_MACRO,
+ # be nice to mingw, see:
+ # http://www.mingw.org/wiki/Use_more_recent_defined_functions
+ ('_WIN32_WINNT', get_winver()),
+ ('_AVAIL_WINVER_', get_winver()),
+ ('_CRT_SECURE_NO_WARNINGS', None),
+ # see: https://github.com/giampaolo/psutil/issues/348
+ ('PSAPI_VERSION', 1),
+ ],
+ libraries=[
+ "psapi", "kernel32", "advapi32", "shell32", "netapi32", "iphlpapi",
+ "wtsapi32", "ws2_32",
+ ],
+ # extra_compile_args=["/Z7"],
+ # extra_link_args=["/DEBUG"]
+ )]
+# OS X
+elif sys.platform.startswith("darwin"):
+ extensions = [Extension(
+ 'psutil._psutil_osx',
+ sources=[
+ 'psutil/_psutil_osx.c',
+ 'psutil/_psutil_common.c',
+ 'psutil/arch/osx/process_info.c'
+ ],
+ define_macros=[VERSION_MACRO],
+ extra_link_args=[
+ '-framework', 'CoreFoundation', '-framework', 'IOKit'
+ ],
+ ),
+ posix_extension,
+ ]
+# FreeBSD
+elif sys.platform.startswith("freebsd"):
+ extensions = [Extension(
+ 'psutil._psutil_bsd',
+ sources=[
+ 'psutil/_psutil_bsd.c',
+ 'psutil/_psutil_common.c',
+ 'psutil/arch/bsd/process_info.c'
+ ],
+ define_macros=[VERSION_MACRO],
+ libraries=["devstat"]),
+ posix_extension,
+ ]
+# Linux
+elif sys.platform.startswith("linux"):
+ extensions = [Extension(
+ 'psutil._psutil_linux',
+ sources=['psutil/_psutil_linux.c'],
+ define_macros=[VERSION_MACRO]),
+ posix_extension,
+ ]
+# Solaris
+elif sys.platform.lower().startswith('sunos'):
+ extensions = [Extension(
+ 'psutil._psutil_sunos',
+ sources=['psutil/_psutil_sunos.c'],
+ define_macros=[VERSION_MACRO],
+ libraries=['kstat', 'nsl', 'socket']),
+ posix_extension,
+ ]
+else:
+ sys.exit('platform %s is not supported' % sys.platform)
+
+
+def main():
+ setup_args = dict(
+ name='psutil',
+ version=VERSION,
+ description=__doc__.replace('\n', '').strip(),
+ long_description=get_description(),
+ keywords=[
+ 'ps', 'top', 'kill', 'free', 'lsof', 'netstat', 'nice', 'tty',
+ 'ionice', 'uptime', 'taskmgr', 'process', 'df', 'iotop', 'iostat',
+ 'ifconfig', 'taskset', 'who', 'pidof', 'pmap', 'smem', 'pstree',
+ 'monitoring', 'ulimit', 'prlimit',
+ ],
+ author='Giampaolo Rodola',
+ author_email='g.rodola <at> gmail <dot> com',
+ url='https://github.com/giampaolo/psutil',
+ platforms='Platform Independent',
+ license='BSD',
+ packages=['psutil'],
+ # see: python setup.py register --list-classifiers
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Environment :: Win32 (MS Windows)',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Information Technology',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Operating System :: Microsoft :: Windows :: Windows NT/2000',
+ 'Operating System :: Microsoft',
+ 'Operating System :: OS Independent',
+ 'Operating System :: POSIX :: BSD :: FreeBSD',
+ 'Operating System :: POSIX :: Linux',
+ 'Operating System :: POSIX :: SunOS/Solaris',
+ 'Operating System :: POSIX',
+ 'Programming Language :: C',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.0',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: System :: Benchmark',
+ 'Topic :: System :: Hardware',
+ 'Topic :: System :: Monitoring',
+ 'Topic :: System :: Networking :: Monitoring',
+ 'Topic :: System :: Networking',
+ 'Topic :: System :: Systems Administration',
+ 'Topic :: Utilities',
+ ],
+ )
+ if extensions is not None:
+ setup_args["ext_modules"] = extensions
+ setup(**setup_args)
+
+if __name__ == '__main__':
+ main()
diff --git a/python/psutil/test/README.rst b/python/psutil/test/README.rst
new file mode 100644
index 000000000..3f2a468ef
--- /dev/null
+++ b/python/psutil/test/README.rst
@@ -0,0 +1,21 @@
+- The recommended way to run tests (also on Windows) is to cd into parent
+ directory and run ``make test``
+
+- Dependencies for running tests:
+ - python 2.6: ipaddress, mock, unittest2
+ - python 2.7: ipaddress, mock
+ - python 3.2: ipaddress, mock
+ - python 3.3: ipaddress
+ - python >= 3.4: no deps required
+
+- The main test script is ``test_psutil.py``, which also imports the
+  platform-specific ``_*.py`` scripts (these are not meant to be run directly).
+
+- ``test_memory_leaks.py`` looks for memory leaks in the C extension modules and
+  must be run separately with ``make test-memleaks``.
+
+- To run tests on all supported Python versions, install tox (``pip install tox``)
+  then run ``tox``.
+
+- Every time a commit is pushed, tests are automatically run on Travis:
+ https://travis-ci.org/giampaolo/psutil/
diff --git a/python/psutil/test/_bsd.py b/python/psutil/test/_bsd.py
new file mode 100644
index 000000000..e4a3225d2
--- /dev/null
+++ b/python/psutil/test/_bsd.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: add test for comparing connections with 'sockstat' cmd
+
+"""BSD specific tests. These are implicitly run by test_psutil.py."""
+
+import os
+import subprocess
+import sys
+import time
+
+import psutil
+
+from psutil._compat import PY3
+from test_psutil import (TOLERANCE, BSD, sh, get_test_subprocess, which,
+ retry_before_failing, reap_children, unittest)
+
+
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+if os.getuid() == 0: # muse requires root privileges
+ MUSE_AVAILABLE = which('muse')
+else:
+ MUSE_AVAILABLE = False
+
+
+def sysctl(cmdline):
+ """Expects a sysctl command with an argument and parse the result
+ returning only the value of interest.
+ """
+ result = sh("sysctl " + cmdline)
+ result = result[result.find(": ") + 2:]
+ try:
+ return int(result)
+ except ValueError:
+ return result
+
+
+def muse(field):
+ """Thin wrapper around 'muse' cmdline utility."""
+ out = sh('muse')
+ for line in out.split('\n'):
+ if line.startswith(field):
+ break
+ else:
+ raise ValueError("line not found")
+ return int(line.split()[1])
+
+
+@unittest.skipUnless(BSD, "not a BSD system")
+class BSDSpecificTestCase(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.pid = get_test_subprocess().pid
+
+ @classmethod
+ def tearDownClass(cls):
+ reap_children()
+
+ def test_boot_time(self):
+ s = sysctl('kern.boottime') # the helper already prepends "sysctl "
+ s = s[s.find(" sec = ") + 7:]
+ s = s[:s.find(',')]
+ btime = int(s)
+ self.assertEqual(btime, psutil.boot_time())
+
+ def test_process_create_time(self):
+ cmdline = "ps -o lstart -p %s" % self.pid
+ p = subprocess.Popen(cmdline, shell=1, stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ if PY3:
+ output = str(output, sys.stdout.encoding)
+ start_ps = output.replace('STARTED', '').strip()
+ start_psutil = psutil.Process(self.pid).create_time()
+ start_psutil = time.strftime("%a %b %e %H:%M:%S %Y",
+ time.localtime(start_psutil))
+ self.assertEqual(start_ps, start_psutil)
+
+ def test_disks(self):
+ # test psutil.disk_usage() and psutil.disk_partitions()
+ # against "df -a"
+ def df(path):
+ out = sh('df -k "%s"' % path).strip()
+ lines = out.split('\n')
+ lines.pop(0)
+ line = lines.pop(0)
+ dev, total, used, free = line.split()[:4]
+ if dev == 'none':
+ dev = ''
+ total = int(total) * 1024
+ used = int(used) * 1024
+ free = int(free) * 1024
+ return dev, total, used, free
+
+ for part in psutil.disk_partitions(all=False):
+ usage = psutil.disk_usage(part.mountpoint)
+ dev, total, used, free = df(part.mountpoint)
+ self.assertEqual(part.device, dev)
+ self.assertEqual(usage.total, total)
+ # 10 MB tolerance
+ if abs(usage.free - free) > 10 * 1024 * 1024:
+ self.fail("psutil=%s, df=%s" % (usage.free, free))
+ if abs(usage.used - used) > 10 * 1024 * 1024:
+ self.fail("psutil=%s, df=%s" % (usage.used, used))
+
+ @retry_before_failing()
+ def test_memory_maps(self):
+ out = sh('procstat -v %s' % self.pid)
+ maps = psutil.Process(self.pid).memory_maps(grouped=False)
+ lines = out.split('\n')[1:]
+ while lines:
+ line = lines.pop()
+ fields = line.split()
+ _, start, stop, perms, res = fields[:5]
+ map = maps.pop()
+ self.assertEqual("%s-%s" % (start, stop), map.addr)
+ self.assertEqual(int(res), map.rss)
+ if not map.path.startswith('['):
+ self.assertEqual(fields[10], map.path)
+
+ def test_exe(self):
+ out = sh('procstat -b %s' % self.pid)
+ self.assertEqual(psutil.Process(self.pid).exe(),
+ out.split('\n')[1].split()[-1])
+
+ def test_cmdline(self):
+ out = sh('procstat -c %s' % self.pid)
+ self.assertEqual(' '.join(psutil.Process(self.pid).cmdline()),
+ ' '.join(out.split('\n')[1].split()[2:]))
+
+ def test_uids_gids(self):
+ out = sh('procstat -s %s' % self.pid)
+ euid, ruid, suid, egid, rgid, sgid = out.split('\n')[1].split()[2:8]
+ p = psutil.Process(self.pid)
+ uids = p.uids()
+ gids = p.gids()
+ self.assertEqual(uids.real, int(ruid))
+ self.assertEqual(uids.effective, int(euid))
+ self.assertEqual(uids.saved, int(suid))
+ self.assertEqual(gids.real, int(rgid))
+ self.assertEqual(gids.effective, int(egid))
+ self.assertEqual(gids.saved, int(sgid))
+
+ # --- virtual_memory(); tests against sysctl
+
+ def test_vmem_total(self):
+ syst = sysctl("sysctl vm.stats.vm.v_page_count") * PAGESIZE
+ self.assertEqual(psutil.virtual_memory().total, syst)
+
+ @retry_before_failing()
+ def test_vmem_active(self):
+ syst = sysctl("vm.stats.vm.v_active_count") * PAGESIZE
+ self.assertAlmostEqual(psutil.virtual_memory().active, syst,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_inactive(self):
+ syst = sysctl("vm.stats.vm.v_inactive_count") * PAGESIZE
+ self.assertAlmostEqual(psutil.virtual_memory().inactive, syst,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_wired(self):
+ syst = sysctl("vm.stats.vm.v_wire_count") * PAGESIZE
+ self.assertAlmostEqual(psutil.virtual_memory().wired, syst,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_cached(self):
+ syst = sysctl("vm.stats.vm.v_cache_count") * PAGESIZE
+ self.assertAlmostEqual(psutil.virtual_memory().cached, syst,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_free(self):
+ syst = sysctl("vm.stats.vm.v_free_count") * PAGESIZE
+ self.assertAlmostEqual(psutil.virtual_memory().free, syst,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_buffers(self):
+ syst = sysctl("vfs.bufspace")
+ self.assertAlmostEqual(psutil.virtual_memory().buffers, syst,
+ delta=TOLERANCE)
+
+ def test_cpu_count_logical(self):
+ syst = sysctl("hw.ncpu")
+ self.assertEqual(psutil.cpu_count(logical=True), syst)
+
+ # --- virtual_memory(); tests against muse
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ def test_total(self):
+ num = muse('Total')
+ self.assertEqual(psutil.virtual_memory().total, num)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_active(self):
+ num = muse('Active')
+ self.assertAlmostEqual(psutil.virtual_memory().active, num,
+ delta=TOLERANCE)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_inactive(self):
+ num = muse('Inactive')
+ self.assertAlmostEqual(psutil.virtual_memory().inactive, num,
+ delta=TOLERANCE)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_wired(self):
+ num = muse('Wired')
+ self.assertAlmostEqual(psutil.virtual_memory().wired, num,
+ delta=TOLERANCE)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_cached(self):
+ num = muse('Cache')
+ self.assertAlmostEqual(psutil.virtual_memory().cached, num,
+ delta=TOLERANCE)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_free(self):
+ num = muse('Free')
+ self.assertAlmostEqual(psutil.virtual_memory().free, num,
+ delta=TOLERANCE)
+
+ @unittest.skipUnless(MUSE_AVAILABLE, "muse cmdline tool is not available")
+ @retry_before_failing()
+ def test_buffers(self):
+ num = muse('Buffer')
+ self.assertAlmostEqual(psutil.virtual_memory().buffers, num,
+ delta=TOLERANCE)
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(BSDSpecificTestCase))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/_linux.py b/python/psutil/test/_linux.py
new file mode 100644
index 000000000..c1927ea8b
--- /dev/null
+++ b/python/psutil/test/_linux.py
@@ -0,0 +1,473 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Linux specific tests. These are implicitly run by test_psutil.py."""
+
+from __future__ import division
+import contextlib
+import errno
+import fcntl
+import io
+import os
+import pprint
+import re
+import socket
+import struct
+import sys
+import tempfile
+import time
+import warnings
+
+try:
+ from unittest import mock # py3
+except ImportError:
+ import mock # requires "pip install mock"
+
+from test_psutil import POSIX, TOLERANCE, TRAVIS, LINUX
+from test_psutil import (skip_on_not_implemented, sh, get_test_subprocess,
+ retry_before_failing, get_kernel_version, unittest,
+ which, call_until)
+
+import psutil
+import psutil._pslinux
+from psutil._compat import PY3, u
+
+
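+# ioctl request numbers from <linux/sockios.h>, used with fcntl.ioctl()
+# below to query interface addresses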
+SIOCGIFADDR = 0x8915
+SIOCGIFCONF = 0x8912
+SIOCGIFHWADDR = 0x8927
+
+
+def get_ipv4_address(ifname):
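+    # the kernel truncates interface names to IFNAMSIZ - 1 (15)
+    # characters, hence the slicing below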
+ ifname = ifname[:15]
+ if PY3:
+ ifname = bytes(ifname, 'ascii')
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ with contextlib.closing(s):
+ return socket.inet_ntoa(
+ fcntl.ioctl(s.fileno(),
+ SIOCGIFADDR,
+ struct.pack('256s', ifname))[20:24])
+
+
+def get_mac_address(ifname):
+ ifname = ifname[:15]
+ if PY3:
+ ifname = bytes(ifname, 'ascii')
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ with contextlib.closing(s):
+ info = fcntl.ioctl(
+ s.fileno(), SIOCGIFHWADDR, struct.pack('256s', ifname))
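+        # on Python 3 indexing a bytes object already yields ints, so
+        # ord() is replaced with a no-op; on Python 2 the builtin ord()
+        # converts each char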
+ if PY3:
+ def ord(x):
+ return x
+ else:
+ import __builtin__
+ ord = __builtin__.ord
+ return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
+
+
+@unittest.skipUnless(LINUX, "not a Linux system")
+class LinuxSpecificTestCase(unittest.TestCase):
+
+ @unittest.skipIf(
+ POSIX and not hasattr(os, 'statvfs'),
+ reason="os.statvfs() function not available on this platform")
+ @skip_on_not_implemented()
+ def test_disks(self):
+ # test psutil.disk_usage() and psutil.disk_partitions()
+ # against "df -a"
+ def df(path):
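+            # "df -P -B 1" prints POSIX-format output in 1-byte blocks,
+            # so the numbers below are directly comparable with the
+            # byte values psutil reports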
+ out = sh('df -P -B 1 "%s"' % path).strip()
+ lines = out.split('\n')
+ lines.pop(0)
+ line = lines.pop(0)
+ dev, total, used, free = line.split()[:4]
+ if dev == 'none':
+ dev = ''
+ total, used, free = int(total), int(used), int(free)
+ return dev, total, used, free
+
+ for part in psutil.disk_partitions(all=False):
+ usage = psutil.disk_usage(part.mountpoint)
+ dev, total, used, free = df(part.mountpoint)
+ self.assertEqual(part.device, dev)
+ self.assertEqual(usage.total, total)
+            # 10 MB tolerance
+ if abs(usage.free - free) > 10 * 1024 * 1024:
+ self.fail("psutil=%s, df=%s" % (usage.free, free))
+ if abs(usage.used - used) > 10 * 1024 * 1024:
+ self.fail("psutil=%s, df=%s" % (usage.used, used))
+
+ def test_memory_maps(self):
+ sproc = get_test_subprocess()
+ time.sleep(1)
+ p = psutil.Process(sproc.pid)
+ maps = p.memory_maps(grouped=False)
+ pmap = sh('pmap -x %s' % p.pid).split('\n')
+ # get rid of header
+ del pmap[0]
+ del pmap[0]
+ while maps and pmap:
+ this = maps.pop(0)
+ other = pmap.pop(0)
+ addr, _, rss, dirty, mode, path = other.split(None, 5)
+ if not path.startswith('[') and not path.endswith(']'):
+ self.assertEqual(path, os.path.basename(this.path))
+ self.assertEqual(int(rss) * 1024, this.rss)
+ # test only rwx chars, ignore 's' and 'p'
+ self.assertEqual(mode[:3], this.perms[:3])
+
+ def test_vmem_total(self):
+ lines = sh('free').split('\n')[1:]
+ total = int(lines[0].split()[1]) * 1024
+ self.assertEqual(total, psutil.virtual_memory().total)
+
+ @retry_before_failing()
+ def test_vmem_used(self):
+ lines = sh('free').split('\n')[1:]
+ used = int(lines[0].split()[2]) * 1024
+ self.assertAlmostEqual(used, psutil.virtual_memory().used,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_free(self):
+ lines = sh('free').split('\n')[1:]
+ free = int(lines[0].split()[3]) * 1024
+ self.assertAlmostEqual(free, psutil.virtual_memory().free,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_buffers(self):
+ lines = sh('free').split('\n')[1:]
+ buffers = int(lines[0].split()[5]) * 1024
+ self.assertAlmostEqual(buffers, psutil.virtual_memory().buffers,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_cached(self):
+ lines = sh('free').split('\n')[1:]
+ cached = int(lines[0].split()[6]) * 1024
+ self.assertAlmostEqual(cached, psutil.virtual_memory().cached,
+ delta=TOLERANCE)
+
+ def test_swapmem_total(self):
+ lines = sh('free').split('\n')[1:]
+ total = int(lines[2].split()[1]) * 1024
+ self.assertEqual(total, psutil.swap_memory().total)
+
+ @retry_before_failing()
+ def test_swapmem_used(self):
+ lines = sh('free').split('\n')[1:]
+ used = int(lines[2].split()[2]) * 1024
+ self.assertAlmostEqual(used, psutil.swap_memory().used,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_swapmem_free(self):
+ lines = sh('free').split('\n')[1:]
+ free = int(lines[2].split()[3]) * 1024
+ self.assertAlmostEqual(free, psutil.swap_memory().free,
+ delta=TOLERANCE)
+
+ @unittest.skipIf(TRAVIS, "unknown failure on travis")
+ def test_cpu_times(self):
+ fields = psutil.cpu_times()._fields
+        kernel_ver = re.findall(r'\d+\.\d+\.\d+', os.uname()[2])[0]
+ kernel_ver_info = tuple(map(int, kernel_ver.split('.')))
+ if kernel_ver_info >= (2, 6, 11):
+ self.assertIn('steal', fields)
+ else:
+ self.assertNotIn('steal', fields)
+ if kernel_ver_info >= (2, 6, 24):
+ self.assertIn('guest', fields)
+ else:
+ self.assertNotIn('guest', fields)
+ if kernel_ver_info >= (3, 2, 0):
+ self.assertIn('guest_nice', fields)
+ else:
+ self.assertNotIn('guest_nice', fields)
+
+ def test_net_if_addrs_ips(self):
+ for name, addrs in psutil.net_if_addrs().items():
+ for addr in addrs:
+ if addr.family == psutil.AF_LINK:
+ self.assertEqual(addr.address, get_mac_address(name))
+ elif addr.family == socket.AF_INET:
+ self.assertEqual(addr.address, get_ipv4_address(name))
+ # TODO: test for AF_INET6 family
+
+ @unittest.skipUnless(which('ip'), "'ip' utility not available")
+ @unittest.skipIf(TRAVIS, "skipped on Travis")
+ def test_net_if_names(self):
+ out = sh("ip addr").strip()
+ nics = psutil.net_if_addrs()
+ found = 0
+ for line in out.split('\n'):
+ line = line.strip()
+            if re.search(r"^\d+:", line):
+ found += 1
+ name = line.split(':')[1].strip()
+ self.assertIn(name, nics.keys())
+ self.assertEqual(len(nics), found, msg="%s\n---\n%s" % (
+ pprint.pformat(nics), out))
+
+ @unittest.skipUnless(which("nproc"), "nproc utility not available")
+ def test_cpu_count_logical_w_nproc(self):
+ num = int(sh("nproc --all"))
+ self.assertEqual(psutil.cpu_count(logical=True), num)
+
+ @unittest.skipUnless(which("lscpu"), "lscpu utility not available")
+ def test_cpu_count_logical_w_lscpu(self):
+ out = sh("lscpu -p")
+ num = len([x for x in out.split('\n') if not x.startswith('#')])
+ self.assertEqual(psutil.cpu_count(logical=True), num)
+
+ # --- mocked tests
+
+ def test_virtual_memory_mocked_warnings(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ with warnings.catch_warnings(record=True) as ws:
+ warnings.simplefilter("always")
+ ret = psutil._pslinux.virtual_memory()
+ assert m.called
+ self.assertEqual(len(ws), 1)
+ w = ws[0]
+ self.assertTrue(w.filename.endswith('psutil/_pslinux.py'))
+ self.assertIn(
+ "'cached', 'active' and 'inactive' memory stats couldn't "
+ "be determined", str(w.message))
+ self.assertEqual(ret.cached, 0)
+ self.assertEqual(ret.active, 0)
+ self.assertEqual(ret.inactive, 0)
+
+ def test_swap_memory_mocked_warnings(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ with warnings.catch_warnings(record=True) as ws:
+ warnings.simplefilter("always")
+ ret = psutil._pslinux.swap_memory()
+ assert m.called
+ self.assertEqual(len(ws), 1)
+ w = ws[0]
+ self.assertTrue(w.filename.endswith('psutil/_pslinux.py'))
+ self.assertIn(
+ "'sin' and 'sout' swap memory stats couldn't "
+ "be determined", str(w.message))
+ self.assertEqual(ret.sin, 0)
+ self.assertEqual(ret.sout, 0)
+
+ def test_cpu_count_logical_mocked(self):
+ import psutil._pslinux
+ original = psutil._pslinux.cpu_count_logical()
+        # Here we mock os.sysconf("SC_NPROCESSORS_ONLN") so that it
+        # fails, in order to force the fallback parsing of
+        # /proc/cpuinfo and /proc/stat.
+ with mock.patch(
+ 'psutil._pslinux.os.sysconf', side_effect=ValueError) as m:
+ self.assertEqual(psutil._pslinux.cpu_count_logical(), original)
+ assert m.called
+
+        # Let's have open() return empty data and make sure None is
+        # returned (because we mimic os.cpu_count()).
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertIsNone(psutil._pslinux.cpu_count_logical())
+ self.assertEqual(m.call_count, 2)
+ # /proc/stat should be the last one
+ self.assertEqual(m.call_args[0][0], '/proc/stat')
+
+ # Let's push this a bit further and make sure /proc/cpuinfo
+ # parsing works as expected.
+ with open('/proc/cpuinfo', 'rb') as f:
+ cpuinfo_data = f.read()
+ fake_file = io.BytesIO(cpuinfo_data)
+ with mock.patch('psutil._pslinux.open',
+ return_value=fake_file, create=True) as m:
+ self.assertEqual(psutil._pslinux.cpu_count_logical(), original)
+
+ def test_cpu_count_physical_mocked(self):
+        # Have open() return empty data and make sure None is returned
+        # (because we want to mimic os.cpu_count())
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertIsNone(psutil._pslinux.cpu_count_physical())
+ assert m.called
+
+ def test_proc_open_files_file_gone(self):
+ # simulates a file which gets deleted during open_files()
+ # execution
+ p = psutil.Process()
+ files = p.open_files()
+ with tempfile.NamedTemporaryFile():
+ # give the kernel some time to see the new file
+ call_until(p.open_files, "len(ret) != %i" % len(files))
+ with mock.patch('psutil._pslinux.os.readlink',
+ side_effect=OSError(errno.ENOENT, "")) as m:
+ files = p.open_files()
+ assert not files
+ assert m.called
+ # also simulate the case where os.readlink() returns EINVAL
+ # in which case psutil is supposed to 'continue'
+ with mock.patch('psutil._pslinux.os.readlink',
+ side_effect=OSError(errno.EINVAL, "")) as m:
+ self.assertEqual(p.open_files(), [])
+ assert m.called
+
+ def test_proc_terminal_mocked(self):
+ with mock.patch('psutil._pslinux._psposix._get_terminal_map',
+ return_value={}) as m:
+ self.assertIsNone(psutil._pslinux.Process(os.getpid()).terminal())
+ assert m.called
+
+ def test_proc_num_ctx_switches_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).num_ctx_switches)
+ assert m.called
+
+ def test_proc_num_threads_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).num_threads)
+ assert m.called
+
+ def test_proc_ppid_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).ppid)
+ assert m.called
+
+ def test_proc_uids_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).uids)
+ assert m.called
+
+ def test_proc_gids_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).gids)
+ assert m.called
+
+ def test_proc_cmdline_mocked(self):
+ # see: https://github.com/giampaolo/psutil/issues/639
+ p = psutil.Process()
+ fake_file = io.StringIO(u('foo\x00bar\x00'))
+ with mock.patch('psutil._pslinux.open',
+ return_value=fake_file, create=True) as m:
+            self.assertEqual(p.cmdline(), ['foo', 'bar'])
+ assert m.called
+ fake_file = io.StringIO(u('foo\x00bar\x00\x00'))
+ with mock.patch('psutil._pslinux.open',
+ return_value=fake_file, create=True) as m:
+            self.assertEqual(p.cmdline(), ['foo', 'bar', ''])
+ assert m.called
+
+ def test_proc_io_counters_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ NotImplementedError,
+ psutil._pslinux.Process(os.getpid()).io_counters)
+ assert m.called
+
+ def test_boot_time_mocked(self):
+ with mock.patch('psutil._pslinux.open', create=True) as m:
+ self.assertRaises(
+ RuntimeError,
+ psutil._pslinux.boot_time)
+ assert m.called
+
+ def test_users_mocked(self):
+ # Make sure ':0' and ':0.0' (returned by C ext) are converted
+ # to 'localhost'.
+ with mock.patch('psutil._pslinux.cext.users',
+ return_value=[('giampaolo', 'pts/2', ':0',
+ 1436573184.0, True)]) as m:
+ self.assertEqual(psutil.users()[0].host, 'localhost')
+ assert m.called
+ with mock.patch('psutil._pslinux.cext.users',
+ return_value=[('giampaolo', 'pts/2', ':0.0',
+ 1436573184.0, True)]) as m:
+ self.assertEqual(psutil.users()[0].host, 'localhost')
+ assert m.called
+ # ...otherwise it should be returned as-is
+ with mock.patch('psutil._pslinux.cext.users',
+ return_value=[('giampaolo', 'pts/2', 'foo',
+ 1436573184.0, True)]) as m:
+ self.assertEqual(psutil.users()[0].host, 'foo')
+ assert m.called
+
+ def test_disk_partitions_mocked(self):
+ # Test that ZFS partitions are returned.
+ with open("/proc/filesystems", "r") as f:
+ data = f.read()
+ if 'zfs' in data:
+ for part in psutil.disk_partitions():
+ if part.fstype == 'zfs':
+ break
+ else:
+ self.fail("couldn't find any ZFS partition")
+ else:
+ # No ZFS partitions on this system. Let's fake one.
+ fake_file = io.StringIO(u("nodev\tzfs\n"))
+ with mock.patch('psutil._pslinux.open',
+ return_value=fake_file, create=True) as m1:
+ with mock.patch(
+ 'psutil._pslinux.cext.disk_partitions',
+ return_value=[('/dev/sdb3', '/', 'zfs', 'rw')]) as m2:
+ ret = psutil.disk_partitions()
+ assert m1.called
+ assert m2.called
+ assert ret
+ self.assertEqual(ret[0].fstype, 'zfs')
+
+ # --- tests for specific kernel versions
+
+ @unittest.skipUnless(
+ get_kernel_version() >= (2, 6, 36),
+ "prlimit() not available on this Linux kernel version")
+ def test_prlimit_availability(self):
+ # prlimit() should be available starting from kernel 2.6.36
+ p = psutil.Process(os.getpid())
+ p.rlimit(psutil.RLIMIT_NOFILE)
+ # if prlimit() is supported *at least* these constants should
+ # be available
+ self.assertTrue(hasattr(psutil, "RLIM_INFINITY"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_AS"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_CORE"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_CPU"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_DATA"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_FSIZE"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_LOCKS"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_MEMLOCK"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_NOFILE"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_NPROC"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_RSS"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_STACK"))
+
+ @unittest.skipUnless(
+ get_kernel_version() >= (3, 0),
+ "prlimit constants not available on this Linux kernel version")
+ def test_resource_consts_kernel_v(self):
+ # more recent constants
+ self.assertTrue(hasattr(psutil, "RLIMIT_MSGQUEUE"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_NICE"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_RTPRIO"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_RTTIME"))
+ self.assertTrue(hasattr(psutil, "RLIMIT_SIGPENDING"))
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(LinuxSpecificTestCase))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/_osx.py b/python/psutil/test/_osx.py
new file mode 100644
index 000000000..6e6e4380e
--- /dev/null
+++ b/python/psutil/test/_osx.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""OSX specific tests. These are implicitly run by test_psutil.py."""
+
+import os
+import re
+import subprocess
+import sys
+import time
+
+import psutil
+
+from psutil._compat import PY3
+from test_psutil import (TOLERANCE, OSX, sh, get_test_subprocess,
+ reap_children, retry_before_failing, unittest)
+
+
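+# vm_stat reports memory figures as counts of pages; multiplying by the
+# system page size yields bytes comparable with psutil's values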
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+
+
+def sysctl(cmdline):
+ """Expects a sysctl command with an argument and parse the result
+ returning only the value of interest.
+ """
+ p = subprocess.Popen(cmdline, shell=1, stdout=subprocess.PIPE)
+ result = p.communicate()[0].strip().split()[1]
+ if PY3:
+ result = str(result, sys.stdout.encoding)
+ try:
+ return int(result)
+ except ValueError:
+ return result
+
+
+def vm_stat(field):
+ """Wrapper around 'vm_stat' cmdline utility."""
+ out = sh('vm_stat')
+ for line in out.split('\n'):
+ if field in line:
+ break
+ else:
+ raise ValueError("line not found")
+    return int(re.search(r'\d+', line).group(0)) * PAGESIZE
+
+
+@unittest.skipUnless(OSX, "not an OSX system")
+class OSXSpecificTestCase(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.pid = get_test_subprocess().pid
+
+ @classmethod
+ def tearDownClass(cls):
+ reap_children()
+
+ def test_process_create_time(self):
+ cmdline = "ps -o lstart -p %s" % self.pid
+ p = subprocess.Popen(cmdline, shell=1, stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ if PY3:
+ output = str(output, sys.stdout.encoding)
+ start_ps = output.replace('STARTED', '').strip()
+ start_psutil = psutil.Process(self.pid).create_time()
+ start_psutil = time.strftime("%a %b %e %H:%M:%S %Y",
+ time.localtime(start_psutil))
+ self.assertEqual(start_ps, start_psutil)
+
+ def test_disks(self):
+ # test psutil.disk_usage() and psutil.disk_partitions()
+ # against "df -a"
+ def df(path):
+ out = sh('df -k "%s"' % path).strip()
+ lines = out.split('\n')
+ lines.pop(0)
+ line = lines.pop(0)
+ dev, total, used, free = line.split()[:4]
+ if dev == 'none':
+ dev = ''
+ total = int(total) * 1024
+ used = int(used) * 1024
+ free = int(free) * 1024
+ return dev, total, used, free
+
+ for part in psutil.disk_partitions(all=False):
+ usage = psutil.disk_usage(part.mountpoint)
+ dev, total, used, free = df(part.mountpoint)
+ self.assertEqual(part.device, dev)
+ self.assertEqual(usage.total, total)
+            # 10 MB tolerance
+            if abs(usage.free - free) > 10 * 1024 * 1024:
+                self.fail("psutil=%s, df=%s" % (usage.free, free))
+            if abs(usage.used - used) > 10 * 1024 * 1024:
+                self.fail("psutil=%s, df=%s" % (usage.used, used))
+
+ # --- virtual mem
+
+ def test_vmem_total(self):
+ sysctl_hwphymem = sysctl('sysctl hw.memsize')
+ self.assertEqual(sysctl_hwphymem, psutil.virtual_memory().total)
+
+ @retry_before_failing()
+ def test_vmem_free(self):
+ num = vm_stat("free")
+ self.assertAlmostEqual(psutil.virtual_memory().free, num,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_active(self):
+ num = vm_stat("active")
+ self.assertAlmostEqual(psutil.virtual_memory().active, num,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_inactive(self):
+ num = vm_stat("inactive")
+ self.assertAlmostEqual(psutil.virtual_memory().inactive, num,
+ delta=TOLERANCE)
+
+ @retry_before_failing()
+ def test_vmem_wired(self):
+ num = vm_stat("wired")
+ self.assertAlmostEqual(psutil.virtual_memory().wired, num,
+ delta=TOLERANCE)
+
+ # --- swap mem
+
+ def test_swapmem_sin(self):
+ num = vm_stat("Pageins")
+ self.assertEqual(psutil.swap_memory().sin, num)
+
+ def test_swapmem_sout(self):
+ num = vm_stat("Pageouts")
+ self.assertEqual(psutil.swap_memory().sout, num)
+
+ def test_swapmem_total(self):
+ tot1 = psutil.swap_memory().total
+ tot2 = 0
+        # OSX uses multiple swap files, stored under /var/vm/:
+        # http://en.wikipedia.org/wiki/Paging#OS_X
+ for name in os.listdir("/var/vm/"):
+ file = os.path.join("/var/vm", name)
+ if os.path.isfile(file):
+ tot2 += os.path.getsize(file)
+ self.assertEqual(tot1, tot2)
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(OSXSpecificTestCase))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/_posix.py b/python/psutil/test/_posix.py
new file mode 100644
index 000000000..e6c56aac3
--- /dev/null
+++ b/python/psutil/test/_posix.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""POSIX specific tests. These are implicitly run by test_psutil.py."""
+
+import datetime
+import os
+import subprocess
+import sys
+import time
+
+import psutil
+
+from psutil._compat import PY3, callable
+from test_psutil import LINUX, SUNOS, OSX, BSD, PYTHON, POSIX, TRAVIS
+from test_psutil import (get_test_subprocess, skip_on_access_denied,
+ retry_before_failing, reap_children, sh, unittest,
+ get_kernel_version, wait_for_pid)
+
+
+def ps(cmd):
+ """Expects a ps command with a -o argument and parse the result
+ returning only the value of interest.
+ """
+ if not LINUX:
+ cmd = cmd.replace(" --no-headers ", " ")
+ if SUNOS:
+ cmd = cmd.replace("-o command", "-o comm")
+ cmd = cmd.replace("-o start", "-o stime")
+ p = subprocess.Popen(cmd, shell=1, stdout=subprocess.PIPE)
+ output = p.communicate()[0].strip()
+ if PY3:
+ output = str(output, sys.stdout.encoding)
+ if not LINUX:
+ output = output.split('\n')[1].strip()
+ try:
+ return int(output)
+ except ValueError:
+ return output
+
+
+@unittest.skipUnless(POSIX, "not a POSIX system")
+class PosixSpecificTestCase(unittest.TestCase):
+ """Compare psutil results against 'ps' command line utility."""
+
+ @classmethod
+ def setUpClass(cls):
+ cls.pid = get_test_subprocess([PYTHON, "-E", "-O"],
+ stdin=subprocess.PIPE).pid
+ wait_for_pid(cls.pid)
+
+ @classmethod
+ def tearDownClass(cls):
+ reap_children()
+
+ # for ps -o arguments see: http://unixhelp.ed.ac.uk/CGI/man-cgi?ps
+
+ def test_process_parent_pid(self):
+ ppid_ps = ps("ps --no-headers -o ppid -p %s" % self.pid)
+ ppid_psutil = psutil.Process(self.pid).ppid()
+ self.assertEqual(ppid_ps, ppid_psutil)
+
+ def test_process_uid(self):
+ uid_ps = ps("ps --no-headers -o uid -p %s" % self.pid)
+ uid_psutil = psutil.Process(self.pid).uids().real
+ self.assertEqual(uid_ps, uid_psutil)
+
+ def test_process_gid(self):
+ gid_ps = ps("ps --no-headers -o rgid -p %s" % self.pid)
+ gid_psutil = psutil.Process(self.pid).gids().real
+ self.assertEqual(gid_ps, gid_psutil)
+
+ def test_process_username(self):
+ username_ps = ps("ps --no-headers -o user -p %s" % self.pid)
+ username_psutil = psutil.Process(self.pid).username()
+ self.assertEqual(username_ps, username_psutil)
+
+ @skip_on_access_denied()
+ @retry_before_failing()
+ def test_process_rss_memory(self):
+ # give python interpreter some time to properly initialize
+ # so that the results are the same
+ time.sleep(0.1)
+ rss_ps = ps("ps --no-headers -o rss -p %s" % self.pid)
+ rss_psutil = psutil.Process(self.pid).memory_info()[0] / 1024
+ self.assertEqual(rss_ps, rss_psutil)
+
+ @skip_on_access_denied()
+ @retry_before_failing()
+ def test_process_vsz_memory(self):
+ # give python interpreter some time to properly initialize
+ # so that the results are the same
+ time.sleep(0.1)
+ vsz_ps = ps("ps --no-headers -o vsz -p %s" % self.pid)
+ vsz_psutil = psutil.Process(self.pid).memory_info()[1] / 1024
+ self.assertEqual(vsz_ps, vsz_psutil)
+
+ def test_process_name(self):
+ # use command + arg since "comm" keyword not supported on all platforms
+ name_ps = ps("ps --no-headers -o command -p %s" % (
+ self.pid)).split(' ')[0]
+ # remove path if there is any, from the command
+ name_ps = os.path.basename(name_ps).lower()
+ name_psutil = psutil.Process(self.pid).name().lower()
+ self.assertEqual(name_ps, name_psutil)
+
+ @unittest.skipIf(OSX or BSD,
+ 'ps -o start not available')
+ def test_process_create_time(self):
+ time_ps = ps("ps --no-headers -o start -p %s" % self.pid).split(' ')[0]
+ time_psutil = psutil.Process(self.pid).create_time()
+ time_psutil_tstamp = datetime.datetime.fromtimestamp(
+ time_psutil).strftime("%H:%M:%S")
+ # sometimes ps shows the time rounded up instead of down, so we check
+ # for both possible values
+ round_time_psutil = round(time_psutil)
+ round_time_psutil_tstamp = datetime.datetime.fromtimestamp(
+ round_time_psutil).strftime("%H:%M:%S")
+ self.assertIn(time_ps, [time_psutil_tstamp, round_time_psutil_tstamp])
+
+ def test_process_exe(self):
+ ps_pathname = ps("ps --no-headers -o command -p %s" %
+ self.pid).split(' ')[0]
+ psutil_pathname = psutil.Process(self.pid).exe()
+ try:
+ self.assertEqual(ps_pathname, psutil_pathname)
+ except AssertionError:
+ # certain platforms such as BSD are more accurate returning:
+ # "/usr/local/bin/python2.7"
+ # ...instead of:
+ # "/usr/local/bin/python"
+ # We do not want to consider this difference in accuracy
+ # an error.
+            adjusted_psutil_pathname = psutil_pathname[:len(ps_pathname)]
+            self.assertEqual(ps_pathname, adjusted_psutil_pathname)
+
+ def test_process_cmdline(self):
+ ps_cmdline = ps("ps --no-headers -o command -p %s" % self.pid)
+ psutil_cmdline = " ".join(psutil.Process(self.pid).cmdline())
+ if SUNOS:
+ # ps on Solaris only shows the first part of the cmdline
+ psutil_cmdline = psutil_cmdline.split(" ")[0]
+ self.assertEqual(ps_cmdline, psutil_cmdline)
+
+ @retry_before_failing()
+ def test_pids(self):
+ # Note: this test might fail if the OS is starting/killing
+ # other processes in the meantime
+ if SUNOS:
+ cmd = ["ps", "ax"]
+ else:
+ cmd = ["ps", "ax", "-o", "pid"]
+ p = get_test_subprocess(cmd, stdout=subprocess.PIPE)
+ output = p.communicate()[0].strip()
+ if PY3:
+ output = str(output, sys.stdout.encoding)
+ pids_ps = []
+ for line in output.split('\n')[1:]:
+ if line:
+ pid = int(line.split()[0].strip())
+ pids_ps.append(pid)
+        # remove the ps subprocess pid, which should be dead by now
+ pids_ps.remove(p.pid)
+ pids_psutil = psutil.pids()
+ pids_ps.sort()
+ pids_psutil.sort()
+
+ # on OSX ps doesn't show pid 0
+ if OSX and 0 not in pids_ps:
+ pids_ps.insert(0, 0)
+
+ if pids_ps != pids_psutil:
+ difference = [x for x in pids_psutil if x not in pids_ps] + \
+ [x for x in pids_ps if x not in pids_psutil]
+ self.fail("difference: " + str(difference))
+
+ # for some reason ifconfig -a does not report all interfaces
+ # returned by psutil
+ @unittest.skipIf(SUNOS, "test not reliable on SUNOS")
+ @unittest.skipIf(TRAVIS, "test not reliable on Travis")
+ def test_nic_names(self):
+ p = subprocess.Popen("ifconfig -a", shell=1, stdout=subprocess.PIPE)
+ output = p.communicate()[0].strip()
+ if PY3:
+ output = str(output, sys.stdout.encoding)
+ for nic in psutil.net_io_counters(pernic=True).keys():
+ for line in output.split():
+ if line.startswith(nic):
+ break
+ else:
+ self.fail(
+ "couldn't find %s nic in 'ifconfig -a' output\n%s" % (
+ nic, output))
+
+ @retry_before_failing()
+ def test_users(self):
+ out = sh("who")
+ lines = out.split('\n')
+ users = [x.split()[0] for x in lines]
+ self.assertEqual(len(users), len(psutil.users()))
+ terminals = [x.split()[1] for x in lines]
+ for u in psutil.users():
+ self.assertTrue(u.name in users, u.name)
+ self.assertTrue(u.terminal in terminals, u.terminal)
+
+ def test_fds_open(self):
+        # Note: this fails from time to time; I'm inclined to think
+        # it doesn't mean anything is actually broken
+        def call(p, name):
+            args = ()
+            attr = getattr(p, name, None)
+            if attr is not None and callable(attr):
+                if name == 'rlimit':
+                    args = (psutil.RLIMIT_NOFILE,)
+                attr(*args)
+
+ p = psutil.Process(os.getpid())
+ failures = []
+ ignored_names = ['terminate', 'kill', 'suspend', 'resume', 'nice',
+ 'send_signal', 'wait', 'children', 'as_dict']
+ if LINUX and get_kernel_version() < (2, 6, 36):
+ ignored_names.append('rlimit')
+ if LINUX and get_kernel_version() < (2, 6, 23):
+ ignored_names.append('num_ctx_switches')
+ for name in dir(psutil.Process):
+ if (name.startswith('_') or name in ignored_names):
+ continue
+ else:
+ try:
+ num1 = p.num_fds()
+ for x in range(2):
+ call(p, name)
+ num2 = p.num_fds()
+ except psutil.AccessDenied:
+ pass
+ else:
+ if abs(num2 - num1) > 1:
+ fail = "failure while processing Process.%s method " \
+ "(before=%s, after=%s)" % (name, num1, num2)
+ failures.append(fail)
+ if failures:
+ self.fail('\n' + '\n'.join(failures))
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(PosixSpecificTestCase))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/_sunos.py b/python/psutil/test/_sunos.py
new file mode 100644
index 000000000..3d54ccd8c
--- /dev/null
+++ b/python/psutil/test/_sunos.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sun OS specific tests. These are implicitly run by test_psutil.py."""
+
+import sys
+import os
+
+from test_psutil import SUNOS, sh, unittest
+import psutil
+
+
+@unittest.skipUnless(SUNOS, "not a SunOS system")
+class SunOSSpecificTestCase(unittest.TestCase):
+
+ def test_swap_memory(self):
+ out = sh('env PATH=/usr/sbin:/sbin:%s swap -l -k' % os.environ['PATH'])
+ lines = out.strip().split('\n')[1:]
+ if not lines:
+ raise ValueError('no swap device(s) configured')
+ total = free = 0
+ for line in lines:
+ line = line.split()
+ t, f = line[-2:]
+ t = t.replace('K', '')
+ f = f.replace('K', '')
+ total += int(int(t) * 1024)
+ free += int(int(f) * 1024)
+ used = total - free
+
+ psutil_swap = psutil.swap_memory()
+ self.assertEqual(psutil_swap.total, total)
+ self.assertEqual(psutil_swap.used, used)
+ self.assertEqual(psutil_swap.free, free)
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(SunOSSpecificTestCase))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/_windows.py b/python/psutil/test/_windows.py
new file mode 100644
index 000000000..b7477bfeb
--- /dev/null
+++ b/python/psutil/test/_windows.py
@@ -0,0 +1,464 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Windows specific tests. These are implicitly run by test_psutil.py."""
+
+import errno
+import os
+import platform
+import signal
+import subprocess
+import sys
+import time
+import traceback
+
+from test_psutil import APPVEYOR, WINDOWS
+from test_psutil import get_test_subprocess, reap_children, unittest
+
+import mock
+try:
+ import wmi
+except ImportError:
+ wmi = None
+try:
+ import win32api
+ import win32con
+except ImportError:
+ win32api = win32con = None
+
+from psutil._compat import PY3, callable, long
+import psutil
+
+
+cext = psutil._psplatform.cext
+
+
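+# Translate the raw OSErrors raised by the C extension into the psutil
+# exceptions the high-level API raises, so that both layers can be
+# exercised uniformly by the tests below.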
+def wrap_exceptions(fun):
+ def wrapper(self, *args, **kwargs):
+ try:
+ return fun(self, *args, **kwargs)
+ except OSError as err:
+ from psutil._pswindows import ACCESS_DENIED_SET
+ if err.errno in ACCESS_DENIED_SET:
+ raise psutil.AccessDenied(None, None)
+ if err.errno == errno.ESRCH:
+ raise psutil.NoSuchProcess(None, None)
+ raise
+ return wrapper
+
+
+@unittest.skipUnless(WINDOWS, "not a Windows system")
+class WindowsSpecificTestCase(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.pid = get_test_subprocess().pid
+
+ @classmethod
+ def tearDownClass(cls):
+ reap_children()
+
+ def test_issue_24(self):
+ p = psutil.Process(0)
+ self.assertRaises(psutil.AccessDenied, p.kill)
+
+ def test_special_pid(self):
+ p = psutil.Process(4)
+ self.assertEqual(p.name(), 'System')
+ # use __str__ to access all common Process properties to check
+ # that nothing strange happens
+ str(p)
+ p.username()
+ self.assertTrue(p.create_time() >= 0.0)
+ try:
+ rss, vms = p.memory_info()
+ except psutil.AccessDenied:
+ # expected on Windows Vista and Windows 7
+ if not platform.uname()[1] in ('vista', 'win-7', 'win7'):
+ raise
+ else:
+ self.assertTrue(rss > 0)
+
+ def test_send_signal(self):
+ p = psutil.Process(self.pid)
+ self.assertRaises(ValueError, p.send_signal, signal.SIGINT)
+
+ def test_nic_names(self):
+ p = subprocess.Popen(['ipconfig', '/all'], stdout=subprocess.PIPE)
+ out = p.communicate()[0]
+ if PY3:
+ out = str(out, sys.stdout.encoding)
+ nics = psutil.net_io_counters(pernic=True).keys()
+ for nic in nics:
+ if "pseudo-interface" in nic.replace(' ', '-').lower():
+ continue
+ if nic not in out:
+ self.fail(
+ "%r nic wasn't found in 'ipconfig /all' output" % nic)
+
+ def test_exe(self):
+ for p in psutil.process_iter():
+ try:
+ self.assertEqual(os.path.basename(p.exe()), p.name())
+ except psutil.Error:
+ pass
+
+ # --- Process class tests
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_name(self):
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ self.assertEqual(p.name(), w.Caption)
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_exe(self):
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ # Note: wmi reports the exe as a lower case string.
+ # Being Windows paths case-insensitive we ignore that.
+ self.assertEqual(p.exe().lower(), w.ExecutablePath.lower())
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_cmdline(self):
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ self.assertEqual(' '.join(p.cmdline()),
+ w.CommandLine.replace('"', ''))
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_username(self):
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ domain, _, username = w.GetOwner()
+ username = "%s\\%s" % (domain, username)
+ self.assertEqual(p.username(), username)
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_rss_memory(self):
+ time.sleep(0.1)
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ rss = p.memory_info().rss
+ self.assertEqual(rss, int(w.WorkingSetSize))
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_vms_memory(self):
+ time.sleep(0.1)
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ vms = p.memory_info().vms
+ # http://msdn.microsoft.com/en-us/library/aa394372(VS.85).aspx
+        # ...claims that PageFileUsage is expressed in kilobytes, but
+        # funnily enough on certain platforms bytes are returned
+        # instead.
+ wmi_usage = int(w.PageFileUsage)
+ if (vms != wmi_usage) and (vms != wmi_usage * 1024):
+ self.fail("wmi=%s, psutil=%s" % (wmi_usage, vms))
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_process_create_time(self):
+ w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ p = psutil.Process(self.pid)
+ wmic_create = str(w.CreationDate.split('.')[0])
+ psutil_create = time.strftime("%Y%m%d%H%M%S",
+ time.localtime(p.create_time()))
+ self.assertEqual(wmic_create, psutil_create)
+
+ # --- psutil namespace functions and constants tests
+
+ @unittest.skipUnless('NUMBER_OF_PROCESSORS' in os.environ,
+ 'NUMBER_OF_PROCESSORS env var is not available')
+ def test_cpu_count(self):
+ num_cpus = int(os.environ['NUMBER_OF_PROCESSORS'])
+ self.assertEqual(num_cpus, psutil.cpu_count())
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_total_phymem(self):
+ w = wmi.WMI().Win32_ComputerSystem()[0]
+ self.assertEqual(int(w.TotalPhysicalMemory),
+ psutil.virtual_memory().total)
+
+ # @unittest.skipIf(wmi is None, "wmi module is not installed")
+ # def test__UPTIME(self):
+ # # _UPTIME constant is not public but it is used internally
+ # # as value to return for pid 0 creation time.
+ # # WMI behaves the same.
+ # w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
+ # p = psutil.Process(0)
+ # wmic_create = str(w.CreationDate.split('.')[0])
+ # psutil_create = time.strftime("%Y%m%d%H%M%S",
+ # time.localtime(p.create_time()))
+ #
+
+ # Note: this test is not very reliable
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+    @unittest.skipIf(APPVEYOR, "test not reliable on appveyor")
+ def test_pids(self):
+ # Note: this test might fail if the OS is starting/killing
+ # other processes in the meantime
+ w = wmi.WMI().Win32_Process()
+ wmi_pids = set([x.ProcessId for x in w])
+ psutil_pids = set(psutil.pids())
+ self.assertEqual(wmi_pids, psutil_pids)
+
+ @unittest.skipIf(wmi is None, "wmi module is not installed")
+ def test_disks(self):
+ ps_parts = psutil.disk_partitions(all=True)
+ wmi_parts = wmi.WMI().Win32_LogicalDisk()
+ for ps_part in ps_parts:
+ for wmi_part in wmi_parts:
+ if ps_part.device.replace('\\', '') == wmi_part.DeviceID:
+ if not ps_part.mountpoint:
+ # this is usually a CD-ROM with no disk inserted
+ break
+ try:
+ usage = psutil.disk_usage(ps_part.mountpoint)
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ # usually this is the floppy
+ break
+ else:
+ raise
+ self.assertEqual(usage.total, int(wmi_part.Size))
+ wmi_free = int(wmi_part.FreeSpace)
+                    # 10 MB tolerance (exact equality is too strict
+                    # since free space may change between the calls)
+ if abs(usage.free - wmi_free) > 10 * 1024 * 1024:
+ self.fail("psutil=%s, wmi=%s" % (
+ usage.free, wmi_free))
+ break
+ else:
+ self.fail("can't find partition %s" % repr(ps_part))
+
+ @unittest.skipIf(win32api is None, "pywin32 module is not installed")
+ def test_num_handles(self):
+ p = psutil.Process(os.getpid())
+ before = p.num_handles()
+ handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION,
+ win32con.FALSE, os.getpid())
+ after = p.num_handles()
+ self.assertEqual(after, before + 1)
+ win32api.CloseHandle(handle)
+ self.assertEqual(p.num_handles(), before)
+
+ @unittest.skipIf(win32api is None, "pywin32 module is not installed")
+ def test_num_handles_2(self):
+        # Note: this fails from time to time; I'm inclined to think
+        # it doesn't mean anything is actually broken
+        def call(p, name):
+            attr = getattr(p, name, None)
+            if attr is not None and callable(attr):
+                attr()
+
+ p = psutil.Process(self.pid)
+ failures = []
+ for name in dir(psutil.Process):
+ if name.startswith('_') \
+ or name in ('terminate', 'kill', 'suspend', 'resume',
+ 'nice', 'send_signal', 'wait', 'children',
+ 'as_dict'):
+ continue
+ else:
+ try:
+ call(p, name)
+ num1 = p.num_handles()
+ call(p, name)
+ num2 = p.num_handles()
+ except (psutil.NoSuchProcess, psutil.AccessDenied):
+ pass
+ else:
+ if num2 > num1:
+ fail = \
+ "failure while processing Process.%s method " \
+ "(before=%s, after=%s)" % (name, num1, num2)
+ failures.append(fail)
+ if failures:
+ self.fail('\n' + '\n'.join(failures))
+
+ def test_name_always_available(self):
+ # On Windows name() is never supposed to raise AccessDenied,
+ # see https://github.com/giampaolo/psutil/issues/627
+ for p in psutil.process_iter():
+ try:
+ p.name()
+            except psutil.NoSuchProcess:
+ pass
+
+
+@unittest.skipUnless(WINDOWS, "not a Windows system")
+class TestDualProcessImplementation(unittest.TestCase):
+ """
+ Certain APIs on Windows have 2 internal implementations, one
+ based on documented Windows APIs, another one based
+ NtQuerySystemInformation() which gets called as fallback in
+ case the first fails because of limited permission error.
+ Here we test that the two methods return the exact same value,
+ see:
+ https://github.com/giampaolo/psutil/issues/304
+ """
+
+ fun_names = [
+ # function name, tolerance
+ ('proc_cpu_times', 0.2),
+ ('proc_create_time', 0.5),
+ ('proc_num_handles', 1), # 1 because impl #1 opens a handle
+ ('proc_memory_info', 1024), # KB
+ ('proc_io_counters', 0),
+ ]
+
+ def test_compare_values(self):
+ def assert_ge_0(obj):
+ if isinstance(obj, tuple):
+ for value in obj:
+ self.assertGreaterEqual(value, 0, msg=obj)
+ elif isinstance(obj, (int, long, float)):
+ self.assertGreaterEqual(obj, 0)
+ else:
+ assert 0 # case not handled which needs to be fixed
+
+ def compare_with_tolerance(ret1, ret2, tolerance):
+ if ret1 == ret2:
+ return
+ else:
+ if isinstance(ret2, (int, long, float)):
+ diff = abs(ret1 - ret2)
+ self.assertLessEqual(diff, tolerance)
+ elif isinstance(ret2, tuple):
+ for a, b in zip(ret1, ret2):
+ diff = abs(a - b)
+ self.assertLessEqual(diff, tolerance)
+
+ from psutil._pswindows import ntpinfo
+ failures = []
+ for p in psutil.process_iter():
+ try:
+ nt = ntpinfo(*cext.proc_info(p.pid))
+ except psutil.NoSuchProcess:
+ continue
+ assert_ge_0(nt)
+
+ for name, tolerance in self.fun_names:
+ if name == 'proc_memory_info' and p.pid == os.getpid():
+ continue
+ if name == 'proc_create_time' and p.pid in (0, 4):
+ continue
+ meth = wrap_exceptions(getattr(cext, name))
+ try:
+ ret = meth(p.pid)
+ except (psutil.NoSuchProcess, psutil.AccessDenied):
+ continue
+ # compare values
+ try:
+ if name == 'proc_cpu_times':
+ compare_with_tolerance(ret[0], nt.user_time, tolerance)
+ compare_with_tolerance(ret[1],
+ nt.kernel_time, tolerance)
+ elif name == 'proc_create_time':
+ compare_with_tolerance(ret, nt.create_time, tolerance)
+ elif name == 'proc_num_handles':
+ compare_with_tolerance(ret, nt.num_handles, tolerance)
+ elif name == 'proc_io_counters':
+ compare_with_tolerance(ret[0], nt.io_rcount, tolerance)
+ compare_with_tolerance(ret[1], nt.io_wcount, tolerance)
+ compare_with_tolerance(ret[2], nt.io_rbytes, tolerance)
+ compare_with_tolerance(ret[3], nt.io_wbytes, tolerance)
+ elif name == 'proc_memory_info':
+ try:
+ rawtupl = cext.proc_memory_info_2(p.pid)
+ except psutil.NoSuchProcess:
+ continue
+ compare_with_tolerance(ret, rawtupl, tolerance)
+ except AssertionError:
+ trace = traceback.format_exc()
+ msg = '%s\npid=%s, method=%r, ret_1=%r, ret_2=%r' % (
+ trace, p.pid, name, ret, nt)
+ failures.append(msg)
+ break
+
+ if failures:
+ self.fail('\n\n'.join(failures))
+
+ # ---
+    # same tests as above, but they mimic the first (fast) method
+    # failing with AccessDenied, forcing the fallback implementation
+ # TODO: currently does not take tolerance into account.
+
+ def test_name(self):
+ name = psutil.Process().name()
+ with mock.patch("psutil._psplatform.cext.proc_exe",
+ side_effect=psutil.AccessDenied(os.getpid())) as fun:
+            self.assertEqual(psutil.Process().name(), name)
+ assert fun.called
+
+ def test_memory_info(self):
+ mem = psutil.Process().memory_info()
+ with mock.patch("psutil._psplatform.cext.proc_memory_info",
+ side_effect=OSError(errno.EPERM, "msg")) as fun:
+            self.assertEqual(psutil.Process().memory_info(), mem)
+ assert fun.called
+
+ def test_create_time(self):
+ ctime = psutil.Process().create_time()
+ with mock.patch("psutil._psplatform.cext.proc_create_time",
+ side_effect=OSError(errno.EPERM, "msg")) as fun:
+            self.assertEqual(psutil.Process().create_time(), ctime)
+ assert fun.called
+
+ def test_cpu_times(self):
+ cpu_times = psutil.Process().cpu_times()
+ with mock.patch("psutil._psplatform.cext.proc_cpu_times",
+ side_effect=OSError(errno.EPERM, "msg")) as fun:
+            self.assertEqual(psutil.Process().cpu_times(), cpu_times)
+ assert fun.called
+
+ def test_io_counters(self):
+ io_counters = psutil.Process().io_counters()
+ with mock.patch("psutil._psplatform.cext.proc_io_counters",
+ side_effect=OSError(errno.EPERM, "msg")) as fun:
+            self.assertEqual(psutil.Process().io_counters(), io_counters)
+ assert fun.called
+
+    def test_num_handles(self):
+        num_handles = psutil.Process().num_handles()
+        with mock.patch("psutil._psplatform.cext.proc_num_handles",
+                        side_effect=OSError(errno.EPERM, "msg")) as fun:
+            self.assertEqual(psutil.Process().num_handles(), num_handles)
+            assert fun.called
+
+ # --- other tests
+
+ def test_compare_name_exe(self):
+ for p in psutil.process_iter():
+ try:
+ a = os.path.basename(p.exe())
+ b = p.name()
+ except (psutil.NoSuchProcess, psutil.AccessDenied):
+ pass
+ else:
+ self.assertEqual(a, b)
+
+ def test_zombies(self):
+        # test that NoSuchProcess is raised by the 2nd implementation
+        # in case a process no longer exists
+ ZOMBIE_PID = max(psutil.pids()) + 5000
+ for name, _ in self.fun_names:
+ meth = wrap_exceptions(getattr(cext, name))
+ self.assertRaises(psutil.NoSuchProcess, meth, ZOMBIE_PID)
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ test_suite.addTest(unittest.makeSuite(WindowsSpecificTestCase))
+ test_suite.addTest(unittest.makeSuite(TestDualProcessImplementation))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/test_memory_leaks.py b/python/psutil/test/test_memory_leaks.py
new file mode 100644
index 000000000..6f02dc0ac
--- /dev/null
+++ b/python/psutil/test/test_memory_leaks.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A test script which attempts to detect memory leaks by calling C
+functions many times and comparing process memory usage before and
+after the calls. It might produce false positives.
+"""
+
+import functools
+import gc
+import os
+import socket
+import sys
+import threading
+import time
+
+import psutil
+import psutil._common
+
+from psutil._compat import xrange, callable
+from test_psutil import (WINDOWS, POSIX, OSX, LINUX, SUNOS, BSD, TESTFN,
+ RLIMIT_SUPPORT, TRAVIS)
+from test_psutil import (reap_children, supports_ipv6, safe_remove,
+ get_test_subprocess)
+
+if sys.version_info < (2, 7):
+ import unittest2 as unittest # https://pypi.python.org/pypi/unittest2
+else:
+ import unittest
+
+
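+# Tunables for the leak check performed by Base.execute() below: each
+# function is called LOOPS times per measurement round and the RSS
+# increase between two rounds must stay within TOLERANCE bytes.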
+LOOPS = 1000
+TOLERANCE = 4096
+SKIP_PYTHON_IMPL = True
+
+
+def skip_if_linux():
+ return unittest.skipIf(LINUX and SKIP_PYTHON_IMPL,
+ "not worth being tested on LINUX (pure python)")
+
+
+class Base(unittest.TestCase):
+ proc = psutil.Process()
+
+ def execute(self, function, *args, **kwargs):
+ def call_many_times():
+ for x in xrange(LOOPS - 1):
+ self.call(function, *args, **kwargs)
+ del x
+ gc.collect()
+ return self.get_mem()
+
+ self.call(function, *args, **kwargs)
+ self.assertEqual(gc.garbage, [])
+ self.assertEqual(threading.active_count(), 1)
+
+ # RSS comparison
+ # step 1
+ rss1 = call_many_times()
+ # step 2
+ rss2 = call_many_times()
+
+ difference = rss2 - rss1
+ if difference > TOLERANCE:
+            # This doesn't necessarily mean we have a leak yet.
+            # At this point we assume that, after having called the
+            # function so many times, memory usage has stabilized and,
+            # if there are no leaks, it should not increase any more.
+            # Let's keep calling the function for 3 more seconds and
+            # fail if we notice any difference.
+ stop_at = time.time() + 3
+ while True:
+ self.call(function, *args, **kwargs)
+ if time.time() >= stop_at:
+ break
+ del stop_at
+ gc.collect()
+ rss3 = self.get_mem()
+ difference = rss3 - rss2
+ if rss3 > rss2:
+ self.fail("rss2=%s, rss3=%s, difference=%s"
+ % (rss2, rss3, difference))
+
+ def execute_w_exc(self, exc, function, *args, **kwargs):
+ kwargs['_exc'] = exc
+ self.execute(function, *args, **kwargs)
+
+ def get_mem(self):
+ return psutil.Process().memory_info()[0]
+
+ def call(self, function, *args, **kwargs):
+ raise NotImplementedError("must be implemented in subclass")
+
+
+class TestProcessObjectLeaks(Base):
+ """Test leaks of Process class methods and properties"""
+
+ def setUp(self):
+ gc.collect()
+
+ def tearDown(self):
+ reap_children()
+
+ def call(self, function, *args, **kwargs):
+ if callable(function):
+ if '_exc' in kwargs:
+ exc = kwargs.pop('_exc')
+ self.assertRaises(exc, function, *args, **kwargs)
+ else:
+ try:
+ function(*args, **kwargs)
+ except psutil.Error:
+ pass
+ else:
+ meth = getattr(self.proc, function)
+ if '_exc' in kwargs:
+ exc = kwargs.pop('_exc')
+ self.assertRaises(exc, meth, *args, **kwargs)
+ else:
+ try:
+ meth(*args, **kwargs)
+ except psutil.Error:
+ pass
+
+ @skip_if_linux()
+ def test_name(self):
+ self.execute('name')
+
+ @skip_if_linux()
+ def test_cmdline(self):
+ self.execute('cmdline')
+
+ @skip_if_linux()
+ def test_exe(self):
+ self.execute('exe')
+
+ @skip_if_linux()
+ def test_ppid(self):
+ self.execute('ppid')
+
+ @unittest.skipUnless(POSIX, "POSIX only")
+ @skip_if_linux()
+ def test_uids(self):
+ self.execute('uids')
+
+ @unittest.skipUnless(POSIX, "POSIX only")
+ @skip_if_linux()
+ def test_gids(self):
+ self.execute('gids')
+
+ @skip_if_linux()
+ def test_status(self):
+ self.execute('status')
+
+ def test_nice_get(self):
+ self.execute('nice')
+
+ def test_nice_set(self):
+ niceness = psutil.Process().nice()
+ self.execute('nice', niceness)
+
+ @unittest.skipUnless(hasattr(psutil.Process, 'ionice'),
+ "Linux and Windows Vista only")
+ def test_ionice_get(self):
+ self.execute('ionice')
+
+ @unittest.skipUnless(hasattr(psutil.Process, 'ionice'),
+ "Linux and Windows Vista only")
+ def test_ionice_set(self):
+ if WINDOWS:
+ value = psutil.Process().ionice()
+ self.execute('ionice', value)
+ else:
+ from psutil._pslinux import cext
+ self.execute('ionice', psutil.IOPRIO_CLASS_NONE)
+ fun = functools.partial(cext.proc_ioprio_set, os.getpid(), -1, 0)
+ self.execute_w_exc(OSError, fun)
+
+ @unittest.skipIf(OSX or SUNOS, "feature not supported on this platform")
+ @skip_if_linux()
+ def test_io_counters(self):
+ self.execute('io_counters')
+
+ @unittest.skipUnless(WINDOWS, "not worth being tested on posix")
+ def test_username(self):
+ self.execute('username')
+
+ @skip_if_linux()
+ def test_create_time(self):
+ self.execute('create_time')
+
+ @skip_if_linux()
+ def test_num_threads(self):
+ self.execute('num_threads')
+
+ @unittest.skipUnless(WINDOWS, "Windows only")
+ def test_num_handles(self):
+ self.execute('num_handles')
+
+ @unittest.skipUnless(POSIX, "POSIX only")
+ @skip_if_linux()
+ def test_num_fds(self):
+ self.execute('num_fds')
+
+ @skip_if_linux()
+ def test_threads(self):
+ self.execute('threads')
+
+ @skip_if_linux()
+ def test_cpu_times(self):
+ self.execute('cpu_times')
+
+ @skip_if_linux()
+ def test_memory_info(self):
+ self.execute('memory_info')
+
+ @skip_if_linux()
+ def test_memory_info_ex(self):
+ self.execute('memory_info_ex')
+
+ @unittest.skipUnless(POSIX, "POSIX only")
+ @skip_if_linux()
+ def test_terminal(self):
+ self.execute('terminal')
+
+ @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL,
+ "not worth being tested on POSIX (pure python)")
+ def test_resume(self):
+ self.execute('resume')
+
+ @skip_if_linux()
+ def test_cwd(self):
+ self.execute('cwd')
+
+ @unittest.skipUnless(WINDOWS or LINUX or BSD,
+ "Windows or Linux or BSD only")
+ def test_cpu_affinity_get(self):
+ self.execute('cpu_affinity')
+
+ @unittest.skipUnless(WINDOWS or LINUX or BSD,
+ "Windows or Linux or BSD only")
+ def test_cpu_affinity_set(self):
+ affinity = psutil.Process().cpu_affinity()
+ self.execute('cpu_affinity', affinity)
+ if not TRAVIS:
+ self.execute_w_exc(ValueError, 'cpu_affinity', [-1])
+
+ @skip_if_linux()
+ def test_open_files(self):
+ safe_remove(TESTFN) # needed after UNIX socket test has run
+ with open(TESTFN, 'w'):
+ self.execute('open_files')
+
+ # OSX implementation is unbelievably slow
+ @unittest.skipIf(OSX, "OSX implementation is too slow")
+ @skip_if_linux()
+ def test_memory_maps(self):
+ self.execute('memory_maps')
+
+ @unittest.skipUnless(LINUX, "Linux only")
+ @unittest.skipUnless(LINUX and RLIMIT_SUPPORT,
+ "only available on Linux >= 2.6.36")
+ def test_rlimit_get(self):
+ self.execute('rlimit', psutil.RLIMIT_NOFILE)
+
+ @unittest.skipUnless(LINUX, "Linux only")
+ @unittest.skipUnless(LINUX and RLIMIT_SUPPORT,
+ "only available on Linux >= 2.6.36")
+ def test_rlimit_set(self):
+ limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)
+ self.execute('rlimit', psutil.RLIMIT_NOFILE, limit)
+ self.execute_w_exc(OSError, 'rlimit', -1)
+
+ @skip_if_linux()
+ # Windows implementation is based on a single system-wide function
+ @unittest.skipIf(WINDOWS, "tested later")
+ def test_connections(self):
+ def create_socket(family, type):
+ sock = socket.socket(family, type)
+ sock.bind(('', 0))
+ if type == socket.SOCK_STREAM:
+ sock.listen(1)
+ return sock
+
+ socks = []
+ socks.append(create_socket(socket.AF_INET, socket.SOCK_STREAM))
+ socks.append(create_socket(socket.AF_INET, socket.SOCK_DGRAM))
+ if supports_ipv6():
+ socks.append(create_socket(socket.AF_INET6, socket.SOCK_STREAM))
+ socks.append(create_socket(socket.AF_INET6, socket.SOCK_DGRAM))
+ if hasattr(socket, 'AF_UNIX'):
+ safe_remove(TESTFN)
+ s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ s.bind(TESTFN)
+ s.listen(1)
+ socks.append(s)
+ kind = 'all'
+ # TODO: UNIX sockets are temporarily implemented by parsing
+ # 'pfiles' cmd output; we don't want that part of the code to
+ # be executed.
+ if SUNOS:
+ kind = 'inet'
+ try:
+ self.execute('connections', kind=kind)
+ finally:
+ for s in socks:
+ s.close()
+
+
+p = get_test_subprocess()
+DEAD_PROC = psutil.Process(p.pid)
+DEAD_PROC.kill()
+DEAD_PROC.wait()
+del p
+
+
+class TestProcessObjectLeaksZombie(TestProcessObjectLeaks):
+ """Same as above but looks for leaks occurring when dealing with
+ zombie processes raising NoSuchProcess exception.
+ """
+ proc = DEAD_PROC
+
+ def call(self, *args, **kwargs):
+ try:
+ TestProcessObjectLeaks.call(self, *args, **kwargs)
+ except psutil.NoSuchProcess:
+ pass
+
+ if not POSIX:
+ def test_kill(self):
+ self.execute('kill')
+
+ def test_terminate(self):
+ self.execute('terminate')
+
+ def test_suspend(self):
+ self.execute('suspend')
+
+ def test_resume(self):
+ self.execute('resume')
+
+ def test_wait(self):
+ self.execute('wait')
+
+
+class TestModuleFunctionsLeaks(Base):
+ """Test leaks of psutil module functions."""
+
+ def setUp(self):
+ gc.collect()
+
+ def call(self, function, *args, **kwargs):
+ fun = getattr(psutil, function)
+ fun(*args, **kwargs)
+
+ @skip_if_linux()
+ def test_cpu_count_logical(self):
+ psutil.cpu_count = psutil._psplatform.cpu_count_logical
+ self.execute('cpu_count')
+
+ @skip_if_linux()
+ def test_cpu_count_physical(self):
+ psutil.cpu_count = psutil._psplatform.cpu_count_physical
+ self.execute('cpu_count')
+
+ @skip_if_linux()
+ def test_boot_time(self):
+ self.execute('boot_time')
+
+ @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL,
+ "not worth being tested on POSIX (pure python)")
+ def test_pid_exists(self):
+ self.execute('pid_exists', os.getpid())
+
+ def test_virtual_memory(self):
+ self.execute('virtual_memory')
+
+ # TODO: remove this skip when this gets fixed
+ @unittest.skipIf(SUNOS,
+ "not worth being tested on SUNOS (uses a subprocess)")
+ def test_swap_memory(self):
+ self.execute('swap_memory')
+
+ @skip_if_linux()
+ def test_cpu_times(self):
+ self.execute('cpu_times')
+
+ @skip_if_linux()
+ def test_per_cpu_times(self):
+ self.execute('cpu_times', percpu=True)
+
+ @unittest.skipIf(POSIX and SKIP_PYTHON_IMPL,
+ "not worth being tested on POSIX (pure python)")
+ def test_disk_usage(self):
+ self.execute('disk_usage', '.')
+
+ def test_disk_partitions(self):
+ self.execute('disk_partitions')
+
+ @skip_if_linux()
+ def test_net_io_counters(self):
+ self.execute('net_io_counters')
+
+ @unittest.skipIf(LINUX and not os.path.exists('/proc/diskstats'),
+ '/proc/diskstats not available on this Linux version')
+ @skip_if_linux()
+ def test_disk_io_counters(self):
+ self.execute('disk_io_counters')
+
+ # XXX - on Windows this produces a false positive
+ @unittest.skipIf(WINDOWS, "XXX produces a false positive on Windows")
+ def test_users(self):
+ self.execute('users')
+
+ @unittest.skipIf(LINUX,
+ "not worth being tested on Linux (pure python)")
+ def test_net_connections(self):
+ self.execute('net_connections')
+
+ def test_net_if_addrs(self):
+ self.execute('net_if_addrs')
+
+ @unittest.skipIf(TRAVIS, "EPERM on travis")
+ def test_net_if_stats(self):
+ self.execute('net_if_stats')
+
+
+def main():
+ test_suite = unittest.TestSuite()
+ tests = [TestProcessObjectLeaksZombie,
+ TestProcessObjectLeaks,
+ TestModuleFunctionsLeaks]
+ for test in tests:
+ test_suite.addTest(unittest.makeSuite(test))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/test/test_psutil.py b/python/psutil/test/test_psutil.py
new file mode 100644
index 000000000..3b2e3587a
--- /dev/null
+++ b/python/psutil/test/test_psutil.py
@@ -0,0 +1,3013 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+psutil test suite. Run it with:
+$ make test
+
+If you're on Python < 2.7 the unittest2 module must be installed first:
+https://pypi.python.org/pypi/unittest2
+"""
+
+from __future__ import division
+
+import ast
+import atexit
+import collections
+import contextlib
+import datetime
+import errno
+import functools
+import imp
+import json
+import os
+import pickle
+import pprint
+import re
+import select
+import shutil
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import textwrap
+import threading
+import time
+import traceback
+import types
+import warnings
+from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
+try:
+ import ipaddress # python >= 3.3
+except ImportError:
+ ipaddress = None
+try:
+ from unittest import mock # py3
+except ImportError:
+ import mock # requires "pip install mock"
+
+import psutil
+from psutil._compat import PY3, callable, long, unicode
+
+if sys.version_info < (2, 7):
+ import unittest2 as unittest # https://pypi.python.org/pypi/unittest2
+else:
+ import unittest
+if sys.version_info >= (3, 4):
+ import enum
+else:
+ enum = None
+
+
+# ===================================================================
+# --- Constants
+# ===================================================================
+
+# conf for retry_before_failing() decorator
+NO_RETRIES = 10
+# bytes tolerance for OS memory related tests
+TOLERANCE = 500 * 1024 # 500KB
+# the timeout used in functions which have to wait
+GLOBAL_TIMEOUT = 3
+
+AF_INET6 = getattr(socket, "AF_INET6", None)
+AF_UNIX = getattr(socket, "AF_UNIX", None)
+PYTHON = os.path.realpath(sys.executable)
+DEVNULL = open(os.devnull, 'r+')
+TESTFN = os.path.join(os.getcwd(), "$testfile")
+TESTFN_UNICODE = TESTFN + "ƒőő"
+TESTFILE_PREFIX = 'psutil-test-suite-'
+if not PY3:
+ try:
+ TESTFN_UNICODE = unicode(TESTFN_UNICODE, sys.getfilesystemencoding())
+ except UnicodeDecodeError:
+ TESTFN_UNICODE = TESTFN + "???"
+
+EXAMPLES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..', 'examples'))
+
+POSIX = os.name == 'posix'
+WINDOWS = os.name == 'nt'
+if WINDOWS:
+ WIN_VISTA = (6, 0, 0)
+LINUX = sys.platform.startswith("linux")
+OSX = sys.platform.startswith("darwin")
+BSD = sys.platform.startswith("freebsd")
+SUNOS = sys.platform.startswith("sunos")
+VALID_PROC_STATUSES = [getattr(psutil, x) for x in dir(psutil)
+ if x.startswith('STATUS_')]
+# whether we're running this test suite on Travis (https://travis-ci.org/)
+TRAVIS = bool(os.environ.get('TRAVIS'))
+# whether we're running this test suite on Appveyor for Windows
+# (http://www.appveyor.com/)
+APPVEYOR = bool(os.environ.get('APPVEYOR'))
+
+if TRAVIS or 'tox' in sys.argv[0]:
+ import ipaddress
+if TRAVIS or APPVEYOR:
+ GLOBAL_TIMEOUT = GLOBAL_TIMEOUT * 4
+
+
+# ===================================================================
+# --- Utility functions
+# ===================================================================
+
+def cleanup():
+ reap_children(search_all=True)
+ safe_remove(TESTFN)
+ try:
+ safe_rmdir(TESTFN_UNICODE)
+ except UnicodeEncodeError:
+ pass
+ for path in _testfiles:
+ safe_remove(path)
+
+atexit.register(cleanup)
+atexit.register(lambda: DEVNULL.close())
+
+
+_subprocesses_started = set()
+
+
+def get_test_subprocess(cmd=None, stdout=DEVNULL, stderr=DEVNULL,
+ stdin=DEVNULL, wait=False):
+ """Return a subprocess.Popen object to use in tests.
+ By default stdout and stderr are redirected to /dev/null and the
+ python interpreter is used as test process.
+ If 'wait' is True, attempts to make sure the process is in a
+ reasonably initialized state.
+ """
+ if cmd is None:
+ pyline = ""
+ if wait:
+ pyline += "open(r'%s', 'w'); " % TESTFN
+ pyline += "import time; time.sleep(60);"
+ cmd_ = [PYTHON, "-c", pyline]
+ else:
+ cmd_ = cmd
+ sproc = subprocess.Popen(cmd_, stdout=stdout, stderr=stderr, stdin=stdin)
+ if wait:
+ if cmd is None:
+ stop_at = time.time() + 3
+ while stop_at > time.time():
+ if os.path.exists(TESTFN):
+ break
+ time.sleep(0.001)
+ else:
+ warn("couldn't make sure test file was actually created")
+ else:
+ wait_for_pid(sproc.pid)
+ _subprocesses_started.add(psutil.Process(sproc.pid))
+ return sproc
+
+
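+# Illustrative usage sketch (added here for clarity, not part of the
+# original suite): spawn a throw-away interpreter and inspect it:
+#
+# sproc = get_test_subprocess(wait=True)
+# p = psutil.Process(sproc.pid)
+# assert p.is_running()
+# reap_children()
+
+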
+_testfiles = []
+
+
+def pyrun(src):
+ """Run python code 'src' in a separate interpreter.
+ Return interpreter subprocess.
+ """
+ if PY3:
+ src = bytes(src, 'ascii')
+ with tempfile.NamedTemporaryFile(
+ prefix=TESTFILE_PREFIX, delete=False) as f:
+ _testfiles.append(f.name)
+ f.write(src)
+ f.flush()
+ subp = get_test_subprocess([PYTHON, f.name], stdout=None,
+ stderr=None)
+ wait_for_pid(subp.pid)
+ return subp
+
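+# Usage sketch (illustrative only; mirrors the connection tests further
+# down, which run socket-creating snippets through pyrun()):
+#
+# proc = pyrun("import time; time.sleep(60)")
+# p = psutil.Process(proc.pid)
+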
+
+def warn(msg):
+ """Raise a warning msg."""
+ warnings.warn(msg, UserWarning)
+
+
+def sh(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE):
+ """run cmd in a subprocess and return its output.
+ raises RuntimeError on error.
+ """
+ p = subprocess.Popen(cmdline, shell=True, stdout=stdout, stderr=stderr)
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ raise RuntimeError(stderr)
+ if stderr:
+ warn(stderr)
+ if PY3:
+ stdout = str(stdout, sys.stdout.encoding)
+ return stdout.strip()
+
+
+def which(program):
+ """Same as UNIX which command. Return None on command not found."""
+ def is_exe(fpath):
+ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+ fpath, fname = os.path.split(program)
+ if fpath:
+ if is_exe(program):
+ return program
+ else:
+ for path in os.environ["PATH"].split(os.pathsep):
+ exe_file = os.path.join(path, program)
+ if is_exe(exe_file):
+ return exe_file
+ return None
+
+
+if POSIX:
+ def get_kernel_version():
+ """Return a tuple such as (2, 6, 36)."""
+ s = ""
+ uname = os.uname()[2]
+ for c in uname:
+ if c.isdigit() or c == '.':
+ s += c
+ else:
+ break
+ if not s:
+ raise ValueError("can't parse %r" % uname)
+ minor = 0
+ micro = 0
+ nums = s.split('.')
+ major = int(nums[0])
+ if len(nums) >= 2:
+ minor = int(nums[1])
+ if len(nums) >= 3:
+ micro = int(nums[2])
+ return (major, minor, micro)
+
+
+if LINUX:
+ RLIMIT_SUPPORT = get_kernel_version() >= (2, 6, 36)
+else:
+ RLIMIT_SUPPORT = False
+
+
+def wait_for_pid(pid, timeout=GLOBAL_TIMEOUT):
+ """Wait for pid to show up in the process list then return.
+ Used in the test suite to give the sub process time to initialize.
+ """
+ raise_at = time.time() + timeout
+ while True:
+ if pid in psutil.pids():
+ # give it one more iteration to allow full initialization
+ time.sleep(0.01)
+ return
+ time.sleep(0.0001)
+ if time.time() >= raise_at:
+ raise RuntimeError("Timed out")
+
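+# Example (illustrative only): make a freshly spawned pid visible to
+# psutil before querying it:
+#
+# sproc = subprocess.Popen([PYTHON, "-c", "import time; time.sleep(60)"])
+# wait_for_pid(sproc.pid)
+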
+
+def wait_for_file(fname, timeout=GLOBAL_TIMEOUT, delete_file=True):
+ """Wait for a file to be written on disk."""
+ stop_at = time.time() + 3
+ while time.time() < stop_at:
+ try:
+ with open(fname, "r") as f:
+ data = f.read()
+ if not data:
+ continue
+ if delete_file:
+ os.remove(fname)
+ return data
+ except IOError:
+ time.sleep(0.001)
+ raise RuntimeError("timed out (couldn't read file)")
+
+
+def reap_children(search_all=False):
+ """Kill any subprocess started by this test suite and ensure that
+ no zombies stick around to hog resources and create problems when
+ looking for refleaks.
+ """
+ global _subprocesses_started
+ procs = _subprocesses_started.copy()
+ if search_all:
+ this_process = psutil.Process()
+ for p in this_process.children(recursive=True):
+ procs.add(p)
+ for p in procs:
+ try:
+ p.terminate()
+ except psutil.NoSuchProcess:
+ pass
+ gone, alive = psutil.wait_procs(procs, timeout=GLOBAL_TIMEOUT)
+ for p in alive:
+ warn("couldn't terminate process %s" % p)
+ try:
+ p.kill()
+ except psutil.NoSuchProcess:
+ pass
+ _, alive = psutil.wait_procs(alive, timeout=GLOBAL_TIMEOUT)
+ if alive:
+ warn("couldn't not kill processes %s" % str(alive))
+ _subprocesses_started = set(alive)
+
+
+def check_ip_address(addr, family):
+ """Attempts to check IP address's validity."""
+ if enum and PY3:
+ assert isinstance(family, enum.IntEnum), family
+ if family == AF_INET:
+ octs = [int(x) for x in addr.split('.')]
+ assert len(octs) == 4, addr
+ for num in octs:
+ assert 0 <= num <= 255, addr
+ if ipaddress:
+ if not PY3:
+ addr = unicode(addr)
+ ipaddress.IPv4Address(addr)
+ elif family == AF_INET6:
+ assert isinstance(addr, str), addr
+ if ipaddress:
+ if not PY3:
+ addr = unicode(addr)
+ ipaddress.IPv6Address(addr)
+ elif family == psutil.AF_LINK:
+ assert re.match(r'([a-fA-F0-9]{2}[:-]?){6}', addr) is not None, addr
+ else:
+ raise ValueError("unknown family %r", family)
+
+
+def check_connection_ntuple(conn):
+ """Check validity of a connection namedtuple."""
+ valid_conn_states = [getattr(psutil, x) for x in dir(psutil) if
+ x.startswith('CONN_')]
+ assert conn[0] == conn.fd
+ assert conn[1] == conn.family
+ assert conn[2] == conn.type
+ assert conn[3] == conn.laddr
+ assert conn[4] == conn.raddr
+ assert conn[5] == conn.status
+ assert conn.type in (SOCK_STREAM, SOCK_DGRAM), repr(conn.type)
+ assert conn.family in (AF_INET, AF_INET6, AF_UNIX), repr(conn.family)
+ assert conn.status in valid_conn_states, conn.status
+
+ # check IP address and port sanity
+ for addr in (conn.laddr, conn.raddr):
+ if not addr:
+ continue
+ if conn.family in (AF_INET, AF_INET6):
+ assert isinstance(addr, tuple), addr
+ ip, port = addr
+ assert isinstance(port, int), port
+ assert 0 <= port <= 65535, port
+ check_ip_address(ip, conn.family)
+ elif conn.family == AF_UNIX:
+ assert isinstance(addr, (str, type(None))), addr
+ else:
+ raise ValueError("unknown family %r", conn.family)
+
+ if conn.family in (AF_INET, AF_INET6):
+ # actually try to bind the local socket; ignore IPv6
+ # sockets as their address might be represented as
+ # an IPv4-mapped-address (e.g. "::127.0.0.1")
+ # and that's rejected by bind()
+ if conn.family == AF_INET:
+ s = socket.socket(conn.family, conn.type)
+ with contextlib.closing(s):
+ try:
+ s.bind((conn.laddr[0], 0))
+ except socket.error as err:
+ if err.errno != errno.EADDRNOTAVAIL:
+ raise
+ elif conn.family == AF_UNIX:
+ assert not conn.raddr, repr(conn.raddr)
+ assert conn.status == psutil.CONN_NONE, conn.status
+
+ if getattr(conn, 'fd', -1) != -1:
+ assert conn.fd > 0, conn
+ if hasattr(socket, 'fromfd') and not WINDOWS:
+ try:
+ dupsock = socket.fromfd(conn.fd, conn.family, conn.type)
+ except (socket.error, OSError) as err:
+ if err.args[0] != errno.EBADF:
+ raise
+ else:
+ with contextlib.closing(dupsock):
+ assert dupsock.family == conn.family
+ assert dupsock.type == conn.type
+
+
+def safe_remove(file):
+ "Convenience function for removing temporary test files"
+ try:
+ os.remove(file)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ # file is being used by another process
+ if WINDOWS and isinstance(err, WindowsError) and err.errno == 13:
+ return
+ raise
+
+
+def safe_rmdir(dir):
+ "Convenience function for removing temporary test directories"
+ try:
+ os.rmdir(dir)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ raise
+
+
+def call_until(fun, expr, timeout=GLOBAL_TIMEOUT):
+ """Keep calling function for timeout secs and exit if eval()
+ expression is True.
+ """
+ stop_at = time.time() + timeout
+ while time.time() < stop_at:
+ ret = fun()
+ if eval(expr):
+ return ret
+ time.sleep(0.001)
+ raise RuntimeError('timed out (ret=%r)' % ret)
+
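+# Example (illustrative; this is how the process tests below use it):
+#
+# call_until(p.cwd, "ret == os.path.dirname(os.getcwd())")
+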
+
+def retry_before_failing(ntimes=None):
+ """Decorator which runs a test function and retries N times before
+ actually failing.
+ """
+ def decorator(fun):
+ @functools.wraps(fun)
+ def wrapper(*args, **kwargs):
+ exc = None
+ for x in range(ntimes or NO_RETRIES):
+ try:
+ return fun(*args, **kwargs)
+ except AssertionError as err:
+ # keep a reference to the error: a bare raise after
+ # the loop breaks on Python 3, where the handled
+ # exception has already been cleared
+ exc = err
+ raise exc
+ return wrapper
+ return decorator
+
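+# Example (illustrative; the suite applies it the same way below):
+#
+# @retry_before_failing(30)
+# def test_flaky(self):
+# ...
+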
+
+def skip_on_access_denied(only_if=None):
+ """Decorator to Ignore AccessDenied exceptions."""
+ def decorator(fun):
+ @functools.wraps(fun)
+ def wrapper(*args, **kwargs):
+ try:
+ return fun(*args, **kwargs)
+ except psutil.AccessDenied:
+ if only_if is not None:
+ if not only_if:
+ raise
+ msg = "%r was skipped because it raised AccessDenied" \
+ % fun.__name__
+ raise unittest.SkipTest(msg)
+ return wrapper
+ return decorator
+
+
+def skip_on_not_implemented(only_if=None):
+ """Decorator to Ignore NotImplementedError exceptions."""
+ def decorator(fun):
+ @functools.wraps(fun)
+ def wrapper(*args, **kwargs):
+ try:
+ return fun(*args, **kwargs)
+ except NotImplementedError:
+ if only_if is not None:
+ if not only_if:
+ raise
+ msg = "%r was skipped because it raised NotImplementedError" \
+ % fun.__name__
+ raise unittest.SkipTest(msg)
+ return wrapper
+ return decorator
+
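+# Example (illustrative): both skip decorators above are meant to be
+# stacked on tests where the exception is expected on some platforms:
+#
+# @skip_on_not_implemented(only_if=LINUX)
+# def test_io_counters(self):
+# ...
+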
+
+def supports_ipv6():
+ """Return True if IPv6 is supported on this platform."""
+ if not socket.has_ipv6 or not hasattr(socket, "AF_INET6"):
+ return False
+ sock = None
+ try:
+ sock = socket.socket(AF_INET6, SOCK_STREAM)
+ sock.bind(("::1", 0))
+ except (socket.error, socket.gaierror):
+ return False
+ else:
+ return True
+ finally:
+ if sock is not None:
+ sock.close()
+
+
+if WINDOWS:
+ def get_winver():
+ wv = sys.getwindowsversion()
+ if hasattr(wv, 'service_pack_major'): # python >= 2.7
+ sp = wv.service_pack_major or 0
+ else:
+ r = re.search("\s\d$", wv[4])
+ if r:
+ sp = int(r.group(0))
+ else:
+ sp = 0
+ return (wv[0], wv[1], sp)
+
+
+class ThreadTask(threading.Thread):
+ """A thread object used for running process thread tests."""
+
+ def __init__(self):
+ threading.Thread.__init__(self)
+ self._running = False
+ self._interval = None
+ self._flag = threading.Event()
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return '<%s running=%s at %#x>' % (name, self._running, id(self))
+
+ def start(self, interval=0.001):
+ """Start thread and keep it running until an explicit
+ stop() request. Polls for shutdown every 'interval' seconds.
+ """
+ if self._running:
+ raise ValueError("already started")
+ self._interval = interval
+ threading.Thread.start(self)
+ self._flag.wait()
+
+ def run(self):
+ self._running = True
+ self._flag.set()
+ while self._running:
+ time.sleep(self._interval)
+
+ def stop(self):
+ """Stop thread execution and and waits until it is stopped."""
+ if not self._running:
+ raise ValueError("already stopped")
+ self._running = False
+ self.join()
+
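+# Example (illustrative): ThreadTask gives thread-related tests a second
+# thread with a deterministic start/stop life cycle:
+#
+# thread = ThreadTask()
+# thread.start()
+# try:
+# ... # assertions on psutil.Process().num_threads() / .threads()
+# finally:
+# thread.stop()
+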
+
+# ===================================================================
+# --- System-related API tests
+# ===================================================================
+
+class TestSystemAPIs(unittest.TestCase):
+ """Tests for system-related APIs."""
+
+ def setUp(self):
+ safe_remove(TESTFN)
+
+ def tearDown(self):
+ reap_children()
+
+ def test_process_iter(self):
+ self.assertIn(os.getpid(), [x.pid for x in psutil.process_iter()])
+ sproc = get_test_subprocess()
+ self.assertIn(sproc.pid, [x.pid for x in psutil.process_iter()])
+ p = psutil.Process(sproc.pid)
+ p.kill()
+ p.wait()
+ self.assertNotIn(sproc.pid, [x.pid for x in psutil.process_iter()])
+
+ def test_wait_procs(self):
+ def callback(p):
+ l.append(p.pid)
+
+ l = []
+ sproc1 = get_test_subprocess()
+ sproc2 = get_test_subprocess()
+ sproc3 = get_test_subprocess()
+ procs = [psutil.Process(x.pid) for x in (sproc1, sproc2, sproc3)]
+ self.assertRaises(ValueError, psutil.wait_procs, procs, timeout=-1)
+ self.assertRaises(TypeError, psutil.wait_procs, procs, callback=1)
+ t = time.time()
+ gone, alive = psutil.wait_procs(procs, timeout=0.01, callback=callback)
+
+ self.assertLess(time.time() - t, 0.5)
+ self.assertEqual(gone, [])
+ self.assertEqual(len(alive), 3)
+ self.assertEqual(l, [])
+ for p in alive:
+ self.assertFalse(hasattr(p, 'returncode'))
+
+ @retry_before_failing(30)
+ def test(procs, callback):
+ gone, alive = psutil.wait_procs(procs, timeout=0.03,
+ callback=callback)
+ self.assertEqual(len(gone), 1)
+ self.assertEqual(len(alive), 2)
+ return gone, alive
+
+ sproc3.terminate()
+ gone, alive = test(procs, callback)
+ self.assertIn(sproc3.pid, [x.pid for x in gone])
+ if POSIX:
+ self.assertEqual(gone.pop().returncode, signal.SIGTERM)
+ else:
+ self.assertEqual(gone.pop().returncode, 1)
+ self.assertEqual(l, [sproc3.pid])
+ for p in alive:
+ self.assertFalse(hasattr(p, 'returncode'))
+
+ @retry_before_failing(30)
+ def test(procs, callback):
+ gone, alive = psutil.wait_procs(procs, timeout=0.03,
+ callback=callback)
+ self.assertEqual(len(gone), 3)
+ self.assertEqual(len(alive), 0)
+ return gone, alive
+
+ sproc1.terminate()
+ sproc2.terminate()
+ gone, alive = test(procs, callback)
+ self.assertEqual(set(l), set([sproc1.pid, sproc2.pid, sproc3.pid]))
+ for p in gone:
+ self.assertTrue(hasattr(p, 'returncode'))
+
+ def test_wait_procs_no_timeout(self):
+ sproc1 = get_test_subprocess()
+ sproc2 = get_test_subprocess()
+ sproc3 = get_test_subprocess()
+ procs = [psutil.Process(x.pid) for x in (sproc1, sproc2, sproc3)]
+ for p in procs:
+ p.terminate()
+ gone, alive = psutil.wait_procs(procs)
+
+ def test_boot_time(self):
+ bt = psutil.boot_time()
+ self.assertIsInstance(bt, float)
+ self.assertGreater(bt, 0)
+ self.assertLess(bt, time.time())
+
+ @unittest.skipUnless(POSIX, 'posix only')
+ def test_PAGESIZE(self):
+ # pagesize is used internally to perform different calculations
+ # and it's determined by using SC_PAGE_SIZE; make sure
+ # getpagesize() returns the same value.
+ import resource
+ self.assertEqual(os.sysconf("SC_PAGE_SIZE"), resource.getpagesize())
+
+ def test_virtual_memory(self):
+ mem = psutil.virtual_memory()
+ assert mem.total > 0, mem
+ assert mem.available > 0, mem
+ assert 0 <= mem.percent <= 100, mem
+ assert mem.used > 0, mem
+ assert mem.free >= 0, mem
+ for name in mem._fields:
+ value = getattr(mem, name)
+ if name != 'percent':
+ self.assertIsInstance(value, (int, long))
+ if name != 'total':
+ if not value >= 0:
+ self.fail("%r < 0 (%s)" % (name, value))
+ if value > mem.total:
+ self.fail("%r > total (total=%s, %s=%s)"
+ % (name, mem.total, name, value))
+
+ def test_swap_memory(self):
+ mem = psutil.swap_memory()
+ assert mem.total >= 0, mem
+ assert mem.used >= 0, mem
+ if mem.total > 0:
+ assert mem.free > 0, mem
+ else:
+ # likely a system with no swap partition
+ assert mem.free == 0, mem
+ assert 0 <= mem.percent <= 100, mem
+ assert mem.sin >= 0, mem
+ assert mem.sout >= 0, mem
+
+ def test_pid_exists(self):
+ sproc = get_test_subprocess(wait=True)
+ self.assertTrue(psutil.pid_exists(sproc.pid))
+ p = psutil.Process(sproc.pid)
+ p.kill()
+ p.wait()
+ self.assertFalse(psutil.pid_exists(sproc.pid))
+ self.assertFalse(psutil.pid_exists(-1))
+ self.assertEqual(psutil.pid_exists(0), 0 in psutil.pids())
+
+ def test_pid_exists_2(self):
+ reap_children()
+ pids = psutil.pids()
+ for pid in pids:
+ try:
+ assert psutil.pid_exists(pid)
+ except AssertionError:
+ # in case the process disappeared in the meantime fail only
+ # if it is no longer in psutil.pids()
+ time.sleep(.1)
+ if pid in psutil.pids():
+ self.fail(pid)
+ pids = range(max(pids) + 5000, max(pids) + 6000)
+ for pid in pids:
+ self.assertFalse(psutil.pid_exists(pid), msg=pid)
+
+ def test_pids(self):
+ plist = [x.pid for x in psutil.process_iter()]
+ pidlist = psutil.pids()
+ self.assertEqual(sorted(plist), sorted(pidlist))
+ # make sure every pid is unique
+ self.assertEqual(len(pidlist), len(set(pidlist)))
+
+ def test_test(self):
+ # test for psutil.test() function
+ stdout = sys.stdout
+ sys.stdout = DEVNULL
+ try:
+ psutil.test()
+ finally:
+ sys.stdout = stdout
+
+ def test_cpu_count(self):
+ logical = psutil.cpu_count()
+ self.assertEqual(logical, len(psutil.cpu_times(percpu=True)))
+ self.assertGreaterEqual(logical, 1)
+ #
+ if LINUX:
+ with open("/proc/cpuinfo") as fd:
+ cpuinfo_data = fd.read()
+ if "physical id" not in cpuinfo_data:
+ raise unittest.SkipTest("cpuinfo doesn't include physical id")
+ physical = psutil.cpu_count(logical=False)
+ self.assertGreaterEqual(physical, 1)
+ self.assertGreaterEqual(logical, physical)
+
+ def test_sys_cpu_times(self):
+ total = 0
+ times = psutil.cpu_times()
+ sum(times)
+ for cp_time in times:
+ self.assertIsInstance(cp_time, float)
+ self.assertGreaterEqual(cp_time, 0.0)
+ total += cp_time
+ self.assertEqual(total, sum(times))
+ str(times)
+ if not WINDOWS:
+ # CPU times are always supposed to increase over time or
+ # remain the same but never go backwards, see:
+ # https://github.com/giampaolo/psutil/issues/392
+ last = psutil.cpu_times()
+ for x in range(100):
+ new = psutil.cpu_times()
+ for field in new._fields:
+ new_t = getattr(new, field)
+ last_t = getattr(last, field)
+ self.assertGreaterEqual(new_t, last_t,
+ msg="%s %s" % (new_t, last_t))
+ last = new
+
+ def test_sys_cpu_times2(self):
+ t1 = sum(psutil.cpu_times())
+ time.sleep(0.1)
+ t2 = sum(psutil.cpu_times())
+ difference = t2 - t1
+ if not difference >= 0.05:
+ self.fail("difference %s" % difference)
+
+ def test_sys_per_cpu_times(self):
+ for times in psutil.cpu_times(percpu=True):
+ total = 0
+ sum(times)
+ for cp_time in times:
+ self.assertIsInstance(cp_time, float)
+ self.assertGreaterEqual(cp_time, 0.0)
+ total += cp_time
+ self.assertEqual(total, sum(times))
+ str(times)
+ self.assertEqual(len(psutil.cpu_times(percpu=True)[0]),
+ len(psutil.cpu_times(percpu=False)))
+
+ # Note: in theory CPU times are always supposed to increase over
+ # time or remain the same but never go backwards. In practice
+ # sometimes this is not the case.
+ # This issue seemed to afflict Windows:
+ # https://github.com/giampaolo/psutil/issues/392
+ # ...but it turns out also Linux (rarely) behaves the same.
+ # last = psutil.cpu_times(percpu=True)
+ # for x in range(100):
+ # new = psutil.cpu_times(percpu=True)
+ # for index in range(len(new)):
+ # newcpu = new[index]
+ # lastcpu = last[index]
+ # for field in newcpu._fields:
+ # new_t = getattr(newcpu, field)
+ # last_t = getattr(lastcpu, field)
+ # self.assertGreaterEqual(
+ # new_t, last_t, msg="%s %s" % (lastcpu, newcpu))
+ # last = new
+
+ def test_sys_per_cpu_times_2(self):
+ tot1 = psutil.cpu_times(percpu=True)
+ stop_at = time.time() + 0.1
+ while True:
+ if time.time() >= stop_at:
+ break
+ tot2 = psutil.cpu_times(percpu=True)
+ for t1, t2 in zip(tot1, tot2):
+ t1, t2 = sum(t1), sum(t2)
+ difference = t2 - t1
+ if difference >= 0.05:
+ return
+ self.fail()
+
+ def _test_cpu_percent(self, percent, last_ret, new_ret):
+ try:
+ self.assertIsInstance(percent, float)
+ self.assertGreaterEqual(percent, 0.0)
+ self.assertIsNot(percent, -0.0)
+ self.assertLessEqual(percent, 100.0 * psutil.cpu_count())
+ except AssertionError as err:
+ raise AssertionError("\n%s\nlast=%s\nnew=%s" % (
+ err, pprint.pformat(last_ret), pprint.pformat(new_ret)))
+
+ def test_sys_cpu_percent(self):
+ last = psutil.cpu_percent(interval=0.001)
+ for x in range(100):
+ new = psutil.cpu_percent(interval=None)
+ self._test_cpu_percent(new, last, new)
+ last = new
+
+ def test_sys_per_cpu_percent(self):
+ last = psutil.cpu_percent(interval=0.001, percpu=True)
+ self.assertEqual(len(last), psutil.cpu_count())
+ for x in range(100):
+ new = psutil.cpu_percent(interval=None, percpu=True)
+ for percent in new:
+ self._test_cpu_percent(percent, last, new)
+ last = new
+
+ def test_sys_cpu_times_percent(self):
+ last = psutil.cpu_times_percent(interval=0.001)
+ for x in range(100):
+ new = psutil.cpu_times_percent(interval=None)
+ for percent in new:
+ self._test_cpu_percent(percent, last, new)
+ self._test_cpu_percent(sum(new), last, new)
+ last = new
+
+ def test_sys_per_cpu_times_percent(self):
+ last = psutil.cpu_times_percent(interval=0.001, percpu=True)
+ self.assertEqual(len(last), psutil.cpu_count())
+ for x in range(100):
+ new = psutil.cpu_times_percent(interval=None, percpu=True)
+ for cpu in new:
+ for percent in cpu:
+ self._test_cpu_percent(percent, last, new)
+ self._test_cpu_percent(sum(cpu), last, new)
+ last = new
+
+ def test_sys_per_cpu_times_percent_negative(self):
+ # see: https://github.com/giampaolo/psutil/issues/645
+ psutil.cpu_times_percent(percpu=True)
+ zero_times = [nt._make([0] * len(nt._fields))
+ for nt in psutil.cpu_times(percpu=True)]
+ with mock.patch('psutil.cpu_times', return_value=zero_times):
+ for cpu in psutil.cpu_times_percent(percpu=True):
+ for percent in cpu:
+ self._test_cpu_percent(percent, None, None)
+
+ @unittest.skipIf(POSIX and not hasattr(os, 'statvfs'),
+ "os.statvfs() function not available on this platform")
+ def test_disk_usage(self):
+ usage = psutil.disk_usage(os.getcwd())
+ assert usage.total > 0, usage
+ assert usage.used > 0, usage
+ assert usage.free > 0, usage
+ assert usage.total > usage.used, usage
+ assert usage.total > usage.free, usage
+ assert 0 <= usage.percent <= 100, usage.percent
+ if hasattr(shutil, 'disk_usage'):
+ # py >= 3.3, see: http://bugs.python.org/issue12442
+ shutil_usage = shutil.disk_usage(os.getcwd())
+ tolerance = 5 * 1024 * 1024 # 5MB
+ self.assertEqual(usage.total, shutil_usage.total)
+ self.assertAlmostEqual(usage.free, shutil_usage.free,
+ delta=tolerance)
+ self.assertAlmostEqual(usage.used, shutil_usage.used,
+ delta=tolerance)
+
+ # if path does not exist OSError ENOENT is expected across
+ # all platforms
+ fname = tempfile.mktemp()
+ try:
+ psutil.disk_usage(fname)
+ except OSError as err:
+ if err.args[0] != errno.ENOENT:
+ raise
+ else:
+ self.fail("OSError not raised")
+
+ @unittest.skipIf(POSIX and not hasattr(os, 'statvfs'),
+ "os.statvfs() function not available on this platform")
+ def test_disk_usage_unicode(self):
+ # see: https://github.com/giampaolo/psutil/issues/416
+ # XXX this test is not really reliable as it always fails on
+ # Python 3.X (2.X is fine)
+ try:
+ safe_rmdir(TESTFN_UNICODE)
+ os.mkdir(TESTFN_UNICODE)
+ psutil.disk_usage(TESTFN_UNICODE)
+ safe_rmdir(TESTFN_UNICODE)
+ except UnicodeEncodeError:
+ pass
+
+ @unittest.skipIf(POSIX and not hasattr(os, 'statvfs'),
+ "os.statvfs() function not available on this platform")
+ @unittest.skipIf(LINUX and TRAVIS, "unknown failure on travis")
+ def test_disk_partitions(self):
+ # all = False
+ ls = psutil.disk_partitions(all=False)
+ self.assertTrue(ls, msg=ls)
+ for disk in ls:
+ if WINDOWS and 'cdrom' in disk.opts:
+ continue
+ if not POSIX:
+ assert os.path.exists(disk.device), disk
+ else:
+ # we cannot make any assumption about this, see:
+ # http://goo.gl/p9c43
+ disk.device
+ if SUNOS:
+ # on solaris apparently mount points can also be files
+ assert os.path.exists(disk.mountpoint), disk
+ else:
+ assert os.path.isdir(disk.mountpoint), disk
+ assert disk.fstype, disk
+ self.assertIsInstance(disk.opts, str)
+
+ # all = True
+ ls = psutil.disk_partitions(all=True)
+ self.assertTrue(ls, msg=ls)
+ for disk in psutil.disk_partitions(all=True):
+ if not WINDOWS:
+ try:
+ os.stat(disk.mountpoint)
+ except OSError as err:
+ # http://mail.python.org/pipermail/python-dev/
+ # 2012-June/120787.html
+ if err.errno not in (errno.EPERM, errno.EACCES):
+ raise
+ else:
+ if SUNOS:
+ # on solaris apparently mount points can also be files
+ assert os.path.exists(disk.mountpoint), disk
+ else:
+ assert os.path.isdir(disk.mountpoint), disk
+ self.assertIsInstance(disk.fstype, str)
+ self.assertIsInstance(disk.opts, str)
+
+ def find_mount_point(path):
+ path = os.path.abspath(path)
+ while not os.path.ismount(path):
+ path = os.path.dirname(path)
+ return path
+
+ mount = find_mount_point(__file__)
+ mounts = [x.mountpoint for x in psutil.disk_partitions(all=True)]
+ self.assertIn(mount, mounts)
+ psutil.disk_usage(mount)
+
+ @skip_on_access_denied()
+ def test_net_connections(self):
+ def check(cons, families, types_):
+ for conn in cons:
+ self.assertIn(conn.family, families, msg=conn)
+ if conn.family != getattr(socket, 'AF_UNIX', object()):
+ self.assertIn(conn.type, types_, msg=conn)
+
+ from psutil._common import conn_tmap
+ for kind, groups in conn_tmap.items():
+ if SUNOS and kind == 'unix':
+ continue
+ families, types_ = groups
+ cons = psutil.net_connections(kind)
+ self.assertEqual(len(cons), len(set(cons)))
+ check(cons, families, types_)
+
+ def test_net_io_counters(self):
+ def check_ntuple(nt):
+ self.assertEqual(nt[0], nt.bytes_sent)
+ self.assertEqual(nt[1], nt.bytes_recv)
+ self.assertEqual(nt[2], nt.packets_sent)
+ self.assertEqual(nt[3], nt.packets_recv)
+ self.assertEqual(nt[4], nt.errin)
+ self.assertEqual(nt[5], nt.errout)
+ self.assertEqual(nt[6], nt.dropin)
+ self.assertEqual(nt[7], nt.dropout)
+ assert nt.bytes_sent >= 0, nt
+ assert nt.bytes_recv >= 0, nt
+ assert nt.packets_sent >= 0, nt
+ assert nt.packets_recv >= 0, nt
+ assert nt.errin >= 0, nt
+ assert nt.errout >= 0, nt
+ assert nt.dropin >= 0, nt
+ assert nt.dropout >= 0, nt
+
+ ret = psutil.net_io_counters(pernic=False)
+ check_ntuple(ret)
+ ret = psutil.net_io_counters(pernic=True)
+ self.assertNotEqual(ret, [])
+ for key in ret:
+ self.assertTrue(key)
+ check_ntuple(ret[key])
+
+ def test_net_if_addrs(self):
+ nics = psutil.net_if_addrs()
+ assert nics, nics
+
+ # Not reliable on all platforms (net_if_addrs() reports more
+ # interfaces).
+ # self.assertEqual(sorted(nics.keys()),
+ # sorted(psutil.net_io_counters(pernic=True).keys()))
+
+ families = set([socket.AF_INET, AF_INET6, psutil.AF_LINK])
+ for nic, addrs in nics.items():
+ self.assertEqual(len(set(addrs)), len(addrs))
+ for addr in addrs:
+ self.assertIsInstance(addr.family, int)
+ self.assertIsInstance(addr.address, str)
+ self.assertIsInstance(addr.netmask, (str, type(None)))
+ self.assertIsInstance(addr.broadcast, (str, type(None)))
+ self.assertIn(addr.family, families)
+ if sys.version_info >= (3, 4):
+ self.assertIsInstance(addr.family, enum.IntEnum)
+ if addr.family == socket.AF_INET:
+ s = socket.socket(addr.family)
+ with contextlib.closing(s):
+ s.bind((addr.address, 0))
+ elif addr.family == socket.AF_INET6:
+ info = socket.getaddrinfo(
+ addr.address, 0, socket.AF_INET6, socket.SOCK_STREAM,
+ 0, socket.AI_PASSIVE)[0]
+ af, socktype, proto, canonname, sa = info
+ s = socket.socket(af, socktype, proto)
+ with contextlib.closing(s):
+ s.bind(sa)
+ for ip in (addr.address, addr.netmask, addr.broadcast):
+ if ip is not None:
+ # TODO: skip AF_INET6 for now because I get:
+ # AddressValueError: Only hex digits permitted in
+ # u'c6f3%lxcbr0' in u'fe80::c8e0:fff:fe54:c6f3%lxcbr0'
+ if addr.family != AF_INET6:
+ check_ip_address(ip, addr.family)
+
+ if BSD or OSX or SUNOS:
+ if hasattr(socket, "AF_LINK"):
+ self.assertEqual(psutil.AF_LINK, socket.AF_LINK)
+ elif LINUX:
+ self.assertEqual(psutil.AF_LINK, socket.AF_PACKET)
+ elif WINDOWS:
+ self.assertEqual(psutil.AF_LINK, -1)
+
+ @unittest.skipIf(TRAVIS, "EPERM on travis")
+ def test_net_if_stats(self):
+ nics = psutil.net_if_stats()
+ assert nics, nics
+ all_duplexes = (psutil.NIC_DUPLEX_FULL,
+ psutil.NIC_DUPLEX_HALF,
+ psutil.NIC_DUPLEX_UNKNOWN)
+ for nic, stats in nics.items():
+ isup, duplex, speed, mtu = stats
+ self.assertIsInstance(isup, bool)
+ self.assertIn(duplex, all_duplexes)
+ self.assertGreaterEqual(speed, 0)
+ self.assertGreaterEqual(mtu, 0)
+
+ @unittest.skipIf(LINUX and not os.path.exists('/proc/diskstats'),
+ '/proc/diskstats not available on this linux version')
+ @unittest.skipIf(APPVEYOR,
+ "can't find any physical disk on Appveyor")
+ def test_disk_io_counters(self):
+ def check_ntuple(nt):
+ self.assertEqual(nt[0], nt.read_count)
+ self.assertEqual(nt[1], nt.write_count)
+ self.assertEqual(nt[2], nt.read_bytes)
+ self.assertEqual(nt[3], nt.write_bytes)
+ self.assertEqual(nt[4], nt.read_time)
+ self.assertEqual(nt[5], nt.write_time)
+ assert nt.read_count >= 0, nt
+ assert nt.write_count >= 0, nt
+ assert nt.read_bytes >= 0, nt
+ assert nt.write_bytes >= 0, nt
+ assert nt.read_time >= 0, nt
+ assert nt.write_time >= 0, nt
+
+ ret = psutil.disk_io_counters(perdisk=False)
+ check_ntuple(ret)
+ ret = psutil.disk_io_counters(perdisk=True)
+ # make sure there are no duplicates
+ self.assertEqual(len(ret), len(set(ret)))
+ for key in ret:
+ assert key, key
+ check_ntuple(ret[key])
+ if LINUX and key[-1].isdigit():
+ # if 'sda1' is listed 'sda' shouldn't, see:
+ # https://github.com/giampaolo/psutil/issues/338
+ while key[-1].isdigit():
+ key = key[:-1]
+ self.assertNotIn(key, ret.keys())
+
+ def test_users(self):
+ users = psutil.users()
+ if not APPVEYOR:
+ self.assertNotEqual(users, [])
+ for user in users:
+ assert user.name, user
+ user.terminal
+ user.host
+ assert user.started > 0.0, user
+ datetime.datetime.fromtimestamp(user.started)
+
+
+# ===================================================================
+# --- psutil.Process class tests
+# ===================================================================
+
+class TestProcess(unittest.TestCase):
+ """Tests for psutil.Process class."""
+
+ def setUp(self):
+ safe_remove(TESTFN)
+
+ def tearDown(self):
+ reap_children()
+
+ def test_pid(self):
+ self.assertEqual(psutil.Process().pid, os.getpid())
+ sproc = get_test_subprocess()
+ self.assertEqual(psutil.Process(sproc.pid).pid, sproc.pid)
+
+ def test_kill(self):
+ sproc = get_test_subprocess(wait=True)
+ test_pid = sproc.pid
+ p = psutil.Process(test_pid)
+ p.kill()
+ sig = p.wait()
+ self.assertFalse(psutil.pid_exists(test_pid))
+ if POSIX:
+ self.assertEqual(sig, signal.SIGKILL)
+
+ def test_terminate(self):
+ sproc = get_test_subprocess(wait=True)
+ test_pid = sproc.pid
+ p = psutil.Process(test_pid)
+ p.terminate()
+ sig = p.wait()
+ self.assertFalse(psutil.pid_exists(test_pid))
+ if POSIX:
+ self.assertEqual(sig, signal.SIGTERM)
+
+ def test_send_signal(self):
+ sig = signal.SIGKILL if POSIX else signal.SIGTERM
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.send_signal(sig)
+ exit_sig = p.wait()
+ self.assertFalse(psutil.pid_exists(p.pid))
+ if POSIX:
+ self.assertEqual(exit_sig, sig)
+ #
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.send_signal(sig)
+ with mock.patch('psutil.os.kill',
+ side_effect=OSError(errno.ESRCH, "")) as fun:
+ with self.assertRaises(psutil.NoSuchProcess):
+ p.send_signal(sig)
+ assert fun.called
+ #
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.send_signal(sig)
+ with mock.patch('psutil.os.kill',
+ side_effect=OSError(errno.EPERM, "")) as fun:
+ with self.assertRaises(psutil.AccessDenied):
+ p.send_signal(sig)
+ assert fun.called
+
+ def test_wait(self):
+ # check exit code signal
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.kill()
+ code = p.wait()
+ if POSIX:
+ self.assertEqual(code, signal.SIGKILL)
+ else:
+ self.assertEqual(code, 0)
+ self.assertFalse(p.is_running())
+
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.terminate()
+ code = p.wait()
+ if POSIX:
+ self.assertEqual(code, signal.SIGTERM)
+ else:
+ self.assertEqual(code, 0)
+ self.assertFalse(p.is_running())
+
+ # check sys.exit() code
+ code = "import time, sys; time.sleep(0.01); sys.exit(5);"
+ sproc = get_test_subprocess([PYTHON, "-c", code])
+ p = psutil.Process(sproc.pid)
+ self.assertEqual(p.wait(), 5)
+ self.assertFalse(p.is_running())
+
+ # Test wait() issued twice.
+ # It is not supposed to raise NSP when the process is gone.
+ # On UNIX this should return None, on Windows it should keep
+ # returning the exit code.
+ sproc = get_test_subprocess([PYTHON, "-c", code])
+ p = psutil.Process(sproc.pid)
+ self.assertEqual(p.wait(), 5)
+ self.assertIn(p.wait(), (5, None))
+
+ # test timeout
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.name()
+ self.assertRaises(psutil.TimeoutExpired, p.wait, 0.01)
+
+ # timeout < 0 not allowed
+ self.assertRaises(ValueError, p.wait, -1)
+
+ # XXX why is this skipped on Windows?
+ @unittest.skipUnless(POSIX, 'skipped on Windows')
+ def test_wait_non_children(self):
+ # test wait() against processes which are not our children
+ code = "import sys;"
+ code += "from subprocess import Popen, PIPE;"
+ code += "cmd = ['%s', '-c', 'import time; time.sleep(60)'];" % PYTHON
+ code += "sp = Popen(cmd, stdout=PIPE);"
+ code += "sys.stdout.write(str(sp.pid));"
+ sproc = get_test_subprocess([PYTHON, "-c", code],
+ stdout=subprocess.PIPE)
+ grandson_pid = int(sproc.stdout.read())
+ grandson_proc = psutil.Process(grandson_pid)
+ try:
+ self.assertRaises(psutil.TimeoutExpired, grandson_proc.wait, 0.01)
+ grandson_proc.kill()
+ ret = grandson_proc.wait()
+ self.assertEqual(ret, None)
+ finally:
+ if grandson_proc.is_running():
+ grandson_proc.kill()
+ grandson_proc.wait()
+
+ def test_wait_timeout_0(self):
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ self.assertRaises(psutil.TimeoutExpired, p.wait, 0)
+ p.kill()
+ stop_at = time.time() + 2
+ while True:
+ try:
+ code = p.wait(0)
+ except psutil.TimeoutExpired:
+ if time.time() >= stop_at:
+ raise
+ else:
+ break
+ if POSIX:
+ self.assertEqual(code, signal.SIGKILL)
+ else:
+ self.assertEqual(code, 0)
+ self.assertFalse(p.is_running())
+
+ def test_cpu_percent(self):
+ p = psutil.Process()
+ p.cpu_percent(interval=0.001)
+ p.cpu_percent(interval=0.001)
+ for x in range(100):
+ percent = p.cpu_percent(interval=None)
+ self.assertIsInstance(percent, float)
+ self.assertGreaterEqual(percent, 0.0)
+ if not POSIX:
+ self.assertLessEqual(percent, 100.0)
+ else:
+ self.assertGreaterEqual(percent, 0.0)
+
+ def test_cpu_times(self):
+ times = psutil.Process().cpu_times()
+ assert (times.user > 0.0) or (times.system > 0.0), times
+ # make sure returned values can be pretty printed with strftime
+ time.strftime("%H:%M:%S", time.localtime(times.user))
+ time.strftime("%H:%M:%S", time.localtime(times.system))
+
+ # Test Process.cpu_times() against os.times()
+ # os.times() is broken on Python 2.6
+ # http://bugs.python.org/issue1040026
+ # XXX fails on OSX: not sure if it's for os.times(). We should
+ # try this with Python 2.7 and re-enable the test.
+
+ @unittest.skipUnless(sys.version_info > (2, 6, 1) and not OSX,
+ 'os.times() is not reliable on this Python version')
+ def test_cpu_times2(self):
+ user_time, kernel_time = psutil.Process().cpu_times()
+ utime, ktime = os.times()[:2]
+
+ # Use os.times()[:2] as base values to compare our results
+ # using a tolerance of +/- 0.1 seconds.
+ # It will fail if the difference between the values is > 0.1s.
+ if (max([user_time, utime]) - min([user_time, utime])) > 0.1:
+ self.fail("expected: %s, found: %s" % (utime, user_time))
+
+ if (max([kernel_time, ktime]) - min([kernel_time, ktime])) > 0.1:
+ self.fail("expected: %s, found: %s" % (ktime, kernel_time))
+
+ def test_create_time(self):
+ sproc = get_test_subprocess(wait=True)
+ now = time.time()
+ p = psutil.Process(sproc.pid)
+ create_time = p.create_time()
+
+ # Use time.time() as base value to compare our result using a
+ # tolerance of +/- 1 second.
+ # It will fail if the difference between the values is > 2s.
+ difference = abs(create_time - now)
+ if difference > 2:
+ self.fail("expected: %s, found: %s, difference: %s"
+ % (now, create_time, difference))
+
+ # make sure returned value can be pretty printed with strftime
+ time.strftime("%Y %m %d %H:%M:%S", time.localtime(p.create_time()))
+
+ @unittest.skipIf(WINDOWS, 'Windows only')
+ def test_terminal(self):
+ terminal = psutil.Process().terminal()
+ if sys.stdin.isatty():
+ self.assertEqual(terminal, sh('tty'))
+ else:
+ assert terminal, repr(terminal)
+
+ @unittest.skipUnless(LINUX or BSD or WINDOWS,
+ 'not available on this platform')
+ @skip_on_not_implemented(only_if=LINUX)
+ def test_io_counters(self):
+ p = psutil.Process()
+ # test reads
+ io1 = p.io_counters()
+ with open(PYTHON, 'rb') as f:
+ f.read()
+ io2 = p.io_counters()
+ if not BSD:
+ assert io2.read_count > io1.read_count, (io1, io2)
+ self.assertEqual(io2.write_count, io1.write_count)
+ assert io2.read_bytes >= io1.read_bytes, (io1, io2)
+ assert io2.write_bytes >= io1.write_bytes, (io1, io2)
+ # test writes
+ io1 = p.io_counters()
+ with tempfile.TemporaryFile(prefix=TESTFILE_PREFIX) as f:
+ if PY3:
+ f.write(bytes("x" * 1000000, 'ascii'))
+ else:
+ f.write("x" * 1000000)
+ io2 = p.io_counters()
+ assert io2.write_count >= io1.write_count, (io1, io2)
+ assert io2.write_bytes >= io1.write_bytes, (io1, io2)
+ assert io2.read_count >= io1.read_count, (io1, io2)
+ assert io2.read_bytes >= io1.read_bytes, (io1, io2)
+
+ @unittest.skipUnless(LINUX or (WINDOWS and get_winver() >= WIN_VISTA),
+ 'Linux and Windows Vista only')
+ @unittest.skipIf(LINUX and TRAVIS, "unknown failure on travis")
+ def test_ionice(self):
+ if LINUX:
+ from psutil import (IOPRIO_CLASS_NONE, IOPRIO_CLASS_RT,
+ IOPRIO_CLASS_BE, IOPRIO_CLASS_IDLE)
+ self.assertEqual(IOPRIO_CLASS_NONE, 0)
+ self.assertEqual(IOPRIO_CLASS_RT, 1)
+ self.assertEqual(IOPRIO_CLASS_BE, 2)
+ self.assertEqual(IOPRIO_CLASS_IDLE, 3)
+ p = psutil.Process()
+ try:
+ p.ionice(2)
+ ioclass, value = p.ionice()
+ if enum is not None:
+ self.assertIsInstance(ioclass, enum.IntEnum)
+ self.assertEqual(ioclass, 2)
+ self.assertEqual(value, 4)
+ #
+ p.ionice(3)
+ ioclass, value = p.ionice()
+ self.assertEqual(ioclass, 3)
+ self.assertEqual(value, 0)
+ #
+ p.ionice(2, 0)
+ ioclass, value = p.ionice()
+ self.assertEqual(ioclass, 2)
+ self.assertEqual(value, 0)
+ p.ionice(2, 7)
+ ioclass, value = p.ionice()
+ self.assertEqual(ioclass, 2)
+ self.assertEqual(value, 7)
+ #
+ self.assertRaises(ValueError, p.ionice, 2, 10)
+ self.assertRaises(ValueError, p.ionice, 2, -1)
+ self.assertRaises(ValueError, p.ionice, 4)
+ self.assertRaises(TypeError, p.ionice, 2, "foo")
+ self.assertRaisesRegexp(
+ ValueError, "can't specify value with IOPRIO_CLASS_NONE",
+ p.ionice, psutil.IOPRIO_CLASS_NONE, 1)
+ self.assertRaisesRegexp(
+ ValueError, "can't specify value with IOPRIO_CLASS_IDLE",
+ p.ionice, psutil.IOPRIO_CLASS_IDLE, 1)
+ self.assertRaisesRegexp(
+ ValueError, "'ioclass' argument must be specified",
+ p.ionice, value=1)
+ finally:
+ p.ionice(IOPRIO_CLASS_NONE)
+ else:
+ p = psutil.Process()
+ original = p.ionice()
+ self.assertIsInstance(original, int)
+ try:
+ value = 0 # very low
+ if original == value:
+ value = 1 # low
+ p.ionice(value)
+ self.assertEqual(p.ionice(), value)
+ finally:
+ p.ionice(original)
+ #
+ self.assertRaises(ValueError, p.ionice, 3)
+ self.assertRaises(TypeError, p.ionice, 2, 1)
+
+ @unittest.skipUnless(LINUX and RLIMIT_SUPPORT,
+ "only available on Linux >= 2.6.36")
+ def test_rlimit_get(self):
+ import resource
+ p = psutil.Process(os.getpid())
+ names = [x for x in dir(psutil) if x.startswith('RLIMIT')]
+ assert names, names
+ for name in names:
+ value = getattr(psutil, name)
+ self.assertGreaterEqual(value, 0)
+ if name in dir(resource):
+ self.assertEqual(value, getattr(resource, name))
+ self.assertEqual(p.rlimit(value), resource.getrlimit(value))
+ else:
+ ret = p.rlimit(value)
+ self.assertEqual(len(ret), 2)
+ self.assertGreaterEqual(ret[0], -1)
+ self.assertGreaterEqual(ret[1], -1)
+
+ @unittest.skipUnless(LINUX and RLIMIT_SUPPORT,
+ "only available on Linux >= 2.6.36")
+ def test_rlimit_set(self):
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.rlimit(psutil.RLIMIT_NOFILE, (5, 5))
+ self.assertEqual(p.rlimit(psutil.RLIMIT_NOFILE), (5, 5))
+ # If pid is 0 prlimit() applies to the calling process and
+ # we don't want that.
+ with self.assertRaises(ValueError):
+ psutil._psplatform.Process(0).rlimit(0)
+ with self.assertRaises(ValueError):
+ p.rlimit(psutil.RLIMIT_NOFILE, (5, 5, 5))
+
+ def test_num_threads(self):
+ # on certain platforms such as Linux we might test for exact
+ # thread number, since we always have 1 thread per process,
+ # but this does not apply across all platforms (OSX, Windows)
+ p = psutil.Process()
+ step1 = p.num_threads()
+
+ thread = ThreadTask()
+ thread.start()
+ try:
+ step2 = p.num_threads()
+ self.assertEqual(step2, step1 + 1)
+ thread.stop()
+ finally:
+ if thread._running:
+ thread.stop()
+
+ @unittest.skipUnless(WINDOWS, 'Windows only')
+ def test_num_handles(self):
+ # a better test is done later in test/_windows.py
+ p = psutil.Process()
+ self.assertGreater(p.num_handles(), 0)
+
+ def test_threads(self):
+ p = psutil.Process()
+ step1 = p.threads()
+
+ thread = ThreadTask()
+ thread.start()
+
+ try:
+ step2 = p.threads()
+ self.assertEqual(len(step2), len(step1) + 1)
+ # on Linux, first thread id is supposed to be this process
+ if LINUX:
+ self.assertEqual(step2[0].id, os.getpid())
+ athread = step2[0]
+ # test named tuple
+ self.assertEqual(athread.id, athread[0])
+ self.assertEqual(athread.user_time, athread[1])
+ self.assertEqual(athread.system_time, athread[2])
+ # test num threads
+ thread.stop()
+ finally:
+ if thread._running:
+ thread.stop()
+
+ def test_memory_info(self):
+ p = psutil.Process()
+
+ # step 1 - get a base value to compare our results
+ rss1, vms1 = p.memory_info()
+ percent1 = p.memory_percent()
+ self.assertGreater(rss1, 0)
+ self.assertGreater(vms1, 0)
+
+ # step 2 - allocate some memory
+ memarr = [None] * 1500000
+
+ rss2, vms2 = p.memory_info()
+ percent2 = p.memory_percent()
+ # make sure that the memory usage bumped up
+ self.assertGreater(rss2, rss1)
+ self.assertGreaterEqual(vms2, vms1) # vms might be equal
+ self.assertGreater(percent2, percent1)
+ del memarr
+
+ # def test_memory_info_ex(self):
+ # # tested later in fetch all test suite
+
+ def test_memory_maps(self):
+ p = psutil.Process()
+ maps = p.memory_maps()
+ paths = [x for x in maps]
+ self.assertEqual(len(paths), len(set(paths)))
+ ext_maps = p.memory_maps(grouped=False)
+
+ for nt in maps:
+ if not nt.path.startswith('['):
+ assert os.path.isabs(nt.path), nt.path
+ if POSIX:
+ assert os.path.exists(nt.path), nt.path
+ else:
+ # XXX - On Windows we have this strange behavior with
+ # 64 bit dlls: they are visible via explorer but cannot
+ # be accessed via os.stat() (wtf?).
+ if '64' not in os.path.basename(nt.path):
+ assert os.path.exists(nt.path), nt.path
+ for nt in ext_maps:
+ for fname in nt._fields:
+ value = getattr(nt, fname)
+ if fname == 'path':
+ continue
+ elif fname in ('addr', 'perms'):
+ assert value, value
+ else:
+ self.assertIsInstance(value, (int, long))
+ assert value >= 0, value
+
+ def test_memory_percent(self):
+ p = psutil.Process()
+ self.assertGreater(p.memory_percent(), 0.0)
+
+ def test_is_running(self):
+ sproc = get_test_subprocess(wait=True)
+ p = psutil.Process(sproc.pid)
+ assert p.is_running()
+ assert p.is_running()
+ p.kill()
+ p.wait()
+ assert not p.is_running()
+ assert not p.is_running()
+
+ def test_exe(self):
+ sproc = get_test_subprocess(wait=True)
+ exe = psutil.Process(sproc.pid).exe()
+ try:
+ self.assertEqual(exe, PYTHON)
+ except AssertionError:
+ if WINDOWS and len(exe) == len(PYTHON):
+ # on Windows we don't care about case sensitivity
+ self.assertEqual(exe.lower(), PYTHON.lower())
+ else:
+ # certain platforms such as BSD are more accurate returning:
+ # "/usr/local/bin/python2.7"
+ # ...instead of:
+ # "/usr/local/bin/python"
+ # We do not want to consider this difference in accuracy
+ # an error.
+ ver = "%s.%s" % (sys.version_info[0], sys.version_info[1])
+ self.assertEqual(exe.replace(ver, ''), PYTHON.replace(ver, ''))
+
+ def test_cmdline(self):
+ cmdline = [PYTHON, "-c", "import time; time.sleep(60)"]
+ sproc = get_test_subprocess(cmdline, wait=True)
+ self.assertEqual(' '.join(psutil.Process(sproc.pid).cmdline()),
+ ' '.join(cmdline))
+
+ def test_name(self):
+ sproc = get_test_subprocess(PYTHON, wait=True)
+ name = psutil.Process(sproc.pid).name().lower()
+ pyexe = os.path.basename(os.path.realpath(sys.executable)).lower()
+ assert pyexe.startswith(name), (pyexe, name)
+
+ @unittest.skipUnless(POSIX, "posix only")
+ # TODO: add support for other compilers
+ @unittest.skipUnless(which("gcc"), "gcc not available")
+ def test_prog_w_funky_name(self):
+ # Test that name(), exe() and cmdline() correctly handle programs
+ # with funky chars such as spaces and ")", see:
+ # https://github.com/giampaolo/psutil/issues/628
+ funky_name = "/tmp/foo bar )"
+ _, c_file = tempfile.mkstemp(prefix='psutil-', suffix='.c', dir="/tmp")
+ self.addCleanup(lambda: safe_remove(c_file))
+ self.addCleanup(lambda: safe_remove(funky_name))
+ with open(c_file, "w") as f:
+ f.write("void main() { pause(); }")
+ subprocess.check_call(["gcc", c_file, "-o", funky_name])
+ sproc = get_test_subprocess(
+ [funky_name, "arg1", "arg2", "", "arg3", ""])
+ p = psutil.Process(sproc.pid)
+ # ...in order to try to prevent occasional failures on travis
+ wait_for_pid(p.pid)
+ self.assertEqual(p.name(), "foo bar )")
+ self.assertEqual(p.exe(), "/tmp/foo bar )")
+ self.assertEqual(
+ p.cmdline(), ["/tmp/foo bar )", "arg1", "arg2", "", "arg3", ""])
+
+ @unittest.skipUnless(POSIX, 'posix only')
+ def test_uids(self):
+ p = psutil.Process()
+ real, effective, saved = p.uids()
+ # os.getuid() refers to "real" uid
+ self.assertEqual(real, os.getuid())
+ # os.geteuid() refers to "effective" uid
+ self.assertEqual(effective, os.geteuid())
+ # no such thing as os.getsuid() ("saved" uid), but starting
+ # from python 2.7 we have os.getresuid()[2]
+ if hasattr(os, "getresuid"):
+ self.assertEqual(saved, os.getresuid()[2])
+
+ @unittest.skipUnless(POSIX, 'posix only')
+ def test_gids(self):
+ p = psutil.Process()
+ real, effective, saved = p.gids()
+ # os.getuid() refers to "real" uid
+ self.assertEqual(real, os.getgid())
+ # os.geteuid() refers to "effective" uid
+ self.assertEqual(effective, os.getegid())
+ # no such thing as os.getsuid() ("saved" uid), but starting
+ # from python 2.7 we have os.getresgid()[2]
+ if hasattr(os, "getresuid"):
+ self.assertEqual(saved, os.getresgid()[2])
+
+ def test_nice(self):
+ p = psutil.Process()
+ self.assertRaises(TypeError, p.nice, "str")
+ if WINDOWS:
+ try:
+ init = p.nice()
+ if sys.version_info > (3, 4):
+ self.assertIsInstance(init, enum.IntEnum)
+ else:
+ self.assertIsInstance(init, int)
+ self.assertEqual(init, psutil.NORMAL_PRIORITY_CLASS)
+ p.nice(psutil.HIGH_PRIORITY_CLASS)
+ self.assertEqual(p.nice(), psutil.HIGH_PRIORITY_CLASS)
+ p.nice(psutil.NORMAL_PRIORITY_CLASS)
+ self.assertEqual(p.nice(), psutil.NORMAL_PRIORITY_CLASS)
+ finally:
+ p.nice(psutil.NORMAL_PRIORITY_CLASS)
+ else:
+ try:
+ first_nice = p.nice()
+ p.nice(1)
+ self.assertEqual(p.nice(), 1)
+ # going back to previous nice value raises
+ # AccessDenied on OSX
+ if not OSX:
+ p.nice(0)
+ self.assertEqual(p.nice(), 0)
+ except psutil.AccessDenied:
+ pass
+ finally:
+ try:
+ p.nice(first_nice)
+ except psutil.AccessDenied:
+ pass
+
+ def test_status(self):
+ p = psutil.Process()
+ self.assertEqual(p.status(), psutil.STATUS_RUNNING)
+
+ def test_username(self):
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ if POSIX:
+ import pwd
+ self.assertEqual(p.username(), pwd.getpwuid(os.getuid()).pw_name)
+ with mock.patch("psutil.pwd.getpwuid",
+ side_effect=KeyError) as fun:
+ p.username() == str(p.uids().real)
+ assert fun.called
+
+ elif WINDOWS and 'USERNAME' in os.environ:
+ expected_username = os.environ['USERNAME']
+ expected_domain = os.environ['USERDOMAIN']
+ domain, username = p.username().split('\\')
+ self.assertEqual(domain, expected_domain)
+ self.assertEqual(username, expected_username)
+ else:
+ p.username()
+
+ def test_cwd(self):
+ sproc = get_test_subprocess(wait=True)
+ p = psutil.Process(sproc.pid)
+ self.assertEqual(p.cwd(), os.getcwd())
+
+ def test_cwd_2(self):
+ cmd = [PYTHON, "-c", "import os, time; os.chdir('..'); time.sleep(60)"]
+ sproc = get_test_subprocess(cmd, wait=True)
+ p = psutil.Process(sproc.pid)
+ call_until(p.cwd, "ret == os.path.dirname(os.getcwd())")
+
+ @unittest.skipUnless(WINDOWS or LINUX or BSD,
+ 'not available on this platform')
+ @unittest.skipIf(LINUX and TRAVIS, "unknown failure on travis")
+ def test_cpu_affinity(self):
+ p = psutil.Process()
+ initial = p.cpu_affinity()
+ if hasattr(os, "sched_getaffinity"):
+ self.assertEqual(initial, list(os.sched_getaffinity(p.pid)))
+ self.assertEqual(len(initial), len(set(initial)))
+ all_cpus = list(range(len(psutil.cpu_percent(percpu=True))))
+ # setting on travis doesn't seem to work (always return all
+ # CPUs on get):
+ # AssertionError: Lists differ: [0, 1, 2, 3, 4, 5, 6, ... != [0]
+ for n in all_cpus:
+ p.cpu_affinity([n])
+ self.assertEqual(p.cpu_affinity(), [n])
+ if hasattr(os, "sched_getaffinity"):
+ self.assertEqual(p.cpu_affinity(),
+ list(os.sched_getaffinity(p.pid)))
+ #
+ p.cpu_affinity(all_cpus)
+ self.assertEqual(p.cpu_affinity(), all_cpus)
+ if hasattr(os, "sched_getaffinity"):
+ self.assertEqual(p.cpu_affinity(),
+ list(os.sched_getaffinity(p.pid)))
+ #
+ self.assertRaises(TypeError, p.cpu_affinity, 1)
+ p.cpu_affinity(initial)
+ # it should work with all iterables, not only lists
+ p.cpu_affinity(set(all_cpus))
+ p.cpu_affinity(tuple(all_cpus))
+ invalid_cpu = [len(psutil.cpu_times(percpu=True)) + 10]
+ self.assertRaises(ValueError, p.cpu_affinity, invalid_cpu)
+ self.assertRaises(ValueError, p.cpu_affinity, range(10000, 11000))
+ self.assertRaises(TypeError, p.cpu_affinity, [0, "1"])
+
+ # TODO
+ @unittest.skipIf(BSD, "broken on BSD, see #595")
+ @unittest.skipIf(APPVEYOR,
+ "can't find any process file on Appveyor")
+ def test_open_files(self):
+ # current process
+ p = psutil.Process()
+ files = p.open_files()
+ self.assertNotIn(TESTFN, [x.path for x in files])
+ with open(TESTFN, 'w'):
+ # give the kernel some time to see the new file
+ call_until(p.open_files, "len(ret) != %i" % len(files))
+ filenames = [x.path for x in p.open_files()]
+ self.assertIn(TESTFN, filenames)
+ for file in filenames:
+ assert os.path.isfile(file), file
+
+ # another process
+ cmdline = "import time; f = open(r'%s', 'r'); time.sleep(60);" % TESTFN
+ sproc = get_test_subprocess([PYTHON, "-c", cmdline], wait=True)
+ p = psutil.Process(sproc.pid)
+
+ for x in range(100):
+ filenames = [x.path for x in p.open_files()]
+ if TESTFN in filenames:
+ break
+ time.sleep(.01)
+ else:
+ self.assertIn(TESTFN, filenames)
+ for file in filenames:
+ assert os.path.isfile(file), file
+
+ # TODO
+ @unittest.skipIf(BSD, "broken on BSD, see #595")
+ @unittest.skipIf(APPVEYOR,
+ "can't find any process file on Appveyor")
+ def test_open_files2(self):
+ # test fd and path fields
+ with open(TESTFN, 'w') as fileobj:
+ p = psutil.Process()
+ for path, fd in p.open_files():
+ if path == fileobj.name or fd == fileobj.fileno():
+ break
+ else:
+ self.fail("no file found; files=%s" % repr(p.open_files()))
+ self.assertEqual(path, fileobj.name)
+ if WINDOWS:
+ self.assertEqual(fd, -1)
+ else:
+ self.assertEqual(fd, fileobj.fileno())
+ # test positions
+ ntuple = p.open_files()[0]
+ self.assertEqual(ntuple[0], ntuple.path)
+ self.assertEqual(ntuple[1], ntuple.fd)
+ # test file is gone
+ self.assertNotIn(fileobj.name, [x.path for x in p.open_files()])
+
+ def compare_proc_sys_cons(self, pid, proc_cons):
+ from psutil._common import pconn
+ sys_cons = []
+ for c in psutil.net_connections(kind='all'):
+ if c.pid == pid:
+ sys_cons.append(pconn(*c[:-1]))
+ if BSD:
+ # on BSD all fds are set to -1
+ proc_cons = [pconn(*[-1] + list(x[1:])) for x in proc_cons]
+ self.assertEqual(sorted(proc_cons), sorted(sys_cons))
+
+ @skip_on_access_denied(only_if=OSX)
+ def test_connections(self):
+ def check_conn(proc, conn, family, type, laddr, raddr, status, kinds):
+ all_kinds = ("all", "inet", "inet4", "inet6", "tcp", "tcp4",
+ "tcp6", "udp", "udp4", "udp6")
+ check_connection_ntuple(conn)
+ self.assertEqual(conn.family, family)
+ self.assertEqual(conn.type, type)
+ self.assertEqual(conn.laddr, laddr)
+ self.assertEqual(conn.raddr, raddr)
+ self.assertEqual(conn.status, status)
+ for kind in all_kinds:
+ cons = proc.connections(kind=kind)
+ if kind in kinds:
+ self.assertNotEqual(cons, [])
+ else:
+ self.assertEqual(cons, [])
+ # compare against system-wide connections
+ # XXX Solaris can't retrieve system-wide UNIX
+ # sockets.
+ if not SUNOS:
+ self.compare_proc_sys_cons(proc.pid, [conn])
+
+ tcp_template = textwrap.dedent("""
+ import socket, time
+ s = socket.socket($family, socket.SOCK_STREAM)
+ s.bind(('$addr', 0))
+ s.listen(1)
+ with open('$testfn', 'w') as f:
+ f.write(str(s.getsockname()[:2]))
+ time.sleep(60)
+ """)
+
+ udp_template = textwrap.dedent("""
+ import socket, time
+ s = socket.socket($family, socket.SOCK_DGRAM)
+ s.bind(('$addr', 0))
+ with open('$testfn', 'w') as f:
+ f.write(str(s.getsockname()[:2]))
+ time.sleep(60)
+ """)
+
+ from string import Template
+ testfile = os.path.basename(TESTFN)
+ tcp4_template = Template(tcp_template).substitute(
+ family=int(AF_INET), addr="127.0.0.1", testfn=testfile)
+ udp4_template = Template(udp_template).substitute(
+ family=int(AF_INET), addr="127.0.0.1", testfn=testfile)
+ tcp6_template = Template(tcp_template).substitute(
+ family=int(AF_INET6), addr="::1", testfn=testfile)
+ udp6_template = Template(udp_template).substitute(
+ family=int(AF_INET6), addr="::1", testfn=testfile)
+
+ # launch various subprocesses, each instantiating a socket of a
+ # given family and type, to enrich psutil results
+ tcp4_proc = pyrun(tcp4_template)
+ tcp4_addr = eval(wait_for_file(testfile))
+ udp4_proc = pyrun(udp4_template)
+ udp4_addr = eval(wait_for_file(testfile))
+ if supports_ipv6():
+ tcp6_proc = pyrun(tcp6_template)
+ tcp6_addr = eval(wait_for_file(testfile))
+ udp6_proc = pyrun(udp6_template)
+ udp6_addr = eval(wait_for_file(testfile))
+ else:
+ tcp6_proc = None
+ udp6_proc = None
+ tcp6_addr = None
+ udp6_addr = None
+
+ for p in psutil.Process().children():
+ cons = p.connections()
+ self.assertEqual(len(cons), 1)
+ for conn in cons:
+ # TCP v4
+ if p.pid == tcp4_proc.pid:
+ check_conn(p, conn, AF_INET, SOCK_STREAM, tcp4_addr, (),
+ psutil.CONN_LISTEN,
+ ("all", "inet", "inet4", "tcp", "tcp4"))
+ # UDP v4
+ elif p.pid == udp4_proc.pid:
+ check_conn(p, conn, AF_INET, SOCK_DGRAM, udp4_addr, (),
+ psutil.CONN_NONE,
+ ("all", "inet", "inet4", "udp", "udp4"))
+ # TCP v6
+ elif p.pid == getattr(tcp6_proc, "pid", None):
+ check_conn(p, conn, AF_INET6, SOCK_STREAM, tcp6_addr, (),
+ psutil.CONN_LISTEN,
+ ("all", "inet", "inet6", "tcp", "tcp6"))
+ # UDP v6
+ elif p.pid == getattr(udp6_proc, "pid", None):
+ check_conn(p, conn, AF_INET6, SOCK_DGRAM, udp6_addr, (),
+ psutil.CONN_NONE,
+ ("all", "inet", "inet6", "udp", "udp6"))
+
+ @unittest.skipUnless(hasattr(socket, 'AF_UNIX'),
+ 'AF_UNIX is not supported')
+ @skip_on_access_denied(only_if=OSX)
+ def test_connections_unix(self):
+ def check(type):
+ safe_remove(TESTFN)
+ sock = socket.socket(AF_UNIX, type)
+ with contextlib.closing(sock):
+ sock.bind(TESTFN)
+ cons = psutil.Process().connections(kind='unix')
+ conn = cons[0]
+ check_connection_ntuple(conn)
+ if conn.fd != -1: # fd is set to -1 on SunOS and Windows
+ self.assertEqual(conn.fd, sock.fileno())
+ self.assertEqual(conn.family, AF_UNIX)
+ self.assertEqual(conn.type, type)
+ self.assertEqual(conn.laddr, TESTFN)
+ if not SUNOS:
+ # XXX Solaris can't retrieve system-wide UNIX
+ # sockets.
+ self.compare_proc_sys_cons(os.getpid(), cons)
+
+ check(SOCK_STREAM)
+ check(SOCK_DGRAM)
+
+ @unittest.skipUnless(hasattr(socket, "fromfd"),
+ 'socket.fromfd() is not available')
+ @unittest.skipIf(WINDOWS or SUNOS,
+ 'connection fd not available on this platform')
+ def test_connection_fromfd(self):
+ with contextlib.closing(socket.socket()) as sock:
+ sock.bind(('localhost', 0))
+ sock.listen(1)
+ p = psutil.Process()
+ for conn in p.connections():
+ if conn.fd == sock.fileno():
+ break
+ else:
+ self.fail("couldn't find socket fd")
+ dupsock = socket.fromfd(conn.fd, conn.family, conn.type)
+ with contextlib.closing(dupsock):
+ self.assertEqual(dupsock.getsockname(), conn.laddr)
+ self.assertNotEqual(sock.fileno(), dupsock.fileno())
+
+ def test_connection_constants(self):
+ ints = []
+ strs = []
+ for name in dir(psutil):
+ if name.startswith('CONN_'):
+ num = getattr(psutil, name)
+ str_ = str(num)
+ assert str_.isupper(), str_
+ assert str_ not in strs, str_
+ assert num not in ints, num
+ ints.append(num)
+ strs.append(str_)
+ if SUNOS:
+ psutil.CONN_IDLE
+ psutil.CONN_BOUND
+ if WINDOWS:
+ psutil.CONN_DELETE_TCB
+
+ @unittest.skipUnless(POSIX, 'posix only')
+ def test_num_fds(self):
+ p = psutil.Process()
+ start = p.num_fds()
+ file = open(TESTFN, 'w')
+ self.addCleanup(file.close)
+ self.assertEqual(p.num_fds(), start + 1)
+ sock = socket.socket()
+ self.addCleanup(sock.close)
+ self.assertEqual(p.num_fds(), start + 2)
+ file.close()
+ sock.close()
+ self.assertEqual(p.num_fds(), start)
+
+ @skip_on_not_implemented(only_if=LINUX)
+ def test_num_ctx_switches(self):
+ p = psutil.Process()
+ before = sum(p.num_ctx_switches())
+ for x in range(500000):
+ after = sum(p.num_ctx_switches())
+ if after > before:
+ return
+ self.fail("num ctx switches still the same after 50.000 iterations")
+
+ def test_parent_ppid(self):
+ this_parent = os.getpid()
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ self.assertEqual(p.ppid(), this_parent)
+ self.assertEqual(p.parent().pid, this_parent)
+ # no other process is supposed to have us as parent
+ for p in psutil.process_iter():
+ if p.pid == sproc.pid:
+ continue
+ self.assertTrue(p.ppid() != this_parent)
+
+ def test_children(self):
+ p = psutil.Process()
+ self.assertEqual(p.children(), [])
+ self.assertEqual(p.children(recursive=True), [])
+ sproc = get_test_subprocess()
+ children1 = p.children()
+ children2 = p.children(recursive=True)
+ for children in (children1, children2):
+ self.assertEqual(len(children), 1)
+ self.assertEqual(children[0].pid, sproc.pid)
+ self.assertEqual(children[0].ppid(), os.getpid())
+
+ def test_children_recursive(self):
+ # here we create a subprocess which creates another one as in:
+ # A (parent) -> B (child) -> C (grandchild)
+ s = "import subprocess, os, sys, time;"
+ s += "PYTHON = os.path.realpath(sys.executable);"
+ s += "cmd = [PYTHON, '-c', 'import time; time.sleep(60);'];"
+ s += "subprocess.Popen(cmd);"
+ s += "time.sleep(60);"
+ get_test_subprocess(cmd=[PYTHON, "-c", s])
+ p = psutil.Process()
+ self.assertEqual(len(p.children(recursive=False)), 1)
+ # give the grandchild some time to start
+ stop_at = time.time() + GLOBAL_TIMEOUT
+ while time.time() < stop_at:
+ children = p.children(recursive=True)
+ if len(children) > 1:
+ break
+ self.assertEqual(len(children), 2)
+ self.assertEqual(children[0].ppid(), os.getpid())
+ self.assertEqual(children[1].ppid(), children[0].pid)
+
+ def test_children_duplicates(self):
+ # find the process which has the highest number of children
+ table = collections.defaultdict(int)
+ for p in psutil.process_iter():
+ try:
+ table[p.ppid()] += 1
+ except psutil.Error:
+ pass
+ # this is the one, now let's make sure there are no duplicates
+ pid = sorted(table.items(), key=lambda x: x[1])[-1][0]
+ p = psutil.Process(pid)
+ try:
+ c = p.children(recursive=True)
+ except psutil.AccessDenied: # windows
+ pass
+ else:
+ self.assertEqual(len(c), len(set(c)))
+
+ def test_suspend_resume(self):
+ sproc = get_test_subprocess(wait=True)
+ p = psutil.Process(sproc.pid)
+ p.suspend()
+ for x in range(100):
+ if p.status() == psutil.STATUS_STOPPED:
+ break
+ time.sleep(0.01)
+ p.resume()
+ self.assertNotEqual(p.status(), psutil.STATUS_STOPPED)
+
+ def test_invalid_pid(self):
+ self.assertRaises(TypeError, psutil.Process, "1")
+ self.assertRaises(ValueError, psutil.Process, -1)
+
+ def test_as_dict(self):
+ p = psutil.Process()
+ d = p.as_dict(attrs=['exe', 'name'])
+ self.assertEqual(sorted(d.keys()), ['exe', 'name'])
+
+ p = psutil.Process(min(psutil.pids()))
+ d = p.as_dict(attrs=['connections'], ad_value='foo')
+ if not isinstance(d['connections'], list):
+ self.assertEqual(d['connections'], 'foo')
+
+ def test_halfway_terminated_process(self):
+ # Test that NoSuchProcess exception gets raised in case the
+ # process dies after we create the Process object.
+ # Example:
+ # >>> proc = Process(1234)
+ # >>> time.sleep(2) # time-consuming task, process dies in meantime
+ # >>> proc.name()
+ # Refers to Issue #15
+ sproc = get_test_subprocess()
+ p = psutil.Process(sproc.pid)
+ p.terminate()
+ p.wait()
+ if WINDOWS:
+ wait_for_pid(p.pid)
+ self.assertFalse(p.is_running())
+ self.assertFalse(p.pid in psutil.pids())
+
+ excluded_names = ['pid', 'is_running', 'wait', 'create_time']
+ if LINUX and not RLIMIT_SUPPORT:
+ excluded_names.append('rlimit')
+ for name in dir(p):
+ if (name.startswith('_') or
+ name in excluded_names):
+ continue
+ try:
+ meth = getattr(p, name)
+ # get/set methods
+ if name == 'nice':
+ if POSIX:
+ ret = meth(1)
+ else:
+ ret = meth(psutil.NORMAL_PRIORITY_CLASS)
+ elif name == 'ionice':
+ ret = meth()
+ ret = meth(2)
+ elif name == 'rlimit':
+ ret = meth(psutil.RLIMIT_NOFILE)
+ ret = meth(psutil.RLIMIT_NOFILE, (5, 5))
+ elif name == 'cpu_affinity':
+ ret = meth()
+ ret = meth([0])
+ elif name == 'send_signal':
+ ret = meth(signal.SIGTERM)
+ else:
+ ret = meth()
+ except psutil.ZombieProcess:
+ self.fail("ZombieProcess for %r was not supposed to happen" %
+ name)
+ except psutil.NoSuchProcess:
+ pass
+ except NotImplementedError:
+ pass
+ else:
+ self.fail(
+ "NoSuchProcess exception not raised for %r, retval=%s" % (
+ name, ret))
+
+ @unittest.skipUnless(POSIX, 'posix only')
+ def test_zombie_process(self):
+ def succeed_or_zombie_p_exc(fun, *args, **kwargs):
+ try:
+ fun(*args, **kwargs)
+ except (psutil.ZombieProcess, psutil.AccessDenied):
+ pass
+
+ # Note: in this test we'll be creating two subprocesses.
+ # Both of them are supposed to be freed / killed by
+ # reap_children() as they are attributable to 'us'
+ # (os.getpid()) via children(recursive=True).
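+ # How it works: pyrun() spawns a process which immediately forks;
+ # the forked grandchild sends its own PID over the UNIX socket and
+ # exits, while its parent keeps sleeping without wait()ing on it,
+ # leaving the grandchild a zombie for us to inspect.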
+ src = textwrap.dedent("""\
+ import os, sys, time, socket, contextlib
+ child_pid = os.fork()
+ if child_pid > 0:
+ time.sleep(3000)
+ else:
+ # this is the zombie process
+ s = socket.socket(socket.AF_UNIX)
+ with contextlib.closing(s):
+ s.connect('%s')
+ if sys.version_info < (3, ):
+ pid = str(os.getpid())
+ else:
+ pid = bytes(str(os.getpid()), 'ascii')
+ s.sendall(pid)
+ """ % TESTFN)
+ with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
+ try:
+ sock.settimeout(GLOBAL_TIMEOUT)
+ sock.bind(TESTFN)
+ sock.listen(1)
+ pyrun(src)
+ conn, _ = sock.accept()
+ select.select([conn.fileno()], [], [], GLOBAL_TIMEOUT)
+ zpid = int(conn.recv(1024))
+ zproc = psutil.Process(zpid)
+ call_until(lambda: zproc.status(),
+ "ret == psutil.STATUS_ZOMBIE")
+ # A zombie process should always be instantiable
+ zproc = psutil.Process(zpid)
+ # ...and at least its status should always be queryable
+ self.assertEqual(zproc.status(), psutil.STATUS_ZOMBIE)
+ # ...and it should be considered 'running'
+ self.assertTrue(zproc.is_running())
+ # ...and as_dict() shouldn't crash
+ zproc.as_dict()
+ if hasattr(zproc, "rlimit"):
+ succeed_or_zombie_p_exc(zproc.rlimit, psutil.RLIMIT_NOFILE)
+ succeed_or_zombie_p_exc(zproc.rlimit, psutil.RLIMIT_NOFILE,
+ (5, 5))
+ # set methods
+ succeed_or_zombie_p_exc(zproc.parent)
+ if hasattr(zproc, 'cpu_affinity'):
+ succeed_or_zombie_p_exc(zproc.cpu_affinity, [0])
+ succeed_or_zombie_p_exc(zproc.nice, 0)
+ if hasattr(zproc, 'ionice'):
+ if LINUX:
+ succeed_or_zombie_p_exc(zproc.ionice, 2, 0)
+ else:
+ succeed_or_zombie_p_exc(zproc.ionice, 0) # Windows
+ if hasattr(zproc, 'rlimit'):
+ succeed_or_zombie_p_exc(zproc.rlimit,
+ psutil.RLIMIT_NOFILE, (5, 5))
+ succeed_or_zombie_p_exc(zproc.suspend)
+ succeed_or_zombie_p_exc(zproc.resume)
+ succeed_or_zombie_p_exc(zproc.terminate)
+ succeed_or_zombie_p_exc(zproc.kill)
+
+ # ...its parent should 'see' it
+ # edit: not true on BSD and OSX
+ # descendants = [x.pid for x in psutil.Process().children(
+ # recursive=True)]
+ # self.assertIn(zpid, descendants)
+ # XXX should we also assume ppid be usable? Note: this
+ # would be an important use case as the only way to get
+ # rid of a zombie is to kill its parent.
+ # self.assertEqual(zpid.ppid(), os.getpid())
+ # ...and all other APIs should be able to deal with it
+ self.assertTrue(psutil.pid_exists(zpid))
+ self.assertIn(zpid, psutil.pids())
+ self.assertIn(zpid, [x.pid for x in psutil.process_iter()])
+ psutil._pmap = {}
+ self.assertIn(zpid, [x.pid for x in psutil.process_iter()])
+ finally:
+ reap_children(search_all=True)
+
+ def test_pid_0(self):
+ # Process(0) is supposed to work on all platforms except Linux
+ if 0 not in psutil.pids():
+ self.assertRaises(psutil.NoSuchProcess, psutil.Process, 0)
+ return
+
+ p = psutil.Process(0)
+ self.assertTrue(p.name())
+
+ if POSIX:
+ try:
+ self.assertEqual(p.uids().real, 0)
+ self.assertEqual(p.gids().real, 0)
+ except psutil.AccessDenied:
+ pass
+
+ self.assertRaisesRegexp(
+ ValueError, "preventing sending signal to process with PID 0",
+ p.send_signal, signal.SIGTERM)
+
+ self.assertIn(p.ppid(), (0, 1))
+ # self.assertEqual(p.exe(), "")
+ p.cmdline()
+ try:
+ p.num_threads()
+ except psutil.AccessDenied:
+ pass
+
+ try:
+ p.memory_info()
+ except psutil.AccessDenied:
+ pass
+
+ try:
+ if POSIX:
+ self.assertEqual(p.username(), 'root')
+ elif WINDOWS:
+ self.assertEqual(p.username(), 'NT AUTHORITY\\SYSTEM')
+ else:
+ p.username()
+ except psutil.AccessDenied:
+ pass
+
+ self.assertIn(0, psutil.pids())
+ self.assertTrue(psutil.pid_exists(0))
+
+ def test_Popen(self):
+ # Popen class test
+ # XXX this test causes a ResourceWarning on Python 3 because
+ # psutil.__subproc instance doesn't get properly freed.
+ # Not sure what to do though.
+ cmd = [PYTHON, "-c", "import time; time.sleep(60);"]
+ proc = psutil.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ try:
+ proc.name()
+ proc.stdin
+ self.assertTrue(hasattr(proc, 'name'))
+ self.assertTrue(hasattr(proc, 'stdin'))
+ self.assertTrue(dir(proc))
+ self.assertRaises(AttributeError, getattr, proc, 'foo')
+ finally:
+ proc.kill()
+ proc.wait()
+ self.assertIsNotNone(proc.returncode)
+
+
+# ===================================================================
+# --- Fetch all processes test
+# ===================================================================
+
+class TestFetchAllProcesses(unittest.TestCase):
+ """Test which iterates over all running processes and performs
+ some sanity checks against the Process API's returned values.
+ """
+
+ def setUp(self):
+ if POSIX:
+ import pwd
+ pall = pwd.getpwall()
+ self._uids = set([x.pw_uid for x in pall])
+ self._usernames = set([x.pw_name for x in pall])
+
+ def test_fetch_all(self):
+ valid_procs = 0
+ excluded_names = set([
+ 'send_signal', 'suspend', 'resume', 'terminate', 'kill', 'wait',
+ 'as_dict', 'cpu_percent', 'parent', 'children', 'pid'])
+ if LINUX and not RLIMIT_SUPPORT:
+ excluded_names.add('rlimit')
+ attrs = []
+ for name in dir(psutil.Process):
+ if name.startswith("_"):
+ continue
+ if name in excluded_names:
+ continue
+ attrs.append(name)
+
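+ # Each return value is dispatched to a same-named validator method
+ # defined below (e.g. the result of exe() is passed to self.exe()).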
+ default = object()
+ failures = []
+ for name in attrs:
+ for p in psutil.process_iter():
+ ret = default
+ try:
+ try:
+ args = ()
+ attr = getattr(p, name, None)
+ if attr is not None and callable(attr):
+ if name == 'rlimit':
+ args = (psutil.RLIMIT_NOFILE,)
+ ret = attr(*args)
+ else:
+ ret = attr
+ valid_procs += 1
+ except NotImplementedError:
+ msg = "%r was skipped because not implemented" % (
+ self.__class__.__name__ + '.test_' + name)
+ warn(msg)
+ except (psutil.NoSuchProcess, psutil.AccessDenied) as err:
+ self.assertEqual(err.pid, p.pid)
+ if err.name:
+ # make sure exception's name attr is set
+ # with the actual process name
+ self.assertEqual(err.name, p.name())
+ self.assertTrue(str(err))
+ self.assertTrue(err.msg)
+ else:
+ if ret not in (0, 0.0, [], None, ''):
+ assert ret, ret
+ meth = getattr(self, name)
+ meth(ret)
+ except Exception as err:
+ s = '\n' + '=' * 70 + '\n'
+ s += "FAIL: test_%s (proc=%s" % (name, p)
+ if ret != default:
+ s += ", ret=%s)" % repr(ret)
+ s += ')\n'
+ s += '-' * 70
+ s += "\n%s" % traceback.format_exc()
+ s = "\n".join((" " * 4) + i for i in s.splitlines())
+ failures.append(s)
+ break
+
+ if failures:
+ self.fail(''.join(failures))
+
+ # we should always have a non-empty list, excluding special cases
+ # such as PID 0.
+ self.assertTrue(valid_procs > 0)
+
+ def cmdline(self, ret):
+ pass
+
+ def exe(self, ret):
+ if not ret:
+ self.assertEqual(ret, '')
+ else:
+ assert os.path.isabs(ret), ret
+ # Note: os.path.isfile() may return False even if the file is
+ # there, hence we skip the test, see:
+ # http://stackoverflow.com/questions/3112546/os-path-exists-lies
+ if POSIX and os.path.isfile(ret):
+ if hasattr(os, 'access') and hasattr(os, "X_OK"):
+ # XXX may fail on OSX
+ self.assertTrue(os.access(ret, os.X_OK))
+
+ def ppid(self, ret):
+ self.assertTrue(ret >= 0)
+
+ def name(self, ret):
+ self.assertIsInstance(ret, (str, unicode))
+ self.assertTrue(ret)
+
+ def create_time(self, ret):
+ self.assertTrue(ret > 0)
+ # this can't be taken for granted on all platforms
+ # self.assertGreaterEqual(ret, psutil.boot_time())
+ # make sure returned value can be pretty printed
+ # with strftime
+ time.strftime("%Y %m %d %H:%M:%S", time.localtime(ret))
+
+ def uids(self, ret):
+ for uid in ret:
+ self.assertTrue(uid >= 0)
+ self.assertIn(uid, self._uids)
+
+ def gids(self, ret):
+ # note: testing all gids as above seems not to be reliable for
+ # gid == 30 (nobody); not sure why.
+ for gid in ret:
+ self.assertTrue(gid >= 0)
+ # self.assertIn(gid, self._gids)
+
+ def username(self, ret):
+ self.assertTrue(ret)
+ if POSIX:
+ self.assertIn(ret, self._usernames)
+
+ def status(self, ret):
+ self.assertTrue(ret != "")
+ self.assertTrue(ret != '?')
+ self.assertIn(ret, VALID_PROC_STATUSES)
+
+ def io_counters(self, ret):
+ for field in ret:
+ if field != -1:
+ self.assertTrue(field >= 0)
+
+ def ionice(self, ret):
+ if LINUX:
+ self.assertTrue(ret.ioclass >= 0)
+ self.assertTrue(ret.value >= 0)
+ else:
+ self.assertTrue(ret >= 0)
+ self.assertIn(ret, (0, 1, 2))
+
+ def num_threads(self, ret):
+ self.assertTrue(ret >= 1)
+
+ def threads(self, ret):
+ for t in ret:
+ self.assertTrue(t.id >= 0)
+ self.assertTrue(t.user_time >= 0)
+ self.assertTrue(t.system_time >= 0)
+
+ def cpu_times(self, ret):
+ self.assertTrue(ret.user >= 0)
+ self.assertTrue(ret.system >= 0)
+
+ def memory_info(self, ret):
+ self.assertTrue(ret.rss >= 0)
+ self.assertTrue(ret.vms >= 0)
+
+ def memory_info_ex(self, ret):
+ for name in ret._fields:
+ self.assertTrue(getattr(ret, name) >= 0)
+ if POSIX and ret.vms != 0:
+ # VMS is always supposed to be the highest
+ for name in ret._fields:
+ if name != 'vms':
+ value = getattr(ret, name)
+ assert ret.vms > value, ret
+ elif WINDOWS:
+ assert ret.peak_wset >= ret.wset, ret
+ assert ret.peak_paged_pool >= ret.paged_pool, ret
+ assert ret.peak_nonpaged_pool >= ret.nonpaged_pool, ret
+ assert ret.peak_pagefile >= ret.pagefile, ret
+
+ def open_files(self, ret):
+ for f in ret:
+ if WINDOWS:
+ assert f.fd == -1, f
+ else:
+ self.assertIsInstance(f.fd, int)
+ assert os.path.isabs(f.path), f
+ assert os.path.isfile(f.path), f
+
+ def num_fds(self, ret):
+ self.assertTrue(ret >= 0)
+
+ def connections(self, ret):
+ self.assertEqual(len(ret), len(set(ret)))
+ for conn in ret:
+ check_connection_ntuple(conn)
+
+ def cwd(self, ret):
+ if ret is not None: # BSD may return None
+ assert os.path.isabs(ret), ret
+ try:
+ st = os.stat(ret)
+ except OSError as err:
+ # directory has been removed in the meantime
+ if err.errno != errno.ENOENT:
+ raise
+ else:
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ def memory_percent(self, ret):
+ assert 0 <= ret <= 100, ret
+
+ def is_running(self, ret):
+ self.assertTrue(ret)
+
+ def cpu_affinity(self, ret):
+ assert ret != [], ret
+
+ def terminal(self, ret):
+ if ret is not None:
+ assert os.path.isabs(ret), ret
+ assert os.path.exists(ret), ret
+
+ def memory_maps(self, ret):
+ for nt in ret:
+ for fname in nt._fields:
+ value = getattr(nt, fname)
+ if fname == 'path':
+ if not value.startswith('['):
+ assert os.path.isabs(nt.path), nt.path
+ # commented as on Linux we might get
+ # '/foo/bar (deleted)'
+ # assert os.path.exists(nt.path), nt.path
+ elif fname in ('addr', 'perms'):
+ self.assertTrue(value)
+ else:
+ self.assertIsInstance(value, (int, long))
+ assert value >= 0, value
+
+ def num_handles(self, ret):
+ # same lower bound regardless of platform
+ self.assertGreaterEqual(ret, 0)
+
+ def nice(self, ret):
+ if POSIX:
+ assert -20 <= ret <= 20, ret
+ else:
+ priorities = [getattr(psutil, x) for x in dir(psutil)
+ if x.endswith('_PRIORITY_CLASS')]
+ self.assertIn(ret, priorities)
+
+ def num_ctx_switches(self, ret):
+ self.assertTrue(ret.voluntary >= 0)
+ self.assertTrue(ret.involuntary >= 0)
+
+ def rlimit(self, ret):
+ self.assertEqual(len(ret), 2)
+ self.assertGreaterEqual(ret[0], -1)
+ self.assertGreaterEqual(ret[1], -1)
+
+
+# ===================================================================
+# --- Limited user tests
+# ===================================================================
+
+@unittest.skipUnless(POSIX, "UNIX only")
+@unittest.skipUnless(hasattr(os, 'getuid') and os.getuid() == 0,
+ "super user privileges are required")
+class LimitedUserTestCase(TestProcess):
+ """Repeat the previous tests by using a limited user.
+ Executed only on UNIX and only if the user running the test script
+ is root.
+ """
+ # the uid/gid the test suite runs under
+ if hasattr(os, 'getuid'):
+ PROCESS_UID = os.getuid()
+ PROCESS_GID = os.getgid()
+
+ def __init__(self, *args, **kwargs):
+ TestProcess.__init__(self, *args, **kwargs)
+ # redefine all existing test methods in order to
+ # ignore AccessDenied exceptions
+ for attr in [x for x in dir(self) if x.startswith('test')]:
+ meth = getattr(self, attr)
+
+ # default argument binds the current meth (avoids the classic
+ # closure late-binding bug in this loop)
+ def test_(self, meth=meth):
+ try:
+ meth()
+ except psutil.AccessDenied:
+ pass
+ setattr(self, attr, types.MethodType(test_, self))
+
+ def setUp(self):
+ safe_remove(TESTFN)
+ TestProcess.setUp(self)
+ os.setegid(1000)
+ os.seteuid(1000)
+
+ def tearDown(self):
+ os.setegid(self.PROCESS_GID)
+ os.seteuid(self.PROCESS_UID)
+ TestProcess.tearDown(self)
+
+ def test_nice(self):
+ try:
+ psutil.Process().nice(-1)
+ except psutil.AccessDenied:
+ pass
+ else:
+ self.fail("exception not raised")
+
+ def test_zombie_process(self):
+ # causes problems if the test suite is run as root
+ pass
+
+
+# ===================================================================
+# --- Misc tests
+# ===================================================================
+
+class TestMisc(unittest.TestCase):
+ """Misc / generic tests."""
+
+ def test_process__repr__(self, func=repr):
+ p = psutil.Process()
+ r = func(p)
+ self.assertIn("psutil.Process", r)
+ self.assertIn("pid=%s" % p.pid, r)
+ self.assertIn("name=", r)
+ self.assertIn(p.name(), r)
+ with mock.patch.object(psutil.Process, "name",
+ side_effect=psutil.ZombieProcess(os.getpid())):
+ p = psutil.Process()
+ r = func(p)
+ self.assertIn("pid=%s" % p.pid, r)
+ self.assertIn("zombie", r)
+ self.assertNotIn("name=", r)
+ with mock.patch.object(psutil.Process, "name",
+ side_effect=psutil.NoSuchProcess(os.getpid())):
+ p = psutil.Process()
+ r = func(p)
+ self.assertIn("pid=%s" % p.pid, r)
+ self.assertIn("terminated", r)
+ self.assertNotIn("name=", r)
+
+ def test_process__str__(self):
+ self.test_process__repr__(func=str)
+
+ def test_no_such_process__repr__(self, func=repr):
+ self.assertEqual(
+ repr(psutil.NoSuchProcess(321)),
+ "psutil.NoSuchProcess process no longer exists (pid=321)")
+ self.assertEqual(
+ repr(psutil.NoSuchProcess(321, name='foo')),
+ "psutil.NoSuchProcess process no longer exists (pid=321, "
+ "name='foo')")
+ self.assertEqual(
+ repr(psutil.NoSuchProcess(321, msg='foo')),
+ "psutil.NoSuchProcess foo")
+
+ def test_zombie_process__repr__(self, func=repr):
+ self.assertEqual(
+ repr(psutil.ZombieProcess(321)),
+ "psutil.ZombieProcess process still exists but it's a zombie "
+ "(pid=321)")
+ self.assertEqual(
+ repr(psutil.ZombieProcess(321, name='foo')),
+ "psutil.ZombieProcess process still exists but it's a zombie "
+ "(pid=321, name='foo')")
+ self.assertEqual(
+ repr(psutil.ZombieProcess(321, name='foo', ppid=1)),
+ "psutil.ZombieProcess process still exists but it's a zombie "
+ "(pid=321, name='foo', ppid=1)")
+ self.assertEqual(
+ repr(psutil.ZombieProcess(321, msg='foo')),
+ "psutil.ZombieProcess foo")
+
+ def test_access_denied__repr__(self, func=repr):
+ self.assertEqual(
+ repr(psutil.AccessDenied(321)),
+ "psutil.AccessDenied (pid=321)")
+ self.assertEqual(
+ repr(psutil.AccessDenied(321, name='foo')),
+ "psutil.AccessDenied (pid=321, name='foo')")
+ self.assertEqual(
+ repr(psutil.AccessDenied(321, msg='foo')),
+ "psutil.AccessDenied foo")
+
+ def test_timeout_expired__repr__(self, func=repr):
+ self.assertEqual(
+ repr(psutil.TimeoutExpired(321)),
+ "psutil.TimeoutExpired timeout after 321 seconds")
+ self.assertEqual(
+ repr(psutil.TimeoutExpired(321, pid=111)),
+ "psutil.TimeoutExpired timeout after 321 seconds (pid=111)")
+ self.assertEqual(
+ repr(psutil.TimeoutExpired(321, pid=111, name='foo')),
+ "psutil.TimeoutExpired timeout after 321 seconds "
+ "(pid=111, name='foo')")
+
+ def test_process__eq__(self):
+ p1 = psutil.Process()
+ p2 = psutil.Process()
+ self.assertEqual(p1, p2)
+ p2._ident = (0, 0)
+ self.assertNotEqual(p1, p2)
+ self.assertNotEqual(p1, 'foo')
+
+ def test_process__hash__(self):
+ s = set([psutil.Process(), psutil.Process()])
+ self.assertEqual(len(s), 1)
+
+ def test__all__(self):
+ dir_psutil = dir(psutil)
+ for name in dir_psutil:
+ if name in ('callable', 'error', 'namedtuple',
+ 'long', 'test', 'NUM_CPUS', 'BOOT_TIME',
+ 'TOTAL_PHYMEM'):
+ continue
+ if not name.startswith('_'):
+ try:
+ __import__(name)
+ except ImportError:
+ if name not in psutil.__all__:
+ fun = getattr(psutil, name)
+ if fun is None:
+ continue
+ if (fun.__doc__ is not None and
+ 'deprecated' not in fun.__doc__.lower()):
+ self.fail('%r not in psutil.__all__' % name)
+
+ # Import 'star' will break if __all__ is inconsistent, see:
+ # https://github.com/giampaolo/psutil/issues/656
+ # Can't do `from psutil import *` as it won't work on python 3
+ # so we simply iterate over __all__.
+ for name in psutil.__all__:
+ self.assertIn(name, dir_psutil)
+
+ def test_version(self):
+ self.assertEqual('.'.join([str(x) for x in psutil.version_info]),
+ psutil.__version__)
+
+ def test_memoize(self):
+ from psutil._common import memoize
+
+ @memoize
+ def foo(*args, **kwargs):
+ "foo docstring"
+ calls.append(None)
+ return (args, kwargs)
+
+ calls = []
+ # no args
+ for x in range(2):
+ ret = foo()
+ expected = ((), {})
+ self.assertEqual(ret, expected)
+ self.assertEqual(len(calls), 1)
+ # with args
+ for x in range(2):
+ ret = foo(1)
+ expected = ((1, ), {})
+ self.assertEqual(ret, expected)
+ self.assertEqual(len(calls), 2)
+ # with args + kwargs
+ for x in range(2):
+ ret = foo(1, bar=2)
+ expected = ((1, ), {'bar': 2})
+ self.assertEqual(ret, expected)
+ self.assertEqual(len(calls), 3)
+ # clear cache
+ foo.cache_clear()
+ ret = foo()
+ expected = ((), {})
+ self.assertEqual(ret, expected)
+ self.assertEqual(len(calls), 4)
+ # docstring
+ self.assertEqual(foo.__doc__, "foo docstring")
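+ # A decorator along these lines would satisfy this test; this is an
+ # illustrative sketch, not necessarily psutil's own implementation:
+ #
+ # import functools
+ #
+ # def memoize(fun):
+ # cache = {}
+ # @functools.wraps(fun) # preserves the docstring
+ # def wrapper(*args, **kwargs):
+ # key = (args, frozenset(kwargs.items()))
+ # try:
+ # return cache[key]
+ # except KeyError:
+ # ret = cache[key] = fun(*args, **kwargs)
+ # return ret
+ # wrapper.cache_clear = cache.clear
+ # return wrapper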
+
+ def test_isfile_strict(self):
+ from psutil._common import isfile_strict
+ this_file = os.path.abspath(__file__)
+ assert isfile_strict(this_file)
+ assert not isfile_strict(os.path.dirname(this_file))
+ with mock.patch('psutil._common.os.stat',
+ side_effect=OSError(errno.EPERM, "foo")):
+ self.assertRaises(OSError, isfile_strict, this_file)
+ with mock.patch('psutil._common.os.stat',
+ side_effect=OSError(errno.EACCES, "foo")):
+ self.assertRaises(OSError, isfile_strict, this_file)
+ with mock.patch('psutil._common.os.stat',
+ side_effect=OSError(errno.EINVAL, "foo")):
+ assert not isfile_strict(this_file)
+ with mock.patch('psutil._common.stat.S_ISREG', return_value=False):
+ assert not isfile_strict(this_file)
+
+ def test_serialization(self):
+ def check(ret):
+ if json is not None:
+ json.loads(json.dumps(ret))
+ a = pickle.dumps(ret)
+ b = pickle.loads(a)
+ self.assertEqual(ret, b)
+
+ check(psutil.Process().as_dict())
+ check(psutil.virtual_memory())
+ check(psutil.swap_memory())
+ check(psutil.cpu_times())
+ check(psutil.cpu_times_percent(interval=0))
+ check(psutil.net_io_counters())
+ if LINUX and not os.path.exists('/proc/diskstats'):
+ pass
+ else:
+ if not APPVEYOR:
+ check(psutil.disk_io_counters())
+ check(psutil.disk_partitions())
+ check(psutil.disk_usage(os.getcwd()))
+ check(psutil.users())
+
+ def test_setup_script(self):
+ here = os.path.abspath(os.path.dirname(__file__))
+ setup_py = os.path.realpath(os.path.join(here, '..', 'setup.py'))
+ module = imp.load_source('setup', setup_py)
+ self.assertRaises(SystemExit, module.setup)
+ self.assertEqual(module.get_version(), psutil.__version__)
+
+ def test_ad_on_process_creation(self):
+ # We are supposed to be able to instantiate Process even in case
+ # of zombie processes or access denied.
+ with mock.patch.object(psutil.Process, 'create_time',
+ side_effect=psutil.AccessDenied) as meth:
+ psutil.Process()
+ assert meth.called
+ with mock.patch.object(psutil.Process, 'create_time',
+ side_effect=psutil.ZombieProcess(1)) as meth:
+ psutil.Process()
+ assert meth.called
+ with mock.patch.object(psutil.Process, 'create_time',
+ side_effect=ValueError) as meth:
+ with self.assertRaises(ValueError):
+ psutil.Process()
+ assert meth.called
+
+
+# ===================================================================
+# --- Example script tests
+# ===================================================================
+
+class TestExampleScripts(unittest.TestCase):
+ """Tests for scripts in the examples directory."""
+
+ def assert_stdout(self, exe, args=None):
+ exe = os.path.join(EXAMPLES_DIR, exe)
+ if args:
+ exe = exe + ' ' + args
+ try:
+ out = sh(sys.executable + ' ' + exe).strip()
+ except RuntimeError as err:
+ if 'AccessDenied' in str(err):
+ return str(err)
+ else:
+ raise
+ assert out, out
+ return out
+
+ def assert_syntax(self, exe, args=None):
+ exe = os.path.join(EXAMPLES_DIR, exe)
+ with open(exe, 'r') as f:
+ src = f.read()
+ ast.parse(src)
+
+ def test_check_presence(self):
+ # make sure all example scripts have a test method defined
+ meths = dir(self)
+ for name in os.listdir(EXAMPLES_DIR):
+ if name.endswith('.py'):
+ if 'test_' + os.path.splitext(name)[0] not in meths:
+ # self.assert_stdout(name)
+ self.fail('no test defined for %r script'
+ % os.path.join(EXAMPLES_DIR, name))
+
+ def test_disk_usage(self):
+ self.assert_stdout('disk_usage.py')
+
+ def test_free(self):
+ self.assert_stdout('free.py')
+
+ def test_meminfo(self):
+ self.assert_stdout('meminfo.py')
+
+ def test_process_detail(self):
+ self.assert_stdout('process_detail.py')
+
+ @unittest.skipIf(APPVEYOR, "can't find users on Appveyor")
+ def test_who(self):
+ self.assert_stdout('who.py')
+
+ def test_ps(self):
+ self.assert_stdout('ps.py')
+
+ def test_pstree(self):
+ self.assert_stdout('pstree.py')
+
+ def test_netstat(self):
+ self.assert_stdout('netstat.py')
+
+ @unittest.skipIf(TRAVIS, "permission denied on travis")
+ def test_ifconfig(self):
+ self.assert_stdout('ifconfig.py')
+
+ def test_pmap(self):
+ self.assert_stdout('pmap.py', args=str(os.getpid()))
+
+ @unittest.skipIf(ast is None,
+ 'ast module not available on this python version')
+ def test_killall(self):
+ self.assert_syntax('killall.py')
+
+ @unittest.skipIf(ast is None,
+ 'ast module not available on this python version')
+ def test_nettop(self):
+ self.assert_syntax('nettop.py')
+
+ @unittest.skipIf(ast is None,
+ 'ast module not available on this python version')
+ def test_top(self):
+ self.assert_syntax('top.py')
+
+ @unittest.skipIf(ast is None,
+ 'ast module not available on this python version')
+ def test_iotop(self):
+ self.assert_syntax('iotop.py')
+
+ def test_pidof(self):
+ output = self.assert_stdout('pidof.py %s' % psutil.Process().name())
+ self.assertIn(str(os.getpid()), output)
+
+
+def main():
+ tests = []
+ test_suite = unittest.TestSuite()
+ tests.append(TestSystemAPIs)
+ tests.append(TestProcess)
+ tests.append(TestFetchAllProcesses)
+ tests.append(TestMisc)
+ tests.append(TestExampleScripts)
+ tests.append(LimitedUserTestCase)
+
+ if POSIX:
+ from _posix import PosixSpecificTestCase
+ tests.append(PosixSpecificTestCase)
+
+ # import the specific platform test suite
+ stc = None
+ if LINUX:
+ from _linux import LinuxSpecificTestCase as stc
+ elif WINDOWS:
+ from _windows import WindowsSpecificTestCase as stc
+ from _windows import TestDualProcessImplementation
+ tests.append(TestDualProcessImplementation)
+ elif OSX:
+ from _osx import OSXSpecificTestCase as stc
+ elif BSD:
+ from _bsd import BSDSpecificTestCase as stc
+ elif SUNOS:
+ from _sunos import SunOSSpecificTestCase as stc
+ if stc is not None:
+ tests.append(stc)
+
+ for test_class in tests:
+ test_suite.addTest(unittest.makeSuite(test_class))
+ result = unittest.TextTestRunner(verbosity=2).run(test_suite)
+ return result.wasSuccessful()
+
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1)
diff --git a/python/psutil/tox.ini b/python/psutil/tox.ini
new file mode 100644
index 000000000..d80dd174b
--- /dev/null
+++ b/python/psutil/tox.ini
@@ -0,0 +1,32 @@
+# Tox (http://tox.testrun.org/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions.
+# To use it run "pip install tox" and then run "tox" from this
+# directory.
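+# A single environment can be run in isolation, e.g. "tox -e py27"
+# (standard tox behavior, nothing project-specific).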
+
+[tox]
+envlist = py26, py27, py32, py33, py34
+
+[testenv]
+deps =
+ flake8
+ pytest
+ py26: ipaddress
+ py26: mock==1.0.1
+ py26: unittest2
+ py27: ipaddress
+ py27: mock
+ py32: ipaddress
+ py32: mock
+ py33: ipaddress
+
+setenv =
+ PYTHONPATH = {toxinidir}/test
+
+commands =
+ py.test {posargs}
+ git ls-files | grep \\.py$ | xargs flake8
+
+# suppress "WARNING: 'git' command found but not installed in testenv
+whitelist_externals = git
+usedevelop = True
diff --git a/python/py/AUTHORS b/python/py/AUTHORS
new file mode 100644
index 000000000..8c0cf9b71
--- /dev/null
+++ b/python/py/AUTHORS
@@ -0,0 +1,24 @@
+Holger Krekel, holger at merlinux eu
+Benjamin Peterson, benjamin at python org
+Ronny Pfannschmidt, Ronny.Pfannschmidt at gmx de
+Guido Wesdorp, johnny at johnnydebris net
+Samuele Pedroni, pedronis at openend se
+Carl Friedrich Bolz, cfbolz at gmx de
+Armin Rigo, arigo at tunes org
+Maciek Fijalkowski, fijal at genesilico pl
+Brian Dorsey, briandorsey at gmail com
+Floris Bruynooghe, flub at devork be
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Ross Lawley
+Ralf Schmitt
+Chris Lamb
+Harald Armin Massa
+Martijn Faassen
+Ian Bicking
+Jan Balster
+Grig Gheorghiu
+Bob Ippolito
+Christian Tismer
diff --git a/python/py/LICENSE b/python/py/LICENSE
new file mode 100644
index 000000000..31ecdfb1d
--- /dev/null
+++ b/python/py/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/python/py/MANIFEST.in b/python/py/MANIFEST.in
new file mode 100644
index 000000000..31fb010b4
--- /dev/null
+++ b/python/py/MANIFEST.in
@@ -0,0 +1,9 @@
+include CHANGELOG
+include AUTHORS
+include README.txt
+include setup.py
+include LICENSE
+include conftest.py
+include tox.ini
+graft doc
+graft testing
diff --git a/python/py/PKG-INFO b/python/py/PKG-INFO
new file mode 100644
index 000000000..30b14ae88
--- /dev/null
+++ b/python/py/PKG-INFO
@@ -0,0 +1,46 @@
+Metadata-Version: 1.1
+Name: py
+Version: 1.4.31
+Summary: library with cross-python path, ini-parsing, io, code, log facilities
+Home-page: http://pylib.readthedocs.org/
+Author: holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others
+Author-email: pytest-dev@python.org
+License: MIT license
+Description: .. image:: https://drone.io/bitbucket.org/pytest-dev/py/status.png
+ :target: https://drone.io/bitbucket.org/pytest-dev/py/latest
+ .. image:: https://pypip.in/v/py/badge.png
+ :target: https://pypi.python.org/pypi/py
+
+ The py lib is a Python development support library featuring
+ the following tools and modules:
+
+ * py.path: uniform local and svn path objects
+ * py.apipkg: explicit API control and lazy-importing
+ * py.iniconfig: easy parsing of .ini files
+ * py.code: dynamic code generation and introspection
+
+ NOTE: prior to the 1.4 release this distribution used to
+ contain py.test which is now its own package, see http://pytest.org
+
+ For questions and more information please visit http://pylib.readthedocs.org
+
+ Bugs and issues: http://bitbucket.org/pytest-dev/py/issues/
+
+ Authors: Holger Krekel and others, 2004-2015
+
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
diff --git a/python/py/README.txt b/python/py/README.txt
new file mode 100644
index 000000000..e327e9373
--- /dev/null
+++ b/python/py/README.txt
@@ -0,0 +1,21 @@
+.. image:: https://drone.io/bitbucket.org/pytest-dev/py/status.png
+ :target: https://drone.io/bitbucket.org/pytest-dev/py/latest
+.. image:: https://pypip.in/v/py/badge.png
+ :target: https://pypi.python.org/pypi/py
+
+The py lib is a Python development support library featuring
+the following tools and modules:
+
+* py.path: uniform local and svn path objects
+* py.apipkg: explicit API control and lazy-importing
+* py.iniconfig: easy parsing of .ini files
+* py.code: dynamic code generation and introspection
+
+NOTE: prior to the 1.4 release this distribution used to
+contain py.test which is now its own package, see http://pytest.org
+
+For questions and more information please visit http://pylib.readthedocs.org
+
+Bugs and issues: http://bitbucket.org/pytest-dev/py/issues/
+
+Authors: Holger Krekel and others, 2004-2015
diff --git a/python/py/py/__init__.py b/python/py/py/__init__.py
new file mode 100644
index 000000000..bdb9aa218
--- /dev/null
+++ b/python/py/py/__init__.py
@@ -0,0 +1,150 @@
+"""
+py.test and pylib: rapid testing and development utils
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes. The initpkg-dictionary below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.
+
+(c) Holger Krekel and others, 2004-2014
+"""
+__version__ = '1.4.31'
+
+from py import _apipkg
+
+# so that py.error.* instances are picklable
+import sys
+sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error')
+
+_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={
+ # access to all standard lib modules
+ 'std': '._std:std',
+ # access to all posix errno's as classes
+ 'error': '._error:error',
+
+ '_pydir' : '.__metainfo:pydir',
+ 'version': 'py:__version__', # backward compatibility
+
+ # pytest-2.0 has a flat namespace, we use alias modules
+ # to keep old references compatible
+ 'test' : 'pytest',
+ 'test.collect' : 'pytest',
+ 'test.cmdline' : 'pytest',
+
+ # hook into the top-level standard library
+ 'process' : {
+ '__doc__' : '._process:__doc__',
+ 'cmdexec' : '._process.cmdexec:cmdexec',
+ 'kill' : '._process.killproc:kill',
+ 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
+ },
+
+ 'apipkg' : {
+ 'initpkg' : '._apipkg:initpkg',
+ 'ApiModule' : '._apipkg:ApiModule',
+ },
+
+ 'iniconfig' : {
+ 'IniConfig' : '._iniconfig:IniConfig',
+ 'ParseError' : '._iniconfig:ParseError',
+ },
+
+ 'path' : {
+ '__doc__' : '._path:__doc__',
+ 'svnwc' : '._path.svnwc:SvnWCCommandPath',
+ 'svnurl' : '._path.svnurl:SvnCommandPath',
+ 'local' : '._path.local:LocalPath',
+ 'SvnAuth' : '._path.svnwc:SvnAuth',
+ },
+
+ # python inspection/code-generation API
+ 'code' : {
+ '__doc__' : '._code:__doc__',
+ 'compile' : '._code.source:compile_',
+ 'Source' : '._code.source:Source',
+ 'Code' : '._code.code:Code',
+ 'Frame' : '._code.code:Frame',
+ 'ExceptionInfo' : '._code.code:ExceptionInfo',
+ 'Traceback' : '._code.code:Traceback',
+ 'getfslineno' : '._code.source:getfslineno',
+ 'getrawcode' : '._code.code:getrawcode',
+ 'patch_builtins' : '._code.code:patch_builtins',
+ 'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
+ },
+
+ # backports and additions of builtins
+ 'builtin' : {
+ '__doc__' : '._builtin:__doc__',
+ 'enumerate' : '._builtin:enumerate',
+ 'reversed' : '._builtin:reversed',
+ 'sorted' : '._builtin:sorted',
+ 'any' : '._builtin:any',
+ 'all' : '._builtin:all',
+ 'set' : '._builtin:set',
+ 'frozenset' : '._builtin:frozenset',
+ 'BaseException' : '._builtin:BaseException',
+ 'GeneratorExit' : '._builtin:GeneratorExit',
+ '_sysex' : '._builtin:_sysex',
+ 'print_' : '._builtin:print_',
+ '_reraise' : '._builtin:_reraise',
+ '_tryimport' : '._builtin:_tryimport',
+ 'exec_' : '._builtin:exec_',
+ '_basestring' : '._builtin:_basestring',
+ '_totext' : '._builtin:_totext',
+ '_isbytes' : '._builtin:_isbytes',
+ '_istext' : '._builtin:_istext',
+ '_getimself' : '._builtin:_getimself',
+ '_getfuncdict' : '._builtin:_getfuncdict',
+ '_getcode' : '._builtin:_getcode',
+ 'builtins' : '._builtin:builtins',
+ 'execfile' : '._builtin:execfile',
+ 'callable' : '._builtin:callable',
+ 'bytes' : '._builtin:bytes',
+ 'text' : '._builtin:text',
+ },
+
+ # input-output helping
+ 'io' : {
+ '__doc__' : '._io:__doc__',
+ 'dupfile' : '._io.capture:dupfile',
+ 'TextIO' : '._io.capture:TextIO',
+ 'BytesIO' : '._io.capture:BytesIO',
+ 'FDCapture' : '._io.capture:FDCapture',
+ 'StdCapture' : '._io.capture:StdCapture',
+ 'StdCaptureFD' : '._io.capture:StdCaptureFD',
+ 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
+ 'ansi_print' : '._io.terminalwriter:ansi_print',
+ 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
+ 'saferepr' : '._io.saferepr:saferepr',
+ },
+
+ # small and mean xml/html generation
+ 'xml' : {
+ '__doc__' : '._xmlgen:__doc__',
+ 'html' : '._xmlgen:html',
+ 'Tag' : '._xmlgen:Tag',
+ 'raw' : '._xmlgen:raw',
+ 'Namespace' : '._xmlgen:Namespace',
+ 'escape' : '._xmlgen:escape',
+ },
+
+ 'log' : {
+ # logging API ('producers' and 'consumers' connected via keywords)
+ '__doc__' : '._log:__doc__',
+ '_apiwarn' : '._log.warning:_apiwarn',
+ 'Producer' : '._log.log:Producer',
+ 'setconsumer' : '._log.log:setconsumer',
+ '_setstate' : '._log.log:setstate',
+ '_getstate' : '._log.log:getstate',
+ 'Path' : '._log.log:Path',
+ 'STDOUT' : '._log.log:STDOUT',
+ 'STDERR' : '._log.log:STDERR',
+ 'Syslog' : '._log.log:Syslog',
+ },
+
+})
diff --git a/python/py/py/__metainfo.py b/python/py/py/__metainfo.py
new file mode 100644
index 000000000..12581eb7a
--- /dev/null
+++ b/python/py/py/__metainfo.py
@@ -0,0 +1,2 @@
+import py
+pydir = py.path.local(py.__file__).dirpath()
diff --git a/python/py/py/_apipkg.py b/python/py/py/_apipkg.py
new file mode 100644
index 000000000..a73b8f6d0
--- /dev/null
+++ b/python/py/py/_apipkg.py
@@ -0,0 +1,181 @@
+"""
+apipkg: control the exported namespace of a python package.
+
+see http://pypi.python.org/pypi/apipkg
+
+(c) holger krekel, 2009 - MIT license
+"""
+import os
+import sys
+from types import ModuleType
+
+__version__ = '1.3.dev'
+
+def _py_abspath(path):
+ """
+ special version of abspath
+ that will leave paths from jython jars alone
+ """
+ if path.startswith('__pyclasspath__'):
+ return path
+ else:
+ return os.path.abspath(path)
+
+def initpkg(pkgname, exportdefs, attr=dict()):
+ """ initialize given package from the export definitions. """
+ oldmod = sys.modules.get(pkgname)
+ d = {}
+ f = getattr(oldmod, '__file__', None)
+ if f:
+ f = _py_abspath(f)
+ d['__file__'] = f
+ if hasattr(oldmod, '__version__'):
+ d['__version__'] = oldmod.__version__
+ if hasattr(oldmod, '__loader__'):
+ d['__loader__'] = oldmod.__loader__
+ if hasattr(oldmod, '__path__'):
+ d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
+ if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
+ d['__doc__'] = oldmod.__doc__
+ d.update(attr)
+ if hasattr(oldmod, "__dict__"):
+ oldmod.__dict__.update(d)
+ mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
+ sys.modules[pkgname] = mod
+
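+# Illustrative sketch (hypothetical names): a package's __init__.py can
+# hand control to initpkg() and become a lazy ApiModule, e.g.:
+#
+# initpkg(__name__, exportdefs={
+# 'util': {'hello': '._impl:hello'}, # mypkg.util.hello -> mypkg._impl.hello
+# })
+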
+def importobj(modpath, attrname):
+ module = __import__(modpath, None, None, ['__doc__'])
+ if not attrname:
+ return module
+
+ retval = module
+ names = attrname.split(".")
+ for x in names:
+ retval = getattr(retval, x)
+ return retval
+
+class ApiModule(ModuleType):
+ def __docget(self):
+ try:
+ return self.__doc
+ except AttributeError:
+ if '__doc__' in self.__map__:
+ return self.__makeattr('__doc__')
+ def __docset(self, value):
+ self.__doc = value
+ __doc__ = property(__docget, __docset)
+
+ def __init__(self, name, importspec, implprefix=None, attr=None):
+ self.__name__ = name
+ self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
+ self.__map__ = {}
+ self.__implprefix__ = implprefix or name
+ if attr:
+ for name, val in attr.items():
+ # print "setting", self.__name__, name, val
+ setattr(self, name, val)
+ for name, importspec in importspec.items():
+ if isinstance(importspec, dict):
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = ApiModule(subname, importspec, implprefix)
+ sys.modules[subname] = apimod
+ setattr(self, name, apimod)
+ else:
+ parts = importspec.split(':')
+ modpath = parts.pop(0)
+ attrname = parts and parts[0] or ""
+ if modpath[0] == '.':
+ modpath = implprefix + modpath
+
+ if not attrname:
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = AliasModule(subname, modpath)
+ sys.modules[subname] = apimod
+ if '.' not in name:
+ setattr(self, name, apimod)
+ else:
+ self.__map__[name] = (modpath, attrname)
+
+ def __repr__(self):
+ l = []
+ if hasattr(self, '__version__'):
+ l.append("version=" + repr(self.__version__))
+ if hasattr(self, '__file__'):
+ l.append('from ' + repr(self.__file__))
+ if l:
+ return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
+ return '<ApiModule %r>' % (self.__name__,)
+
+ def __makeattr(self, name):
+ """lazily compute value for name or raise AttributeError if unknown."""
+ # print "makeattr", self.__name__, name
+ target = None
+ if '__onfirstaccess__' in self.__map__:
+ target = self.__map__.pop('__onfirstaccess__')
+ importobj(*target)()
+ try:
+ modpath, attrname = self.__map__[name]
+ except KeyError:
+ if target is not None and name != '__onfirstaccess__':
+ # retry, onfirstaccess might have set attrs
+ return getattr(self, name)
+ raise AttributeError(name)
+ else:
+ result = importobj(modpath, attrname)
+ setattr(self, name, result)
+ try:
+ del self.__map__[name]
+ except KeyError:
+ pass # in a recursive-import situation a double-del can happen
+ return result
+
+ __getattr__ = __makeattr
+
+ def __dict__(self):
+ # force all the content of the module to be loaded when __dict__ is read
+ dictdescr = ModuleType.__dict__['__dict__']
+ dict = dictdescr.__get__(self)
+ if dict is not None:
+ # attribute probe triggers __makeattr / __onfirstaccess__ hooks
+ hasattr(self, 'some')
+ for name in self.__all__:
+ try:
+ self.__makeattr(name)
+ except AttributeError:
+ pass
+ return dict
+ __dict__ = property(__dict__)
+
+
+def AliasModule(modname, modpath, attrname=None):
+ mod = []
+
+ def getmod():
+ if not mod:
+ x = importobj(modpath, None)
+ if attrname is not None:
+ x = getattr(x, attrname)
+ mod.append(x)
+ return mod[0]
+
+ class AliasModule(ModuleType):
+
+ def __repr__(self):
+ x = modpath
+ if attrname:
+ x += "." + attrname
+ return '<AliasModule %r for %r>' % (modname, x)
+
+ def __getattribute__(self, name):
+ try:
+ return getattr(getmod(), name)
+ except ImportError:
+ return None
+
+ def __setattr__(self, name, value):
+ setattr(getmod(), name, value)
+
+ def __delattr__(self, name):
+ delattr(getmod(), name)
+
+ return AliasModule(str(modname))
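+
+# Usage note: this is the mechanism used at the top of py/__init__.py, where
+# sys.modules['py.error'] is set to AliasModule("py.error", "py._error",
+# 'error') so that attribute access on py.error lazily resolves against
+# py._error.error (and py.error.* instances stay picklable).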
diff --git a/python/py/py/_builtin.py b/python/py/py/_builtin.py
new file mode 100644
index 000000000..52ee9d79c
--- /dev/null
+++ b/python/py/py/_builtin.py
@@ -0,0 +1,248 @@
+import sys
+
+try:
+ reversed = reversed
+except NameError:
+ def reversed(sequence):
+ """reversed(sequence) -> reverse iterator over values of the sequence
+
+ Return a reverse iterator
+ """
+ if hasattr(sequence, '__reversed__'):
+ return sequence.__reversed__()
+ if not hasattr(sequence, '__getitem__'):
+ raise TypeError("argument to reversed() must be a sequence")
+ return reversed_iterator(sequence)
+
+ class reversed_iterator(object):
+
+ def __init__(self, seq):
+ self.seq = seq
+ self.remaining = len(seq)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ i = self.remaining
+ if i > 0:
+ i -= 1
+ item = self.seq[i]
+ self.remaining = i
+ return item
+ raise StopIteration
+
+ def __length_hint__(self):
+ return self.remaining
+
+try:
+ any = any
+except NameError:
+ def any(iterable):
+ for x in iterable:
+ if x:
+ return True
+ return False
+
+try:
+ all = all
+except NameError:
+ def all(iterable):
+ for x in iterable:
+ if not x:
+ return False
+ return True
+
+try:
+ sorted = sorted
+except NameError:
+ builtin_cmp = cmp # need to use cmp as keyword arg
+
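+ # backport note: key= support is emulated via decorate-sort-undecorate
+ # (build (key, value) pairs, sort those, then strip the keys again)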
+ def sorted(iterable, cmp=None, key=None, reverse=0):
+ use_cmp = None
+ if key is not None:
+ if cmp is None:
+ def use_cmp(x, y):
+ return builtin_cmp(x[0], y[0])
+ else:
+ def use_cmp(x, y):
+ return cmp(x[0], y[0])
+ l = [(key(element), element) for element in iterable]
+ else:
+ if cmp is not None:
+ use_cmp = cmp
+ l = list(iterable)
+ if use_cmp is not None:
+ l.sort(use_cmp)
+ else:
+ l.sort()
+ if reverse:
+ l.reverse()
+ if key is not None:
+ return [element for (_, element) in l]
+ return l
+
+try:
+ set, frozenset = set, frozenset
+except NameError:
+ from sets import set, frozenset
+
+# pass through
+enumerate = enumerate
+
+try:
+ BaseException = BaseException
+except NameError:
+ BaseException = Exception
+
+try:
+ GeneratorExit = GeneratorExit
+except NameError:
+ class GeneratorExit(Exception):
+ """ This exception is never raised, it is there to make it possible to
+ write code compatible with CPython 2.5 even in lower CPython
+ versions."""
+ pass
+ GeneratorExit.__module__ = 'exceptions'
+
+_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return hasattr(obj, "__call__")
+
+if sys.version_info >= (3, 0):
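+ # hidden behind exec() so this module still parses on Python 2,
+ # where "print" is a statement rather than a function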
+ exec ("print_ = print ; exec_=exec")
+ import builtins
+
+ # some backward compatibility helpers
+ _basestring = str
+ def _totext(obj, encoding=None, errors=None):
+ if isinstance(obj, bytes):
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
+ elif not isinstance(obj, str):
+ obj = str(obj)
+ return obj
+
+ def _isbytes(x):
+ return isinstance(x, bytes)
+ def _istext(x):
+ return isinstance(x, str)
+
+ text = str
+ bytes = bytes
+
+
+ def _getimself(function):
+ return getattr(function, '__self__', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ return getattr(function, "__code__", None)
+
+ def execfile(fn, globs=None, locs=None):
+ if globs is None:
+ back = sys._getframe(1)
+ globs = back.f_globals
+ locs = back.f_locals
+ del back
+ elif locs is None:
+ locs = globs
+ fp = open(fn, "r")
+ try:
+ source = fp.read()
+ finally:
+ fp.close()
+ co = compile(source, fn, "exec", dont_inherit=True)
+ exec_(co, globs, locs)
+
+else:
+ import __builtin__ as builtins
+ _totext = unicode
+ _basestring = basestring
+ text = unicode
+ bytes = str
+ execfile = execfile
+ callable = callable
+ def _isbytes(x):
+ return isinstance(x, str)
+ def _istext(x):
+ return isinstance(x, unicode)
+
+ def _getimself(function):
+ return getattr(function, 'im_self', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ try:
+ return getattr(function, "__code__")
+ except AttributeError:
+ return getattr(function, "func_code", None)
+
+ def print_(*args, **kwargs):
+ """ minimal backport of py3k print statement. """
+ sep = ' '
+ if 'sep' in kwargs:
+ sep = kwargs.pop('sep')
+ end = '\n'
+ if 'end' in kwargs:
+ end = kwargs.pop('end')
+ file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+ if kwargs:
+ args = ", ".join([str(x) for x in kwargs])
+ raise TypeError("invalid keyword arguments: %s" % args)
+ at_start = True
+ for x in args:
+ if not at_start:
+ file.write(sep)
+ file.write(str(x))
+ at_start = False
+ file.write(end)
+
+ def exec_(obj, globals=None, locals=None):
+ """ minimal backport of py3k exec statement. """
+ __tracebackhide__ = True
+ if globals is None:
+ frame = sys._getframe(1)
+ globals = frame.f_globals
+ if locals is None:
+ locals = frame.f_locals
+ elif locals is None:
+ locals = globals
+ exec2(obj, globals, locals)
+
+if sys.version_info >= (3, 0):
+ def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ assert hasattr(val, '__traceback__')
+ raise cls.with_traceback(val, tb)
+else:
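+ # py2-only syntax (three-argument raise, exec statement) is kept in
+ # a string so the module still byte-compiles on Python 3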
+ exec ("""
+def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ raise cls, val, tb
+def exec2(obj, globals, locals):
+ __tracebackhide__ = True
+ exec obj in globals, locals
+""")
+
+def _tryimport(*names):
+ """ return the first successfully imported module. """
+ assert names
+ for name in names:
+ try:
+ __import__(name)
+ except ImportError:
+ excinfo = sys.exc_info()
+ else:
+ return sys.modules[name]
+ _reraise(*excinfo)
diff --git a/python/py/py/_code/__init__.py b/python/py/py/_code/__init__.py
new file mode 100644
index 000000000..f15acf851
--- /dev/null
+++ b/python/py/py/_code/__init__.py
@@ -0,0 +1 @@
+""" python inspection/code generation API """
diff --git a/python/py/py/_code/_assertionnew.py b/python/py/py/_code/_assertionnew.py
new file mode 100644
index 000000000..afb1b31ff
--- /dev/null
+++ b/python/py/py/_code/_assertionnew.py
@@ -0,0 +1,339 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
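+# Illustrative example: for a failing "assert x == 1" with x bound to 2 in
+# the frame, interpret() re-evaluates the sub-expressions and getfailure()
+# produces something along the lines of "assert 2 == 1".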
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to gleam useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
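+
+ # Illustrative note (editor's addition): for `assert 1 < 2 < 0` the loop
+ # above stops at the failing link, so the reported explanation is roughly
+ # "2 < 0" rather than the whole chain.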
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
diff --git a/python/py/py/_code/_assertionold.py b/python/py/py/_code/_assertionold.py
new file mode 100644
index 000000000..4e81fb3ef
--- /dev/null
+++ b/python/py/py/_code/_assertionold.py
@@ -0,0 +1,555 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call the class of the proxy the *view class* (a subclass of C, so of
+ View) and the class of x the *object class*.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then (..., object, ...).
+ If everything fails, the class C itself is used as the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
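+# Illustrative sketch (editor's note, not part of the upstream file): how
+# __view__ drives proxy-class selection. The class names are hypothetical.
+#
+#   class NodeView(View):
+#       pass
+#   class NameView(NodeView):
+#       __view__ = ast.Name
+#
+#   # NodeView(some_name_node) returns a NameView proxy, because NameView is
+#   # the most specific subclass whose __view__ mentions ast.Name.
+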
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, py.std.types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/python/py/py/_code/_py2traceback.py b/python/py/py/_code/_py2traceback.py
new file mode 100644
index 000000000..d65e27cb7
--- /dev/null
+++ b/python/py/py/_code/_py2traceback.py
@@ -0,0 +1,79 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubclass(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ lines.append(' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+ # non-space whitespace (like tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
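+# Illustrative example (editor's note, not part of the upstream file):
+#
+#   format_exception_only(ValueError, ValueError("bad"))
+#   # -> ["ValueError: bad\n"]
+#
+# For a SyntaxError the list additionally carries the offending file, line
+# and a caret marking the error position.
+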
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
diff --git a/python/py/py/_code/assertion.py b/python/py/py/_code/assertion.py
new file mode 100644
index 000000000..4ce80c75b
--- /dev/null
+++ b/python/py/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped; however, there are
+ three exceptions: \n{, \n} and \n~. The first two are intended to
+ cover nested explanations; see the function and attribute explanations
+ for examples (visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } or ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
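+# Illustrative example (editor's note, not part of the upstream file):
+#
+#   _format_explanation("False\n{False = f()\n}")
+#   # -> "False\n + where False = f()"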
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+ from py._code._assertionnew import interpret as reinterpret
+else:
+ reinterpret = reinterpret_old
+
diff --git a/python/py/py/_code/code.py b/python/py/py/_code/code.py
new file mode 100644
index 000000000..f14c562a2
--- /dev/null
+++ b/python/py/py/_code/code.py
@@ -0,0 +1,787 @@
+import py
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from py._code._py2traceback import format_exception_only
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = py.code.getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" %(rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a py.code.Source object for the full source file of the code
+ """
+ from py._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a py.code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ return py.code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+ # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = py.code.Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ if self.code.fullsource is None:
+ return py.code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+ 'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals)
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
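+# Illustrative usage sketch (editor's note, not part of the upstream file):
+# evaluating an expression with injected locals, as the assertion
+# reinterpreters do.
+#
+#   import sys
+#   f = Frame(sys._getframe())
+#   f.eval("__x + 1", __x=41)     # -> 42
+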
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ return py.code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ py.code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+ locals = property(getlocals, None, None, "locals of underlaying frame")
+
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed-in astcache to avoid reparsing AST trees
+ # during exception info printing
+ from py._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+ name = property(name, None, None, "co_name of underlaying code")
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+ by providing any combination of path, lineno and firstlineno, the
+ first frame of the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackEntries which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackItem where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (py.code.Traceback instance)
+ self.traceback = py.code.Traceback(self.tb)
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ py.code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
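+ # Illustrative usage (editor's note, not part of the upstream file):
+ #
+ #   try:
+ #       1 / 0
+ #   except ZeroDivisionError:
+ #       excinfo = ExceptionInfo()          # wraps sys.exc_info()
+ #       print(excinfo.getrepr(style="short"))
+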
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return unicode(loc)
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = py.code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # py.std.pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = py.code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if excinfo.errisinstance(RuntimeError):
+ if "maximum recursion depth exceeded" in str(excinfo.value):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ last_style = None
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+ # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
+ if compile:
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
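+# Illustrative usage sketch (editor's note, not part of the upstream file):
+#
+#   patch_builtins()        # install the reinterpreting AssertionError
+#   try:
+#       ...                 # failing asserts now explain themselves
+#   finally:
+#       unpatch_builtins()
+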
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
diff --git a/python/py/py/_code/source.py b/python/py/py/_code/source.py
new file mode 100644
index 000000000..3a648e635
--- /dev/null
+++ b/python/py/py/_code/source.py
@@ -0,0 +1,419 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given linenumber (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region containing the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings, etc.?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
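+ # Illustrative usage (editor's note, not part of the upstream file):
+ #
+ #   str(Source("  x = 1"))                     # -> "x = 1" (deindented)
+ #   Source("x = 1").putaround("try:", "except: pass").lines
+ #   # -> ["try:", "    x = 1", "except: pass"]
+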
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ if sys.version_info[0] >= 3:
+ # XXX py3's inspect.getsourcefile() checks for a module
+ # and a pep302 __loader__ ... we don't have a module
+ # at code compile-time so we need to fake it here
+ m = ModuleType("_pycodecompile_pseudo_module")
+ py.std.inspect.modulesbyfile[filename] = None
+ py.std.sys.modules[None] = m
+ m.__loader__ = 1
+ py.std.linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
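+
+# Editor's note: illustrative usage sketch, not part of the original sources.
+# compile_ behaves like the builtin compile() while caching the source text
+# for later retrieval:
+#
+#     >>> co = compile_("x = 21 * 2")
+#     >>> ns = {}
+#     >>> exec(co, ns)
+#     >>> ns["x"]
+#     42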
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ try:
+ code = py.code.Code(obj)
+ except TypeError:
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
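+
+# Editor's note: illustrative example, not part of the original sources.
+# For a plain function, getfslineno resolves the defining file and line:
+#
+#     >>> def f():
+#     ...     pass
+#     >>> fspath, lineno = getfslineno(f)
+#     >>> lineno >= 0
+#     True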
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = py.std.inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ obj = py.code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+        strsrc = "\"Buggy python version, consider upgrading; cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
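+
+# Editor's note: illustrative example, not part of the original sources.
+# The first non-blank line sets the offset; deeper lines keep their
+# extra indentation:
+#
+#     >>> deindent(["    def f():", "        pass"])
+#     ['def f():', '    pass']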
+
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
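+
+# Editor's note: illustrative example, not part of the original sources.
+# For the three statements of "x = 1 / if x: / y = 2" (0-based lines
+# 0, 1 and 2), asking for line 1 yields the slice (1, 2):
+#
+#     >>> import ast
+#     >>> node = ast.parse("x = 1\nif x:\n    y = 2\n")
+#     >>> get_statement_startend2(1, node)
+#     (1, 2)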
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ if sys.version_info < (2,7):
+ content += "\n"
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+        # by using the BlockFinder helper which inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+    """ return (start, end) tuple which spans the minimal
+    statement region containing the given lineno.
+    raise an IndexError if no such statement range can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/python/py/py/_error.py b/python/py/py/_error.py
new file mode 100644
index 000000000..550fb521a
--- /dev/null
+++ b/python/py/py/_error.py
@@ -0,0 +1,88 @@
+"""
+create errno-specific classes for IO or os calls.
+
+"""
+import sys, os, errno
+
+class Error(EnvironmentError):
+ def __repr__(self):
+ return "%s.%s %r: %s " %(self.__class__.__module__,
+ self.__class__.__name__,
+ self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ #repr(self.args)
+ )
+
+ def __str__(self):
+ s = "[%s]: %s" %(self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ )
+ return s
+
+_winerrnomap = {
+ 2: errno.ENOENT,
+ 3: errno.ENOENT,
+ 17: errno.EEXIST,
+    13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+ 22: errno.ENOTDIR,
+ 20: errno.ENOTDIR,
+ 267: errno.ENOTDIR,
+ 5: errno.EACCES, # anything better?
+}
+
+class ErrorMaker(object):
+    """ lazily provides Exception classes for each possible POSIX errno
+        (as defined by the 'errno' module). All such classes
+        subclass EnvironmentError.
+ """
+ Error = Error
+ _errno2class = {}
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError(name)
+ eno = getattr(errno, name)
+ cls = self._geterrnoclass(eno)
+ setattr(self, name, cls)
+ return cls
+
+ def _geterrnoclass(self, eno):
+ try:
+ return self._errno2class[eno]
+ except KeyError:
+ clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+ errorcls = type(Error)(clsname, (Error,),
+ {'__module__':'py.error',
+ '__doc__': os.strerror(eno)})
+ self._errno2class[eno] = errorcls
+ return errorcls
+
+ def checked_call(self, func, *args, **kwargs):
+ """ call a function and raise an errno-exception if applicable. """
+ __tracebackhide__ = True
+ try:
+ return func(*args, **kwargs)
+ except self.Error:
+ raise
+ except (OSError, EnvironmentError):
+ cls, value, tb = sys.exc_info()
+ if not hasattr(value, 'errno'):
+ raise
+ __tracebackhide__ = False
+ errno = value.errno
+ try:
+ if not isinstance(value, WindowsError):
+ raise NameError
+ except NameError:
+ # we are not on Windows, or we got a proper OSError
+ cls = self._geterrnoclass(errno)
+ else:
+ try:
+ cls = self._geterrnoclass(_winerrnomap[errno])
+ except KeyError:
+ raise value
+ raise cls("%s%r" % (func.__name__, args))
+ __tracebackhide__ = True
+
+
+error = ErrorMaker()
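+
+# Editor's note: illustrative usage, not part of the original file.
+# Attribute access lazily creates the errno class, and checked_call
+# maps raised OSErrors onto it:
+#
+#     >>> import os
+#     >>> try:
+#     ...     error.checked_call(os.stat, "/no/such/file")
+#     ... except error.ENOENT:
+#     ...     print("missing")
+#     missing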
diff --git a/python/py/py/_iniconfig.py b/python/py/py/_iniconfig.py
new file mode 100644
index 000000000..92b50bd85
--- /dev/null
+++ b/python/py/py/_iniconfig.py
@@ -0,0 +1,162 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
+"""
+__version__ = "0.2.dev2"
+
+__all__ = ['IniConfig', 'ParseError']
+
+COMMENTCHARS = "#;"
+
+class ParseError(Exception):
+ def __init__(self, path, lineno, msg):
+ Exception.__init__(self, path, lineno, msg)
+ self.path = path
+ self.lineno = lineno
+ self.msg = msg
+
+ def __str__(self):
+ return "%s:%s: %s" %(self.path, self.lineno+1, self.msg)
+
+class SectionWrapper(object):
+ def __init__(self, config, name):
+ self.config = config
+ self.name = name
+
+ def lineof(self, name):
+ return self.config.lineof(self.name, name)
+
+ def get(self, key, default=None, convert=str):
+ return self.config.get(self.name, key, convert=convert, default=default)
+
+ def __getitem__(self, key):
+ return self.config.sections[self.name][key]
+
+ def __iter__(self):
+ section = self.config.sections.get(self.name, [])
+ def lineof(key):
+ return self.config.lineof(self.name, key)
+ for name in sorted(section, key=lineof):
+ yield name
+
+ def items(self):
+ for name in self:
+ yield name, self[name]
+
+
+class IniConfig(object):
+ def __init__(self, path, data=None):
+ self.path = str(path) # convenience
+ if data is None:
+ f = open(self.path)
+ try:
+ tokens = self._parse(iter(f))
+ finally:
+ f.close()
+ else:
+ tokens = self._parse(data.splitlines(True))
+
+ self._sources = {}
+ self.sections = {}
+
+ for lineno, section, name, value in tokens:
+ if section is None:
+ self._raise(lineno, 'no section header defined')
+ self._sources[section, name] = lineno
+ if name is None:
+ if section in self.sections:
+ self._raise(lineno, 'duplicate section %r'%(section, ))
+ self.sections[section] = {}
+ else:
+ if name in self.sections[section]:
+ self._raise(lineno, 'duplicate name %r'%(name, ))
+ self.sections[section][name] = value
+
+ def _raise(self, lineno, msg):
+ raise ParseError(self.path, lineno, msg)
+
+ def _parse(self, line_iter):
+ result = []
+ section = None
+ for lineno, line in enumerate(line_iter):
+ name, data = self._parseline(line, lineno)
+ # new value
+ if name is not None and data is not None:
+ result.append((lineno, section, name, data))
+ # new section
+ elif name is not None and data is None:
+ if not name:
+ self._raise(lineno, 'empty section name')
+ section = name
+ result.append((lineno, section, None, None))
+ # continuation
+ elif name is None and data is not None:
+ if not result:
+ self._raise(lineno, 'unexpected value continuation')
+ last = result.pop()
+ last_name, last_data = last[-2:]
+ if last_name is None:
+ self._raise(lineno, 'unexpected value continuation')
+
+ if last_data:
+ data = '%s\n%s' % (last_data, data)
+ result.append(last[:-1] + (data,))
+ return result
+
+ def _parseline(self, line, lineno):
+ # blank lines
+ if iscommentline(line):
+ line = ""
+ else:
+ line = line.rstrip()
+ if not line:
+ return None, None
+ # section
+ if line[0] == '[':
+ realline = line
+ for c in COMMENTCHARS:
+ line = line.split(c)[0].rstrip()
+ if line[-1] == "]":
+ return line[1:-1], None
+ return None, realline.strip()
+ # value
+ elif not line[0].isspace():
+ try:
+ name, value = line.split('=', 1)
+ if ":" in name:
+ raise ValueError()
+ except ValueError:
+ try:
+ name, value = line.split(":", 1)
+ except ValueError:
+ self._raise(lineno, 'unexpected line: %r' % line)
+ return name.strip(), value.strip()
+ # continuation
+ else:
+ return None, line.strip()
+
+ def lineof(self, section, name=None):
+ lineno = self._sources.get((section, name))
+ if lineno is not None:
+ return lineno + 1
+
+ def get(self, section, name, default=None, convert=str):
+ try:
+ return convert(self.sections[section][name])
+ except KeyError:
+ return default
+
+ def __getitem__(self, name):
+ if name not in self.sections:
+ raise KeyError(name)
+ return SectionWrapper(self, name)
+
+ def __iter__(self):
+ for name in sorted(self.sections, key=self.lineof):
+ yield SectionWrapper(self, name)
+
+ def __contains__(self, arg):
+ return arg in self.sections
+
+def iscommentline(line):
+ c = line.lstrip()[:1]
+ return c in COMMENTCHARS
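+
+# Editor's note: illustrative usage, not part of the original file.
+# IniConfig accepts a path or an inline 'data' string:
+#
+#     >>> cfg = IniConfig("example.ini", data="[server]\nport = 8080\n")
+#     >>> cfg["server"]["port"]
+#     '8080'
+#     >>> cfg.get("server", "port", convert=int)
+#     8080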
diff --git a/python/py/py/_io/__init__.py b/python/py/py/_io/__init__.py
new file mode 100644
index 000000000..835f01f3a
--- /dev/null
+++ b/python/py/py/_io/__init__.py
@@ -0,0 +1 @@
+""" input/output helping """
diff --git a/python/py/py/_io/capture.py b/python/py/py/_io/capture.py
new file mode 100644
index 000000000..bc157ed97
--- /dev/null
+++ b/python/py/py/_io/capture.py
@@ -0,0 +1,371 @@
+import os
+import sys
+import py
+import tempfile
+
+try:
+ from io import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+if sys.version_info < (3,0):
+ class TextIO(StringIO):
+ def write(self, data):
+ if not isinstance(data, unicode):
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+ StringIO.write(self, data)
+else:
+ TextIO = StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ class BytesIO(StringIO):
+ def write(self, data):
+ if isinstance(data, unicode):
+ raise TypeError("not a byte value: %r" %(data,))
+ StringIO.write(self, data)
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+        """ save targetfd descriptor, and open a new
+            temporary file there. If no tmpfile is
+            specified a tempfile.TemporaryFile() will be opened
+            in text mode.
+ """
+ self.targetfd = targetfd
+ if tmpfile is None and targetfd != 0:
+ f = tempfile.TemporaryFile('wb+')
+ tmpfile = dupfile(f, encoding="UTF-8")
+ f.close()
+ self.tmpfile = tmpfile
+ self._savefd = os.dup(self.targetfd)
+ if patchsys:
+ self._oldsys = getattr(sys, patchsysdict[targetfd])
+ if now:
+ self.start()
+
+ def start(self):
+ try:
+ os.fstat(self._savefd)
+ except OSError:
+ raise ValueError("saved filedescriptor not valid, "
+ "did you call start() twice?")
+ if self.targetfd == 0 and not self.tmpfile:
+ fd = os.open(devnullpath, os.O_RDONLY)
+ os.dup2(fd, 0)
+ os.close(fd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+ else:
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+ def done(self):
+        """ unpatch and clean up; return self.tmpfile (a file object)
+ """
+ os.dup2(self._savefd, self.targetfd)
+ os.close(self._savefd)
+ if self.targetfd != 0:
+ self.tmpfile.seek(0)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+ return self.tmpfile
+
+ def writeorg(self, data):
+ """ write a string to the original file descriptor
+ """
+ tempfp = tempfile.TemporaryFile()
+ try:
+ os.dup2(self._savefd, tempfp.fileno())
+ tempfp.write(data)
+ finally:
+ tempfp.close()
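+
+# Editor's note: illustrative sketch, not part of the original file.
+# Capturing fd 1 also redirects writes that bypass sys.stdout:
+#
+#     >>> import os
+#     >>> cap = FDCapture(1)
+#     >>> _ = os.write(1, "hello".encode())
+#     >>> f = cap.done()
+#     >>> f.read()
+#     'hello'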
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
+ """ return a new open file object that's a duplicate of f
+
+ mode is duplicated if not given, 'buffering' controls
+ buffer size (defaulting to no buffering) and 'raising'
+ defines whether an exception is raised when an incompatible
+ file object is passed in (if raising is False, the file
+ object itself will be returned)
+ """
+ try:
+ fd = f.fileno()
+ mode = mode or f.mode
+ except AttributeError:
+ if raising:
+ raise
+ return f
+ newfd = os.dup(fd)
+ if sys.version_info >= (3,0):
+ if encoding is not None:
+ mode = mode.replace("b", "")
+ buffering = True
+ return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
+ else:
+ f = os.fdopen(newfd, mode, buffering)
+ if encoding is not None:
+ return EncodedFile(f, encoding)
+ return f
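+
+# Editor's note: illustrative usage, not part of the original file. The
+# duplicate owns a fresh file descriptor, so closing it leaves the
+# original stream usable:
+#
+#     >>> import sys
+#     >>> f = dupfile(sys.stdout, encoding="utf-8")
+#     >>> f.fileno() != sys.stdout.fileno()
+#     True
+#     >>> f.close()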
+
+class EncodedFile(object):
+ def __init__(self, _stream, encoding):
+ self._stream = _stream
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding)
+ elif isinstance(obj, str):
+ pass
+ else:
+ obj = str(obj)
+ self._stream.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+class Capture(object):
+ def call(cls, func, *args, **kwargs):
+        """ call the given function with args/kwargs and capture
+        output/error during its execution; return a (res, out, err)
+        tuple where out and err hold the captured output/error output.
+ """
+ so = cls()
+ try:
+ res = func(*args, **kwargs)
+ finally:
+ out, err = so.reset()
+ return res, out, err
+ call = classmethod(call)
+
+ def reset(self):
+ """ reset sys.stdout/stderr and return captured output as strings. """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already reset")
+ self._reset = True
+ outfile, errfile = self.done(save=False)
+ out, err = "", ""
+ if outfile and not outfile.closed:
+ out = outfile.read()
+ outfile.close()
+ if errfile and errfile != outfile and not errfile.closed:
+ err = errfile.read()
+ errfile.close()
+ return out, err
+
+ def suspend(self):
+ """ return current snapshot captures, memorize tempfiles. """
+ outerr = self.readouterr()
+ outfile, errfile = self.done()
+ return outerr
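+
+# Editor's note: illustrative usage of the Capture API, not part of the
+# original file; StdCapture (defined below) is the in-memory variant:
+#
+#     >>> res, out, err = StdCapture.call(lambda: sys.stdout.write("hi"))
+#     >>> out
+#     'hi'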
+
+
+class StdCaptureFD(Capture):
+    """ This class allows capturing writes to FD1 and FD2
+        and may connect a NULL file to FD0 (preventing
+        reads from sys.stdin). If any of the 0, 1, 2 file descriptors
+        is invalid it will not be captured.
+ """
+ def __init__(self, out=True, err=True, mixed=False,
+ in_=True, patchsys=True, now=True):
+ self._options = {
+ "out": out,
+ "err": err,
+ "mixed": mixed,
+ "in_": in_,
+ "patchsys": patchsys,
+ "now": now,
+ }
+ self._save()
+ if now:
+ self.startall()
+
+ def _save(self):
+ in_ = self._options['in_']
+ out = self._options['out']
+ err = self._options['err']
+ mixed = self._options['mixed']
+ patchsys = self._options['patchsys']
+ if in_:
+ try:
+ self.in_ = FDCapture(0, tmpfile=None, now=False,
+ patchsys=patchsys)
+ except OSError:
+ pass
+ if out:
+ tmpfile = None
+ if hasattr(out, 'write'):
+ tmpfile = out
+ try:
+ self.out = FDCapture(1, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['out'] = self.out.tmpfile
+ except OSError:
+ pass
+ if err:
+ if out and mixed:
+ tmpfile = self.out.tmpfile
+ elif hasattr(err, 'write'):
+ tmpfile = err
+ else:
+ tmpfile = None
+ try:
+ self.err = FDCapture(2, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['err'] = self.err.tmpfile
+ except OSError:
+ pass
+
+ def startall(self):
+ if hasattr(self, 'in_'):
+ self.in_.start()
+ if hasattr(self, 'out'):
+ self.out.start()
+ if hasattr(self, 'err'):
+ self.err.start()
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if hasattr(self, 'out') and not self.out.tmpfile.closed:
+ outfile = self.out.done()
+ if hasattr(self, 'err') and not self.err.tmpfile.closed:
+ errfile = self.err.done()
+ if hasattr(self, 'in_'):
+ tmpfile = self.in_.done()
+ if save:
+ self._save()
+ return outfile, errfile
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ if hasattr(self, "out"):
+ out = self._readsnapshot(self.out.tmpfile)
+ else:
+ out = ""
+ if hasattr(self, "err"):
+ err = self._readsnapshot(self.err.tmpfile)
+ else:
+ err = ""
+ return [out, err]
+
+ def _readsnapshot(self, f):
+ f.seek(0)
+ res = f.read()
+ enc = getattr(f, "encoding", None)
+ if enc:
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+
+class StdCapture(Capture):
+    """ This class allows capturing writes to sys.stdout|stderr "in-memory"
+        and will raise errors on attempts to read from sys.stdin. It only
+        modifies the sys.stdout|stderr|stdin attributes and does not
+        touch the underlying file descriptors (use StdCaptureFD for that).
+ """
+ def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
+ self._oldout = sys.stdout
+ self._olderr = sys.stderr
+ self._oldin = sys.stdin
+ if out and not hasattr(out, 'file'):
+ out = TextIO()
+ self.out = out
+ if err:
+ if mixed:
+ err = out
+ elif not hasattr(err, 'write'):
+ err = TextIO()
+ self.err = err
+ self.in_ = in_
+ if now:
+ self.startall()
+
+ def startall(self):
+ if self.out:
+ sys.stdout = self.out
+ if self.err:
+ sys.stderr = self.err
+ if self.in_:
+ sys.stdin = self.in_ = DontReadFromInput()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if self.out and not self.out.closed:
+ sys.stdout = self._oldout
+ outfile = self.out
+ outfile.seek(0)
+ if self.err and not self.err.closed:
+ sys.stderr = self._olderr
+ errfile = self.err
+ errfile.seek(0)
+ if self.in_:
+ sys.stdin = self._oldin
+ return outfile, errfile
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ out = err = ""
+ if self.out:
+ out = self.out.getvalue()
+ self.out.truncate(0)
+ self.out.seek(0)
+ if self.err:
+ err = self.err.getvalue()
+ self.err.truncate(0)
+ self.err.seek(0)
+ return out, err
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+ def isatty(self):
+ return False
+ def close(self):
+ pass
+
+try:
+ devnullpath = os.devnull
+except AttributeError:
+ if os.name == 'nt':
+ devnullpath = 'NUL'
+ else:
+ devnullpath = '/dev/null'
diff --git a/python/py/py/_io/saferepr.py b/python/py/py/_io/saferepr.py
new file mode 100644
index 000000000..8518290ef
--- /dev/null
+++ b/python/py/py/_io/saferepr.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+class SafeRepr(reprlib.Repr):
+ """ subclass of repr.Repr that limits the resulting size of repr()
+ and includes information on exceptions raised during the call.
+ """
+ def repr(self, x):
+ return self._callhelper(reprlib.Repr.repr, self, x)
+
+ def repr_unicode(self, x, level):
+ # Strictly speaking wrong on narrow builds
+ def repr(u):
+ if "'" not in u:
+ return py.builtin._totext("'%s'") % u
+ elif '"' not in u:
+ return py.builtin._totext('"%s"') % u
+ else:
+ return py.builtin._totext("'%s'") % u.replace("'", r"\'")
+ s = repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ return self._callhelper(builtin_repr, x)
+
+ def _callhelper(self, call, x, *args):
+ try:
+ # Try the vanilla repr and make sure that the result is a string
+ s = call(x, *args)
+ except py.builtin._sysex:
+ raise
+ except:
+ cls, e, tb = sys.exc_info()
+ exc_name = getattr(cls, '__name__', 'unknown')
+ try:
+ exc_info = str(e)
+ except py.builtin._sysex:
+ raise
+ except:
+ exc_info = 'unknown'
+ return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+ exc_name, exc_info, x.__class__.__name__, id(x))
+ else:
+ if len(s) > self.maxsize:
+ i = max(0, (self.maxsize-3)//2)
+ j = max(0, self.maxsize-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+def saferepr(obj, maxsize=240):
+ """ return a size-limited safe repr-string for the given object.
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info and 'saferepr' generally takes
+ care to never raise exceptions itself. This function is a wrapper
+ around the Repr/reprlib functionality of the standard 2.6 lib.
+ """
+ # review exception handling
+ srepr = SafeRepr()
+ srepr.maxstring = maxsize
+ srepr.maxsize = maxsize
+ srepr.maxother = 160
+ return srepr.repr(obj)
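+
+# Editor's note: illustrative example, not part of the original file.
+# A broken __repr__ is reported instead of propagating:
+#
+#     >>> class Boom(object):
+#     ...     def __repr__(self):
+#     ...         raise ValueError("nope")
+#     >>> "raised in repr()" in saferepr(Boom())
+#     True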
diff --git a/python/py/py/_io/terminalwriter.py b/python/py/py/_io/terminalwriter.py
new file mode 100644
index 000000000..cef1ff580
--- /dev/null
+++ b/python/py/py/_io/terminalwriter.py
@@ -0,0 +1,348 @@
+"""
+
+Helper functions for writing to terminals and files.
+
+"""
+
+
+import sys, os
+import py
+py3k = sys.version_info[0] >= 3
+from py.builtin import text, bytes
+
+win32_and_ctypes = False
+colorama = None
+if sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ try:
+ import ctypes
+ win32_and_ctypes = True
+ except ImportError:
+ pass
+
+
+def _getdimensions():
+    import termios, fcntl, struct
+    call = fcntl.ioctl(1, termios.TIOCGWINSZ, "\000" * 8)
+    height, width = struct.unpack("hhhh", call)[:2]
+ return height, width
+
+
+def get_terminal_width():
+ height = width = 0
+ try:
+ height, width = _getdimensions()
+ except py.builtin._sysex:
+ raise
+ except:
+ # pass to fallback below
+ pass
+
+ if width == 0:
+ # FALLBACK:
+ # * some exception happened
+ # * or this is emacs terminal which reports (0,0)
+ width = int(os.environ.get('COLUMNS', 80))
+
+    # XXX the windows getdimensions may be bogus, let's sanity-check a bit
+ if width < 40:
+ width = 80
+ return width
+
+terminal_width = get_terminal_width()
+
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+ if file is None:
+ file = sys.stderr
+ text = text.rstrip()
+ if esc and not isinstance(esc, tuple):
+ esc = (esc,)
+ if esc and sys.platform != "win32" and file.isatty():
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +
+ '\x1b[0m') # ANSI color code "reset"
+ if newline:
+ text += '\n'
+
+ if esc and win32_and_ctypes and file.isatty():
+ if 1 in esc:
+ bold = True
+ esc = tuple([x for x in esc if x != 1])
+ else:
+ bold = False
+ esctable = {() : FOREGROUND_WHITE, # normal
+ (31,): FOREGROUND_RED, # red
+ (32,): FOREGROUND_GREEN, # green
+ (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
+ (34,): FOREGROUND_BLUE, # blue
+ (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
+ (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+ (37,): FOREGROUND_WHITE, # white
+ (39,): FOREGROUND_WHITE, # reset
+ }
+ attr = esctable.get(esc, FOREGROUND_WHITE)
+ if bold:
+ attr |= FOREGROUND_INTENSITY
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ if file is sys.stderr:
+ handle = GetStdHandle(STD_ERROR_HANDLE)
+ else:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ attr |= (oldcolors & 0x0f0)
+ SetConsoleTextAttribute(handle, attr)
+ while len(text) > 32768:
+ file.write(text[:32768])
+ text = text[32768:]
+ if text:
+ file.write(text)
+ SetConsoleTextAttribute(handle, oldcolors)
+ else:
+ file.write(text)
+
+ if flush:
+ file.flush()
+
+def should_do_markup(file):
+ if os.environ.get('PY_COLORS') == '1':
+ return True
+ if os.environ.get('PY_COLORS') == '0':
+ return False
+ return hasattr(file, 'isatty') and file.isatty() \
+ and os.environ.get('TERM') != 'dumb' \
+ and not (sys.platform.startswith('java') and os._name == 'nt')
+
+class TerminalWriter(object):
+ _esctable = dict(black=30, red=31, green=32, yellow=33,
+ blue=34, purple=35, cyan=36, white=37,
+ Black=40, Red=41, Green=42, Yellow=43,
+ Blue=44, Purple=45, Cyan=46, White=47,
+ bold=1, light=2, blink=5, invert=7)
+
+ # XXX deprecate stringio argument
+ def __init__(self, file=None, stringio=False, encoding=None):
+ if file is None:
+ if stringio:
+ self.stringio = file = py.io.TextIO()
+ else:
+ file = py.std.sys.stdout
+ elif py.builtin.callable(file) and not (
+ hasattr(file, "write") and hasattr(file, "flush")):
+ file = WriteFile(file, encoding=encoding)
+ if hasattr(file, "isatty") and file.isatty() and colorama:
+ file = colorama.AnsiToWin32(file).stream
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
+ self._file = file
+ self.fullwidth = get_terminal_width()
+ self.hasmarkup = should_do_markup(file)
+ self._lastlen = 0
+
+ def _escaped(self, text, esc):
+ if esc and self.hasmarkup:
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +'\x1b[0m')
+ return text
+
+ def markup(self, text, **kw):
+ esc = []
+ for name in kw:
+ if name not in self._esctable:
+ raise ValueError("unknown markup: %r" %(name,))
+ if kw[name]:
+ esc.append(self._esctable[name])
+ return self._escaped(text, tuple(esc))
+
+ def sep(self, sepchar, title=None, fullwidth=None, **kw):
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # the goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth
+ if sys.platform == "win32":
+ # if we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width)
+ # so let's be defensive to avoid empty lines in the output
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ N = (fullwidth - len(title) - 2) // (2*len(sepchar))
+ fill = sepchar * N
+ line = "%s %s %s" % (fill, title, fill)
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # in some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **kw)
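+
+    # Editor's note: worked example, not from the original source. With
+    # fullwidth=20, sepchar="=" and title="abc" (non-win32):
+    # N = (20 - 3 - 2) // 2 = 7, so line = "======= abc =======" (19
+    # chars), and the final check appends one more "=" to reach 20.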
+
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+ if self.hasmarkup and kw:
+ markupmsg = self.markup(msg, **kw)
+ else:
+ markupmsg = msg
+ write_out(self._file, markupmsg)
+
+ def line(self, s='', **kw):
+ self.write(s, **kw)
+ self._checkfill(s)
+ self.write('\n')
+
+ def reline(self, line, **kw):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ self.write(line, **kw)
+ self._checkfill(line)
+ self.write('\r')
+ self._lastlen = len(line)
+
+ def _checkfill(self, line):
+ diff2last = self._lastlen - len(line)
+ if diff2last > 0:
+ self.write(" " * diff2last)
+
+class Win32ConsoleWriter(TerminalWriter):
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+ oldcolors = None
+ if self.hasmarkup and kw:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ default_bg = oldcolors & 0x00F0
+ attr = default_bg
+ if kw.pop('bold', False):
+ attr |= FOREGROUND_INTENSITY
+
+ if kw.pop('red', False):
+ attr |= FOREGROUND_RED
+ elif kw.pop('blue', False):
+ attr |= FOREGROUND_BLUE
+ elif kw.pop('green', False):
+ attr |= FOREGROUND_GREEN
+ elif kw.pop('yellow', False):
+ attr |= FOREGROUND_GREEN|FOREGROUND_RED
+ else:
+ attr |= oldcolors & 0x0007
+
+ SetConsoleTextAttribute(handle, attr)
+ write_out(self._file, msg)
+ if oldcolors:
+ SetConsoleTextAttribute(handle, oldcolors)
+
+class WriteFile(object):
+ def __init__(self, writemethod, encoding=None):
+ self.encoding = encoding
+ self._writemethod = writemethod
+
+ def write(self, data):
+ if self.encoding:
+ data = data.encode(self.encoding, "replace")
+ self._writemethod(data)
+
+ def flush(self):
+ return
+
+
+if win32_and_ctypes:
+ TerminalWriter = Win32ConsoleWriter
+ import ctypes
+ from ctypes import wintypes
+
+ # ctypes access to the Windows console
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ FOREGROUND_BLACK = 0x0000 # black text
+ FOREGROUND_BLUE = 0x0001 # text color contains blue.
+ FOREGROUND_GREEN = 0x0002 # text color contains green.
+ FOREGROUND_RED = 0x0004 # text color contains red.
+ FOREGROUND_WHITE = 0x0007
+ FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+ BACKGROUND_BLACK = 0x0000 # background color black
+ BACKGROUND_BLUE = 0x0010 # background color contains blue.
+ BACKGROUND_GREEN = 0x0020 # background color contains green.
+ BACKGROUND_RED = 0x0040 # background color contains red.
+ BACKGROUND_WHITE = 0x0070
+ BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+ SHORT = ctypes.c_short
+ class COORD(ctypes.Structure):
+ _fields_ = [('X', SHORT),
+ ('Y', SHORT)]
+ class SMALL_RECT(ctypes.Structure):
+ _fields_ = [('Left', SHORT),
+ ('Top', SHORT),
+ ('Right', SHORT),
+ ('Bottom', SHORT)]
+ class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+ _fields_ = [('dwSize', COORD),
+ ('dwCursorPosition', COORD),
+ ('wAttributes', wintypes.WORD),
+ ('srWindow', SMALL_RECT),
+ ('dwMaximumWindowSize', COORD)]
+
+ _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [wintypes.DWORD]
+ _GetStdHandle.restype = wintypes.HANDLE
+ def GetStdHandle(kind):
+ return _GetStdHandle(kind)
+
+ SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
+ SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
+ SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _GetConsoleScreenBufferInfo = \
+ ctypes.windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+ def GetConsoleInfo(handle):
+ info = CONSOLE_SCREEN_BUFFER_INFO()
+ _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
+ return info
+
+ def _getdimensions():
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ info = GetConsoleInfo(handle)
+        # Subtract one from the width, otherwise the cursor wraps
+ # and the ending \n causes an empty line to display.
+ return info.dwSize.Y, info.dwSize.X - 1
+
+def write_out(fil, msg):
+ # XXX sometimes "msg" is of type bytes, sometimes text which
+ # complicates the situation. Should we try to enforce unicode?
+ try:
+ # on py27 and above writing out to sys.stdout with an encoding
+ # should usually work for unicode messages (if the encoding is
+ # capable of it)
+ fil.write(msg)
+ except UnicodeEncodeError:
+ # on py26 it might not work because stdout expects bytes
+ if fil.encoding:
+ try:
+ fil.write(msg.encode(fil.encoding))
+ except UnicodeEncodeError:
+ # it might still fail if the encoding is not capable
+ pass
+ else:
+ fil.flush()
+ return
+ # fallback: escape all unicode characters
+ msg = msg.encode("unicode-escape").decode("ascii")
+ fil.write(msg)
+ fil.flush()
diff --git a/python/py/py/_log/__init__.py b/python/py/py/_log/__init__.py
new file mode 100644
index 000000000..fad62e960
--- /dev/null
+++ b/python/py/py/_log/__init__.py
@@ -0,0 +1,2 @@
+""" logging API ('producers' and 'consumers' connected via keywords) """
+
diff --git a/python/py/py/_log/log.py b/python/py/py/_log/log.py
new file mode 100644
index 000000000..ce47e8c75
--- /dev/null
+++ b/python/py/py/_log/log.py
@@ -0,0 +1,186 @@
+"""
+basic logging functionality based on a producer/consumer scheme.
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+ log = Logger(
+ info=py.log.STDOUT,
+ debug=py.log.STDOUT,
+ command=None)
+ log.info("hello", "world")
+ log.command("hello", "world")
+
+ log = Logger(info=Logger(something=...),
+ debug=py.log.STDOUT,
+ command=None)
+"""
+import py, sys
+
+class Message(object):
+ def __init__(self, keywords, args):
+ self.keywords = keywords
+ self.args = args
+
+ def content(self):
+ return " ".join(map(str, self.args))
+
+ def prefix(self):
+ return "[%s] " % (":".join(self.keywords))
+
+ def __str__(self):
+ return self.prefix() + self.content()
+
+
+class Producer(object):
+ """ (deprecated) Log producer API which sends messages to be logged
+ to a 'consumer' object, which then prints them to stdout,
+ stderr, files, etc. Used extensively by PyPy-1.1.
+ """
+
+ Message = Message # to allow later customization
+ keywords2consumer = {}
+
+ def __init__(self, keywords, keywordmapper=None, **kw):
+ if hasattr(keywords, 'split'):
+ keywords = tuple(keywords.split())
+ self._keywords = keywords
+ if keywordmapper is None:
+ keywordmapper = default_keywordmapper
+ self._keywordmapper = keywordmapper
+
+ def __repr__(self):
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+ def __getattr__(self, name):
+ if '_' in name:
+ raise AttributeError(name)
+ producer = self.__class__(self._keywords + (name,))
+ setattr(self, name, producer)
+ return producer
+
+ def __call__(self, *args):
+ """ write a message to the appropriate consumer(s) """
+ func = self._keywordmapper.getconsumer(self._keywords)
+ if func is not None:
+ func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+ def __init__(self):
+ self.keywords2consumer = {}
+
+ def getstate(self):
+ return self.keywords2consumer.copy()
+ def setstate(self, state):
+ self.keywords2consumer.clear()
+ self.keywords2consumer.update(state)
+
+ def getconsumer(self, keywords):
+ """ return a consumer matching the given keywords.
+
+        tries to find the most suitable consumer by walking the list of
+        keywords from the back; the first consumer matching a keyword
+        prefix is returned (falling back to the 'default' consumer)
+ """
+ for i in range(len(keywords), 0, -1):
+ try:
+ return self.keywords2consumer[keywords[:i]]
+ except KeyError:
+ continue
+ return self.keywords2consumer.get('default', default_consumer)
+
+ def setconsumer(self, keywords, consumer):
+ """ set a consumer for a set of keywords. """
+ # normalize to tuples
+ if isinstance(keywords, str):
+ keywords = tuple(filter(None, keywords.split()))
+ elif hasattr(keywords, '_keywords'):
+ keywords = keywords._keywords
+ elif not isinstance(keywords, tuple):
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
+ if consumer is not None and not py.builtin.callable(consumer):
+ if not hasattr(consumer, 'write'):
+ raise TypeError(
+ "%r should be None, callable or file-like" % (consumer,))
+ consumer = File(consumer)
+ self.keywords2consumer[keywords] = consumer
+
+def default_consumer(msg):
+ """ the default consumer, prints the message to stdout (using 'print') """
+ sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+def setconsumer(keywords, consumer):
+ default_keywordmapper.setconsumer(keywords, consumer)
+
+def setstate(state):
+ default_keywordmapper.setstate(state)
+def getstate():
+ return default_keywordmapper.getstate()
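+
+# Editor's note: illustrative producer/consumer wiring, not part of the
+# original file, using the helpers above and the STDOUT consumer below:
+#
+#     >>> log = Producer("app")
+#     >>> setconsumer("app", STDOUT)
+#     >>> log("hello", "world")
+#     [app] hello world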
+
+#
+# Consumers
+#
+
+class File(object):
+ """ log consumer wrapping a file(-like) object """
+ def __init__(self, f):
+ assert hasattr(f, 'write')
+ #assert isinstance(f, file) or not hasattr(f, 'open')
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ self._file.write(str(msg) + "\n")
+ if hasattr(self._file, 'flush'):
+ self._file.flush()
+
+class Path(object):
+ """ log consumer that opens and writes to a Path """
+ def __init__(self, filename, append=False,
+ delayed_create=False, buffering=False):
+ self._append = append
+ self._filename = str(filename)
+ self._buffering = buffering
+ if not delayed_create:
+ self._openfile()
+
+ def _openfile(self):
+ mode = self._append and 'a' or 'w'
+ f = open(self._filename, mode)
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ if not hasattr(self, "_file"):
+ self._openfile()
+ self._file.write(str(msg) + "\n")
+ if not self._buffering:
+ self._file.flush()
+
+def STDOUT(msg):
+ """ consumer that writes to sys.stdout """
+ sys.stdout.write(str(msg)+"\n")
+
+def STDERR(msg):
+ """ consumer that writes to sys.stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+class Syslog:
+ """ consumer that writes to the syslog daemon """
+
+ def __init__(self, priority = None):
+ if priority is None:
+ priority = self.LOG_INFO
+ self.priority = priority
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ py.std.syslog.syslog(self.priority, str(msg))
+
+for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+ _prio = "LOG_" + _prio
+ try:
+ setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
+ except AttributeError:
+ pass
diff --git a/python/py/py/_log/warning.py b/python/py/py/_log/warning.py
new file mode 100644
index 000000000..722e31e91
--- /dev/null
+++ b/python/py/py/_log/warning.py
@@ -0,0 +1,76 @@
+import py, sys
+
+class DeprecationWarning(DeprecationWarning):
+ def __init__(self, msg, path, lineno):
+ self.msg = msg
+ self.path = path
+ self.lineno = lineno
+ def __repr__(self):
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+ def __str__(self):
+ return self.msg
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
+ # Get context information
+ if isinstance(stacklevel, str):
+ frame = sys._getframe(1)
+ level = 1
+ found = frame.f_code.co_filename.find(stacklevel) != -1
+ while frame:
+ co = frame.f_code
+ if co.co_filename.find(stacklevel) == -1:
+ if found:
+ stacklevel = level
+ break
+ else:
+ found = True
+ level += 1
+ frame = frame.f_back
+ else:
+ stacklevel = 1
+ msg = "%s (since version %s)" %(msg, startversion)
+ warn(msg, stacklevel=stacklevel+1, function=function)
+
+def warn(msg, stacklevel=1, function=None):
+ if function is not None:
+ filename = py.std.inspect.getfile(function)
+ lineno = py.code.getrawcode(function).co_firstlineno
+ else:
+ try:
+ caller = sys._getframe(stacklevel)
+ except ValueError:
+ globals = sys.__dict__
+ lineno = 1
+ else:
+ globals = caller.f_globals
+ lineno = caller.f_lineno
+ if '__name__' in globals:
+ module = globals['__name__']
+ else:
+ module = "<string>"
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+ filename = filename[:-1]
+ elif fnl.endswith("$py.class"):
+ filename = filename.replace('$py.class', '.py')
+ else:
+ if module == "__main__":
+ try:
+ filename = sys.argv[0]
+ except AttributeError:
+ # embedded interpreters don't have sys.argv, see bug #839151
+ filename = '__main__'
+ if not filename:
+ filename = module
+ path = py.path.local(filename)
+ warning = DeprecationWarning(msg, path, lineno)
+ py.std.warnings.warn_explicit(warning, category=Warning,
+ filename=str(warning.path),
+ lineno=warning.lineno,
+ registry=py.std.warnings.__dict__.setdefault(
+ "__warningsregistry__", {})
+ )
+
diff --git a/python/py/py/_path/__init__.py b/python/py/py/_path/__init__.py
new file mode 100644
index 000000000..51f3246f8
--- /dev/null
+++ b/python/py/py/_path/__init__.py
@@ -0,0 +1 @@
+""" unified file system api """
diff --git a/python/py/py/_path/cacheutil.py b/python/py/py/_path/cacheutil.py
new file mode 100644
index 000000000..992250475
--- /dev/null
+++ b/python/py/py/_path/cacheutil.py
@@ -0,0 +1,114 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+ getorbuild(key, builder)
+ delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+ def __init__(self, maxentries=128):
+ self.maxentries = maxentries
+ self.prunenum = int(maxentries - maxentries/8)
+ self._dict = {}
+
+ def clear(self):
+ self._dict.clear()
+
+ def _getentry(self, key):
+ return self._dict[key]
+
+ def _putentry(self, key, entry):
+ self._prunelowestweight()
+ self._dict[key] = entry
+
+ def delentry(self, key, raising=False):
+ try:
+ del self._dict[key]
+ except KeyError:
+ if raising:
+ raise
+
+ def getorbuild(self, key, builder):
+ try:
+ entry = self._getentry(key)
+ except KeyError:
+ entry = self._build(key, builder)
+ self._putentry(key, entry)
+ return entry.value
+
+ def _prunelowestweight(self):
+ """ prune out entries with lowest weight. """
+ numentries = len(self._dict)
+ if numentries >= self.maxentries:
+ # evict according to entry's weight
+ items = [(entry.weight, key)
+ for key, entry in self._dict.items()]
+ items.sort()
+ index = numentries - self.prunenum
+ if index > 0:
+ for weight, key in items[:index]:
+ # in MT situations the element might be gone
+ self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+ """ A BuildTime/Access-counting cache implementation.
+ the weight of a value is computed as the product of
+
+ num-accesses-of-a-value * time-to-build-the-value
+
+ The values with the least such weights are evicted
+    when the cache maxentries threshold is exceeded.
+ For implementation flexibility more than one object
+ might be evicted at a time.
+ """
+ # time function to use for measuring build-times
+
+ def _build(self, key, builder):
+ start = gettime()
+ val = builder()
+ end = gettime()
+ return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+ def __init__(self, value, oneweight):
+ self._value = value
+ self.weight = self._oneweight = oneweight
+
+ def value(self):
+ self.weight += self._oneweight
+ return self._value
+ value = property(value)
+
+class AgingCache(BasicCache):
+ """ This cache prunes out cache entries that are too old.
+ """
+ def __init__(self, maxentries=128, maxseconds=10.0):
+ super(AgingCache, self).__init__(maxentries)
+ self.maxseconds = maxseconds
+
+ def _getentry(self, key):
+ entry = self._dict[key]
+ if entry.isexpired():
+ self.delentry(key)
+ raise KeyError(key)
+ return entry
+
+ def _build(self, key, builder):
+ val = builder()
+ entry = AgingEntry(val, gettime() + self.maxseconds)
+ return entry
+
+class AgingEntry(object):
+ def __init__(self, value, expirationtime):
+ self.value = value
+ self.weight = expirationtime
+
+ def isexpired(self):
+ t = gettime()
+ return t >= self.weight
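+
+# Editor's note: illustrative usage, not part of the original file.
+# The builder only runs on a cache miss:
+#
+#     >>> cache = BuildcostAccessCache(maxentries=8)
+#     >>> cache.getorbuild("answer", lambda: 42)
+#     42
+#     >>> cache.getorbuild("answer", lambda: 0)  # cached; builder not called
+#     42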
diff --git a/python/py/py/_path/common.py b/python/py/py/_path/common.py
new file mode 100644
index 000000000..d407434cb
--- /dev/null
+++ b/python/py/py/_path/common.py
@@ -0,0 +1,403 @@
+"""
+"""
+import os, sys, posixpath
+import py
+
+# Moved from local.py.
+iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
+
+class Checkers:
+ _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+ def __init__(self, path):
+ self.path = path
+
+ def dir(self):
+ raise NotImplementedError
+
+ def file(self):
+ raise NotImplementedError
+
+ def dotfile(self):
+ return self.path.basename.startswith('.')
+
+ def ext(self, arg):
+ if not arg.startswith('.'):
+ arg = '.' + arg
+ return self.path.ext == arg
+
+ def exists(self):
+ raise NotImplementedError
+
+ def basename(self, arg):
+ return self.path.basename == arg
+
+ def basestarts(self, arg):
+ return self.path.basename.startswith(arg)
+
+ def relto(self, arg):
+ return self.path.relto(arg)
+
+ def fnmatch(self, arg):
+ return self.path.fnmatch(arg)
+
+ def endswith(self, arg):
+ return str(self.path).endswith(arg)
+
+ def _evaluate(self, kw):
+ for name, value in kw.items():
+ invert = False
+ meth = None
+ try:
+ meth = getattr(self, name)
+ except AttributeError:
+ if name[:3] == 'not':
+ invert = True
+ try:
+ meth = getattr(self, name[3:])
+ except AttributeError:
+ pass
+ if meth is None:
+ raise TypeError(
+ "no %r checker available for %r" % (name, self.path))
+ try:
+ if py.code.getrawcode(meth).co_argcount > 1:
+ if (not meth(value)) ^ invert:
+ return False
+ else:
+ if bool(value) ^ bool(meth()) ^ invert:
+ return False
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but its kind of necessary since ENOMEDIUM
+ # is not accessible in python
+ for name in self._depend_on_existence:
+ if name in kw:
+ if kw.get(name):
+ return False
+ name = 'not' + name
+ if name in kw:
+ if not kw.get(name):
+ return False
+ return True
+
+class NeverRaised(Exception):
+ pass
+
+class PathBase(object):
+ """ shared implementation for filesystem path objects."""
+ Checkers = Checkers
+
+ def __div__(self, other):
+ return self.join(str(other))
+ __truediv__ = __div__ # py3k
+
+ def basename(self):
+ """ basename part of path. """
+ return self._getbyspec('basename')[0]
+ basename = property(basename, None, None, basename.__doc__)
+
+ def dirname(self):
+ """ dirname part of path. """
+ return self._getbyspec('dirname')[0]
+ dirname = property(dirname, None, None, dirname.__doc__)
+
+ def purebasename(self):
+ """ pure base name of the path."""
+ return self._getbyspec('purebasename')[0]
+ purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+ def ext(self):
+ """ extension of the path (including the '.')."""
+ return self._getbyspec('ext')[0]
+ ext = property(ext, None, None, ext.__doc__)
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ return self.new(basename='').join(*args, **kwargs)
+
+ def read_binary(self):
+ """ read and return a bytestring from reading the path. """
+ with self.open('rb') as f:
+ return f.read()
+
+ def read_text(self, encoding):
+ """ read and return a Unicode string from reading the path. """
+ with self.open("r", encoding=encoding) as f:
+ return f.read()
+
+
+ def read(self, mode='r'):
+ """ read and return a bytestring from reading the path. """
+ with self.open(mode) as f:
+ return f.read()
+
+ def readlines(self, cr=1):
+        """ read and return a list of lines from the path. if cr is False,
+        the newline will be removed from the end of each line. """
+ if not cr:
+ content = self.read('rU')
+ return content.split('\n')
+ else:
+ f = self.open('rU')
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ def load(self):
+ """ (deprecated) return object unpickled from self.read() """
+ f = self.open('rb')
+ try:
+ return py.error.checked_call(py.std.pickle.load, f)
+ finally:
+ f.close()
+
+ def move(self, target):
+ """ move this path to target. """
+ if target.relto(self):
+ raise py.error.EINVAL(target,
+ "cannot move path into a subdirectory of itself")
+ try:
+ self.rename(target)
+ except py.error.EXDEV: # invalid cross-device link
+ self.copy(target)
+ self.remove()
+
+ def __repr__(self):
+ """ return a string representation of this path. """
+ return repr(str(self))
+
+ def check(self, **kw):
+ """ check a path for existence and properties.
+
+ Without arguments, return True if the path exists, otherwise False.
+
+ valid checkers::
+
+ file=1 # is a file
+ file=0 # is not a file (may not even exist)
+ dir=1 # is a dir
+ link=1 # is a link
+ exists=1 # exists
+
+ You can specify multiple checker definitions, for example::
+
+ path.check(file=1, link=1) # a link pointing to a file
+ """
+ if not kw:
+ kw = {'exists' : 1}
+ return self.Checkers(self)._evaluate(kw)
+
+ def fnmatch(self, pattern):
+ """return true if the basename/fullname matches the glob-'pattern'.
+
+ valid pattern characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ If the pattern contains a path-separator then the full path
+ is used for pattern matching and a '*' is prepended to the
+ pattern.
+
+ if the pattern doesn't contain a path-separator the pattern
+ is only matched against the basename.
+ """
+ return FNMatcher(pattern)(self)
+
+ def relto(self, relpath):
+ """ return a string which is the relative part of the path
+ to the given 'relpath'.
+ """
+ if not isinstance(relpath, (str, PathBase)):
+ raise TypeError("%r: not a string or path object" %(relpath,))
+ strrelpath = str(relpath)
+ if strrelpath and strrelpath[-1] != self.sep:
+ strrelpath += self.sep
+ #assert strrelpath[-1] == self.sep
+ #assert strrelpath[-2] != self.sep
+ strself = self.strpath
+ if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
+ if os.path.normcase(strself).startswith(
+ os.path.normcase(strrelpath)):
+ return strself[len(strrelpath):]
+ elif strself.startswith(strrelpath):
+ return strself[len(strrelpath):]
+ return ""
+
+ def ensure_dir(self, *args):
+ """ ensure the path joined with args is a directory. """
+ return self.ensure(*args, **{"dir": True})
+
+ def bestrelpath(self, dest):
+ """ return a string which is a relative path from self
+ (assumed to be a directory) to dest such that
+ self.join(bestrelpath) == dest and if not such
+ path can be determined return dest.
+ """
+ try:
+ if self == dest:
+ return os.curdir
+ base = self.common(dest)
+ if not base: # can be the case on windows
+ return str(dest)
+ self2base = self.relto(base)
+ reldest = dest.relto(base)
+ if self2base:
+ n = self2base.count(self.sep) + 1
+ else:
+ n = 0
+ l = [os.pardir] * n
+ if reldest:
+ l.append(reldest)
+ target = dest.sep.join(l)
+ return target
+ except AttributeError:
+ return str(dest)
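+
+    # Editor's note: illustrative example, not from the original source:
+    #
+    #     >>> py.path.local("/a/b").bestrelpath(py.path.local("/a/c/d"))
+    #     '../c/d'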
+
+ def exists(self):
+ return self.check()
+
+ def isdir(self):
+ return self.check(dir=1)
+
+ def isfile(self):
+ return self.check(file=1)
+
+ def parts(self, reverse=False):
+ """ return a root-first list of all ancestor directories
+ plus the path itself.
+ """
+ current = self
+ l = [self]
+ while 1:
+ last = current
+ current = current.dirpath()
+ if last == current:
+ break
+ l.append(current)
+ if not reverse:
+ l.reverse()
+ return l
+
+ def common(self, other):
+ """ return the common part shared with the other path
+ or None if there is no common part.
+ """
+ last = None
+ for x, y in zip(self.parts(), other.parts()):
+ if x != y:
+ return last
+ last = x
+ return last
+
+ def __add__(self, other):
+ """ return new path object with 'other' added to the basename"""
+ return self.new(basename=self.basename+str(other))
+
+ def __cmp__(self, other):
+ """ return sort value (-1, 0, +1). """
+ try:
+ return cmp(self.strpath, other.strpath)
+ except AttributeError:
+            return cmp(str(self), str(other))  # self.path, other.path
+
+ def __lt__(self, other):
+ try:
+ return self.strpath < other.strpath
+ except AttributeError:
+ return str(self) < str(other)
+
+ def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
+ """ yields all paths below the current one
+
+ fil is a filter (glob pattern or callable), if not matching the
+ path will not be yielded, defaulting to None (everything is
+ returned)
+
+ rec is a filter (glob pattern or callable) that controls whether
+ a node is descended, defaulting to None
+
+        ignore is an Exception class that is ignored when calling listdir()
+ on any of the paths (by default, all exceptions are reported)
+
+ bf if True will cause a breadthfirst search instead of the
+ default depthfirst. Default: False
+
+ sort if True will sort entries within each directory level.
+ """
+ for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
+ yield x
+
+ def _sortlist(self, res, sort):
+ if sort:
+ if hasattr(sort, '__call__'):
+ res.sort(sort)
+ else:
+ res.sort()
+
+ def samefile(self, other):
+ """ return True if other refers to the same stat object as self. """
+ return self.strpath == str(other)
+
+class Visitor:
+ def __init__(self, fil, rec, ignore, bf, sort):
+ if isinstance(fil, str):
+ fil = FNMatcher(fil)
+ if isinstance(rec, str):
+ self.rec = FNMatcher(rec)
+ elif not hasattr(rec, '__call__') and rec:
+ self.rec = lambda path: True
+ else:
+ self.rec = rec
+ self.fil = fil
+ self.ignore = ignore
+ self.breadthfirst = bf
+ self.optsort = sort and sorted or (lambda x: x)
+
+ def gen(self, path):
+ try:
+ entries = path.listdir()
+ except self.ignore:
+ return
+ rec = self.rec
+ dirs = self.optsort([p for p in entries
+ if p.check(dir=1) and (rec is None or rec(p))])
+ if not self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+ for p in self.optsort(entries):
+ if self.fil is None or self.fil(p):
+ yield p
+ if self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+
+class FNMatcher:
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __call__(self, path):
+ pattern = self.pattern
+
+ if (pattern.find(path.sep) == -1 and
+ iswin32 and
+ pattern.find(posixpath.sep) != -1):
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posixpath.sep, path.sep)
+
+ if pattern.find(path.sep) == -1:
+ name = path.basename
+ else:
+ name = str(path) # path.strpath # XXX svn?
+ if not os.path.isabs(pattern):
+ pattern = '*' + path.sep + pattern
+ return py.std.fnmatch.fnmatch(name, pattern)
+
diff --git a/python/py/py/_path/local.py b/python/py/py/_path/local.py
new file mode 100644
index 000000000..d569404ec
--- /dev/null
+++ b/python/py/py/_path/local.py
@@ -0,0 +1,911 @@
+"""
+local path implementation.
+"""
+from __future__ import with_statement
+
+from contextlib import contextmanager
+import sys, os, re, atexit, io
+import py
+from py._path import common
+from py._path.common import iswin32
+from stat import S_ISLNK, S_ISDIR, S_ISREG
+
+from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
+
+if sys.version_info > (3,0):
+ def map_as_list(func, iter):
+ return list(map(func, iter))
+else:
+ map_as_list = map
+
+class Stat(object):
+ def __getattr__(self, name):
+ return getattr(self._osstatresult, "st_" + name)
+
+ def __init__(self, path, osstatresult):
+ self.path = path
+ self._osstatresult = osstatresult
+
+ @property
+ def owner(self):
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import pwd
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
+ return entry[0]
+
+ @property
+ def group(self):
+ """ return group name of file. """
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import grp
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
+ return entry[0]
+
+ def isdir(self):
+ return S_ISDIR(self._osstatresult.st_mode)
+
+ def isfile(self):
+ return S_ISREG(self._osstatresult.st_mode)
+
+ def islink(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+class PosixPath(common.PathBase):
+ def chown(self, user, group, rec=0):
+ """ change ownership to the given user and group.
+ user and group may be specified by a number or
+ by a name. if rec is True change ownership
+ recursively.
+ """
+ uid = getuserid(user)
+ gid = getgroupid(group)
+ if rec:
+ for x in self.visit(rec=lambda x: x.check(link=0)):
+ if x.check(link=0):
+ py.error.checked_call(os.chown, str(x), uid, gid)
+ py.error.checked_call(os.chown, str(self), uid, gid)
+
+ def readlink(self):
+ """ return value of a symbolic link. """
+ return py.error.checked_call(os.readlink, self.strpath)
+
+ def mklinkto(self, oldname):
+ """ posix style hard link to another name. """
+ py.error.checked_call(os.link, str(oldname), str(self))
+
+ def mksymlinkto(self, value, absolute=1):
+ """ create a symbolic link with the given value (pointing to another name). """
+ if absolute:
+ py.error.checked_call(os.symlink, str(value), self.strpath)
+ else:
+ base = self.common(value)
+ # with posix local paths '/' is always a common base
+ relsource = self.__class__(value).relto(base)
+ reldest = self.relto(base)
+ n = reldest.count(self.sep)
+ target = self.sep.join(('..', )*n + (relsource, ))
+ py.error.checked_call(os.symlink, target, self.strpath)
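+
+ # Worked sketch of the relative branch above (hypothetical paths):
+ # self = /a/b/c/link and value = /a/x/y share the base /a, so
+ # relsource = "x/y", reldest = "b/c/link", n = 2, and the link
+ # target becomes "../../x/y".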
+
+def getuserid(user):
+ import pwd
+ if not isinstance(user, int):
+ user = pwd.getpwnam(user)[2]
+ return user
+
+def getgroupid(group):
+ import grp
+ if not isinstance(group, int):
+ group = grp.getgrnam(group)[2]
+ return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+ """ object oriented interface to os.path and other local filesystem
+ related information.
+ """
+ class ImportMismatchError(ImportError):
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
+
+ sep = os.sep
+ class Checkers(common.Checkers):
+ def _stat(self):
+ try:
+ return self._statcache
+ except AttributeError:
+ try:
+ self._statcache = self.path.stat()
+ except py.error.ELOOP:
+ self._statcache = self.path.lstat()
+ return self._statcache
+
+ def dir(self):
+ return S_ISDIR(self._stat().mode)
+
+ def file(self):
+ return S_ISREG(self._stat().mode)
+
+ def exists(self):
+ return self._stat()
+
+ def link(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+ def __init__(self, path=None, expanduser=False):
+ """ Initialize and return a local Path instance.
+
+ Path can be relative to the current directory.
+ If path is None it defaults to the current working directory.
+ If expanduser is True, tilde-expansion is performed.
+ Note that Path instances always carry an absolute path.
+ Note also that passing in a local path object will simply return
+ the exact same path object. Use new() to get a new copy.
+ """
+ if path is None:
+ self.strpath = py.error.checked_call(os.getcwd)
+ elif isinstance(path, common.PathBase):
+ self.strpath = path.strpath
+ elif isinstance(path, py.builtin._basestring):
+ if expanduser:
+ path = os.path.expanduser(path)
+ self.strpath = abspath(path)
+ else:
+ raise ValueError("can only pass None, Path instances "
+ "or non-empty strings to LocalPath")
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def __eq__(self, other):
+ s1 = self.strpath
+ s2 = getattr(other, "strpath", other)
+ if iswin32:
+ s1 = s1.lower()
+ try:
+ s2 = s2.lower()
+ except AttributeError:
+ return False
+ return s1 == s2
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
+ return self.strpath < getattr(other, "strpath", other)
+
+ def __gt__(self, other):
+ return self.strpath > getattr(other, "strpath", other)
+
+ def samefile(self, other):
+ """ return True if 'other' references the same file as 'self'.
+ """
+ other = getattr(other, "strpath", other)
+ if not isabs(other):
+ other = abspath(other)
+ if self == other:
+ return True
+ if iswin32:
+ return False # os.path.samefile is not available on win32
+ return py.error.checked_call(
+ os.path.samefile, self.strpath, other)
+
+ def remove(self, rec=1, ignore_errors=False):
+ """ remove a file or directory (or a directory tree if rec=1).
+ if ignore_errors is True, errors while removing directories will
+ be ignored.
+ """
+ if self.check(dir=1, link=0):
+ if rec:
+ # force remove of readonly files on windows
+ if iswin32:
+ self.chmod(448, rec=1) # octal 0700
+ py.error.checked_call(py.std.shutil.rmtree, self.strpath,
+ ignore_errors=ignore_errors)
+ else:
+ py.error.checked_call(os.rmdir, self.strpath)
+ else:
+ if iswin32:
+ self.chmod(448) # octal 0700
+ py.error.checked_call(os.remove, self.strpath)
+
+ def computehash(self, hashtype="md5", chunksize=524288):
+ """ return hexdigest of hashvalue for this file. """
+ try:
+ try:
+ import hashlib as mod
+ except ImportError:
+ if hashtype == "sha1":
+ hashtype = "sha"
+ mod = __import__(hashtype)
+ hash = getattr(mod, hashtype)()
+ except (AttributeError, ImportError):
+ raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+ f = self.open('rb')
+ try:
+ while 1:
+ buf = f.read(chunksize)
+ if not buf:
+ return hash.hexdigest()
+ hash.update(buf)
+ finally:
+ f.close()
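+
+ # Illustrative usage (hypothetical file):
+ #
+ # p = py.path.local("/tmp/data.bin")
+ # digest = p.computehash("sha1") # hex digest of the file contents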
+
+ def new(self, **kw):
+ """ create a modified version of this path.
+ the following keyword arguments modify various path parts::
+
+ a:/some/path/to/a/file.ext
+ xx drive
+ xxxxxxxxxxxxxxxxx dirname
+ xxxxxxxx basename
+ xxxx purebasename
+ xxx ext
+ """
+ obj = object.__new__(self.__class__)
+ if not kw:
+ obj.strpath = self.strpath
+ return obj
+ drive, dirname, basename, purebasename, ext = self._getbyspec(
+ "drive,dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ try:
+ ext = kw['ext']
+ except KeyError:
+ pass
+ else:
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ if ('dirname' in kw and not kw['dirname']):
+ kw['dirname'] = drive
+ else:
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ obj.strpath = normpath(
+ "%(dirname)s%(sep)s%(basename)s" % kw)
+ return obj
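+
+ # Illustrative usage of new() (hypothetical paths):
+ #
+ # p = py.path.local("/tmp/doc.txt")
+ # p.new(ext=".rst") # -> local('/tmp/doc.rst')
+ # p.new(basename="other.md") # -> local('/tmp/other.md')
+ # p.new(purebasename="readme") # -> local('/tmp/readme.txt')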
+
+ def _getbyspec(self, spec):
+ """ see new for what 'spec' can be. """
+ res = []
+ parts = self.strpath.split(self.sep)
+
+ args = filter(None, spec.split(','))
+ append = res.append
+ for name in args:
+ if name == 'drive':
+ append(parts[0])
+ elif name == 'dirname':
+ append(self.sep.join(parts[:-1]))
+ else:
+ basename = parts[-1]
+ if name == 'basename':
+ append(basename)
+ else:
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ append(purebasename)
+ elif name == 'ext':
+ append(ext)
+ else:
+ raise ValueError("invalid part specification %r" % name)
+ return res
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ if not kwargs:
+ path = object.__new__(self.__class__)
+ path.strpath = dirname(self.strpath)
+ if args:
+ path = path.join(*args)
+ return path
+ return super(LocalPath, self).dirpath(*args, **kwargs)
+
+ def join(self, *args, **kwargs):
+ """ return a new path by appending all 'args' as path
+ components. if abs=1 is used restart from root if any
+ of the args is an absolute path.
+ """
+ sep = self.sep
+ strargs = [getattr(arg, "strpath", arg) for arg in args]
+ strpath = self.strpath
+ if kwargs.get('abs'):
+ newargs = []
+ for arg in reversed(strargs):
+ if isabs(arg):
+ strpath = arg
+ strargs = newargs
+ break
+ newargs.insert(0, arg)
+ for arg in strargs:
+ arg = arg.strip(sep)
+ if iswin32:
+ # allow unix style paths even on windows.
+ arg = arg.strip('/')
+ arg = arg.replace('/', sep)
+ strpath = strpath + sep + arg
+ obj = object.__new__(self.__class__)
+ obj.strpath = normpath(strpath)
+ return obj
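+
+ # Illustrative usage of join() (hypothetical paths):
+ #
+ # base = py.path.local("/tmp")
+ # base.join("a", "b") # -> local('/tmp/a/b')
+ # base.join("ignored", "/abs", "y", abs=1) # -> local('/abs/y');
+ # # restarts at the last absolute component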
+
+ def open(self, mode='r', ensure=False, encoding=None):
+ """ return an opened file with the given mode.
+
+ If ensure is True, create parent directories if needed.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if encoding:
+ return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
+ return py.error.checked_call(open, self.strpath, mode)
+
+ def _fastjoin(self, name):
+ child = object.__new__(self.__class__)
+ child.strpath = self.strpath + self.sep + name
+ return child
+
+ def islink(self):
+ return islink(self.strpath)
+
+ def check(self, **kw):
+ if not kw:
+ return exists(self.strpath)
+ if len(kw) == 1:
+ if "dir" in kw:
+ return not kw["dir"] ^ isdir(self.strpath)
+ if "file" in kw:
+ return not kw["file"] ^ isfile(self.strpath)
+ return super(LocalPath, self).check(**kw)
+
+ _patternchars = set("*?[" + os.path.sep)
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if fil is None and sort is None:
+ names = py.error.checked_call(os.listdir, self.strpath)
+ return map_as_list(self._fastjoin, names)
+ if isinstance(fil, py.builtin._basestring):
+ if not self._patternchars.intersection(fil):
+ child = self._fastjoin(fil)
+ if exists(child.strpath):
+ return [child]
+ return []
+ fil = common.FNMatcher(fil)
+ names = py.error.checked_call(os.listdir, self.strpath)
+ res = []
+ for name in names:
+ child = self._fastjoin(name)
+ if fil is None or fil(child):
+ res.append(child)
+ self._sortlist(res, sort)
+ return res
+
+ def size(self):
+ """ return size of the underlying file object """
+ return self.stat().size
+
+ def mtime(self):
+ """ return last modification time of the path. """
+ return self.stat().mtime
+
+ def copy(self, target, mode=False):
+ """ copy path to target."""
+ if self.check(file=1):
+ if target.check(dir=1):
+ target = target.join(self.basename)
+ assert self != target
+ copychunked(self, target)
+ if mode:
+ copymode(self.strpath, target.strpath)
+ else:
+ def rec(p):
+ return p.check(link=0)
+ for x in self.visit(rec=rec):
+ relpath = x.relto(self)
+ newx = target.join(relpath)
+ newx.dirpath().ensure(dir=1)
+ if x.check(link=1):
+ newx.mksymlinkto(x.readlink())
+ continue
+ elif x.check(file=1):
+ copychunked(x, newx)
+ elif x.check(dir=1):
+ newx.ensure(dir=1)
+ if mode:
+ copymode(x.strpath, newx.strpath)
+
+ def rename(self, target):
+ """ rename this path to target. """
+ target = getattr(target, "strpath", target)
+ return py.error.checked_call(os.rename, self.strpath, target)
+
+ def dump(self, obj, bin=1):
+ """ pickle object into path location"""
+ f = self.open('wb')
+ try:
+ py.error.checked_call(py.std.pickle.dump, obj, f, bin)
+ finally:
+ f.close()
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ p = self.join(*args)
+ py.error.checked_call(os.mkdir, getattr(p, "strpath", p))
+ return p
+
+ def write_binary(self, data, ensure=False):
+ """ write binary data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('wb') as f:
+ f.write(data)
+
+ def write_text(self, data, encoding, ensure=False):
+ """ write text data into path using the specified encoding.
+ If ensure is True create missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('w', encoding=encoding) as f:
+ f.write(data)
+
+ def write(self, data, mode='w', ensure=False):
+ """ write data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if 'b' in mode:
+ if not py.builtin._isbytes(data):
+ raise ValueError("can only process bytes")
+ else:
+ if not py.builtin._istext(data):
+ if not py.builtin._isbytes(data):
+ data = str(data)
+ else:
+ data = py.builtin._totext(data, sys.getdefaultencoding())
+ f = self.open(mode)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent == self:
+ return self
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ try:
+ self.mkdir()
+ except py.error.EEXIST:
+ # race condition: file/dir created by another thread/process.
+ # complain if it is not a dir
+ if self.check(dir=0):
+ raise
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ else:
+ p.dirpath()._ensuredirs()
+ if not p.check(file=1):
+ p.open('w').close()
+ return p
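+
+ # Illustrative usage of ensure() (hypothetical paths):
+ #
+ # tmp = py.path.local("/tmp/work")
+ # tmp.ensure("sub", "file.txt") # creates empty /tmp/work/sub/file.txt
+ # tmp.ensure("logs", dir=1) # creates directory /tmp/work/logs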
+
+ def stat(self, raising=True):
+ """ Return an os.stat() tuple. """
+ if raising:
+ return Stat(self, py.error.checked_call(os.stat, self.strpath))
+ try:
+ return Stat(self, os.stat(self.strpath))
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return None
+
+ def lstat(self):
+ """ Return an os.lstat() tuple. """
+ return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+ def setmtime(self, mtime=None):
+ """ set modification time for the given path. if 'mtime' is None
+ (the default) then the file's mtime is set to current time.
+
+ Note that the resolution for 'mtime' is platform dependent.
+ """
+ if mtime is None:
+ return py.error.checked_call(os.utime, self.strpath, mtime)
+ try:
+ return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+ except py.error.EINVAL:
+ return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+ def chdir(self):
+ """ change directory to self and return old current directory """
+ try:
+ old = self.__class__()
+ except py.error.ENOENT:
+ old = None
+ py.error.checked_call(os.chdir, self.strpath)
+ return old
+
+
+ @contextmanager
+ def as_cwd(self):
+ """ return context manager which changes to current dir during the
+ managed "with" context. On __enter__ it returns the old dir.
+ """
+ old = self.chdir()
+ try:
+ yield old
+ finally:
+ old.chdir()
+
+ def realpath(self):
+ """ return a new path which contains no symbolic links."""
+ return self.__class__(os.path.realpath(self.strpath))
+
+ def atime(self):
+ """ return last access time of the path. """
+ return self.stat().atime
+
+ def __repr__(self):
+ return 'local(%r)' % self.strpath
+
+ def __str__(self):
+ """ return string representation of the Path. """
+ return self.strpath
+
+ def chmod(self, mode, rec=0):
+ """ change permissions to the given mode. If mode is an
+ integer it directly encodes the os-specific modes.
+ if rec is True perform recursively.
+ """
+ if not isinstance(mode, int):
+ raise TypeError("mode %r must be an integer" % (mode,))
+ if rec:
+ for x in self.visit(rec=rec):
+ py.error.checked_call(os.chmod, str(x), mode)
+ py.error.checked_call(os.chmod, self.strpath, mode)
+
+ def pypkgpath(self):
+ """ return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+ Return None if a pkgpath can not be determined.
+ """
+ pkgpath = None
+ for parent in self.parts(reverse=True):
+ if parent.isdir():
+ if not parent.join('__init__.py').exists():
+ break
+ if not isimportable(parent.basename):
+ break
+ pkgpath = parent
+ return pkgpath
+
+ def _ensuresyspath(self, ensuremode, path):
+ if ensuremode:
+ s = str(path)
+ if ensuremode == "append":
+ if s not in sys.path:
+ sys.path.append(s)
+ else:
+ if s != sys.path[0]:
+ sys.path.insert(0, s)
+
+ def pyimport(self, modname=None, ensuresyspath=True):
+ """ return path as an imported python module.
+
+ If modname is None, look for the containing package
+ and construct an according module name.
+ The module will be put/looked up in sys.modules.
+ if ensuresyspath is True then the root dir for importing
+ the file (taking __init__.py files into account) will
+ be prepended to sys.path if it isn't there already.
+ If ensuresyspath=="append" the root dir will be appended
+ if it isn't already contained in sys.path.
+ if ensuresyspath is False no modification of syspath happens.
+ """
+ if not self.check():
+ raise py.error.ENOENT(self)
+
+ pkgpath = None
+ if modname is None:
+ pkgpath = self.pypkgpath()
+ if pkgpath is not None:
+ pkgroot = pkgpath.dirpath()
+ names = self.new(ext="").relto(pkgroot).split(self.sep)
+ if names[-1] == "__init__":
+ names.pop()
+ modname = ".".join(names)
+ else:
+ pkgroot = self.dirpath()
+ modname = self.purebasename
+
+ self._ensuresyspath(ensuresyspath, pkgroot)
+ __import__(modname)
+ mod = sys.modules[modname]
+ if self.basename == "__init__.py":
+ return mod # we don't check anything as we might
+ # be in a namespace package ... too icky to check
+ modfile = mod.__file__
+ if modfile[-4:] in ('.pyc', '.pyo'):
+ modfile = modfile[:-1]
+ elif modfile.endswith('$py.class'):
+ modfile = modfile[:-9] + '.py'
+ if modfile.endswith(os.path.sep + "__init__.py"):
+ if self.basename != "__init__.py":
+ modfile = modfile[:-12]
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
+ raise self.ImportMismatchError(modname, modfile, self)
+ return mod
+ else:
+ try:
+ return sys.modules[modname]
+ except KeyError:
+ # we have a custom modname, do a pseudo-import
+ mod = py.std.types.ModuleType(modname)
+ mod.__file__ = str(self)
+ sys.modules[modname] = mod
+ try:
+ py.builtin.execfile(str(self), mod.__dict__)
+ except:
+ del sys.modules[modname]
+ raise
+ return mod
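+
+ # Illustrative sketch (hypothetical layout): for /src/pkg/util.py
+ # with an __init__.py in /src/pkg, pyimport() prepends /src to
+ # sys.path and returns the imported module "pkg.util":
+ #
+ # mod = py.path.local("/src/pkg/util.py").pyimport()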
+
+ def sysexec(self, *argv, **popen_opts):
+ """ return stdout text from executing a system child process,
+ where the 'self' path points to executable.
+ The process is directly invoked and not through a system shell.
+ """
+ from subprocess import Popen, PIPE
+ argv = map_as_list(str, argv)
+ popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+ proc = Popen([str(self)] + argv, **popen_opts)
+ stdout, stderr = proc.communicate()
+ ret = proc.wait()
+ if py.builtin._isbytes(stdout):
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ if ret != 0:
+ if py.builtin._isbytes(stderr):
+ stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+ raise py.process.cmdexec.Error(ret, ret, str(self),
+ stdout, stderr,)
+ return stdout
+
+ def sysfind(cls, name, checker=None, paths=None):
+ """ return a path object found by looking at the systems
+ underlying PATH specification. If the checker is not None
+ it will be invoked to filter matching paths. If a binary
+ cannot be found, None is returned
+ Note: This is probably not working on plain win32 systems
+ but may work on cygwin.
+ """
+ if isabs(name):
+ p = py.path.local(name)
+ if p.check(file=1):
+ return p
+ else:
+ if paths is None:
+ if iswin32:
+ paths = py.std.os.environ['Path'].split(';')
+ if '' not in paths and '.' not in paths:
+ paths.append('.')
+ try:
+ systemroot = os.environ['SYSTEMROOT']
+ except KeyError:
+ pass
+ else:
+ paths = [re.sub('%SystemRoot%', systemroot, path)
+ for path in paths]
+ else:
+ paths = py.std.os.environ['PATH'].split(':')
+ tryadd = []
+ if iswin32:
+ tryadd += os.environ['PATHEXT'].split(os.pathsep)
+ tryadd.append("")
+
+ for x in paths:
+ for addext in tryadd:
+ p = py.path.local(x).join(name, abs=True) + addext
+ try:
+ if p.check(file=1):
+ if checker:
+ if not checker(p):
+ continue
+ return p
+ except py.error.EACCES:
+ pass
+ return None
+ sysfind = classmethod(sysfind)
+
+ def _gethomedir(cls):
+ try:
+ x = os.environ['HOME']
+ except KeyError:
+ try:
+ x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
+ except KeyError:
+ return None
+ return cls(x)
+ _gethomedir = classmethod(_gethomedir)
+
+ #"""
+ #special class constructors for local filesystem paths
+ #"""
+ def get_temproot(cls):
+ """ return the system's temporary directory
+ (where tempfiles are usually created in)
+ """
+ return py.path.local(py.std.tempfile.gettempdir())
+ get_temproot = classmethod(get_temproot)
+
+ def mkdtemp(cls, rootdir=None):
+ """ return a Path object pointing to a fresh new temporary directory
+ (which we created ourselves).
+ """
+ import tempfile
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+ return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
+ mkdtemp = classmethod(mkdtemp)
+
+ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+ lock_timeout = 172800): # two days
+ """ return unique directory with a number greater than the current
+ maximum one. The number is assumed to start directly after prefix.
+ if keep is true directories with a number less than (maxnum-keep)
+ will be removed.
+ """
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+
+ def parse_num(path):
+ """ parse the number out of a path (if it matches the prefix) """
+ bn = path.basename
+ if bn.startswith(prefix):
+ try:
+ return int(bn[len(prefix):])
+ except ValueError:
+ pass
+
+ # compute the maximum number currently in use with the
+ # prefix
+ lastmax = None
+ while True:
+ maxnum = -1
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None:
+ maxnum = max(maxnum, num)
+
+ # make the new directory
+ try:
+ udir = rootdir.mkdir(prefix + str(maxnum+1))
+ except py.error.EEXIST:
+ # race condition: another thread/process created the dir
+ # in the meantime. Try counting again
+ if lastmax == maxnum:
+ raise
+ lastmax = maxnum
+ continue
+ break
+
+ # put a .lock file in the new directory that will be removed at
+ # process exit
+ if lock_timeout:
+ lockfile = udir.join('.lock')
+ mypid = os.getpid()
+ if hasattr(lockfile, 'mksymlinkto'):
+ lockfile.mksymlinkto(str(mypid))
+ else:
+ lockfile.write(str(mypid))
+ def try_remove_lockfile():
+ # in a fork() situation, only the last process should
+ # remove the .lock, otherwise the other processes run the
+ # risk of seeing their temporary dir disappear. For now
+ # we remove the .lock in the parent only (i.e. we assume
+ # that the children finish before the parent).
+ if os.getpid() != mypid:
+ return
+ try:
+ lockfile.remove()
+ except py.error.Error:
+ pass
+ atexit.register(try_remove_lockfile)
+
+ # prune old directories
+ if keep:
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None and num <= (maxnum - keep):
+ lf = path.join('.lock')
+ try:
+ t1 = lf.lstat().mtime
+ t2 = lockfile.lstat().mtime
+ if not lock_timeout or abs(t2-t1) < lock_timeout:
+ continue # skip directories still locked
+ except py.error.Error:
+ pass # assume that it means that there is no 'lf'
+ try:
+ path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+
+ # make link...
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = 'current'
+
+ src = str(udir)
+ dest = src[:src.rfind('-')] + '-' + username
+ try:
+ os.unlink(dest)
+ except OSError:
+ pass
+ try:
+ os.symlink(src, dest)
+ except (OSError, AttributeError, NotImplementedError):
+ pass
+
+ return udir
+ make_numbered_dir = classmethod(make_numbered_dir)
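+
+ # Illustrative usage (directory names are hypothetical):
+ #
+ # d = py.path.local.make_numbered_dir(prefix="run-", keep=3)
+ # # -> e.g. /tmp/run-8; old run-* dirs numbered <= maxnum-keep are
+ # # pruned (unless still lock-protected) and a "run-<username>"
+ # # symlink points at the newest directory.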
+
+def copymode(src, dest):
+ py.std.shutil.copymode(src, dest)
+
+def copychunked(src, dest):
+ chunksize = 524288 # half a meg of bytes
+ fsrc = src.open('rb')
+ try:
+ fdest = dest.open('wb')
+ try:
+ while 1:
+ buf = fsrc.read(chunksize)
+ if not buf:
+ break
+ fdest.write(buf)
+ finally:
+ fdest.close()
+ finally:
+ fsrc.close()
+
+def isimportable(name):
+ if name and (name[0].isalpha() or name[0] == '_'):
+ name = name.replace("_", '')
+ return not name or name.isalnum()
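+
+# Illustrative checks (hypothetical names):
+#
+# isimportable("my_pkg") # -> True ("mypkg" is alphanumeric)
+# isimportable("my-pkg") # -> False ('-' is not alphanumeric)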
diff --git a/python/py/py/_path/svnurl.py b/python/py/py/_path/svnurl.py
new file mode 100644
index 000000000..78d71317a
--- /dev/null
+++ b/python/py/py/_path/svnurl.py
@@ -0,0 +1,380 @@
+"""
+module defining a subversion path object based on the external
+command 'svn'. This module aims to work with svn 1.3 and higher
+but might also interact well with earlier versions.
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py._path import common
+from py._path import svnwc as svncommon
+from py._path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False
+
+class SvnCommandPath(svncommon.SvnPathBase):
+ """ path implementation that offers access to (possibly remote) subversion
+ repositories. """
+
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+ def __new__(cls, path, rev=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(path, cls):
+ rev = path.rev
+ auth = path.auth
+ path = path.strpath
+ svncommon.checkbadchars(path)
+ path = path.rstrip('/')
+ self.strpath = path
+ self.rev = rev
+ self.auth = auth
+ return self
+
+ def __repr__(self):
+ if self.rev == -1:
+ return 'svnurl(%r)' % self.strpath
+ else:
+ return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+ def _svnwithrev(self, cmd, *args):
+ """ execute an svn command, append our own url and revision """
+ if self.rev is None:
+ return self._svnwrite(cmd, *args)
+ else:
+ args = ['-r', self.rev] + list(args)
+ return self._svnwrite(cmd, *args)
+
+ def _svnwrite(self, cmd, *args):
+ """ execute an svn command, append our own url """
+ l = ['svn %s' % cmd]
+ args = ['"%s"' % self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._encodedurl())
+ # the locale is fixed in _svncmdexecauth so that the output is parseable
+ string = " ".join(l)
+ if DEBUG:
+ print("execing %s" % string)
+ out = self._svncmdexecauth(string)
+ return out
+
+ def _svncmdexecauth(self, cmd):
+ """ execute an svn command 'as is' """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._cmdexec(cmd)
+
+ def _cmdexec(self, cmd):
+ try:
+ out = process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if (e.err.find('File Exists') != -1 or
+ e.err.find('File already exists') != -1):
+ raise py.error.EEXIST(self)
+ raise
+ return out
+
+ def _svnpopenauth(self, cmd):
+ """ execute an svn command, return a pipe for reading stdin """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._popen(cmd)
+
+ def _popen(self, cmd):
+ return os.popen(cmd)
+
+ def _encodedurl(self):
+ return self._escape(self.strpath)
+
+ def _norev_delentry(self, path):
+ auth = self.auth and self.auth.makecmdoptions() or None
+ self._lsnorevcache.delentry((str(path), auth))
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ if mode not in ("r", "rU",):
+ raise ValueError("mode %r not supported" % (mode,))
+ assert self.check(file=1) # svn cat returns an empty file otherwise
+ if self.rev is None:
+ return self._svnpopenauth('svn cat "%s"' % (
+ self._escape(self.strpath), ))
+ else:
+ return self._svnpopenauth('svn cat -r %s "%s"' % (
+ self.rev, self._escape(self.strpath)))
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path of the current path joined
+ with any given path arguments.
+ """
+ l = self.strpath.split(self.sep)
+ if len(l) < 4:
+ raise py.error.EINVAL(self, "base is not valid")
+ elif len(l) == 4:
+ return self.join(*args, **kwargs)
+ else:
+ return self.new(basename='').join(*args, **kwargs)
+
+ # modifying methods (cache must be invalidated)
+ def mkdir(self, *args, **kwargs):
+ """ create & return the directory joined with args.
+ pass a 'msg' keyword argument to set the commit message.
+ """
+ commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+ createpath = self.join(*args)
+ createpath._svnwrite('mkdir', '-m', commit_msg)
+ self._norev_delentry(createpath.dirpath())
+ return createpath
+
+ def copy(self, target, msg='copied by py lib invocation'):
+ """ copy path to target with checkin message msg."""
+ if getattr(target, 'rev', None) is not None:
+ raise py.error.EINVAL(target, "revisions are immutable")
+ self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+ self._escape(self), self._escape(target)))
+ self._norev_delentry(target.dirpath())
+
+ def rename(self, target, msg="renamed by py lib invocation"):
+ """ rename this path to target with checkin message msg. """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+ msg, self._escape(self), self._escape(target)))
+ self._norev_delentry(self.dirpath())
+ self._norev_delentry(self)
+
+ def remove(self, rec=1, msg='removed by py lib invocation'):
+ """ remove a file or directory (or a directory tree if rec=1) with
+checkin message msg."""
+ if self.rev is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+ self._norev_delentry(self.dirpath())
+
+ def export(self, topath):
+ """ export to a local path
+
+ topath should not exist prior to calling this, returns a
+ py.path.local instance
+ """
+ topath = py.path.local(topath)
+ args = ['"%s"' % (self._escape(self),),
+ '"%s"' % (self._escape(topath),)]
+ if self.rev is not None:
+ args = ['-r', str(self.rev)] + args
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
+ return topath
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ target = self.join(*args)
+ dir = kwargs.get('dir', 0)
+ for x in target.parts(reverse=True):
+ if x.check():
+ break
+ else:
+ raise py.error.ENOENT(target, "has not any valid base!")
+ if x == target:
+ if not x.check(dir=dir):
+ raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
+ return x
+ tocreate = target.relto(x)
+ basename = tocreate.split(self.sep, 1)[0]
+ tempdir = py.path.local.mkdtemp()
+ try:
+ tempdir.ensure(tocreate, dir=dir)
+ cmd = 'svn import -m "%s" "%s" "%s"' % (
+ "ensure %s" % self._escape(tocreate),
+ self._escape(tempdir.join(basename)),
+ x.join(basename)._encodedurl())
+ self._svncmdexecauth(cmd)
+ self._norev_delentry(x)
+ finally:
+ tempdir.remove()
+ return target
+
+ # end of modifying methods
+ def _propget(self, name):
+ res = self._svnwithrev('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def _proplist(self):
+ res = self._svnwithrev('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return svncommon.PropListDict(self, lines)
+
+ def info(self):
+ """ return an Info structure with svn-provided information. """
+ parent = self.dirpath()
+ nameinfo_seq = parent._listdir_nameinfo()
+ bn = self.basename
+ for name, info in nameinfo_seq:
+ if name == bn:
+ return info
+ raise py.error.ENOENT(self)
+
+
+ def _listdir_nameinfo(self):
+ """ return sequence of name-info directory entries of self """
+ def builder():
+ try:
+ res = self._svnwithrev('ls', '-v')
+ except process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('non-existent in that revision') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('File not found') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('not part of a repository')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('Unable to open')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.lower().find('method not allowed')!=-1:
+ raise py.error.EACCES(self, e.err)
+ raise py.error.Error(e.err)
+ lines = res.split('\n')
+ nameinfo_seq = []
+ for lsline in lines:
+ if lsline:
+ info = InfoSvnCommand(lsline)
+ if info._name != '.': # svn 1.5 produces '.' dirs,
+ nameinfo_seq.append((info._name, info))
+ nameinfo_seq.sort()
+ return nameinfo_seq
+ auth = self.auth and self.auth.makecmdoptions() or None
+ if self.rev is not None:
+ return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+ builder)
+ else:
+ return self._lsnorevcache.getorbuild((self.strpath, auth),
+ builder)
+
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ nameinfo_seq = self._listdir_nameinfo()
+ if len(nameinfo_seq) == 1:
+ name, info = nameinfo_seq[0]
+ if name == self.basename and info.kind == 'file':
+ #if not self.check(dir=1):
+ raise py.error.ENOTDIR(self)
+ paths = [self.join(name) for (name, info) in nameinfo_seq]
+ if fil:
+ paths = [x for x in paths if fil(x)]
+ self._sortlist(paths, sort)
+ return paths
+
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() #make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
+ (rev_opt, verbose_opt, self.strpath))
+ from xml.dom import minidom
+ tree = minidom.parse(xmlpipe)
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(svncommon.LogEntry(logentry))
+ return result
+
+#01234567890123456789012345678901234567890123467
+# 2256 hpk 165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+# 1312 johnny 1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+ # the '0?' part in the middle is an indication of whether the resource is
+ # locked, see 'svn help ls'
+ lspattern = re.compile(
+ r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+ '*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
+ def __init__(self, line):
+ # this is a typical line from 'svn ls http://...'
+ #_ 1127 jum 0 Jul 13 15:28 branch/
+ match = self.lspattern.match(line)
+ data = match.groupdict()
+ self._name = data['file']
+ if self._name[-1] == '/':
+ self._name = self._name[:-1]
+ self.kind = 'dir'
+ else:
+ self.kind = 'file'
+ #self.has_props = l.pop(0) == 'P'
+ self.created_rev = int(data['rev'])
+ self.last_author = data['author']
+ self.size = data['size'] and int(data['size']) or 0
+ self.mtime = parse_time_with_missing_year(data['date'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+ """ analyze the time part from a single line of "svn ls -v"
+ the svn output doesn't show the year makes the 'timestr'
+ ambigous.
+ """
+ import calendar
+ t_now = time.gmtime()
+
+ tparts = timestr.split()
+ month = time.strptime(tparts.pop(0), '%b')[1]
+ day = time.strptime(tparts.pop(0), '%d')[2]
+ last = tparts.pop(0) # year or hour:minute
+ try:
+ if ":" in last:
+ raise ValueError()
+ year = time.strptime(last, '%Y')[0]
+ hour = minute = 0
+ except ValueError:
+ hour, minute = time.strptime(last, '%H:%M')[3:5]
+ year = t_now[0]
+
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ if t_result > t_now:
+ year -= 1
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ return calendar.timegm(t_result)
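+
+# Illustrative sketch: "Nov 24 17:55" carries no year, so the current
+# year is assumed; if that would place the result in the future, it is
+# shifted back one year:
+#
+# parse_time_with_missing_year("Nov 24 17:55") # -> seconds since epoch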
+
+class PathEntry:
+ def __init__(self, ppart):
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+ self.action = ppart.getAttribute('action').encode('UTF-8')
+ if self.action == 'A':
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+ if self.copyfrom_path:
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
diff --git a/python/py/py/_path/svnwc.py b/python/py/py/_path/svnwc.py
new file mode 100644
index 000000000..00d3b4bba
--- /dev/null
+++ b/python/py/py/_path/svnwc.py
@@ -0,0 +1,1240 @@
+"""
+svn-command based implementation of a Subversion WorkingCopy Path.
+
+ SvnWCCommandPath is the main class.
+
+"""
+
+import os, sys, time, re, calendar, string
+import py
+import subprocess
+from py._path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+ proplist = {}
+ info = {}
+ entries = {}
+ prop = {}
+
+class RepoEntry:
+ def __init__(self, url, rev, timestamp):
+ self.url = url
+ self.rev = rev
+ self.timestamp = timestamp
+
+ def __str__(self):
+ return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+ """ The Repocache manages discovered repository paths
+ and their revisions. If inside a timeout the cache
+ will even return the revision of the root.
+ """
+ timeout = 20 # seconds after which we forget that we know the last revision
+
+ def __init__(self):
+ self.repos = []
+
+ def clear(self):
+ self.repos = []
+
+ def put(self, url, rev, timestamp=None):
+ if rev is None:
+ return
+ if timestamp is None:
+ timestamp = time.time()
+
+ for entry in self.repos:
+ if url == entry.url:
+ entry.timestamp = timestamp
+ entry.rev = rev
+ #print "set repo", entry
+ break
+ else:
+ entry = RepoEntry(url, rev, timestamp)
+ self.repos.append(entry)
+ #print "appended repo", entry
+
+ def get(self, url):
+ now = time.time()
+ for entry in self.repos:
+ if url.startswith(entry.url):
+ if now < entry.timestamp + self.timeout:
+ #print "returning immediate Etrny", entry
+ return entry.url, entry.rev
+ return entry.url, -1
+ return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code
+
+ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
+if sys.platform == "win32":
+ ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+
+def _getsvnversion(ver=[]):
+ try:
+ return ver[0]
+ except IndexError:
+ v = py.process.cmdexec("svn -q --version")
+ v = v.strip()
+ v = '.'.join(v.split('.')[:2])
+ ver.append(v)
+ return v
+
+def _escape_helper(text):
+ text = str(text)
+ if py.std.sys.platform != 'win32':
+ text = str(text).replace('$', '\\$')
+ return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+ for c in str(text):
+ if c.isalnum():
+ continue
+ if c in allowed_chars:
+ continue
+ return True
+ return False
+
+def checkbadchars(url):
+ # (hpk) not quite sure about the exact purpose, guido w.?
+ proto, uri = url.split("://", 1)
+ if proto != "file":
+ host, uripath = uri.split('/', 1)
+ # only check for bad chars in the non-protocol parts
+ if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+ or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+ raise ValueError("bad char in %r" % (url, ))
+
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+ """ Base implementation for SvnPath implementations. """
+ sep = '/'
+
+ def _geturl(self):
+ return self.strpath
+ url = property(_geturl, None, None, "url of this svn-path.")
+
+ def __str__(self):
+ """ return a string representation (including rev-number) """
+ return self.strpath
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts::
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ obj = object.__new__(self.__class__)
+ obj.rev = kw.get('rev', self.rev)
+ obj.auth = kw.get('auth', self.auth)
+ dirname, basename, purebasename, ext = self._getbyspec(
+ "dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ ext = kw.setdefault('ext', ext)
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ if kw['basename']:
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+ else:
+ obj.strpath = "%(dirname)s" % kw
+ return obj
+
+ def _getbyspec(self, spec):
+ """ get specified parts of the path. 'arg' is a string
+ with comma separated path parts. The parts are returned
+ in exactly the order of the specification.
+
+ you may specify the following parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ res = []
+ parts = self.strpath.split(self.sep)
+ for name in spec.split(','):
+ name = name.strip()
+ if name == 'dirname':
+ res.append(self.sep.join(parts[:-1]))
+ elif name == 'basename':
+ res.append(parts[-1])
+ else:
+ basename = parts[-1]
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ res.append(purebasename)
+ elif name == 'ext':
+ res.append(ext)
+ else:
+ raise NameError("Don't know part %r" % name)
+ return res
+
+ def __eq__(self, other):
+ """ return true if path and rev attributes each match """
+ return (str(self) == str(other) and
+ self.rev == other.rev)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def join(self, *args):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+
+ args = tuple([arg.strip(self.sep) for arg in args])
+ parts = (self.strpath, ) + args
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+ return newpath
+
+ def propget(self, name):
+ """ return the content of the given property. """
+ value = self._propget(name)
+ return value
+
+ def proplist(self):
+ """ list all property names. """
+ content = self._proplist()
+ return content
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ # shared help methods
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+
+ #def _childmaxrev(self):
+ # """ return maximum revision number of childs (or self.rev if no childs) """
+ # rev = self.rev
+ # for name, info in self._listdir_nameinfo():
+ # rev = max(rev, info.created_rev)
+ # return rev
+
+ #def _getlatestrevision(self):
+ # """ return latest repo-revision for this path. """
+ # url = self.strpath
+ # path = self.__class__(url, None)
+ #
+ # # we need a long walk to find the root-repo and revision
+ # while 1:
+ # try:
+ # rev = max(rev, path._childmaxrev())
+ # previous = path
+ # path = path.dirpath()
+ # except (IOError, process.cmdexec.Error):
+ # break
+ # if rev is None:
+ # raise IOError, "could not determine newest repo revision for %s" % self
+ # return rev
+
+ class Checkers(common.Checkers):
+ def dir(self):
+ try:
+ return self.path.info().kind == 'dir'
+ except py.error.Error:
+ return self._listdirworks()
+
+ def _listdirworks(self):
+ try:
+ self.path.listdir()
+ except py.error.ENOENT:
+ return False
+ else:
+ return True
+
+ def file(self):
+ try:
+ return self.path.info().kind == 'file'
+ except py.error.ENOENT:
+ return False
+
+ def exists(self):
+ try:
+ return self.path.info()
+ except py.error.ENOENT:
+ return self._listdirworks()
+
+def parse_apr_time(timestr):
+ i = timestr.rfind('.')
+ if i == -1:
+ raise ValueError("could not parse %s" % timestr)
+ timestr = timestr[:i]
+ parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+ return time.mktime(parsedtime)
+
+class PropListDict(dict):
+ """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
+ def __init__(self, path, keynames):
+ dict.__init__(self, [(x, None) for x in keynames])
+ self.path = path
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ if value is None:
+ value = self.path.propget(key)
+ dict.__setitem__(self, key, value)
+ return value
+
+def fixlocale():
+ if sys.platform != 'win32':
+ return 'LC_ALL=C '
+ return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
+ILLEGAL_CHARS = '* | \ / : < > ? \t \n \x0b \x0c \r'.split(' ')
+if os.sep in ILLEGAL_CHARS:
+ ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+ illegal = ILLEGAL_CHARS[:]
+ sp = path.strpath
+ if ISWINDOWS:
+ illegal.remove(':')
+ if not _reg_allow_disk.match(sp):
+ raise ValueError('path may not contain a colon (:)')
+ for char in sp:
+ if char not in string.printable or char in illegal:
+ raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+ _check_path(path)
+ sp = path.strpath
+ if addat and path.rev != -1:
+ sp = '%s@%s' % (sp, path.rev)
+ elif addat:
+ sp = '%s@HEAD' % (sp,)
+ return sp
+
+def url_from_path(path):
+ fspath = path_to_fspath(path, False)
+ quote = py.std.urllib.quote
+ if ISWINDOWS:
+ match = _reg_allow_disk.match(fspath)
+ fspath = fspath.replace('\\', '/')
+ if match.group(1):
+ fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+ quote(fspath[len(match.group(1)):]))
+ else:
+ fspath = quote(fspath)
+ else:
+ fspath = quote(fspath)
+ if path.rev != -1:
+ fspath = '%s@%s' % (fspath, path.rev)
+ else:
+ fspath = '%s@HEAD' % (fspath,)
+ return 'file://%s' % (fspath,)
+
+class SvnAuth(object):
+ """ container for auth information for Subversion """
+ def __init__(self, username, password, cache_auth=True, interactive=True):
+ self.username = username
+ self.password = password
+ self.cache_auth = cache_auth
+ self.interactive = interactive
+
+ def makecmdoptions(self):
+ uname = self.username.replace('"', '\\"')
+ passwd = self.password.replace('"', '\\"')
+ ret = []
+ if uname:
+ ret.append('--username="%s"' % (uname,))
+ if passwd:
+ ret.append('--password="%s"' % (passwd,))
+ if not self.cache_auth:
+ ret.append('--no-auth-cache')
+ if not self.interactive:
+ ret.append('--non-interactive')
+ return ' '.join(ret)
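+
+ # Illustrative usage (hypothetical credentials):
+ #
+ # auth = SvnAuth("alice", "s3cret", cache_auth=False)
+ # auth.makecmdoptions()
+ # # -> '--username="alice" --password="s3cret" --no-auth-cache'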
+
+ def __str__(self):
+ return "<SvnAuth username=%s ...>" %(self.username,)
+
+rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+ """ path implementation offering access/modification to svn working copies.
+ It has methods similar to the functions in os.path and similar to the
+ commands of the svn client.
+ """
+ sep = os.sep
+
+ def __new__(cls, wcpath=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(wcpath, cls):
+ if wcpath.__class__ == cls:
+ return wcpath
+ wcpath = wcpath.localpath
+ if _check_for_bad_chars(str(wcpath),
+ ALLOWED_CHARS):
+ raise ValueError("bad char in wcpath %s" % (wcpath, ))
+ self.localpath = py.path.local(wcpath)
+ self.auth = auth
+ return self
+
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+ def __eq__(self, other):
+ return self.localpath == getattr(other, 'localpath', None)
+
+ def _geturl(self):
+ if getattr(self, '_url', None) is None:
+ info = self.info()
+ self._url = info.url #SvnPath(info.url, info.rev)
+ assert isinstance(self._url, py.builtin._basestring)
+ return self._url
+
+ url = property(_geturl, None, None, "url of this WC item")
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+ def dump(self, obj):
+ """ pickle object into path location"""
+ return self.localpath.dump(obj)
+
+ def svnurl(self):
+ """ return current SvnPath for this WC-item. """
+ info = self.info()
+ return py.path.svnurl(info.url)
+
+ def __repr__(self):
+ return "svnwc(%r)" % (self.strpath) # , self._url)
+
+ def __str__(self):
+ return str(self.localpath)
+
+ def _makeauthoptions(self):
+ if self.auth is None:
+ return ''
+ return self.auth.makecmdoptions()
+
+ def _authsvn(self, cmd, args=None):
+ args = args and list(args) or []
+ args.append(self._makeauthoptions())
+ return self._svn(cmd, *args)
+
+ def _svn(self, cmd, *args):
+ l = ['svn %s' % cmd]
+ args = [self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._escape(self.strpath))
+ # try fixing the locale because we can't otherwise parse
+ string = fixlocale() + " ".join(l)
+ try:
+ try:
+ key = 'LC_MESSAGES'
+ hold = os.environ.get(key)
+ os.environ[key] = 'C'
+ out = py.process.cmdexec(string)
+ finally:
+ if hold:
+ os.environ[key] = hold
+ else:
+ del os.environ[key]
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ strerr = e.err.lower()
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
+ raise py.error.ENOENT(self)
+ if (strerr.find('file exists') != -1 or
+ strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
+ strerr.find("can't create directory") != -1):
+ raise py.error.EEXIST(strerr) #self)
+ raise
+ return out
+
+ def switch(self, url):
+ """ switch to given URL. """
+ self._authsvn('switch', [url])
+
+ def checkout(self, url=None, rev=None):
+ """ checkout from url to local wcpath. """
+ args = []
+ if url is None:
+ url = self.url
+ if rev is None or rev == -1:
+ if (py.std.sys.platform != 'win32' and
+ _getsvnversion() == '1.3'):
+ url += "@HEAD"
+ else:
+ if _getsvnversion() == '1.3':
+ url += "@%d" % rev
+ else:
+ args.append('-r' + str(rev))
+ args.append(url)
+ self._authsvn('co', args)
+
+ def update(self, rev='HEAD', interactive=True):
+ """ update working copy item to given revision. (None -> HEAD). """
+ opts = ['-r', rev]
+ if not interactive:
+ opts.append("--non-interactive")
+ self._authsvn('up', opts)
+
+ def write(self, content, mode='w'):
+ """ write content into local filesystem wc. """
+ self.localpath.write(content, mode)
+
+ def dirpath(self, *args):
+ """ return the directory Path of the current Path. """
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ self.mkdir()
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'directory=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if p.check():
+ if p.check(versioned=False):
+ p.add()
+ return p
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ parent = p.dirpath()
+ parent._ensuredirs()
+ p.write("")
+ p.add()
+ return p
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ if args:
+ return self.join(*args).mkdir()
+ else:
+ self._svn('mkdir')
+ return self
+
+ def add(self):
+ """ add ourself to svn """
+ self._svn('add')
+
+ def remove(self, rec=1, force=1):
+ """ remove a file or a directory tree. 'rec'ursive is
+ ignored and considered always true (because of
+ underlying svn semantics.
+ """
+ assert rec, "svn cannot remove non-recursively"
+ if not self.check(versioned=True):
+ # not added to svn (anymore?), just remove
+ py.path.local(self).remove()
+ return
+ flags = []
+ if force:
+ flags.append('--force')
+ self._svn('remove', *flags)
+
+ def copy(self, target):
+ """ copy path to target."""
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+ def rename(self, target):
+ """ rename this path to target. """
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+ def lock(self):
+ """ set a lock (exclusive) on the resource """
+ out = self._authsvn('lock').strip()
+ if not out:
+ # warning or error, raise exception
+ raise ValueError("unknown error in svn lock command")
+
+ def unlock(self):
+ """ unset a previously set lock """
+ out = self._authsvn('unlock').strip()
+ if out.startswith('svn:'):
+ # warning or error, raise exception
+ raise Exception(out[4:])
+
+ def cleanup(self):
+ """ remove any locks from the resource """
+ # XXX should be fixed properly!!!
+ try:
+ self.unlock()
+ except:
+ pass
+
+ def status(self, updates=0, rec=0, externals=0):
+ """ return (collective) Status object for this file. """
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+ # 2201 2192 jum test
+ # XXX
+ if externals:
+ raise ValueError("XXX cannot perform status() "
+ "on external items yet")
+ else:
+ #1.2 supports: externals = '--ignore-externals'
+ externals = ''
+ if rec:
+ rec= ''
+ else:
+ rec = '--non-recursive'
+
+ # XXX does not work on all subversion versions
+ #if not externals:
+ # externals = '--ignore-externals'
+
+ if updates:
+ updates = '-u'
+ else:
+ updates = ''
+
+ try:
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ except py.process.cmdexec.Error:
+ cmd = 'status -v --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ rootstatus = WCStatus(self).fromstring(out, self)
+ else:
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
+ return rootstatus
+
+ def diff(self, rev=None):
+ """ return a diff of the current path against revision rev (defaulting
+ to the last one).
+ """
+ args = []
+ if rev is not None:
+ args.append("-r %d" % rev)
+ out = self._authsvn('diff', args)
+ return out
+
+ def blame(self):
+ """ return a list of tuples of three elements:
+ (revision, committer, line)
+ """
+ out = self._svn('blame')
+ result = []
+ blamelines = out.splitlines()
+ reallines = py.path.svnurl(self.url).readlines()
+ for i, (blameline, line) in enumerate(
+ zip(blamelines, reallines)):
+ m = rex_blame.match(blameline)
+ if not m:
+ raise ValueError("output line %r of svn blame does not match "
+ "expected format" % (line, ))
+ rev, name, _ = m.groups()
+ result.append((int(rev), name, line))
+ return result
+
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+ def commit(self, msg='', rec=1):
+ """ commit with support for non-recursive commits """
+ # XXX i guess escaping should be done better here?!?
+ cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+ if not rec:
+ cmd += ' -N'
+ out = self._authsvn(cmd)
+ try:
+ del cache.info[self]
+ except KeyError:
+ pass
+ if out:
+ m = self._rex_commit.match(out)
+ return int(m.group(1))
+
+ def propset(self, name, value, *args):
+ """ set property name to value on this path. """
+ d = py.path.local.mkdtemp()
+ try:
+ p = d.join('value')
+ p.write(value)
+ self._svn('propset', name, '--file', str(p), *args)
+ finally:
+ d.remove()
+
+ def propget(self, name):
+ """ get property name on this path. """
+ res = self._svn('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def propdel(self, name):
+ """ delete property name on this path. """
+ res = self._svn('propdel', name)
+ return res[:-1] # strip trailing newline
+
+ def proplist(self, rec=0):
+ """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+ if rec:
+ res = self._svn('proplist -R')
+ return make_recursive_propdict(self, res)
+ else:
+ res = self._svn('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return PropListDict(self, lines)
+
+ def revert(self, rec=0):
+ """ revert the local changes of this path. if rec is True, do so
+recursively. """
+ if rec:
+ result = self._svn('revert -R')
+ else:
+ result = self._svn('revert')
+ return result
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts:
+
+            http://host.com/repo/path/file.ext
+            |-----------------------|          dirname
+                                      |------| basename
+                                      |--|     purebasename
+                                           |--| ext
+ """
+ if kw:
+ localpath = self.localpath.new(**kw)
+ else:
+ localpath = self.localpath
+ return self.__class__(localpath, auth=self.auth)
+
+ def join(self, *args, **kwargs):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+ localpath = self.localpath.join(*args, **kwargs)
+ return self.__class__(localpath, auth=self.auth)
+
+ def info(self, usecache=1):
+ """ return an Info structure with svn-provided information. """
+ info = usecache and cache.info.get(self)
+ if not info:
+ try:
+ output = self._svn('info')
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('Path is not a working copy directory') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("is not under version control") != -1:
+ raise py.error.ENOENT(self, e.err)
+ raise
+            # XXX SVN 1.3 writes this output to stderr instead of stdout
+            # (while still returning 0!), which is a bit nasty; we assume
+            # nothing else is written to stderr...
+ if (output.strip() == '' or
+ output.lower().find('not a versioned resource') != -1):
+ raise py.error.ENOENT(self, output)
+ info = InfoSvnWCCommand(output)
+
+ # Can't reliably compare on Windows without access to win32api
+ if py.std.sys.platform != 'win32':
+ if info.path != self.localpath:
+ raise py.error.ENOENT(self, "not a versioned resource:" +
+ " %s != %s" % (info.path, self.localpath))
+ cache.info[self] = info
+ return info
+
+ def listdir(self, fil=None, sort=None):
+ """ return a sequence of Paths.
+
+ listdir will return either a tuple or a list of paths
+ depending on implementation choices.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ # XXX unify argument naming with LocalPath.listdir
+ def notsvn(path):
+ return path.basename != '.svn'
+
+ paths = []
+ for localpath in self.localpath.listdir(notsvn):
+ p = self.__class__(localpath, auth=self.auth)
+ if notsvn(p) and (not fil or fil(p)):
+ paths.append(p)
+ self._sortlist(paths, sort)
+ return paths
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ return open(self.strpath, mode)
+
+ def _getbyspec(self, spec):
+ return self.localpath._getbyspec(spec)
+
+ class Checkers(py.path.local.Checkers):
+ def __init__(self, path):
+ self.svnwcpath = path
+ self.path = path.localpath
+ def versioned(self):
+ try:
+ s = self.svnwcpath.info()
+ except (py.error.ENOENT, py.error.EEXIST):
+ return False
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('is not a working copy')!=-1:
+ return False
+ if e.err.lower().find('not a versioned resource') != -1:
+ return False
+ raise
+ else:
+ return True
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to HEAD).
+rev_end is the last revision (defaulting to the first one).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() # make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ locale_env = fixlocale()
+ # some blather on stderr
+ auth_opt = self._makeauthoptions()
+ #stdin, stdout, stderr = os.popen3(locale_env +
+ # 'svn log --xml %s %s %s "%s"' % (
+ # rev_opt, verbose_opt, auth_opt,
+ # self.strpath))
+ cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+ rev_opt, verbose_opt, auth_opt, self.strpath)
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ )
+ stdout, stderr = popen.communicate()
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+        minidom, ExpatError = importxml()
+ try:
+ tree = minidom.parseString(stdout)
+ except ExpatError:
+ raise ValueError('no such revision')
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(LogEntry(logentry))
+ return result
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ def __hash__(self):
+ return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+ attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+ 'deleted', 'prop_modified', 'unknown', 'update_available',
+ 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+ )
+
+ def __init__(self, wcpath, rev=None, modrev=None, author=None):
+ self.wcpath = wcpath
+ self.rev = rev
+ self.modrev = modrev
+ self.author = author
+
+ for name in self.attrnames:
+ setattr(self, name, [])
+
+ def allpath(self, sort=True, **kw):
+ d = {}
+ for name in self.attrnames:
+ if name not in kw or kw[name]:
+ for path in getattr(self, name):
+ d[path] = 1
+        l = list(d.keys())
+ if sort:
+ l.sort()
+ return l
+
+ # XXX a bit scary to assume there's always 2 spaces between username and
+ # path, however with win32 allowing spaces in user names there doesn't
+ # seem to be a more solid approach :(
+ _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ return a new WCStatus object parsed from 'data'
+ """
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ for line in data.split('\n'):
+ if not line.strip():
+ continue
+ #print "processing %r" % line
+ flags, rest = line[:8], line[8:]
+ # first column
+ c0,c1,c2,c3,c4,c5,x6,c7 = flags
+ #if '*' in line:
+ # print "flags", repr(flags), "rest", repr(rest)
+
+ if c0 in '?XI':
+ fn = line.split(None, 1)[1]
+ if c0 == '?':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.unknown.append(wcpath)
+ elif c0 == 'X':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(fn, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ elif c0 == 'I':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.ignored.append(wcpath)
+
+ continue
+
+ #elif c0 in '~!' or c4 == 'S':
+ # raise NotImplementedError("received flag %r" % c0)
+
+ m = WCStatus._rex_status.match(rest)
+ if not m:
+ if c7 == '*':
+ fn = rest.strip()
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.update_available.append(wcpath)
+ continue
+ if line.lower().find('against revision:')!=-1:
+ update_rev = int(rest.split(':')[1].strip())
+ continue
+ if line.lower().find('status on external') > -1:
+ # XXX not sure what to do here... perhaps we want to
+ # store some state instead of just continuing, as right
+ # now it makes the top-level external get added twice
+ # (once as external, once as 'normal' unchanged item)
+ # because of the way SVN presents external items
+ continue
+                # nothing matched: give up on this line
+                raise ValueError("could not parse line %r" % line)
+ else:
+ rev, modrev, author, fn = m.groups()
+ wcpath = rootwcpath.join(fn, abs=1)
+ #assert wcpath.check()
+ if c0 == 'M':
+ assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+ rootstatus.modified.append(wcpath)
+                elif c0 == 'A' or c3 == '+':
+ rootstatus.added.append(wcpath)
+ elif c0 == 'D':
+ rootstatus.deleted.append(wcpath)
+ elif c0 == 'C':
+ rootstatus.conflict.append(wcpath)
+ elif c0 == '~':
+ rootstatus.kindmismatch.append(wcpath)
+ elif c0 == '!':
+ rootstatus.incomplete.append(wcpath)
+ elif c0 == 'R':
+ rootstatus.replaced.append(wcpath)
+ elif not c0.strip():
+ rootstatus.unchanged.append(wcpath)
+ else:
+ raise NotImplementedError("received flag %r" % c0)
+
+ if c1 == 'M':
+ rootstatus.prop_modified.append(wcpath)
+ # XXX do we cover all client versions here?
+ if c2 == 'L' or c5 == 'K':
+ rootstatus.locked.append(wcpath)
+ if c7 == '*':
+ rootstatus.update_available.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ if update_rev:
+ rootstatus.update_rev = update_rev
+ continue
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ parse 'data' (XML string as output by 'svn st') into a status obj
+ """
+ # XXX for externals, the path is shown twice: once
+ # with external information, and once with full info as if
+ # the item was a normal non-external... the current way of
+ # dealing with this issue is by ignoring it - this does make
+ # externals appear as external items as well as 'normal',
+ # unchanged ones in the status object so this is far from ideal
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ minidom, ExpatError = importxml()
+ try:
+ doc = minidom.parseString(data)
+ except ExpatError:
+ e = sys.exc_info()[1]
+ raise ValueError(str(e))
+ urevels = doc.getElementsByTagName('against')
+ if urevels:
+ rootstatus.update_rev = urevels[-1].getAttribute('revision')
+ for entryel in doc.getElementsByTagName('entry'):
+ path = entryel.getAttribute('path')
+ statusel = entryel.getElementsByTagName('wc-status')[0]
+ itemstatus = statusel.getAttribute('item')
+
+ if itemstatus == 'unversioned':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.unknown.append(wcpath)
+ continue
+ elif itemstatus == 'external':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(path, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ continue
+ elif itemstatus == 'ignored':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.ignored.append(wcpath)
+ continue
+ elif itemstatus == 'incomplete':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.incomplete.append(wcpath)
+ continue
+
+ rev = statusel.getAttribute('revision')
+ if itemstatus == 'added' or itemstatus == 'none':
+ rev = '0'
+ modrev = '?'
+ author = '?'
+ date = ''
+ elif itemstatus == "replaced":
+ pass
+ else:
+ #print entryel.toxml()
+ commitel = entryel.getElementsByTagName('commit')[0]
+ if commitel:
+ modrev = commitel.getAttribute('revision')
+ author = ''
+ author_els = commitel.getElementsByTagName('author')
+ if author_els:
+ for c in author_els[0].childNodes:
+ author += c.nodeValue
+ date = ''
+ for c in commitel.getElementsByTagName('date')[0]\
+ .childNodes:
+ date += c.nodeValue
+
+ wcpath = rootwcpath.join(path, abs=1)
+
+ assert itemstatus != 'modified' or wcpath.check(file=1), (
+                "didn't expect a directory with changed content here")
+
+ itemattrname = {
+ 'normal': 'unchanged',
+ 'unversioned': 'unknown',
+ 'conflicted': 'conflict',
+ 'none': 'added',
+ }.get(itemstatus, itemstatus)
+
+ attr = getattr(rootstatus, itemattrname)
+ attr.append(wcpath)
+
+ propsstatus = statusel.getAttribute('props')
+ if propsstatus not in ('none', 'normal'):
+ rootstatus.prop_modified.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ rootstatus.date = date
+
+ # handle repos-status element (remote info)
+ rstatusels = entryel.getElementsByTagName('repos-status')
+ if rstatusels:
+ rstatusel = rstatusels[0]
+ ritemstatus = rstatusel.getAttribute('item')
+ if ritemstatus in ('added', 'modified'):
+ rootstatus.update_available.append(wcpath)
+
+ lockels = entryel.getElementsByTagName('lock')
+ if len(lockels):
+ rootstatus.locked.append(wcpath)
+
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+ def __init__(self, output):
+ # Path: test
+ # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+ # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ # Revision: 2151
+ # Node Kind: directory
+ # Schedule: normal
+ # Last Changed Author: hpk
+ # Last Changed Rev: 2100
+ # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+ d = {}
+ for line in output.split('\n'):
+ if not line.strip():
+ continue
+ key, value = line.split(':', 1)
+ key = key.lower().replace(' ', '')
+ value = value.strip()
+ d[key] = value
+ try:
+ self.url = d['url']
+ except KeyError:
+ raise ValueError("Not a versioned resource")
+ #raise ValueError, "Not a versioned resource %r" % path
+ self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
+ self.path = py.path.local(d['path'])
+ self.size = self.path.size()
+ if 'lastchangedrev' in d:
+ self.created_rev = int(d['lastchangedrev'])
+ if 'lastchangedauthor' in d:
+ self.last_author = d['lastchangedauthor']
+ if 'lastchangeddate' in d:
+ self.mtime = parse_wcinfotime(d['lastchangeddate'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+ """ Returns seconds since epoch, UTC. """
+ # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+ if not m:
+ raise ValueError("timestring %r does not match" % timestr)
+ timestr, timezone = m.groups()
+ # do not handle timezone specially, return value should be UTC
+ parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+ return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+ output,
+ rex = re.compile("Properties on '(.*)':")):
+ """ Return a dictionary of path->PropListDict mappings. """
+ lines = [x for x in output.split('\n') if x]
+ pdict = {}
+ while lines:
+ line = lines.pop(0)
+ m = rex.match(line)
+ if not m:
+ raise ValueError("could not parse propget-line: %r" % line)
+ path = m.groups()[0]
+ wcpath = wcroot.join(path, abs=1)
+ propnames = []
+ while lines and lines[0].startswith(' '):
+ propname = lines.pop(0).strip()
+ propnames.append(propname)
+ assert propnames, "must have found properties!"
+ pdict[wcpath] = PropListDict(wcpath, propnames)
+ return pdict
+
+
+def importxml(cache=[]):
+ if cache:
+ return cache
+ from xml.dom import minidom
+ from xml.parsers.expat import ExpatError
+ cache.extend([minidom, ExpatError])
+ return cache
+
+class LogEntry:
+ def __init__(self, logentry):
+ self.rev = int(logentry.getAttribute('revision'))
+ for lpart in filter(None, logentry.childNodes):
+ if lpart.nodeType == lpart.ELEMENT_NODE:
+ if lpart.nodeName == 'author':
+ self.author = lpart.firstChild.nodeValue
+ elif lpart.nodeName == 'msg':
+ if lpart.firstChild:
+ self.msg = lpart.firstChild.nodeValue
+ else:
+ self.msg = ''
+ elif lpart.nodeName == 'date':
+ #2003-07-29T20:05:11.598637Z
+ timestr = lpart.firstChild.nodeValue
+ self.date = parse_apr_time(timestr)
+ elif lpart.nodeName == 'paths':
+ self.strpaths = []
+ for ppart in filter(None, lpart.childNodes):
+ if ppart.nodeType == ppart.ELEMENT_NODE:
+ self.strpaths.append(PathEntry(ppart))
+ def __repr__(self):
+ return '<Logentry rev=%d author=%s date=%s>' % (
+ self.rev, self.author, self.date)
+
+
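
The svnwc path class above drives the `svn` command line and parses its output. A minimal usage sketch of the status/log entry points defined here (the working-copy path is a placeholder, and the py package must be importable):

    import py

    wc = py.path.svnwc('/tmp/wc')        # placeholder working-copy path

    # status() shells out to 'svn status' and collects paths per state
    st = wc.status(rec=1, updates=1)
    for p in st.modified:
        print(p)

    # log() parses 'svn log --xml' into LogEntry objects
    for entry in wc.log(verbose=True):
        print(entry.rev, entry.author, entry.msg)
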
diff --git a/python/py/py/_process/__init__.py b/python/py/py/_process/__init__.py
new file mode 100644
index 000000000..86c714ad1
--- /dev/null
+++ b/python/py/py/_process/__init__.py
@@ -0,0 +1 @@
+""" high-level sub-process handling """
diff --git a/python/py/py/_process/cmdexec.py b/python/py/py/_process/cmdexec.py
new file mode 100644
index 000000000..f83a24940
--- /dev/null
+++ b/python/py/py/_process/cmdexec.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+ """ return unicode output of executing 'cmd' in a separate process.
+
+    raise a cmdexec.Error exception if the command failed.
+    the exception will provide an 'err' attribute containing
+    the error-output from the command.
+    if the subprocess module does not provide proper unicode strings,
+    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
+ """
+ process = subprocess.Popen(cmd, shell=True,
+ universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate()
+ if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
+ try:
+ default_encoding = sys.getdefaultencoding() # jython may not have it
+ except AttributeError:
+ default_encoding = sys.stdout.encoding or 'UTF-8'
+ out = unicode(out, process.stdout.encoding or default_encoding)
+ err = unicode(err, process.stderr.encoding or default_encoding)
+ status = process.poll()
+ if status:
+ raise ExecutionFailed(status, status, cmd, out, err)
+ return out
+
+class ExecutionFailed(py.error.Error):
+ def __init__(self, status, systemstatus, cmd, out, err):
+ Exception.__init__(self)
+ self.status = status
+ self.systemstatus = systemstatus
+ self.cmd = cmd
+ self.err = err
+ self.out = out
+
+ def __str__(self):
+ return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+ ExecutionFailed.__module__ = 'py.process.cmdexec'
+ ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+ pass
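
A short sketch of the calling pattern this module is written for; the shell command is illustrative only:

    import sys
    import py

    try:
        out = py.process.cmdexec('ls nonexistent-directory')
    except py.process.cmdexec.Error:
        e = sys.exc_info()[1]   # 2.x/3.x-compatible idiom used throughout py
        # the exception carries the exit status and the captured streams
        print(e.status, e.err)
    else:
        print(out)
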
diff --git a/python/py/py/_process/forkedfunc.py b/python/py/py/_process/forkedfunc.py
new file mode 100644
index 000000000..1c2853068
--- /dev/null
+++ b/python/py/py/_process/forkedfunc.py
@@ -0,0 +1,120 @@
+
+"""
+ ForkedFunc provides a way to run a function in a forked process
+ and get at its return value, stdout and stderr output as well
+    as signals and exit statuses.
+"""
+
+import py
+import os
+import sys
+import marshal
+
+
+def get_unbuffered_io(fd, filename):
+ f = open(str(filename), "w")
+ if fd != f.fileno():
+ os.dup2(f.fileno(), fd)
+ class AutoFlush:
+ def write(self, data):
+ f.write(data)
+ f.flush()
+ def __getattr__(self, name):
+ return getattr(f, name)
+ return AutoFlush()
+
+
+class ForkedFunc:
+ EXITSTATUS_EXCEPTION = 3
+
+
+ def __init__(self, fun, args=None, kwargs=None, nice_level=0,
+ child_on_start=None, child_on_exit=None):
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = {}
+ self.fun = fun
+ self.args = args
+ self.kwargs = kwargs
+ self.tempdir = tempdir = py.path.local.mkdtemp()
+ self.RETVAL = tempdir.ensure('retval')
+ self.STDOUT = tempdir.ensure('stdout')
+ self.STDERR = tempdir.ensure('stderr')
+
+ pid = os.fork()
+ if pid: # in parent process
+ self.pid = pid
+ else: # in child process
+ self.pid = None
+ self._child(nice_level, child_on_start, child_on_exit)
+
+ def _child(self, nice_level, child_on_start, child_on_exit):
+ # right now we need to call a function, but first we need to
+ # map all IO that might happen
+ sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
+ sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
+ retvalf = self.RETVAL.open("wb")
+ EXITSTATUS = 0
+ try:
+ if nice_level:
+ os.nice(nice_level)
+ try:
+ if child_on_start is not None:
+ child_on_start()
+ retval = self.fun(*self.args, **self.kwargs)
+ retvalf.write(marshal.dumps(retval))
+ if child_on_exit is not None:
+ child_on_exit()
+ except:
+ excinfo = py.code.ExceptionInfo()
+ stderr.write(str(excinfo._getreprcrash()))
+ EXITSTATUS = self.EXITSTATUS_EXCEPTION
+ finally:
+ stdout.close()
+ stderr.close()
+ retvalf.close()
+ os.close(1)
+ os.close(2)
+ os._exit(EXITSTATUS)
+
+ def waitfinish(self, waiter=os.waitpid):
+ pid, systemstatus = waiter(self.pid, 0)
+ if systemstatus:
+ if os.WIFSIGNALED(systemstatus):
+ exitstatus = os.WTERMSIG(systemstatus) + 128
+ else:
+ exitstatus = os.WEXITSTATUS(systemstatus)
+ else:
+ exitstatus = 0
+ signal = systemstatus & 0x7f
+ if not exitstatus and not signal:
+ retval = self.RETVAL.open('rb')
+ try:
+ retval_data = retval.read()
+ finally:
+ retval.close()
+ retval = marshal.loads(retval_data)
+ else:
+ retval = None
+ stdout = self.STDOUT.read()
+ stderr = self.STDERR.read()
+ self._removetemp()
+ return Result(exitstatus, signal, retval, stdout, stderr)
+
+ def _removetemp(self):
+ if self.tempdir.check():
+ self.tempdir.remove()
+
+ def __del__(self):
+ if self.pid is not None: # only clean up in main process
+ self._removetemp()
+
+
+class Result(object):
+ def __init__(self, exitstatus, signal, retval, stdout, stderr):
+ self.exitstatus = exitstatus
+ self.signal = signal
+ self.retval = retval
+ self.out = stdout
+ self.err = stderr
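
A sketch of the round trip ForkedFunc implements. This is Unix-only (it relies on os.fork()), and the return value must be marshal-able:

    import py

    def compute():
        print('hello from the child')   # captured via the stdout tempfile
        return 42

    ff = py.process.ForkedFunc(compute)
    res = ff.waitfinish()
    # Result bundles exit status, signal, marshalled return value and output
    assert res.exitstatus == 0 and res.signal == 0
    assert res.retval == 42
    assert 'hello from the child' in res.out
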
diff --git a/python/py/py/_process/killproc.py b/python/py/py/_process/killproc.py
new file mode 100644
index 000000000..18e8310b5
--- /dev/null
+++ b/python/py/py/_process/killproc.py
@@ -0,0 +1,23 @@
+import py
+import os, sys
+
+if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
+ try:
+ import ctypes
+ except ImportError:
+ def dokill(pid):
+ py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+ else:
+ def dokill(pid):
+ PROCESS_TERMINATE = 1
+ handle = ctypes.windll.kernel32.OpenProcess(
+ PROCESS_TERMINATE, False, pid)
+ ctypes.windll.kernel32.TerminateProcess(handle, -1)
+ ctypes.windll.kernel32.CloseHandle(handle)
+else:
+ def dokill(pid):
+ os.kill(pid, 15)
+
+def kill(pid):
+ """ kill process by id. """
+ dokill(pid)
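
A tiny usage sketch; the child process exists only to be killed, and the POSIX 'sleep' command is used for illustration:

    import subprocess
    import py

    proc = subprocess.Popen(['sleep', '60'])
    py.process.kill(proc.pid)   # SIGTERM on POSIX, TerminateProcess/taskkill on win32
    proc.wait()
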
diff --git a/python/py/py/_std.py b/python/py/py/_std.py
new file mode 100644
index 000000000..97a985332
--- /dev/null
+++ b/python/py/py/_std.py
@@ -0,0 +1,18 @@
+import sys
+
+class Std(object):
+ """ makes top-level python modules available as an attribute,
+ importing them on first access.
+ """
+
+ def __init__(self):
+ self.__dict__ = sys.modules
+
+ def __getattr__(self, name):
+ try:
+ m = __import__(name)
+ except ImportError:
+ raise AttributeError("py.std: could not import %s" % name)
+ return m
+
+std = Std()
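
Because Std aliases sys.modules as its attribute dictionary, already-imported modules resolve directly and anything else is imported on first access. A quick sketch:

    import py

    assert py.std.os.path.basename('/a/b') == 'b'   # 'os' imported lazily
    assert py.std.re.match(r'\d+', '123')

    try:
        py.std.no_such_module
    except AttributeError:
        pass   # import failures surface as AttributeError
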
diff --git a/python/py/py/_xmlgen.py b/python/py/py/_xmlgen.py
new file mode 100644
index 000000000..2ffcaa14b
--- /dev/null
+++ b/python/py/py/_xmlgen.py
@@ -0,0 +1,253 @@
+"""
+module for generating and serializing xml and html structures
+by using simple python objects.
+
+(c) holger krekel, holger at merlinux eu. 2009
+"""
+import sys, re
+
+if sys.version_info >= (3,0):
+ def u(s):
+ return s
+ def unicode(x, errors=None):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+class Tag(list):
+ class Attr(object):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+class HtmlTag(Tag):
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x,1) for x in (
+ 'a,abbr,acronym,address,applet,area,b,bdo,big,blink,'
+ 'blockquote,body,br,button,caption,center,cite,code,col,'
+ 'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,'
+ 'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,'
+ 'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,'
+ 'map,marquee,menu,meta,multicol,nobr,noembed,noframes,'
+ 'noscript,object,ol,optgroup,option,p,pre,q,s,script,'
+ 'select,small,span,strike,strong,style,sub,sup,table,'
+ 'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,'
+ 'base,basefont,frame,hr,isindex,param,samp,var'
+ ).split(',') if x])
+
+ class Style(object):
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ #self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname+self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x+': ' + y for x,y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+ def __init__(self):
+ self.escape = {
+ u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
+ u('&') : u('&amp;'), u("'") : u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ try:
+ ustring = unicode(ustring)
+ except UnicodeDecodeError:
+ ustring = unicode(ustring, 'utf-8', errors='replace')
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
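
A small sketch of the tag namespace defined above: keyword arguments become XML attributes, and a trailing underscore (class_) sidesteps the Python keyword:

    from py._xmlgen import html

    para = html.p(
        'a paragraph with a ',
        html.a('link', href='http://example.com'),
        class_='intro',
    )
    print(para.unicode(indent=2))
    # <p class="intro">a paragraph with a <a href="http://example.com">link</a></p>
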
diff --git a/python/py/py/test.py b/python/py/py/test.py
new file mode 100644
index 000000000..aa5beb178
--- /dev/null
+++ b/python/py/py/test.py
@@ -0,0 +1,10 @@
+import sys
+if __name__ == '__main__':
+ import pytest
+ sys.exit(pytest.main())
+else:
+ import sys, pytest
+ sys.modules['py.test'] = pytest
+
+# for more API entry points see the 'tests' definition
+# in __init__.py
diff --git a/python/py/setup.cfg b/python/py/setup.cfg
new file mode 100644
index 000000000..be0b2a5c8
--- /dev/null
+++ b/python/py/setup.cfg
@@ -0,0 +1,11 @@
+[wheel]
+universal = 1
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/py/setup.py b/python/py/setup.py
new file mode 100644
index 000000000..06f0885cd
--- /dev/null
+++ b/python/py/setup.py
@@ -0,0 +1,38 @@
+import os, sys
+
+from setuptools import setup
+
+def main():
+ setup(
+ name='py',
+ description='library with cross-python path, ini-parsing, io, code, log facilities',
+ long_description = open('README.txt').read(),
+ version='1.4.31',
+ url='http://pylib.readthedocs.org/',
+ license='MIT license',
+ platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
+ author='holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others',
+ author_email='pytest-dev@python.org',
+ classifiers=['Development Status :: 6 - Mature',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Testing',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3'],
+ packages=['py',
+ 'py._code',
+ 'py._io',
+ 'py._log',
+ 'py._path',
+ 'py._process',
+ ],
+ zip_safe=False,
+ )
+
+if __name__ == '__main__':
+ main()
diff --git a/python/pyasn1-modules/CHANGES b/python/pyasn1-modules/CHANGES
new file mode 100644
index 000000000..40b7bb07c
--- /dev/null
+++ b/python/pyasn1-modules/CHANGES
@@ -0,0 +1,45 @@
+Revision 0.0.5
+--------------
+
+- License updated to vanilla BSD 2-Clause to ease package use
+ (http://opensource.org/licenses/BSD-2-Clause).
+
+Revision 0.0.4
+--------------
+
+- CMP structures (RFC4210), cmpdump.py tool and test case added.
+- SNMPv2c Message syntax (RFC1901) properly defined.
+- Package version established in form of __init__.__version__
+ which is in-sync with distutils.
+- Package meta information and classifiers updated.
+
+Revision 0.0.3
+--------------
+
+- Test cases implemented
+- X.509 CRMF structures (RFC2511) and crmfdump.py tool added
+- X.509 CRL structures and crldump.py tool added
+- PKCS#10 structures and pkcs10dump.py tool added
+- PKCS#8 structures and pkcs8dump.py tool added
+- PKCS#1 (rfc3447) structures added
+- OCSP request & response dumping tool added
+- SNMPv2c & SNMPv3/USM structures added
+- keydump.py moved into pkcs1dump.py
+- PEM files read function generalized to be used more universally.
+- complete PKIX1 '88 code implemented at rfc2459.py
+
+
+Revision 0.0.2
+--------------
+
+- Require pyasn1 >= 0.1.1
+- Fixes towards Py3K compatibility
+  + use whichever urllib module exists
+  + adapt to the new bytes type
+  + print is now a function, not a statement
+ + new exception syntax
+
+Revision 0.0.1a
+---------------
+
+- Initial revision, most code carried from pyasn1 examples.
diff --git a/python/pyasn1-modules/LICENSE b/python/pyasn1-modules/LICENSE
new file mode 100644
index 000000000..fac589b8c
--- /dev/null
+++ b/python/pyasn1-modules/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2005-2013, Ilya Etingof <ilya@glas.net>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/python/pyasn1-modules/MANIFEST.in b/python/pyasn1-modules/MANIFEST.in
new file mode 100644
index 000000000..056c1d05a
--- /dev/null
+++ b/python/pyasn1-modules/MANIFEST.in
@@ -0,0 +1,3 @@
+include CHANGES README LICENSE
+recursive-include tools *.py
+recursive-include test *.sh
diff --git a/python/pyasn1-modules/PKG-INFO b/python/pyasn1-modules/PKG-INFO
new file mode 100644
index 000000000..059bb2045
--- /dev/null
+++ b/python/pyasn1-modules/PKG-INFO
@@ -0,0 +1,26 @@
+Metadata-Version: 1.0
+Name: pyasn1-modules
+Version: 0.0.5
+Summary: A collection of ASN.1-based protocols modules.
+Home-page: http://sourceforge.net/projects/pyasn1/
+Author: Ilya Etingof <ilya@glas.net>
+Author-email: ilya@glas.net
+License: BSD
+Description: A collection of ASN.1 modules expressed in form of pyasn1 classes. Includes protocols PDUs definition (SNMP, LDAP etc.) and various data structures (X.509, PKCS etc.).
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: Science/Research
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Communications
+Classifier: Topic :: Security :: Cryptography
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/python/pyasn1-modules/README b/python/pyasn1-modules/README
new file mode 100644
index 000000000..a49a22efe
--- /dev/null
+++ b/python/pyasn1-modules/README
@@ -0,0 +1,17 @@
+
+ASN.1 modules for Python
+------------------------
+
+This is a small but growing collection of ASN.1 data structures
+[1] expressed in Python terms using the pyasn1 [2] data model.
+
+It's thought to be useful to protocol developers and testers.
+
+All modules are py2k/py3k-compliant.
+
+If you happen to convert some ASN.1 module into pyasn1 that is not
+yet present in this collection and wish to contribute, please send
+it to me.
+
+=-=-=
+mailto: ilya@glas.net
diff --git a/python/pyasn1-modules/pyasn1_modules/__init__.py b/python/pyasn1-modules/pyasn1_modules/__init__.py
new file mode 100644
index 000000000..824d8dfe5
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/__init__.py
@@ -0,0 +1,2 @@
+# http://www.python.org/dev/peps/pep-0396/
+__version__ = '0.0.5'
diff --git a/python/pyasn1-modules/pyasn1_modules/pem.py b/python/pyasn1-modules/pyasn1_modules/pem.py
new file mode 100644
index 000000000..d8d815873
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/pem.py
@@ -0,0 +1,51 @@
+import base64, sys
+
+stSpam, stHam, stDump = 0, 1, 2
+
+# The markers parameter is of the form ('start1', 'stop1'), ('start2', 'stop2'), ...
+# The return value is (marker-index, substrate)
+def readPemBlocksFromFile(fileObj, *markers):
+ startMarkers = dict(map(lambda x: (x[1],x[0]),
+ enumerate(map(lambda x: x[0], markers))))
+ stopMarkers = dict(map(lambda x: (x[1],x[0]),
+ enumerate(map(lambda x: x[1], markers))))
+ idx = -1; substrate = ''
+ state = stSpam
+ while 1:
+ certLine = fileObj.readline()
+ if not certLine:
+ break
+ certLine = certLine.strip()
+ if state == stSpam:
+ if certLine in startMarkers:
+ certLines = []
+ idx = startMarkers[certLine]
+ state = stHam
+ continue
+ if state == stHam:
+ if certLine in stopMarkers and stopMarkers[certLine] == idx:
+ state = stDump
+ else:
+ certLines.append(certLine)
+ if state == stDump:
+ if sys.version_info[0] <= 2:
+ substrate = ''.join([ base64.b64decode(x) for x in certLines ])
+ else:
+ substrate = ''.encode().join([ base64.b64decode(x.encode()) for x in certLines ])
+ break
+ return idx, substrate
+
+# Backward compatibility routine
+def readPemFromFile(fileObj,
+ startMarker='-----BEGIN CERTIFICATE-----',
+ endMarker='-----END CERTIFICATE-----'):
+ idx, substrate = readPemBlocksFromFile(fileObj, (startMarker, endMarker))
+ return substrate
+
+def readBase64FromFile(fileObj):
+ if sys.version_info[0] <= 2:
+ return ''.join([ base64.b64decode(x) for x in fileObj.readlines() ])
+ else:
+ return ''.encode().join(
+ [ base64.b64decode(x.encode()) for x in fileObj.readlines() ]
+ )
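
A short sketch of pulling a DER substrate out of a PEM file with the block reader above ('cert.pem' is a placeholder file containing one or more PEM blocks):

    from pyasn1_modules import pem

    fileObj = open('cert.pem')
    idx, substrate = pem.readPemBlocksFromFile(
        fileObj, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----'))
    fileObj.close()
    # idx is the index of the marker pair that matched; substrate is DER bytes
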
diff --git a/python/pyasn1-modules/pyasn1_modules/pkcs12.py b/python/pyasn1-modules/pyasn1_modules/pkcs12.py
new file mode 100644
index 000000000..fb17675c4
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/pkcs12.py
@@ -0,0 +1,34 @@
+#
+# PKCS#12 syntax
+#
+# ASN.1 source from:
+# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12.asn
+#
+# Sample captures could be obtained with "openssl pkcs12" command
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint
+from pyasn1_modules.rfc2459 import *
+from pyasn1_modules import rfc2251
+
+class Attributes(univ.SetOf):
+ componentType = rfc2251.Attribute()
+
+class Version(univ.Integer): pass
+
+class CertificationRequestInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.NamedType('attributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class Signature(univ.BitString): pass
+class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class CertificationRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+ )
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc1155.py b/python/pyasn1-modules/pyasn1_modules/rfc1155.py
new file mode 100644
index 000000000..9e3c5cdbe
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc1155.py
@@ -0,0 +1,73 @@
+#
+# SNMPv1 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1155.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+
+class ObjectName(univ.ObjectIdentifier): pass
+
+class SimpleSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('number', univ.Integer()),
+ namedtype.NamedType('string', univ.OctetString()),
+ namedtype.NamedType('object', univ.ObjectIdentifier()),
+ namedtype.NamedType('empty', univ.Null())
+ )
+
+class IpAddress(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0)
+ )
+    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
+ 4, 4
+ )
+class NetworkAddress(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('internet', IpAddress())
+ )
+
+class Counter(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 1)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+class Gauge(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+class TimeTicks(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 3)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+class Opaque(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 4)
+ )
+
+class ApplicationSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('address', NetworkAddress()),
+ namedtype.NamedType('counter', Counter()),
+ namedtype.NamedType('gauge', Gauge()),
+ namedtype.NamedType('ticks', TimeTicks()),
+ namedtype.NamedType('arbitrary', Opaque())
+ )
+
+class ObjectSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', SimpleSyntax()),
+ namedtype.NamedType('application-wide', ApplicationSyntax())
+ )
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc1157.py b/python/pyasn1-modules/pyasn1_modules/rfc1157.py
new file mode 100644
index 000000000..6a36b06a3
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc1157.py
@@ -0,0 +1,90 @@
+#
+# SNMPv1 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1157.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+from pyasn1_modules import rfc1155
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('version-1', 0)
+ )
+ defaultValue = 0
+
+class Community(univ.OctetString): pass
+
+class RequestID(univ.Integer): pass
+class ErrorStatus(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('noError', 0),
+ ('tooBig', 1),
+ ('noSuchName', 2),
+ ('badValue', 3),
+ ('readOnly', 4),
+ ('genErr', 5)
+ )
+class ErrorIndex(univ.Integer): pass
+
+class VarBind(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('name', rfc1155.ObjectName()),
+ namedtype.NamedType('value', rfc1155.ObjectSyntax())
+ )
+class VarBindList(univ.SequenceOf):
+ componentType = VarBind()
+
+class _RequestBase(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', RequestID()),
+ namedtype.NamedType('error-status', ErrorStatus()),
+ namedtype.NamedType('error-index', ErrorIndex()),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+class GetRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+class GetNextRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+class GetResponsePDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)
+ )
+class SetRequestPDU(_RequestBase):
+ tagSet = _RequestBase.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)
+ )
+
+class TrapPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('enterprise', univ.ObjectIdentifier()),
+ namedtype.NamedType('agent-addr', rfc1155.NetworkAddress()),
+ namedtype.NamedType('generic-trap', univ.Integer().clone(namedValues=namedval.NamedValues(('coldStart', 0), ('warmStart', 1), ('linkDown', 2), ('linkUp', 3), ('authenticationFailure', 4), ('egpNeighborLoss', 5), ('enterpriseSpecific', 6)))),
+ namedtype.NamedType('specific-trap', univ.Integer()),
+ namedtype.NamedType('time-stamp', rfc1155.TimeTicks()),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+class Pdus(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('get-request', GetRequestPDU()),
+ namedtype.NamedType('get-next-request', GetNextRequestPDU()),
+ namedtype.NamedType('get-response', GetResponsePDU()),
+ namedtype.NamedType('set-request', SetRequestPDU()),
+ namedtype.NamedType('trap', TrapPDU())
+ )
+
+class Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('community', Community()),
+ namedtype.NamedType('data', Pdus())
+ )
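
These classes are meant to be passed to the pyasn1 BER decoder as a specification. A decoding sketch, where 'snmp-packet.bin' is a placeholder for a captured SNMPv1 datagram:

    from pyasn1.codec.ber import decoder
    from pyasn1_modules import rfc1157

    packet = open('snmp-packet.bin', 'rb').read()
    msg, remainder = decoder.decode(packet, asn1Spec=rfc1157.Message())
    print(msg.getComponentByName('community'))
    pdu = msg.getComponentByName('data').getComponent()   # one of the Pdus choices
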
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc1901.py b/python/pyasn1-modules/pyasn1_modules/rfc1901.py
new file mode 100644
index 000000000..8cd7e7d12
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc1901.py
@@ -0,0 +1,15 @@
+#
+# SNMPv2c message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1901.txt
+#
+from pyasn1.type import univ, namedtype, namedval
+
+class Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues = namedval.NamedValues(('version-2c', 1)))),
+ namedtype.NamedType('community', univ.OctetString()),
+ namedtype.NamedType('data', univ.Any())
+ )
+
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc1902.py b/python/pyasn1-modules/pyasn1_modules/rfc1902.py
new file mode 100644
index 000000000..df0b0c3dc
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc1902.py
@@ -0,0 +1,105 @@
+#
+# SNMPv2c message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1902.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+
+class Integer(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ -2147483648, 2147483647
+ )
+
+class Integer32(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ -2147483648, 2147483647
+ )
+
+class OctetString(univ.OctetString):
+    subtypeSpec = univ.OctetString.subtypeSpec+constraint.ValueSizeConstraint(
+ 0, 65535
+ )
+
+class IpAddress(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x00)
+ )
+ subtypeSpec = univ.OctetString.subtypeSpec+constraint.ValueSizeConstraint(
+ 4, 4
+ )
+
+class Counter32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x01)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+class Gauge32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+class Unsigned32(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+class TimeTicks(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x03)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ 0, 4294967295
+ )
+
+class Opaque(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x04)
+ )
+
+class Counter64(univ.Integer):
+ tagSet = univ.Integer.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x06)
+ )
+ subtypeSpec = univ.Integer.subtypeSpec+constraint.ValueRangeConstraint(
+ 0, 18446744073709551615
+ )
+
+class Bits(univ.OctetString): pass
+
+class ObjectName(univ.ObjectIdentifier): pass
+
+class SimpleSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('integer-value', Integer()),
+ namedtype.NamedType('string-value', OctetString()),
+ namedtype.NamedType('objectID-value', univ.ObjectIdentifier())
+ )
+
+class ApplicationSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ipAddress-value', IpAddress()),
+ namedtype.NamedType('counter-value', Counter32()),
+ namedtype.NamedType('timeticks-value', TimeTicks()),
+ namedtype.NamedType('arbitrary-value', Opaque()),
+ namedtype.NamedType('big-counter-value', Counter64()),
+# This conflicts with Counter32
+# namedtype.NamedType('unsigned-integer-value', Unsigned32()),
+ namedtype.NamedType('gauge32-value', Gauge32())
+ ) # BITS misplaced?
+
+class ObjectSyntax(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', SimpleSyntax()),
+ namedtype.NamedType('application-wide', ApplicationSyntax())
+ )
+
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc1905.py b/python/pyasn1-modules/pyasn1_modules/rfc1905.py
new file mode 100644
index 000000000..bec60f8e3
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc1905.py
@@ -0,0 +1,100 @@
+#
+# SNMPv2c PDU syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc1905.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+from pyasn1_modules import rfc1902
+
+max_bindings = rfc1902.Integer(2147483647)
+
+class _BindValue(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('value', rfc1902.ObjectSyntax()),
+ namedtype.NamedType('unSpecified', univ.Null()),
+ namedtype.NamedType('noSuchObject', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('noSuchInstance', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('endOfMibView', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class VarBind(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('name', rfc1902.ObjectName()),
+ namedtype.NamedType('', _BindValue())
+ )
+
+class VarBindList(univ.SequenceOf):
+ componentType = VarBind()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(
+ 0, max_bindings
+ )
+
+class PDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', rfc1902.Integer32()),
+ namedtype.NamedType('error-status', univ.Integer(namedValues=namedval.NamedValues(('noError', 0), ('tooBig', 1), ('noSuchName', 2), ('badValue', 3), ('readOnly', 4), ('genErr', 5), ('noAccess', 6), ('wrongType', 7), ('wrongLength', 8), ('wrongEncoding', 9), ('wrongValue', 10), ('noCreation', 11), ('inconsistentValue', 12), ('resourceUnavailable', 13), ('commitFailed', 14), ('undoFailed', 15), ('authorizationError', 16), ('notWritable', 17), ('inconsistentName', 18)))),
+ namedtype.NamedType('error-index', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+class BulkPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('request-id', rfc1902.Integer32()),
+ namedtype.NamedType('non-repeaters', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('max-repetitions', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))),
+ namedtype.NamedType('variable-bindings', VarBindList())
+ )
+
+class GetRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+
+class GetNextRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+
+class ResponsePDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)
+ )
+
+class SetRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)
+ )
+
+class GetBulkRequestPDU(BulkPDU):
+    tagSet = BulkPDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5)
+ )
+
+class InformRequestPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6)
+ )
+
+class SNMPv2TrapPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)
+ )
+
+class ReportPDU(PDU):
+ tagSet = PDU.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8)
+ )
+
+class PDUs(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('get-request', GetRequestPDU()),
+ namedtype.NamedType('get-next-request', GetNextRequestPDU()),
+ namedtype.NamedType('get-bulk-request', GetBulkRequestPDU()),
+ namedtype.NamedType('response', ResponsePDU()),
+ namedtype.NamedType('set-request', SetRequestPDU()),
+ namedtype.NamedType('inform-request', InformRequestPDU()),
+ namedtype.NamedType('snmpV2-trap', SNMPv2TrapPDU()),
+ namedtype.NamedType('report', ReportPDU())
+ )
+
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2251.py b/python/pyasn1-modules/pyasn1_modules/rfc2251.py
new file mode 100644
index 000000000..3074c67a3
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2251.py
@@ -0,0 +1,319 @@
+#
+# LDAP message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/ldap.asn
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+from pyasn1.codec.der import decoder, encoder
+
+maxInt = univ.Integer(2147483647)
+
+class LDAPString(univ.OctetString): pass
+class LDAPOID(univ.OctetString): pass
+
+class LDAPDN(LDAPString): pass
+class RelativeLDAPDN(LDAPString): pass
+class AttributeType(LDAPString): pass
+class AttributeDescription(LDAPString): pass
+
+class AttributeDescriptionList(univ.SequenceOf):
+ componentType = AttributeDescription()
+
+class AttributeValue(univ.OctetString): pass
+
+class AssertionValue(univ.OctetString): pass
+
+class AttributeValueAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attributeDesc', AttributeDescription()),
+ namedtype.NamedType('assertionValue', AssertionValue())
+ )
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+class MatchingRuleId(LDAPString): pass
+
+class Control(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('controlType', LDAPOID()),
+ namedtype.DefaultedNamedType('criticality', univ.Boolean('False')),
+ namedtype.OptionalNamedType('controlValue', univ.OctetString())
+ )
+
+class Controls(univ.SequenceOf):
+ componentType = Control()
+
+class LDAPURL(LDAPString): pass
+
+class Referral(univ.SequenceOf):
+ componentType = LDAPURL()
+
+class SaslCredentials(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('mechanism', LDAPString()),
+ namedtype.OptionalNamedType('credentials', univ.OctetString())
+ )
+
+class AuthenticationChoice(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('simple', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('reserved-1', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('reserved-2', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('sasl', SaslCredentials().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+class BindRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 0)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 127))),
+ namedtype.NamedType('name', LDAPDN()),
+ namedtype.NamedType('authentication', AuthenticationChoice())
+ )
+
+class PartialAttributeList(univ.SequenceOf):
+    componentType = univ.Sequence(componentType=namedtype.NamedTypes(
+        namedtype.NamedType('type', AttributeDescription()),
+        namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+    ))
+
+class SearchResultEntry(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 4)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('objectName', LDAPDN()),
+ namedtype.NamedType('attributes', PartialAttributeList())
+ )
+
+class MatchingRuleAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('matchingRule', MatchingRuleId().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('type', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('matchValue', AssertionValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+        namedtype.DefaultedNamedType('dnAttributes', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+ )
+
+class SubstringFilter(univ.Sequence):
+    componentType = namedtype.NamedTypes(
+        namedtype.NamedType('type', AttributeDescription()),
+        namedtype.NamedType('substrings', univ.SequenceOf(
+            componentType=univ.Choice(
+                componentType=namedtype.NamedTypes(
+                    namedtype.NamedType('initial', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+                    namedtype.NamedType('any', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+                    namedtype.NamedType('final', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+                )
+            )
+        ))
+    )
+
+# Ugly hack to handle recursive Filter reference (up to 3-levels deep).
+
+class Filter3(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+class Filter2(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('and', univ.SetOf(componentType=Filter3()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('or', univ.SetOf(componentType=Filter3()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('not', Filter3().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+class Filter(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('and', univ.SetOf(componentType=Filter2()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('or', univ.SetOf(componentType=Filter2()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('not', Filter2().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.NamedType('present', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))),
+ namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+# End of Filter hack
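+#
+# A consequence worth noting: because Filter3 omits the 'and'/'or'/'not'
+# alternatives, a search filter whose boolean combinators nest beyond this
+# fixed depth will fail to decode; supporting deeper nesting requires
+# adding further FilterN levels to the chain above.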
+
+class SearchRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 3)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('baseObject', LDAPDN()),
+ namedtype.NamedType('scope', univ.Enumerated(namedValues=namedval.NamedValues(('baseObject', 0), ('singleLevel', 1), ('wholeSubtree', 2)))),
+ namedtype.NamedType('derefAliases', univ.Enumerated(namedValues=namedval.NamedValues(('neverDerefAliases', 0), ('derefInSearching', 1), ('derefFindingBaseObj', 2), ('derefAlways', 3)))),
+ namedtype.NamedType('sizeLimit', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
+ namedtype.NamedType('timeLimit', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))),
+ namedtype.NamedType('typesOnly', univ.Boolean()),
+ namedtype.NamedType('filter', Filter()),
+ namedtype.NamedType('attributes', AttributeDescriptionList())
+ )
+
+class UnbindRequest(univ.Null):
+    tagSet = univ.Null.tagSet.tagImplicitly(
+        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+    )
+
+class BindResponse(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
+ )
+ componentType = namedtype.NamedTypes(
+        namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(
+            ('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3),
+            ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7),
+            ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11),
+            ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+            ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18),
+            ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+            ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35),
+            ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+            ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53),
+            ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+            ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70),
+            ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82),
+            ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86),
+            ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)
+        ))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('serverSaslCreds', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7)))
+ )
+
+class LDAPResult(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+        namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(
+            ('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3),
+            ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7),
+            ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11),
+            ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+            ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18),
+            ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+            ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35),
+            ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+            ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53),
+            ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+            ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70),
+            ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82),
+            ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86),
+            ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)
+        ))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+ )
+
+class SearchResultReference(univ.SequenceOf):
+    tagSet = univ.SequenceOf.tagSet.tagImplicitly(
+        tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 19)
+    )
+ componentType = LDAPURL()
+
+class SearchResultDone(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 5)
+ )
+
+class AttributeTypeAndValues(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeDescription()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+class ModifyRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 6)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('object', LDAPDN()),
+        namedtype.NamedType('modification', univ.SequenceOf(
+            componentType=univ.Sequence(
+                componentType=namedtype.NamedTypes(
+                    namedtype.NamedType('operation', univ.Enumerated(namedValues=namedval.NamedValues(('add', 0), ('delete', 1), ('replace', 2)))),
+                    namedtype.NamedType('modification', AttributeTypeAndValues())
+                )
+            )
+        ))
+ )
+
+class ModifyResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 7)
+ )
+
+class AttributeList(univ.SequenceOf):
+    componentType = univ.Sequence(componentType=namedtype.NamedTypes(
+        namedtype.NamedType('type', AttributeDescription()),
+        namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+    ))
+
+class AddRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 8)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('attributes', AttributeList())
+ )
+
+class AddResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 9)
+ )
+
+class DelRequest(LDAPDN):
+    # RFC 2251: DelRequest ::= [APPLICATION 10] LDAPDN (not an LDAPResult)
+    tagSet = LDAPDN.tagSet.tagImplicitly(
+        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 10)
+    )
+
+class DelResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 11)
+ )
+
+class ModifyDNRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 12)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('newrdn', RelativeLDAPDN()),
+ namedtype.NamedType('deleteoldrdn', univ.Boolean()),
+ namedtype.OptionalNamedType('newSuperior', LDAPDN().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class ModifyDNResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 13)
+ )
+
+class CompareRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 14)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('entry', LDAPDN()),
+ namedtype.NamedType('ava', AttributeValueAssertion())
+ )
+
+class CompareResponse(LDAPResult):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 15)
+ )
+
+class AbandonRequest(univ.Integer):
+    # RFC 2251: AbandonRequest ::= [APPLICATION 16] MessageID; MessageID is
+    # declared further below, so the plain INTEGER base is tagged directly.
+    tagSet = univ.Integer.tagSet.tagImplicitly(
+        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 16)
+    )
+
+class ExtendedRequest(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 23)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('requestName', LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('requestValue', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class ExtendedResponse(univ.Sequence):
+ tagSet = univ.Sequence.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 24)
+ )
+ componentType = namedtype.NamedTypes(
+        namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(
+            ('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3),
+            ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7),
+            ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11),
+            ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14),
+            ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18),
+            ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21),
+            ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35),
+            ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49),
+            ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53),
+            ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66),
+            ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70),
+            ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82),
+            ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86),
+            ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)
+        ))),
+ namedtype.NamedType('matchedDN', LDAPDN()),
+ namedtype.NamedType('errorMessage', LDAPString()),
+ namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('responseName', LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10))),
+ namedtype.OptionalNamedType('response', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 11)))
+ )
+
+class MessageID(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
+ 0, maxInt
+ )
+
+class LDAPMessage(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('messageID', MessageID()),
+        namedtype.NamedType('protocolOp', univ.Choice(
+            componentType=namedtype.NamedTypes(
+                namedtype.NamedType('bindRequest', BindRequest()),
+                namedtype.NamedType('bindResponse', BindResponse()),
+                namedtype.NamedType('unbindRequest', UnbindRequest()),
+                namedtype.NamedType('searchRequest', SearchRequest()),
+                namedtype.NamedType('searchResEntry', SearchResultEntry()),
+                namedtype.NamedType('searchResDone', SearchResultDone()),
+                namedtype.NamedType('searchResRef', SearchResultReference()),
+                namedtype.NamedType('modifyRequest', ModifyRequest()),
+                namedtype.NamedType('modifyResponse', ModifyResponse()),
+                namedtype.NamedType('addRequest', AddRequest()),
+                namedtype.NamedType('addResponse', AddResponse()),
+                namedtype.NamedType('delRequest', DelRequest()),
+                namedtype.NamedType('delResponse', DelResponse()),
+                namedtype.NamedType('modDNRequest', ModifyDNRequest()),
+                namedtype.NamedType('modDNResponse', ModifyDNResponse()),
+                namedtype.NamedType('compareRequest', CompareRequest()),
+                namedtype.NamedType('compareResponse', CompareResponse()),
+                namedtype.NamedType('abandonRequest', AbandonRequest()),
+                namedtype.NamedType('extendedReq', ExtendedRequest()),
+                namedtype.NamedType('extendedResp', ExtendedResponse())
+            )
+        )),
+ namedtype.OptionalNamedType('controls', Controls().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
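+
+# A minimal, hypothetical driver, not part of the LDAP syntax above: read the
+# raw BER/DER substrate of one LDAPMessage from a file named on the command
+# line (e.g. bytes exported from a Wireshark capture) and pretty-print it.
+if __name__ == '__main__':
+    import sys
+    substrate = open(sys.argv[1], 'rb').read()
+    ldap_message, rest = decoder.decode(substrate, asn1Spec=LDAPMessage())
+    print(ldap_message.prettyPrint())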
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2314.py b/python/pyasn1-modules/pyasn1_modules/rfc2314.py
new file mode 100644
index 000000000..86b11fb30
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2314.py
@@ -0,0 +1,33 @@
+#
+# PKCS#10 syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc2314
+#
+# Sample captures can be obtained with the "openssl req" command
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint
+from pyasn1_modules.rfc2459 import *
+
+class Attributes(univ.SetOf):
+ componentType = Attribute()
+
+class Version(univ.Integer): pass
+
+class CertificationRequestInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.NamedType('attributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class Signature(univ.BitString): pass
+class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class CertificationRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+ )
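+
+# A minimal, hypothetical smoke test, not part of the PKCS#10 syntax above:
+# decode a DER-encoded certification request named on the command line, e.g.
+# one produced by "openssl req" with "-outform DER".
+if __name__ == '__main__':
+    import sys
+    from pyasn1.codec.der import decoder
+    csr, rest = decoder.decode(open(sys.argv[1], 'rb').read(), asn1Spec=CertificationRequest())
+    print(csr.prettyPrint())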
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2315.py b/python/pyasn1-modules/pyasn1_modules/rfc2315.py
new file mode 100644
index 000000000..76bb9573c
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2315.py
@@ -0,0 +1,205 @@
+#
+# PKCS#7 message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/pkcs7.asn
+#
+# Sample captures from:
+# openssl crl2pkcs7 -nocrl -certfile cert1.cer -out outfile.p7b
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+from pyasn1_modules.rfc2459 import *
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue()))
+ )
+
+class AttributeValueAssertion(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('attributeType', AttributeType()),
+ namedtype.NamedType('attributeValue', AttributeValue())
+ )
+
+pkcs_7 = univ.ObjectIdentifier('1.2.840.113549.1.7')
+data = univ.ObjectIdentifier('1.2.840.113549.1.7.1')
+signedData = univ.ObjectIdentifier('1.2.840.113549.1.7.2')
+envelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.3')
+signedAndEnvelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.4')
+digestedData = univ.ObjectIdentifier('1.2.840.113549.1.7.5')
+encryptedData = univ.ObjectIdentifier('1.2.840.113549.1.7.6')
+
+class ContentType(univ.ObjectIdentifier): pass
+
+class ContentEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class EncryptedContent(univ.OctetString): pass
+
+class EncryptedContentInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class Version(univ.Integer): pass # overrides x509.Version
+
+class EncryptedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
+ )
+
+class DigestAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class DigestAlgorithmIdentifiers(univ.SetOf):
+ componentType = DigestAlgorithmIdentifier()
+
+class Digest(univ.OctetString): pass
+
+class ContentInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contentType', ContentType()),
+ namedtype.OptionalNamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class DigestedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('contentInfo', ContentInfo()),
+        namedtype.NamedType('digest', Digest())
+ )
+
+class IssuerAndSerialNumber(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber())
+ )
+
+class KeyEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class EncryptedKey(univ.OctetString): pass
+
+class RecipientInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedKey', EncryptedKey())
+ )
+
+class RecipientInfos(univ.SetOf):
+ componentType = RecipientInfo()
+
+class Attributes(univ.SetOf):
+ componentType = Attribute()
+
+class ExtendedCertificateInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('certificate', Certificate()),
+ namedtype.NamedType('attributes', Attributes())
+ )
+
+class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class Signature(univ.BitString): pass
+
+class ExtendedCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()),
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
+ namedtype.NamedType('signature', Signature())
+ )
+
+class ExtendedCertificateOrCertificate(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', Certificate()),
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class ExtendedCertificatesAndCertificates(univ.SetOf):
+ componentType = ExtendedCertificateOrCertificate()
+
+class SerialNumber(univ.Integer): pass
+
+class CRLEntry(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', SerialNumber()),
+ namedtype.NamedType('revocationDate', useful.UTCTime())
+ )
+
+class TBSCertificateRevocationList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('lastUpdate', useful.UTCTime()),
+ namedtype.NamedType('nextUpdate', useful.UTCTime()),
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=CRLEntry()))
+ )
+
+class CertificateRevocationList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificateRevocationList', TBSCertificateRevocationList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+class CertificateRevocationLists(univ.SetOf):
+ componentType = CertificateRevocationList()
+
+class DigestEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass
+
+class EncryptedDigest(univ.OctetString): pass
+
+class SignerInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.OptionalNamedType('authenticatedAttributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('digestEncryptionAlgorithm', DigestEncryptionAlgorithmIdentifier()),
+ namedtype.NamedType('encryptedDigest', EncryptedDigest()),
+ namedtype.OptionalNamedType('unauthenticatedAttributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+class SignerInfos(univ.SetOf):
+ componentType = SignerInfo()
+
+class SignedAndEnvelopedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+ )
+
+class EnvelopedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
+ )
+
+class DigestInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
+ namedtype.NamedType('digest', Digest())
+ )
+
+class SignedData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
+ namedtype.NamedType('contentInfo', ContentInfo()),
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('signerInfos', SignerInfos())
+ )
+
+class Data(univ.OctetString): pass
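+
+# A minimal, hypothetical smoke test, not part of the PKCS#7 syntax above:
+# decode a DER-encoded ContentInfo named on the command line, e.g. the output
+# of the sample "openssl crl2pkcs7" command above with "-outform DER" added.
+if __name__ == '__main__':
+    import sys
+    from pyasn1.codec.der import decoder
+    content_info, rest = decoder.decode(open(sys.argv[1], 'rb').read(), asn1Spec=ContentInfo())
+    print(content_info.getComponentByName('contentType'))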
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2437.py b/python/pyasn1-modules/pyasn1_modules/rfc2437.py
new file mode 100644
index 000000000..3abf6dc3a
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2437.py
@@ -0,0 +1,53 @@
+#
+# PKCS#1 syntax
+#
+# ASN.1 source from:
+# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2.asn
+#
+# Sample captures can be obtained with the "openssl genrsa" command
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint
+from pyasn1_modules.rfc2459 import AlgorithmIdentifier
+
+pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
+rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
+md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
+md4WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.3')
+md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
+sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
+rsaOAEPEncryptionSET = univ.ObjectIdentifier('1.2.840.113549.1.1.6')
+id_RSAES_OAEP = univ.ObjectIdentifier('1.2.840.113549.1.1.7')
+id_mgf1 = univ.ObjectIdentifier('1.2.840.113549.1.1.8')
+id_pSpecified = univ.ObjectIdentifier('1.2.840.113549.1.1.9')
+id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26')
+
+MAX = 16
+
+class Version(univ.Integer): pass
+
+class RSAPrivateKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer())
+ )
+
+class RSAPublicKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer())
+ )
+
+# XXX defaults not set
+class RSAES_OAEP_params(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('hashFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('maskGenFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('pSourceFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+ )
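+
+# A minimal, hypothetical smoke test, not part of the PKCS#1 syntax above:
+# decode a DER-encoded RSA private key named on the command line, e.g. from
+# "openssl genrsa | openssl rsa -outform DER -out key.der".
+if __name__ == '__main__':
+    import sys
+    from pyasn1.codec.der import decoder
+    key, rest = decoder.decode(open(sys.argv[1], 'rb').read(), asn1Spec=RSAPrivateKey())
+    print(key.getComponentByName('modulus'))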
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2459.py b/python/pyasn1-modules/pyasn1_modules/rfc2459.py
new file mode 100644
index 000000000..c5021e0db
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2459.py
@@ -0,0 +1,903 @@
+#
+# X.509 message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn
+# http://www.ietf.org/rfc/rfc2459.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+
+MAX = 64 # XXX ?
+
+#
+# PKIX1Explicit88
+#
+
+# Upper Bounds
+ub_name = univ.Integer(32768)
+ub_common_name = univ.Integer(64)
+ub_locality_name = univ.Integer(128)
+ub_state_name = univ.Integer(128)
+ub_organization_name = univ.Integer(64)
+ub_organizational_unit_name = univ.Integer(64)
+ub_title = univ.Integer(64)
+ub_match = univ.Integer(128)
+ub_emailaddress_length = univ.Integer(128)
+ub_common_name_length = univ.Integer(64)
+ub_country_name_alpha_length = univ.Integer(2)
+ub_country_name_numeric_length = univ.Integer(3)
+ub_domain_defined_attributes = univ.Integer(4)
+ub_domain_defined_attribute_type_length = univ.Integer(8)
+ub_domain_defined_attribute_value_length = univ.Integer(128)
+ub_domain_name_length = univ.Integer(16)
+ub_extension_attributes = univ.Integer(256)
+ub_e163_4_number_length = univ.Integer(15)
+ub_e163_4_sub_address_length = univ.Integer(40)
+ub_generation_qualifier_length = univ.Integer(3)
+ub_given_name_length = univ.Integer(16)
+ub_initials_length = univ.Integer(5)
+ub_integer_options = univ.Integer(256)
+ub_numeric_user_id_length = univ.Integer(32)
+ub_organization_name_length = univ.Integer(64)
+ub_organizational_unit_name_length = univ.Integer(32)
+ub_organizational_units = univ.Integer(4)
+ub_pds_name_length = univ.Integer(16)
+ub_pds_parameter_length = univ.Integer(30)
+ub_pds_physical_address_lines = univ.Integer(6)
+ub_postal_code_length = univ.Integer(16)
+ub_surname_length = univ.Integer(40)
+ub_terminal_id_length = univ.Integer(24)
+ub_unformatted_address_length = univ.Integer(180)
+ub_x121_address_length = univ.Integer(16)
+
+class UniversalString(char.UniversalString): pass
+class BMPString(char.BMPString): pass
+class UTF8String(char.UTF8String): pass
+
+id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
+id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1')
+id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2')
+id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3')
+id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48')
+
+id_qt_cps = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1')
+id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2')
+
+id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1')
+id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2')
+
+class AttributeValue(univ.Any): pass
+
+class AttributeType(univ.ObjectIdentifier): pass
+
+class AttributeTypeAndValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue())
+ )
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+id_at = univ.ObjectIdentifier('2.5.4')
+id_at_name = univ.ObjectIdentifier('2.5.4.41')
+id_at_surname = univ.ObjectIdentifier('2.5.4.4')
+id_at_givenName = univ.ObjectIdentifier('2.5.4.42')
+id_at_initials = univ.ObjectIdentifier('2.5.4.43')
+id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44')
+
+class X520name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
+ )
+
+id_at_commonName = univ.ObjectIdentifier('2.5.4.3')
+
+class X520CommonName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
+ )
+
+id_at_localityName = univ.ObjectIdentifier('2.5.4.7')
+
+class X520LocalityName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
+ )
+
+id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8')
+
+class X520StateOrProvinceName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
+ )
+
+id_at_organizationName = univ.ObjectIdentifier('2.5.4.10')
+
+class X520OrganizationName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
+ )
+
+id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11')
+
+class X520OrganizationalUnitName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
+ )
+
+id_at_title = univ.ObjectIdentifier('2.5.4.12')
+
+class X520Title(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
+ )
+
+id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46')
+
+class X520dnQualifier(char.PrintableString): pass
+
+id_at_countryName = univ.ObjectIdentifier('2.5.4.6')
+
+class X520countryName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2)
+
+pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9')
+
+emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1')
+
+class Pkcs9email(char.IA5String):
+ subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length)
+
+# ----
+
+class DSAPrivateKey(univ.Sequence):
+ """PKIX compliant DSA private key structure"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))),
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('public', univ.Integer()),
+ namedtype.NamedType('private', univ.Integer())
+ )
+
+# ----
+
+class RelativeDistinguishedName(univ.SetOf):
+ componentType = AttributeTypeAndValue()
+
+class RDNSequence(univ.SequenceOf):
+ componentType = RelativeDistinguishedName()
+
+class Name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('', RDNSequence())
+ )
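+
+# Note: the lone CHOICE alternative above is registered under the empty
+# name '', so callers retrieve the RDN sequence as getComponentByName('').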
+
+class DirectoryString(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) # hm, this should not be here!? XXX
+ )
+
+# certificate and CRL specific structures begin here
+
+class AlgorithmIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('parameters', univ.Any())
+ )
+
+class Extension(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+        namedtype.DefaultedNamedType('critical', univ.Boolean(False)),
+ namedtype.NamedType('extnValue', univ.Any())
+ )
+
+class Extensions(univ.SequenceOf):
+ componentType = Extension()
+ sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class SubjectPublicKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+ )
+
+class UniqueIdentifier(univ.BitString): pass
+
+class Time(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+ )
+
+class Validity(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBefore', Time()),
+ namedtype.NamedType('notAfter', Time())
+ )
+
+class CertificateSerialNumber(univ.Integer): pass
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('v1', 0), ('v2', 1), ('v3', 2)
+ )
+
+class TBSCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('validity', Validity()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+class Certificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificate', TBSCertificate()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signatureValue', univ.BitString())
+ )
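+
+# Illustrative usage, not part of the RFC 2459 module itself: a DER-encoded
+# certificate (e.g. exported with "openssl x509 -outform DER"; the file name
+# below is hypothetical) decodes against this spec:
+#
+#   from pyasn1.codec.der import decoder
+#   cert, rest = decoder.decode(open('cert.der', 'rb').read(), asn1Spec=Certificate())
+#   print(cert.getComponentByName('tbsCertificate').getComponentByName('serialNumber'))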
+
+# CRL structures
+
+class RevokedCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', CertificateSerialNumber()),
+ namedtype.NamedType('revocationDate', Time()),
+ namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
+ )
+
+class TBSCertList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('thisUpdate', Time()),
+ namedtype.OptionalNamedType('nextUpdate', Time()),
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())),
+ namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class CertificateList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertList', TBSCertList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+# Algorithm OIDs and parameter structures
+
+pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
+rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
+md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
+md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
+sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
+id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3')
+
+class Dss_Sig_Value(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('r', univ.Integer()),
+ namedtype.NamedType('s', univ.Integer())
+ )
+
+dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1')
+
+class ValidationParms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('seed', univ.BitString()),
+ namedtype.NamedType('pgenCounter', univ.Integer())
+ )
+
+class DomainParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+        namedtype.OptionalNamedType('j', univ.Integer()),
+ namedtype.OptionalNamedType('validationParms', ValidationParms())
+ )
+
+id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1')
+
+class Dss_Parms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer())
+ )
+
+# x400 address syntax starts here
+
+teletex_domain_defined_attributes = univ.Integer(6)
+
+class TeletexDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.TeletexString())
+ )
+
+class TeletexDomainDefinedAttributes(univ.SequenceOf):
+ componentType = TeletexDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+terminal_type = univ.Integer(23)
+
+class TerminalType(univ.Integer):
+    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, ub_integer_options)
+ namedValues = namedval.NamedValues(
+ ('telex', 3),
+        ('teletex', 4),
+ ('g3-facsimile', 5),
+ ('g4-facsimile', 6),
+ ('ia5-terminal', 7),
+ ('videotex', 8)
+ )
+
+class PresentationAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3), subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ )
+
+extended_network_address = univ.Integer(22)
+
+class E163_4_address(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('number', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class ExtendedNetworkAddress(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('e163-4-address', E163_4_address()),
+ namedtype.NamedType('psap-address', PresentationAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class PDSParameter(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
+ )
+
+local_postal_attributes = univ.Integer(21)
+
+class LocalPostalAttributes(PDSParameter): pass
+
+class UniquePostalName(PDSParameter): pass
+
+unique_postal_name = univ.Integer(20)
+
+poste_restante_address = univ.Integer(19)
+
+class PosteRestanteAddress(PDSParameter): pass
+
+post_office_box_address = univ.Integer(18)
+
+class PostOfficeBoxAddress(PDSParameter): pass
+
+street_address = univ.Integer(17)
+
+class StreetAddress(PDSParameter): pass
+
+class UnformattedPostalAddress(univ.Set):
+ componentType = namedtype.NamedTypes(
+        namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
+ )
+
+physical_delivery_office_name = univ.Integer(10)
+
+class PhysicalDeliveryOfficeName(PDSParameter): pass
+
+physical_delivery_office_number = univ.Integer(11)
+
+class PhysicalDeliveryOfficeNumber(PDSParameter): pass
+
+extension_OR_address_components = univ.Integer(12)
+
+class ExtensionORAddressComponents(PDSParameter): pass
+
+physical_delivery_personal_name = univ.Integer(13)
+
+class PhysicalDeliveryPersonalName(PDSParameter): pass
+
+physical_delivery_organization_name = univ.Integer(14)
+
+class PhysicalDeliveryOrganizationName(PDSParameter): pass
+
+extension_physical_delivery_address_components = univ.Integer(15)
+
+class ExtensionPhysicalDeliveryAddressComponents(PDSParameter): pass
+
+unformatted_postal_address = univ.Integer(16)
+
+postal_code = univ.Integer(9)
+
+class PostalCode(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
+ namedtype.NamedType('printable-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
+ )
+
+class PhysicalDeliveryCountryName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+class PDSName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length)
+
+physical_delivery_country_name = univ.Integer(8)
+
+class TeletexOrganizationalUnitName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+pds_name = univ.Integer(7)
+
+teletex_organizational_unit_names = univ.Integer(5)
+
+class TeletexOrganizationalUnitNames(univ.SequenceOf):
+ componentType = TeletexOrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+teletex_personal_name = univ.Integer(4)
+
+class TeletexPersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+teletex_organization_name = univ.Integer(3)
+
+class TeletexOrganizationName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+teletex_common_name = univ.Integer(2)
+
+class TeletexCommonName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+class CommonName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+common_name = univ.Integer(1)
+
+class ExtensionAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+        namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, ub_extension_attributes), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('extension-attribute-value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class ExtensionAttributes(univ.SetOf):
+ componentType = ExtensionAttribute()
+ subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes)
+
+class BuiltInDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+ )
+
+class BuiltInDomainDefinedAttributes(univ.SequenceOf):
+ componentType = BuiltInDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+class OrganizationalUnitName(char.PrintableString):
+    subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+class OrganizationalUnitNames(univ.SequenceOf):
+ componentType = OrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+class PersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+class NumericUserIdentifier(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
+
+class OrganizationName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+class PrivateDomainName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
+ )
+
+class TerminalIdentifier(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length)
+
+class X121Address(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length)
+
+class NetworkAddress(X121Address): pass
+
+class AdministrationDomainName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
+ )
+
+class CountryName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+class BuiltInStandardAttributes(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('country-name', CountryName()),
+ namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
+ namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('personal-name', PersonalName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
+ )
+
+class ORAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
+ namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
+ namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
+ )
+
+#
+# PKIX1Implicit88
+#
+
+id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24')
+
+class InvalidityDate(useful.GeneralizedTime): pass
+
+id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1')
+id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2')
+id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3')
+
+holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2')
+
+id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23')
+
+class HoldInstructionCode(univ.ObjectIdentifier): pass
+
+id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21')
+
+class CRLReason(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8)
+ )
+
+id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20')
+
+class CRLNumber(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+class BaseCRLNumber(CRLNumber): pass
+
+id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1')
+id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2')
+id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3')
+id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4')
+id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5')
+id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6')
+id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7')
+id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8')
+id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1')
+id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37')
+
+class KeyPurposeId(univ.ObjectIdentifier): pass
+
+class ExtKeyUsageSyntax(univ.SequenceOf):
+ componentType = KeyPurposeId()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class ReasonFlags(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('unused', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6)
+ )
+
+
+class SkipCerts(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36')
+
+class PolicyConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19')
+
+class BasicConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('cA', univ.Boolean(False)),
+ namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
+ )
+
+id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9')
+
+class SubjectDirectoryAttributes(univ.SequenceOf):
+ componentType = Attribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class EDIPartyName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('partyName', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class AnotherName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class GeneralName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName', AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('rfc822Name', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('x400Address', ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.NamedType('ediPartyName', EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
+ )
+
+class GeneralNames(univ.SequenceOf):
+ componentType = GeneralName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class AccessDescription(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
+ namedtype.NamedType('accessLocation', GeneralName())
+ )
+
+class AuthorityInfoAccessSyntax(univ.SequenceOf):
+ componentType = AccessDescription()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27')
+
+class DistributionPointName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fullName', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+class DistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+ )
+
+class BaseDistance(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31')
+
+class CRLDistPointsSyntax(univ.SequenceOf):
+ componentType = DistributionPoint()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28')
+
+class IssuingDistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+ )
+
+class GeneralSubtree(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('base', GeneralName()),
+ namedtype.NamedType('minimum', BaseDistance(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('maximum', BaseDistance().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+class GeneralSubtrees(univ.SequenceOf):
+ componentType = GeneralSubtree()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30')
+
+class NameConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class DisplayText(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('visibleString', char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
+ )
+
+class NoticeReference(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('organization', DisplayText()),
+ namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
+ )
+
+class UserNotice(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('noticeRef', NoticeReference()),
+ namedtype.OptionalNamedType('explicitText', DisplayText())
+ )
+
+class CPSuri(char.IA5String): pass
+
+class PolicyQualifierId(univ.ObjectIdentifier):
+ subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice)
+
+class CertPolicyId(univ.ObjectIdentifier): pass
+
+class PolicyQualifierInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
+ namedtype.NamedType('qualifier', univ.Any())
+ )
+
+id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32')
+
+class PolicyInformation(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyIdentifier', CertPolicyId()),
+ namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+class CertificatePolicies(univ.SequenceOf):
+ componentType = PolicyInformation()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33')
+
+class PolicyMapping(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
+ namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
+ )
+
+class PolicyMappings(univ.SequenceOf):
+ componentType = PolicyMapping()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16')
+
+class PrivateKeyUsagePeriod(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15')
+
+class KeyUsage(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('digitalSignature', 0),
+ ('nonRepudiation', 1),
+ ('keyEncipherment', 2),
+ ('dataEncipherment', 3),
+ ('keyAgreement', 4),
+ ('keyCertSign', 5),
+ ('cRLSign', 6),
+ ('encipherOnly', 7),
+ ('decipherOnly', 8)
+ )
+
+id_ce = univ.ObjectIdentifier('2.5.29')
+
+id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35')
+
+class KeyIdentifier(univ.OctetString): pass
+
+id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14')
+
+class SubjectKeyIdentifier(KeyIdentifier): pass
+
+class AuthorityKeyIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29')
+
+class CertificateIssuer(GeneralNames): pass
+
+id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17')
+
+class SubjectAltName(GeneralNames): pass
+
+id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18')
+
+class IssuerAltName(GeneralNames): pass
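
Usage sketch (not part of the patch): the PKIX1Implicit88 types above plug straight into pyasn1's codecs. A minimal round trip of a BasicConstraints value, assuming pyasn1 and this rfc2459 module are importable:

    from pyasn1.codec.der import decoder, encoder
    from pyasn1_modules import rfc2459

    # Build a CA-flavoured BasicConstraints value and DER-encode it.
    bc = rfc2459.BasicConstraints()
    bc.setComponentByName('cA', True)
    substrate = encoder.encode(bc)

    # Decode against the same spec; no trailing bytes are expected.
    decoded, rest = decoder.decode(substrate, asn1Spec=rfc2459.BasicConstraints())
    assert decoded.getComponentByName('cA') and not rest
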
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2511.py b/python/pyasn1-modules/pyasn1_modules/rfc2511.py
new file mode 100644
index 000000000..132be134b
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2511.py
@@ -0,0 +1,176 @@
+#
+# X.509 certificate Request Message Format (CRMF) syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc2511
+#
+# Sample captures could be obtained with OpenSSL
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+from pyasn1_modules.rfc2459 import *
+from pyasn1_modules import rfc2315
+
+MAX = 16
+
+id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
+id_pkip = univ.ObjectIdentifier('1.3.6.1.5.5.7.5')
+id_regCtrl = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1')
+id_regCtrl_regToken = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.1')
+id_regCtrl_authenticator = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.2')
+id_regCtrl_pkiPublicationInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.3')
+id_regCtrl_pkiArchiveOptions = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.4')
+id_regCtrl_oldCertID = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.5')
+id_regCtrl_protocolEncrKey = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.6')
+id_regInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2')
+id_regInfo_utf8Pairs = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.1')
+id_regInfo_certReq = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.2')
+
+# This should be in PKIX Certificate Extensions module
+
+class GeneralName(univ.OctetString): pass
+
+# end of PKIX Certificate Extensions module
+
+class UTF8Pairs(char.UTF8String): pass
+
+class ProtocolEncrKey(SubjectPublicKeyInfo): pass
+
+class CertId(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuer', GeneralName()),
+ namedtype.NamedType('serialNumber', univ.Integer())
+ )
+
+class OldCertId(CertId): pass
+
+class KeyGenParameters(univ.OctetString): pass
+
+class EncryptedValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('intendedAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('symmAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.OptionalNamedType('encSymmKey', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('keyAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('valueHint', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.NamedType('encValue', univ.BitString())
+ )
+
+class EncryptedKey(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedValue', EncryptedValue()),
+ namedtype.NamedType('envelopedData', rfc2315.EnvelopedData().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class PKIArchiveOptions(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptedPrivKey', EncryptedKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('keyGenParameters', KeyGenParameters().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('archiveRemGenPrivKey', univ.Boolean().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class SinglePubInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pubMethod', univ.Integer(namedValues=namedval.NamedValues(('dontCare', 0), ('x500', 1), ('web', 2), ('ldap', 3)))),
+ namedtype.OptionalNamedType('pubLocation', GeneralName())
+ )
+
+class PKIPublicationInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('action', univ.Integer(namedValues=namedval.NamedValues(('dontPublish', 0), ('pleasePublish', 1)))),
+ namedtype.OptionalNamedType('pubInfos', univ.SequenceOf(componentType=SinglePubInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+class Authenticator(char.UTF8String): pass
+class RegToken(char.UTF8String): pass
+
+class SubsequentMessage(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('encrCert', 0),
+ ('challengeResp', 1)
+ )
+
+class POPOPrivKey(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('thisMessage', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('subsequentMessage', SubsequentMessage().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dhMAC', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class PBMParameter(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('salt', univ.OctetString()),
+ namedtype.NamedType('owf', AlgorithmIdentifier()),
+ namedtype.NamedType('iterationCount', univ.Integer()),
+ namedtype.NamedType('mac', AlgorithmIdentifier())
+ )
+
+class PKMACValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algId', AlgorithmIdentifier()),
+ namedtype.NamedType('value', univ.BitString())
+ )
+
+class POPOSigningKeyInput(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('authInfo', univ.Choice(componentType=namedtype.NamedTypes(namedtype.NamedType('sender', GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('publicKeyMAC', PKMACValue())))),
+ namedtype.NamedType('publicKey', SubjectPublicKeyInfo())
+ )
+
+class POPOSigningKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('poposkInput', POPOSigningKeyInput().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('algorithmIdentifier', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+class ProofOfPossession(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('raVerified', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('signature', POPOSigningKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.NamedType('keyEncipherment', POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.NamedType('keyAgreement', POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3)))
+ )
+
+class Controls(univ.SequenceOf):
+ componentType = AttributeTypeAndValue()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class OptionalValidity(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class CertTemplate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('serialNumber', univ.Integer().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('signingAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
+ namedtype.OptionalNamedType('issuer', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))),
+ namedtype.OptionalNamedType('validity', OptionalValidity().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))),
+ namedtype.OptionalNamedType('subject', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))),
+ namedtype.OptionalNamedType('publicKey', SubjectPublicKeyInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))),
+ namedtype.OptionalNamedType('issuerUID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.OptionalNamedType('subjectUID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))),
+ namedtype.OptionalNamedType('extensions', Extensions().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9)))
+ )
+
+class CertRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('certTemplate', CertTemplate()),
+ namedtype.OptionalNamedType('controls', Controls())
+ )
+
+class CertReq(CertRequest): pass
+
+class CertReqMsg(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReq', CertRequest()),
+ namedtype.OptionalNamedType('pop', ProofOfPossession()),
+ namedtype.OptionalNamedType('regInfo', univ.SequenceOf(componentType=AttributeTypeAndValue()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+class CertReqMessages(univ.SequenceOf):
+ componentType = CertReqMsg()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
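
Usage sketch (not part of the patch): since every CertTemplate field is OPTIONAL, a CertRequest carrying just a certReqId and an empty template is the smallest value these CRMF types will round-trip through DER:

    from pyasn1.codec.der import decoder, encoder
    from pyasn1_modules import rfc2511

    req = rfc2511.CertRequest()
    req.setComponentByName('certReqId', 0)
    req.setComponentByName('certTemplate', rfc2511.CertTemplate())

    substrate = encoder.encode(req)
    decoded, rest = decoder.decode(substrate, asn1Spec=rfc2511.CertRequest())
    assert not rest
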
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc2560.py b/python/pyasn1-modules/pyasn1_modules/rfc2560.py
new file mode 100644
index 000000000..0be1091b1
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc2560.py
@@ -0,0 +1,171 @@
+#
+# OCSP request/response syntax
+#
+# Derived from a minimal OCSP library (RFC2560) code written by
+# Bud P. Bruegger <bud@ancitel.it>
+# Copyright: Ancitel, S.p.a, Rome, Italy
+# License: BSD
+#
+
+#
+# current limitations:
+# * request and response works only for a single certificate
+# * only some values are parsed out of the response
+# * the request doesn't set a nonce or signature
+# * there is no signature validation of the response
+# * dates are left as strings in GeneralizedTime format -- datetime.datetime
+# would be nicer
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, useful
+from pyasn1_modules import rfc2459
+
+# Start of OCSP module definitions
+
+# This should be in directory Authentication Framework (X.509) module
+
+class CRLReason(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8),
+ ('privilegeWithdrawn', 9),
+ ('aACompromise', 10)
+ )
+
+# end of directory Authentication Framework (X.509) module
+
+# This should be in PKIX Certificate Extensions module
+
+class GeneralName(univ.OctetString): pass
+
+# end of PKIX Certificate Extensions module
+
+id_kp_OCSPSigning = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 3, 9))
+id_pkix_ocsp = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1))
+id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1))
+id_pkix_ocsp_nonce = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 2))
+id_pkix_ocsp_crl = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 3))
+id_pkix_ocsp_response = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 4))
+id_pkix_ocsp_nocheck = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 5))
+id_pkix_ocsp_archive_cutoff = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 6))
+id_pkix_ocsp_service_locator = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 7))
+
+class AcceptableResponses(univ.SequenceOf):
+ componentType = univ.ObjectIdentifier()
+
+class ArchiveCutoff(useful.GeneralizedTime): pass
+
+class UnknownInfo(univ.Null): pass
+
+class RevokedInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('revocationTime', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('revocationReason', CRLReason().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class CertID(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('hashAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('issuerNameHash', univ.OctetString()),
+ namedtype.NamedType('issuerKeyHash', univ.OctetString()),
+ namedtype.NamedType('serialNumber', rfc2459.CertificateSerialNumber())
+ )
+
+class CertStatus(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('good', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('revoked', RevokedInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('unknown', UnknownInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class SingleResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certID', CertID()),
+ namedtype.NamedType('certStatus', CertStatus()),
+ namedtype.NamedType('thisUpdate', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('nextUpdate', useful.GeneralizedTime().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('singleExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class KeyHash(univ.OctetString): pass
+
+class ResponderID(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('byName', rfc2459.Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('byKey', KeyHash().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(('v1', 0))
+
+class ResponseData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('responderID', ResponderID()),
+ namedtype.NamedType('producedAt', useful.GeneralizedTime()),
+ namedtype.NamedType('responses', univ.SequenceOf(SingleResponse())),
+ namedtype.OptionalNamedType('responseExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class BasicOCSPResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsResponseData', ResponseData()),
+ namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString()),
+ namedtype.OptionalNamedType('certs', univ.SequenceOf(rfc2459.Certificate()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class ResponseBytes(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('responseType', univ.ObjectIdentifier()),
+ namedtype.NamedType('response', univ.OctetString())
+ )
+
+class OCSPResponseStatus(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('successful', 0),
+ ('malformedRequest', 1),
+ ('internalError', 2),
+ ('tryLater', 3),
+ ('undefinedStatus', 4), # should never occur
+ ('sigRequired', 5),
+ ('unauthorized', 6)
+ )
+
+class OCSPResponse(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('responseStatus', OCSPResponseStatus()),
+ namedtype.OptionalNamedType('responseBytes', ResponseBytes().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class Request(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('reqCert', CertID()),
+ namedtype.OptionalNamedType('singleRequestExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class Signature(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString()),
+ namedtype.OptionalNamedType('certs', univ.SequenceOf(rfc2459.Certificate()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class TBSRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('requestorName', GeneralName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('requestList', univ.SequenceOf(Request())),
+ namedtype.OptionalNamedType('requestExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+class OCSPRequest(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsRequest', TBSRequest()),
+ namedtype.OptionalNamedType('optionalSignature', Signature().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
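
Usage sketch (not part of the patch): assembling an unsigned OCSPRequest from the types above. The SHA-1 OID and the zeroed hashes/serial are placeholders, not values from any real certificate:

    from pyasn1.codec.der import encoder
    from pyasn1.type import univ
    from pyasn1_modules import rfc2459, rfc2560

    alg = rfc2459.AlgorithmIdentifier()
    alg.setComponentByName('algorithm', univ.ObjectIdentifier('1.3.14.3.2.26'))  # SHA-1

    cert_id = rfc2560.CertID()
    cert_id.setComponentByName('hashAlgorithm', alg)
    cert_id.setComponentByName('issuerNameHash', univ.OctetString(hexValue='00' * 20))
    cert_id.setComponentByName('issuerKeyHash', univ.OctetString(hexValue='00' * 20))
    cert_id.setComponentByName('serialNumber', rfc2459.CertificateSerialNumber(1))

    request = rfc2560.Request()
    request.setComponentByName('reqCert', cert_id)

    tbs = rfc2560.TBSRequest()
    tbs.setComponentByName('requestList',
        univ.SequenceOf(componentType=rfc2560.Request()).setComponentByPosition(0, request))

    ocsp_req = rfc2560.OCSPRequest()
    ocsp_req.setComponentByName('tbsRequest', tbs)
    der = encoder.encode(ocsp_req)
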
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc3412.py b/python/pyasn1-modules/pyasn1_modules/rfc3412.py
new file mode 100644
index 000000000..e80ce3155
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc3412.py
@@ -0,0 +1,38 @@
+#
+# SNMPv3 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3412.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+from pyasn1_modules import rfc1905
+
+class ScopedPDU(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('contextEngineId', univ.OctetString()),
+ namedtype.NamedType('contextName', univ.OctetString()),
+ namedtype.NamedType('data', rfc1905.PDUs())
+ )
+
+class ScopedPduData(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('plaintext', ScopedPDU()),
+ namedtype.NamedType('encryptedPDU', univ.OctetString()),
+ )
+
+class HeaderData(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgID', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgMaxSize', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(484, 2147483647))),
+ namedtype.NamedType('msgFlags', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 1))),
+ namedtype.NamedType('msgSecurityModel', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 2147483647)))
+ )
+
+class SNMPv3Message(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgVersion', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgGlobalData', HeaderData()),
+ namedtype.NamedType('msgSecurityParameters', univ.OctetString()),
+ namedtype.NamedType('msgData', ScopedPduData())
+ )
+
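
Usage sketch (not part of the patch): filling the wrapper above with illustrative values. The encryptedPDU branch of ScopedPduData is used so the example needs no rfc1905 PDU contents:

    from pyasn1.codec.ber import encoder
    from pyasn1.type import univ
    from pyasn1_modules import rfc3412

    hdr = rfc3412.HeaderData()
    hdr.setComponentByName('msgID', 1)
    hdr.setComponentByName('msgMaxSize', 65507)
    hdr.setComponentByName('msgFlags', univ.OctetString(hexValue='04'))  # reportable flag
    hdr.setComponentByName('msgSecurityModel', 3)  # USM

    data = rfc3412.ScopedPduData()
    data.setComponentByName('encryptedPDU', univ.OctetString('placeholder ciphertext'))

    msg = rfc3412.SNMPv3Message()
    msg.setComponentByName('msgVersion', 3)
    msg.setComponentByName('msgGlobalData', hdr)
    msg.setComponentByName('msgSecurityParameters', univ.OctetString(''))
    msg.setComponentByName('msgData', data)
    substrate = encoder.encode(msg)
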
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc3414.py b/python/pyasn1-modules/pyasn1_modules/rfc3414.py
new file mode 100644
index 000000000..580c88e97
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc3414.py
@@ -0,0 +1,17 @@
+#
+# SNMPv3 message syntax
+#
+# ASN.1 source from:
+# http://www.ietf.org/rfc/rfc3414.txt
+#
+from pyasn1.type import univ, namedtype, namedval, tag, constraint
+
+class UsmSecurityParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('msgAuthoritativeEngineID', univ.OctetString()),
+ namedtype.NamedType('msgAuthoritativeEngineBoots', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgAuthoritativeEngineTime', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
+ namedtype.NamedType('msgUserName', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 32))),
+ namedtype.NamedType('msgAuthenticationParameters', univ.OctetString()),
+ namedtype.NamedType('msgPrivacyParameters', univ.OctetString())
+ )
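
Usage sketch (not part of the patch): round-tripping the USM block with illustrative values; in a live message the encoding would travel inside the msgSecurityParameters OCTET STRING of the rfc3412 wrapper:

    from pyasn1.codec.ber import decoder, encoder
    from pyasn1.type import univ
    from pyasn1_modules import rfc3414

    usm = rfc3414.UsmSecurityParameters()
    usm.setComponentByName('msgAuthoritativeEngineID', univ.OctetString(hexValue='8000000001'))
    usm.setComponentByName('msgAuthoritativeEngineBoots', 1)
    usm.setComponentByName('msgAuthoritativeEngineTime', 0)
    usm.setComponentByName('msgUserName', 'usr-none-none')
    usm.setComponentByName('msgAuthenticationParameters', univ.OctetString(''))
    usm.setComponentByName('msgPrivacyParameters', univ.OctetString(''))

    substrate = encoder.encode(usm)
    decoded, rest = decoder.decode(substrate, asn1Spec=rfc3414.UsmSecurityParameters())
    assert not rest
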
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc3447.py b/python/pyasn1-modules/pyasn1_modules/rfc3447.py
new file mode 100644
index 000000000..96dea7fcb
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc3447.py
@@ -0,0 +1,35 @@
+#
+# PKCS#1 syntax
+#
+# ASN.1 source from:
+# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1.asn
+#
+# Sample captures could be obtained with "openssl genrsa" command
+#
+from pyasn1_modules.rfc2437 import *
+
+class OtherPrimeInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('prime', univ.Integer()),
+ namedtype.NamedType('exponent', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer())
+ )
+
+class OtherPrimeInfos(univ.SequenceOf):
+ componentType = OtherPrimeInfo()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + \
+ constraint.ValueSizeConstraint(1, MAX)
+
+class RSAPrivateKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('two-prime', 0), ('multi', 1)))),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer()),
+ namedtype.OptionalNamedType('otherPrimeInfos', OtherPrimeInfos())
+ )
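
Usage sketch (not part of the patch): parsing a key with the structure above, assuming key.der is a local file holding the unencrypted DER form of an "openssl genrsa" key (e.g. converted with "openssl rsa -outform DER"):

    from pyasn1.codec.der import decoder
    from pyasn1_modules import rfc3447

    with open('key.der', 'rb') as f:
        key, rest = decoder.decode(f.read(), asn1Spec=rfc3447.RSAPrivateKey())

    # Components decode as pyasn1 Integer objects.
    print(key.getComponentByName('modulus'))
    print(key.getComponentByName('publicExponent'))
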
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc4210.py b/python/pyasn1-modules/pyasn1_modules/rfc4210.py
new file mode 100644
index 000000000..545e9f42f
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc4210.py
@@ -0,0 +1,695 @@
+#
+# Certificate Management Protocol structures as per RFC4210
+#
+# Based on Alex Railean's work
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint, char, useful
+from pyasn1_modules import rfc2459, rfc2511, rfc2314
+
+MAX = 64
+
+class KeyIdentifier(univ.OctetString): pass
+
+class CMPCertificate(rfc2459.Certificate): pass
+
+class OOBCert(CMPCertificate): pass
+
+class CertAnnContent(CMPCertificate): pass
+
+class PKIFreeText(univ.SequenceOf):
+ """
+ PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String
+ """
+ componentType = char.UTF8String()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class PollRepContent(univ.SequenceOf):
+ """
+ PollRepContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER,
+ checkAfter INTEGER, -- time in seconds
+ reason PKIFreeText OPTIONAL
+ }
+ """
+ class CertReq(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('checkAfter', univ.Integer()),
+ namedtype.OptionalNamedType('reason', PKIFreeText())
+ )
+ componentType = CertReq()
+
+class PollReqContent(univ.SequenceOf):
+ """
+ PollReqContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER
+ }
+
+ """
+ class CertReq(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer())
+ )
+ componentType = CertReq()
+
+class InfoTypeAndValue(univ.Sequence):
+ """
+ InfoTypeAndValue ::= SEQUENCE {
+ infoType OBJECT IDENTIFIER,
+ infoValue ANY DEFINED BY infoType OPTIONAL
+ }"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('infoType', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('infoValue', univ.Any())
+ )
+
+class GenRepContent(univ.SequenceOf):
+ componentType = InfoTypeAndValue()
+
+class GenMsgContent(univ.SequenceOf):
+ componentType = InfoTypeAndValue()
+
+class PKIConfirmContent(univ.Null): pass
+
+class CRLAnnContent(univ.SequenceOf):
+ componentType = rfc2459.CertificateList()
+
+class CAKeyUpdAnnContent(univ.Sequence):
+ """
+ CAKeyUpdAnnContent ::= SEQUENCE {
+ oldWithNew CMPCertificate,
+ newWithOld CMPCertificate,
+ newWithNew CMPCertificate
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oldWithNew', CMPCertificate()),
+ namedtype.NamedType('newWithOld', CMPCertificate()),
+ namedtype.NamedType('newWithNew', CMPCertificate())
+ )
+
+class RevDetails(univ.Sequence):
+ """
+ RevDetails ::= SEQUENCE {
+ certDetails CertTemplate,
+ crlEntryDetails Extensions OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certDetails', rfc2511.CertTemplate()),
+ namedtype.OptionalNamedType('crlEntryDetails', rfc2459.Extensions())
+ )
+
+class RevReqContent(univ.SequenceOf):
+ componentType = RevDetails()
+
+class CertOrEncCert(univ.Choice):
+ """
+ CertOrEncCert ::= CHOICE {
+ certificate [0] CMPCertificate,
+ encryptedCert [1] EncryptedValue
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certificate', CMPCertificate().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.NamedType('encryptedCert', rfc2511.EncryptedValue().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+class CertifiedKeyPair(univ.Sequence):
+ """
+ CertifiedKeyPair ::= SEQUENCE {
+ certOrEncCert CertOrEncCert,
+ privateKey [0] EncryptedValue OPTIONAL,
+ publicationInfo [1] PKIPublicationInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certOrEncCert', CertOrEncCert()),
+ namedtype.OptionalNamedType('privateKey', rfc2511.EncryptedValue().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.OptionalNamedType('publicationInfo', rfc2511.PKIPublicationInfo().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+
+class POPODecKeyRespContent(univ.SequenceOf):
+ componentType = univ.Integer()
+
+class Challenge(univ.Sequence):
+ """
+ Challenge ::= SEQUENCE {
+ owf AlgorithmIdentifier OPTIONAL,
+ witness OCTET STRING,
+ challenge OCTET STRING
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('witness', univ.OctetString()),
+ namedtype.NamedType('challenge', univ.OctetString())
+ )
+
+class PKIStatus(univ.Integer):
+ """
+ PKIStatus ::= INTEGER {
+ accepted (0),
+ grantedWithMods (1),
+ rejection (2),
+ waiting (3),
+ revocationWarning (4),
+ revocationNotification (5),
+ keyUpdateWarning (6)
+ }
+ """
+ namedValues = namedval.NamedValues(
+ ('accepted', 0),
+ ('grantedWithMods', 1),
+ ('rejection', 2),
+ ('waiting', 3),
+ ('revocationWarning', 4),
+ ('revocationNotification', 5),
+ ('keyUpdateWarning', 6)
+ )
+
+class PKIFailureInfo(univ.BitString):
+ """
+ PKIFailureInfo ::= BIT STRING {
+ badAlg (0),
+ badMessageCheck (1),
+ badRequest (2),
+ badTime (3),
+ badCertId (4),
+ badDataFormat (5),
+ wrongAuthority (6),
+ incorrectData (7),
+ missingTimeStamp (8),
+ badPOP (9),
+ certRevoked (10),
+ certConfirmed (11),
+ wrongIntegrity (12),
+ badRecipientNonce (13),
+ timeNotAvailable (14),
+ unacceptedPolicy (15),
+ unacceptedExtension (16),
+ addInfoNotAvailable (17),
+ badSenderNonce (18),
+ badCertTemplate (19),
+ signerNotTrusted (20),
+ transactionIdInUse (21),
+ unsupportedVersion (22),
+ notAuthorized (23),
+ systemUnavail (24),
+ systemFailure (25),
+ duplicateCertReq (26)
+ """
+ namedValues = namedval.NamedValues(
+ ('badAlg', 0),
+ ('badMessageCheck', 1),
+ ('badRequest', 2),
+ ('badTime', 3),
+ ('badCertId', 4),
+ ('badDataFormat', 5),
+ ('wrongAuthority', 6),
+ ('incorrectData', 7),
+ ('missingTimeStamp', 8),
+ ('badPOP', 9),
+ ('certRevoked', 10),
+ ('certConfirmed', 11),
+ ('wrongIntegrity', 12),
+ ('badRecipientNonce', 13),
+ ('timeNotAvailable', 14),
+ ('unacceptedPolicy', 15),
+ ('unacceptedExtension', 16),
+ ('addInfoNotAvailable', 17),
+ ('badSenderNonce', 18),
+ ('badCertTemplate', 19),
+ ('signerNotTrusted', 20),
+ ('transactionIdInUse', 21),
+ ('unsupportedVersion', 22),
+ ('notAuthorized', 23),
+ ('systemUnavail', 24),
+ ('systemFailure', 25),
+ ('duplicateCertReq', 26)
+ )
+
+class PKIStatusInfo(univ.Sequence):
+ """
+ PKIStatusInfo ::= SEQUENCE {
+ status PKIStatus,
+ statusString PKIFreeText OPTIONAL,
+ failInfo PKIFailureInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatus()),
+ namedtype.OptionalNamedType('statusString', PKIFreeText()),
+ namedtype.OptionalNamedType('failInfo', PKIFailureInfo())
+ )
+
+class ErrorMsgContent(univ.Sequence):
+ """
+ ErrorMsgContent ::= SEQUENCE {
+ pKIStatusInfo PKIStatusInfo,
+ errorCode INTEGER OPTIONAL,
+ -- implementation-specific error codes
+ errorDetails PKIFreeText OPTIONAL
+ -- implementation-specific error details
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pKIStatusInfo', PKIStatusInfo()),
+ namedtype.OptionalNamedType('errorCode', univ.Integer()),
+ namedtype.OptionalNamedType('errorDetails', PKIFreeText())
+ )
+
+class CertStatus(univ.Sequence):
+ """
+ CertStatus ::= SEQUENCE {
+ certHash OCTET STRING,
+ certReqId INTEGER,
+ statusInfo PKIStatusInfo OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certHash', univ.OctetString()),
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.OptionalNamedType('statusInfo', PKIStatusInfo())
+ )
+
+class CertConfirmContent(univ.SequenceOf):
+ componentType = CertStatus()
+
+class RevAnnContent(univ.Sequence):
+ """
+ RevAnnContent ::= SEQUENCE {
+ status PKIStatus,
+ certId CertId,
+ willBeRevokedAt GeneralizedTime,
+ badSinceDate GeneralizedTime,
+ crlDetails Extensions OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatus()),
+ namedtype.NamedType('certId', rfc2511.CertId()),
+ namedtype.NamedType('willBeRevokedAt', useful.GeneralizedTime()),
+ namedtype.NamedType('badSinceDate', useful.GeneralizedTime()),
+ namedtype.OptionalNamedType('crlDetails', rfc2459.Extensions())
+ )
+
+class RevRepContent(univ.Sequence):
+ """
+ RevRepContent ::= SEQUENCE {
+ status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo,
+ revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId
+ OPTIONAL,
+ crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList
+ OPTIONAL
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType('revCerts', univ.SequenceOf(
+ componentType=rfc2511.CertId()
+ ).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.OptionalNamedType('crls', univ.SequenceOf(
+ componentType=rfc2459.CertificateList()
+ ).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+class KeyRecRepContent(univ.Sequence):
+ """
+ KeyRecRepContent ::= SEQUENCE {
+ status PKIStatusInfo,
+ newSigCert [0] CMPCertificate OPTIONAL,
+ caCerts [1] SEQUENCE SIZE (1..MAX) OF
+ CMPCertificate OPTIONAL,
+ keyPairHist [2] SEQUENCE SIZE (1..MAX) OF
+ CertifiedKeyPair OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType('newSigCert', CMPCertificate().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
+ )
+ ),
+ namedtype.OptionalNamedType('caCerts', univ.SequenceOf(
+ componentType=CMPCertificate()
+ ).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1),
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
+ )
+ ),
+ namedtype.OptionalNamedType('keyPairHist', univ.SequenceOf(
+ componentType=CertifiedKeyPair()
+ ).subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2),
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
+ )
+ )
+ )
+
+class CertResponse(univ.Sequence):
+ """
+ CertResponse ::= SEQUENCE {
+ certReqId INTEGER,
+ status PKIStatusInfo,
+ certifiedKeyPair CertifiedKeyPair OPTIONAL,
+ rspInfo OCTET STRING OPTIONAL
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('certReqId', univ.Integer()),
+ namedtype.NamedType('status', PKIStatusInfo()),
+ namedtype.OptionalNamedType('certifiedKeyPair', CertifiedKeyPair()),
+ namedtype.OptionalNamedType('rspInfo', univ.OctetString())
+ )
+
+class CertRepMessage(univ.Sequence):
+ """
+ CertRepMessage ::= SEQUENCE {
+ caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL,
+ response SEQUENCE OF CertResponse
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('caPubs', univ.SequenceOf(
+ componentType=CMPCertificate()
+ ).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1)
+ )
+ ),
+ namedtype.NamedType('response', univ.SequenceOf(
+ componentType=CertResponse())
+ )
+ )
+
+class POPODecKeyChallContent(univ.SequenceOf):
+ componentType = Challenge()
+
+class OOBCertHash(univ.Sequence):
+ """
+ OOBCertHash ::= SEQUENCE {
+ hashAlg [0] AlgorithmIdentifier OPTIONAL,
+ certId [1] CertId OPTIONAL,
+ hashVal BIT STRING
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('hashAlg',
+ rfc2459.AlgorithmIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,0)
+ )
+ ),
+ namedtype.OptionalNamedType('certId', rfc2511.CertId().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1)
+ )
+ ),
+ namedtype.NamedType('hashVal', univ.BitString())
+ )
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+class NestedMessageContent(univ.SequenceOf):
+ """
+ NestedMessageContent ::= PKIMessages
+ """
+ componentType = univ.Any()
+
+class DHBMParameter(univ.Sequence):
+ """
+ DHBMParameter ::= SEQUENCE {
+ owf AlgorithmIdentifier,
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ mac AlgorithmIdentifier
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ } -- or HMAC [RFC2104, RFC2202])
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier())
+ )
+
+id_DHBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.30')
+
+class PBMParameter(univ.Sequence):
+ """
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ owf AlgorithmIdentifier,
+ iterationCount INTEGER,
+ mac AlgorithmIdentifier
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('salt', univ.OctetString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(0, 128)
+ )
+ ),
+ namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()),
+ namedtype.NamedType('iterationCount', univ.Integer()),
+ namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier())
+ )
+
+id_PasswordBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.13')
+
+class PKIProtection(univ.BitString): pass
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+nestedMessageContent = NestedMessageContent().subtype(explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,20))
+
+class PKIBody(univ.Choice):
+ """
+ PKIBody ::= CHOICE { -- message-specific body elements
+ ir [0] CertReqMessages, --Initialization Request
+ ip [1] CertRepMessage, --Initialization Response
+ cr [2] CertReqMessages, --Certification Request
+ cp [3] CertRepMessage, --Certification Response
+ p10cr [4] CertificationRequest, --imported from [PKCS10]
+ popdecc [5] POPODecKeyChallContent, --pop Challenge
+ popdecr [6] POPODecKeyRespContent, --pop Response
+ kur [7] CertReqMessages, --Key Update Request
+ kup [8] CertRepMessage, --Key Update Response
+ krr [9] CertReqMessages, --Key Recovery Request
+ krp [10] KeyRecRepContent, --Key Recovery Response
+ rr [11] RevReqContent, --Revocation Request
+ rp [12] RevRepContent, --Revocation Response
+ ccr [13] CertReqMessages, --Cross-Cert. Request
+ ccp [14] CertRepMessage, --Cross-Cert. Response
+ ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann.
+ cann [16] CertAnnContent, --Certificate Ann.
+ rann [17] RevAnnContent, --Revocation Ann.
+ crlann [18] CRLAnnContent, --CRL Announcement
+ pkiconf [19] PKIConfirmContent, --Confirmation
+ nested [20] NestedMessageContent, --Nested Message
+ genm [21] GenMsgContent, --General Message
+ genp [22] GenRepContent, --General Response
+ error [23] ErrorMsgContent, --Error Message
+ certConf [24] CertConfirmContent, --Certificate confirm
+ pollReq [25] PollReqContent, --Polling request
+ pollRep [26] PollRepContent --Polling response
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('ir', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,0)
+ )
+ ),
+ namedtype.NamedType('ip', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1)
+ )
+ ),
+ namedtype.NamedType('cr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,2)
+ )
+ ),
+ namedtype.NamedType('cp', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,3)
+ )
+ ),
+ namedtype.NamedType('p10cr', rfc2314.CertificationRequest().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,4)
+ )
+ ),
+ namedtype.NamedType('popdecc', POPODecKeyChallContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,5)
+ )
+ ),
+ namedtype.NamedType('popdecr', POPODecKeyRespContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,6)
+ )
+ ),
+ namedtype.NamedType('kur', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,7)
+ )
+ ),
+ namedtype.NamedType('kup', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,8)
+ )
+ ),
+ namedtype.NamedType('krr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,9)
+ )
+ ),
+ namedtype.NamedType('krp', KeyRecRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,10)
+ )
+ ),
+ namedtype.NamedType('rr', RevReqContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,11)
+ )
+ ),
+ namedtype.NamedType('rp', RevRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,12)
+ )
+ ),
+ namedtype.NamedType('ccr', rfc2511.CertReqMessages().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,13)
+ )
+ ),
+ namedtype.NamedType('ccp', CertRepMessage().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,14)
+ )
+ ),
+ namedtype.NamedType('ckuann', CAKeyUpdAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,15)
+ )
+ ),
+ namedtype.NamedType('cann', CertAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,16)
+ )
+ ),
+ namedtype.NamedType('rann', RevAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,17)
+ )
+ ),
+ namedtype.NamedType('crlann', CRLAnnContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,18)
+ )
+ ),
+ namedtype.NamedType('pkiconf', PKIConfirmContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,19)
+ )
+ ),
+ namedtype.NamedType('nested', nestedMessageContent),
+# namedtype.NamedType('nested', NestedMessageContent().subtype(
+# explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,20)
+# )
+# ),
+ namedtype.NamedType('genm', GenMsgContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,21)
+ )
+ ),
+ namedtype.NamedType('genp', GenRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,22)
+ )
+ ),
+ namedtype.NamedType('error', ErrorMsgContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,23)
+ )
+ ),
+ namedtype.NamedType('certConf', CertConfirmContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,24)
+ )
+ ),
+ namedtype.NamedType('pollReq', PollReqContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,25)
+ )
+ ),
+ namedtype.NamedType('pollRep', PollRepContent().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,26)
+ )
+ )
+ )
+
+
+class PKIHeader(univ.Sequence):
+ """
+ PKIHeader ::= SEQUENCE {
+ pvno INTEGER { cmp1999(1), cmp2000(2) },
+ sender GeneralName,
+ recipient GeneralName,
+ messageTime [0] GeneralizedTime OPTIONAL,
+ protectionAlg [1] AlgorithmIdentifier OPTIONAL,
+ senderKID [2] KeyIdentifier OPTIONAL,
+ recipKID [3] KeyIdentifier OPTIONAL,
+ transactionID [4] OCTET STRING OPTIONAL,
+ senderNonce [5] OCTET STRING OPTIONAL,
+ recipNonce [6] OCTET STRING OPTIONAL,
+ freeText [7] PKIFreeText OPTIONAL,
+ generalInfo [8] SEQUENCE SIZE (1..MAX) OF
+ InfoTypeAndValue OPTIONAL
+ }
+
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('pvno', univ.Integer(
+ namedValues=namedval.NamedValues(
+ ('cmp1999', 1),
+ ('cmp2000', 2)
+ )
+ )
+ ),
+ namedtype.NamedType('sender', rfc2459.GeneralName()),
+ namedtype.NamedType('recipient', rfc2459.GeneralName()),
+ namedtype.OptionalNamedType('messageTime', useful.GeneralizedTime().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('protectionAlg', rfc2459.AlgorithmIdentifier().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
+ namedtype.OptionalNamedType('senderKID', rfc2459.KeyIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('recipKID', rfc2459.KeyIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('transactionID', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('senderNonce', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.OptionalNamedType('recipNonce', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.OptionalNamedType('freeText', PKIFreeText().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7))),
+ namedtype.OptionalNamedType('generalInfo',
+            univ.SequenceOf(
+                componentType=InfoTypeAndValue()
+            ).subtype(
+                subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+                explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8)
+            )
+ )
+ )
+
+class ProtectedPart(univ.Sequence):
+ """
+ ProtectedPart ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody
+ }
+ """
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PKIHeader()),
+        namedtype.NamedType('body', PKIBody())
+ )
+
+class PKIMessage(univ.Sequence):
+ """
+ PKIMessage ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody,
+ protection [0] PKIProtection OPTIONAL,
+ extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL
+ }"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PKIHeader()),
+ namedtype.NamedType('body', PKIBody()),
+ namedtype.OptionalNamedType('protection', PKIProtection().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+        namedtype.OptionalNamedType('extraCerts',
+ univ.SequenceOf(
+ componentType=CMPCertificate()
+ ).subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
+ )
+ )
+ )
+
+class PKIMessages(univ.SequenceOf):
+ """
+ PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
+ """
+ componentType = PKIMessage()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+# pyasn1 does not naturally handle recursive definitions, thus this hack:
+# NestedMessageContent ::= PKIMessages
+NestedMessageContent.componentType = PKIMessages()
+nestedMessageContent.componentType = PKIMessages()
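+
+# A minimal usage sketch (hypothetical: assumes `substrate` holds a
+# DER-encoded CMP message; the variable names are illustrative only):
+#
+#   from pyasn1.codec.der import decoder
+#   pkiMessage, _ = decoder.decode(substrate, asn1Spec=PKIMessage())
+#   print(pkiMessage.getComponentByName('header').prettyPrint())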
diff --git a/python/pyasn1-modules/pyasn1_modules/rfc5208.py b/python/pyasn1-modules/pyasn1_modules/rfc5208.py
new file mode 100644
index 000000000..d1d2c1691
--- /dev/null
+++ b/python/pyasn1-modules/pyasn1_modules/rfc5208.py
@@ -0,0 +1,46 @@
+#
+# PKCS#8 syntax
+#
+# ASN.1 source from:
+# http://tools.ietf.org/html/rfc5208
+#
+# Sample captures could be obtained with "openssl pkcs8 -topk8" command
+#
+from pyasn1.type import tag, namedtype, namedval, univ, constraint
+from pyasn1_modules.rfc2459 import *
+from pyasn1_modules import rfc2251
+
+class KeyEncryptionAlgorithms(AlgorithmIdentifier): pass
+
+class PrivateKeyAlgorithms(AlgorithmIdentifier): pass
+
+class EncryptedData(univ.OctetString): pass
+
+class EncryptedPrivateKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('encryptionAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('encryptedData', EncryptedData())
+ )
+
+class PrivateKey(univ.OctetString): pass
+
+class Attributes(univ.SetOf):
+ componentType = rfc2251.Attribute()
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(('v1', 0), ('v2', 1))
+
+class PrivateKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', Version()),
+ namedtype.NamedType('privateKeyAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('privateKey', PrivateKey()),
+ namedtype.OptionalNamedType('attributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
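+
+# A minimal decoding sketch (hypothetical: assumes `substrate` holds a
+# DER-encoded, unencrypted PKCS#8 key; variable names are illustrative only):
+#
+#   from pyasn1.codec.der import decoder
+#   keyInfo, _ = decoder.decode(substrate, asn1Spec=PrivateKeyInfo())
+#   print(keyInfo.getComponentByName('privateKeyAlgorithm').prettyPrint())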
diff --git a/python/pyasn1-modules/setup.cfg b/python/pyasn1-modules/setup.cfg
new file mode 100644
index 000000000..861a9f554
--- /dev/null
+++ b/python/pyasn1-modules/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/pyasn1-modules/setup.py b/python/pyasn1-modules/setup.py
new file mode 100644
index 000000000..b7530caa8
--- /dev/null
+++ b/python/pyasn1-modules/setup.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+"""A collection of ASN.1-based protocols modules.
+
+   A collection of ASN.1 modules expressed in the form of pyasn1 classes.
+   Includes protocol PDU definitions (SNMP, LDAP etc.) and various
+   data structures (X.509, PKCS etc.).
+"""
+
+classifiers = """\
+Development Status :: 5 - Production/Stable
+Environment :: Console
+Intended Audience :: Developers
+Intended Audience :: Education
+Intended Audience :: Information Technology
+Intended Audience :: Science/Research
+Intended Audience :: System Administrators
+Intended Audience :: Telecommunications Industry
+License :: OSI Approved :: BSD License
+Natural Language :: English
+Operating System :: OS Independent
+Programming Language :: Python :: 2
+Programming Language :: Python :: 3
+Topic :: Communications
+Topic :: Security :: Cryptography
+Topic :: Software Development :: Libraries :: Python Modules
+"""
+
+def howto_install_distribute():
+ print("""
+ Error: You need the distribute Python package!
+
+    It's very easy to install; just type (as root on Linux):
+
+ wget http://python-distribute.org/distribute_setup.py
+ python distribute_setup.py
+
+    Then you can build eggs from this package.
+""")
+
+def howto_install_setuptools():
+ print("""
+    Error: You need the setuptools Python package!
+
+    It's very easy to install; just type (as root on Linux):
+
+ wget http://peak.telecommunity.com/dist/ez_setup.py
+ python ez_setup.py
+
+    Then you can build eggs from this package.
+""")
+
+try:
+ from setuptools import setup
+ params = {
+ 'install_requires': [ 'pyasn1>=0.1.4' ],
+ 'zip_safe': True
+ }
+except ImportError:
+ import sys
+ for arg in sys.argv:
+ if arg.find('egg') != -1:
+ if sys.version_info[0] > 2:
+ howto_install_distribute()
+ else:
+ howto_install_setuptools()
+ sys.exit(1)
+ from distutils.core import setup
+ params = {}
+ if sys.version_info[:2] > (2, 4):
+ params['requires'] = [ 'pyasn1(>=0.1.4)' ]
+
+doclines = [ x.strip() for x in __doc__.split('\n') if x ]
+
+params.update( {
+ 'name': 'pyasn1-modules',
+ 'version': open('pyasn1_modules/__init__.py').read().split('\'')[1],
+ 'description': doclines[0],
+ 'long_description': ' '.join(doclines[1:]),
+ 'maintainer': 'Ilya Etingof <ilya@glas.net>',
+ 'author': 'Ilya Etingof',
+ 'author_email': 'ilya@glas.net',
+ 'url': 'http://sourceforge.net/projects/pyasn1/',
+ 'platforms': ['any'],
+ 'classifiers': [ x for x in classifiers.split('\n') if x ],
+ 'license': 'BSD',
+ 'packages': [ 'pyasn1_modules' ]
+ } )
+
+setup(**params)
diff --git a/python/pyasn1-modules/test/cmp.sh b/python/pyasn1-modules/test/cmp.sh
new file mode 100644
index 000000000..f734f208c
--- /dev/null
+++ b/python/pyasn1-modules/test/cmp.sh
@@ -0,0 +1,93 @@
+#!/bin/sh
+
+cmpdump.py <<EOT
+MIITuTCCARECAQKkWTBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxML
+VHJ1c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVypC0wKzELMAkGA1UE
+BhMCVFIxHDAaBgNVBAMME1ZhbGltby1WZXR0b3ItMTdEZWOgERgPMjAxMjA1MDMxMTE2MTdaoQ8w
+DQYJKoZIhvcNAQEFBQCiIgQgZWVhMjg5MGU2ZGY5N2IyNzk5NWY2MWE0MzE2MzI1OWGkEgQQQ01Q
+VjJUMTIyMzM0NjI3MKUSBBCAAAABgAAAAYAAAAGAAAABphIEEDEzNjY0NDMwMjlSYW5kb22jghIZ
+MIISFaGCC84wggvKMIIFwDCCBKigAwIBAgIQfOVE05R616R6Nqgu3drXHzANBgkqhkiG9w0BAQUF
+ADBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5T
+LjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNp
+c2kwHhcNMDgxMTI0MTAwMzI0WhcNMTYxMjE0MTExNzI0WjBdMQswCQYDVQQGEwJUUjEoMCYGA1UE
+CgwfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjEkMCIGA1UEAwwbZS1HdXZlbiBNb2Jp
+bCBUZXN0VVRGLTgtU09OMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzqaymRo5chRK
+EKrhjWQky1HOm6b/Jy4tSUuo4vq3O9U3G2osOU/hHb6fyMmznLpc6CaZ3qKYiuDMFRW8g1kNjEjV
+sFSvH0Yd4qgwP1+qqzhBSe+nCAnEbRUrz+nXJ4fKhmGaQ+ZSic+MeyoqDsf/zENKqdV7ea9l3Ilu
+Rj93bmTxas9aWPWQ/U/fpwkwRXaqaONlM5e4GWdgA7T1aq106NvH1z6LDNXcMYw4lSZkj/UjmM/0
+NhVz+57Ib4a0bogTaBmm8a1E5NtzkcA7pgnZT8576T0UoiOpEo+NAELA1B0mRh1/82HK1/0xn1zt
+1ym4XZRtn2r2l/wTeEwU79ALVQIDAQABo4ICZjCCAmIwfAYIKwYBBQUHAQEEcDBuMDIGCCsGAQUF
+BzABhiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTA4BggrBgEFBQcwAoYs
+aHR0cDovL3d3dy5lLWd1dmVuLmNvbS9kb2N1bWVudHMvVGVzdEtvay5jcnQwDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wggElBgNVHSAEggEcMIIBGDCCARQGCWCGGAMAAQECATCCAQUw
+NgYIKwYBBQUHAgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjCB
+ygYIKwYBBQUHAgIwgb0egboAQgB1ACAAcwBlAHIAdABpAGYAaQBrAGEAIABpAGwAZQAgAGkAbABn
+AGkAbABpACAAcwBlAHIAdABpAGYAaQBrAGEAIAB1AHkAZwB1AGwAYQBtAGEAIABlAHMAYQBzAGwA
+YQByATEAbgExACAAbwBrAHUAbQBhAGsAIABpAOcAaQBuACAAYgBlAGwAaQByAHQAaQBsAGUAbgAg
+AGQAbwBrAPwAbQBhAG4BMQAgAGEA5wExAG4BMQB6AC4wWAYDVR0fBFEwTzBNoEugSYZHaHR0cDov
+L3Rlc3RzaWwuZS1ndXZlbi5jb20vRWxla3Ryb25pa0JpbGdpR3V2ZW5saWdpQVNSb290L0xhdGVz
+dENSTC5jcmwwHQYDVR0OBBYEFLMoTImEKeXbqNjbYZkKshQi2vwzMB8GA1UdIwQYMBaAFGCI4dY9
+qCIkag0hwBgz5haCSNl0MA0GCSqGSIb3DQEBBQUAA4IBAQAWOsmvpoFB9sX2aq1/LjPDJ+A5Fpxm
+0XkOGM9yD/FsLfWgyv2HqBY1cVM7mjJfJ1ezkS0ODdlU6TyN5ouvAi21V9CIk69I3eUYSDjPpGia
+qcCCvJoMF0QD7B70kj2zW7IJ7pF11cbvPLaatdzojsH9fVfKtxtn/ZLrXtKsyUW5vKHOeniU6BBB
+Gl/ZZkFNXNN4mrB+B+wDV9OmdMw+Mc8KPq463hJQRat5a9lrXMdNtMAJOkvsUUzOemAsITjXWlyg
+BULijBhi8ZmMp0W7p6oKENX3vH2HCPCGQU29WIrK4iUoscjz93fB6oa4FQpxY0k3JRnWvD5FqkRD
+FKJdq/q9MIIDzzCCAregAwIBAgIQa34pJYdDFNXx90OkMkKzIjANBgkqhkiG9w0BAQUFADBxMQsw
+CQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYG
+A1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwHhcN
+MDYxMjE1MTUxMzU0WhcNMTYxMjE1MTExMzU0WjBxMQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxl
+a3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE4MDYGA1UEAxMvZS1HdXZlbiBFbGVrdHJvbmlr
+IFNlcnRpZmlrYSBIaXptZXQgU2FnbGF5aWNpc2kwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCU/PTxSkcWPJMx4UO8L8ep9/JqRgAZ79EqYWgR4K2bNLgENpc5j0hO+QydgovFODzkEIBP
+RIBavMz9Cw2PONpSBmxd4K1A/5hGqoGEz8UCA2tIx4+Z2A9AQ2O3BYi9FWM+0D1brJDO+6yvX4m5
+Rf3mLlso52NIVV705fIkmOExHjdAj/xB0/LICZMfwKn8F19Jae/SQv9cFnptbNRCq8hU5zLRngpR
+eT1PYrZVV0XLbzbDPwgzLXCzDxG1atdGd5JRTnD58qM1foC3+hGafuyissMQVGnBQFlsx7V6OdlD
+bsxUXegCl2li0RpRJXLqyqMdtEplaznKp8NnbddylfrPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+hjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFGCI4dY9qCIkag0hwBgz5haCSNl0MB0GA1Ud
+DgQWBBRgiOHWPagiJGoNIcAYM+YWgkjZdDANBgkqhkiG9w0BAQUFAAOCAQEAKftTVjgltZJxXwDs
+MumguOSlljOQjotVVpES1QYwo3a5RQVpKuS4KYDEdWLD4ITtDNOA/iGKYWCNyKsE1BCL66irknZw
+iR6p6P+q2Wf7fGYSwUBcSBwWBTA+0EgpvPL3/vRuVVCVgC8XHBr72jKKTg9Nwcj+1FwXGZTDpjX8
+dzPhTXEWceQcDn2FRdNt6BQad9Hdq08lMHiyozsWniYZYuWpud91i8Pl698H9t0KqiJg6rPKc9kd
+z9QyC8E/cLIJgYhvfzXMxvmSjeSSFSqTHioqfpU3k8AWXuxqJUxbdQ8QrVaTXRByzEr1Ze0TYpDs
+oel1PjC9ouO8bC7cGrbCWzCCAi8wggGYAhBlEjJUo9asY2ISG4oHjcpzMA0GCSqGSIb3DQEBBQUA
+MFoxCzAJBgNVBAYTAlRSMRAwDgYDVQQKEwdFLUd1dmVuMRQwEgYDVQQLEwtUcnVzdENlbnRlcjEj
+MCEGA1UEAxMaRS1HdXZlblRFU1RDQUhTTSBTeXN0ZW0gQ0EwHhcNMDkxMTMwMjIxMzEzWhcNMTYx
+MTMwMTkxMTUxWjBXMQswCQYDVQQGEwJUUjEQMA4GA1UEChMHRS1HdXZlbjEUMBIGA1UECxMLVHJ1
+c3RDZW50ZXIxIDAeBgNVBAMTF1JTQSBTZWN1cml0eSBDTVAgU2VydmVyMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDCaZeJerGULW+1UPSu9T0voPNgzPcihXX6G5Q45nS4RNCe+pOc226EtD51
+wu6Eq2oARpZmCrKPn63EFmHEE04dRDr8MS2LHuZK8xslIx/AvPnV568795EPoAyhGIX9Na9ZHhnI
+zSPWmWfBd9bsQiLVF7C9dOvfW125mtywWXELewIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAAiIse/x
+aWwRWUM0CIzfnoXfrgyLdKVykK7dTPgoMJgAx229uN6VTPyk+E+lTKq9PhK+e/VJNNg9PjSFjKFd
+lfSDOi9ne1xOrb7cNTjw+sGf1mfNWyzizLXa7su7ISFN+GaClmAstH9vXsRxg1oh3pFMJv47I6iw
+gUQlwwg8WsY/MIIGPzCCBjsCAQAwAwIBADCCBi+gggYrMIIGJzCCBQ+gAwIBAgIRALGVtVAeoM1x
+gjgOX3alZ5MwDQYJKoZIhvcNAQEFBQAwXTELMAkGA1UEBhMCVFIxKDAmBgNVBAoMH0VsZWt0cm9u
+aWsgQmlsZ2kgR3V2ZW5saWdpIEEuUy4xJDAiBgNVBAMMG2UtR3V2ZW4gTW9iaWwgVGVzdFVURi04
+LVNPTjAeFw0xMjA1MDMxMTE2MTdaFw0xMzA1MDMxMTE2MTdaMGoxCzAJBgNVBAYTAlRSMREwDwYD
+VQQKDAhGaXJlIExMVDEbMBkGA1UECwwScG9wQ29kZSAtIDEyMzQ1Njc4MRQwEgYDVQQFEws3NjU0
+MzQ1Njc2NTEVMBMGA1UEAwwMQnVyYWsgWW9uZGVtMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
+gQCpfSB7xcsHZR4E27yGHkzUJx1y2iknzX4gRM2acyPljRw/V5Lm7POrfWIX9UF2sxfYfRqxYmD0
++nw72nx8R/5AFQK0BfjHxIc5W1YekMHF8PSORo9rJqcX+qn+NBYwqcJl4EdObTcOtMWC6ws6n0uA
+oDvYYN0ujkua496sp+INiQIDAQABo4IDVzCCA1MwQgYIKwYBBQUHAQEENjA0MDIGCCsGAQUFBzAB
+hiZodHRwOi8vdGVzdG9jc3AyLmUtZ3V2ZW4uY29tL29jc3AueHVkYTAfBgNVHSMEGDAWgBSzKEyJ
+hCnl26jY22GZCrIUItr8MzCCAXIGA1UdIASCAWkwggFlMIGxBgZghhgDAAEwgaYwNgYIKwYBBQUH
+AgEWKmh0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL05FU1VFLnBkZjBsBggrBgEFBQcC
+AjBgGl5CdSBzZXJ0aWZpa2EsIDUwNzAgc2F5xLFsxLEgRWxla3Ryb25payDEsG16YSBLYW51bnVu
+YSBnw7ZyZSBuaXRlbGlrbGkgZWxla3Ryb25payBzZXJ0aWZpa2FkxLFyMIGuBglghhgDAAEBAQMw
+gaAwNwYIKwYBBQUHAgEWK2h0dHA6Ly93d3cuZS1ndXZlbi5jb20vZG9jdW1lbnRzL01LTkVTSS5w
+ZGYwZQYIKwYBBQUHAgIwWRpXQnUgc2VydGlmaWthLCBNS05FU0kga2Fwc2FtxLFuZGEgeWF5xLFu
+bGFubcSxxZ8gYmlyIG5pdGVsaWtsaSBlbGVrdHJvbmlrIHNlcnRpZmlrYWTEsXIuMA4GA1UdDwEB
+/wQEAwIGwDCBgwYIKwYBBQUHAQMEdzB1MAgGBgQAjkYBATBpBgtghhgBPQABp04BAQxaQnUgc2Vy
+dGlmaWthLCA1MDcwIHNheWlsaSBFbGVrdHJvbmlrIEltemEgS2FudW51bmEgZ8O2cmUgbml0ZWxp
+a2xpIGVsZWt0cm9uaWsgc2VydGlmaWthZGlyMEUGA1UdCQQ+MDwwFAYIKwYBBQUHCQIxCAQGQW5r
+YXJhMBIGCCsGAQUFBwkBMQYEBDE5NzkwEAYIKwYBBQUHCQQxBAQCVFIwGAYDVR0RBBEwD4ENZmly
+ZUBmaXJlLmNvbTBgBgNVHR8EWTBXMFWgU6BRhk9odHRwOi8vdGVzdHNpbC5lLWd1dmVuLmNvbS9F
+bGVrdHJvbmlrQmlsZ2lHdXZlbmxpZ2lBU01LTkVTSS1VVEYtOC9MYXRlc3RDUkwuY3JsMB0GA1Ud
+DgQWBBSLG9aIb1k2emFLCpM93kXJkWhzuTANBgkqhkiG9w0BAQUFAAOCAQEACoGCn4bzDWLzs799
+rndpB971UD2wbwt8Hkw1MGZkkJVQeVF4IS8FacAyYk5vY8ONuTA/Wsh4x23v9WTCtO89HMTz81eU
+BclqZ2Gc2UeMq7Y4FQWR8PNCMdCsxVVhpRRE6jQAyyR9YEBHQYVLfy34e3+9G/h/BR73VGHZJdZI
+DDJYd+VWXmUD9kGk/mI35qYdzN3O28KI8sokqX0z2hvkpDKuP4jNXSCHcVkK23tX2x5m6m0LdqVn
+vnCx2LfBn1wf1u7q30p/GgMVX+mR3QHs7feGewEjlkxuEyLVVD+uBwWCT6zcad17oaAyXV5RV28L
+vH0WNg6pFUpwOP0l+nIOqqCBhAOBgQBAtTB5Qd18sTxEKhSzRiN2OycFPrqoqlZZTHBohe8bE2D4
+Xc1ejkFWUEvQivkqJxCD6C7I37xgDaq8DZnaczIBxbPkY0QMdeL4MiEqlw/tlrJGrWoC5Twb0t/m
+JA5RSwQoMDYTj2WrwtM/nsP12T39or4JRZhlLSM43IaTwEBtQw==
+EOT
diff --git a/python/pyasn1-modules/test/crl.sh b/python/pyasn1-modules/test/crl.sh
new file mode 100644
index 000000000..76526cbeb
--- /dev/null
+++ b/python/pyasn1-modules/test/crl.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+crldump.py <<EOT
+-----BEGIN X509 CRL-----
+MIIBVjCBwAIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJBVTETMBEGA1UE
+CBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRk
+MRUwEwYDVQQDEwxzbm1wbGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25t
+cGxhYnMuY29tFw0xMjA0MTExMzQwNTlaFw0xMjA1MTExMzQwNTlaoA4wDDAKBgNV
+HRQEAwIBATANBgkqhkiG9w0BAQUFAAOBgQC1D/wwnrcY/uFBHGc6SyoYss2kn+nY
+RTwzXmmldbNTCQ03x5vkWGGIaRJdN8QeCzbEi7gpgxgpxAx6Y5WkxkMQ1UPjNM5n
+DGVDOtR0dskFrrbHuNpWqWrDaBN0/ryZiWKjr9JRbrpkHgVY29I1gLooQ6IHuKHY
+vjnIhxTFoCb5vA==
+-----END X509 CRL-----
+EOT
diff --git a/python/pyasn1-modules/test/crmf.sh b/python/pyasn1-modules/test/crmf.sh
new file mode 100644
index 000000000..487159858
--- /dev/null
+++ b/python/pyasn1-modules/test/crmf.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+crmfdump.py <<EOT
+MIIBozCCAZ8wggEFAgUAwTnj2jCByoABAqURMA8xDTALBgNVBAMTBHVzZXKmgZ8w
+DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAJ6ZQ2cYbn/lFsmBOlRltbRbFQUvvE0Q
+nbopOu1kC7Bmaaz7QTx8nxeiHi4m7uxCbGGxHNoGCt7EmdG8eZUBNAcHyGlXrJdm
+0z3/uNEGiBHq+xB8FnFJCA5EIJ3RWFnlbu9otSITLxWK7c5+/NHmWM+yaeHD/f/h
+rp01c/8qXZfZAgMBAAGpEDAOBgNVHQ8BAf8EBAMCBeAwLzASBgkrBgEFBQcFAQEM
+BTExMTExMBkGCSsGAQUFBwUBAgwMc2VydmVyX21hZ2ljoYGTMA0GCSqGSIb3DQEB
+BQUAA4GBAEI3KNEvTq/n1kNVhNhPkovk1AZxyJrN1u1+7Gkc4PLjWwjLOjcEVWt4
+AajUk/gkIJ6bbeO+fZlMjHfPSDKcD6AV2hN+n72QZwfzcw3icNvBG1el9EU4XfIm
+xfu5YVWi81/fw8QQ6X6YGHFQkomLd7jxakVyjxSng9BhO6GpjJNF
+EOT
+
diff --git a/python/pyasn1-modules/test/ocspreq.sh b/python/pyasn1-modules/test/ocspreq.sh
new file mode 100644
index 000000000..fff23c36a
--- /dev/null
+++ b/python/pyasn1-modules/test/ocspreq.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+ocspreqdump.py <<EOT
+MGowaDBBMD8wPTAJBgUrDgMCGgUABBS3ZrMV9C5Dko03aH13cEZeppg3wgQUkqR1LKSevoFE63n8
+isWVpesQdXMCBDXe9M+iIzAhMB8GCSsGAQUFBzABAgQSBBBjdJOiIW9EKJGELNNf/rdA
+EOT
+
diff --git a/python/pyasn1-modules/test/ocsprsp.sh b/python/pyasn1-modules/test/ocsprsp.sh
new file mode 100644
index 000000000..62090484c
--- /dev/null
+++ b/python/pyasn1-modules/test/ocsprsp.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+ocsprspdump.py <<EOT
+MIIEvQoBAKCCBLYwggSyBgkrBgEFBQcwAQEEggSjMIIEnzCCAQ+hgYAwfjELMAkGA1UEBhMCQVUx
+EzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEV
+MBMGA1UEAxMMc25tcGxhYnMuY29tMSAwHgYJKoZIhvcNAQkBFhFpbmZvQHNubXBsYWJzLmNvbRgP
+MjAxMjA0MTExNDA5MjJaMFQwUjA9MAkGBSsOAwIaBQAEFLdmsxX0LkOSjTdofXdwRl6mmDfCBBSS
+pHUspJ6+gUTrefyKxZWl6xB1cwIENd70z4IAGA8yMDEyMDQxMTE0MDkyMlqhIzAhMB8GCSsGAQUF
+BzABAgQSBBBjdJOiIW9EKJGELNNf/rdAMA0GCSqGSIb3DQEBBQUAA4GBADk7oRiCy4ew1u0N52QL
+RFpW+tdb0NfkV2Xyu+HChKiTThZPr9ZXalIgkJ1w3BAnzhbB0JX/zq7Pf8yEz/OrQ4GGH7HyD3Vg
+PkMu+J6I3A2An+bUQo99AmCbZ5/tSHtDYQMQt3iNbv1fk0yvDmh7UdKuXUNSyJdHeg27dMNy4k8A
+oIIC9TCCAvEwggLtMIICVqADAgECAgEBMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAkFVMRMw
+EQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxFTAT
+BgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wHhcN
+MTIwNDExMTMyNTM1WhcNMTMwNDExMTMyNTM1WjB+MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29t
+ZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRUwEwYDVQQDEwxzbm1w
+bGFicy5jb20xIDAeBgkqhkiG9w0BCQEWEWluZm9Ac25tcGxhYnMuY29tMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDDDU5HOnNV8I2CojxB8ilIWRHYQuaAjnjrETMOprouDHFXnwWqQo/I3m0b
+XYmocrh9kDefb+cgc7+eJKvAvBqrqXRnU38DmQU/zhypCftGGfP8xjuBZ1n23lR3hplN1yYA0J2X
+SgBaAg6e8OsKf1vcX8Es09rDo8mQpt4G2zR56wIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG
++EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU8Ys2dpJFLMHl
+yY57D4BNmlqnEcYwHwYDVR0jBBgwFoAU8Ys2dpJFLMHlyY57D4BNmlqnEcYwDQYJKoZIhvcNAQEF
+BQADgYEAWR0uFJVlQId6hVpUbgXFTpywtNitNXFiYYkRRv77McSJqLCa/c1wnuLmqcFcuRUK0oN6
+8ZJDP2HDDKe8MCZ8+sx+CF54eM8VCgN9uQ9XyE7x9XrXDd3Uw9RJVaWSIezkNKNeBE0lDM2jUjC4
+HAESdf7nebz1wtqAOXE1jWF/y8g=
+EOT
diff --git a/python/pyasn1-modules/test/pkcs1.sh b/python/pyasn1-modules/test/pkcs1.sh
new file mode 100644
index 000000000..cc45f94e7
--- /dev/null
+++ b/python/pyasn1-modules/test/pkcs1.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+pkcs1dump.py <<EOT
+-----BEGIN DSA PRIVATE KEY-----
+MIIBugIBAAKBgQCN91+Cma8UPw09gjwP9WOJCdpv3mv3/qFqzgiODGZx0Q002iTl
+1dq36m5TsWYFEcMCEyC3tFuoQ0mGq5zUUOmJvHCIPufs0g8Av0fhY77uFqneHHUi
+VQMCPCHX9vTCWskmDE21LJppU27bR4H2q+ysE30d6u3+84qrItsn4bjpcQIVAPR5
+QrmooOXDn7fHJzshmxImGC4VAoGAXxKyEnlvzq93d4V6KLWX3H5Jk2JP771Ss1bT
+6D/mSbLlvjjo7qsj6diul1axu6Wny31oPertzA2FeGEzkqvjSNmSxyYYMDB3kEcx
+ahntt37I1FgSlgdZHuhdtl1h1DBKXqCCneOZuNj+kW5ib14u5HDfFIbec2HJbvVs
+lJ/k83kCgYB4TD8vgHetXHxqsiZDoy5wOnQ3mmFAfl8ZdQsIfov6kEgArwPYUOVB
+JsX84f+MFjIOKXUV8dHZ8VRrGCLAbXcxKqLNWKlKHUnEsvt63pkaTy/RKHyQS+pn
+wontdTt9EtbF+CqIWnm2wpn3O+SbdtawzPOL1CcGB0jYABwbeQ81RwIUFKdyRYaa
+INow2I3/ks+0MxDabTY=
+-----END DSA PRIVATE KEY-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIBPAIBAAJBAMfAjvBNDDYBCl1w3yNcagZkPhqd0q5KqeOTgKSLuJWfe5+VSeR5
+Y1PcF3DyH8dvS3t8PIQjxJLoKS7HVRlsfhECAwEAAQJBAIr93/gxhIenXbD7MykF
+yvi7k8MtgkWoymICZwcX+c6RudFyuPPfQJ/sf6RmFZlRA9X9CQm5NwVG7+x1Yi6t
+KoECIQDmJUCWkPCiQYow6YxetpXFa0K6hTzOPmax7MNHVWNgmQIhAN4xOZ4JFT34
+xVhK+8EudBCYRomJUHmOJfoQAxiIXVw5AiEAyB7ecc5on/5zhqKef4Eu7LKfHIdc
+304diFuDVpTmTAkCIC2ZmKOQZaWkSowGR4isCfHl7oQHhFaOD8k0RA5i3hYxAiEA
+n8lDw3JT6NjvMnD6aM8KBsLyhazWSVVkaUSqmJzgCF0=
+-----END RSA PRIVATE KEY-----
+EOT
+
diff --git a/python/pyasn1-modules/test/pkcs10.sh b/python/pyasn1-modules/test/pkcs10.sh
new file mode 100644
index 000000000..d8f9e07df
--- /dev/null
+++ b/python/pyasn1-modules/test/pkcs10.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+pkcs10dump.py <<EOT
+-----BEGIN CERTIFICATE REQUEST-----
+MIIDATCCAekCAQAwgZkxCzAJBgNVBAYTAlJVMRYwFAYDVQQIEw1Nb3Njb3cgUmVn
+aW9uMQ8wDQYDVQQHEwZNb3Njb3cxGjAYBgNVBAoTEVNOTVAgTGFib3JhdG9yaWVz
+MQwwCgYDVQQLFANSJkQxFTATBgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3
+DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQC9n2NfGS98JDBmAXQn+vNUyPB3QPYC1cwpX8UMYh9MdAmBZJCnvXrQ
+Pp14gNAv6AQKxefmGES1b+Yd+1we9HB8AKm1/8xvRDUjAvy4iO0sqFCPvIfSujUy
+pBcfnR7QE2itvyrMxCDSEVnMhKdCNb23L2TptUmpvLcb8wfAMLFsSu2yaOtJysep
+oH/mvGqlRv2ti2+E2YA0M7Pf83wyV1XmuEsc9tQ225rprDk2uyshUglkDD2235rf
+0QyONq3Aw3BMrO9ss1qj7vdDhVHVsxHnTVbEgrxEWkq2GkVKh9QReMZ2AKxe40j4
+og+OjKXguOCggCZHJyXKxccwqCaeCztbAgMBAAGgIjAgBgkqhkiG9w0BCQIxExMR
+U05NUCBMYWJvcmF0b3JpZXMwDQYJKoZIhvcNAQEFBQADggEBAAihbwmN9M2bsNNm
+9KfxqiGMqqcGCtzIlpDz/2NVwY93cEZsbz3Qscc0QpknRmyTSoDwIG+1nUH0vzkT
+Nv8sBmp9I1GdhGg52DIaWwL4t9O5WUHgfHSJpPxZ/zMP2qIsdPJ+8o19BbXRlufc
+73c03H1piGeb9VcePIaulSHI622xukI6f4Sis49vkDaoi+jadbEEb6TYkJQ3AMRD
+WdApGGm0BePdLqboW1Yv70WRRFFD8sxeT7Yw4qrJojdnq0xMHPGfKpf6dJsqWkHk
+b5DRbjil1Zt9pJuF680S9wtBzSi0hsMHXR9TzS7HpMjykL2nmCVY6A78MZapsCzn
+GGbx7DI=
+-----END CERTIFICATE REQUEST-----
+EOT
+
diff --git a/python/pyasn1-modules/test/pkcs7.sh b/python/pyasn1-modules/test/pkcs7.sh
new file mode 100644
index 000000000..45b86c2ea
--- /dev/null
+++ b/python/pyasn1-modules/test/pkcs7.sh
@@ -0,0 +1,63 @@
+#!/bin/sh
+
+pkcs7dump.py <<EOT
+-----BEGIN PKCS7-----
+MIIKdQYJKoZIhvcNAQcCoIIKZjCCCmICAQExADALBgkqhkiG9w0BBwGgggpIMIIC
+XjCCAcegAwIBAgIBADANBgkqhkiG9w0BAQQFADB1MQswCQYDVQQGEwJSVTEPMA0G
+A1UEBxMGTW9zY293MRcwFQYDVQQKEw5Tb3ZhbSBUZWxlcG9ydDEMMAoGA1UECxMD
+TklTMQ8wDQYDVQQDEwZBQlMgQ0ExHTAbBgkqhkiG9w0BCQEWDmNlcnRAb25saW5l
+LnJ1MB4XDTk5MDgxNTE5MDI1OFoXDTAwMDExMjE5MDI1OFowdTELMAkGA1UEBhMC
+UlUxDzANBgNVBAcTBk1vc2NvdzEXMBUGA1UEChMOU292YW0gVGVsZXBvcnQxDDAK
+BgNVBAsTA05JUzEPMA0GA1UEAxMGQUJTIENBMR0wGwYJKoZIhvcNAQkBFg5jZXJ0
+QG9ubGluZS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw0g1P0yQAZIi
+ml2XOCOxnCcuhHmAgj4Ei9M2ebrrGwUMONPzr1a8W7JcpnR3FeOjxEIxrzkHr6UA
+oj4l/oC7Rv28uIig+Okf+82ekhH6VgAQNr5LAzfN8J6dZLx2OXAmmLleAqHuisT7
+I40vEFRoRmC5hiMlILE2rIlIKJn6cUkCAwEAATANBgkqhkiG9w0BAQQFAAOBgQBZ
+7ELDfGUNb+fbpHl5W3d9JMXsdOgd96+HG+X1SPgeiRAMjkla8WFCSaQPIR4vCy0m
+tm5a2bWSji6+vP5FGbjOz5iMlHMrCtu0He7Eim2zpaGI06ZIY75Cn1h2r3+KS0/R
+h01TJUbmsfV1tZm6Wk3bayJ+/K8A4mBHv8P6rhYacDCCAowwggH1oAMCAQICAQAw
+DQYJKoZIhvcNAQEEBQAwgYsxCzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cx
+FzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMT
+FUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xk
+ZW50ZWxlY29tLnJ1MB4XDTAwMDEwNTE1MDY1MVoXDTEwMDExNTE1MDY1MVowgYsx
+CzAJBgNVBAYTAlJVMQ8wDQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBU
+ZWxlY29tMQwwCgYDVQQLEwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFC
+UyBDQTEkMCIGCSqGSIb3DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MIGfMA0G
+CSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPFel/Svli6ogoUEb6eLtEvNSjyalETSMP
+MIZXdmWIkWijvEUhDnNJVAE3knAt6dVYqxWq0vc6CbAGFZNqEyioGU48IECLzV0G
+toiYejF/c9PuyIKDejeV9/YZnNFaZAUOXhOjREdZURLISKhX4tAbQyvK0Qka9AAR
+MEy9DoqV8QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAHQzgqFkoSMQr077UCr5C0l1
+rxLA17TrocCmUC1/PLmN0LmUHD0d7TjjTQKJaJBHxcKIg6+FOY6LSSY4nAN79eXi
+nBz+jEUG7+NTU/jcEArI35yP7fi4Mwb96EYDmUkUGtcLNq3JBe/d1Zhmy9HnNBL1
+Dn9thM2Q8RPYAJIU3JnGMIICqTCCAhICAQAwDQYJKoZIhvcNAQEEBQAwgZwxCzAJ
+BgNVBAYTAlJVMQ8wDQYDVQQIEwZNb3Njb3cxDzANBgNVBAcTBk1vc2NvdzEXMBUG
+A1UEChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA1JPTDEeMBwGA1UEAxMVR29s
+ZGVuIFRlbGVjb20gQUJTIENBMSQwIgYJKoZIhvcNAQkBFhVjZXJ0QGdvbGRlbnRl
+bGVjb20ucnUwHhcNMTAwMTE1MTU0MDI2WhcNMjAwMjIyMTU0MDI2WjCBnDELMAkG
+A1UEBhMCUlUxDzANBgNVBAgTBk1vc2NvdzEPMA0GA1UEBxMGTW9zY293MRcwFQYD
+VQQKEw5Hb2xkZW4gVGVsZWNvbTEMMAoGA1UECxMDUk9MMR4wHAYDVQQDExVHb2xk
+ZW4gVGVsZWNvbSBBQlMgQ0ExJDAiBgkqhkiG9w0BCQEWFWNlcnRAZ29sZGVudGVs
+ZWNvbS5ydTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAzxXpf0r5YuqIKFBG
++ni7RLzUo8mpRE0jDzCGV3ZliJFoo7xFIQ5zSVQBN5JwLenVWKsVqtL3OgmwBhWT
+ahMoqBlOPCBAi81dBraImHoxf3PT7siCg3o3lff2GZzRWmQFDl4To0RHWVESyEio
+V+LQG0MrytEJGvQAETBMvQ6KlfECAwEAATANBgkqhkiG9w0BAQQFAAOBgQCMrS4T
+LIzxcpu8nwOq/xMcxW4Ctz/wjIoePWkmSLe+Tkb4zo7aTsvzn+ETaWb7qztUpyl0
+QvlXn4vC2iCJloPpofPqSzF1UV3g5Zb93ReZu7E6kEyW0ag8R5XZKv0xuR3b3Le+
+ZqolT8wQELd5Mmw5JPofZ+O2cGNvet8tYwOKFjCCAqUwggIOoAMCAQICAgboMA0G
+CSqGSIb3DQEBBAUAMIGcMQswCQYDVQQGEwJSVTEPMA0GA1UECBMGTW9zY293MQ8w
+DQYDVQQHEwZNb3Njb3cxFzAVBgNVBAoTDkdvbGRlbiBUZWxlY29tMQwwCgYDVQQL
+EwNST0wxHjAcBgNVBAMTFUdvbGRlbiBUZWxlY29tIEFCUyBDQTEkMCIGCSqGSIb3
+DQEJARYVY2VydEBnb2xkZW50ZWxlY29tLnJ1MB4XDTExMDEyODEyMTcwOVoXDTEy
+MDIwMTAwMDAwMFowdjELMAkGA1UEBhMCUlUxDDAKBgNVBAgTA04vQTEXMBUGA1UE
+ChMOR29sZGVuIFRlbGVjb20xDDAKBgNVBAsTA0lTUDEWMBQGA1UEAxMNY3JheS5n
+bGFzLm5ldDEaMBgGCSqGSIb3DQEJARYLZWxpZUByb2wucnUwgZ8wDQYJKoZIhvcN
+AQEBBQADgY0AMIGJAoGBAPJAm8KG3ZCoJSvoGmLMPlGaMIpadu/EGSEYu+M/ybLp
+Cs8XmwB3876JVKKCbtGI6eqxOqvjedYXb+nKcyhz4Ztmm8RgAD7Z1WUItIpatejT
+79EYOUWrDN713SLZsImMyP4B4EySl4LZfHFRU2iOwLB6WozGCYuULLqYS9MDPrnT
+AgMBAAGjGzAZMBcGCWCGSAGG+EIBDQQKFghDPS07Uz0tOzANBgkqhkiG9w0BAQQF
+AAOBgQDEttS70qYCA+MGBA3hOR88XiBcTmuBarJDwn/rj31vRjYZUgp9bbFwscRI
+Ic4lDnlyvunwNitl+341bDg7u6Ebu9hCMbciyu4EtrsDh77DlLzbmNcXbnhlvbFL
+K9GiPz3dNyvQMfmaA0twd62zJDOVJ1SmO04lLmu/pAx8GhBZkqEAMQA=
+-----END PKCS7-----
+EOT
+
diff --git a/python/pyasn1-modules/test/pkcs8.sh b/python/pyasn1-modules/test/pkcs8.sh
new file mode 100644
index 000000000..add2d7e76
--- /dev/null
+++ b/python/pyasn1-modules/test/pkcs8.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+pkcs8dump.py <<EOT
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIBgTAbBgkqhkiG9w0BBQMwDgQIdtFgDWnipT8CAggABIIBYN0hkm2xqkTCt8dJ
+iZS8+HNiyHxy8g+rmWSXv/i+bTHFUReZA2GINtTRUkWpXqWcSHxNslgf7QdfgbVJ
+xQiUM+lLhwOFh85iAHR3xmPU1wfN9NvY9DiLSpM0DMhF3OvAMZD75zIhA0GSKu7w
+dUu7ey7H4fv7bez6RhEyLdKw9/Lf2KNStNOs4ow9CAtCoxeoMSniTt6CNhbvCkve
+9vNHKiGavX1tS/YTog4wiiGzh2YxuW1RiQpTdhWiKyECgD8qQVg2tY5t3QRcXrzi
+OkStpkiAPAbiwS/gyHpsqiLo0al63SCxRefugbn1ucZyc5Ya59e3xNFQXCNhYl+Z
+Hl3hIl3cssdWZkJ455Z/bBE29ks1HtsL+bTfFi+kw/4yuMzoaB8C7rXScpGNI/8E
+pvTU2+wtuoOFcttJregtR94ZHu5wgdYqRydmFNG8PnvZT1mRMmQgUe/vp88FMmsZ
+dLsZjNQ=
+-----END ENCRYPTED PRIVATE KEY-----
+-----BEGIN PRIVATE KEY-----
+MIIBVgIBADANBgkqhkiG9w0BAQEFAASCAUAwggE8AgEAAkEAx8CO8E0MNgEKXXDf
+I1xqBmQ+Gp3Srkqp45OApIu4lZ97n5VJ5HljU9wXcPIfx29Le3w8hCPEkugpLsdV
+GWx+EQIDAQABAkEAiv3f+DGEh6ddsPszKQXK+LuTwy2CRajKYgJnBxf5zpG50XK4
+899An+x/pGYVmVED1f0JCbk3BUbv7HViLq0qgQIhAOYlQJaQ8KJBijDpjF62lcVr
+QrqFPM4+ZrHsw0dVY2CZAiEA3jE5ngkVPfjFWEr7wS50EJhGiYlQeY4l+hADGIhd
+XDkCIQDIHt5xzmif/nOGop5/gS7ssp8ch1zfTh2IW4NWlOZMCQIgLZmYo5BlpaRK
+jAZHiKwJ8eXuhAeEVo4PyTREDmLeFjECIQCfyUPDclPo2O8ycPpozwoGwvKFrNZJ
+VWRpRKqYnOAIXQ==
+-----END PRIVATE KEY-----
+EOT
+
diff --git a/python/pyasn1-modules/test/x509dump.sh b/python/pyasn1-modules/test/x509dump.sh
new file mode 100644
index 000000000..62904971a
--- /dev/null
+++ b/python/pyasn1-modules/test/x509dump.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+x509dump.py <<EOT
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+EOT
+
diff --git a/python/pyasn1-modules/tools/cmpdump.py b/python/pyasn1-modules/tools/cmpdump.py
new file mode 100755
index 000000000..74c4f77a4
--- /dev/null
+++ b/python/pyasn1-modules/tools/cmpdump.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+# Read ASN.1/PEM CMP message on stdin, parse into
+# plain text, then build substrate from it
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc4210, pem
+from pyasn1 import debug
+import sys
+
+if len(sys.argv) == 2 and sys.argv[1] == '-d':
+ debug.setLogger(debug.Debug('all'))
+elif len(sys.argv) != 1:
+ print("""Usage:
+$ cat cmp.pem | %s [-d]""" % sys.argv[0])
+ sys.exit(-1)
+
+pkiMessage = rfc4210.PKIMessage()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+pkiMsg, rest = decoder.decode(substrate, asn1Spec=pkiMessage)
+
+print(pkiMsg.prettyPrint())
+
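+# The message may have been serialized with either definite or indefinite
+# length encoding, so compare the substrate against both re-encodings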
+assert encoder.encode(pkiMsg, defMode=False) == substrate or \
+ encoder.encode(pkiMsg, defMode=True) == substrate, \
+ 'CMP message recode fails'
diff --git a/python/pyasn1-modules/tools/crldump.py b/python/pyasn1-modules/tools/crldump.py
new file mode 100755
index 000000000..d4b0a547c
--- /dev/null
+++ b/python/pyasn1-modules/tools/crldump.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+#
+# Read X.509 CRLs on stdin, print each prettily and encode back into
+# the original wire format.
+# CRLs can be generated with the "openssl ca -gencrl ..." command.
+#
+from pyasn1_modules import rfc2459, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat crl.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+asn1Spec = rfc2459.CertificateList()
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(sys.stdin, ('-----BEGIN X509 CRL-----', '-----END X509 CRL-----'))
+ if not substrate:
+ break
+
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest: substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+ assert encoder.encode(key, defMode=False) == substrate or \
+ encoder.encode(key, defMode=True) == substrate, \
+           'CRL recode fails'
+
+ cnt = cnt + 1
+
+print('*** %s CRL(s) re/serialized' % cnt)
diff --git a/python/pyasn1-modules/tools/crmfdump.py b/python/pyasn1-modules/tools/crmfdump.py
new file mode 100755
index 000000000..22bfc9d95
--- /dev/null
+++ b/python/pyasn1-modules/tools/crmfdump.py
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+# Read ASN.1/PEM X.509 CRMF request on stdin, parse into
+# plain text, then build substrate from it
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2511, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat crmf.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+certReq = rfc2511.CertReqMessages()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=certReq)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr, defMode=False) == substrate or \
+ encoder.encode(cr, defMode=True) == substrate, \
+ 'crmf recode fails'
diff --git a/python/pyasn1-modules/tools/ocspclient.py b/python/pyasn1-modules/tools/ocspclient.py
new file mode 100755
index 000000000..b2d1dfc54
--- /dev/null
+++ b/python/pyasn1-modules/tools/ocspclient.py
@@ -0,0 +1,150 @@
+#!/usr/bin/python
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, rfc2459, pem
+from pyasn1.type import univ
+import sys, hashlib
+try:
+ import urllib2
+except ImportError:
+ import urllib.request as urllib2
+
+sha1oid = univ.ObjectIdentifier((1, 3, 14, 3, 2, 26))
+# id-pkix-ocsp-basic (RFC 2560); referenced by the response type check below
+id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1))
+
+class ValueOnlyBitStringEncoder(encoder.encoder.BitStringEncoder):
+ # These methods just do not encode tag and length fields of TLV
+ def encodeTag(self, *args): return ''
+ def encodeLength(self, *args): return ''
+ def encodeValue(*args):
+ substrate, isConstructed = encoder.encoder.BitStringEncoder.encodeValue(*args)
+ # OCSP-specific hack follows: cut off the "unused bit count"
+ # encoded bit-string value.
+ return substrate[1:], isConstructed
+
+ def __call__(self, bitStringValue):
+ return self.encode(None, bitStringValue, defMode=1, maxChunkSize=0)
+
+valueOnlyBitStringEncoder = ValueOnlyBitStringEncoder()
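+# Note: RFC 2560 defines KeyHash as the SHA-1 hash of the value of the
+# responder's public key BIT STRING, excluding the tag, length and
+# unused-bits octets -- hence the value-only encoder above.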
+
+def mkOcspRequest(issuerCert, userCert):
+ issuerTbsCertificate = issuerCert.getComponentByName('tbsCertificate')
+ issuerSubject = issuerTbsCertificate.getComponentByName('subject')
+
+ userTbsCertificate = userCert.getComponentByName('tbsCertificate')
+ userIssuer = userTbsCertificate.getComponentByName('issuer')
+
+ assert issuerSubject == userIssuer, '%s\n%s' % (
+ issuerSubject.prettyPrint(), userIssuer.prettyPrint()
+ )
+
+ userIssuerHash = hashlib.sha1(
+ encoder.encode(userIssuer)
+ ).digest()
+
+ issuerSubjectPublicKey = issuerTbsCertificate.getComponentByName('subjectPublicKeyInfo').getComponentByName('subjectPublicKey')
+
+ issuerKeyHash = hashlib.sha1(
+ valueOnlyBitStringEncoder(issuerSubjectPublicKey)
+ ).digest()
+
+ userSerialNumber = userTbsCertificate.getComponentByName('serialNumber')
+
+ # Build request object
+
+ request = rfc2560.Request()
+
+ reqCert = request.setComponentByName('reqCert').getComponentByName('reqCert')
+
+ hashAlgorithm = reqCert.setComponentByName('hashAlgorithm').getComponentByName('hashAlgorithm')
+ hashAlgorithm.setComponentByName('algorithm', sha1oid)
+
+ reqCert.setComponentByName('issuerNameHash', userIssuerHash)
+ reqCert.setComponentByName('issuerKeyHash', issuerKeyHash)
+ reqCert.setComponentByName('serialNumber', userSerialNumber)
+
+ ocspRequest = rfc2560.OCSPRequest()
+
+ tbsRequest = ocspRequest.setComponentByName('tbsRequest').getComponentByName('tbsRequest')
+ tbsRequest.setComponentByName('version', 'v1')
+
+ requestList = tbsRequest.setComponentByName('requestList').getComponentByName('requestList')
+ requestList.setComponentByPosition(0, request)
+
+ return ocspRequest
+
+def parseOcspResponse(ocspResponse):
+ responseStatus = ocspResponse.getComponentByName('responseStatus')
+ assert responseStatus == rfc2560.OCSPResponseStatus('successful'), responseStatus.prettyPrint()
+ responseBytes = ocspResponse.getComponentByName('responseBytes')
+ responseType = responseBytes.getComponentByName('responseType')
+ assert responseType == id_pkix_ocsp_basic, responseType.prettyPrint()
+
+ response = responseBytes.getComponentByName('response')
+
+ basicOCSPResponse, _ = decoder.decode(
+ response, asn1Spec=rfc2560.BasicOCSPResponse()
+ )
+
+ tbsResponseData = basicOCSPResponse.getComponentByName('tbsResponseData')
+
+ response0 = tbsResponseData.getComponentByName('responses').getComponentByPosition(0)
+
+ return (
+ tbsResponseData.getComponentByName('producedAt'),
+ response0.getComponentByName('certID'),
+ response0.getComponentByName('certStatus').getName(),
+ response0.getComponentByName('thisUpdate')
+ )
+
+if len(sys.argv) != 2:
+ print("""Usage:
+$ cat CACertificate.pem userCertificate.pem | %s <ocsp-responder-url>""" % sys.argv[0])
+ sys.exit(-1)
+else:
+ ocspUrl = sys.argv[1]
+
+# Parse CA and user certificates
+
+issuerCert, _ = decoder.decode(
+ pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----')
+ )[1],
+ asn1Spec=rfc2459.Certificate()
+ )
+userCert, _ = decoder.decode(
+ pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----', '-----END CERTIFICATE-----')
+ )[1],
+ asn1Spec=rfc2459.Certificate()
+ )
+
+# Build OCSP request
+
+ocspReq = mkOcspRequest(issuerCert, userCert)
+
+# Use HTTP POST to get response (see Appendix A of RFC 2560)
+# In case you need proxies, set the http_proxy env variable
+
+httpReq = urllib2.Request(
+ ocspUrl,
+ encoder.encode(ocspReq),
+ { 'Content-Type': 'application/ocsp-request' }
+ )
+httpRsp = urllib2.urlopen(httpReq).read()
+
+# Process OCSP response
+
+ocspRsp, _ = decoder.decode(httpRsp, asn1Spec=rfc2560.OCSPResponse())
+
+producedAt, certId, certStatus, thisUpdate = parseOcspResponse(ocspRsp)
+
+print('Certificate ID %s is %s at %s till %s\n' % (
+ certId.getComponentByName('serialNumber'),
+ certStatus,
+ producedAt,
+ thisUpdate))
diff --git a/python/pyasn1-modules/tools/ocspreqdump.py b/python/pyasn1-modules/tools/ocspreqdump.py
new file mode 100755
index 000000000..3a03115ea
--- /dev/null
+++ b/python/pyasn1-modules/tools/ocspreqdump.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+#
+# Read ASN.1/PEM OCSP request on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat ocsp-request.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+ocspReq = rfc2560.OCSPRequest()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=ocspReq)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr, defMode=False) == substrate or \
+ encoder.encode(cr, defMode=True) == substrate, \
+ 'OCSP request recode fails'
diff --git a/python/pyasn1-modules/tools/ocsprspdump.py b/python/pyasn1-modules/tools/ocsprspdump.py
new file mode 100755
index 000000000..9e49ce038
--- /dev/null
+++ b/python/pyasn1-modules/tools/ocsprspdump.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+#
+# Read ASN.1/PEM OCSP response on stdin, parse into
+# plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat ocsp-response.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+ocspRsp = rfc2560.OCSPResponse()
+
+substrate = pem.readBase64FromFile(sys.stdin)
+if not substrate:
+ sys.exit(0)
+
+cr, rest = decoder.decode(substrate, asn1Spec=ocspRsp)
+
+print(cr.prettyPrint())
+
+assert encoder.encode(cr, defMode=False) == substrate or \
+ encoder.encode(cr, defMode=True) == substrate, \
+       'OCSP response recode fails'
diff --git a/python/pyasn1-modules/tools/ocspserver.py b/python/pyasn1-modules/tools/ocspserver.py
new file mode 100755
index 000000000..2d12d5399
--- /dev/null
+++ b/python/pyasn1-modules/tools/ocspserver.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2560, rfc2459, pem
+from pyasn1.type import univ
+import sys, hashlib
+try:
+ import urllib2
+except ImportError:
+ import urllib.request as urllib2
+
+sha1oid = univ.ObjectIdentifier((1, 3, 14, 3, 2, 26))
+# id-pkix-ocsp-basic (RFC 2560); referenced by the response type check below
+id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1))
+
+class ValueOnlyBitStringEncoder(encoder.encoder.BitStringEncoder):
+ # These methods just do not encode tag and length fields of TLV
+ def encodeTag(self, *args): return ''
+ def encodeLength(self, *args): return ''
+ def encodeValue(*args):
+ substrate, isConstructed = encoder.encoder.BitStringEncoder.encodeValue(*args)
+ # OCSP-specific hack follows: cut off the "unused bit count"
+ # encoded bit-string value.
+ return substrate[1:], isConstructed
+
+ def __call__(self, bitStringValue):
+ return self.encode(None, bitStringValue, defMode=1, maxChunkSize=0)
+
+valueOnlyBitStringEncoder = ValueOnlyBitStringEncoder()
+
+def mkOcspRequest(issuerCert, userCert):
+ issuerTbsCertificate = issuerCert.getComponentByName('tbsCertificate')
+ issuerSubject = issuerTbsCertificate.getComponentByName('subject')
+
+ userTbsCertificate = userCert.getComponentByName('tbsCertificate')
+ userIssuer = userTbsCertificate.getComponentByName('issuer')
+
+ assert issuerSubject == userIssuer, '%s\n%s' % (
+ issuerSubject.prettyPrint(), userIssuer.prettyPrint()
+ )
+
+ userIssuerHash = hashlib.sha1(
+ encoder.encode(userIssuer)
+ ).digest()
+
+ issuerSubjectPublicKey = issuerTbsCertificate.getComponentByName('subjectPublicKeyInfo').getComponentByName('subjectPublicKey')
+
+ issuerKeyHash = hashlib.sha1(
+ valueOnlyBitStringEncoder(issuerSubjectPublicKey)
+ ).digest()
+
+ userSerialNumber = userTbsCertificate.getComponentByName('serialNumber')
+
+ # Build request object
+
+ request = rfc2560.Request()
+
+ reqCert = request.setComponentByName('reqCert').getComponentByName('reqCert')
+
+ hashAlgorithm = reqCert.setComponentByName('hashAlgorithm').getComponentByName('hashAlgorithm')
+ hashAlgorithm.setComponentByName('algorithm', sha1oid)
+
+ reqCert.setComponentByName('issuerNameHash', userIssuerHash)
+ reqCert.setComponentByName('issuerKeyHash', issuerKeyHash)
+ reqCert.setComponentByName('serialNumber', userSerialNumber)
+
+ ocspRequest = rfc2560.OCSPRequest()
+
+ tbsRequest = ocspRequest.setComponentByName('tbsRequest').getComponentByName('tbsRequest')
+ tbsRequest.setComponentByName('version', 'v1')
+
+ requestList = tbsRequest.setComponentByName('requestList').getComponentByName('requestList')
+ requestList.setComponentByPosition(0, request)
+
+ return ocspRequest
+
+def parseOcspResponse(ocspResponse):
+    responseStatus = ocspResponse.getComponentByName('responseStatus')
+
+ assert responseStatus == rfc2560.OCSPResponseStatus('successful'), responseStatus.prettyPrint()
+ responseBytes = ocspResponse.getComponentByName('responseBytes')
+ responseType = responseBytes.getComponentByName('responseType')
+ assert responseType == id_pkix_ocsp_basic, responseType.prettyPrint()
+
+ response = responseBytes.getComponentByName('response')
+
+ basicOCSPResponse, _ = decoder.decode(
+ response, asn1Spec=rfc2560.BasicOCSPResponse()
+ )
+
+ tbsResponseData = basicOCSPResponse.getComponentByName('tbsResponseData')
+
+ response0 = tbsResponseData.getComponentByName('responses').getComponentByPosition(0)
+
+ return (
+ tbsResponseData.getComponentByName('producedAt'),
+ response0.getComponentByName('certID'),
+ response0.getComponentByName('certStatus').getName(),
+ response0.getComponentByName('thisUpdate')
+ )
+
+if len(sys.argv) != 2:
+ print("""Usage:
+$ cat CACertificate.pem userCertificate.pem | %s <ocsp-responder-url>""" % sys.argv[0])
+ sys.exit(-1)
+else:
+ ocspUrl = sys.argv[1]
+
+# Parse CA and user certificates
+
+issuerCert, _ = decoder.decode(
+    pem.readPemFromFile(sys.stdin),
+ asn1Spec=rfc2459.Certificate()
+ )
+userCert, _ = decoder.decode(
+    pem.readPemFromFile(sys.stdin),
+ asn1Spec=rfc2459.Certificate()
+ )
+
+# Build OCSP request
+
+ocspReq = mkOcspRequest(issuerCert, userCert)
+
+# Use HTTP POST to get response (see Appendix A of RFC 2560)
+# In case you need proxies, set the http_proxy env variable
+
+httpReq = urllib2.Request(
+ ocspUrl,
+ encoder.encode(ocspReq),
+ { 'Content-Type': 'application/ocsp-request' }
+ )
+httpRsp = urllib2.urlopen(httpReq).read()
+
+# Process OCSP response
+
+ocspRsp, _ = decoder.decode(httpRsp, asn1Spec=rfc2560.OCSPResponse())
+
+producedAt, certId, certStatus, thisUpdate = parseOcspResponse(ocspRsp)
+
+print('Certificate ID %s is %s at %s till %s\n' % (
+ certId.getComponentByName('serialNumber'),
+ certStatus,
+ producedAt,
+ thisUpdate
+ ))
diff --git a/python/pyasn1-modules/tools/pkcs10dump.py b/python/pyasn1-modules/tools/pkcs10dump.py
new file mode 100755
index 000000000..ea979c0cf
--- /dev/null
+++ b/python/pyasn1-modules/tools/pkcs10dump.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+#
+# Read ASN.1/PEM X.509 certificate requests (PKCS#10 format) on stdin,
+# parse each into plain text, then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2314, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat certificateRequest.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+certType = rfc2314.CertificationRequest()
+
+certCnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE REQUEST-----',
+ '-----END CERTIFICATE REQUEST-----')
+ )
+ if not substrate:
+ break
+
+ cert, rest = decoder.decode(substrate, asn1Spec=certType)
+
+ if rest: substrate = substrate[:-len(rest)]
+
+ print(cert.prettyPrint())
+
+ assert encoder.encode(cert, defMode=False) == substrate or \
+ encoder.encode(cert, defMode=True) == substrate, \
+           'certificate request recode fails'
+
+ certCnt = certCnt + 1
+
+print('*** %s PEM certificate request(s) de/serialized' % certCnt)
diff --git a/python/pyasn1-modules/tools/pkcs1dump.py b/python/pyasn1-modules/tools/pkcs1dump.py
new file mode 100755
index 000000000..d0da82b2f
--- /dev/null
+++ b/python/pyasn1-modules/tools/pkcs1dump.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+#
+# Read unencrypted PKCS#1/PKIX-compliant, PEM/DER-encoded private keys on
+# stdin, print each prettily and encode back into the original wire format.
+# Private keys can be generated with "openssl genrsa|gendsa" commands.
+#
+from pyasn1_modules import rfc2459, rfc2437, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat rsakey.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(sys.stdin, ('-----BEGIN RSA PRIVATE KEY-----', '-----END RSA PRIVATE KEY-----'), ('-----BEGIN DSA PRIVATE KEY-----', '-----END DSA PRIVATE KEY-----') )
+ if not substrate:
+ break
+
+ if idx == 0:
+ asn1Spec = rfc2437.RSAPrivateKey()
+ elif idx == 1:
+ asn1Spec = rfc2459.DSAPrivateKey()
+ else:
+ break
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest: substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+ assert encoder.encode(key, defMode=False) == substrate or \
+ encoder.encode(key, defMode=True) == substrate, \
+           'key recode fails'
+
+ cnt = cnt + 1
+
+print('*** %s key(s) re/serialized' % cnt)
diff --git a/python/pyasn1-modules/tools/pkcs7dump.py b/python/pyasn1-modules/tools/pkcs7dump.py
new file mode 100755
index 000000000..779487162
--- /dev/null
+++ b/python/pyasn1-modules/tools/pkcs7dump.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+#
+# Read ASN.1/PEM PKCS#7 on stdin, parse it into plain text,
+# then build substrate from it
+#
+from pyasn1_modules import rfc2315, pem
+from pyasn1.codec.der import encoder, decoder
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat pkcs7Certificate.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN PKCS7-----', '-----END PKCS7-----')
+ )
+
+assert substrate, 'bad PKCS7 data on input'
+
+contentInfo, rest = decoder.decode(substrate, asn1Spec=rfc2315.ContentInfo())
+
+if rest: substrate = substrate[:-len(rest)]
+
+print(contentInfo.prettyPrint())
+
+assert encoder.encode(contentInfo, defMode=False) == substrate or \
+ encoder.encode(contentInfo, defMode=True) == substrate, \
+ 're-encode fails'
+
+contentType = contentInfo.getComponentByName('contentType')
+
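+# PKCS#7 content type OIDs (1.2.840.113549.1.7.x) mapped onto their structures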
+contentInfoMap = {
+ (1, 2, 840, 113549, 1, 7, 1): rfc2315.Data(),
+ (1, 2, 840, 113549, 1, 7, 2): rfc2315.SignedData(),
+ (1, 2, 840, 113549, 1, 7, 3): rfc2315.EnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 4): rfc2315.SignedAndEnvelopedData(),
+ (1, 2, 840, 113549, 1, 7, 5): rfc2315.DigestedData(),
+ (1, 2, 840, 113549, 1, 7, 6): rfc2315.EncryptedData()
+ }
+
+content, _ = decoder.decode(
+ contentInfo.getComponentByName('content'),
+ asn1Spec=contentInfoMap[contentType]
+ )
+
+print(content.prettyPrint())
diff --git a/python/pyasn1-modules/tools/pkcs8dump.py b/python/pyasn1-modules/tools/pkcs8dump.py
new file mode 100755
index 000000000..d1d125f8c
--- /dev/null
+++ b/python/pyasn1-modules/tools/pkcs8dump.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python
+#
+# Read a bunch of ASN.1/PEM plain/encrypted private keys in PKCS#8
+# format on stdin, parse each into plain text, then rebuild the substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc5208, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat pkcs8key.pem | %s""" % sys.argv[0])
+ sys.exit(-1)
+
+cnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(sys.stdin, ('-----BEGIN PRIVATE KEY-----', '-----END PRIVATE KEY-----'), ('-----BEGIN ENCRYPTED PRIVATE KEY-----', '-----END ENCRYPTED PRIVATE KEY-----') )
+ if not substrate:
+ break
+
+ if idx == 0:
+ asn1Spec = rfc5208.PrivateKeyInfo()
+ elif idx == 1:
+ asn1Spec = rfc5208.EncryptedPrivateKeyInfo()
+ else:
+ break
+
+ key, rest = decoder.decode(substrate, asn1Spec=asn1Spec)
+
+ if rest: substrate = substrate[:-len(rest)]
+
+ print(key.prettyPrint())
+
+ assert encoder.encode(key, defMode=False) == substrate or \
+ encoder.encode(key, defMode=True) == substrate, \
+ 'pkcs8 recode fails'
+
+ cnt = cnt + 1
+
+print('*** %s PKCS#8 key(s) de/serialized' % cnt)
diff --git a/python/pyasn1-modules/tools/snmpget.py b/python/pyasn1-modules/tools/snmpget.py
new file mode 100755
index 000000000..372510329
--- /dev/null
+++ b/python/pyasn1-modules/tools/snmpget.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+#
+# Generate SNMP GET request, parse the response
+#
+from pyasn1.codec.ber import encoder, decoder
+from pyasn1_modules import rfc1157
+import sys, socket
+
+if len(sys.argv) != 4:
+ print("""Usage:
+$ %s <community> <host> <OID>""" % sys.argv[0])
+ sys.exit(-1)
+
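+# Message ::= SEQUENCE { version, community, data } -- components are set
+# positionally: 0 = version (left at its default), 1 = community, 2 = PDUs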
+msg = rfc1157.Message()
+msg.setComponentByPosition(0)
+msg.setComponentByPosition(1, sys.argv[1])
+# pdu
+pdus = msg.setComponentByPosition(2).getComponentByPosition(2)
+pdu = pdus.setComponentByPosition(0).getComponentByPosition(0)
+pdu.setComponentByPosition(0, 123)
+pdu.setComponentByPosition(1, 0)
+pdu.setComponentByPosition(2, 0)
+vbl = pdu.setComponentByPosition(3).getComponentByPosition(3)
+vb = vbl.setComponentByPosition(0).getComponentByPosition(0)
+vb.setComponentByPosition(0, sys.argv[3])
+v = vb.setComponentByPosition(1).getComponentByPosition(1).setComponentByPosition(0).getComponentByPosition(0).setComponentByPosition(3).getComponentByPosition(3)
+
+print('sending: %s' % msg.prettyPrint())
+
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+sock.sendto(encoder.encode(msg), (sys.argv[2], 161))
+
+substrate, _ = sock.recvfrom(2048)
+
+rMsg, _ = decoder.decode(substrate, asn1Spec=msg)
+
+print('received: %s' % rMsg.prettyPrint())
diff --git a/python/pyasn1-modules/tools/x509dump.py b/python/pyasn1-modules/tools/x509dump.py
new file mode 100755
index 000000000..64cba7e30
--- /dev/null
+++ b/python/pyasn1-modules/tools/x509dump.py
@@ -0,0 +1,40 @@
+#!/usr/bin/python
+#
+# Read ASN.1/PEM X.509 certificates on stdin, parse each into plain text,
+# then build substrate from it
+#
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2459, pem
+import sys
+
+if len(sys.argv) != 1:
+ print("""Usage:
+$ cat CACertificate.pem | %s
+$ cat userCertificate.pem | %s""" % (sys.argv[0], sys.argv[0]))
+ sys.exit(-1)
+
+certType = rfc2459.Certificate()
+
+certCnt = 0
+
+while True:
+ idx, substrate = pem.readPemBlocksFromFile(
+ sys.stdin, ('-----BEGIN CERTIFICATE-----',
+ '-----END CERTIFICATE-----')
+ )
+ if not substrate:
+ break
+
+ cert, rest = decoder.decode(substrate, asn1Spec=certType)
+
+ if rest: substrate = substrate[:-len(rest)]
+
+ print(cert.prettyPrint())
+
+ assert encoder.encode(cert, defMode=False) == substrate or \
+ encoder.encode(cert, defMode=True) == substrate, \
+ 'cert recode fails'
+
+ certCnt = certCnt + 1
+
+print('*** %s PEM cert(s) de/serialized' % certCnt)
diff --git a/python/pyasn1/CHANGES b/python/pyasn1/CHANGES
new file mode 100644
index 000000000..561dedd88
--- /dev/null
+++ b/python/pyasn1/CHANGES
@@ -0,0 +1,278 @@
+Revision 0.1.7
+--------------
+
+- License updated to vanilla BSD 2-Clause to ease package use
+ (http://opensource.org/licenses/BSD-2-Clause).
+- Test suite made discoverable by unittest/unittest2 discovery feature.
+- Fix to decoder working on indefinite length substrate -- end-of-octets
+ marker is now detected by both tag and value. Otherwise zero values may
+ interfere with end-of-octets marker.
+- Fix to decoder to fail in cases where tagFormat indicates inappropriate
+ format for the type (e.g. BOOLEAN is always PRIMITIVE, SET is always
+ CONSTRUCTED and OCTET STRING is either of the two)
+- Fix to REAL type encoder to force primitive encoding form.
+- Fix to CHOICE decoder to handle explicitly tagged, indefinite length
+ mode encoding
+- Fix to REAL type decoder to handle negative REAL values correctly. Test
+ case added.
+
+Revision 0.1.6
+--------------
+
+- The compact (valueless) way of encoding zero INTEGERs introduced in
+ 0.1.5 seems to fail miserably as the world is filled with broken
+ BER decoders. So we had to back off the *encoder* for a while.
+ There's still the IntegerEncoder.supportCompactZero flag which
+ enables compact encoding form whenever it evaluates to True.
+- Report package version on debugging code initialization.
+
+Revision 0.1.5
+--------------
+
+- Documentation updated and split into chapters to better match
+ web-site contents.
+- Make prettyPrint() work for non-initialized pyasn1 data objects. It
+ used to throw an exception.
+- Fix to encoder to produce empty-payload INTEGER values for zeros
+- Fix to decoder to support empty-payload INTEGER and REAL values
+- Fix to unit test suites imports to be able to run each from
+ their current directory
+
+Revision 0.1.4
+--------------
+
+- Built-in codec debugging facility added
+- Added some more checks to ObjectIdentifier BER encoder catching
+  possible 2^8 overflow condition caused by two leading sub-OIDs
+- Implementations overriding the AbstractDecoder.valueDecoder method
+ changed to return the rest of substrate behind the item being processed
+ rather than the unprocessed substrate within the item (which is usually
+ empty).
+- Decoder's recursiveFlag feature generalized as a user callback function
+ which is passed an uninitialized object recovered from substrate and
+ its uninterpreted payload.
+- Catch inappropriate substrate type passed to decoder.
+- Expose tagMap/typeMap/Decoder objects at DER decoder to uniform API.
+- Obsolete __init__.MajorVersionId replaced with __init__.__version__
+ which is now in-sync with distutils.
+- Package classifiers updated.
+- The __init__.py's made non-empty (rumors are that they may be optimized
+ out by package managers).
+- Bail out gracefully whenever Python version is older than 2.4.
+- Fix to Real codec exponent encoding (should be in 2's complement form),
+ some more test cases added.
+- Fix in Boolean truth testing built-in methods
+- Fix to substrate underrun error handling at ObjectIdentifier BER decoder
+- Fix to BER Boolean decoder that allows other pre-computed
+ values besides 0 and 1
+- Fix to leading 0x80 octet handling in BER/CER/DER ObjectIdentifier decoder.
+ See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf
+
+Revision 0.1.3
+--------------
+
+- Include class name into asn1 value constraint violation exception.
+- Fix to OctetString.prettyOut() method that loses the leading zero when
+ building hex string.
+
+Revision 0.1.2
+--------------
+
+- Fix to __long__() to actually return longs on py2k
+- Fix to OctetString.__str__() behaviour on a non-initialized object.
+- Fix to quote initializer of OctetString.__repr__()
+- Minor fix towards ObjectIdentifier.prettyIn() reliability
+- ObjectIdentifier.__str__() is aliased to prettyPrint()
+- Explicit repr() calls replaced with '%r'
+
+Revision 0.1.1
+--------------
+
+- Hex/bin string initializer to OctetString object reworked
+ (in a backward-incompatible manner)
+- Fixed float() infinity compatibility issue (affects 2.5 and earlier)
+- Fixed a bug/typo at Boolean CER encoder.
+- Major overhaul for Python 2.4 -- 3.2 compatibility:
+ + get rid of old-style types
+ + drop string module usage
+  + switch to rich comparison
+ + drop explicit long integer type use
+ + map()/filter() replaced with list comprehension
+ + apply() replaced with */**args
+ + switched to use 'key' sort() callback function
+ + support both __nonzero__() and __bool__() methods
+ + modified not to use py3k-incompatible exception syntax
+ + getslice() operator fully replaced with getitem()
+ + dictionary operations made 2K/3K compatible
+ + base type for encoding substrate and OctetString-based types
+ is now 'bytes' when running py3k and 'str' otherwise
+ + OctetString and derivatives now unicode compliant.
+ + OctetString now supports two python-neutral getters: asOcts() & asInts()
+ + print OctetString content in hex whenever it is not printable otherwise
+ + in test suite, implicit relative import replaced with the absolute one
+ + in test suite, string constants replaced with numerics
+
+Revision 0.0.13
+---------------
+
+- Fix to base10 normalization function that loops on univ.Real(0)
+
+Revision 0.0.13b
+----------------
+
+- ASN.1 Real type is now supported properly.
+- Objects of Constructed types now support __setitem__()
+- Set/Sequence objects can now be addressed by their field names (string index)
+ and position (integer index).
+- Typo fix to ber.SetDecoder code that prevented guided decoding operation.
+- Fix to explicitly tagged items decoding support.
+- Fix to OctetString.prettyPrint() to better handle non-printable content.
+- Fix to repr() workings of Choice objects.
+
+Revision 0.0.13a
+----------------
+
+- Major codec re-design.
+- Documentation significantly improved.
+- ASN.1 Any type is now supported.
+- All example ASN.1 modules moved to separate pyasn1-modules package.
+- Fix to initial sub-OID overflow condition detection at encoder.
+- BitString initialization value verification improved.
+- The Set/Sequence.getNameByPosition() method implemented.
+- Fix to proper behaviour of PermittedAlphabetConstraint object.
+- Fix to improper Boolean substrate handling at CER/DER decoders.
+- Changes towards performance improvement:
+ + all dict.has_key() & dict.get() invocations replaced with modern syntax
+ (this breaks compatibility with Python 2.1 and older).
+ + tag and tagset caches introduced to decoder
+ + decoder code improved to prevent unnecessary pyasn1 objects creation
+ + allow disabling components verification when setting components to
+ structured types, this is used by decoder whilst running in guided mode.
+ + BER decoder for integer values now looks up a small set of pre-computed
+ substrate values to save on decoding.
+ + a few pre-computed values configured to ObjectIdentifier BER encoder.
+ + ChoiceDecoder split-off SequenceOf one to save on unnecessary checks.
+ + replace slow hasattr()/getattr() calls with isinstance() introspection.
+ + track the number of initialized components of Constructed types to save
+ on default/optional components initialization.
+ + added a shortcut ObjectIdentifier.asTuple() to be used instead of
+ __getitem__() in hotspots.
+ + use Tag.asTuple() and pure integers at tag encoder.
+ + introduce and use in decoder the baseTagSet attribute of the built-in
+ ASN.1 types.
+
+Revision 0.0.12a
+----------------
+
+- The individual tag/length/value processing methods of
+ encoder.AbstractItemEncoder renamed (leading underscore stripped)
+ to promote overloading in cases where partial substrate processing
+ is required.
+- The ocsp.py, ldap.py example scripts added.
+- Fix to univ.ObjectIdentifier input value handler to disallow negative
+ sub-IDs.
+
+Revision 0.0.11a
+----------------
+
+- Decoder can now treat values of unknown types as opaque OctetString.
+- Fix to Set/SetOf type decoder to handle uninitialized scalar SetOf
+ components correctly.
+
+Revision 0.0.10a
+----------------
+
+- API versioning mechanics retired (pyasn1.v1 -> pyasn1) which makes
+ it possible to zip-import pyasn1 sources (used by egg and py2exe).
+
+Revision 0.0.9a
+---------------
+
+- Allow any non-zero values in Boolean type BER decoder, as it's in
+ accordance with the standard.
+
+Revision 0.0.8a
+---------------
+
+- Integer.__index__() now supported (for Python 2.5+).
+- Fix to empty value encoding in BitString encoder, test case added.
+- Fix to SequenceOf decoder that prevents it skipping possible Choice
+ typed inner component.
+- Choice.getName() method added for getting currently set component
+ name.
+- OctetString.prettyPrint() does a single str() against its value
+ eliminating extra quotes.
+
+Revision 0.0.7a
+---------------
+
+- Large tags (>31) now supported by codecs.
+- Fix to encoder to properly handle explicitly tagged untagged items.
+- All possible value lengths (up to 256^126) now supported by encoders.
+- Fix to Tag class constructor to prevent negative IDs.
+
+Revision 0.0.6a
+---------------
+
+- Make use of setuptools.
+- Constraints derivation verification (isSuperTypeOf()/isSubTypeOf()) fixed.
+- Fix to constraints comparison logic -- can't cmp() hash values as it
+ may cause false positives due to hash conflicts.
+
+Revision 0.0.5a
+---------------
+
+- Integer BER codec reworked fixing negative values encoding bug.
+- clone() and subtype() methods of Constructed ASN.1 classes now
+ accept optional cloneValueFlag flag which controls original value
+ inheritance. The default is *not* to inherit original value for
+ performance reasons (this may affect backward compatibility).
+ Performance penalty may be huge on deeply nested Constructed objects
+ re-creation.
+- Base ASN.1 types (pyasn1.type.univ.*) do not have default values
+ anymore. They remain uninitialized acting as ASN.1 types. In
+ this model, initialized ASN.1 types represent either types with
+ default value installed or a type instance.
+- Decoders' prototypes are now class instances rather than classes.
+ This is to simplify initial value installation to decoder's
+ prototype value.
+- Bugfix to BitString BER decoder (trailing bits not regarded).
+- Bugfix to Constraints use as mapping keys.
+- Bugfix to Integer & BitString clone() methods
+- Bugfix to the way to distinguish Set from SetOf at CER/DER SetOfEncoder
+- Adjustments to make it run on Python 1.5.
+- In tests, substrate constants converted from hex escaped literals into
+ octals to overcome the indefinite hex width issue occurring in early Pythons.
+- Minor performance optimization of TagSet.isSuperTagSetOf() method
+- examples/sshkey.py added
+
+Revision 0.0.4a
+---------------
+
+* Asn1ItemBase.prettyPrinter() -> *.prettyPrint()
+
+Revision 0.0.3a
+---------------
+
+* Simple ASN1 objects now hash to their Python value and don't
+ depend upon tag/constraints/etc.
+* prettyIn & prettyOut methods of SimpleAsn1Object become public
+* many syntax fixes
+
+Revision 0.0.2a
+---------------
+
+* ConstraintsIntersection.isSuperTypeOf() and
+ ConstraintsIntersection.hasConstraint() implemented
+* Bugfix to NamedValues initialization code
+* +/- operators added to NamedValues objects
+* Integer.__abs__() & Integer.subtype() added
+* ObjectIdentifier.prettyOut() fixes
+* Allow subclass components at SequenceAndSetBase
+* AbstractConstraint.__cmp__() dropped
+* error.Asn1Error replaced with error.PyAsn1Error
+
+Revision 0.0.1a
+---------------
+
+* Initial public alpha release
diff --git a/python/pyasn1/LICENSE b/python/pyasn1/LICENSE
new file mode 100644
index 000000000..fac589b8c
--- /dev/null
+++ b/python/pyasn1/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2005-2013, Ilya Etingof <ilya@glas.net>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/python/pyasn1/MANIFEST.in b/python/pyasn1/MANIFEST.in
new file mode 100644
index 000000000..e8b3d36ce
--- /dev/null
+++ b/python/pyasn1/MANIFEST.in
@@ -0,0 +1,3 @@
+include CHANGES README LICENSE THANKS TODO
+recursive-include test *.py
+recursive-include doc *.html
diff --git a/python/pyasn1/PKG-INFO b/python/pyasn1/PKG-INFO
new file mode 100644
index 000000000..5de78eceb
--- /dev/null
+++ b/python/pyasn1/PKG-INFO
@@ -0,0 +1,26 @@
+Metadata-Version: 1.0
+Name: pyasn1
+Version: 0.1.7
+Summary: ASN.1 types and codecs
+Home-page: http://sourceforge.net/projects/pyasn1/
+Author: Ilya Etingof <ilya@glas.net>
+Author-email: ilya@glas.net
+License: BSD
+Description: A pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208).
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: Science/Research
+Classifier: Intended Audience :: System Administrators
+Classifier: Intended Audience :: Telecommunications Industry
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Communications
+Classifier: Topic :: Security :: Cryptography
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/python/pyasn1/README b/python/pyasn1/README
new file mode 100644
index 000000000..ffa3b57e5
--- /dev/null
+++ b/python/pyasn1/README
@@ -0,0 +1,68 @@
+
+ASN.1 library for Python
+------------------------
+
+This is an implementation of ASN.1 types and codecs in the Python programming
+language. It was first written to support a particular protocol (SNMP)
+but was then generalized to be suitable for a wide range of protocols
+based on the ASN.1 specification.
+
+FEATURES
+--------
+
+* Generic implementation of ASN.1 types (X.208)
+* Fully standard compliant BER/CER/DER codecs
+* 100% Python, works with Python 2.4 up to Python 3.3 (beta 1)
+* MT-safe
+
+MISFEATURES
+-----------
+
+* No ASN.1 compiler (by-hand ASN.1 spec compilation into Python code required)
+* Codecs are not restartable
+
+INSTALLATION
+------------
+
+The pyasn1 package uses setuptools/distutils for installation. Thus do
+either:
+
+$ easy_install pyasn1
+
+or
+
+$ tar zxf pyasn1-0.1.3.tar.gz
+$ cd pyasn1-0.1.3
+$ python setup.py install
+$ cd test
+$ python suite.py # run unit tests
+
+OPERATION
+---------
+
+Perhaps a typical use would involve [by-hand] compilation of your ASN.1
+specification into pyasn1-backed Python code in your application.
+
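+For example, this hypothetical ASN.1 definition:
+
+Record ::= SEQUENCE { id INTEGER }
+
+could be hand-compiled into a pyasn1 class along these lines (a minimal
+sketch):
+
+from pyasn1.type import univ, namedtype
+
+class Record(univ.Sequence):
+    componentType = namedtype.NamedTypes(
+        namedtype.NamedType('id', univ.Integer())
+    )
+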
+For more information on pyasn1 APIs, please, refer to the
+doc/pyasn1-tutorial.html file in the distribution.
+
+Also refer to the example modules. Take a look at the pyasn1-modules package --
+maybe it already holds something useful for you.
+
+AVAILABILITY
+------------
+
+The pyasn1 package is distributed under the terms and conditions of a
+BSD-style license. See the LICENSE file in the distribution. Source code is
+freely available from:
+
+http://pyasn1.sf.net
+
+
+FEEDBACK
+--------
+
+Please send your comments and fixes to the mailing lists at the project web site.
+
+=-=-=
+mailto: ilya@glas.net
diff --git a/python/pyasn1/THANKS b/python/pyasn1/THANKS
new file mode 100644
index 000000000..4de1713c0
--- /dev/null
+++ b/python/pyasn1/THANKS
@@ -0,0 +1,4 @@
+Denis S. Otkidach
+Gregory Golberg
+Bud P. Bruegger
+Jacek Konieczny
diff --git a/python/pyasn1/TODO b/python/pyasn1/TODO
new file mode 100644
index 000000000..0ee211c2a
--- /dev/null
+++ b/python/pyasn1/TODO
@@ -0,0 +1,36 @@
+* Specialize ASN.1 character and useful types
+* Come up with simpler API for deeply nested constructed objects
+ addressing
+
+ber.decoder:
+* suspend codec on underrun error ?
+* class-static components map (in simple type classes)
+* present subtypes ?
+* component presence check won't work at innertypeconst
+* add the rest of ASN1 types/codecs
+* type vs value, defaultValue
+
+ber.encoder:
+* Asn1Item.clone() / shallowcopy issue
+* large length encoder?
+* codec restart
+* preserve compatible API whenever stateful codec gets implemented
+* restartable vs incremental
+* plan: make a stateless universal decoder, then convert it to restartable
+ then to incremental
+
+type.useful:
+* may need to implement prettyIn/Out
+
+type.char:
+* may need to implement constraints
+
+type.univ:
+* simpler API to constructed objects: value init, recursive
+
+type.namedtypes
+* type vs tagset name convention
+
+general:
+
+* how untagged TagSet should be initialized?
diff --git a/python/pyasn1/doc/codecs.html b/python/pyasn1/doc/codecs.html
new file mode 100644
index 000000000..9c2c36ed6
--- /dev/null
+++ b/python/pyasn1/doc/codecs.html
@@ -0,0 +1,503 @@
+<html>
+<title>
+PyASN1 codecs
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+<h3>
+2. PyASN1 Codecs
+</h3>
+
+<p>
+In the ASN.1 context, a
+<a href=http://en.wikipedia.org/wiki/Codec>codec</a>
+is a program that transforms between concrete data structures and a stream
+of octets suitable for transmission over the wire. This serialized form of
+data is sometimes called <i>substrate</i> or <i>essence</i>.
+</p>
+
+<p>
+In the pyasn1 implementation, substrate takes the shape of Python 3 bytes or
+Python 2 string objects.
+</p>
+
+<p>
+One of the properties of a codec is its ability to cope with incomplete
+data and/or substrate, which implies that the codec is stateful. In other
+words, when a decoder runs out of substrate while the data item being
+recovered is still incomplete, a stateful codec would suspend and complete
+the data item recovery whenever the rest of the substrate becomes available.
+Similarly, a stateful encoder would encode data items in multiple steps,
+waiting for source data to arrive. Codec restartability is especially
+important when an application deals with large volumes of data and/or runs
+on low RAM. For an interesting discussion of codec options and design
+choices, refer to the
+<a href=http://directory.apache.org/subprojects/asn1/>Apache ASN.1 project</a>.
+</p>
+
+<p>
+As of this writing, codecs implemented in pyasn1 are all stateless, mostly
+to keep the code simple.
+</p>
+
+<p>
+The pyasn1 package currently supports
+<a href=http://en.wikipedia.org/wiki/Basic_encoding_rules>BER</a> codec and
+its variations --
+<a href=http://en.wikipedia.org/wiki/Canonical_encoding_rules>CER</a> and
+<a href=http://en.wikipedia.org/wiki/Distinguished_encoding_rules>DER</a>.
+More ASN.1 codecs are planned for implementation in the future.
+</p>
+
+<a name="2.1"></a>
+<h4>
+2.1 Encoders
+</h4>
+
+<p>
+The encoder is used for transforming pyasn1 value objects into substrate.
+Only pyasn1 value objects can be serialized; attempts to process pyasn1
+type objects will cause encoder failure.
+</p>
+
+<p>
+The following code will create a pyasn1 Integer object and serialize it with
+BER encoder:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(univ.Integer(123456))
+b'\x02\x03\x01\xe2@'
+>>>
+</pre>
+</td></tr></table>
+
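+<p>
+By contrast, an attempt to encode an uninitialized pyasn1 type object fails
+(a sketch; the exact error text may vary between versions):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(univ.Integer())
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error: ...
+>>>
+</pre>
+</td></tr></table>
+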
+<p>
+The BER standard also defines a so-called <i>indefinite length</i> encoding
+form which makes processing of large data items more memory efficient. It is
+mostly useful when the encoder does not have the whole value at once and the
+length of the value cannot be determined at the beginning of encoding.
+</p>
+
+<p>
+<i>Constructed encoding</i> is another feature of BER closely related to the
+indefinite length form. In essence, a large scalar value (such as an ASN.1
+character string or BitString type) could be chopped into smaller chunks by
+the encoder and transmitted incrementally to limit memory consumption. Unlike
+the indefinite length case, the length of the whole value must be known in
+advance when using the constructed, definite length encoding form.
+</p>
+
+<p>
+Since pyasn1 codecs are not restartable, a pyasn1 encoder may only encode a
+data item all at once. However, even in this case, generating indefinite
+length encoding may help a low-memory receiver, running a restartable
+decoder, to process a large data item.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... defMode=False,
+... maxChunkSize=8
+... )
+b'$\x80\x04\x08The quic\x04\x08k brown \x04\x08fox jump\x04\x08s over \
+t\x04\x08he lazy \x04\x03dog\x00\x00'
+>>>
+>>> encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... maxChunkSize=8
+... )
+b'$7\x04\x08The quic\x04\x08k brown \x04\x08fox jump\x04\x08s over \
+t\x04\x08he lazy \x04\x03dog'
+</pre>
+</td></tr></table>
+
+<p>
+Setting the <b>defMode</b> encoder parameter to False disables the definite
+length encoding mode, while the optional <b>maxChunkSize</b> parameter
+specifies the desired substrate chunk size, which influences memory
+requirements at the decoder's end.
+</p>
+
+<p>
+To use CER or DER encoders one needs to explicitly import and call them - the
+APIs are all compatible.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder as ber_encoder
+>>> from pyasn1.codec.cer import encoder as cer_encoder
+>>> from pyasn1.codec.der import encoder as der_encoder
+>>> ber_encoder.encode(univ.Boolean(True))
+b'\x01\x01\x01'
+>>> cer_encoder.encode(univ.Boolean(True))
+b'\x01\x01\xff'
+>>> der_encoder.encode(univ.Boolean(True))
+b'\x01\x01\xff'
+>>>
+</pre>
+</td></tr></table>
+
+<a name="2.2"></a>
+<h4>
+2.2 Decoders
+</h4>
+
+<p>
+In the process of decoding, pyasn1 value objects are created and linked to
+each other, based on the information contained in the substrate. Thus,
+the original pyasn1 value object(s) are recovered.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(univ.Boolean(True))
+>>> decoder.decode(substrate)
+(Boolean('True(1)'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Commenting on the code snippet above: the pyasn1 decoder accepts substrate
+as an argument and returns a tuple of a pyasn1 value object (possibly
+a top-level one in case of a constructed object) and the unprocessed part
+of the input substrate.
+</p>
+
+<p>
+All pyasn1 decoders can handle both definite and indefinite length
+encoding modes automatically; explicit switching from one mode
+to the other is not required.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... defMode=False,
+... maxChunkSize=8
+... )
+>>> decoder.decode(substrate)
+(OctetString(b'The quick brown fox jumps over the lazy dog'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Speaking of BER/CER/DER encoding, in many situations the substrate may not
+contain all the information needed for complete and accurate recovery of
+ASN.1 values. The most obvious cases include implicitly tagged ASN.1 types
+and constrained types.
+</p>
+
+<p>
+As discussed earlier in this handbook, when an ASN.1 type is implicitly
+tagged, the previous outermost tag is lost and never appears in substrate.
+If it is the base tag that gets lost, the decoder is unable to pick a
+type-specific value decoder from its table of built-in types, and therefore
+cannot recover the value part based only on the information contained in
+substrate. The approach taken by the pyasn1 decoder is to use a prototype
+pyasn1 type object (or a set of them) to <i>guide</i> the decoding process
+by matching [possibly incomplete] tags recovered from substrate with those
+found in prototype pyasn1 type objects (also called pyasn1 specification
+objects further in this paper).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import decoder
+>>> decoder.decode(b'\x02\x01\x0c', asn1Spec=univ.Integer())
+(Integer(12), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The decoder would neither modify the pyasn1 specification object nor use
+its current values (if it's a pyasn1 value object), but rather use it as
+a hint for choosing the proper decoder and as a pattern for creating new
+objects:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> i = univ.Integer(12345).subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> substrate = encoder.encode(i)
+>>> substrate
+b'\x9f(\x0209'
+>>> decoder.decode(substrate)
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error:
+ TagSet(Tag(tagClass=128, tagFormat=0, tagId=40)) not in asn1Spec
+>>> decoder.decode(substrate, asn1Spec=i)
+(Integer(12345), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Notice in the example above that an attempt to run the decoder without
+passing a pyasn1 specification object fails because the recovered tag does
+not belong to any of the built-in types.
+</p>
+
+<p>
+Another important feature of guided decoder operation is the use of
+value constraints possibly present in the pyasn1 specification object.
+To explain this, we will decode a random integer object into a generic
+Integer and a constrained one.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> class DialDigit(univ.Integer):
+... subtypeSpec = constraint.ValueRangeConstraint(0,9)
+>>> substrate = encoder.encode(univ.Integer(13))
+>>> decoder.decode(substrate)
+(Integer(13), b'')
+>>> decoder.decode(substrate, asn1Spec=DialDigit())
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueRangeConstraint(0, 9) failed at: 13
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Similarly to encoders, to use CER or DER decoders the application has to
+explicitly import and call them - all APIs are compatible.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder as ber_encoder
+>>> substrate = ber_encoder.encode(univ.OctetString('http://pyasn1.sf.net'))
+>>>
+>>> from pyasn1.codec.ber import decoder as ber_decoder
+>>> from pyasn1.codec.cer import decoder as cer_decoder
+>>> from pyasn1.codec.der import decoder as der_decoder
+>>>
+>>> ber_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>> cer_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>> der_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="2.2.1"></a>
+<h4>
+2.2.1 Decoding untagged types
+</h4>
+
+<p>
+It has already been mentioned that ASN.1 has two "special case" types:
+CHOICE and ANY. They differ from other types with regard to tagging -
+unless these two are additionally tagged, neither of them will have a tag
+of its own. Therefore these types become invisible in substrate
+and can not be recovered without passing a pyasn1 specification object to
+the decoder.
+</p>
+
+<p>
+To explain the issue, we will first prepare a Choice object to deal with:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype
+>>> class CodeOrMessage(univ.Choice):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('code', univ.Integer()),
+... namedtype.NamedType('message', univ.OctetString())
+... )
+>>>
+>>> codeOrMessage = CodeOrMessage()
+>>> codeOrMessage.setComponentByName('message', 'my string value')
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Let's now encode this Choice object and then decode its substrate
+with and without pyasn1 specification object:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(codeOrMessage)
+>>> substrate
+b'\x04\x0fmy string value'
+>>> encoder.encode(univ.OctetString('my string value'))
+b'\x04\x0fmy string value'
+>>>
+>>> decoder.decode(substrate)
+(OctetString(b'my string value'), b'')
+>>> codeOrMessage, substrate = decoder.decode(substrate, asn1Spec=CodeOrMessage())
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The first thing to notice in the listing above is that the substrate produced
+for our Choice value object is identical to the substrate for an OctetString
+object initialized to the same value. In other words, any information about
+the Choice component is absent from the encoding.
+</p>
+
+<p>
+Sure enough, that kind of substrate will decode into an OctetString object,
+unless the original Choice type object is passed to the decoder to guide the
+decoding process.
+</p>
+
+<p>
+Similarly, the untagged ANY type behaves differently in the decoding phase -
+when the decoder bumps into an Any object in the pyasn1 specification, it
+stops decoding and puts all the substrate into a new Any value object in
+the form of an octet string. The concerned application could then re-run the
+decoder with an additional, more exact pyasn1 specification object to recover
+the contents of the Any object.
+</p>
+
+<p>
+As was mentioned elsewhere in this paper, the Any type allows incomplete
+or changing ASN.1 specifications to be handled gracefully by the decoder and
+applications.
+</p>
+
+<p>
+To illustrate the workings of the Any type, we'll have to set the stage
+by encoding a pyasn1 object and then putting its substrate into an Any
+object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> innerSubstrate = encoder.encode(univ.Integer(1234))
+>>> innerSubstrate
+b'\x02\x02\x04\xd2'
+>>> any = univ.Any(innerSubstrate)
+>>> any
+Any(b'\x02\x02\x04\xd2')
+>>> substrate = encoder.encode(any)
+>>> substrate
+b'\x02\x02\x04\xd2'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As with Choice type encoding, there are no traces of the Any type in the
+substrate. Obviously, the substrate we are dealing with will decode into the
+inner [Integer] component, unless a pyasn1 specification is given to guide
+the decoder. Continuing the previous code:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+
+>>> decoder.decode(substrate)
+(Integer(1234), b'')
+>>> any, substrate = decoder.decode(substrate, asn1Spec=univ.Any())
+>>> any
+Any(b'\x02\x02\x04\xd2')
+>>> decoder.decode(bytes(any))
+(Integer(1234), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Both CHOICE and ANY types are widely used in practice. Reader is welcome to
+take a look at
+<a href=http://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt>
+ASN.1 specifications of X.509 applications</a> for more information.
+</p>
+
+<a name="2.2.2"></a>
+<h4>
+2.2.2 Ignoring unknown types
+</h4>
+
+<p>
+When dealing with a loosely specified ASN.1 structure, the receiving
+end may not be aware of some types present in the substrate. It may be
+convenient then to switch the decoder into a recovery mode. Whilst there,
+the decoder will not bail out when it hits an unknown tag but will rather
+treat it as an Any type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> taggedInt = univ.Integer(12345).subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> substrate = encoder.encode(taggedInt)
+>>> decoder.decode(substrate)
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error: TagSet(Tag(tagClass=128, tagFormat=0, tagId=40)) not in asn1Spec
+>>>
+>>> decoder.decode.defaultErrorState = decoder.stDumpRawValue
+>>> decoder.decode(substrate)
+(Any(b'\x9f(\x0209'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+It's also possible to configure a custom decoder to handle unknown tags
+found in substrate. This can be done by means of the <b>defaultRawDecoder</b>
+attribute holding a reference to a type decoder object. Refer to the source
+for API details; a rough sketch follows.
+</p>
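+
+<p>
+A rough sketch of such a configuration (this assumes the AnyDecoder class
+of the BER decoder module; exact names and behaviour may vary across pyasn1
+versions):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.codec.ber import decoder
+>>> class MyRawDecoder(decoder.AnyDecoder):
+...     pass  # override valueDecoder() here to customize raw value handling
+>>> decoder.decode.defaultErrorState = decoder.stDumpRawValue
+>>> decoder.decode.defaultRawDecoder = MyRawDecoder()
+>>>
+</pre>
+</td></tr></table>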
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/constraints.html b/python/pyasn1/doc/constraints.html
new file mode 100644
index 000000000..53da1addf
--- /dev/null
+++ b/python/pyasn1/doc/constraints.html
@@ -0,0 +1,436 @@
+<html>
+<title>
+PyASN1 subtype constraints
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+
+<h4>
+1.4 PyASN1 subtype constraints
+</h4>
+
+<p>
+Most ASN.1 types can correspond to an infinite set of values. To adapt to a
+particular application's data model and needs, ASN.1 provides a mechanism
+for limiting the infinite set to the values that make sense in a particular case.
+</p>
+
+<p>
+Imposing value constraints on an ASN.1 type can also be seen as creating
+a subtype from its base type.
+</p>
+
+<p>
+In pyasn1, constraints take the shape of immutable objects capable
+of evaluating a given value against constraint-specific requirements.
+A constraint object is a property of a pyasn1 type. Like the TagSet property
+associated with every pyasn1 type, constraints can never be modified
+in place. The only way to modify a pyasn1 type's constraint is to associate
+a new constraint object with a new pyasn1 type object.
+</p>
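+
+<p>
+For example, the subtype() method could be used to associate a constraint
+with a copy of an existing type object (a sketch; the variable names are for
+illustration only):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> plainInt = univ.Integer()
+>>> smallInt = plainInt.subtype(
+...     subtypeSpec=constraint.ValueRangeConstraint(0, 10)
+... )
+>>>
+</pre>
+</td></tr></table>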
+
+<p>
+A handful of different flavors of <i>constraints</i> are defined in ASN.1.
+We will discuss them one by one in the following chapters and also explain
+how to combine and apply them to types.
+</p>
+
+<a name="1.4.1"></a>
+<h4>
+1.4.1 Single value constraint
+</h4>
+
+<p>
+This kind of constraint allows for limiting type to a finite, specified set
+of values.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+DialButton ::= OCTET STRING (
+ "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
+)
+</pre>
+</td></tr></table>
+
+<p>
+Its pyasn1 implementation would look like:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import constraint
+>>> c = constraint.SingleValueConstraint(
+...     '0','1','2','3','4','5','6','7','8','9'
+... )
+>>> c
+SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+>>> c('0')
+>>> c('A')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) failed at: A
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As can be seen in the snippet above, if a value violates the constraint, an
+exception will be thrown. A constrained pyasn1 type object holds a
+reference to a constraint object (or a combination of them, as will be
+explained later) and calls it for value verification.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class DialButton(univ.OctetString):
+... subtypeSpec = constraint.SingleValueConstraint(
+... '0','1','2','3','4','5','6','7','8','9'
+... )
+>>> DialButton('0')
+DialButton(b'0')
+>>> DialButton('A')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) failed at: A
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+A constrained pyasn1 value object can never hold a violating value.
+</p>
+
+<a name="1.4.2"></a>
+<h4>
+1.4.2 Value range constraint
+</h4>
+
+<p>
+A pair of values, compliant with the type being constrained, denotes the
+lower and upper bounds of the allowed range of values of a type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Teenagers ::= INTEGER (13..19)
+</pre>
+</td></tr></table>
+
+<p>
+And in pyasn1 terms:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class Teenagers(univ.Integer):
+... subtypeSpec = constraint.ValueRangeConstraint(13, 19)
+>>> Teenagers(14)
+Teenagers(14)
+>>> Teenagers(20)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueRangeConstraint(13, 19) failed at: 20
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The value range constraint usually applies to numeric types.
+</p>
+
+<a name="1.4.3"></a>
+<h4>
+1.4.3 Size constraint
+</h4>
+
+<p>
+It is sometimes convenient to set or limit the allowed size of a data item
+to be sent from one application to another, to manage bandwidth and memory
+consumption issues. The size constraint specifies the lower and upper bounds
+of the size of a valid value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+TwoBits ::= BIT STRING (SIZE (2))
+</pre>
+</td></tr></table>
+
+<p>
+Express the same grammar in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class TwoBits(univ.BitString):
+... subtypeSpec = constraint.ValueSizeConstraint(2, 2)
+>>> TwoBits((1,1))
+TwoBits("'11'B")
+>>> TwoBits((1,1,0))
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueSizeConstraint(2, 2) failed at: (1, 1, 0)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Size constraint can be applied to potentially massive values - bit or octet
+strings, SEQUENCE OF/SET OF values.
+</p>
+
+<a name="1.4.4"></a>
+<h4>
+1.4.4 Alphabet constraint
+</h4>
+
+<p>
+The permitted alphabet constraint is similar to the single value constraint,
+but it applies to the individual characters of a value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MorseCode ::= PrintableString (FROM ("."|"-"|" "))
+</pre>
+</td></tr></table>
+
+<p>
+And in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class MorseCode(char.PrintableString):
+... subtypeSpec = constraint.PermittedAlphabetConstraint(".", "-", " ")
+>>> MorseCode("...---...")
+MorseCode('...---...')
+>>> MorseCode("?")
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ PermittedAlphabetConstraint(".", "-", " ") failed at: "?"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The current implementation does not handle ranges of characters in a
+constraint (the FROM "A".."Z" syntax); one has to list every character
+of the set explicitly, as shown below.
+</p>
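+
+<p>
+For instance, the FROM "A".."Z" range could be emulated by expanding the
+range by hand (a sketch; the Capitals type is hypothetical):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> import string
+>>> from pyasn1.type import char, constraint
+>>> class Capitals(char.PrintableString):
+...     subtypeSpec = constraint.PermittedAlphabetConstraint(
+...         *string.ascii_uppercase
+...     )
+>>> Capitals('ABC')
+Capitals('ABC')
+>>>
+</pre>
+</td></tr></table>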
+
+<a name="1.4.5"></a>
+<h4>
+1.4.5 Constraint combinations
+</h4>
+
+<p>
+Up to this moment, we used a single constraint per ASN.1 type. The standard,
+however, allows for combining multiple individual constraints into
+intersections, unions and exclusions.
+</p>
+
+<p>
+In the pyasn1 data model, all of these methods of constraint combination are
+implemented as constraint-like objects holding individual constraint (or
+combination) objects. Like terminal constraint objects, combination objects
+are capable of performing value verification against their set of enclosed
+constraints, according to the logic of the particular combination.
+</p>
+
+<p>
+Constraints intersection verification succeeds only if a value is
+compliant with each constraint in the set. To begin with, the following
+specification will constitute a valid telephone number:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+PhoneNumber ::= NumericString (FROM ("0".."9")) (SIZE (11))
+</pre>
+</td></tr></table>
+
+<p>
+Constraint intersection object serves the logic above:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class PhoneNumber(char.NumericString):
+... subtypeSpec = constraint.ConstraintsIntersection(
+... constraint.PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+... constraint.ValueSizeConstraint(11, 11)
+... )
+>>> PhoneNumber('79039343212')
+PhoneNumber('79039343212')
+>>> PhoneNumber('?9039343212')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsIntersection(
+ PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+ ValueSizeConstraint(11, 11)) failed at:
+ PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9') failed at: "?039343212"
+>>> PhoneNumber('9343212')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsIntersection(
+ PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+ ValueSizeConstraint(11, 11)) failed at:
+ ValueSizeConstraint(11, 11) failed at: "9343212"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+A union of constraints works by making sure that a value is compliant
+with at least one of the constraints in the set. For instance:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+CapitalOrSmall ::= IA5String (FROM ('A','B','C') | FROM ('a','b','c'))
+</pre>
+</td></tr></table>
+
+<p>
+It's important to note that a value must fully comply with at least one
+single constraint in the set. In the specification above, a value of all
+small or all capital letters is compliant, but a mix of small and capital
+letters is not. Here's the pyasn1 analogue:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class CapitalOrSmall(char.IA5String):
+... subtypeSpec = constraint.ConstraintsUnion(
+... constraint.PermittedAlphabetConstraint('A','B','C'),
+... constraint.PermittedAlphabetConstraint('a','b','c')
+... )
+>>> CapitalOrSmall('ABBA')
+CapitalOrSmall('ABBA')
+>>> CapitalOrSmall('abba')
+CapitalOrSmall('abba')
+>>> CapitalOrSmall('Abba')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsUnion(PermittedAlphabetConstraint('A', 'B', 'C'),
+ PermittedAlphabetConstraint('a', 'b', 'c')) failed at: failed for "Abba"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Finally, the exclusion constraint simply negates the logic of value
+verification at a constraint. In the following example, any integer value
+is allowed in a type except zero.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+NoZero ::= INTEGER (ALL EXCEPT 0)
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1 the above definition would read:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class NoZero(univ.Integer):
+... subtypeSpec = constraint.ConstraintsExclusion(
+... constraint.SingleValueConstraint(0)
+... )
+>>> NoZero(1)
+NoZero(1)
+>>> NoZero(0)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsExclusion(SingleValueConstraint(0)) failed at: 0
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The depth of such a constraints tree, built with constraint combination
+objects at its nodes, has no explicit limit. Value verification is performed
+in a recursive manner until a definite answer is found.
+</p>
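+
+<p>
+For example, a two-level tree could combine a union of value ranges with an
+exclusion (a sketch; the values are for illustration and the error text may
+vary):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import constraint
+>>> c = constraint.ConstraintsIntersection(
+...     constraint.ConstraintsUnion(
+...         constraint.ValueRangeConstraint(1, 9),
+...         constraint.ValueRangeConstraint(100, 900)
+...     ),
+...     constraint.ConstraintsExclusion(
+...         constraint.SingleValueConstraint(5)
+...     )
+... )
+>>> c(7)
+>>> c(5)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError: ...
+>>>
+</pre>
+</td></tr></table>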
+
+<a name="1.5"></a>
+<h4>
+1.5 Types relationships
+</h4>
+
+<p>
+In the course of data processing in an application, it is sometimes
+convenient to figure out the type relationships between pyasn1 type or
+value objects. Formally, two things determine the relationship between
+pyasn1 types: the <i>tag set</i> and the <i>subtype constraints</i>. One
+pyasn1 type is considered to be a derivative of another if their TagSet and
+Constraint objects are a derivation of one another.
+</p>
+
+<p>
+The following example illustrates the concept (we use the same tagset but
+different constraints for simplicity):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> i1 = univ.Integer(subtypeSpec=constraint.ValueRangeConstraint(3,8))
+>>> i2 = univ.Integer(subtypeSpec=constraint.ConstraintsIntersection(
+... constraint.ValueRangeConstraint(3,8),
+... constraint.ValueRangeConstraint(4,7)
+... ) )
+>>> i1.isSameTypeWith(i2)
+False
+>>> i1.isSuperTypeOf(i2)
+True
+>>> i1.isSuperTypeOf(i1)
+True
+>>> i2.isSuperTypeOf(i1)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As can be seen in the above code snippet, there are two methods of any pyasn1
+type/value object that test types for their relationship:
+<b>isSameTypeWith</b>() and <b>isSuperTypeOf</b>(). The former is
+self-descriptive, while the latter yields true if the argument appears
+to be a pyasn1 object whose tagset and constraints are derived from those
+of the object being called.
+</p>
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/constructed.html b/python/pyasn1/doc/constructed.html
new file mode 100644
index 000000000..88de75075
--- /dev/null
+++ b/python/pyasn1/doc/constructed.html
@@ -0,0 +1,377 @@
+<html>
+<title>
+PyASN1 Constructed types
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+
+<h4>
+1.3 PyASN1 Constructed types
+</h4>
+
+<p>
+Besides scalar types, ASN.1 specifies so-called constructed ones - these
+are capable of holding one or more values of other types, both scalar
+and constructed.
+</p>
+
+<p>
+In the pyasn1 implementation, constructed ASN.1 types behave like
+Python sequences and also support additional component addressing methods
+specific to the particular constructed type.
+</p>
+
+<a name="1.3.1"></a>
+<h4>
+1.3.1 Sequence and Set types
+</h4>
+
+<p>
+The Sequence and Set types have many similar properties:
+</p>
+<ul>
+<li>they can hold any number of inner components of different types
+<li>every component has a human-friendly identifier
+<li>any component can have a default value
+<li>some components can be absent.
+</ul>
+
+<p>
+However, the Sequence type guarantees that the ordering of Sequence value
+components matches their declaration order. By contrast, components of the
+Set type can be ordered to best suit the application's needs.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Record ::= SEQUENCE {
+ id INTEGER,
+ room [0] INTEGER OPTIONAL,
+ house [1] INTEGER DEFAULT 0
+}
+</pre>
+</td></tr></table>
+
+<p>
+Up to this moment, the only method we have used for creating new pyasn1 types
+is Python sub-classing. With this method, a new, named Python class is
+created, which mimics type derivation in ASN.1 grammar. However, ASN.1 also
+allows for defining anonymous subtypes (the room and house components in the
+example above). To support anonymous subtyping in pyasn1, a cloning operation
+can be invoked on an existing pyasn1 type object, which creates a new
+instance of the original object with possibly modified properties.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype, tag
+>>> class Record(univ.Sequence):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('id', univ.Integer()),
+... namedtype.OptionalNamedType(
+... 'room',
+... univ.Integer().subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)
+... )
+... ),
+... namedtype.DefaultedNamedType(
+... 'house',
+... univ.Integer(0).subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)
+... )
+... )
+... )
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+All pyasn1 constructed type classes have a class attribute <b>componentType</b>
+that represents the default type specification. Its value is a NamedTypes object.
+</p>
+
+<p>
+The NamedTypes class instance holds a sequence of NamedType, OptionalNamedType
+or DefaultedNamedType objects which, in turn, refer to pyasn1 type objects that
+represent the inner SEQUENCE components specification.
+</p>
+
+<p>
+Finally, the invocation of the subtype() method of pyasn1 type objects in the
+code above returns an implicitly tagged copy of the original object.
+</p>
+
+<p>
+Once a SEQUENCE or SET type is declared with pyasn1, it can be instantiated
+and initialized (continuing the above code):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> record = Record()
+>>> record.setComponentByName('id', 123)
+>>> print(record.prettyPrint())
+Record:
+ id=123
+>>>
+>>> record.setComponentByPosition(1, 321)
+>>> print(record.prettyPrint())
+Record:
+ id=123
+ room=321
+>>>
+>>> record.setDefaultComponents()
+>>> print(record.prettyPrint())
+Record:
+ id=123
+ room=321
+ house=0
+</pre>
+</td></tr></table>
+
+<p>
+Inner components of pyasn1 Sequence/Set objects can be accessed using the
+following methods:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> record.getComponentByName('id')
+Integer(123)
+>>> record.getComponentByPosition(1)
+Integer(321)
+>>> record[2]
+Integer(0)
+>>> for idx in range(len(record)):
+... print(record.getNameByPosition(idx), record.getComponentByPosition(idx))
+id 123
+room 321
+house 0
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The Set type shares all the properties of the Sequence type, and additionally
+supports by-tag component addressing (as all Set components have distinct
+types).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype, tag
+>>> class Gamer(univ.Set):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('score', univ.Integer()),
+... namedtype.NamedType('player', univ.OctetString()),
+... namedtype.NamedType('id', univ.ObjectIdentifier())
+... )
+>>> gamer = Gamer()
+>>> gamer.setComponentByType(univ.Integer().getTagSet(), 121343)
+>>> gamer.setComponentByType(univ.OctetString().getTagSet(), 'Pascal')
+>>> gamer.setComponentByType(univ.ObjectIdentifier().getTagSet(), (1,3,7,2))
+>>> print(gamer.prettyPrint())
+Gamer:
+ score=121343
+ player=b'Pascal'
+ id=1.3.7.2
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.3.2"></a>
+<h4>
+1.3.2 SequenceOf and SetOf types
+</h4>
+
+<p>
+Both SequenceOf and SetOf types resemble an unlimited-size list of components.
+All the components must be of the same type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Progression ::= SEQUENCE OF INTEGER
+
+arithmeticProgression Progression ::= { 1, 3, 5, 7 }
+</pre>
+</td></tr></table>
+
+<p>
+SequenceOf and SetOf types are expressed by very similar pyasn1 type
+objects. Their components can only be addressed by position, and they
+both resize automatically.
+</p>
+
+<p>
+To specify inner component type, the <b>componentType</b> class attribute
+should refer to another pyasn1 type object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> class Progression(univ.SequenceOf):
+... componentType = univ.Integer()
+>>> arithmeticProgression = Progression()
+>>> arithmeticProgression.setComponentByPosition(1, 111)
+>>> print(arithmeticProgression.prettyPrint())
+Progression:
+-empty- 111
+>>> arithmeticProgression.setComponentByPosition(0, 100)
+>>> print(arithmeticProgression.prettyPrint())
+Progression:
+100 111
+>>>
+>>> for idx in range(len(arithmeticProgression)):
+... arithmeticProgression.getComponentByPosition(idx)
+Integer(100)
+Integer(111)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Any scalar or constructed pyasn1 type object can serve as an inner component.
+Missing components are prohibited in SequenceOf/SetOf value objects.
+</p>
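+
+<p>
+For instance, a SequenceOf type could itself serve as the inner component of
+another SequenceOf (a sketch continuing the Progression example above; the
+ProgressionTable name is hypothetical):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> class ProgressionTable(univ.SequenceOf):
+...     componentType = Progression()
+>>> table = ProgressionTable()
+>>> table.setComponentByPosition(0, arithmeticProgression)
+>>>
+</pre>
+</td></tr></table>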
+
+<a name="1.3.3"></a>
+<h4>
+1.3.3 Choice type
+</h4>
+
+<p>
+Values of the ASN.1 CHOICE type can contain only a single value of a type
+from a list of possible alternatives. Alternatives must be ASN.1 types with
+distinct tags for the whole structure to remain unambiguous. Unlike most
+other types, CHOICE is an untagged one, i.e. it has no base tag of its own.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+CodeOrMessage ::= CHOICE {
+ code INTEGER,
+ message OCTET STRING
+}
+</pre>
+</td></tr></table>
+
+<p>
+In the pyasn1 implementation, a Choice object behaves like a Set but accepts
+only a single inner component at a time. It also offers a few additional
+methods specific to its behaviour.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype
+>>> class CodeOrMessage(univ.Choice):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('code', univ.Integer()),
+... namedtype.NamedType('message', univ.OctetString())
+... )
+>>>
+>>> codeOrMessage = CodeOrMessage()
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+>>> codeOrMessage.setComponentByName('code', 123)
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ code=123
+>>> codeOrMessage.setComponentByName('message', 'my string value')
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Since there could be only a single inner component value in the pyasn1 Choice
+value object, either of the following methods could be used for fetching it
+(continuing previous code):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> codeOrMessage.getName()
+'message'
+>>> codeOrMessage.getComponent()
+OctetString(b'my string value')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.3.4"></a>
+<h4>
+1.3.4 Any type
+</h4>
+
+<p>
+The ASN.1 ANY type is a kind of wildcard or placeholder that matches
+any other type without knowing it in advance. Like CHOICE type, ANY
+has no base tag.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Error ::= SEQUENCE {
+ code INTEGER,
+ parameter ANY DEFINED BY code
+}
+</pre>
+</td></tr></table>
+
+<p>
+The ANY type is frequently used in specifications where the exact type is not
+yet agreed upon between communicating parties or the number of possible
+alternatives of a type is infinite.
+Sometimes an auxiliary selector is kept around to help parties indicate
+the kind of ANY payload in effect ("code" in the example above).
+</p>
+
+<p>
+Values of the ANY type contain serialized ASN.1 value(s) in the form of
+an octet string. Therefore the pyasn1 Any value object shares the properties
+of the pyasn1 OctetString object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> someValue = univ.Any(b'\x02\x01\x01')
+>>> someValue
+Any(b'\x02\x01\x01')
+>>> str(someValue)
+'\x02\x01\x01'
+>>> bytes(someValue)
+b'\x02\x01\x01'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The receiving application is supposed to explicitly deserialize the content of
+the Any value object, possibly using the auxiliary selector to figure out its
+ASN.1 type and pick the appropriate decoder.
+</p>
+
+<p>
+There will be some more talk and code snippets covering Any type in the codecs
+chapters that follow.
+</p>
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/intro.html b/python/pyasn1/doc/intro.html
new file mode 100644
index 000000000..3ff18b6ae
--- /dev/null
+++ b/python/pyasn1/doc/intro.html
@@ -0,0 +1,156 @@
+<html>
+<title>
+PyASN1 reference manual
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+
+<h3>
+PyASN1 reference manual
+</h3>
+
+<p align=right>
+<i>written by <a href=mailto:ilya@glas.net>Ilya Etingof</a>, 2011-2012</i>
+</p>
+
+<p>
+The free and open-source pyasn1 library makes it easier for programmers and
+network engineers to develop, debug and experiment with ASN.1-based protocols,
+using the Python programming language as a tool.
+</p>
+
+<p>
+Abstract Syntax Notation One
+(<a href=http://en.wikipedia.org/wiki/Abstract_Syntax_Notation_1x>ASN.1</a>)
+is a set of
+<a href=http://www.itu.int/ITU-T/studygroups/com17/languages/X.680-X.693-0207w.zip>
+ITU standards</a> concerned with provisioning instrumentation for developing
+data exchange protocols in a robust, clear and interoperable way for
+various IT systems and applications. Most of the efforts are targeting the
+following areas:
+<ul>
+<li>Data structures: the standard introduces a collection of basic data types
+(similar to integers, bits, strings, arrays and records in a programming
+language) that can be used for defining complex, possibly nested data
+structures representing domain-specific data units.
+<li>Serialization protocols: domain-specific data units expressed in ASN.1
+types could be converted into a series of octets for storage or transmission
+over the wire and then recovered back into their structured form on the
+receiving end. This process is immune to various hardware and software
+related dependencies.
+<li>Data description language: could be used to describe particular set of
+domain-specific data structures and their relationships. Such a description
+could be passed to an ASN.1 compiler for automated generation of program
+code that represents ASN.1 data structures in language-native environment
+and handles data serialization issues.
+</ul>
+</p>
+
+<p>
+This tutorial, and the algorithms implemented by the pyasn1 library, are
+largely based on the book
+<a href="http://www.oss.com/asn1/dubuisson.html">
+ASN.1 - Communication between heterogeneous systems</a>
+by Olivier Dubuisson. Another relevant resource is
+<a href=ftp://ftp.rsasecurity.com/pub/pkcs/ascii/layman.asc>
+A Layman's Guide to a Subset of ASN.1, BER, and DER</a> by Burton S. Kaliski.
+It's advised to refer to these books for more in-depth knowledge on the
+subject of ASN.1.
+</p>
+
+<p>
+As of this writing, pyasn1 library implements most of standard ASN.1 data
+structures in a rather detailed and feature-rich manner. Another highly
+important capability of the library is its data serialization facilities.
+The last component of the standard - an ASN.1 compiler - is planned for
+implementation in the future.
+</p>
+
+<p>
+The pyasn1 library was designed to follow the pre-1995 ASN.1 specification
+(also known as X.208). The later, post-1995, revision (X.680) introduced
+significant changes, most of which are not yet supported by pyasn1.
+</p>
+
+<h3>
+Table of contents
+</h3>
+
+<p>
+<ul>
+<li><a href="scalar.html">1. Data model for ASN.1 types</a>
+<li><a href="scalar.html#1.1">1.1 Scalar types</a>
+<li><a href="scalar.html#1.1.1">1.1.1 Boolean type</a>
+<li><a href="scalar.html#1.1.2">1.1.2 Null type</a>
+<li><a href="scalar.html#1.1.3">1.1.3 Integer type</a>
+<li><a href="scalar.html#1.1.4">1.1.4 Enumerated type</a>
+<li><a href="scalar.html#1.1.5">1.1.5 Real type</a>
+<li><a href="scalar.html#1.1.6">1.1.6 Bit string type</a>
+<li><a href="scalar.html#1.1.7">1.1.7 OctetString type</a>
+<li><a href="scalar.html#1.1.8">1.1.8 ObjectIdentifier type</a>
+<li><a href="scalar.html#1.1.9">1.1.9 Character string types</a>
+<li><a href="scalar.html#1.1.10">1.1.10 Useful types</a>
+<li><a href="tagging.html">1.2 Tagging</a>
+<li><a href="constructed.html">1.3 Constructed types</a>
+<li><a href="constructed.html#1.3.1">1.3.1 Sequence and Set types</a>
+<li><a href="constructed.html#1.3.2">1.3.2 SequenceOf and SetOf types</a>
+<li><a href="constructed.html#1.3.3">1.3.3 Choice type</a>
+<li><a href="constructed.html#1.3.4">1.3.4 Any type</a>
+<li><a href="constraints.html">1.4 Subtype constraints</a>
+<li><a href="constraints.html#1.4.1">1.4.1 Single value constraint</a>
+<li><a href="constraints.html#1.4.2">1.4.2 Value range constraint</a>
+<li><a href="constraints.html#1.4.3">1.4.3 Size constraint</a>
+<li><a href="constraints.html#1.4.4">1.4.4 Alphabet constraint</a>
+<li><a href="constraints.html#1.4.5">1.4.5 Constraint combinations</a>
+<li><a href="constraints.html#1.5">1.5 Types relationships</a>
+<li><a href="codecs.html">2. Codecs</a>
+<li><a href="codecs.html#2.1">2.1 Encoders</a>
+<li><a href="codecs.html#2.2">2.2 Decoders</a>
+<li><a href="codecs.html#2.2.1">2.2.1 Decoding untagged types</a>
+<li><a href="codecs.html#2.2.2">2.2.2 Ignoring unknown types</a>
+</ul>
+
+<p>
+Although the pyasn1 software is almost a decade old and used in many
+production environments, it may still have bugs and unimplemented pieces.
+Anyone who happens to run into such a defect is welcome to complain to the
+<a href=mailto:pyasn1-users@lists.sourceforge.net>pyasn1 mailing list</a>
+or, better yet, fix the issue and send
+<a href=mailto:ilya@glas.net>me</a> the patch.
+</p>
+
+<p>
+Typically, pyasn1 is used for building support for arbitrary protocols into
+various applications. This involves manual translation of ASN.1 data
+structures into their pyasn1 implementations. To save time and effort,
+data structures for some of the popular protocols are pre-programmed
+and kept for further re-use in the form of the
+<a href=http://sourceforge.net/projects/pyasn1/files/pyasn1-modules/>
+pyasn1-modules package</a>. For instance, many structures for PKI (X.509,
+PKCS#*, CRMF, OCSP), LDAP and SNMP are present.
+Application authors are advised to import and use the relevant modules
+from that package whenever the needed protocol structures are already
+there. New protocol module contributions are welcome.
+</p>
+
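+<p>
+For illustration only, re-using a pre-programmed protocol module might look
+like the following sketch (it assumes the pyasn1-modules package is
+installed; rfc2459 is the module covering X.509 certificate structures):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1_modules import rfc2459
+>>> certificate = rfc2459.Certificate()
+>>> # the empty value object is now ready to be populated, or passed
+>>> # to a decoder as a specification of the expected data structure
+</pre>
+</td></tr></table>
+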
+<p>
+And finally, the latest pyasn1 package revision is available for free
+download from
+<a href=http://sourceforge.net/projects/pyasn1/>project home</a> and
+also from the
+<a href=http://pypi.python.org/pypi>Python package repository</a>.
+</p>
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/pyasn1-tutorial.html b/python/pyasn1/doc/pyasn1-tutorial.html
new file mode 100644
index 000000000..2eb82f1e9
--- /dev/null
+++ b/python/pyasn1/doc/pyasn1-tutorial.html
@@ -0,0 +1,2405 @@
+<html>
+<title>
+PyASN1 programmer's manual
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+
+<h3>
+PyASN1 programmer's manual
+</h3>
+
+<p align=right>
+<i>written by <a href=mailto:ilya@glas.net>Ilya Etingof</a>, 2011-2012</i>
+</p>
+
+<p>
+The free and open-source pyasn1 library makes it easier for programmers and
+network engineers to develop, debug and experiment with ASN.1-based protocols,
+using the Python programming language as a tool.
+</p>
+
+<p>
+Abstract Syntax Notation One
+(<a href=http://en.wikipedia.org/wiki/Abstract_Syntax_Notation_1x>ASN.1</a>)
+is a set of
+<a href=http://www.itu.int/ITU-T/studygroups/com17/languages/X.680-X.693-0207w.zip>
+ITU standards</a> concerned with provisioning instrumentation for developing
+data exchange protocols in a robust, clear and interoperable way for
+various IT systems and applications. Most of the efforts are targeting the
+following areas:
+<ul>
+<li>Data structures: the standard introduces a collection of basic data types
+(similar to integers, bits, strings, arrays and records in a programming
+language) that can be used for defining complex, possibly nested data
+structures representing domain-specific data units.
+<li>Serialization protocols: domain-specific data units expressed in ASN.1
+types could be converted into a series of octets for storage or transmission
+over the wire and then recovered back into their structured form on the
+receiving end. This process is immune to various hardware and software
+related dependencies.
+<li>Data description language: could be used to describe a particular set of
+domain-specific data structures and their relationships. Such a description
+could be passed to an ASN.1 compiler for automated generation of program
+code that represents ASN.1 data structures in language-native environment
+and handles data serialization issues.
+</ul>
+</p>
+
+<p>
+This tutorial, and the algorithms implemented by the pyasn1 library, are
+largely based on the book
+<a href="http://www.oss.com/asn1/dubuisson.html">
+ASN.1 - Communication between heterogeneous systems</a>
+by Olivier Dubuisson. Another relevant resource is
+<a href=ftp://ftp.rsasecurity.com/pub/pkcs/ascii/layman.asc>
+A Layman's Guide to a Subset of ASN.1, BER, and DER</a> by Burton S. Kaliski.
+It's advised to refer to these books for more in-depth knowledge on the
+subject of ASN.1.
+</p>
+
+<p>
+As of this writing, the pyasn1 library implements most of the standard ASN.1
+data structures in a rather detailed and feature-rich manner. Another highly
+important capability of the library is its data serialization facilities.
+The last component of the standard - the ASN.1 compiler - is planned for
+implementation in the future.
+</p>
+
+<p>
+The pyasn1 library was designed to follow the pre-1995 ASN.1 specification
+(also known as X.208). The later, post-1995 revision (X.680) introduced
+significant changes, most of which are not yet supported by pyasn1.
+</p>
+
+<h3>
+Table of contents
+</h3>
+
+<p>
+<ul>
+<li><a href="#1">1. Data model for ASN.1 types</a>
+<li><a href="#1.1">1.1 Scalar types</a>
+<li><a href="#1.1.1">1.1.1 Boolean type</a>
+<li><a href="#1.1.2">1.1.2 Null type</a>
+<li><a href="#1.1.3">1.1.3 Integer type</a>
+<li><a href="#1.1.4">1.1.4 Enumerated type</a>
+<li><a href="#1.1.5">1.1.5 Real type</a>
+<li><a href="#1.1.6">1.1.6 Bit string type</a>
+<li><a href="#1.1.7">1.1.7 OctetString type</a>
+<li><a href="#1.1.8">1.1.8 ObjectIdentifier type</a>
+<li><a href="#1.1.9">1.1.9 Character string types</a>
+<li><a href="#1.1.10">1.1.10 Useful types</a>
+<li><a href="#1.2">1.2 Tagging</a>
+<li><a href="#1.3">1.3 Constructed types</a>
+<li><a href="#1.3.1">1.3.1 Sequence and Set types</a>
+<li><a href="#1.3.2">1.3.2 SequenceOf and SetOf types</a>
+<li><a href="#1.3.3">1.3.3 Choice type</a>
+<li><a href="#1.3.4">1.3.4 Any type</a>
+<li><a href="#1.4">1.4 Subtype constraints</a>
+<li><a href="#1.4.1">1.4.1 Single value constraint</a>
+<li><a href="#1.4.2">1.4.2 Value range constraint</a>
+<li><a href="#1.4.3">1.4.3 Size constraint</a>
+<li><a href="#1.4.4">1.4.4 Alphabet constraint</a>
+<li><a href="#1.4.5">1.4.5 Constraint combinations</a>
+<li><a href="#1.5">1.5 Types relationships</a>
+<li><a href="#2">2. Codecs</a>
+<li><a href="#2.1">2.1 Encoders</a>
+<li><a href="#2.2">2.2 Decoders</a>
+<li><a href="#2.2.1">2.2.1 Decoding untagged types</a>
+<li><a href="#2.2.2">2.2.2 Ignoring unknown types</a>
+<li><a href="#3">3. Feedback and getting help</a>
+</ul>
+
+
+<a name="1"></a>
+<h3>
+1. Data model for ASN.1 types
+</h3>
+
+<p>
+All ASN.1 types can be categorized into two groups: scalar (also called
+simple or primitive) and constructed. The first group is populated by
+well-known types like Integer or String. Members of the constructed group
+hold other types (simple or constructed) as their inner components, so
+they are semantically close to records or lists in a programming language.
+</p>
+
+<p>
+In pyasn1, all ASN.1 types and values are implemented as Python objects.
+The same pyasn1 object can represent either an ASN.1 type or a value,
+depending on the presence of a value initializer on object instantiation.
+We will further refer to these as <i>pyasn1 type objects</i> versus <i>pyasn1
+value objects</i>.
+</p>
+
+<p>
+Primitive ASN.1 types are implemented as immutable scalar objects. Their values
+can be used just like the corresponding native Python values (integers,
+strings/bytes etc) and freely mixed with them in expressions.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> asn1IntegerValue = univ.Integer(12)
+>>> asn1IntegerValue - 2
+10
+>>> univ.OctetString('abc') == 'abc'    # Python 2
+True
+>>> univ.OctetString(b'abc') == b'abc'  # Python 3
+True
+</pre>
+</td></tr></table>
+
+<p>
+It would be an error to perform an operation on a pyasn1 type object
+as it holds no value to deal with:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> asn1IntegerType = univ.Integer()
+>>> asn1IntegerType - 2
+...
+pyasn1.error.PyAsn1Error: No value for __coerce__()
+</pre>
+</td></tr></table>
+
+<a name="1.1"></a>
+<h4>
+1.1 Scalar types
+</h4>
+
+<p>
+In the sub-sections that follow, we will explain the pyasn1 mapping of these
+primitive ASN.1 types. Both the ASN.1 notation and the corresponding pyasn1
+syntax will be given in each case.
+</p>
+
+<a name="1.1.1"></a>
+<h4>
+1.1.1 Boolean type
+</h4>
+
+<p>
+This is the simplest type, whose values can be either True or False.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; type specification
+FunFactorPresent ::= BOOLEAN
+
+;; values declaration and assignment
+pythonFunFactor FunFactorPresent ::= TRUE
+cobolFunFactor FunFactorPresent ::= FALSE
+</pre>
+</td></tr></table>
+
+<p>
+And here's the pyasn1 version of it:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> class FunFactorPresent(univ.Boolean): pass
+...
+>>> pythonFunFactor = FunFactorPresent(True)
+>>> cobolFunFactor = FunFactorPresent(False)
+>>> pythonFunFactor
+FunFactorPresent('True(1)')
+>>> cobolFunFactor
+FunFactorPresent('False(0)')
+>>> pythonFunFactor == cobolFunFactor
+False
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.1.2"></a>
+<h4>
+1.1.2 Null type
+</h4>
+
+<p>
+The NULL type is sometimes used to express the absence of any information.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; type specification
+Vote ::= CHOICE {
+ agreed BOOLEAN,
+ skip NULL
+}
+
+;; value declaration and assignment
+myVote Vote ::= skip:NULL
+</pre>
+</td></tr></table>
+
+<p>
+We will explain the CHOICE type later in this paper; meanwhile, here is the
+NULL type in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> skip = univ.Null()
+>>> skip
+Null('')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.1.3"></a>
+<h4>
+1.1.3 Integer type
+</h4>
+
+<p>
+ASN.1 defines the values of the Integer type as positive or negative whole
+numbers of arbitrary length. This definition plays nicely with Python, as the
+latter places no limit on integers. However, some ASN.1 implementations may
+impose certain limits on integer value ranges. Keep that in mind when
+designing new data structures.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; values specification
+age-of-universe INTEGER ::= 13750000000
+mean-martian-surface-temperature INTEGER ::= -63
+</pre>
+</td></tr></table>
+
+<p>
+A rather straightforward mapping into pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> ageOfUniverse = univ.Integer(13750000000)
+>>> ageOfUniverse
+Integer(13750000000)
+>>>
+>>> meanMartianSurfaceTemperature = univ.Integer(-63)
+>>> meanMartianSurfaceTemperature
+Integer(-63)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+ASN.1 allows assigning human-friendly names to particular values of
+an INTEGER type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Temperature ::= INTEGER {
+ freezing(0),
+ boiling(100)
+}
+</pre>
+</td></tr></table>
+
+<p>
+The Temperature type expressed in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval
+>>> class Temperature(univ.Integer):
+... namedValues = namedval.NamedValues(('freezing', 0), ('boiling', 100))
+...
+>>> t = Temperature(0)
+>>> t
+Temperature('freezing(0)')
+>>> t + 1
+Temperature(1)
+>>> t + 100
+Temperature('boiling(100)')
+>>> t = Temperature('boiling')
+>>> t
+Temperature('boiling(100)')
+>>> Temperature('boiling') / 2
+Temperature(50)
+>>> -1 < Temperature('freezing')
+True
+>>> 47 > Temperature('boiling')
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+These value labels have no effect on Integer type operations; any value
+can still be assigned to a type (information on value constraints will
+follow further in this paper).
+</p>
+
+<a name="1.1.4"></a>
+<h4>
+1.1.4 Enumerated type
+</h4>
+
+<p>
+The ASN.1 Enumerated type differs from the Integer type in a number of ways.
+Most importantly, its instance can only hold a value that belongs
+to a set of values specified at type declaration.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+error-status ::= ENUMERATED {
+ no-error(0),
+ authentication-error(10),
+ authorization-error(20),
+ general-failure(51)
+}
+</pre>
+</td></tr></table>
+
+<p>
+When constructing the Enumerated type, we will use two pyasn1 features: value
+labels (as mentioned above) and a value constraint (to be described in
+more detail later on).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval, constraint
+>>> class ErrorStatus(univ.Enumerated):
+... namedValues = namedval.NamedValues(
+... ('no-error', 0),
+... ('authentication-error', 10),
+... ('authorization-error', 20),
+... ('general-failure', 51)
+... )
+... subtypeSpec = univ.Enumerated.subtypeSpec + \
+... constraint.SingleValueConstraint(0, 10, 20, 51)
+...
+>>> errorStatus = ErrorStatus('no-error')
+>>> errorStatus
+ErrorStatus('no-error(0)')
+>>> errorStatus == ErrorStatus('general-failure')
+False
+>>> ErrorStatus('non-existing-state')
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error: Can't coerce non-existing-state into integer
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The particular integer values associated with Enumerated value states
+have no meaning of their own. They should not be used as such or in any
+kind of math operation. Those integer values are only used by codecs to
+transfer state from one entity to another.
+</p>
+
+<a name="1.1.5"></a>
+<h4>
+1.1.5 Real type
+</h4>
+
+<p>
+Values of the Real type are a three-component tuple of mantissa, base and
+exponent. All three are integers.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+pi ::= REAL { mantissa 314159, base 10, exponent -5 }
+</pre>
+</td></tr></table>
+
+<p>
+Corresponding pyasn1 objects can be initialized with either a three-component
+tuple or a Python float. Infinite values can be expressed in a way
+compatible with the Python float type.
+
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> pi = univ.Real((314159, 10, -5))
+>>> pi
+Real((314159, 10,-5))
+>>> float(pi)
+3.14159
+>>> pi == univ.Real(3.14159)
+True
+>>> univ.Real('inf')
+Real('inf')
+>>> univ.Real('-inf') == float('-inf')
+True
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+If a Real object is initialized from a Python float or yielded by a math
+operation, the base is set to decimal 10 (which affects encoding).
+</p>
+
+<a name="1.1.6"></a>
+<h4>
+1.1.6 Bit string type
+</h4>
+
+<p>
+The ASN.1 BIT STRING type holds opaque binary data of arbitrary length.
+A BIT STRING value can be initialized by either a binary (base 2) or
+hex (base 16) value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+public-key BIT STRING ::= '1010111011110001010110101101101
+ 1011000101010000010110101100010
+ 0110101010000111101010111111110'B
+
+signature BIT STRING ::= 'AF01330CD932093392100B39FF00DE0'H
+</pre>
+</td></tr></table>
+
+<p>
+The pyasn1 BitString objects can be initialized from native ASN.1 notation
+(base 2 or base 16 strings) or from a Python tuple of binary components.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> publicKey = univ.BitString(
+... "'1010111011110001010110101101101"
+... "1011000101010000010110101100010"
+... "0110101010000111101010111111110'B"
+... )
+>>> publicKey
+BitString("'10101110111100010101101011011011011000101010000010110101100010\
+0110101010000111101010111111110'B")
+>>> signature = univ.BitString(
+... "'AF01330CD932093392100B39FF00DE0'H"
+... )
+>>> signature
+BitString("'101011110000000100110011000011001101100100110010000010010011001\
+1100100100001000000001011001110011111111100000000110111100000'B")
+>>> fingerprint = univ.BitString(
+...   (1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1)
+... )
+>>> fingerprint
+BitString("'101101110101'B")
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Another BIT STRING initialization method supported by ASN.1 notation
+is to specify only the set (one-valued) bits, along with their human-friendly
+labels and bit offsets relative to the beginning of the bit string. With this
+method, all bits not explicitly mentioned are set to zero.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+bit-mask BIT STRING ::= {
+ read-flag(0),
+ write-flag(2),
+ run-flag(4)
+}
+</pre>
+</td></tr></table>
+
+<p>
+To express this in pyasn1, we will employ the named values feature (as with
+the Enumerated type).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval
+>>> class BitMask(univ.BitString):
+... namedValues = namedval.NamedValues(
+... ('read-flag', 0),
+... ('write-flag', 2),
+... ('run-flag', 4)
+... )
+>>> bitMask = BitMask('read-flag,run-flag')
+>>> bitMask
+BitMask("'10001'B")
+>>> tuple(bitMask)
+(1, 0, 0, 0, 1)
+>>> bitMask[4]
+1
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The BitString objects mimic the properties of the Python tuple type in
+terms of immutable sequence object protocol support.
+</p>
+
+<a name="1.1.7"></a>
+<h4>
+1.1.7 OctetString type
+</h4>
+
+<p>
+The OCTET STRING type is a confusing subject. According to the ASN.1
+specification, this type is similar to BIT STRING; the major difference
+is that the former operates in 8-bit chunks of data. What is important
+to note is that OCTET STRING was NOT designed to handle text strings - the
+standard provides many other types specialized for text content. For that
+reason, ASN.1 forbids initializing OCTET STRING values with "quoted text
+strings"; only binary or hex initializers, similar to BIT STRING ones,
+are allowed.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+thumbnail OCTET STRING ::= '1000010111101110101111000000111011'B
+thumbnail OCTET STRING ::= 'FA9823C43E43510DE3422'H
+</pre>
+</td></tr></table>
+
+<p>
+However, ASN.1 users (e.g. protocol designers) seem to ignore the original
+purpose of the OCTET STRING type - they use it for handling all kinds of
+data, including text strings.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+welcome-message OCTET STRING ::= "Welcome to ASN.1 wilderness!"
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, we have taken a liberal approach and allowed both BIT STRING
+style and quoted text initializers for the OctetString objects. To avoid
+possible collisions, quoted text is the default initialization syntax.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> thumbnail = univ.OctetString(
+... binValue='1000010111101110101111000000111011'
+... )
+>>> thumbnail
+OctetString(hexValue='85eebcec0')
+>>> thumbnail = univ.OctetString(
+... hexValue='FA9823C43E43510DE3422'
+... )
+>>> thumbnail
+OctetString(hexValue='fa9823c43e4351de34220')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The most frequent usage of the OctetString class is to instantiate it with
+a text string.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> welcomeMessage = univ.OctetString('Welcome to ASN.1 wilderness!')
+>>> welcomeMessage
+OctetString(b'Welcome to ASN.1 wilderness!')
+>>> print('%s' % welcomeMessage)
+Welcome to ASN.1 wilderness!
+>>> welcomeMessage[11:16]
+OctetString(b'ASN.1')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+OctetString objects support the immutable sequence object protocol.
+In other words, they behave like Python 3 bytes (or Python 2 strings).
+</p>
+
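+<p>
+A brief, informal sketch of that protocol in action (the exact repr() output
+may vary across pyasn1 versions):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> ws = univ.OctetString('asn')
+>>> len(ws)
+3
+>>> ws + b'.1'
+OctetString(b'asn.1')
+>>>
+</pre>
+</td></tr></table>
+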
+<p>
+When running pyasn1 on Python 3, it's better to use the bytes objects for
+OctetString instantiation, as it's more reliable and efficient.
+</p>
+
+<p>
+Additionally, OctetStrings can also be instantiated with a sequence of
+8-bit integers (ASCII codes).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> univ.OctetString((77, 101, 101, 103, 111))
+OctetString(b'Meego')
+</pre>
+</td></tr></table>
+
+<p>
+It is sometimes convenient to express OctetString instances as 8-bit
+characters (Python 3 bytes or Python 2 strings) or 8-bit integers.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> octetString = univ.OctetString('ABCDEF')
+>>> octetString.asNumbers()
+(65, 66, 67, 68, 69, 70)
+>>> octetString.asOctets()
+b'ABCDEF'
+</pre>
+</td></tr></table>
+
+<a name="1.1.8"></a>
+<h4>
+1.1.8 ObjectIdentifier type
+</h4>
+
+<p>
+Values of the OBJECT IDENTIFIER type are sequences of integers that could
+be used to identify virtually anything in the world. Various ASN.1-based
+protocols employ OBJECT IDENTIFIERs for their own identification needs.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+internet-id OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) dod(6) internet(1)
+}
+</pre>
+</td></tr></table>
+
+<p>
+One of the natural ways to map the OBJECT IDENTIFIER type onto a Python
+type is to use Python tuples of integers, so this is the approach taken by
+pyasn1.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> internetId = univ.ObjectIdentifier((1, 3, 6, 1))
+>>> internetId
+ObjectIdentifier('1.3.6.1')
+>>> internetId[2]
+6
+>>> internetId[1:3]
+ObjectIdentifier('3.6')
+</pre>
+</td></tr></table>
+
+<p>
+A more human-friendly "dotted" notation is also supported.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> univ.ObjectIdentifier('1.3.6.1')
+ObjectIdentifier('1.3.6.1')
+</pre>
+</td></tr></table>
+
+<p>
+Symbolic names of the arcs of an object identifier, sometimes present in
+ASN.1 specifications, are neither preserved nor used in pyasn1 objects.
+</p>
+
+<p>
+The ObjectIdentifier objects mimic the properties of the Python tuple type in
+terms of immutable sequence object protocol support.
+</p>
+
+<a name="1.1.9"></a>
+<h4>
+1.1.9 Character string types
+</h4>
+
+<p>
+The ASN.1 standard introduces a diverse set of text-specific types. All of them
+were designed to handle various types of characters. Some of these types seem
+to be obsolete nowadays, as their target technologies are gone. Another issue
+to be aware of is that the raw OCTET STRING type is sometimes used in practice
+by ASN.1 users instead of the specialized character string types, despite
+the explicit prohibition imposed by the ASN.1 specification.
+</p>
+
+<p>
+Two types specific to ASN.1 are NumericString and PrintableString.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+welcome-message ::= PrintableString {
+ "Welcome to ASN.1 text types"
+}
+
+dial-pad-numbers ::= NumericString {
+ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
+}
+</pre>
+</td></tr></table>
+
+<p>
+Their pyasn1 implementations are:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> '%s' % char.PrintableString("Welcome to ASN.1 text types")
+'Welcome to ASN.1 text types'
+>>> dialPadNumbers = char.NumericString(
+...   "0" "1" "2" "3" "4" "5" "6" "7" "8" "9"
+... )
+>>> dialPadNumbers
+NumericString(b'0123456789')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The following types came to ASN.1 from ISO standards on character sets.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> char.VisibleString("abc")
+VisibleString(b'abc')
+>>> char.IA5String('abc')
+IA5String(b'abc')
+>>> char.TeletexString('abc')
+TeletexString(b'abc')
+>>> char.VideotexString('abc')
+VideotexString(b'abc')
+>>> char.GraphicString('abc')
+GraphicString(b'abc')
+>>> char.GeneralString('abc')
+GeneralString(b'abc')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The last three types are relatively recent additions to the family of
+character string types: UniversalString, BMPString, UTF8String.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> char.UniversalString("abc")
+UniversalString(b'abc')
+>>> char.BMPString('abc')
+BMPString(b'abc')
+>>> char.UTF8String('abc')
+UTF8String(b'abc')
+>>> utf8String = char.UTF8String('У попа была собака')
+>>> utf8String
+UTF8String(b'\xd0\xa3 \xd0\xbf\xd0\xbe\xd0\xbf\xd0\xb0 \xd0\xb1\xd1\x8b\xd0\xbb\xd0\xb0 \
+\xd1\x81\xd0\xbe\xd0\xb1\xd0\xb0\xd0\xba\xd0\xb0')
+>>> print(utf8String)
+У попа была собака
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, all character type objects behave like Python strings. None of
+them is currently constrained in terms of valid alphabet, so it's up to
+the data source to keep an eye on data validation for these types.
+</p>
+
+<a name="1.1.10"></a>
+<h4>
+1.1.10 Useful types
+</h4>
+
+<p>
+There are three so-called useful types defined in the standard:
+ObjectDescriptor, GeneralizedTime, UTCTime. They all are subtypes
+of the GraphicString or VisibleString types; therefore the useful types are
+themselves character string types.
+</p>
+
+<p>
+The ASN.1 standard advises having an instance of the ObjectDescriptor
+type hold a human-readable description of the corresponding instance of the
+OBJECT IDENTIFIER type. There is no formal linkage between these instances
+and no provision for ObjectDescriptor uniqueness in the standard.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import useful
+>>> descrBER = useful.ObjectDescriptor(
+...   "Basic encoding of a single ASN.1 type"
+... )
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+GeneralizedTime and UTCTime types are designed to hold a human-readable
+timestamp in a universal and unambiguous form. The former provides
+more flexibility in notation while the latter is more strict but has
+Y2K issues.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; Mar 8 2011 12:00:00 MSK
+moscow-time GeneralizedTime ::= "20110308120000.0"
+;; Mar 8 2011 12:00:00 UTC
+utc-time GeneralizedTime ::= "201103081200Z"
+;; Mar 8 1998 12:00:00 UTC
+utc-time UTCTime ::= "9803081200Z"
+</pre>
+</td></tr></table>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import useful
+>>> moscowTime = useful.GeneralizedTime("20110308120000.0")
+>>> utcTime = useful.UTCTime("9803081200Z")
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Despite their intended use, these types possess no special time-related
+handling in pyasn1. They are just printable strings.
+</p>
+
+<a name="1.2"></a>
+<h4>
+1.2 Tagging
+</h4>
+
+<p>
+In order to continue with the Constructed ASN.1 types, we will first have
+to introduce the concept of tagging (and its pyasn1 implementation), as
+some of the Constructed types rely upon the tagging feature.
+</p>
+
+<p>
+When a value is coming into an ASN.1-based system (received from a network
+or read from some storage), the receiving entity has to determine the
+type of the value to interpret and verify it accordingly.
+</p>
+
+<p>
+Historically, the first data serialization protocol introduced in
+ASN.1 was BER (Basic Encoding Rules). According to BER, any serialized
+value is packed into a triplet of (Type, Length, Value), where Type is a
+code that identifies the value (called a <i>tag</i> in ASN.1),
+Length is the number of bytes occupied by the value in its serialized form,
+and Value is the ASN.1 value in a form suitable for serial transmission
+or storage.
+
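+<p>
+The TLV layout is easy to observe on a small example (a sketch using the BER
+encoder discussed later in this paper):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(univ.Integer(1))
+b'\x02\x01\x01'
+>>> # Type = 0x02 (the INTEGER tag), Length = 0x01, Value = 0x01
+</pre>
+</td></tr></table>
+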
+<p>
+For that reason almost every ASN.1 type has a tag (which is actually a
+BER type) associated with it by default.
+</p>
+
+<p>
+An ASN.1 tag could be viewed as a tuple of three numbers:
+(Class, Format, Number). While the Number identifies a tag, the Class
+component is used to create scopes for Numbers. Four scopes are currently
+defined: UNIVERSAL, context-specific, APPLICATION and PRIVATE. The Format
+component is actually a one-bit flag - zero for tags associated with scalar
+types, and one for constructed types (to be discussed later on).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] INTEGER
+MyOctetString ::= [APPLICATION 0] OCTET STRING
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, tags are implemented as immutable, tuple-like objects:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> myTag = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10)
+>>> myTag
+Tag(tagClass=128, tagFormat=0, tagId=10)
+>>> tuple(myTag)
+(128, 0, 10)
+>>> myTag[2]
+10
+>>> myTag == tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 10)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The default tag associated with an ASN.1 type can be extended or replaced
+to make the new type distinguishable from its ancestor. The standard provides
+two modes of tag mangling - IMPLICIT and EXPLICIT.
+</p>
+
+<p>
+EXPLICIT mode works by appending a new tag to the existing ones, thus creating
+an ordered set of tags. This set will be considered as a whole for type
+identification and encoding purposes. An important property of the EXPLICIT
+tagging mode is that it preserves base type information in the encoding, which
+makes it possible to completely recover type information from the encoding.
+</p>
+
+<p>
+When tagging in IMPLICIT mode, the outermost existing tag is dropped and
+replaced with a new one.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] IMPLICIT INTEGER
+MyOctetString ::= [APPLICATION 0] EXPLICIT OCTET STRING
+</pre>
+</td></tr></table>
+
+<p>
+To model both modes of tagging, a specialized container TagSet object (holding
+zero, one or more Tag objects) is used in pyasn1.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> tagSet = tag.TagSet(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10), # base tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10) # effective tag
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10))
+>>> tagSet.getBaseTag()
+Tag(tagClass=128, tagFormat=0, tagId=10)
+>>> tagSet = tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 20)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20))
+>>> tagSet = tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 30)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20),
+ Tag(tagClass=128, tagFormat=32, tagId=30))
+>>> tagSet = tagSet.tagImplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20),
+ Tag(tagClass=128, tagFormat=32, tagId=40))
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As a side note: the "base tag" concept (accessible through the getBaseTag()
+method) is specific to pyasn1 -- the base tag is used to identify the original
+ASN.1 type of an object in question. Base tag is never occurs in encoding
+and is mostly used internally by pyasn1 for choosing type-specific data
+processing algorithms. The "effective tag" is the one that always appears in
+encoding and is used on tagSets comparation.
+</p>
+
+<p>
+Any two TagSet objects can be compared to see if one is a derivative
+of the other. Figuring this out is also useful in cases when type-specific
+data processing algorithms are to be chosen.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> tagSet1 = tag.TagSet(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10), # base tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10) # effective tag
+... )
+>>> tagSet2 = tagSet1.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 20)
+... )
+>>> tagSet1.isSuperTagSetOf(tagSet2)
+True
+>>> tagSet2.isSuperTagSetOf(tagSet1)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+We will complete this discussion on tagging with a real-world example. The
+following ASN.1 tagged type:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] EXPLICIT INTEGER
+</pre>
+</td></tr></table>
+
+<p>
+could be expressed in pyasn1 like this:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> class MyIntegerType(univ.Integer):
+... tagSet = univ.Integer.tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 12)
+... )
+>>> myInteger = MyIntegerType(12345)
+>>> myInteger.getTagSet()
+TagSet(Tag(tagClass=0, tagFormat=0, tagId=2),
+ Tag(tagClass=128, tagFormat=32, tagId=12))
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Referring to the above code, the tagSet class attribute is a property of any
+pyasn1 type object that assigns a default tag set to a pyasn1 value object.
+This default tagSet specification can be ignored and effectively replaced by
+some other tagSet value passed on object instantiation.
+</p>
+
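+<p>
+For example, an alternative tag set could be passed at instantiation time.
+The following is a sketch only; the exact repr() output may vary across
+pyasn1 versions:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> myInteger = univ.Integer(
+...   12345,
+...   tagSet=univ.Integer.tagSet.tagImplicitly(
+...     tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+...   )
+... )
+>>> myInteger.getTagSet()
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=40))
+>>>
+</pre>
+</td></tr></table>
+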
+<p>
+It's important to understand that the tag set property of a pyasn1 type/value
+object can never be modified in place. In other words, a pyasn1 type/value
+object can never change its tags. The only way is to create a new pyasn1
+type/value object and associate a different tag set with it.
+</p>
+
+
+<a name="1.3"></a>
+<h4>
+1.3 Constructed types
+</h4>
+
+<p>
+Besides scalar types, ASN.1 specifies so-called constructed ones - these
+are capable of holding one or more values of other types, both scalar
+and constructed.
+</p>
+
+<p>
+In the pyasn1 implementation, constructed ASN.1 types behave like
+Python sequences, and also support additional component addressing methods
+specific to the particular constructed type.
+</p>
+
+<a name="1.3.1"></a>
+<h4>
+1.3.1 Sequence and Set types
+</h4>
+
+<p>
+The Sequence and Set types have many similar properties:
+</p>
+<ul>
+<li>they can hold any number of inner components of different types
+<li>every component has a human-friendly identifier
+<li>any component can have a default value
+<li>some components can be absent.
+</ul>
+
+<p>
+However, the Sequence type guarantees that the ordering of Sequence value
+components matches their declaration order. By contrast, components of the
+Set type can be ordered to best suit the application's needs.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Record ::= SEQUENCE {
+ id INTEGER,
+ room [0] INTEGER OPTIONAL,
+ house [1] INTEGER DEFAULT 0
+}
+</pre>
+</td></tr></table>
+
+<p>
+Up to this moment, the only method we used for creating new pyasn1 types
+is Python sub-classing. With this method, a new, named Python class is
+created, which mimics type derivation in ASN.1 grammar. However, ASN.1 also
+allows for defining anonymous subtypes (the room and house components in the
+example above). To support anonymous subtyping in pyasn1, a cloning operation
+can be invoked on an existing pyasn1 type object, which creates a new instance
+of the original object with possibly modified properties.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype, tag
+>>> class Record(univ.Sequence):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('id', univ.Integer()),
+... namedtype.OptionalNamedType(
+... 'room',
+... univ.Integer().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+... ),
+... namedtype.DefaultedNamedType(
+... 'house',
+... univ.Integer(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
+... )
+... )
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+All pyasn1 constructed type classes have a class attribute <b>componentType</b>
+that represents the default type specification. Its value is a NamedTypes
+object.
+</p>
+
+<p>
+The NamedTypes class instance holds a sequence of NamedType, OptionalNamedType
+or DefaultedNamedType objects which, in turn, refer to pyasn1 type objects that
+represent the inner SEQUENCE component specifications.
+</p>
+
+<p>
+Finally, the invocation of the subtype() method of a pyasn1 type object in the
+code above returns an implicitly tagged copy of the original object.
+</p>
+
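+<p>
+In isolation, the cloning/subtyping operation looks like this (a minimal
+sketch based on the subtype() call used above):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> taggedInt = univ.Integer().subtype(
+...   implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)
+... )
+>>> taggedInt.getTagSet()
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=0))
+>>>
+</pre>
+</td></tr></table>
+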
+<p>
+Once a SEQUENCE or SET type is declared with pyasn1, it can be instantiated
+and initialized (continuing the above code):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> record = Record()
+>>> record.setComponentByName('id', 123)
+>>> print(record.prettyPrint())
+Record:
+ id=123
+>>>
+>>> record.setComponentByPosition(1, 321)
+>>> print(record.prettyPrint())
+Record:
+ id=123
+ room=321
+>>>
+>>> record.setDefaultComponents()
+>>> print(record.prettyPrint())
+Record:
+ id=123
+ room=321
+ house=0
+</pre>
+</td></tr></table>
+
+<p>
+Inner components of pyasn1 Sequence/Set objects can be accessed using the
+following methods:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> record.getComponentByName('id')
+Integer(123)
+>>> record.getComponentByPosition(1)
+Integer(321)
+>>> record[2]
+Integer(0)
+>>> for idx in range(len(record)):
+... print(record.getNameByPosition(idx), record.getComponentByPosition(idx))
+id 123
+room 321
+house 0
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The Set type shares all the properties of the Sequence type, and additionally
+supports by-tag component addressing (as all Set components have distinct
+types).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype, tag
+>>> class Gamer(univ.Set):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('score', univ.Integer()),
+... namedtype.NamedType('player', univ.OctetString()),
+... namedtype.NamedType('id', univ.ObjectIdentifier())
+... )
+>>> gamer = Gamer()
+>>> gamer.setComponentByType(univ.Integer().getTagSet(), 121343)
+>>> gamer.setComponentByType(univ.OctetString().getTagSet(), 'Pascal')
+>>> gamer.setComponentByType(univ.ObjectIdentifier().getTagSet(), (1,3,7,2))
+>>> print(gamer.prettyPrint())
+Gamer:
+ score=121343
+ player=b'Pascal'
+ id=1.3.7.2
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.3.2"></a>
+<h4>
+1.3.2 SequenceOf and SetOf types
+</h4>
+
+<p>
+Both SequenceOf and SetOf types resemble an unbounded list of components.
+All the components must be of the same type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Progression ::= SEQUENCE OF INTEGER
+
+arithmeticProgression Progression ::= { 1, 3, 5, 7 }
+</pre>
+</td></tr></table>
+
+<p>
+SequenceOf and SetOf types are expressed by very similar pyasn1 type
+objects. Their components can only be addressed by position, and they
+both resize automatically.
+</p>
+
+<p>
+To specify inner component type, the <b>componentType</b> class attribute
+should refer to another pyasn1 type object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> class Progression(univ.SequenceOf):
+... componentType = univ.Integer()
+>>> arithmeticProgression = Progression()
+>>> arithmeticProgression.setComponentByPosition(1, 111)
+>>> print(arithmeticProgression.prettyPrint())
+Progression:
+-empty- 111
+>>> arithmeticProgression.setComponentByPosition(0, 100)
+>>> print(arithmeticProgression.prettyPrint())
+Progression:
+100 111
+>>>
+>>> for idx in range(len(arithmeticProgression)):
+... arithmeticProgression.getComponentByPosition(idx)
+Integer(100)
+Integer(111)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Any scalar or constructed pyasn1 type object can serve as an inner component.
+Missing components are prohibited in SequenceOf/SetOf value objects.
+</p>
+
+<a name="1.3.3"></a>
+<h4>
+1.3.3 Choice type
+</h4>
+
+<p>
+Values of the ASN.1 CHOICE type can contain only a single value of a type from
+a list of possible alternatives. The alternatives must be ASN.1 types with
+distinct tags for the whole structure to remain unambiguous. Unlike most
+other types, CHOICE is an untagged one, i.e. it has no base tag of its own.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+CodeOrMessage ::= CHOICE {
+ code INTEGER,
+ message OCTET STRING
+}
+</pre>
+</td></tr></table>
+
+<p>
+In the pyasn1 implementation, a Choice object behaves like a Set but accepts
+only a single inner component at a time. It also offers a few additional
+methods specific to its behaviour.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype
+>>> class CodeOrMessage(univ.Choice):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('code', univ.Integer()),
+... namedtype.NamedType('message', univ.OctetString())
+... )
+>>>
+>>> codeOrMessage = CodeOrMessage()
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+>>> codeOrMessage.setComponentByName('code', 123)
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ code=123
+>>> codeOrMessage.setComponentByName('message', 'my string value')
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Since there can only be a single inner component value in a pyasn1 Choice
+value object, either of the following methods can be used for fetching it
+(continuing the previous code):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> codeOrMessage.getName()
+'message'
+>>> codeOrMessage.getComponent()
+OctetString(b'my string value')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.3.4"></a>
+<h4>
+1.3.4 Any type
+</h4>
+
+<p>
+The ASN.1 ANY type is a kind of wildcard or placeholder that matches
+any other type without knowing it in advance. Like CHOICE type, ANY
+has no base tag.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Error ::= SEQUENCE {
+ code INTEGER,
+ parameter ANY DEFINED BY code
+}
+</pre>
+</td></tr></table>
+
+<p>
+The ANY type is frequently used in specifications where the exact type is not
+yet agreed upon between the communicating parties, or where the number of
+possible alternatives of a type is infinite.
+Sometimes an auxiliary selector is kept around to help the parties indicate
+the kind of ANY payload in effect ("code" in the example above).
+</p>
+
+<p>
+Values of the ANY type contain serialized ASN.1 value(s) in the form of
+an octet string. Therefore, pyasn1 Any value objects share the properties of
+the pyasn1 OctetString object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> someValue = univ.Any(b'\x02\x01\x01')
+>>> someValue
+Any(b'\x02\x01\x01')
+>>> str(someValue)
+'\x02\x01\x01'
+>>> bytes(someValue)
+b'\x02\x01\x01'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The receiving application is supposed to explicitly deserialize the content of
+an Any value object, possibly using an auxiliary selector to figure out its
+ASN.1 type and pick the appropriate decoder.
+</p>
+
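+<p>
+Here is a minimal sketch of that deserialization step, continuing the snippet
+above with the BER decoder covered later in this paper (the substrate
+b'\x02\x01\x01' happens to be a BER-encoded INTEGER of value 1):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.codec.ber import decoder
+>>> innerValue, rest = decoder.decode(bytes(someValue))
+>>> innerValue
+Integer(1)
+>>>
+</pre>
+</td></tr></table>
+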
+<p>
+There will be some more talk and code snippets covering Any type in the codecs
+chapters that follow.
+</p>
+
+<a name="1.4"></a>
+<h4>
+1.4 Subtype constraints
+</h4>
+
+<p>
+Most ASN.1 types can correspond to an infinite set of values. To adapt to a
+particular application's data model and needs, ASN.1 provides a mechanism
+for limiting the infinite set to the values that make sense in the
+particular case.
+</p>
+
+<p>
+Imposing value constraints on an ASN.1 type can also be seen as creating
+a subtype from its base type.
+</p>
+
+<p>
+In pyasn1, constraints take the shape of immutable objects capable
+of evaluating a given value against constraint-specific requirements.
+The constraint object is a property of a pyasn1 type. Like the TagSet property
+associated with every pyasn1 type, constraints can never be modified
+in place. The only way to modify a pyasn1 type's constraint is to associate
+a new constraint object with a new pyasn1 type object.
+</p>
+
+<p>
+A handful of different flavors of <i>constraints</i> are defined in ASN.1.
+We will discuss them one by one in the following chapters and also explain
+how to combine them and apply them to types.
+</p>
+
+<a name="1.4.1"></a>
+<h4>
+1.4.1 Single value constraint
+</h4>
+
+<p>
+This kind of constraint allows for limiting a type to a finite, specified set
+of values.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+DialButton ::= OCTET STRING (
+ "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
+)
+</pre>
+</td></tr></table>
+
+<p>
+Its pyasn1 implementation would look like:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import constraint
+>>> c = constraint.SingleValueConstraint(
+...   '0','1','2','3','4','5','6','7','8','9'
+... )
+>>> c
+SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+>>> c('0')
+>>> c('A')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) failed at: A
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As can be seen in the snippet above, if a value violates the constraint, an
+exception will be thrown. A constrained pyasn1 type object holds a
+reference to a constraint object (or a combination of them, as will be
+explained later) and calls it for value verification.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class DialButton(univ.OctetString):
+... subtypeSpec = constraint.SingleValueConstraint(
+... '0','1','2','3','4','5','6','7','8','9'
+... )
+>>> DialButton('0')
+DialButton(b'0')
+>>> DialButton('A')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) failed at: A
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+A constrained pyasn1 value object can never hold a violating value.
+</p>
+
+<a name="1.4.2"></a>
+<h4>
+1.4.2 Value range constraint
+</h4>
+
+<p>
+A pair of values, compliant with the type being constrained, denote the lower
+and upper bounds of the allowed range of values of the type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Teenagers ::= INTEGER (13..19)
+</pre>
+</td></tr></table>
+
+<p>
+And in pyasn1 terms:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class Teenagers(univ.Integer):
+... subtypeSpec = constraint.ValueRangeConstraint(13, 19)
+>>> Teenagers(14)
+Teenagers(14)
+>>> Teenagers(20)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueRangeConstraint(13, 19) failed at: 20
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The value range constraint usually applies to numeric types.
+</p>
+
+<a name="1.4.3"></a>
+<h4>
+1.4.3 Size constraint
+</h4>
+
+<p>
+It is sometimes convenient to set or limit the allowed size of a data item
+to be sent from one application to another to manage bandwidth and memory
+consumption issues. Size constraint specifies the lower and upper bounds
+of the size of a valid value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+TwoBits ::= BIT STRING (SIZE (2))
+</pre>
+</td></tr></table>
+
+<p>
+The same grammar expressed in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class TwoBits(univ.BitString):
+... subtypeSpec = constraint.ValueSizeConstraint(2, 2)
+>>> TwoBits((1,1))
+TwoBits("'11'B")
+>>> TwoBits((1,1,0))
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueSizeConstraint(2, 2) failed at: (1, 1, 0)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The size constraint can be applied to potentially massive values - bit or
+strings, SEQUENCE OF/SET OF values.
+</p>
+
+<a name="1.4.4"></a>
+<h4>
+1.4.4 Alphabet constraint
+</h4>
+
+<p>
+The permitted alphabet constraint is similar to the single value constraint,
+but the constraint applies to individual characters of a value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MorseCode ::= PrintableString (FROM ("."|"-"|" "))
+</pre>
+</td></tr></table>
+
+<p>
+And in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class MorseCode(char.PrintableString):
+... subtypeSpec = constraint.PermittedAlphabetConstraint(".", "-", " ")
+>>> MorseCode("...---...")
+MorseCode('...---...')
+>>> MorseCode("?")
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ PermittedAlphabetConstraint(".", "-", " ") failed at: "?"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The current implementation does not handle ranges of characters in a
+constraint (the FROM "A".."Z" syntax); one has to list every character of
+the range explicitly.
+</p>
+
+<a name="1.4.5"></a>
+<h4>
+1.4.5 Constraint combinations
+</h4>
+
+<p>
+Up to this moment, we used a single constraint per ASN.1 type. The standard,
+however, allows for combining multiple individual constraints into
+intersections, unions and exclusions.
+</p>
+
+<p>
+In the pyasn1 data model, all of these methods of constraint combination are
+implemented as constraint-like objects holding individual constraint (or
+combination) objects. Like terminal constraint objects, combination objects
+are capable of performing value verification against their set of enclosed
+constraints, according to the logic of the particular combination.
+</p>
+
+<p>
+Constraints intersection verification succeeds only if a value is
+compliant with each constraint in the set. To begin with, the following
+specification will constitute a valid telephone number:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+PhoneNumber ::= NumericString (FROM ("0".."9")) (SIZE (11))
+</pre>
+</td></tr></table>
+
+<p>
+Constraint intersection object serves the logic above:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class PhoneNumber(char.NumericString):
+... subtypeSpec = constraint.ConstraintsIntersection(
+... constraint.PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+... constraint.ValueSizeConstraint(11, 11)
+... )
+>>> PhoneNumber('79039343212')
+PhoneNumber('79039343212')
+>>> PhoneNumber('?9039343212')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsIntersection(
+ PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+ ValueSizeConstraint(11, 11)) failed at:
+    PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9') failed at: "?9039343212"
+>>> PhoneNumber('9343212')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsIntersection(
+ PermittedAlphabetConstraint('0','1','2','3','4','5','6','7','8','9'),
+ ValueSizeConstraint(11, 11)) failed at:
+    ValueSizeConstraint(11, 11) failed at: "9343212"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+A union of constraints works by making sure that a value is compliant
+with any of the constraints in the set. For instance:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+CapitalOrSmall ::= IA5String (FROM ('A','B','C') | FROM ('a','b','c'))
+</pre>
+</td></tr></table>
+
+<p>
+It's important to note that a value must fully comply with a single
+constraint in the set. In the specification above, a value of all small or
+all capital letters is compliant, but a mix of small and capital letters is
+not. Here's its pyasn1 analogue:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, constraint
+>>> class CapitalOrSmall(char.IA5String):
+... subtypeSpec = constraint.ConstraintsUnion(
+... constraint.PermittedAlphabetConstraint('A','B','C'),
+... constraint.PermittedAlphabetConstraint('a','b','c')
+... )
+>>> CapitalOrSmall('ABBA')
+CapitalOrSmall('ABBA')
+>>> CapitalOrSmall('abba')
+CapitalOrSmall('abba')
+>>> CapitalOrSmall('Abba')
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsUnion(PermittedAlphabetConstraint('A', 'B', 'C'),
+ PermittedAlphabetConstraint('a', 'b', 'c')) failed at: failed for "Abba"
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Finally, the exclusion constraint simply negates the logic of value
+verification at a constraint. In the following example, any integer value
+is allowed in the type except zero.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+NoZero ::= INTEGER (ALL EXCEPT 0)
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1 the above definition would read:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> class NoZero(univ.Integer):
+... subtypeSpec = constraint.ConstraintsExclusion(
+... constraint.SingleValueConstraint(0)
+... )
+>>> NoZero(1)
+NoZero(1)
+>>> NoZero(0)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ConstraintsExclusion(SingleValueConstraint(0)) failed at: 0
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The depth of such a constraint tree, built with constraint combination objects
+at its nodes, has no explicit limit. Value verification is performed in a
+recursive manner until a definite answer is found.
+</p>
+
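+<p>
+As an illustration of such nesting, here is a sketch of a two-level
+constraint tree built from the combination objects discussed above (recall
+that calling a constraint object with a compliant value returns silently,
+while a violating value raises an exception):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import constraint
+>>> c = constraint.ConstraintsIntersection(
+...   constraint.ConstraintsUnion(
+...     constraint.SingleValueConstraint(1, 2),
+...     constraint.ValueRangeConstraint(10, 20)
+...   ),
+...   constraint.ConstraintsExclusion(
+...     constraint.SingleValueConstraint(15)
+...   )
+... )
+>>> c(12)
+>>> c(15)
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError: ...
+>>>
+</pre>
+</td></tr></table>
+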
+<a name="1.5"></a>
+<h4>
+1.5 Types relationships
+</h4>
+
+<p>
+In the course of data processing in an application, it is sometimes
+convenient to figure out the type relationships between pyasn1 type or
+value objects. Formally, two things influence the relationship between pyasn1
+types: the <i>tag set</i> and the <i>subtype constraints</i>. One pyasn1 type
+is considered to be a derivative of another if their TagSet and Constraint
+objects are derivatives of one another.
+</p>
+
+<p>
+The following example illustrates the concept (we use the same tagset but
+different constraints for simplicity):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> i1 = univ.Integer(subtypeSpec=constraint.ValueRangeConstraint(3,8))
+>>> i2 = univ.Integer(subtypeSpec=constraint.ConstraintsIntersection(
+... constraint.ValueRangeConstraint(3,8),
+... constraint.ValueRangeConstraint(4,7)
+... ) )
+>>> i1.isSameTypeWith(i2)
+False
+>>> i1.isSuperTypeOf(i2)
+True
+>>> i1.isSuperTypeOf(i1)
+True
+>>> i2.isSuperTypeOf(i1)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As can be seen in the above code snippet, there are two methods of any pyasn1
+type/value object that test types for their relationship:
+<b>isSameTypeWith</b>() and <b>isSuperTypeOf</b>(). The former is
+self-descriptive, while the latter yields true if the argument appears
+to be a pyasn1 object whose tag set and constraints are derived from those
+of the object being called.
+</p>
+
+<a name="2"></a>
+<h3>
+2. Codecs
+</h3>
+
+<p>
+In the ASN.1 context, a
+<a href=http://en.wikipedia.org/wiki/Codec>codec</a>
+is a program that transforms between concrete data structures and a stream
+of octets suitable for transmission over the wire. This serialized form of
+data is sometimes called <i>substrate</i> or <i>essence</i>.
+</p>
+
+<p>
+In the pyasn1 implementation, the substrate takes the shape of Python 3 bytes
+or Python 2 string objects.
+</p>
+
+<p>
+One of the properties of a codec is its ability to cope with incomplete
+data and/or substrate, which implies that the codec is stateful. In other
+words, when a decoder runs out of substrate while the data item being
+recovered is still incomplete, a stateful codec would suspend and complete
+the data item recovery whenever the rest of the substrate becomes available.
+Similarly, a stateful encoder would encode data items in multiple steps,
+waiting for source data to arrive. Codec restartability is especially
+important when an application deals with large volumes of data and/or runs
+on low RAM. For an interesting discussion on codec options and design
+choices, refer to the
+<a href=http://directory.apache.org/subprojects/asn1/>Apache ASN.1 project</a>.
+</p>
+
+<p>
+As of this writing, the codecs implemented in pyasn1 are all stateless, mostly
+to keep the code simple.
+</p>
+
+<p>
+The pyasn1 package currently supports the
+<a href=http://en.wikipedia.org/wiki/Basic_encoding_rules>BER</a> codec and
+its variations --
+<a href=http://en.wikipedia.org/wiki/Canonical_encoding_rules>CER</a> and
+<a href=http://en.wikipedia.org/wiki/Distinguished_encoding_rules>DER</a>.
+More ASN.1 codecs are planned for implementation in the future.
+</p>
+
+<a name="2.1"></a>
+<h4>
+2.1 Encoders
+</h4>
+
+<p>
+An encoder is used for transforming pyasn1 value objects into substrate. Only
+pyasn1 value objects can be serialized; attempts to process pyasn1 type
+objects will cause encoder failure.
+</p>
+
+<p>
+The following code will create a pyasn1 Integer object and serialize it with
+BER encoder:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(univ.Integer(123456))
+b'\x02\x03\x01\xe2@'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The BER standard also defines a so-called <i>indefinite length</i> encoding
+form, which makes the processing of large data items more memory-efficient.
+It is mostly useful when the encoder does not have the whole value all at
+once and the length of the value cannot be determined at the beginning of
+encoding.
+</p>
+
+<p>
+<i>Constructed encoding</i> is another feature of BER, closely related to the
+indefinite length form. In essence, a large scalar value (such as an ASN.1
+character string or BIT STRING value) could be chopped into smaller chunks by
+the encoder and transmitted incrementally to limit memory consumption. Unlike
+the indefinite length case, the length of the whole value must be known in
+advance when using the constructed, definite length encoding form.
+</p>
+
+<p>
+Since pyasn1 codecs are not restartable, the pyasn1 encoder may only encode a
+data item all at once. However, even in this case, generating indefinite
+length encoding may help a low-memory receiver, running a restartable decoder,
+to process a large data item.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder
+>>> encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... defMode=False,
+... maxChunkSize=8
+... )
+b'$\x80\x04\x08The quic\x04\x08k brown \x04\x08fox jump\x04\x08s over \
+t\x04\x08he lazy \x04\x03dog\x00\x00'
+>>>
+>>> encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... maxChunkSize=8
+... )
+b'$7\x04\x08The quic\x04\x08k brown \x04\x08fox jump\x04\x08s over \
+t\x04\x08he lazy \x04\x03dog'
+</pre>
+</td></tr></table>
+
+<p>
+Setting the <b>defMode</b> encoder parameter to False disables the definite
+length encoding mode, while the optional <b>maxChunkSize</b> parameter
+specifies the desired substrate chunk size that influences memory
+requirements at the decoder's end.
+</p>
+
+<p>
+To use the CER or DER encoders one needs to explicitly import and call
+them - the APIs are all compatible.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder as ber_encoder
+>>> from pyasn1.codec.cer import encoder as cer_encoder
+>>> from pyasn1.codec.der import encoder as der_encoder
+>>> ber_encoder.encode(univ.Boolean(True))
+b'\x01\x01\x01'
+>>> cer_encoder.encode(univ.Boolean(True))
+b'\x01\x01\xff'
+>>> der_encoder.encode(univ.Boolean(True))
+b'\x01\x01\xff'
+>>>
+</pre>
+</td></tr></table>
+
+<a name="2.2"></a>
+<h4>
+2.2 Decoders
+</h4>
+
+<p>
+In the process of decoding, pyasn1 value objects are created and linked to
+each other, based on the information contained in the substrate. Thus,
+the original pyasn1 value object(s) are recovered.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(univ.Boolean(True))
+>>> decoder.decode(substrate)
+(Boolean('True(1)'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Commenting on the code snippet above: the pyasn1 decoder accepts substrate
+as an argument and returns a tuple of a pyasn1 value object (possibly a
+top-level one in case of a constructed object) and the unprocessed part
+of the input substrate.
+</p>
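+
+<p>
+Since the decoder hands back the unprocessed remainder, several values can
+be pulled out of a single octet stream one by one. A minimal sketch
+(assuming two substrates simply concatenated back to back):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(univ.Integer(1)) + encoder.encode(univ.Integer(2))
+>>> while substrate:
+...     value, substrate = decoder.decode(substrate)
+...     print(value)
+...
+1
+2
+>>>
+</pre>
+</td></tr></table>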
+
+<p>
+All pyasn1 decoders can handle both definite and indefinite length
+encoding modes automatically; explicit switching from one mode
+to the other is not required.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(
+... univ.OctetString('The quick brown fox jumps over the lazy dog'),
+... defMode=False,
+... maxChunkSize=8
+... )
+>>> decoder.decode(substrate)
+(OctetString(b'The quick brown fox jumps over the lazy dog'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Speaking of BER/CER/DER encoding, in many situations the substrate may not
+contain all the information needed for complete and accurate recovery of
+ASN.1 values. The most obvious cases include implicitly tagged ASN.1 types
+and constrained types.
+</p>
+
+<p>
+As discussed earlier in this handbook, when an ASN.1 type is implicitly
+tagged, the previous outermost tag is lost and never appears in substrate.
+If it is the base tag that gets lost, the decoder is unable to pick a
+type-specific value decoder from its table of built-in types, and therefore
+cannot recover the value part based only on the information contained in
+substrate. The approach taken by the pyasn1 decoder is to use a prototype
+pyasn1 type object (or a set of them) to <i>guide</i> the decoding process
+by matching [possibly incomplete] tags recovered from substrate with those
+found in prototype pyasn1 type objects (also called pyasn1 specification
+objects further in this paper).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import decoder
+>>> decoder.decode(b'\x02\x01\x0c', asn1Spec=univ.Integer())
+(Integer(12), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The decoder would neither modify the pyasn1 specification object nor use
+its current values (if it's a pyasn1 value object), but rather use it as
+a hint for choosing the proper decoder and as a pattern for creating new
+objects:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> i = univ.Integer(12345).subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> substrate = encoder.encode(i)
+>>> substrate
+b'\x9f(\x0209'
+>>> decoder.decode(substrate)
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error:
+ TagSet(Tag(tagClass=128, tagFormat=0, tagId=40)) not in asn1Spec
+>>> decoder.decode(substrate, asn1Spec=i)
+(Integer(12345), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Notice in the example above that an attempt to run the decoder without
+passing a pyasn1 specification object fails because the recovered tag does
+not belong to any of the built-in types.
+</p>
+
+<p>
+Another important feature of guided decoder operation is the use of
+value constraints possibly present in the pyasn1 specification object.
+To explain this, we will decode a random integer object into a generic
+Integer and a constrained one.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, constraint
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> class DialDigit(univ.Integer):
+... subtypeSpec = constraint.ValueRangeConstraint(0,9)
+>>> substrate = encoder.encode(univ.Integer(13))
+>>> decoder.decode(substrate)
+(Integer(13), b'')
+>>> decoder.decode(substrate, asn1Spec=DialDigit())
+Traceback (most recent call last):
+...
+pyasn1.type.error.ValueConstraintError:
+ ValueRangeConstraint(0, 9) failed at: 13
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Similarly to encoders, to use the CER or DER decoders an application has to
+explicitly import and call them - all APIs are compatible.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder as ber_encoder
+>>> substrate = ber_encoder.encode(univ.OctetString('http://pyasn1.sf.net'))
+>>>
+>>> from pyasn1.codec.ber import decoder as ber_decoder
+>>> from pyasn1.codec.cer import decoder as cer_decoder
+>>> from pyasn1.codec.der import decoder as der_decoder
+>>>
+>>> ber_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>> cer_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>> der_decoder.decode(substrate)
+(OctetString(b'http://pyasn1.sf.net'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="2.2.1"></a>
+<h4>
+2.2.1 Decoding untagged types
+</h4>
+
+<p>
+It has already been mentioned that ASN.1 has two "special case" types:
+CHOICE and ANY. They differ from other types in terms of tagging - unless
+these two are additionally tagged, neither of them will have a tag of its
+own. Therefore these types become invisible in substrate and can not be
+recovered without passing a pyasn1 specification object to the decoder.
+</p>
+
+<p>
+To explain the issue, we will first prepare a Choice object to deal with:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedtype
+>>> class CodeOrMessage(univ.Choice):
+... componentType = namedtype.NamedTypes(
+... namedtype.NamedType('code', univ.Integer()),
+... namedtype.NamedType('message', univ.OctetString())
+... )
+>>>
+>>> codeOrMessage = CodeOrMessage()
+>>> codeOrMessage.setComponentByName('message', 'my string value')
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Let's now encode this Choice object and then decode its substrate
+with and without a pyasn1 specification object:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> substrate = encoder.encode(codeOrMessage)
+>>> substrate
+b'\x04\x0fmy string value'
+>>> encoder.encode(univ.OctetString('my string value'))
+b'\x04\x0fmy string value'
+>>>
+>>> decoder.decode(substrate)
+(OctetString(b'my string value'), b'')
+>>> codeOrMessage, substrate = decoder.decode(substrate, asn1Spec=CodeOrMessage())
+>>> print(codeOrMessage.prettyPrint())
+CodeOrMessage:
+ message=b'my string value'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The first thing to notice in the listing above is that the substrate
+produced for our Choice value object is identical to the substrate for an
+OctetString object initialized to the same value. In other words, any
+information about the Choice component is absent from the encoding.
+</p>
+
+<p>
+Sure enough, that kind of substrate will decode into an OctetString object,
+unless the original Choice type object is passed to the decoder to guide
+the decoding process.
+</p>
+
+<p>
+Similarly, the untagged ANY type behaves differently at the decoding
+phase - when the decoder bumps into an Any object in the pyasn1
+specification, it stops decoding and puts all the substrate into a new Any
+value object in the form of an octet string. The concerned application
+could then re-run the decoder with an additional, more exact pyasn1
+specification object to recover the contents of the Any object.
+</p>
+
+<p>
+As mentioned elsewhere in this paper, the Any type allows incomplete
+or changing ASN.1 specifications to be handled gracefully by the decoder
+and applications.
+</p>
+
+<p>
+To illustrate the workings of the Any type, we'll have to set the stage
+by encoding a pyasn1 object and then putting its substrate into an Any
+object.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> innerSubstrate = encoder.encode(univ.Integer(1234))
+>>> innerSubstrate
+b'\x02\x02\x04\xd2'
+>>> any = univ.Any(innerSubstrate)
+>>> any
+Any(b'\x02\x02\x04\xd2')
+>>> substrate = encoder.encode(any)
+>>> substrate
+b'\x02\x02\x04\xd2'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As with the Choice type encoding, there are no traces of the Any type in
+substrate. Obviously, the substrate we are dealing with will decode into
+the inner [Integer] component, unless a pyasn1 specification is given to
+guide the decoder. Continuing the previous code:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> from pyasn1.codec.ber import encoder, decoder
+
+>>> decoder.decode(substrate)
+(Integer(1234), b'')
+>>> any, substrate = decoder.decode(substrate, asn1Spec=univ.Any())
+>>> any
+Any(b'\x02\x02\x04\xd2')
+>>> decoder.decode(any.asOctets())
+(Integer(1234), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Both CHOICE and ANY types are widely used in practice. The reader is
+welcome to take a look at
+<a href=http://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt>
+ASN.1 specifications of X.509 applications</a> for more information.
+</p>
+
+<a name="2.2.2"></a>
+<h4>
+2.2.2 Ignoring unknown types
+</h4>
+
+<p>
+When dealing with a loosely specified ASN.1 structure, the receiving
+end may not be aware of some types present in the substrate. It may be
+convenient then to turn the decoder into a recovery mode. Whilst there, the
+decoder will not bail out when it hits an unknown tag but rather treat it
+as an Any type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> from pyasn1.codec.ber import encoder, decoder
+>>> taggedInt = univ.Integer(12345).subtype(
+... implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> substrate = encoder.encode(taggedInt)
+>>> decoder.decode(substrate)
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error: TagSet(Tag(tagClass=128, tagFormat=0, tagId=40)) not in asn1Spec
+>>>
+>>> decoder.decode.defaultErrorState = decoder.stDumpRawValue
+>>> decoder.decode(substrate)
+(Any(b'\x9f(\x0209'), b'')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+It's also possible to configure a custom decoder to handle unknown tags
+found in substrate. This can be done by means of the
+<b>defaultRawDecoder</b> attribute holding a reference to a type decoder
+object. Refer to the source for API details.
+</p>
+
+<a name="3"></a>
+<h3>
+3. Feedback and getting help
+</h3>
+
+<p>
+Although the pyasn1 software is almost a decade old and used in many
+production environments, it may still have bugs and unimplemented pieces.
+Anyone who happens to run into such a defect is welcome to complain to the
+<a href=mailto:pyasn1-users@lists.sourceforge.net>pyasn1 mailing list</a>
+or, better yet, fix the issue and send
+<a href=mailto:ilya@glas.net>me</a> the patch.
+</p>
+
+<p>
+Typically, pyasn1 is used for building arbitrary protocol support into
+various applications. This involves manual translation of ASN.1 data
+structures into their pyasn1 implementations. To save time and effort,
+data structures for some of the popular protocols are pre-programmed
+and kept for further re-use in the form of the
+<a href=http://sourceforge.net/projects/pyasn1/files/pyasn1-modules/>
+pyasn1-modules package</a>. For instance, many structures for PKI (X.509,
+PKCS#*, CRMF, OCSP), LDAP and SNMP are present.
+Application authors are advised to import and use relevant modules
+from that package whenever the needed protocol structures are already
+there. New protocol module contributions are welcome.
+</p>
+
+<p>
+And finally, the latest pyasn1 package revision is available for free
+download from
+<a href=http://sourceforge.net/projects/pyasn1/>project home</a> and
+also from the
+<a href=http://pypi.python.org/pypi>Python package repository</a>.
+</p>
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/scalar.html b/python/pyasn1/doc/scalar.html
new file mode 100644
index 000000000..e5ccefe60
--- /dev/null
+++ b/python/pyasn1/doc/scalar.html
@@ -0,0 +1,794 @@
+<html>
+<title>
+PyASN1 data model and scalar types
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+
+<h3>
+1. Data model for ASN.1 types
+</h3>
+
+<p>
+All ASN.1 types could be categorized into two groups: scalar (also called
+simple or primitive) and constructed. The first group is populated by
+well-known types like Integer or String. Members of the constructed group
+hold other types (simple or constructed) as their inner components, thus
+they are semantically close to records or lists of a programming language.
+</p>
+
+<p>
+In pyasn1, all ASN.1 types and values are implemented as Python objects.
+The same pyasn1 object can represent either an ASN.1 type and/or a value,
+depending on the presence of a value initializer on object instantiation.
+We will further refer to these as <i>pyasn1 type object</i> versus <i>pyasn1
+value object</i>.
+</p>
+
+<p>
+Primitive ASN.1 types are implemented as immutable scalar objects. Their
+values could be used just like the corresponding native Python values
+(integers, strings/bytes etc) and freely mixed with them in expressions.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> asn1IntegerValue = univ.Integer(12)
+>>> asn1IntegerValue - 2
+10
+>>> univ.OctetString('abc') == 'abc'
+True # Python 2
+>>> univ.OctetString(b'abc') == b'abc'
+True # Python 3
+</pre>
+</td></tr></table>
+
+<p>
+It would be an error to perform an operation on a pyasn1 type object
+as it holds no value to deal with:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> asn1IntegerType = univ.Integer()
+>>> asn1IntegerType - 2
+...
+pyasn1.error.PyAsn1Error: No value for __coerce__()
+</pre>
+</td></tr></table>
+
+<a name="1.1"></a>
+<h4>
+1.1 Scalar types
+</h4>
+
+<p>
+In the sub-sections that follow we will explain pyasn1 mapping to those
+primitive ASN.1 types. Both, ASN.1 notation and corresponding pyasn1
+syntax will be given in each case.
+</p>
+
+<a name="1.1.1"></a>
+<h4>
+1.1.1 Boolean type
+</h4>
+
+<p>
+This is the simplest type, whose values could be either True or False.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; type specification
+FunFactorPresent ::= BOOLEAN
+
+;; values declaration and assignment
+pythonFunFactor FunFactorPresent ::= TRUE
+cobolFunFactor FunFactorPresent ::= FALSE
+</pre>
+</td></tr></table>
+
+<p>
+And here's pyasn1 version of it:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> class FunFactorPresent(univ.Boolean): pass
+...
+>>> pythonFunFactor = FunFactorPresent(True)
+>>> cobolFunFactor = FunFactorPresent(False)
+>>> pythonFunFactor
+FunFactorPresent('True(1)')
+>>> cobolFunFactor
+FunFactorPresent('False(0)')
+>>> pythonFunFactor == cobolFunFactor
+False
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.1.2"></a>
+<h4>
+1.1.2 Null type
+</h4>
+
+<p>
+The NULL type is sometimes used to express the absence of any information.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; type specification
+Vote ::= CHOICE {
+  agreed BOOLEAN,
+  skip NULL
+}
+
+;; value declaration and assignment
+myVote Vote ::= skip:NULL
+</pre>
+</td></tr></table>
+
+<p>
+We will explain the CHOICE type later in this paper; meanwhile, here is
+the NULL type:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> skip = univ.Null()
+>>> skip
+Null('')
+>>>
+</pre>
+</td></tr></table>
+
+<a name="1.1.3"></a>
+<h4>
+1.1.3 Integer type
+</h4>
+
+<p>
+ASN.1 defines the values of the Integer type as negative or positive whole
+numbers of whatever length. This definition plays nicely with Python, as
+the latter places no limit on integers. However, some ASN.1 implementations
+may impose certain limits on integer value ranges. Keep that in mind when
+designing new data structures.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; values specification
+age-of-universe INTEGER ::= 13750000000
+mean-martian-surface-temperature INTEGER ::= -63
+</pre>
+</td></tr></table>
+
+<p>
+A rather straightforward mapping into pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> ageOfUniverse = univ.Integer(13750000000)
+>>> ageOfUniverse
+Integer(13750000000)
+>>>
+>>> meanMartianSurfaceTemperature = univ.Integer(-63)
+>>> meanMartianSurfaceTemperature
+Integer(-63)
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+ASN.1 allows assigning human-friendly names to particular values of
+an INTEGER type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+Temperature ::= INTEGER {
+ freezing(0),
+ boiling(100)
+}
+</pre>
+</td></tr></table>
+
+<p>
+The Temperature type expressed in pyasn1:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval
+>>> class Temperature(univ.Integer):
+... namedValues = namedval.NamedValues(('freezing', 0), ('boiling', 100))
+...
+>>> t = Temperature(0)
+>>> t
+Temperature('freezing(0)')
+>>> t + 1
+Temperature(1)
+>>> t + 100
+Temperature('boiling(100)')
+>>> t = Temperature('boiling')
+>>> t
+Temperature('boiling(100)')
+>>> Temperature('boiling') / 2
+Temperature(50)
+>>> -1 < Temperature('freezing')
+True
+>>> 47 > Temperature('boiling')
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+These value labels have no effect on Integer type operations; any value
+still could be assigned to a type (information on value constraints will
+follow further in this paper).
+</p>
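+
+<p>
+For instance (a quick sketch reusing the Temperature class from above),
+a value with no label attached is still perfectly legal:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> Temperature(-40)
+Temperature(-40)
+>>> Temperature(451)
+Temperature(451)
+>>>
+</pre>
+</td></tr></table>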
+
+<a name="1.1.4"></a>
+<h4>
+1.1.4 Enumerated type
+</h4>
+
+<p>
+The ASN.1 Enumerated type differs from an Integer type in a number of ways.
+Most importantly, its instance can only hold a value that belongs
+to a set of values specified on type declaration.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+error-status ::= ENUMERATED {
+ no-error(0),
+ authentication-error(10),
+ authorization-error(20),
+ general-failure(51)
+}
+</pre>
+</td></tr></table>
+
+<p>
+When constructing the Enumerated type we will use two pyasn1 features:
+value labels (as mentioned above) and a value constraint (to be described
+in more detail later on).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval, constraint
+>>> class ErrorStatus(univ.Enumerated):
+... namedValues = namedval.NamedValues(
+... ('no-error', 0),
+... ('authentication-error', 10),
+... ('authorization-error', 20),
+... ('general-failure', 51)
+... )
+... subtypeSpec = univ.Enumerated.subtypeSpec + \
+... constraint.SingleValueConstraint(0, 10, 20, 51)
+...
+>>> errorStatus = ErrorStatus('no-error')
+>>> errorStatus
+ErrorStatus('no-error(0)')
+>>> errorStatus == ErrorStatus('general-failure')
+False
+>>> ErrorStatus('non-existing-state')
+Traceback (most recent call last):
+...
+pyasn1.error.PyAsn1Error: Can't coerce non-existing-state into integer
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The particular integer values associated with Enumerated value states
+have no meaning of their own. They should not be used as such or in any
+kind of math operation. Those integer values are only used by codecs to
+transfer state from one entity to another.
+</p>
+
+<a name="1.1.5"></a>
+<h4>
+1.1.5 Real type
+</h4>
+
+<p>
+Values of the Real type are a three-component tuple of mantissa, base and
+exponent. All three are integers.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+pi ::= REAL { mantissa 314159, base 10, exponent -5 }
+</pre>
+</td></tr></table>
+
+<p>
+Corresponding pyasn1 objects can be initialized with either a
+three-component tuple or a Python float. Infinite values could be expressed
+in a way compatible with the Python float type.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> pi = univ.Real((314159, 10, -5))
+>>> pi
+Real((314159, 10, -5))
+>>> float(pi)
+3.14159
+>>> pi == univ.Real(3.14159)
+True
+>>> univ.Real('inf')
+Real('inf')
+>>> univ.Real('-inf') == float('-inf')
+True
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+If a Real object is initialized from a Python float or yielded by a math
+operation, the base is set to decimal 10 (which affects encoding).
+</p>
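+
+<p>
+As a quick sanity check (a small sketch), the float value of a Real object
+is simply mantissa * base ** exponent:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> float(univ.Real((1, 10, 3)))  # 1 * 10 ** 3
+1000.0
+>>> univ.Real((1, 10, 3)) == 1000.0
+True
+>>>
+</pre>
+</td></tr></table>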
+
+<a name="1.1.6"></a>
+<h4>
+1.1.6 Bit string type
+</h4>
+
+<p>
+The ASN.1 BIT STRING type holds opaque binary data of arbitrary length.
+A BIT STRING value could be initialized by either a binary (base 2) or
+hex (base 16) value.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+public-key BIT STRING ::= '1010111011110001010110101101101
+ 1011000101010000010110101100010
+ 0110101010000111101010111111110'B
+
+signature BIT STRING ::= 'AF01330CD932093392100B39FF00DE0'H
+</pre>
+</td></tr></table>
+
+<p>
+The pyasn1 BitString objects can be initialized from native ASN.1 notation
+(base 2 or base 16 strings) or from a Python tuple of binary components.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> publicKey = univ.BitString(
+... "'1010111011110001010110101101101"
+... "1011000101010000010110101100010"
+... "0110101010000111101010111111110'B"
+... )
+>>> publicKey
+BitString("'10101110111100010101101011011011011000101010000010110101100010\
+0110101010000111101010111111110'B")
+>>> signature = univ.BitString(
+... "'AF01330CD932093392100B39FF00DE0'H"
+... )
+>>> signature
+BitString("'101011110000000100110011000011001101100100110010000010010011001\
+1100100100001000000001011001110011111111100000000110111100000'B")
+>>> fingerprint = univ.BitString(
+... (1, 0, 1, 1 ,0, 1, 1, 1, 0, 1, 0, 1)
+... )
+>>> fingerprint
+BitString("'101101110101'B")
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Another BIT STRING initialization method supported by ASN.1 notation
+is to specify only the bits set to one, along with their human-friendly
+labels and bit offsets relative to the beginning of the bit string. With
+this method, all bits not explicitly mentioned are set to zero.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+bit-mask BIT STRING ::= {
+ read-flag(0),
+ write-flag(2),
+ run-flag(4)
+}
+</pre>
+</td></tr></table>
+
+<p>
+To express this in pyasn1, we will employ the named values feature (as with
+the Enumerated type).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, namedval
+>>> class BitMask(univ.BitString):
+... namedValues = namedval.NamedValues(
+... ('read-flag', 0),
+... ('write-flag', 2),
+... ('run-flag', 4)
+... )
+>>> bitMask = BitMask('read-flag,run-flag')
+>>> bitMask
+BitMask("'10001'B")
+>>> tuple(bitMask)
+(1, 0, 0, 0, 1)
+>>> bitMask[4]
+1
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The BitString objects mimic the properties of the Python tuple type in
+terms of immutable sequence object protocol support.
+</p>
+
+<a name="1.1.7"></a>
+<h4>
+1.1.7 OctetString type
+</h4>
+
+<p>
+The OCTET STRING type is a confusing subject. According to the ASN.1
+specification, this type is similar to BIT STRING; the major difference
+is that the former operates in 8-bit chunks of data. What is important
+to note is that OCTET STRING was NOT designed to handle text strings - the
+standard provides many other types specialized for text content. For that
+reason, ASN.1 forbids initializing OCTET STRING values with "quoted text
+strings"; only binary or hex initializers, similar to BIT STRING ones,
+are allowed.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+thumbnail OCTET STRING ::= '1000010111101110101111000000111011'B
+thumbnail OCTET STRING ::= 'FA9823C43E43510DE3422'H
+</pre>
+</td></tr></table>
+
+<p>
+However, ASN.1 users (e.g. protocol designers) seem to ignore the original
+purpose of the OCTET STRING type - they use it for handling all kinds of
+data, including text strings.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+welcome-message OCTET STRING ::= "Welcome to ASN.1 wilderness!"
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, we have taken a liberal approach and allowed both BIT STRING
+style and quoted text initializers for the OctetString objects. To avoid
+possible collisions, quoted text is the default initialization syntax.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> thumbnail = univ.OctetString(
+... binValue='1000010111101110101111000000111011'
+... )
+>>> thumbnail
+OctetString(hexValue='85eebcec0')
+>>> thumbnail = univ.OctetString(
+... hexValue='FA9823C43E43510DE3422'
+... )
+>>> thumbnail
+OctetString(hexValue='fa9823c43e4351de34220')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The most frequent usage of the OctetString class is to instantiate it with
+a text string.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> welcomeMessage = univ.OctetString('Welcome to ASN.1 wilderness!')
+>>> welcomeMessage
+OctetString(b'Welcome to ASN.1 wilderness!')
+>>> print('%s' % welcomeMessage)
+Welcome to ASN.1 wilderness!
+>>> welcomeMessage[11:16]
+OctetString(b'ASN.1')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+OctetString objects support the immutable sequence object protocol.
+In other words, they behave like Python 3 bytes (or Python 2 strings).
+</p>
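+
+<p>
+A brief sketch of that protocol in action (assuming len() and concatenation
+behave as they do for bytes):
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> welcomeMessage = univ.OctetString('Welcome to ASN.1 wilderness!')
+>>> len(welcomeMessage)
+28
+>>> welcomeMessage + b'!'
+OctetString(b'Welcome to ASN.1 wilderness!!')
+>>>
+</pre>
+</td></tr></table>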
+
+<p>
+When running pyasn1 on Python 3, it's better to use the bytes objects for
+OctetString instantiation, as it's more reliable and efficient.
+</p>
+
+<p>
+Additionally, OctetString objects can also be instantiated with a sequence
+of 8-bit integers (ASCII codes).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> univ.OctetString((77, 101, 101, 103, 111))
+OctetString(b'Meego')
+</pre>
+</td></tr></table>
+
+<p>
+It is sometimes convenient to express OctetString instances as 8-bit
+characters (Python 3 bytes or Python 2 strings) or 8-bit integers.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> octetString = univ.OctetString('ABCDEF')
+>>> octetString.asNumbers()
+(65, 66, 67, 68, 69, 70)
+>>> octetString.asOctets()
+b'ABCDEF'
+</pre>
+</td></tr></table>
+
+<a name="1.1.8"></a>
+<h4>
+1.1.8 ObjectIdentifier type
+</h4>
+
+<p>
+Values of the OBJECT IDENTIFIER type are sequences of integers that could
+be used to identify virtually anything in the world. Various ASN.1-based
+protocols employ OBJECT IDENTIFIERs for their own identification needs.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+internet-id OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) dod(6) internet(1)
+}
+</pre>
+</td></tr></table>
+
+<p>
+One of the natural ways to map the OBJECT IDENTIFIER type onto a Python
+one is to use Python tuples of integers, so this approach is taken by
+pyasn1.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> internetId = univ.ObjectIdentifier((1, 3, 6, 1))
+>>> internetId
+ObjectIdentifier('1.3.6.1')
+>>> internetId[2]
+6
+>>> internetId[1:3]
+ObjectIdentifier('3.6')
+</pre>
+</td></tr></table>
+
+<p>
+A more human-friendly "dotted" notation is also supported.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> univ.ObjectIdentifier('1.3.6.1')
+ObjectIdentifier('1.3.6.1')
+</pre>
+</td></tr></table>
+
+<p>
+Symbolic names of the arcs of an object identifier, sometimes present in
+ASN.1 specifications, are not preserved or used in pyasn1 objects.
+</p>
+
+<p>
+The ObjectIdentifier objects mimic the properties of the Python tuple type
+in terms of immutable sequence object protocol support.
+</p>
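+
+<p>
+For example (a small sketch, assuming standard sequence semantics), length
+and tuple conversion work just as for native tuples:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ
+>>> internetId = univ.ObjectIdentifier((1, 3, 6, 1))
+>>> len(internetId)
+4
+>>> tuple(internetId)
+(1, 3, 6, 1)
+>>>
+</pre>
+</td></tr></table>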
+
+<a name="1.1.9"></a>
+<h4>
+1.1.9 Character string types
+</h4>
+
+<p>
+The ASN.1 standard introduces a diverse set of text-specific types. All of
+them were designed to handle various types of characters. Some of these
+types seem to be obsolete nowadays, as their target technologies are gone.
+Another issue to be aware of is that the raw OCTET STRING type is sometimes
+used in practice by ASN.1 users instead of the specialized character string
+types, despite the explicit prohibition imposed by the ASN.1 specification.
+</p>
+
+<p>
+Two types specific to ASN.1 are NumericString and PrintableString.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+welcome-message ::= PrintableString {
+ "Welcome to ASN.1 text types"
+}
+
+dial-pad-numbers ::= NumericString {
+ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
+}
+</pre>
+</td></tr></table>
+
+<p>
+Their pyasn1 implementations are:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> '%s' % char.PrintableString("Welcome to ASN.1 text types")
+'Welcome to ASN.1 text types'
+>>> dialPadNumbers = char.NumericString(
+...   "0" "1" "2" "3" "4" "5" "6" "7" "8" "9"
+... )
+>>> dialPadNumbers
+NumericString(b'0123456789')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The following types came to ASN.1 from ISO standards on character sets.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> char.VisibleString("abc")
+VisibleString(b'abc')
+>>> char.IA5String('abc')
+IA5String(b'abc')
+>>> char.TeletexString('abc')
+TeletexString(b'abc')
+>>> char.VideotexString('abc')
+VideotexString(b'abc')
+>>> char.GraphicString('abc')
+GraphicString(b'abc')
+>>> char.GeneralString('abc')
+GeneralString(b'abc')
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The last three types are relatively recent additions to the family of
+character string types: UniversalString, BMPString and UTF8String.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char
+>>> char.UniversalString("abc")
+UniversalString(b'abc')
+>>> char.BMPString('abc')
+BMPString(b'abc')
+>>> char.UTF8String('abc')
+UTF8String(b'abc')
+>>> utf8String = char.UTF8String('У попа была собака')
+>>> utf8String
+UTF8String(b'\xd0\xa3 \xd0\xbf\xd0\xbe\xd0\xbf\xd0\xb0 \xd0\xb1\xd1\x8b\xd0\xbb\xd0\xb0 \
+\xd1\x81\xd0\xbe\xd0\xb1\xd0\xb0\xd0\xba\xd0\xb0')
+>>> print(utf8String)
+У попа была собака
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, all character type objects behave like Python strings. None of
+them is currently constrained in terms of a valid alphabet, so it's up to
+the data source to keep an eye on data validation for these types.
+</p>
+
+<a name="1.1.10"></a>
+<h4>
+1.1.10 Useful types
+</h4>
+
+<p>
+There are three so-called useful types defined in the standard:
+ObjectDescriptor, GeneralizedTime and UTCTime. They all are subtypes
+of the GraphicString or VisibleString types; therefore, the useful types
+are character string types.
+</p>
+
+<p>
+The ASN.1 standard advises having an instance of the ObjectDescriptor
+type hold a human-readable description of the corresponding instance of
+the OBJECT IDENTIFIER type. There is no formal linkage between these
+instances and no provision for ObjectDescriptor uniqueness in the standard.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import useful
+>>> descrBER = useful.ObjectDescriptor(
+...   "Basic encoding of a single ASN.1 type"
+... )
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+GeneralizedTime and UTCTime types are designed to hold a human-readable
+timestamp in a universal and unambiguous form. The former provides
+more flexibility in notation while the latter is more strict but has
+Y2K issues.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+;; Mar 8 2011 12:00:00 MSK
+moscow-time GeneralizedTime ::= "20110308120000.0"
+;; Mar 8 2011 12:00:00 UTC
+utc-time GeneralizedTime ::= "201103081200Z"
+;; Mar 8 1998 12:00:00 UTC
+utc-time UTCTime ::= "9803081200Z"
+</pre>
+</td></tr></table>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import useful
+>>> moscowTime = useful.GeneralizedTime("20110308120000.0")
+>>> utcTime = useful.UTCTime("9803081200Z")
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Despite their intended use, these types possess no special time-related
+handling in pyasn1. They are just printable strings.
+</p>
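+
+<p>
+A short sketch (assuming the class hierarchy described above): the useful
+types can be checked for their character string ancestry and treated as
+plain strings:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import char, useful
+>>> issubclass(useful.GeneralizedTime, char.VisibleString)
+True
+>>> moscowTime = useful.GeneralizedTime("20110308120000.0")
+>>> str(moscowTime)[:4]  # just a string, no date arithmetic
+'2011'
+>>>
+</pre>
+</td></tr></table>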
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/doc/tagging.html b/python/pyasn1/doc/tagging.html
new file mode 100644
index 000000000..187f1180d
--- /dev/null
+++ b/python/pyasn1/doc/tagging.html
@@ -0,0 +1,233 @@
+<html>
+<title>
+Tagging in PyASN1
+</title>
+<head>
+</head>
+<body>
+<center>
+<table width=60%>
+<tr>
+<td>
+<a name="1.2"></a>
+<h4>
+1.2 Tagging in PyASN1
+</h4>
+
+<p>
+In order to continue with the Constructed ASN.1 types, we will first have
+to introduce the concept of tagging (and its pyasn1 implementation), as
+some of the Constructed types rely upon the tagging feature.
+</p>
+
+<p>
+When a value comes into an ASN.1-based system (received from a network
+or read from some storage), the receiving entity has to determine the
+type of the value in order to interpret and verify it accordingly.
+</p>
+
+<p>
+Historically, the first data serialization protocol introduced in
+ASN.1 was BER (Basic Encoding Rules). According to BER, any serialized
+value is packed into a triplet of (Type, Length, Value), where Type is a
+code that identifies the value (which is called a <i>tag</i> in ASN.1),
+Length is the number of bytes occupied by the value in its serialized form,
+and Value is the ASN.1 value in a form suitable for serial transmission or
+storage.
+</p>
+
+<p>
+For that reason almost every ASN.1 type has a tag (which is actually a
+BER type) associated with it by default.
+</p>
+
+<p>
+An ASN.1 tag could be viewed as a tuple of three numbers:
+(Class, Format, Number). While Number identifies a tag, the Class component
+is used to create scopes for Numbers. Four scopes are currently defined:
+UNIVERSAL, CONTEXT-SPECIFIC, APPLICATION and PRIVATE. The Format component
+is actually a one-bit flag - zero for tags associated with scalar types,
+and one for constructed types (to be discussed later on).
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] INTEGER
+MyOctetString ::= [APPLICATION 0] OCTET STRING
+</pre>
+</td></tr></table>
+
+<p>
+In pyasn1, tags are implemented as immutable, tuple-like objects:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> myTag = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10)
+>>> myTag
+Tag(tagClass=128, tagFormat=0, tagId=10)
+>>> tuple(myTag)
+(128, 0, 10)
+>>> myTag[2]
+10
+>>> myTag == tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 10)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+The default tag, associated with any ASN.1 type, could be extended or
+replaced to make a new type distinguishable from its ancestor. The standard
+provides two modes of tag mangling - IMPLICIT and EXPLICIT.
+</p>
+
+<p>
+EXPLICIT mode works by appending a new tag to the existing ones, thus
+creating an ordered set of tags. This set will be considered as a whole for
+type identification and encoding purposes. An important property of the
+EXPLICIT tagging mode is that it preserves base type information in the
+encoding, which makes it possible to completely recover type information
+from the encoding.
+</p>
+
+<p>
+When tagging in IMPLICIT mode, the outermost existing tag is dropped and
+replaced with a new one.
+</p>
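+
+<p>
+To see the difference between the two modes on the wire, here is a hedged
+sketch (the subtype() method and the BER encoder used here are covered
+elsewhere in this handbook) applying both modes to the same INTEGER value:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> from pyasn1.codec.ber import encoder
+>>> t = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)
+>>> encoder.encode(univ.Integer(1).subtype(implicitTag=t))
+b'\x80\x01\x01'
+>>> encoder.encode(univ.Integer(1).subtype(explicitTag=t))
+b'\xa0\x03\x02\x01\x01'
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Note how the explicit form keeps the inner INTEGER header (0x02 0x01)
+intact, while the implicit form replaces it.
+</p>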
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] IMPLICIT INTEGER
+MyOctetString ::= [APPLICATION 0] EXPLICIT OCTET STRING
+</pre>
+</td></tr></table>
+
+<p>
+To model both modes of tagging, a specialized container - the TagSet object
+(holding zero, one or more Tag objects) - is used in pyasn1.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> tagSet = tag.TagSet(
+... # base tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10),
+... # effective tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10))
+>>> tagSet.getBaseTag()
+Tag(tagClass=128, tagFormat=0, tagId=10)
+>>> tagSet = tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 20)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20))
+>>> tagSet = tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 30)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20),
+ Tag(tagClass=128, tagFormat=32, tagId=30))
+>>> tagSet = tagSet.tagImplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 40)
+... )
+>>> tagSet
+TagSet(Tag(tagClass=128, tagFormat=0, tagId=10),
+ Tag(tagClass=128, tagFormat=32, tagId=20),
+ Tag(tagClass=128, tagFormat=32, tagId=40))
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+As a side note: the "base tag" concept (accessible through the getBaseTag()
+method) is specific to pyasn1 -- the base tag is used to identify the
+original ASN.1 type of an object in question. The base tag never occurs in
+encoding and is mostly used internally by pyasn1 for choosing type-specific
+data processing algorithms. The "effective tag" is the one that always
+appears in encoding and is used in tag set comparison.
+</p>
+
+<p>
+Any two TagSet objects could be compared to see if one is a derivative
+of the other. Figuring this out is also useful in cases when type-specific
+data processing algorithms are to be chosen.
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import tag
+>>> tagSet1 = tag.TagSet(
+... # base tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10),
+... # effective tag
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10)
+... )
+>>> tagSet2 = tagSet1.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 20)
+... )
+>>> tagSet1.isSuperTagSetOf(tagSet2)
+True
+>>> tagSet2.isSuperTagSetOf(tagSet1)
+False
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+We will complete this discussion on tagging with a real-world example. The
+following ASN.1 tagged type:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+MyIntegerType ::= [12] EXPLICIT INTEGER
+</pre>
+</td></tr></table>
+
+<p>
+could be expressed in pyasn1 like this:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> class MyIntegerType(univ.Integer):
+... tagSet = univ.Integer.tagSet.tagExplicitly(
+... tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 12)
+... )
+>>> myInteger = MyIntegerType(12345)
+>>> myInteger.getTagSet()
+TagSet(Tag(tagClass=0, tagFormat=0, tagId=2),
+ Tag(tagClass=128, tagFormat=32, tagId=12))
+>>>
+</pre>
+</td></tr></table>
+
+<p>
+Referring to the above code: the tagSet class attribute is a property of
+any pyasn1 type object that assigns a default tag set to a pyasn1 value
+object. This default tagSet specification can be ignored and effectively
+replaced by some other tagSet value passed on object instantiation.
+</p>
+
+<p>
+It's important to understand that the tag set property of a pyasn1
+type/value object can never be modified in place. In other words, a pyasn1
+type/value object can never change its tags. The only way is to create a
+new pyasn1 type/value object and associate a different tag set with it.
+</p>
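+
+<p>
+For example (a minimal sketch), re-tagging always yields a new object and
+leaves the original one intact:
+</p>
+
+<table bgcolor="lightgray" border=0 width=100%><TR><TD>
+<pre>
+>>> from pyasn1.type import univ, tag
+>>> i = univ.Integer(1)
+>>> j = i.subtype(
+...   implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5)
+... )
+>>> i.getTagSet() == j.getTagSet()
+False
+>>> i.getTagSet()
+TagSet(Tag(tagClass=0, tagFormat=0, tagId=2))
+>>>
+</pre>
+</td></tr></table>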
+
+<hr>
+
+</td>
+</tr>
+</table>
+</center>
+</body>
+</html>
diff --git a/python/pyasn1/pyasn1/__init__.py b/python/pyasn1/pyasn1/__init__.py
new file mode 100644
index 000000000..88aff79c8
--- /dev/null
+++ b/python/pyasn1/pyasn1/__init__.py
@@ -0,0 +1,8 @@
+import sys
+
+# http://www.python.org/dev/peps/pep-0396/
+__version__ = '0.1.7'
+
+if sys.version_info[:2] < (2, 4):
+ raise RuntimeError('PyASN1 requires Python 2.4 or later')
+
diff --git a/python/pyasn1/pyasn1/codec/__init__.py b/python/pyasn1/pyasn1/codec/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/codec/ber/__init__.py b/python/pyasn1/pyasn1/codec/ber/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/ber/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/codec/ber/decoder.py b/python/pyasn1/pyasn1/codec/ber/decoder.py
new file mode 100644
index 000000000..be0cf4907
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/ber/decoder.py
@@ -0,0 +1,808 @@
+# BER decoder
+from pyasn1.type import tag, base, univ, char, useful, tagmap
+from pyasn1.codec.ber import eoo
+from pyasn1.compat.octets import oct2int, octs2ints, isOctetsType
+from pyasn1 import debug, error
+
+class AbstractDecoder:
+ protoComponent = None
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
+
+class AbstractSimpleDecoder(AbstractDecoder):
+ tagFormats = (tag.tagFormatSimple,)
+ def _createComponent(self, asn1Spec, tagSet, value=None):
+ if tagSet[0][1] not in self.tagFormats:
+ raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
+ if asn1Spec is None:
+ return self.protoComponent.clone(value, tagSet)
+ elif value is None:
+ return asn1Spec
+ else:
+ return asn1Spec.clone(value)
+
+class AbstractConstructedDecoder(AbstractDecoder):
+ tagFormats = (tag.tagFormatConstructed,)
+ def _createComponent(self, asn1Spec, tagSet, value=None):
+ if tagSet[0][1] not in self.tagFormats:
+ raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
+ if asn1Spec is None:
+ return self.protoComponent.clone(tagSet)
+ else:
+ return asn1Spec.clone()
+
+class EndOfOctetsDecoder(AbstractSimpleDecoder):
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ return eoo.endOfOctets, substrate[length:]
+
+class ExplicitTagDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Any('')
+ tagFormats = (tag.tagFormatConstructed,)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ if substrateFun:
+ return substrateFun(
+ self._createComponent(asn1Spec, tagSet, ''),
+ substrate, length
+ )
+ head, tail = substrate[:length], substrate[length:]
+ value, _ = decodeFun(head, asn1Spec, tagSet, length)
+ return value, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ if substrateFun:
+ return substrateFun(
+ self._createComponent(asn1Spec, tagSet, ''),
+ substrate, length
+ )
+ value, substrate = decodeFun(substrate, asn1Spec, tagSet, length)
+ terminator, substrate = decodeFun(substrate)
+ if eoo.endOfOctets.isSameTypeWith(terminator) and \
+ terminator == eoo.endOfOctets:
+ return value, substrate
+ else:
+ raise error.PyAsn1Error('Missing end-of-octets terminator')
+
+explicitTagDecoder = ExplicitTagDecoder()
+
+class IntegerDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Integer(0)
+ precomputedValues = {
+ '\x00': 0,
+ '\x01': 1,
+ '\x02': 2,
+ '\x03': 3,
+ '\x04': 4,
+ '\x05': 5,
+ '\x06': 6,
+ '\x07': 7,
+ '\x08': 8,
+ '\x09': 9,
+ '\xff': -1,
+ '\xfe': -2,
+ '\xfd': -3,
+ '\xfc': -4,
+ '\xfb': -5
+ }
+
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
+ state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if not head:
+ return self._createComponent(asn1Spec, tagSet, 0), tail
+ if head in self.precomputedValues:
+ value = self.precomputedValues[head]
+ else:
+ firstOctet = oct2int(head[0])
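+            # big-endian two's complement: start the accumulator from -1
+            # (all bits set) when the first octet has its sign bit on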
+ if firstOctet & 0x80:
+ value = -1
+ else:
+ value = 0
+ for octet in head:
+ value = value << 8 | oct2int(octet)
+ return self._createComponent(asn1Spec, tagSet, value), tail
+
+class BooleanDecoder(IntegerDecoder):
+ protoComponent = univ.Boolean(0)
+ def _createComponent(self, asn1Spec, tagSet, value=None):
+ return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0)
+
+class BitStringDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.BitString(())
+ tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
+ state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check?
+ if not head:
+ raise error.PyAsn1Error('Empty substrate')
+ trailingBits = oct2int(head[0])
+ if trailingBits > 7:
+ raise error.PyAsn1Error(
+ 'Trailing bits overflow %s' % trailingBits
+ )
+ head = head[1:]
+ lsb = p = 0; l = len(head)-1; b = ()
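+            # unpack bits MSB-first; per BER, the first octet above told us
+            # how many trailing bits of the last octet are unused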
+ while p <= l:
+ if p == l:
+ lsb = trailingBits
+ j = 7
+ o = oct2int(head[p])
+ while j >= lsb:
+ b = b + ((o>>j)&0x01,)
+ j = j - 1
+ p = p + 1
+ return self._createComponent(asn1Spec, tagSet, b), tail
+ r = self._createComponent(asn1Spec, tagSet, ())
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while head:
+ component, head = decodeFun(head)
+ r = r + component
+ return r, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ r = self._createComponent(asn1Spec, tagSet, '')
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while substrate:
+ component, substrate = decodeFun(substrate)
+ if eoo.endOfOctets.isSameTypeWith(component) and \
+ component == eoo.endOfOctets:
+ break
+ r = r + component
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+ return r, substrate
+
+class OctetStringDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.OctetString('')
+ tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
+ state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check?
+ return self._createComponent(asn1Spec, tagSet, head), tail
+ r = self._createComponent(asn1Spec, tagSet, '')
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while head:
+ component, head = decodeFun(head)
+ r = r + component
+ return r, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ r = self._createComponent(asn1Spec, tagSet, '')
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while substrate:
+ component, substrate = decodeFun(substrate)
+ if eoo.endOfOctets.isSameTypeWith(component) and \
+ component == eoo.endOfOctets:
+ break
+ r = r + component
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+ return r, substrate
+
+class NullDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Null('')
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ r = self._createComponent(asn1Spec, tagSet)
+ if head:
+ raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
+ return r, tail
+
+class ObjectIdentifierDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.ObjectIdentifier(())
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
+ state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if not head:
+ raise error.PyAsn1Error('Empty substrate')
+
+ # Get the first subid
+ subId = oct2int(head[0])
+ oid = divmod(subId, 40)
+
+ index = 1
+ substrateLen = len(head)
+ while index < substrateLen:
+ subId = oct2int(head[index])
+ index = index + 1
+ if subId == 128:
+ # ASN.1 spec forbids leading zeros (0x80) in sub-ID OID
+ # encoding, tolerating it opens a vulnerability.
+ # See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf page 7
+ raise error.PyAsn1Error('Invalid leading 0x80 in sub-OID')
+ elif subId > 128:
+ # Construct subid from a number of octets
+ nextSubId = subId
+ subId = 0
+ while nextSubId >= 128:
+ subId = (subId << 7) + (nextSubId & 0x7F)
+ if index >= substrateLen:
+ raise error.SubstrateUnderrunError(
+ 'Short substrate for sub-OID past %s' % (oid,)
+ )
+ nextSubId = oct2int(head[index])
+ index = index + 1
+ subId = (subId << 7) + nextSubId
+ oid = oid + (subId,)
+ return self._createComponent(asn1Spec, tagSet, oid), tail
+
+class RealDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Real()
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if not head:
+ return self._createComponent(asn1Spec, tagSet, 0.0), tail
+ fo = oct2int(head[0]); head = head[1:]
+        if fo & 0x80: # binary encoding
+ n = (fo & 0x03) + 1
+ if n == 4:
+ n = oct2int(head[0])
+ eo, head = head[:n], head[n:]
+ if not eo or not head:
+ raise error.PyAsn1Error('Real exponent screwed')
+ e = oct2int(eo[0]) & 0x80 and -1 or 0
+ while eo: # exponent
+ e <<= 8
+ e |= oct2int(eo[0])
+ eo = eo[1:]
+ p = 0
+ while head: # value
+ p <<= 8
+ p |= oct2int(head[0])
+ head = head[1:]
+ if fo & 0x40: # sign bit
+ p = -p
+ value = (p, 2, e)
+ elif fo & 0x40: # infinite value
+ value = fo & 0x01 and '-inf' or 'inf'
+ elif fo & 0xc0 == 0: # character encoding
+ try:
+ if fo & 0x3 == 0x1: # NR1
+ value = (int(head), 10, 0)
+ elif fo & 0x3 == 0x2: # NR2
+ value = float(head)
+ elif fo & 0x3 == 0x3: # NR3
+ value = float(head)
+ else:
+ raise error.SubstrateUnderrunError(
+ 'Unknown NR (tag %s)' % fo
+ )
+ except ValueError:
+ raise error.SubstrateUnderrunError(
+ 'Bad character Real syntax'
+ )
+ else:
+ raise error.SubstrateUnderrunError(
+ 'Unknown encoding (tag %s)' % fo
+ )
+ return self._createComponent(asn1Spec, tagSet, value), tail
+
+class SequenceDecoder(AbstractConstructedDecoder):
+ protoComponent = univ.Sequence()
+ def _getComponentTagMap(self, r, idx):
+ try:
+ return r.getComponentTagMapNearPosition(idx)
+ except error.PyAsn1Error:
+ return
+
+ def _getComponentPositionByType(self, r, t, idx):
+ return r.getComponentPositionNearType(t, idx)
+
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ r = self._createComponent(asn1Spec, tagSet)
+ idx = 0
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while head:
+ asn1Spec = self._getComponentTagMap(r, idx)
+ component, head = decodeFun(head, asn1Spec)
+ idx = self._getComponentPositionByType(
+ r, component.getEffectiveTagSet(), idx
+ )
+ r.setComponentByPosition(idx, component, asn1Spec is None)
+ idx = idx + 1
+ r.setDefaultComponents()
+ r.verifySizeSpec()
+ return r, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ r = self._createComponent(asn1Spec, tagSet)
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ idx = 0
+ while substrate:
+ asn1Spec = self._getComponentTagMap(r, idx)
+ component, substrate = decodeFun(substrate, asn1Spec)
+ if eoo.endOfOctets.isSameTypeWith(component) and \
+ component == eoo.endOfOctets:
+ break
+ idx = self._getComponentPositionByType(
+ r, component.getEffectiveTagSet(), idx
+ )
+ r.setComponentByPosition(idx, component, asn1Spec is None)
+ idx = idx + 1
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+ r.setDefaultComponents()
+ r.verifySizeSpec()
+ return r, substrate
+
+class SequenceOfDecoder(AbstractConstructedDecoder):
+ protoComponent = univ.SequenceOf()
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ r = self._createComponent(asn1Spec, tagSet)
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ asn1Spec = r.getComponentType()
+ idx = 0
+ while head:
+ component, head = decodeFun(head, asn1Spec)
+ r.setComponentByPosition(idx, component, asn1Spec is None)
+ idx = idx + 1
+ r.verifySizeSpec()
+ return r, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ r = self._createComponent(asn1Spec, tagSet)
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ asn1Spec = r.getComponentType()
+ idx = 0
+ while substrate:
+ component, substrate = decodeFun(substrate, asn1Spec)
+ if eoo.endOfOctets.isSameTypeWith(component) and \
+ component == eoo.endOfOctets:
+ break
+ r.setComponentByPosition(idx, component, asn1Spec is None)
+ idx = idx + 1
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+ r.verifySizeSpec()
+ return r, substrate
+
+class SetDecoder(SequenceDecoder):
+ protoComponent = univ.Set()
+ def _getComponentTagMap(self, r, idx):
+ return r.getComponentTagMap()
+
+ def _getComponentPositionByType(self, r, t, idx):
+ nextIdx = r.getComponentPositionByType(t)
+ if nextIdx is None:
+ return idx
+ else:
+ return nextIdx
+
+class SetOfDecoder(SequenceOfDecoder):
+ protoComponent = univ.SetOf()
+
+class ChoiceDecoder(AbstractConstructedDecoder):
+ protoComponent = univ.Choice()
+ tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ r = self._createComponent(asn1Spec, tagSet)
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ if r.getTagSet() == tagSet: # explicitly tagged Choice
+ component, head = decodeFun(
+ head, r.getComponentTagMap()
+ )
+ else:
+ component, head = decodeFun(
+ head, r.getComponentTagMap(), tagSet, length, state
+ )
+ if isinstance(component, univ.Choice):
+ effectiveTagSet = component.getEffectiveTagSet()
+ else:
+ effectiveTagSet = component.getTagSet()
+ r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
+ return r, tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ r = self._createComponent(asn1Spec, tagSet)
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ if r.getTagSet() == tagSet: # explicitly tagged Choice
+ component, substrate = decodeFun(substrate, r.getComponentTagMap())
+ eooMarker, substrate = decodeFun(substrate) # eat up EOO marker
+ if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \
+ eooMarker != eoo.endOfOctets:
+ raise error.PyAsn1Error('No EOO seen before substrate ends')
+ else:
+            component, substrate = decodeFun(
+ substrate, r.getComponentTagMap(), tagSet, length, state
+ )
+ if isinstance(component, univ.Choice):
+ effectiveTagSet = component.getEffectiveTagSet()
+ else:
+ effectiveTagSet = component.getTagSet()
+ r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
+ return r, substrate
+
+class AnyDecoder(AbstractSimpleDecoder):
+ protoComponent = univ.Any()
+ tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ if asn1Spec is None or \
+ asn1Spec is not None and tagSet != asn1Spec.getTagSet():
+ # untagged Any container, recover inner header substrate
+ length = length + len(fullSubstrate) - len(substrate)
+ substrate = fullSubstrate
+ if substrateFun:
+ return substrateFun(self._createComponent(asn1Spec, tagSet),
+ substrate, length)
+ head, tail = substrate[:length], substrate[length:]
+ return self._createComponent(asn1Spec, tagSet, value=head), tail
+
+ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
+ length, state, decodeFun, substrateFun):
+ if asn1Spec is not None and tagSet == asn1Spec.getTagSet():
+ # tagged Any type -- consume header substrate
+ header = ''
+ else:
+ # untagged Any, recover header substrate
+ header = fullSubstrate[:-len(substrate)]
+
+ r = self._createComponent(asn1Spec, tagSet, header)
+
+ # Any components do not inherit initial tag
+ asn1Spec = self.protoComponent
+
+ if substrateFun:
+ return substrateFun(r, substrate, length)
+ while substrate:
+ component, substrate = decodeFun(substrate, asn1Spec)
+ if eoo.endOfOctets.isSameTypeWith(component) and \
+ component == eoo.endOfOctets:
+ break
+ r = r + component
+ else:
+ raise error.SubstrateUnderrunError(
+ 'No EOO seen before substrate ends'
+ )
+ return r, substrate
+
+# character string types
+class UTF8StringDecoder(OctetStringDecoder):
+ protoComponent = char.UTF8String()
+class NumericStringDecoder(OctetStringDecoder):
+ protoComponent = char.NumericString()
+class PrintableStringDecoder(OctetStringDecoder):
+ protoComponent = char.PrintableString()
+class TeletexStringDecoder(OctetStringDecoder):
+ protoComponent = char.TeletexString()
+class VideotexStringDecoder(OctetStringDecoder):
+ protoComponent = char.VideotexString()
+class IA5StringDecoder(OctetStringDecoder):
+ protoComponent = char.IA5String()
+class GraphicStringDecoder(OctetStringDecoder):
+ protoComponent = char.GraphicString()
+class VisibleStringDecoder(OctetStringDecoder):
+ protoComponent = char.VisibleString()
+class GeneralStringDecoder(OctetStringDecoder):
+ protoComponent = char.GeneralString()
+class UniversalStringDecoder(OctetStringDecoder):
+ protoComponent = char.UniversalString()
+class BMPStringDecoder(OctetStringDecoder):
+ protoComponent = char.BMPString()
+
+# "useful" types
+class GeneralizedTimeDecoder(OctetStringDecoder):
+ protoComponent = useful.GeneralizedTime()
+class UTCTimeDecoder(OctetStringDecoder):
+ protoComponent = useful.UTCTime()
+
+tagMap = {
+ eoo.endOfOctets.tagSet: EndOfOctetsDecoder(),
+ univ.Integer.tagSet: IntegerDecoder(),
+ univ.Boolean.tagSet: BooleanDecoder(),
+ univ.BitString.tagSet: BitStringDecoder(),
+ univ.OctetString.tagSet: OctetStringDecoder(),
+ univ.Null.tagSet: NullDecoder(),
+ univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
+ univ.Enumerated.tagSet: IntegerDecoder(),
+ univ.Real.tagSet: RealDecoder(),
+ univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf
+ univ.Set.tagSet: SetDecoder(), # conflicts with SetOf
+ univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
+ # character string types
+ char.UTF8String.tagSet: UTF8StringDecoder(),
+ char.NumericString.tagSet: NumericStringDecoder(),
+ char.PrintableString.tagSet: PrintableStringDecoder(),
+ char.TeletexString.tagSet: TeletexStringDecoder(),
+ char.VideotexString.tagSet: VideotexStringDecoder(),
+ char.IA5String.tagSet: IA5StringDecoder(),
+ char.GraphicString.tagSet: GraphicStringDecoder(),
+ char.VisibleString.tagSet: VisibleStringDecoder(),
+ char.GeneralString.tagSet: GeneralStringDecoder(),
+ char.UniversalString.tagSet: UniversalStringDecoder(),
+ char.BMPString.tagSet: BMPStringDecoder(),
+ # useful types
+ useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
+ useful.UTCTime.tagSet: UTCTimeDecoder()
+ }
+
+# Type-to-codec map for ambiguous ASN.1 types
+typeMap = {
+ univ.Set.typeId: SetDecoder(),
+ univ.SetOf.typeId: SetOfDecoder(),
+ univ.Sequence.typeId: SequenceDecoder(),
+ univ.SequenceOf.typeId: SequenceOfDecoder(),
+ univ.Choice.typeId: ChoiceDecoder(),
+ univ.Any.typeId: AnyDecoder()
+ }
+
+( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec,
+ stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue,
+ stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)]
+
+class Decoder:
+ defaultErrorState = stErrorCondition
+# defaultErrorState = stDumpRawValue
+ defaultRawDecoder = AnyDecoder()
+ def __init__(self, tagMap, typeMap={}):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+ self.__endOfOctetsTagSet = eoo.endOfOctets.getTagSet()
+ # Tag & TagSet objects caches
+ self.__tagCache = {}
+ self.__tagSetCache = {}
+
+ def __call__(self, substrate, asn1Spec=None, tagSet=None,
+ length=None, state=stDecodeTag, recursiveFlag=1,
+ substrateFun=None):
+ if debug.logger & debug.flagDecoder:
+ debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
+ fullSubstrate = substrate
+ while state != stStop:
+ if state == stDecodeTag:
+ # Decode tag
+ if not substrate:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on tag decoding'
+ )
+ if not isOctetsType(substrate) and \
+ not isinstance(substrate, univ.OctetString):
+ raise error.PyAsn1Error('Bad octet stream type')
+
+ firstOctet = substrate[0]
+ substrate = substrate[1:]
+ if firstOctet in self.__tagCache:
+ lastTag = self.__tagCache[firstOctet]
+ else:
+ t = oct2int(firstOctet)
+ tagClass = t&0xC0
+ tagFormat = t&0x20
+ tagId = t&0x1F
+ if tagId == 0x1F:
+ tagId = 0
+ while 1:
+ if not substrate:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on long tag decoding'
+ )
+ t = oct2int(substrate[0])
+ tagId = tagId << 7 | (t&0x7F)
+ substrate = substrate[1:]
+ if not t&0x80:
+ break
+ lastTag = tag.Tag(
+ tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
+ )
+ if tagId < 31:
+ # cache short tags
+ self.__tagCache[firstOctet] = lastTag
+ if tagSet is None:
+ if firstOctet in self.__tagSetCache:
+ tagSet = self.__tagSetCache[firstOctet]
+ else:
+ # base tag not recovered
+ tagSet = tag.TagSet((), lastTag)
+ if firstOctet in self.__tagCache:
+ self.__tagSetCache[firstOctet] = tagSet
+ else:
+ tagSet = lastTag + tagSet
+ state = stDecodeLength
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %r, decoding length' % tagSet)
+ if state == stDecodeLength:
+ # Decode length
+ if not substrate:
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on length decoding'
+ )
+ firstOctet = oct2int(substrate[0])
+ if firstOctet == 128:
+ size = 1
+ length = -1
+ elif firstOctet < 128:
+ length, size = firstOctet, 1
+ else:
+ size = firstOctet & 0x7F
+ # encoded in size bytes
+ length = 0
+ lengthString = substrate[1:size+1]
+                    # no maximum-size check here, which shouldn't be a
+                    # problem: we can handle more than is practically possible
+ if len(lengthString) != size:
+ raise error.SubstrateUnderrunError(
+ '%s<%s at %s' %
+ (size, len(lengthString), tagSet)
+ )
+                    for lengthOctet in lengthString: # avoid shadowing the char module
+                        length = (length << 8) | oct2int(lengthOctet)
+ size = size + 1
+ substrate = substrate[size:]
+ if length != -1 and len(substrate) < length:
+ raise error.SubstrateUnderrunError(
+ '%d-octet short' % (length - len(substrate))
+ )
+ state = stGetValueDecoder
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
+ if state == stGetValueDecoder:
+ if asn1Spec is None:
+ state = stGetValueDecoderByTag
+ else:
+ state = stGetValueDecoderByAsn1Spec
+ #
+                # There are two ways of creating subtypes in ASN.1, and
+                # they affect decoder operation differently:
+                # 1) Base types are used as-is, or no IMPLICIT tagging
+                #    has been applied on subtyping.
+                # 2) Subtype syntax drops base type information (by means
+                #    of IMPLICIT tagging).
+ # The first case allows for complete tag recovery from substrate
+ # while the second one requires original ASN.1 type spec for
+ # decoding.
+ #
+ # In either case a set of tags (tagSet) is coming from substrate
+ # in an incremental, tag-by-tag fashion (this is the case of
+ # EXPLICIT tag which is most basic). Outermost tag comes first
+ # from the wire.
+ #
+ if state == stGetValueDecoderByTag:
+ if tagSet in self.__tagMap:
+ concreteDecoder = self.__tagMap[tagSet]
+ else:
+ concreteDecoder = None
+ if concreteDecoder:
+ state = stDecodeValue
+ else:
+ _k = tagSet[:1]
+ if _k in self.__tagMap:
+ concreteDecoder = self.__tagMap[_k]
+ else:
+ concreteDecoder = None
+ if concreteDecoder:
+ state = stDecodeValue
+ else:
+ state = stTryAsExplicitTag
+ if debug.logger and debug.logger & debug.flagDecoder:
+ debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
+ debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
+ if state == stGetValueDecoderByAsn1Spec:
+ if isinstance(asn1Spec, (dict, tagmap.TagMap)):
+ if tagSet in asn1Spec:
+ __chosenSpec = asn1Spec[tagSet]
+ else:
+ __chosenSpec = None
+ if debug.logger and debug.logger & debug.flagDecoder:
+ debug.logger('candidate ASN.1 spec is a map of:')
+ for t, v in asn1Spec.getPosMap().items():
+ debug.logger(' %r -> %s' % (t, v.__class__.__name__))
+ if asn1Spec.getNegMap():
+ debug.logger('but neither of: ')
+                            for t, v in asn1Spec.getNegMap().items():
+ debug.logger(' %r -> %s' % (t, v.__class__.__name__))
+ debug.logger('new candidate ASN.1 spec is %s, chosen by %r' % (__chosenSpec is None and '<none>' or __chosenSpec.__class__.__name__, tagSet))
+ else:
+ __chosenSpec = asn1Spec
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
+ if __chosenSpec is not None and (
+ tagSet == __chosenSpec.getTagSet() or \
+ tagSet in __chosenSpec.getTagMap()
+ ):
+ # use base type for codec lookup to recover untagged types
+ baseTagSet = __chosenSpec.baseTagSet
+ if __chosenSpec.typeId is not None and \
+ __chosenSpec.typeId in self.__typeMap:
+ # ambiguous type
+ concreteDecoder = self.__typeMap[__chosenSpec.typeId]
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,))
+ elif baseTagSet in self.__tagMap:
+ # base type or tagged subtype
+ concreteDecoder = self.__tagMap[baseTagSet]
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %r' % (baseTagSet,))
+ else:
+ concreteDecoder = None
+ if concreteDecoder:
+ asn1Spec = __chosenSpec
+ state = stDecodeValue
+ else:
+ state = stTryAsExplicitTag
+ elif tagSet == self.__endOfOctetsTagSet:
+ concreteDecoder = self.__tagMap[tagSet]
+ state = stDecodeValue
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets found')
+ else:
+ concreteDecoder = None
+ state = stTryAsExplicitTag
+ if debug.logger and debug.logger & debug.flagDecoder:
+ debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
+ debug.scope.push(__chosenSpec is None and '?' or __chosenSpec.__class__.__name__)
+ if state == stTryAsExplicitTag:
+ if tagSet and \
+ tagSet[0][1] == tag.tagFormatConstructed and \
+ tagSet[0][0] != tag.tagClassUniversal:
+ # Assume explicit tagging
+ concreteDecoder = explicitTagDecoder
+ state = stDecodeValue
+ else:
+ concreteDecoder = None
+ state = self.defaultErrorState
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as failure'))
+ if state == stDumpRawValue:
+ concreteDecoder = self.defaultRawDecoder
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
+ state = stDecodeValue
+ if state == stDecodeValue:
+ if recursiveFlag == 0 and not substrateFun: # legacy
+ substrateFun = lambda a,b,c: (a,b[:c])
+ if length == -1: # indef length
+ value, substrate = concreteDecoder.indefLenValueDecoder(
+ fullSubstrate, substrate, asn1Spec, tagSet, length,
+ stGetValueDecoder, self, substrateFun
+ )
+ else:
+ value, substrate = concreteDecoder.valueDecoder(
+ fullSubstrate, substrate, asn1Spec, tagSet, length,
+ stGetValueDecoder, self, substrateFun
+ )
+ state = stStop
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '<none>'))
+ if state == stErrorCondition:
+ raise error.PyAsn1Error(
+ '%r not in asn1Spec: %r' % (tagSet, asn1Spec)
+ )
+ if debug.logger and debug.logger & debug.flagDecoder:
+ debug.scope.pop()
+ debug.logger('decoder left scope %s, call completed' % debug.scope)
+ return value, substrate
+
+decode = Decoder(tagMap, typeMap)
+
+# XXX
+# non-recursive decoding; return position rather than substrate
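+
+# A minimal usage sketch (editor's addition, not part of pyasn1 itself):
+# decode one definite-length BER TLV with the module-level singleton
+# above. 02 01 0c is INTEGER 12; decode() returns the value object plus
+# any remaining substrate.
+if __name__ == '__main__':
+    from pyasn1.compat.octets import ints2octs, null
+    _value, _rest = decode(ints2octs((0x02, 0x01, 0x0c)))
+    assert int(_value) == 12 and _rest == null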
diff --git a/python/pyasn1/pyasn1/codec/ber/encoder.py b/python/pyasn1/pyasn1/codec/ber/encoder.py
new file mode 100644
index 000000000..173949d0b
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/ber/encoder.py
@@ -0,0 +1,353 @@
+# BER encoder
+from pyasn1.type import base, tag, univ, char, useful
+from pyasn1.codec.ber import eoo
+from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs
+from pyasn1 import debug, error
+
+class Error(Exception): pass
+
+class AbstractItemEncoder:
+ supportIndefLenMode = 1
+ def encodeTag(self, t, isConstructed):
+ tagClass, tagFormat, tagId = t.asTuple() # this is a hotspot
+ v = tagClass | tagFormat
+ if isConstructed:
+ v = v|tag.tagFormatConstructed
+ if tagId < 31:
+ return int2oct(v|tagId)
+ else:
+ s = int2oct(tagId&0x7f)
+ tagId = tagId >> 7
+ while tagId:
+ s = int2oct(0x80|(tagId&0x7f)) + s
+ tagId = tagId >> 7
+ return int2oct(v|0x1F) + s
+
+ def encodeLength(self, length, defMode):
+ if not defMode and self.supportIndefLenMode:
+ return int2oct(0x80)
+ if length < 0x80:
+ return int2oct(length)
+ else:
+ substrate = null
+ while length:
+ substrate = int2oct(length&0xff) + substrate
+ length = length >> 8
+ substrateLen = len(substrate)
+ if substrateLen > 126:
+ raise Error('Length octets overflow (%d)' % substrateLen)
+ return int2oct(0x80 | substrateLen) + substrate
+
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ raise Error('Not implemented')
+
+ def _encodeEndOfOctets(self, encodeFun, defMode):
+ if defMode or not self.supportIndefLenMode:
+ return null
+ else:
+ return encodeFun(eoo.endOfOctets, defMode)
+
+ def encode(self, encodeFun, value, defMode, maxChunkSize):
+ substrate, isConstructed = self.encodeValue(
+ encodeFun, value, defMode, maxChunkSize
+ )
+ tagSet = value.getTagSet()
+ if tagSet:
+ if not isConstructed: # primitive form implies definite mode
+ defMode = 1
+ return self.encodeTag(
+ tagSet[-1], isConstructed
+ ) + self.encodeLength(
+ len(substrate), defMode
+ ) + substrate + self._encodeEndOfOctets(encodeFun, defMode)
+ else:
+ return substrate # untagged value
+
+class EndOfOctetsEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ return null, 0
+
+class ExplicitlyTaggedItemEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ if isinstance(value, base.AbstractConstructedAsn1Item):
+ value = value.clone(tagSet=value.getTagSet()[:-1],
+ cloneValueFlag=1)
+ else:
+ value = value.clone(tagSet=value.getTagSet()[:-1])
+ return encodeFun(value, defMode, maxChunkSize), 1
+
+explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder()
+
+class BooleanEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ _true = ints2octs((1,))
+ _false = ints2octs((0,))
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ return value and self._true or self._false, 0
+
+class IntegerEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ supportCompactZero = False
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ if value == 0: # shortcut for zero value
+ if self.supportCompactZero:
+ # this seems to be a correct way for encoding zeros
+ return null, 0
+ else:
+ # this seems to be a widespread way for encoding zeros
+ return ints2octs((0,)), 0
+ octets = []
+ value = int(value) # to save on ops on asn1 type
+ while 1:
+ octets.insert(0, value & 0xff)
+ if value == 0 or value == -1:
+ break
+ value = value >> 8
+ if value == 0 and octets[0] & 0x80:
+ octets.insert(0, 0)
+ while len(octets) > 1 and \
+ (octets[0] == 0 and octets[1] & 0x80 == 0 or \
+ octets[0] == 0xff and octets[1] & 0x80 != 0):
+ del octets[0]
+ return ints2octs(octets), 0
+
+class BitStringEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ if not maxChunkSize or len(value) <= maxChunkSize*8:
+ r = {}; l = len(value); p = 0; j = 7
+ while p < l:
+ i, j = divmod(p, 8)
+ r[i] = r.get(i,0) | value[p]<<(7-j)
+ p = p + 1
+ keys = list(r); keys.sort()
+ return int2oct(7-j) + ints2octs([r[k] for k in keys]), 0
+ else:
+ pos = 0; substrate = null
+ while 1:
+ # count in octets
+ v = value.clone(value[pos*8:pos*8+maxChunkSize*8])
+ if not v:
+ break
+ substrate = substrate + encodeFun(v, defMode, maxChunkSize)
+ pos = pos + maxChunkSize
+ return substrate, 1
+
+class OctetStringEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ if not maxChunkSize or len(value) <= maxChunkSize:
+ return value.asOctets(), 0
+ else:
+ pos = 0; substrate = null
+ while 1:
+ v = value.clone(value[pos:pos+maxChunkSize])
+ if not v:
+ break
+ substrate = substrate + encodeFun(v, defMode, maxChunkSize)
+ pos = pos + maxChunkSize
+ return substrate, 1
+
+class NullEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ return null, 0
+
+class ObjectIdentifierEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ precomputedValues = {
+ (1, 3, 6, 1, 2): (43, 6, 1, 2),
+ (1, 3, 6, 1, 4): (43, 6, 1, 4)
+ }
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ oid = value.asTuple()
+ if oid[:5] in self.precomputedValues:
+ octets = self.precomputedValues[oid[:5]]
+ index = 5
+ else:
+ if len(oid) < 2:
+ raise error.PyAsn1Error('Short OID %s' % (value,))
+
+            # Pack the first two arcs into a single octet
+ if oid[0] > 6 or oid[1] > 39 or oid[0] == 6 and oid[1] > 15:
+ raise error.PyAsn1Error(
+ 'Initial sub-ID overflow %s in OID %s' % (oid[:2], value)
+ )
+ octets = (oid[0] * 40 + oid[1],)
+ index = 2
+
+ # Cycle through subids
+ for subid in oid[index:]:
+ if subid > -1 and subid < 128:
+ # Optimize for the common case
+ octets = octets + (subid & 0x7f,)
+ elif subid < 0 or subid > 0xFFFFFFFF:
+ raise error.PyAsn1Error(
+ 'SubId overflow %s in %s' % (subid, value)
+ )
+ else:
+ # Pack large Sub-Object IDs
+ res = (subid & 0x7f,)
+ subid = subid >> 7
+ while subid > 0:
+ res = (0x80 | (subid & 0x7f),) + res
+ subid = subid >> 7
+ # Add packed Sub-Object ID to resulted Object ID
+ octets += res
+
+ return ints2octs(octets), 0
+
+class RealEncoder(AbstractItemEncoder):
+ supportIndefLenMode = 0
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ if value.isPlusInfinity():
+ return int2oct(0x40), 0
+ if value.isMinusInfinity():
+ return int2oct(0x41), 0
+ m, b, e = value
+ if not m:
+ return null, 0
+ if b == 10:
+ return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0
+ elif b == 2:
+            fo = 0x80 # binary encoding
+ if m < 0:
+ fo = fo | 0x40 # sign bit
+ m = -m
+ while int(m) != m: # drop floating point
+ m *= 2
+ e -= 1
+ while m & 0x1 == 0: # mantissa normalization
+ m >>= 1
+ e += 1
+ eo = null
+ while e not in (0, -1):
+ eo = int2oct(e&0xff) + eo
+ e >>= 8
+ if e == 0 and eo and oct2int(eo[0]) & 0x80:
+ eo = int2oct(0) + eo
+ n = len(eo)
+ if n > 0xff:
+ raise error.PyAsn1Error('Real exponent overflow')
+ if n == 1:
+ pass
+ elif n == 2:
+ fo |= 1
+ elif n == 3:
+ fo |= 2
+ else:
+ fo |= 3
+                eo = int2oct(n&0xff) + eo # exponent length octet; n <= 0xff
+ po = null
+ while m:
+ po = int2oct(m&0xff) + po
+ m >>= 8
+ substrate = int2oct(fo) + eo + po
+ return substrate, 0
+ else:
+ raise error.PyAsn1Error('Prohibited Real base %s' % b)
+
+class SequenceEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ value.setDefaultComponents()
+ value.verifySizeSpec()
+ substrate = null; idx = len(value)
+ while idx > 0:
+ idx = idx - 1
+ if value[idx] is None: # Optional component
+ continue
+ component = value.getDefaultComponentByPosition(idx)
+ if component is not None and component == value[idx]:
+ continue
+ substrate = encodeFun(
+ value[idx], defMode, maxChunkSize
+ ) + substrate
+ return substrate, 1
+
+class SequenceOfEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ value.verifySizeSpec()
+ substrate = null; idx = len(value)
+ while idx > 0:
+ idx = idx - 1
+ substrate = encodeFun(
+ value[idx], defMode, maxChunkSize
+ ) + substrate
+ return substrate, 1
+
+class ChoiceEncoder(AbstractItemEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ return encodeFun(value.getComponent(), defMode, maxChunkSize), 1
+
+class AnyEncoder(OctetStringEncoder):
+ def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
+ return value.asOctets(), defMode == 0
+
+tagMap = {
+ eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
+ univ.Boolean.tagSet: BooleanEncoder(),
+ univ.Integer.tagSet: IntegerEncoder(),
+ univ.BitString.tagSet: BitStringEncoder(),
+ univ.OctetString.tagSet: OctetStringEncoder(),
+ univ.Null.tagSet: NullEncoder(),
+ univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
+ univ.Enumerated.tagSet: IntegerEncoder(),
+ univ.Real.tagSet: RealEncoder(),
+ # Sequence & Set have same tags as SequenceOf & SetOf
+ univ.SequenceOf.tagSet: SequenceOfEncoder(),
+ univ.SetOf.tagSet: SequenceOfEncoder(),
+ univ.Choice.tagSet: ChoiceEncoder(),
+ # character string types
+ char.UTF8String.tagSet: OctetStringEncoder(),
+ char.NumericString.tagSet: OctetStringEncoder(),
+ char.PrintableString.tagSet: OctetStringEncoder(),
+ char.TeletexString.tagSet: OctetStringEncoder(),
+ char.VideotexString.tagSet: OctetStringEncoder(),
+ char.IA5String.tagSet: OctetStringEncoder(),
+ char.GraphicString.tagSet: OctetStringEncoder(),
+ char.VisibleString.tagSet: OctetStringEncoder(),
+ char.GeneralString.tagSet: OctetStringEncoder(),
+ char.UniversalString.tagSet: OctetStringEncoder(),
+ char.BMPString.tagSet: OctetStringEncoder(),
+ # useful types
+ useful.GeneralizedTime.tagSet: OctetStringEncoder(),
+ useful.UTCTime.tagSet: OctetStringEncoder()
+ }
+
+# Type-to-codec map for ambiguous ASN.1 types
+typeMap = {
+ univ.Set.typeId: SequenceEncoder(),
+ univ.SetOf.typeId: SequenceOfEncoder(),
+ univ.Sequence.typeId: SequenceEncoder(),
+ univ.SequenceOf.typeId: SequenceOfEncoder(),
+ univ.Choice.typeId: ChoiceEncoder(),
+ univ.Any.typeId: AnyEncoder()
+ }
+
+class Encoder:
+ def __init__(self, tagMap, typeMap={}):
+ self.__tagMap = tagMap
+ self.__typeMap = typeMap
+
+ def __call__(self, value, defMode=1, maxChunkSize=0):
+ debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.__class__.__name__, value.prettyPrint()))
+ tagSet = value.getTagSet()
+ if len(tagSet) > 1:
+ concreteEncoder = explicitlyTaggedItemEncoder
+ else:
+ if value.typeId is not None and value.typeId in self.__typeMap:
+ concreteEncoder = self.__typeMap[value.typeId]
+ elif tagSet in self.__tagMap:
+ concreteEncoder = self.__tagMap[tagSet]
+ else:
+ tagSet = value.baseTagSet
+ if tagSet in self.__tagMap:
+ concreteEncoder = self.__tagMap[tagSet]
+ else:
+ raise Error('No encoder for %s' % (value,))
+ debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %r' % (concreteEncoder.__class__.__name__, tagSet))
+ substrate = concreteEncoder.encode(
+ self, value, defMode, maxChunkSize
+ )
+ debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate)))
+ return substrate
+
+encode = Encoder(tagMap, typeMap)
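+
+# A minimal usage sketch (editor's addition): round-trip an INTEGER
+# through the module-level encoder above; definite-length BER encoding
+# of univ.Integer(12) is the TLV 02 01 0c.
+if __name__ == '__main__':
+    assert encode(univ.Integer(12)) == ints2octs((0x02, 0x01, 0x0c))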
diff --git a/python/pyasn1/pyasn1/codec/ber/eoo.py b/python/pyasn1/pyasn1/codec/ber/eoo.py
new file mode 100644
index 000000000..379be1996
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/ber/eoo.py
@@ -0,0 +1,8 @@
+from pyasn1.type import base, tag
+
+class EndOfOctets(base.AbstractSimpleAsn1Item):
+ defaultValue = 0
+ tagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
+ )
+endOfOctets = EndOfOctets()
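+
+# Editor's note: this sentinel corresponds to the two zero octets
+# (tag 0x00, length 0x00) that terminate indefinite-length encodings.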
diff --git a/python/pyasn1/pyasn1/codec/cer/__init__.py b/python/pyasn1/pyasn1/codec/cer/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/cer/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/codec/cer/decoder.py b/python/pyasn1/pyasn1/codec/cer/decoder.py
new file mode 100644
index 000000000..9fd37c134
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/cer/decoder.py
@@ -0,0 +1,35 @@
+# CER decoder
+from pyasn1.type import univ
+from pyasn1.codec.ber import decoder
+from pyasn1.compat.octets import oct2int
+from pyasn1 import error
+
+class BooleanDecoder(decoder.AbstractSimpleDecoder):
+ protoComponent = univ.Boolean(0)
+ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
+ state, decodeFun, substrateFun):
+ head, tail = substrate[:length], substrate[length:]
+ if not head:
+ raise error.PyAsn1Error('Empty substrate')
+ byte = oct2int(head[0])
+ # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
+    # BER allows any non-zero value as TRUE; cf. sections 8.2.2 and 11.1
+ # in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
+ if byte == 0xff:
+ value = 1
+ elif byte == 0x00:
+ value = 0
+ else:
+ raise error.PyAsn1Error('Boolean CER violation: %s' % byte)
+ return self._createComponent(asn1Spec, tagSet, value), tail
+
+tagMap = decoder.tagMap.copy()
+tagMap.update({
+ univ.Boolean.tagSet: BooleanDecoder()
+ })
+
+typeMap = decoder.typeMap
+
+class Decoder(decoder.Decoder): pass
+
+decode = Decoder(tagMap, decoder.typeMap)
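+
+# A minimal usage sketch (editor's addition): CER accepts only 0xff/0x00
+# as Boolean payloads, so 01 01 ff decodes to TRUE while the BER-legal
+# 01 01 01 raises PyAsn1Error here.
+if __name__ == '__main__':
+    from pyasn1.compat.octets import ints2octs
+    _value, _ = decode(ints2octs((0x01, 0x01, 0xff)))
+    assert int(_value) == 1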
diff --git a/python/pyasn1/pyasn1/codec/cer/encoder.py b/python/pyasn1/pyasn1/codec/cer/encoder.py
new file mode 100644
index 000000000..4c05130af
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/cer/encoder.py
@@ -0,0 +1,87 @@
+# CER encoder
+from pyasn1.type import univ
+from pyasn1.codec.ber import encoder
+from pyasn1.compat.octets import int2oct, null
+
+class BooleanEncoder(encoder.IntegerEncoder):
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ if client == 0:
+ substrate = int2oct(0)
+ else:
+ substrate = int2oct(255)
+ return substrate, 0
+
+class BitStringEncoder(encoder.BitStringEncoder):
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ return encoder.BitStringEncoder.encodeValue(
+ self, encodeFun, client, defMode, 1000
+ )
+
+class OctetStringEncoder(encoder.OctetStringEncoder):
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ return encoder.OctetStringEncoder.encodeValue(
+ self, encodeFun, client, defMode, 1000
+ )
+
+# specialized RealEncoder here
+# specialized GeneralStringEncoder here
+# specialized GeneralizedTimeEncoder here
+# specialized UTCTimeEncoder here
+
+class SetOfEncoder(encoder.SequenceOfEncoder):
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ if isinstance(client, univ.SequenceAndSetBase):
+ client.setDefaultComponents()
+ client.verifySizeSpec()
+ substrate = null; idx = len(client)
+ # This is certainly a hack but how else do I distinguish SetOf
+ # from Set if they have the same tags&constraints?
+ if isinstance(client, univ.SequenceAndSetBase):
+ # Set
+ comps = []
+ while idx > 0:
+ idx = idx - 1
+ if client[idx] is None: # Optional component
+ continue
+ if client.getDefaultComponentByPosition(idx) == client[idx]:
+ continue
+ comps.append(client[idx])
+ comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
+ x.getMinTagSet() or x.getTagSet())
+ for c in comps:
+ substrate += encodeFun(c, defMode, maxChunkSize)
+ else:
+ # SetOf
+ compSubs = []
+ while idx > 0:
+ idx = idx - 1
+ compSubs.append(
+ encodeFun(client[idx], defMode, maxChunkSize)
+ )
+ compSubs.sort() # perhaps padding's not needed
+ substrate = null
+ for compSub in compSubs:
+ substrate += compSub
+ return substrate, 1
+
+tagMap = encoder.tagMap.copy()
+tagMap.update({
+ univ.Boolean.tagSet: BooleanEncoder(),
+ univ.BitString.tagSet: BitStringEncoder(),
+ univ.OctetString.tagSet: OctetStringEncoder(),
+    univ.SetOf().tagSet: SetOfEncoder() # conflicts with Set
+ })
+
+typeMap = encoder.typeMap.copy()
+typeMap.update({
+ univ.Set.typeId: SetOfEncoder(),
+ univ.SetOf.typeId: SetOfEncoder()
+ })
+
+class Encoder(encoder.Encoder):
+ def __call__(self, client, defMode=0, maxChunkSize=0):
+ return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
+
+encode = Encoder(tagMap, typeMap)
+
+# EncoderFactory queries class instance and builds a map of tags -> encoders
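+
+# A minimal usage sketch (editor's addition): CER mandates 0xff for
+# TRUE, so univ.Boolean(1) encodes to 01 01 ff rather than BER's
+# 01 01 01.
+if __name__ == '__main__':
+    from pyasn1.compat.octets import ints2octs
+    assert encode(univ.Boolean(1)) == ints2octs((0x01, 0x01, 0xff))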
diff --git a/python/pyasn1/pyasn1/codec/der/__init__.py b/python/pyasn1/pyasn1/codec/der/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/der/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/codec/der/decoder.py b/python/pyasn1/pyasn1/codec/der/decoder.py
new file mode 100644
index 000000000..604abec2b
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/der/decoder.py
@@ -0,0 +1,9 @@
+# DER decoder
+from pyasn1.type import univ
+from pyasn1.codec.cer import decoder
+
+tagMap = decoder.tagMap
+typeMap = decoder.typeMap
+Decoder = decoder.Decoder
+
+decode = Decoder(tagMap, typeMap)
diff --git a/python/pyasn1/pyasn1/codec/der/encoder.py b/python/pyasn1/pyasn1/codec/der/encoder.py
new file mode 100644
index 000000000..4e5faefad
--- /dev/null
+++ b/python/pyasn1/pyasn1/codec/der/encoder.py
@@ -0,0 +1,28 @@
+# DER encoder
+from pyasn1.type import univ
+from pyasn1.codec.cer import encoder
+
+class SetOfEncoder(encoder.SetOfEncoder):
+ def _cmpSetComponents(self, c1, c2):
+ tagSet1 = isinstance(c1, univ.Choice) and \
+ c1.getEffectiveTagSet() or c1.getTagSet()
+ tagSet2 = isinstance(c2, univ.Choice) and \
+ c2.getEffectiveTagSet() or c2.getTagSet()
+ return cmp(tagSet1, tagSet2)
+
+tagMap = encoder.tagMap.copy()
+tagMap.update({
+    # Overload CER encoders with BER ones (a bit hackerish XXX)
+ univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
+ univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
+ # Set & SetOf have same tags
+ univ.SetOf().tagSet: SetOfEncoder()
+ })
+
+typeMap = encoder.typeMap
+
+class Encoder(encoder.Encoder):
+ def __call__(self, client, defMode=1, maxChunkSize=0):
+ return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
+
+encode = Encoder(tagMap, typeMap)
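+
+# A minimal usage sketch (editor's addition): DER forces definite-length,
+# unfragmented encodings, so a 4-octet OCTET STRING is a single
+# primitive TLV.
+if __name__ == '__main__':
+    from pyasn1.compat.octets import ints2octs, str2octs
+    assert encode(univ.OctetString('abcd')) == \
+           ints2octs((0x04, 0x04)) + str2octs('abcd')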
diff --git a/python/pyasn1/pyasn1/compat/__init__.py b/python/pyasn1/pyasn1/compat/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/compat/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/compat/octets.py b/python/pyasn1/pyasn1/compat/octets.py
new file mode 100644
index 000000000..f7f2a29bf
--- /dev/null
+++ b/python/pyasn1/pyasn1/compat/octets.py
@@ -0,0 +1,20 @@
+from sys import version_info
+
+if version_info[0] <= 2:
+ int2oct = chr
+ ints2octs = lambda s: ''.join([ int2oct(x) for x in s ])
+ null = ''
+ oct2int = ord
+ octs2ints = lambda s: [ oct2int(x) for x in s ]
+ str2octs = lambda x: x
+ octs2str = lambda x: x
+ isOctetsType = lambda s: isinstance(s, str)
+else:
+ ints2octs = bytes
+ int2oct = lambda x: ints2octs((x,))
+ null = ints2octs()
+ oct2int = lambda x: x
+ octs2ints = lambda s: [ x for x in s ]
+ str2octs = lambda x: x.encode()
+ octs2str = lambda x: x.decode()
+ isOctetsType = lambda s: isinstance(s, bytes)
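+
+# A minimal usage sketch (editor's addition): these shims give
+# version-neutral octet handling -- ints2octs((2, 1, 12)) is the str
+# '\x02\x01\x0c' on Python 2 and the bytes b'\x02\x01\x0c' on Python 3.
+if __name__ == '__main__':
+    assert oct2int(ints2octs((0x41,))[0]) == 0x41
+    assert octs2str(str2octs('abc')) == 'abc'
+    assert isOctetsType(null)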
diff --git a/python/pyasn1/pyasn1/debug.py b/python/pyasn1/pyasn1/debug.py
new file mode 100644
index 000000000..c27cb1d44
--- /dev/null
+++ b/python/pyasn1/pyasn1/debug.py
@@ -0,0 +1,65 @@
+import sys
+from pyasn1.compat.octets import octs2ints
+from pyasn1 import error
+from pyasn1 import __version__
+
+flagNone = 0x0000
+flagEncoder = 0x0001
+flagDecoder = 0x0002
+flagAll = 0xffff
+
+flagMap = {
+ 'encoder': flagEncoder,
+ 'decoder': flagDecoder,
+ 'all': flagAll
+ }
+
+class Debug:
+ defaultPrinter = sys.stderr.write
+ def __init__(self, *flags):
+ self._flags = flagNone
+ self._printer = self.defaultPrinter
+ self('running pyasn1 version %s' % __version__)
+ for f in flags:
+ if f not in flagMap:
+ raise error.PyAsn1Error('bad debug flag %s' % (f,))
+ self._flags = self._flags | flagMap[f]
+ self('debug category \'%s\' enabled' % f)
+
+ def __str__(self):
+ return 'logger %s, flags %x' % (self._printer, self._flags)
+
+ def __call__(self, msg):
+ self._printer('DBG: %s\n' % msg)
+
+ def __and__(self, flag):
+ return self._flags & flag
+
+ def __rand__(self, flag):
+ return flag & self._flags
+
+logger = 0
+
+def setLogger(l):
+ global logger
+ logger = l
+
+def hexdump(octets):
+ return ' '.join(
+ [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x)
+ for n,x in zip(range(len(octets)), octs2ints(octets)) ]
+ )
+
+class Scope:
+ def __init__(self):
+ self._list = []
+
+ def __str__(self): return '.'.join(self._list)
+
+ def push(self, token):
+ self._list.append(token)
+
+ def pop(self):
+ return self._list.pop()
+
+scope = Scope()
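+
+# A minimal usage sketch (editor's addition): install a Debug instance
+# as the module-level logger to turn on codec tracing.
+if __name__ == '__main__':
+    setLogger(Debug('decoder'))
+    logger('decoder tracing is now on')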
diff --git a/python/pyasn1/pyasn1/error.py b/python/pyasn1/pyasn1/error.py
new file mode 100644
index 000000000..716406ff6
--- /dev/null
+++ b/python/pyasn1/pyasn1/error.py
@@ -0,0 +1,3 @@
+class PyAsn1Error(Exception): pass
+class ValueConstraintError(PyAsn1Error): pass
+class SubstrateUnderrunError(PyAsn1Error): pass
diff --git a/python/pyasn1/pyasn1/type/__init__.py b/python/pyasn1/pyasn1/type/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/pyasn1/type/base.py b/python/pyasn1/pyasn1/type/base.py
new file mode 100644
index 000000000..40873719c
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/base.py
@@ -0,0 +1,249 @@
+# Base classes for ASN.1 types
+import sys
+from pyasn1.type import constraint, tagmap
+from pyasn1 import error
+
+class Asn1Item: pass
+
+class Asn1ItemBase(Asn1Item):
+ # Set of tags for this ASN.1 type
+ tagSet = ()
+
+ # A list of constraint.Constraint instances for checking values
+ subtypeSpec = constraint.ConstraintsIntersection()
+
+ # Used for ambiguous ASN.1 types identification
+ typeId = None
+
+ def __init__(self, tagSet=None, subtypeSpec=None):
+ if tagSet is None:
+ self._tagSet = self.tagSet
+ else:
+ self._tagSet = tagSet
+ if subtypeSpec is None:
+ self._subtypeSpec = self.subtypeSpec
+ else:
+ self._subtypeSpec = subtypeSpec
+
+ def _verifySubtypeSpec(self, value, idx=None):
+ try:
+ self._subtypeSpec(value, idx)
+ except error.PyAsn1Error:
+ c, i, t = sys.exc_info()
+ raise c('%s at %s' % (i, self.__class__.__name__))
+
+ def getSubtypeSpec(self): return self._subtypeSpec
+
+ def getTagSet(self): return self._tagSet
+ def getEffectiveTagSet(self): return self._tagSet # used by untagged types
+ def getTagMap(self): return tagmap.TagMap({self._tagSet: self})
+
+ def isSameTypeWith(self, other):
+ return self is other or \
+ self._tagSet == other.getTagSet() and \
+ self._subtypeSpec == other.getSubtypeSpec()
+ def isSuperTypeOf(self, other):
+ """Returns true if argument is a ASN1 subtype of ourselves"""
+ return self._tagSet.isSuperTagSetOf(other.getTagSet()) and \
+ self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())
+
+class __NoValue:
+ def __getattr__(self, attr):
+ raise error.PyAsn1Error('No value for %s()' % attr)
+ def __getitem__(self, i):
+ raise error.PyAsn1Error('No value')
+
+noValue = __NoValue()
+
+# Base class for "simple" ASN.1 objects. These are immutable.
+class AbstractSimpleAsn1Item(Asn1ItemBase):
+ defaultValue = noValue
+ def __init__(self, value=None, tagSet=None, subtypeSpec=None):
+ Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
+ if value is None or value is noValue:
+ value = self.defaultValue
+ if value is None or value is noValue:
+ self.__hashedValue = value = noValue
+ else:
+ value = self.prettyIn(value)
+ self._verifySubtypeSpec(value)
+ self.__hashedValue = hash(value)
+ self._value = value
+ self._len = None
+
+ def __repr__(self):
+ if self._value is noValue:
+ return self.__class__.__name__ + '()'
+ else:
+ return self.__class__.__name__ + '(%s)' % (self.prettyOut(self._value),)
+ def __str__(self): return str(self._value)
+ def __eq__(self, other):
+ return self is other and True or self._value == other
+ def __ne__(self, other): return self._value != other
+ def __lt__(self, other): return self._value < other
+ def __le__(self, other): return self._value <= other
+ def __gt__(self, other): return self._value > other
+ def __ge__(self, other): return self._value >= other
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(self._value)
+ else:
+ def __bool__(self): return bool(self._value)
+ def __hash__(self): return self.__hashedValue
+
+ def clone(self, value=None, tagSet=None, subtypeSpec=None):
+ if value is None and tagSet is None and subtypeSpec is None:
+ return self
+ if value is None:
+ value = self._value
+ if tagSet is None:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ return self.__class__(value, tagSet, subtypeSpec)
+
+ def subtype(self, value=None, implicitTag=None, explicitTag=None,
+ subtypeSpec=None):
+ if value is None:
+ value = self._value
+ if implicitTag is not None:
+ tagSet = self._tagSet.tagImplicitly(implicitTag)
+ elif explicitTag is not None:
+ tagSet = self._tagSet.tagExplicitly(explicitTag)
+ else:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ else:
+ subtypeSpec = subtypeSpec + self._subtypeSpec
+ return self.__class__(value, tagSet, subtypeSpec)
+
+ def prettyIn(self, value): return value
+ def prettyOut(self, value): return str(value)
+
+ def prettyPrint(self, scope=0):
+ if self._value is noValue:
+ return '<no value>'
+ else:
+ return self.prettyOut(self._value)
+
+ # XXX Compatibility stub
+ def prettyPrinter(self, scope=0): return self.prettyPrint(scope)
+
+#
+# Constructed types:
+# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
+# * ASN1 types and values are represented by Python class instances
+# * Value initialization is made for defaulted components only
+# * Primary method of component addressing is by-position. Data model for base
+# type is Python sequence. Additional type-specific addressing methods
+# may be implemented for particular types.
+# * SequenceOf and SetOf types do not implement any additional methods
+# * Sequence, Set and Choice types also implement by-identifier addressing
+# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
+# * Sequence and Set types may include optional and defaulted
+# components
+# * Constructed types hold a reference to component types used for value
+# verification and ordering.
+# * Component type is a scalar type for SequenceOf/SetOf types and a list
+# of types for Sequence/Set/Choice.
+#
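+# For example (editor's illustration; assumes the univ module from this
+# package):
+#
+#   seq = univ.SequenceOf(componentType=univ.Integer())
+#   seq.setComponentByPosition(0, 1)  # primary by-position addressing
+#   seq[1] = 2                        # __setitem__ shortcut
+#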
+
+class AbstractConstructedAsn1Item(Asn1ItemBase):
+ componentType = None
+ sizeSpec = constraint.ConstraintsIntersection()
+ def __init__(self, componentType=None, tagSet=None,
+ subtypeSpec=None, sizeSpec=None):
+ Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
+ if componentType is None:
+ self._componentType = self.componentType
+ else:
+ self._componentType = componentType
+ if sizeSpec is None:
+ self._sizeSpec = self.sizeSpec
+ else:
+ self._sizeSpec = sizeSpec
+ self._componentValues = []
+ self._componentValuesSet = 0
+
+ def __repr__(self):
+ r = self.__class__.__name__ + '()'
+ for idx in range(len(self._componentValues)):
+ if self._componentValues[idx] is None:
+ continue
+ r = r + '.setComponentByPosition(%s, %r)' % (
+ idx, self._componentValues[idx]
+ )
+ return r
+
+ def __eq__(self, other):
+ return self is other and True or self._componentValues == other
+ def __ne__(self, other): return self._componentValues != other
+ def __lt__(self, other): return self._componentValues < other
+ def __le__(self, other): return self._componentValues <= other
+ def __gt__(self, other): return self._componentValues > other
+ def __ge__(self, other): return self._componentValues >= other
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(self._componentValues)
+ else:
+ def __bool__(self): return bool(self._componentValues)
+
+ def getComponentTagMap(self):
+ raise error.PyAsn1Error('Method not implemented')
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag): pass
+
+ def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None,
+ cloneValueFlag=None):
+ if tagSet is None:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ if sizeSpec is None:
+ sizeSpec = self._sizeSpec
+ r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
+ if cloneValueFlag:
+ self._cloneComponentValues(r, cloneValueFlag)
+ return r
+
+ def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None,
+ sizeSpec=None, cloneValueFlag=None):
+ if implicitTag is not None:
+ tagSet = self._tagSet.tagImplicitly(implicitTag)
+ elif explicitTag is not None:
+ tagSet = self._tagSet.tagExplicitly(explicitTag)
+ else:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ else:
+ subtypeSpec = subtypeSpec + self._subtypeSpec
+ if sizeSpec is None:
+ sizeSpec = self._sizeSpec
+ else:
+ sizeSpec = sizeSpec + self._sizeSpec
+ r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
+ if cloneValueFlag:
+ self._cloneComponentValues(r, cloneValueFlag)
+ return r
+
+ def _verifyComponent(self, idx, value): pass
+
+ def verifySizeSpec(self): self._sizeSpec(self)
+
+ def getComponentByPosition(self, idx):
+ raise error.PyAsn1Error('Method not implemented')
+ def setComponentByPosition(self, idx, value, verifyConstraints=True):
+ raise error.PyAsn1Error('Method not implemented')
+
+ def getComponentType(self): return self._componentType
+
+ def __getitem__(self, idx): return self.getComponentByPosition(idx)
+ def __setitem__(self, idx, value): self.setComponentByPosition(idx, value)
+
+ def __len__(self): return len(self._componentValues)
+
+ def clear(self):
+ self._componentValues = []
+ self._componentValuesSet = 0
+
+ def setDefaultComponents(self): pass
diff --git a/python/pyasn1/pyasn1/type/char.py b/python/pyasn1/pyasn1/type/char.py
new file mode 100644
index 000000000..ae112f8bd
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/char.py
@@ -0,0 +1,61 @@
+# ASN.1 "character string" types
+from pyasn1.type import univ, tag
+
+class UTF8String(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )
+ encoding = "utf-8"
+
+class NumericString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
+ )
+
+class PrintableString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
+ )
+
+class TeletexString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
+ )
+
+
+class VideotexString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
+ )
+
+class IA5String(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
+ )
+
+class GraphicString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
+ )
+
+class VisibleString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
+ )
+
+class GeneralString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
+ )
+
+class UniversalString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
+ )
+ encoding = "utf-32-be"
+
+class BMPString(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
+ )
+ encoding = "utf-16-be"
diff --git a/python/pyasn1/pyasn1/type/constraint.py b/python/pyasn1/pyasn1/type/constraint.py
new file mode 100644
index 000000000..66873937d
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/constraint.py
@@ -0,0 +1,200 @@
+#
+# ASN.1 subtype constraints classes.
+#
+# Constraints are relatively rare, but every ASN.1 object
+# checks all the time whether it has any constraints and
+# whether they are applicable to it.
+#
+# What we're going to do is define objects/functions that
+# can be called unconditionally if they are present, and that
+# are simply not present if there are no constraints.
+#
+# Original concept and code by Mike C. Fletcher.
+#
+import sys
+from pyasn1.type import error
+
+class AbstractConstraint:
+ """Abstract base-class for constraint objects
+
+ Constraints should be stored in a simple sequence in the
+ namespace of their client Asn1Item sub-classes.
+ """
+ def __init__(self, *values):
+ self._valueMap = {}
+ self._setValues(values)
+ self.__hashedValues = None
+ def __call__(self, value, idx=None):
+ try:
+ self._testValue(value, idx)
+ except error.ValueConstraintError:
+ raise error.ValueConstraintError(
+ '%s failed at: \"%s\"' % (self, sys.exc_info()[1])
+ )
+ def __repr__(self):
+ return '%s(%s)' % (
+ self.__class__.__name__,
+ ', '.join([repr(x) for x in self._values])
+ )
+ def __eq__(self, other):
+ return self is other and True or self._values == other
+ def __ne__(self, other): return self._values != other
+ def __lt__(self, other): return self._values < other
+ def __le__(self, other): return self._values <= other
+ def __gt__(self, other): return self._values > other
+ def __ge__(self, other): return self._values >= other
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(self._values)
+ else:
+ def __bool__(self): return bool(self._values)
+
+ def __hash__(self):
+ if self.__hashedValues is None:
+ self.__hashedValues = hash((self.__class__.__name__, self._values))
+ return self.__hashedValues
+
+ def _setValues(self, values): self._values = values
+ def _testValue(self, value, idx):
+ raise error.ValueConstraintError(value)
+
+ # Constraints derivation logic
+ def getValueMap(self): return self._valueMap
+ def isSuperTypeOf(self, otherConstraint):
+ return self in otherConstraint.getValueMap() or \
+ otherConstraint is self or otherConstraint == self
+ def isSubTypeOf(self, otherConstraint):
+ return otherConstraint in self._valueMap or \
+ otherConstraint is self or otherConstraint == self
+
+class SingleValueConstraint(AbstractConstraint):
+ """Value must be part of defined values constraint"""
+ def _testValue(self, value, idx):
+ # XXX index vals for performance?
+ if value not in self._values:
+ raise error.ValueConstraintError(value)
+
+class ContainedSubtypeConstraint(AbstractConstraint):
+ """Value must satisfy all of defined set of constraints"""
+ def _testValue(self, value, idx):
+ for c in self._values:
+ c(value, idx)
+
+class ValueRangeConstraint(AbstractConstraint):
+ """Value must be within start and stop values (inclusive)"""
+ def _testValue(self, value, idx):
+ if value < self.start or value > self.stop:
+ raise error.ValueConstraintError(value)
+
+ def _setValues(self, values):
+ if len(values) != 2:
+ raise error.PyAsn1Error(
+ '%s: bad constraint values' % (self.__class__.__name__,)
+ )
+ self.start, self.stop = values
+ if self.start > self.stop:
+ raise error.PyAsn1Error(
+ '%s: screwed constraint values (start > stop): %s > %s' % (
+ self.__class__.__name__,
+ self.start, self.stop
+ )
+ )
+ AbstractConstraint._setValues(self, values)
+
+class ValueSizeConstraint(ValueRangeConstraint):
+ """len(value) must be within start and stop values (inclusive)"""
+ def _testValue(self, value, idx):
+ l = len(value)
+ if l < self.start or l > self.stop:
+ raise error.ValueConstraintError(value)
+
+class PermittedAlphabetConstraint(SingleValueConstraint):
+ def _setValues(self, values):
+ self._values = ()
+ for v in values:
+ self._values = self._values + tuple(v)
+
+ def _testValue(self, value, idx):
+ for v in value:
+ if v not in self._values:
+ raise error.ValueConstraintError(value)
+
+# This is a bit kludgy, meaning two op modes within a single constraint
+class InnerTypeConstraint(AbstractConstraint):
+ """Value must satisfy type and presense constraints"""
+ def _testValue(self, value, idx):
+ if self.__singleTypeConstraint:
+ self.__singleTypeConstraint(value)
+ elif self.__multipleTypeConstraint:
+ if idx not in self.__multipleTypeConstraint:
+ raise error.ValueConstraintError(value)
+ constraint, status = self.__multipleTypeConstraint[idx]
+        if status == 'ABSENT': # XXX presence is not checked!
+ raise error.ValueConstraintError(value)
+ constraint(value)
+
+ def _setValues(self, values):
+ self.__multipleTypeConstraint = {}
+ self.__singleTypeConstraint = None
+ for v in values:
+ if isinstance(v, tuple):
+ self.__multipleTypeConstraint[v[0]] = v[1], v[2]
+ else:
+ self.__singleTypeConstraint = v
+ AbstractConstraint._setValues(self, values)
+
+# Boolean ops on constraints
+
+class ConstraintsExclusion(AbstractConstraint):
+ """Value must not fit the single constraint"""
+ def _testValue(self, value, idx):
+ try:
+ self._values[0](value, idx)
+ except error.ValueConstraintError:
+ return
+ else:
+ raise error.ValueConstraintError(value)
+
+ def _setValues(self, values):
+ if len(values) != 1:
+ raise error.PyAsn1Error('Single constraint expected')
+ AbstractConstraint._setValues(self, values)
+
+class AbstractConstraintSet(AbstractConstraint):
+ """Value must not satisfy the single constraint"""
+ def __getitem__(self, idx): return self._values[idx]
+
+ def __add__(self, value): return self.__class__(self, value)
+ def __radd__(self, value): return self.__class__(self, value)
+
+ def __len__(self): return len(self._values)
+
+ # Constraints inclusion in sets
+
+ def _setValues(self, values):
+ self._values = values
+ for v in values:
+ self._valueMap[v] = 1
+ self._valueMap.update(v.getValueMap())
+
+class ConstraintsIntersection(AbstractConstraintSet):
+ """Value must satisfy all constraints"""
+ def _testValue(self, value, idx):
+ for v in self._values:
+ v(value, idx)
+
+class ConstraintsUnion(AbstractConstraintSet):
+ """Value must satisfy at least one constraint"""
+ def _testValue(self, value, idx):
+ for v in self._values:
+ try:
+ v(value, idx)
+ except error.ValueConstraintError:
+ pass
+ else:
+ return
+ raise error.ValueConstraintError(
+ 'all of %s failed for \"%s\"' % (self._values, value)
+ )
+
+# XXX
+# add tests for type check
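+
+# A minimal usage sketch (editor's addition): a union constraint passes
+# if any member constraint passes.
+if __name__ == '__main__':
+    c = ConstraintsUnion(
+        ValueRangeConstraint(0, 9), SingleValueConstraint(255)
+    )
+    c(5); c(255)  # both pass silently
+    try:
+        c(10)
+    except error.ValueConstraintError:
+        pass
+    else:
+        raise AssertionError('constraint unexpectedly passed')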
diff --git a/python/pyasn1/pyasn1/type/error.py b/python/pyasn1/pyasn1/type/error.py
new file mode 100644
index 000000000..3e6848447
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/error.py
@@ -0,0 +1,3 @@
+from pyasn1.error import PyAsn1Error
+
+class ValueConstraintError(PyAsn1Error): pass
diff --git a/python/pyasn1/pyasn1/type/namedtype.py b/python/pyasn1/pyasn1/type/namedtype.py
new file mode 100644
index 000000000..48967a5fe
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/namedtype.py
@@ -0,0 +1,132 @@
+# NamedType specification for constructed types
+import sys
+from pyasn1.type import tagmap
+from pyasn1 import error
+
+class NamedType:
+ isOptional = 0
+ isDefaulted = 0
+ def __init__(self, name, t):
+ self.__name = name; self.__type = t
+ def __repr__(self): return '%s(%s, %s)' % (
+ self.__class__.__name__, self.__name, self.__type
+ )
+ def getType(self): return self.__type
+ def getName(self): return self.__name
+ def __getitem__(self, idx):
+ if idx == 0: return self.__name
+ if idx == 1: return self.__type
+ raise IndexError()
+
+class OptionalNamedType(NamedType):
+ isOptional = 1
+class DefaultedNamedType(NamedType):
+ isDefaulted = 1
+
+class NamedTypes:
+ def __init__(self, *namedTypes):
+ self.__namedTypes = namedTypes
+ self.__namedTypesLen = len(self.__namedTypes)
+ self.__minTagSet = None
+ self.__tagToPosIdx = {}; self.__nameToPosIdx = {}
+ self.__tagMap = { False: None, True: None }
+ self.__ambigiousTypes = {}
+
+ def __repr__(self):
+ r = '%s(' % self.__class__.__name__
+ for n in self.__namedTypes:
+ r = r + '%r, ' % (n,)
+ return r + ')'
+
+ def __getitem__(self, idx): return self.__namedTypes[idx]
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(self.__namedTypesLen)
+ else:
+ def __bool__(self): return bool(self.__namedTypesLen)
+ def __len__(self): return self.__namedTypesLen
+
+ def getTypeByPosition(self, idx):
+ if idx < 0 or idx >= self.__namedTypesLen:
+ raise error.PyAsn1Error('Type position out of range')
+ else:
+ return self.__namedTypes[idx].getType()
+
+ def getPositionByType(self, tagSet):
+ if not self.__tagToPosIdx:
+ idx = self.__namedTypesLen
+ while idx > 0:
+ idx = idx - 1
+ tagMap = self.__namedTypes[idx].getType().getTagMap()
+ for t in tagMap.getPosMap():
+ if t in self.__tagToPosIdx:
+ raise error.PyAsn1Error('Duplicate type %s' % (t,))
+ self.__tagToPosIdx[t] = idx
+ try:
+ return self.__tagToPosIdx[tagSet]
+ except KeyError:
+ raise error.PyAsn1Error('Type %s not found' % (tagSet,))
+
+ def getNameByPosition(self, idx):
+ try:
+ return self.__namedTypes[idx].getName()
+ except IndexError:
+ raise error.PyAsn1Error('Type position out of range')
+ def getPositionByName(self, name):
+ if not self.__nameToPosIdx:
+ idx = self.__namedTypesLen
+ while idx > 0:
+ idx = idx - 1
+ n = self.__namedTypes[idx].getName()
+ if n in self.__nameToPosIdx:
+ raise error.PyAsn1Error('Duplicate name %s' % (n,))
+ self.__nameToPosIdx[n] = idx
+ try:
+ return self.__nameToPosIdx[name]
+ except KeyError:
+ raise error.PyAsn1Error('Name %s not found' % (name,))
+
+ def __buildAmbigiousTagMap(self):
+ ambigiousTypes = ()
+ idx = self.__namedTypesLen
+ while idx > 0:
+ idx = idx - 1
+ t = self.__namedTypes[idx]
+ if t.isOptional or t.isDefaulted:
+ ambigiousTypes = (t, ) + ambigiousTypes
+ else:
+ ambigiousTypes = (t, )
+ self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes)
+
+ def getTagMapNearPosition(self, idx):
+ if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
+ try:
+ return self.__ambigiousTypes[idx].getTagMap()
+ except KeyError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def getPositionNearType(self, tagSet, idx):
+ if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
+ try:
+ return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet)
+ except KeyError:
+ raise error.PyAsn1Error('Type position out of range')
+
+ def genMinTagSet(self):
+ if self.__minTagSet is None:
+ for t in self.__namedTypes:
+ __type = t.getType()
+ tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)()
+ if self.__minTagSet is None or tagSet < self.__minTagSet:
+ self.__minTagSet = tagSet
+ return self.__minTagSet
+
+ def getTagMap(self, uniq=False):
+ if self.__tagMap[uniq] is None:
+ tagMap = tagmap.TagMap()
+ for nt in self.__namedTypes:
+ tagMap = tagMap.clone(
+ nt.getType(), nt.getType().getTagMap(), uniq
+ )
+ self.__tagMap[uniq] = tagMap
+ return self.__tagMap[uniq]
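+
+# Illustrative sketch (not part of the original module): NamedTypes maps
+# component names and tags to positions. Assuming the univ module's types:
+#
+#   nts = NamedTypes(NamedType('id', univ.Integer()),
+#                    OptionalNamedType('label', univ.OctetString()))
+#   nts.getPositionByName('label')  # -> 1
+#   nts.getNameByPosition(0)        # -> 'id'
+#   nts.getTypeByPosition(0)        # -> the Integer() template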
diff --git a/python/pyasn1/pyasn1/type/namedval.py b/python/pyasn1/pyasn1/type/namedval.py
new file mode 100644
index 000000000..d0fea7cc7
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/namedval.py
@@ -0,0 +1,46 @@
+# ASN.1 named integers
+from pyasn1 import error
+
+__all__ = [ 'NamedValues' ]
+
+class NamedValues:
+ def __init__(self, *namedValues):
+ self.nameToValIdx = {}; self.valToNameIdx = {}
+ self.namedValues = ()
+ automaticVal = 1
+ for namedValue in namedValues:
+ if isinstance(namedValue, tuple):
+ name, val = namedValue
+ else:
+ name = namedValue
+ val = automaticVal
+ if name in self.nameToValIdx:
+ raise error.PyAsn1Error('Duplicate name %s' % (name,))
+ self.nameToValIdx[name] = val
+ if val in self.valToNameIdx:
+ raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
+ self.valToNameIdx[val] = name
+ self.namedValues = self.namedValues + ((name, val),)
+ automaticVal = automaticVal + 1
+ def __str__(self): return str(self.namedValues)
+
+ def getName(self, value):
+ if value in self.valToNameIdx:
+ return self.valToNameIdx[value]
+
+ def getValue(self, name):
+ if name in self.nameToValIdx:
+ return self.nameToValIdx[name]
+
+ def __getitem__(self, i): return self.namedValues[i]
+ def __len__(self): return len(self.namedValues)
+
+ def __add__(self, namedValues):
+ return self.__class__(*self.namedValues + namedValues)
+ def __radd__(self, namedValues):
+ return self.__class__(*namedValues + tuple(self))
+
+ def clone(self, *namedValues):
+ return self.__class__(*tuple(self) + namedValues)
+
+# XXX clone/subtype?
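+
+# Illustrative sketch (not part of the original module): NamedValues gives a
+# two-way name <-> value mapping for INTEGER/ENUMERATED labels:
+#
+#   nv = NamedValues(('disabled', 0), ('enabled', 1))
+#   nv.getValue('enabled')    # -> 1
+#   nv.getName(0)             # -> 'disabled'
+#   nv.clone(('unknown', 2))  # new, extended NamedValues instance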
diff --git a/python/pyasn1/pyasn1/type/tag.py b/python/pyasn1/pyasn1/type/tag.py
new file mode 100644
index 000000000..1144907fa
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/tag.py
@@ -0,0 +1,122 @@
+# ASN.1 type tags
+from operator import getitem
+from pyasn1 import error
+
+tagClassUniversal = 0x00
+tagClassApplication = 0x40
+tagClassContext = 0x80
+tagClassPrivate = 0xC0
+
+tagFormatSimple = 0x00
+tagFormatConstructed = 0x20
+
+tagCategoryImplicit = 0x01
+tagCategoryExplicit = 0x02
+tagCategoryUntagged = 0x04
+
+class Tag:
+ def __init__(self, tagClass, tagFormat, tagId):
+ if tagId < 0:
+ raise error.PyAsn1Error(
+ 'Negative tag ID (%s) not allowed' % (tagId,)
+ )
+ self.__tag = (tagClass, tagFormat, tagId)
+ self.uniq = (tagClass, tagId)
+ self.__hashedUniqTag = hash(self.uniq)
+
+ def __repr__(self):
+ return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
+ (self.__class__.__name__,) + self.__tag
+ )
+ # This is really a hotspot -- expose a public "uniq" attribute to save on
+ # function calls
+ def __eq__(self, other): return self.uniq == other.uniq
+ def __ne__(self, other): return self.uniq != other.uniq
+ def __lt__(self, other): return self.uniq < other.uniq
+ def __le__(self, other): return self.uniq <= other.uniq
+ def __gt__(self, other): return self.uniq > other.uniq
+ def __ge__(self, other): return self.uniq >= other.uniq
+ def __hash__(self): return self.__hashedUniqTag
+ def __getitem__(self, idx): return self.__tag[idx]
+ def __and__(self, otherTag):
+ (tagClass, tagFormat, tagId) = otherTag
+ return self.__class__(
+ self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId
+ )
+ def __or__(self, otherTag):
+ (tagClass, tagFormat, tagId) = otherTag
+ return self.__class__(
+ self.__tag[0]|tagClass,
+ self.__tag[1]|tagFormat,
+ self.__tag[2]|tagId
+ )
+ def asTuple(self): return self.__tag # __getitem__() is slow
+
+class TagSet:
+ def __init__(self, baseTag=(), *superTags):
+ self.__baseTag = baseTag
+ self.__superTags = superTags
+ self.__hashedSuperTags = hash(superTags)
+ _uniq = ()
+ for t in superTags:
+ _uniq = _uniq + t.uniq
+ self.uniq = _uniq
+ self.__lenOfSuperTags = len(superTags)
+
+ def __repr__(self):
+ return '%s(%s)' % (
+ self.__class__.__name__,
+ ', '.join([repr(x) for x in self.__superTags])
+ )
+
+ def __add__(self, superTag):
+ return self.__class__(
+ self.__baseTag, *self.__superTags + (superTag,)
+ )
+ def __radd__(self, superTag):
+ return self.__class__(
+ self.__baseTag, *(superTag,) + self.__superTags
+ )
+
+ def tagExplicitly(self, superTag):
+ tagClass, tagFormat, tagId = superTag
+ if tagClass == tagClassUniversal:
+ raise error.PyAsn1Error(
+ 'Can\'t tag with UNIVERSAL-class tag'
+ )
+ if tagFormat != tagFormatConstructed:
+ superTag = Tag(tagClass, tagFormatConstructed, tagId)
+ return self + superTag
+
+ def tagImplicitly(self, superTag):
+ tagClass, tagFormat, tagId = superTag
+ if self.__superTags:
+ superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
+ return self[:-1] + superTag
+
+ def getBaseTag(self): return self.__baseTag
+ def __getitem__(self, idx):
+ if isinstance(idx, slice):
+ return self.__class__(
+ self.__baseTag, *getitem(self.__superTags, idx)
+ )
+ return self.__superTags[idx]
+ def __eq__(self, other): return self.uniq == other.uniq
+ def __ne__(self, other): return self.uniq != other.uniq
+ def __lt__(self, other): return self.uniq < other.uniq
+ def __le__(self, other): return self.uniq <= other.uniq
+ def __gt__(self, other): return self.uniq > other.uniq
+ def __ge__(self, other): return self.uniq >= other.uniq
+ def __hash__(self): return self.__hashedSuperTags
+ def __len__(self): return self.__lenOfSuperTags
+ def isSuperTagSetOf(self, tagSet):
+ if len(tagSet) < self.__lenOfSuperTags:
+ return
+ idx = self.__lenOfSuperTags - 1
+ while idx >= 0:
+ if self.__superTags[idx] != tagSet[idx]:
+ return
+ idx = idx - 1
+ return 1
+
+def initTagSet(tag): return TagSet(tag, tag)
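+
+# Illustrative sketch (not part of the original module): building tag sets.
+#
+#   intTag = Tag(tagClassUniversal, tagFormatSimple, 0x02)
+#   ts = initTagSet(intTag)  # base tag doubles as the outermost tag
+#   explicit = ts.tagExplicitly(Tag(tagClassContext, tagFormatSimple, 0))
+#   implicit = ts.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 1))
+#   len(explicit), len(implicit)  # -> (2, 1): explicit appends, implicit replaces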
diff --git a/python/pyasn1/pyasn1/type/tagmap.py b/python/pyasn1/pyasn1/type/tagmap.py
new file mode 100644
index 000000000..7cec3a10e
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/tagmap.py
@@ -0,0 +1,52 @@
+from pyasn1 import error
+
+class TagMap:
+ def __init__(self, posMap={}, negMap={}, defType=None):
+ self.__posMap = posMap.copy()
+ self.__negMap = negMap.copy()
+ self.__defType = defType
+
+ def __contains__(self, tagSet):
+ return tagSet in self.__posMap or \
+ self.__defType is not None and tagSet not in self.__negMap
+
+ def __getitem__(self, tagSet):
+ if tagSet in self.__posMap:
+ return self.__posMap[tagSet]
+ elif tagSet in self.__negMap:
+ raise error.PyAsn1Error('Key in negative map')
+ elif self.__defType is not None:
+ return self.__defType
+ else:
+ raise KeyError()
+
+ def __repr__(self):
+ s = '%r/%r' % (self.__posMap, self.__negMap)
+ if self.__defType is not None:
+ s = s + '/%r' % (self.__defType,)
+ return s
+
+ def clone(self, parentType, tagMap, uniq=False):
+ if self.__defType is not None and tagMap.getDef() is not None:
+ raise error.PyAsn1Error('Duplicate default value at %s' % (self,))
+ if tagMap.getDef() is not None:
+ defType = tagMap.getDef()
+ else:
+ defType = self.__defType
+
+ posMap = self.__posMap.copy()
+ for k in tagMap.getPosMap():
+ if uniq and k in posMap:
+ raise error.PyAsn1Error('Duplicate positive key %s' % (k,))
+ posMap[k] = parentType
+
+ negMap = self.__negMap.copy()
+ negMap.update(tagMap.getNegMap())
+
+ return self.__class__(
+ posMap, negMap, defType,
+ )
+
+ def getPosMap(self): return self.__posMap.copy()
+ def getNegMap(self): return self.__negMap.copy()
+ def getDef(self): return self.__defType
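+
+# Illustrative sketch (not part of the original module; someTagSet, someType,
+# parentType and otherTagMap are hypothetical names): a TagMap resolves a
+# TagSet to an ASN.1 type, with the negative map and default type backing the
+# "any other tag" lookup of untagged/open types.
+#
+#   tm = TagMap(posMap={someTagSet: someType})
+#   someTagSet in tm                   # -> True
+#   tm.clone(parentType, otherTagMap)  # merged copy; merged keys map to parentType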
diff --git a/python/pyasn1/pyasn1/type/univ.py b/python/pyasn1/pyasn1/type/univ.py
new file mode 100644
index 000000000..9cd16f8a2
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/univ.py
@@ -0,0 +1,1042 @@
+# ASN.1 "universal" data types
+import operator, sys
+from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
+from pyasn1.codec.ber import eoo
+from pyasn1.compat import octets
+from pyasn1 import error
+
+# "Simple" ASN.1 types (yet incomplete)
+
+class Integer(base.AbstractSimpleAsn1Item):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
+ )
+ namedValues = namedval.NamedValues()
+ def __init__(self, value=None, tagSet=None, subtypeSpec=None,
+ namedValues=None):
+ if namedValues is None:
+ self.__namedValues = self.namedValues
+ else:
+ self.__namedValues = namedValues
+ base.AbstractSimpleAsn1Item.__init__(
+ self, value, tagSet, subtypeSpec
+ )
+
+ def __and__(self, value): return self.clone(self._value & value)
+ def __rand__(self, value): return self.clone(value & self._value)
+ def __or__(self, value): return self.clone(self._value | value)
+ def __ror__(self, value): return self.clone(value | self._value)
+ def __xor__(self, value): return self.clone(self._value ^ value)
+ def __rxor__(self, value): return self.clone(value ^ self._value)
+ def __lshift__(self, value): return self.clone(self._value << value)
+ def __rshift__(self, value): return self.clone(self._value >> value)
+
+ def __add__(self, value): return self.clone(self._value + value)
+ def __radd__(self, value): return self.clone(value + self._value)
+ def __sub__(self, value): return self.clone(self._value - value)
+ def __rsub__(self, value): return self.clone(value - self._value)
+ def __mul__(self, value): return self.clone(self._value * value)
+ def __rmul__(self, value): return self.clone(value * self._value)
+ def __mod__(self, value): return self.clone(self._value % value)
+ def __rmod__(self, value): return self.clone(value % self._value)
+ def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo))
+ def __rpow__(self, value): return self.clone(pow(value, self._value))
+
+ if sys.version_info[0] <= 2:
+ def __div__(self, value): return self.clone(self._value // value)
+ def __rdiv__(self, value): return self.clone(value // self._value)
+ else:
+ def __truediv__(self, value): return self.clone(self._value / value)
+ def __rtruediv__(self, value): return self.clone(value / self._value)
+ def __divmod__(self, value): return self.clone(self._value // value)
+ def __rdivmod__(self, value): return self.clone(value // self._value)
+
+ __hash__ = base.AbstractSimpleAsn1Item.__hash__
+
+ def __int__(self): return int(self._value)
+ if sys.version_info[0] <= 2:
+ def __long__(self): return long(self._value)
+ def __float__(self): return float(self._value)
+ def __abs__(self): return abs(self._value)
+ def __index__(self): return int(self._value)
+
+ def __lt__(self, value): return self._value < value
+ def __le__(self, value): return self._value <= value
+ def __eq__(self, value): return self._value == value
+ def __ne__(self, value): return self._value != value
+ def __gt__(self, value): return self._value > value
+ def __ge__(self, value): return self._value >= value
+
+ def prettyIn(self, value):
+ if not isinstance(value, str):
+ try:
+ return int(value)
+ except:
+ raise error.PyAsn1Error(
+ 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
+ )
+ r = self.__namedValues.getValue(value)
+ if r is not None:
+ return r
+ try:
+ return int(value)
+ except:
+ raise error.PyAsn1Error(
+ 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
+ )
+
+ def prettyOut(self, value):
+ r = self.__namedValues.getName(value)
+ return r is None and str(value) or repr(r)
+
+ def getNamedValues(self): return self.__namedValues
+
+ def clone(self, value=None, tagSet=None, subtypeSpec=None,
+ namedValues=None):
+ if value is None and tagSet is None and subtypeSpec is None \
+ and namedValues is None:
+ return self
+ if value is None:
+ value = self._value
+ if tagSet is None:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ if namedValues is None:
+ namedValues = self.__namedValues
+ return self.__class__(value, tagSet, subtypeSpec, namedValues)
+
+ def subtype(self, value=None, implicitTag=None, explicitTag=None,
+ subtypeSpec=None, namedValues=None):
+ if value is None:
+ value = self._value
+ if implicitTag is not None:
+ tagSet = self._tagSet.tagImplicitly(implicitTag)
+ elif explicitTag is not None:
+ tagSet = self._tagSet.tagExplicitly(explicitTag)
+ else:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ else:
+ subtypeSpec = subtypeSpec + self._subtypeSpec
+ if namedValues is None:
+ namedValues = self.__namedValues
+ else:
+ namedValues = namedValues + self.__namedValues
+ return self.__class__(value, tagSet, subtypeSpec, namedValues)
+
+class Boolean(Integer):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01),
+ )
+ subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1)
+ namedValues = Integer.namedValues.clone(('False', 0), ('True', 1))
+
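+# Illustrative sketch (not part of the original module): the named-values map
+# above lets Boolean accept its labels as initializers:
+#
+#   Boolean('True') == 1  # -> True
+#   Boolean(0)            # prettyOut() renders it as 'False'
+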
+class BitString(base.AbstractSimpleAsn1Item):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
+ )
+ namedValues = namedval.NamedValues()
+ def __init__(self, value=None, tagSet=None, subtypeSpec=None,
+ namedValues=None):
+ if namedValues is None:
+ self.__namedValues = self.namedValues
+ else:
+ self.__namedValues = namedValues
+ base.AbstractSimpleAsn1Item.__init__(
+ self, value, tagSet, subtypeSpec
+ )
+
+ def clone(self, value=None, tagSet=None, subtypeSpec=None,
+ namedValues=None):
+ if value is None and tagSet is None and subtypeSpec is None \
+ and namedValues is None:
+ return self
+ if value is None:
+ value = self._value
+ if tagSet is None:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ if namedValues is None:
+ namedValues = self.__namedValues
+ return self.__class__(value, tagSet, subtypeSpec, namedValues)
+
+ def subtype(self, value=None, implicitTag=None, explicitTag=None,
+ subtypeSpec=None, namedValues=None):
+ if value is None:
+ value = self._value
+ if implicitTag is not None:
+ tagSet = self._tagSet.tagImplicitly(implicitTag)
+ elif explicitTag is not None:
+ tagSet = self._tagSet.tagExplicitly(explicitTag)
+ else:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ else:
+ subtypeSpec = subtypeSpec + self._subtypeSpec
+ if namedValues is None:
+ namedValues = self.__namedValues
+ else:
+ namedValues = namedValues + self.__namedValues
+ return self.__class__(value, tagSet, subtypeSpec, namedValues)
+
+ def __str__(self): return str(tuple(self))
+
+ # Immutable sequence object protocol
+
+ def __len__(self):
+ if self._len is None:
+ self._len = len(self._value)
+ return self._len
+ def __getitem__(self, i):
+ if isinstance(i, slice):
+ return self.clone(operator.getitem(self._value, i))
+ else:
+ return self._value[i]
+
+ def __add__(self, value): return self.clone(self._value + value)
+ def __radd__(self, value): return self.clone(value + self._value)
+ def __mul__(self, value): return self.clone(self._value * value)
+ def __rmul__(self, value): return self * value
+
+ def prettyIn(self, value):
+ r = []
+ if not value:
+ return ()
+ elif isinstance(value, str):
+ if value[0] == '\'':
+ if value[-2:] == '\'B':
+ for v in value[1:-2]:
+ if v == '0':
+ r.append(0)
+ elif v == '1':
+ r.append(1)
+ else:
+ raise error.PyAsn1Error(
+ 'Non-binary BIT STRING initializer %s' % (v,)
+ )
+ return tuple(r)
+ elif value[-2:] == '\'H':
+ for v in value[1:-2]:
+ i = 4
+ v = int(v, 16)
+ while i:
+ i = i - 1
+ r.append((v>>i)&0x01)
+ return tuple(r)
+ else:
+ raise error.PyAsn1Error(
+ 'Bad BIT STRING value notation %s' % (value,)
+ )
+ else:
+ for i in value.split(','):
+ j = self.__namedValues.getValue(i)
+ if j is None:
+ raise error.PyAsn1Error(
+ 'Unknown bit identifier \'%s\'' % (i,)
+ )
+ if j >= len(r):
+ r.extend([0]*(j-len(r)+1))
+ r[j] = 1
+ return tuple(r)
+ elif isinstance(value, (tuple, list)):
+ r = tuple(value)
+ for b in r:
+ if b and b != 1:
+ raise error.PyAsn1Error(
+ 'Non-binary BitString initializer \'%s\'' % (r,)
+ )
+ return r
+ elif isinstance(value, BitString):
+ return tuple(value)
+ else:
+ raise error.PyAsn1Error(
+ 'Bad BitString initializer type \'%s\'' % (value,)
+ )
+
+ def prettyOut(self, value):
+ return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
+
+class OctetString(base.AbstractSimpleAsn1Item):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
+ )
+ defaultBinValue = defaultHexValue = base.noValue
+ encoding = 'us-ascii'
+ def __init__(self, value=None, tagSet=None, subtypeSpec=None,
+ encoding=None, binValue=None, hexValue=None):
+ if encoding is None:
+ self._encoding = self.encoding
+ else:
+ self._encoding = encoding
+ if binValue is not None:
+ value = self.fromBinaryString(binValue)
+ if hexValue is not None:
+ value = self.fromHexString(hexValue)
+ if value is None or value is base.noValue:
+ value = self.defaultHexValue
+ if value is None or value is base.noValue:
+ value = self.defaultBinValue
+ self.__intValue = None
+ base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
+
+ def clone(self, value=None, tagSet=None, subtypeSpec=None,
+ encoding=None, binValue=None, hexValue=None):
+ if value is None and tagSet is None and subtypeSpec is None and \
+ encoding is None and binValue is None and hexValue is None:
+ return self
+ if value is None and binValue is None and hexValue is None:
+ value = self._value
+ if tagSet is None:
+ tagSet = self._tagSet
+ if subtypeSpec is None:
+ subtypeSpec = self._subtypeSpec
+ if encoding is None:
+ encoding = self._encoding
+ return self.__class__(
+ value, tagSet, subtypeSpec, encoding, binValue, hexValue
+ )
+
+ if sys.version_info[0] <= 2:
+ def prettyIn(self, value):
+ if isinstance(value, str):
+ return value
+ elif isinstance(value, (tuple, list)):
+ try:
+ return ''.join([ chr(x) for x in value ])
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Bad OctetString initializer \'%s\'' % (value,)
+ )
+ else:
+ return str(value)
+ else:
+ def prettyIn(self, value):
+ if isinstance(value, bytes):
+ return value
+ elif isinstance(value, OctetString):
+ return value.asOctets()
+ elif isinstance(value, (tuple, list, map)):
+ try:
+ return bytes(value)
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Bad OctetString initializer \'%s\'' % (value,)
+ )
+ else:
+ try:
+ return str(value).encode(self._encoding)
+ except UnicodeEncodeError:
+ raise error.PyAsn1Error(
+ 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
+ )
+
+ def fromBinaryString(self, value):
+ bitNo = 8; byte = 0; r = ()
+ for v in value:
+ if bitNo:
+ bitNo = bitNo - 1
+ else:
+ bitNo = 7
+ r = r + (byte,)
+ byte = 0
+ if v == '0':
+ v = 0
+ elif v == '1':
+ v = 1
+ else:
+ raise error.PyAsn1Error(
+ 'Non-binary OCTET STRING initializer %s' % (v,)
+ )
+ byte = byte | (v << bitNo)
+ return octets.ints2octs(r + (byte,))
+
+ def fromHexString(self, value):
+ r = p = ()
+ for v in value:
+ if p:
+ r = r + (int(p+v, 16),)
+ p = ()
+ else:
+ p = v
+ if p:
+ r = r + (int(p+'0', 16),)
+ return octets.ints2octs(r)
+
+ def prettyOut(self, value):
+ if sys.version_info[0] <= 2:
+ numbers = tuple([ ord(x) for x in value ])
+ else:
+ numbers = tuple(value)
+ if [ x for x in numbers if x < 32 or x > 126 ]:
+ return '0x' + ''.join([ '%.2x' % x for x in numbers ])
+ else:
+ return str(value)
+
+ def __repr__(self):
+ if self._value is base.noValue:
+ return self.__class__.__name__ + '()'
+ if [ x for x in self.asNumbers() if x < 32 or x > 126 ]:
+ return self.__class__.__name__ + '(hexValue=\'' + ''.join([ '%.2x' % x for x in self.asNumbers() ])+'\')'
+ else:
+ return self.__class__.__name__ + '(\'' + self.prettyOut(self._value) + '\')'
+
+ if sys.version_info[0] <= 2:
+ def __str__(self): return str(self._value)
+ def __unicode__(self):
+ return self._value.decode(self._encoding, 'ignore')
+ def asOctets(self): return self._value
+ def asNumbers(self):
+ if self.__intValue is None:
+ self.__intValue = tuple([ ord(x) for x in self._value ])
+ return self.__intValue
+ else:
+ def __str__(self): return self._value.decode(self._encoding, 'ignore')
+ def __bytes__(self): return self._value
+ def asOctets(self): return self._value
+ def asNumbers(self):
+ if self.__intValue is None:
+ self.__intValue = tuple(self._value)
+ return self.__intValue
+
+ # Immutable sequence object protocol
+
+ def __len__(self):
+ if self._len is None:
+ self._len = len(self._value)
+ return self._len
+ def __getitem__(self, i):
+ if isinstance(i, slice):
+ return self.clone(operator.getitem(self._value, i))
+ else:
+ return self._value[i]
+
+ def __add__(self, value): return self.clone(self._value + self.prettyIn(value))
+ def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
+ def __mul__(self, value): return self.clone(self._value * value)
+ def __rmul__(self, value): return self * value
+
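+# Illustrative sketch (not part of the original module): the binValue/hexValue
+# initializers pack bit and hex strings into octets:
+#
+#   OctetString(hexValue='fa60')  # two octets, 0xfa 0x60
+#   OctetString(binValue='1010')  # one octet, 0xa0 (high bits first)
+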
+class Null(OctetString):
+ defaultValue = ''.encode() # This is tightly constrained
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
+ )
+ subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode())
+
+if sys.version_info[0] <= 2:
+ intTypes = (int, long)
+else:
+ intTypes = int
+
+class ObjectIdentifier(base.AbstractSimpleAsn1Item):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
+ )
+ def __add__(self, other): return self.clone(self._value + other)
+ def __radd__(self, other): return self.clone(other + self._value)
+
+ def asTuple(self): return self._value
+
+ # Sequence object protocol
+
+ def __len__(self):
+ if self._len is None:
+ self._len = len(self._value)
+ return self._len
+ def __getitem__(self, i):
+ if isinstance(i, slice):
+ return self.clone(
+ operator.getitem(self._value, i)
+ )
+ else:
+ return self._value[i]
+
+ def __str__(self): return self.prettyPrint()
+
+ def index(self, suboid): return self._value.index(suboid)
+
+ def isPrefixOf(self, value):
+ """Returns true if argument OID resides deeper in the OID tree"""
+ l = len(self)
+ if l <= len(value):
+ if self._value[:l] == value[:l]:
+ return 1
+ return 0
+
+ def prettyIn(self, value):
+ """Dotted -> tuple of numerics OID converter"""
+ if isinstance(value, tuple):
+ pass
+ elif isinstance(value, ObjectIdentifier):
+ return tuple(value)
+ elif isinstance(value, str):
+ r = []
+ for element in [ x for x in value.split('.') if x != '' ]:
+ try:
+ r.append(int(element, 0))
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Malformed Object ID %s at %s: %s' %
+ (str(value), self.__class__.__name__, sys.exc_info()[1])
+ )
+ value = tuple(r)
+ else:
+ try:
+ value = tuple(value)
+ except TypeError:
+ raise error.PyAsn1Error(
+ 'Malformed Object ID %s at %s: %s' %
+ (str(value), self.__class__.__name__,sys.exc_info()[1])
+ )
+
+ for x in value:
+ if not isinstance(x, intTypes) or x < 0:
+ raise error.PyAsn1Error(
+ 'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__)
+ )
+
+ return value
+
+ def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
+
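+# Illustrative sketch (not part of the original module): OIDs convert between
+# dotted notation and tuples of numeric sub-IDs:
+#
+#   oid = ObjectIdentifier('1.3.6.1')
+#   tuple(oid)                                     # -> (1, 3, 6, 1)
+#   oid.isPrefixOf(ObjectIdentifier('1.3.6.1.2'))  # -> 1
+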
+class Real(base.AbstractSimpleAsn1Item):
+ try:
+ _plusInf = float('inf')
+ _minusInf = float('-inf')
+ _inf = (_plusInf, _minusInf)
+ except ValueError:
+ # Infinity support is platform and Python dependent
+ _plusInf = _minusInf = None
+ _inf = ()
+
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
+ )
+
+ def __normalizeBase10(self, value):
+ m, b, e = value
+ while m and m % 10 == 0:
+ m = m // 10  # exact since m % 10 == 0; keeps the mantissa an int on Python 3
+ e = e + 1
+ return m, b, e
+
+ def prettyIn(self, value):
+ if isinstance(value, tuple) and len(value) == 3:
+ for d in value:
+ if not isinstance(d, intTypes):
+ raise error.PyAsn1Error(
+ 'Lame Real value syntax: %s' % (value,)
+ )
+ if value[1] not in (2, 10):
+ raise error.PyAsn1Error(
+ 'Prohibited base for Real value: %s' % (value[1],)
+ )
+ if value[1] == 10:
+ value = self.__normalizeBase10(value)
+ return value
+ elif isinstance(value, intTypes):
+ return self.__normalizeBase10((value, 10, 0))
+ elif isinstance(value, float):
+ if self._inf and value in self._inf:
+ return value
+ else:
+ e = 0
+ while int(value) != value:
+ value = value * 10
+ e = e - 1
+ return self.__normalizeBase10((int(value), 10, e))
+ elif isinstance(value, Real):
+ return tuple(value)
+ elif isinstance(value, str): # handle infinity literals like 'inf'/'-inf'
+ try:
+ return float(value)
+ except ValueError:
+ pass
+ raise error.PyAsn1Error(
+ 'Bad real value syntax: %s' % (value,)
+ )
+
+ def prettyOut(self, value):
+ if value in self._inf:
+ return '\'%s\'' % value
+ else:
+ return str(value)
+
+ def isPlusInfinity(self): return self._value == self._plusInf
+ def isMinusInfinity(self): return self._value == self._minusInf
+ def isInfinity(self): return self._value in self._inf
+
+ def __str__(self): return str(float(self))
+
+ def __add__(self, value): return self.clone(float(self) + value)
+ def __radd__(self, value): return self + value
+ def __mul__(self, value): return self.clone(float(self) * value)
+ def __rmul__(self, value): return self * value
+ def __sub__(self, value): return self.clone(float(self) - value)
+ def __rsub__(self, value): return self.clone(value - float(self))
+ def __mod__(self, value): return self.clone(float(self) % value)
+ def __rmod__(self, value): return self.clone(value % float(self))
+ def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo))
+ def __rpow__(self, value): return self.clone(pow(value, float(self)))
+
+ if sys.version_info[0] <= 2:
+ def __div__(self, value): return self.clone(float(self) / value)
+ def __rdiv__(self, value): return self.clone(value / float(self))
+ else:
+ def __truediv__(self, value): return self.clone(float(self) / value)
+ def __rtruediv__(self, value): return self.clone(value / float(self))
+ def __divmod__(self, value): return self.clone(float(self) // value)
+ def __rdivmod__(self, value): return self.clone(value // float(self))
+
+ def __int__(self): return int(float(self))
+ if sys.version_info[0] <= 2:
+ def __long__(self): return long(float(self))
+ def __float__(self):
+ if self._value in self._inf:
+ return self._value
+ else:
+ return float(
+ self._value[0] * pow(self._value[1], self._value[2])
+ )
+ def __abs__(self): return abs(float(self))
+
+ def __lt__(self, value): return float(self) < value
+ def __le__(self, value): return float(self) <= value
+ def __eq__(self, value): return float(self) == value
+ def __ne__(self, value): return float(self) != value
+ def __gt__(self, value): return float(self) > value
+ def __ge__(self, value): return float(self) >= value
+
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(float(self))
+ else:
+ def __bool__(self): return bool(float(self))
+ __hash__ = base.AbstractSimpleAsn1Item.__hash__
+
+ def __getitem__(self, idx):
+ if self._value in self._inf:
+ raise error.PyAsn1Error('Invalid infinite value operation')
+ else:
+ return self._value[idx]
+
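+# Illustrative sketch (not part of the original module): Real values are kept
+# as (mantissa, base, exponent) triples, normalized in base 10:
+#
+#   Real((120, 10, 0))        # stored as (12, 10, 1)
+#   float(Real((1, 10, 3)))   # -> 1000.0
+#   Real('inf').isInfinity()  # -> True, where the platform supports infinities
+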
+class Enumerated(Integer):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A)
+ )
+
+# "Structured" ASN.1 types
+
+class SetOf(base.AbstractConstructedAsn1Item):
+ componentType = None
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ )
+ typeId = 1
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ idx = 0; l = len(self._componentValues)
+ while idx < l:
+ c = self._componentValues[idx]
+ if c is not None:
+ if isinstance(c, base.AbstractConstructedAsn1Item):
+ myClone.setComponentByPosition(
+ idx, c.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByPosition(idx, c.clone())
+ idx = idx + 1
+
+ def _verifyComponent(self, idx, value):
+ if self._componentType is not None and \
+ not self._componentType.isSuperTypeOf(value):
+ raise error.PyAsn1Error('Component type error %s' % (value,))
+
+ def getComponentByPosition(self, idx): return self._componentValues[idx]
+ def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
+ l = len(self._componentValues)
+ if idx >= l:
+ self._componentValues = self._componentValues + (idx-l+1)*[None]
+ if value is None:
+ if self._componentValues[idx] is None:
+ if self._componentType is None:
+ raise error.PyAsn1Error('Component type not defined')
+ self._componentValues[idx] = self._componentType.clone()
+ self._componentValuesSet = self._componentValuesSet + 1
+ return self
+ elif not isinstance(value, base.Asn1Item):
+ if self._componentType is None:
+ raise error.PyAsn1Error('Component type not defined')
+ if isinstance(self._componentType, base.AbstractSimpleAsn1Item):
+ value = self._componentType.clone(value=value)
+ else:
+ raise error.PyAsn1Error('Instance value required')
+ if verifyConstraints:
+ if self._componentType is not None:
+ self._verifyComponent(idx, value)
+ self._verifySubtypeSpec(value, idx)
+ if self._componentValues[idx] is None:
+ self._componentValuesSet = self._componentValuesSet + 1
+ self._componentValues[idx] = value
+ return self
+
+ def getComponentTagMap(self):
+ if self._componentType is not None:
+ return self._componentType.getTagMap()
+
+ def prettyPrint(self, scope=0):
+ scope = scope + 1
+ r = self.__class__.__name__ + ':\n'
+ for idx in range(len(self._componentValues)):
+ r = r + ' '*scope
+ if self._componentValues[idx] is None:
+ r = r + '<empty>'
+ else:
+ r = r + self._componentValues[idx].prettyPrint(scope)
+ return r
+
+class SequenceOf(SetOf):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ )
+ typeId = 2
+
+class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
+ componentType = namedtype.NamedTypes()
+ def __init__(self, componentType=None, tagSet=None,
+ subtypeSpec=None, sizeSpec=None):
+ base.AbstractConstructedAsn1Item.__init__(
+ self, componentType, tagSet, subtypeSpec, sizeSpec
+ )
+ if self._componentType is None:
+ self._componentTypeLen = 0
+ else:
+ self._componentTypeLen = len(self._componentType)
+
+ def __getitem__(self, idx):
+ if isinstance(idx, str):
+ return self.getComponentByName(idx)
+ else:
+ return base.AbstractConstructedAsn1Item.__getitem__(self, idx)
+
+ def __setitem__(self, idx, value):
+ if isinstance(idx, str):
+ self.setComponentByName(idx, value)
+ else:
+ base.AbstractConstructedAsn1Item.__setitem__(self, idx, value)
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ idx = 0; l = len(self._componentValues)
+ while idx < l:
+ c = self._componentValues[idx]
+ if c is not None:
+ if isinstance(c, base.AbstractConstructedAsn1Item):
+ myClone.setComponentByPosition(
+ idx, c.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByPosition(idx, c.clone())
+ idx = idx + 1
+
+ def _verifyComponent(self, idx, value):
+ if idx >= self._componentTypeLen:
+ raise error.PyAsn1Error(
+ 'Component position out of range'
+ )
+ t = self._componentType[idx].getType()
+ if not t.isSuperTypeOf(value):
+ raise error.PyAsn1Error('Component type error %r vs %r' % (t, value))
+
+ def getComponentByName(self, name):
+ return self.getComponentByPosition(
+ self._componentType.getPositionByName(name)
+ )
+ def setComponentByName(self, name, value=None, verifyConstraints=True):
+ return self.setComponentByPosition(
+ self._componentType.getPositionByName(name), value,
+ verifyConstraints
+ )
+
+ def getComponentByPosition(self, idx):
+ try:
+ return self._componentValues[idx]
+ except IndexError:
+ if idx < self._componentTypeLen:
+ return
+ raise
+ def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
+ l = len(self._componentValues)
+ if idx >= l:
+ self._componentValues = self._componentValues + (idx-l+1)*[None]
+ if value is None:
+ if self._componentValues[idx] is None:
+ self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
+ self._componentValuesSet = self._componentValuesSet + 1
+ return self
+ elif not isinstance(value, base.Asn1Item):
+ t = self._componentType.getTypeByPosition(idx)
+ if isinstance(t, base.AbstractSimpleAsn1Item):
+ value = t.clone(value=value)
+ else:
+ raise error.PyAsn1Error('Instance value required')
+ if verifyConstraints:
+ if self._componentTypeLen:
+ self._verifyComponent(idx, value)
+ self._verifySubtypeSpec(value, idx)
+ if self._componentValues[idx] is None:
+ self._componentValuesSet = self._componentValuesSet + 1
+ self._componentValues[idx] = value
+ return self
+
+ def getNameByPosition(self, idx):
+ if self._componentTypeLen:
+ return self._componentType.getNameByPosition(idx)
+
+ def getDefaultComponentByPosition(self, idx):
+ if self._componentTypeLen and self._componentType[idx].isDefaulted:
+ return self._componentType[idx].getType()
+
+ def getComponentType(self):
+ if self._componentTypeLen:
+ return self._componentType
+
+ def setDefaultComponents(self):
+ if self._componentTypeLen == self._componentValuesSet:
+ return
+ idx = self._componentTypeLen
+ while idx:
+ idx = idx - 1
+ if self._componentType[idx].isDefaulted:
+ if self.getComponentByPosition(idx) is None:
+ self.setComponentByPosition(idx)
+ elif not self._componentType[idx].isOptional:
+ if self.getComponentByPosition(idx) is None:
+ raise error.PyAsn1Error(
+ 'Uninitialized component #%s at %r' % (idx, self)
+ )
+
+ def prettyPrint(self, scope=0):
+ scope = scope + 1
+ r = self.__class__.__name__ + ':\n'
+ for idx in range(len(self._componentValues)):
+ if self._componentValues[idx] is not None:
+ r = r + ' '*scope
+ componentType = self.getComponentType()
+ if componentType is None:
+ r = r + '<no-name>'
+ else:
+ r = r + componentType.getNameByPosition(idx)
+ r = '%s=%s\n' % (
+ r, self._componentValues[idx].prettyPrint(scope)
+ )
+ return r
+
+class Sequence(SequenceAndSetBase):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ )
+ typeId = 3
+
+ def getComponentTagMapNearPosition(self, idx):
+ if self._componentType:
+ return self._componentType.getTagMapNearPosition(idx)
+
+ def getComponentPositionNearType(self, tagSet, idx):
+ if self._componentType:
+ return self._componentType.getPositionNearType(tagSet, idx)
+ else:
+ return idx
+
+class Set(SequenceAndSetBase):
+ tagSet = baseTagSet = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ )
+ typeId = 4
+
+ def getComponent(self, innerFlag=0): return self
+
+ def getComponentByType(self, tagSet, innerFlag=0):
+ c = self.getComponentByPosition(
+ self._componentType.getPositionByType(tagSet)
+ )
+ if innerFlag and isinstance(c, Set):
+ # get inner component by inner tagSet
+ return c.getComponent(1)
+ else:
+ # get outer component by inner tagSet
+ return c
+
+ def setComponentByType(self, tagSet, value=None, innerFlag=0,
+ verifyConstraints=True):
+ idx = self._componentType.getPositionByType(tagSet)
+ t = self._componentType.getTypeByPosition(idx)
+ if innerFlag: # set inner component by inner tagSet
+ if t.getTagSet():
+ return self.setComponentByPosition(
+ idx, value, verifyConstraints
+ )
+ else:
+ t = self.setComponentByPosition(idx).getComponentByPosition(idx)
+ return t.setComponentByType(
+ tagSet, value, innerFlag, verifyConstraints
+ )
+ else: # set outer component by inner tagSet
+ return self.setComponentByPosition(
+ idx, value, verifyConstraints
+ )
+
+ def getComponentTagMap(self):
+ if self._componentType:
+ return self._componentType.getTagMap(True)
+
+ def getComponentPositionByType(self, tagSet):
+ if self._componentType:
+ return self._componentType.getPositionByType(tagSet)
+
+class Choice(Set):
+ tagSet = baseTagSet = tag.TagSet() # untagged
+ sizeSpec = constraint.ConstraintsIntersection(
+ constraint.ValueSizeConstraint(1, 1)
+ )
+ typeId = 5
+ _currentIdx = None
+
+ def __eq__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] == other
+ return NotImplemented
+ def __ne__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] != other
+ return NotImplemented
+ def __lt__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] < other
+ return NotImplemented
+ def __le__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] <= other
+ return NotImplemented
+ def __gt__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] > other
+ return NotImplemented
+ def __ge__(self, other):
+ if self._componentValues:
+ return self._componentValues[self._currentIdx] >= other
+ return NotImplemented
+ if sys.version_info[0] <= 2:
+ def __nonzero__(self): return bool(self._componentValues)
+ else:
+ def __bool__(self): return bool(self._componentValues)
+
+ def __len__(self): return self._currentIdx is not None and 1 or 0
+
+ def verifySizeSpec(self):
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+ else:
+ self._sizeSpec(' ')
+
+ def _cloneComponentValues(self, myClone, cloneValueFlag):
+ try:
+ c = self.getComponent()
+ except error.PyAsn1Error:
+ pass
+ else:
+ if isinstance(c, Choice):
+ tagSet = c.getEffectiveTagSet()
+ else:
+ tagSet = c.getTagSet()
+ if isinstance(c, base.AbstractConstructedAsn1Item):
+ myClone.setComponentByType(
+ tagSet, c.clone(cloneValueFlag=cloneValueFlag)
+ )
+ else:
+ myClone.setComponentByType(tagSet, c.clone())
+
+ def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
+ l = len(self._componentValues)
+ if idx >= l:
+ self._componentValues = self._componentValues + (idx-l+1)*[None]
+ if self._currentIdx is not None:
+ self._componentValues[self._currentIdx] = None
+ if value is None:
+ if self._componentValues[idx] is None:
+ self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
+ self._componentValuesSet = 1
+ self._currentIdx = idx
+ return self
+ elif not isinstance(value, base.Asn1Item):
+ value = self._componentType.getTypeByPosition(idx).clone(
+ value=value
+ )
+ if verifyConstraints:
+ if self._componentTypeLen:
+ self._verifyComponent(idx, value)
+ self._verifySubtypeSpec(value, idx)
+ self._componentValues[idx] = value
+ self._currentIdx = idx
+ self._componentValuesSet = 1
+ return self
+
+ def getMinTagSet(self):
+ if self._tagSet:
+ return self._tagSet
+ else:
+ return self._componentType.genMinTagSet()
+
+ def getEffectiveTagSet(self):
+ if self._tagSet:
+ return self._tagSet
+ else:
+ c = self.getComponent()
+ if isinstance(c, Choice):
+ return c.getEffectiveTagSet()
+ else:
+ return c.getTagSet()
+
+ def getTagMap(self):
+ if self._tagSet:
+ return Set.getTagMap(self)
+ else:
+ return Set.getComponentTagMap(self)
+
+ def getComponent(self, innerFlag=0):
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+ else:
+ c = self._componentValues[self._currentIdx]
+ if innerFlag and isinstance(c, Choice):
+ return c.getComponent(innerFlag)
+ else:
+ return c
+
+ def getName(self, innerFlag=0):
+ if self._currentIdx is None:
+ raise error.PyAsn1Error('Component not chosen')
+ else:
+ if innerFlag:
+ c = self._componentValues[self._currentIdx]
+ if isinstance(c, Choice):
+ return c.getName(innerFlag)
+ return self._componentType.getNameByPosition(self._currentIdx)
+
+ def setDefaultComponents(self): pass
+
+class Any(OctetString):
+ tagSet = baseTagSet = tag.TagSet() # untagged
+ typeId = 6
+
+ def getTagMap(self):
+ return tagmap.TagMap(
+ { self.getTagSet(): self },
+ { eoo.endOfOctets.getTagSet(): eoo.endOfOctets },
+ self
+ )
+
+# XXX
+# coercion rules?
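+
+# Illustrative sketch (not part of the original module): putting the pieces
+# together with a guided Sequence:
+#
+#   from pyasn1.type import univ, namedtype
+#   person = univ.Sequence(componentType=namedtype.NamedTypes(
+#       namedtype.NamedType('age', univ.Integer())))
+#   person.setComponentByName('age', 33)
+#   person.getComponentByName('age')  # -> Integer(33)
+#   univ.Integer(12) + 1              # arithmetic returns clones: Integer(13)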
diff --git a/python/pyasn1/pyasn1/type/useful.py b/python/pyasn1/pyasn1/type/useful.py
new file mode 100644
index 000000000..a7139c22c
--- /dev/null
+++ b/python/pyasn1/pyasn1/type/useful.py
@@ -0,0 +1,12 @@
+# ASN.1 "useful" types
+from pyasn1.type import char, tag
+
+class GeneralizedTime(char.VisibleString):
+ tagSet = char.VisibleString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
+ )
+
+class UTCTime(char.VisibleString):
+ tagSet = char.VisibleString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
+ )
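+
+# Illustrative note (not part of the original module): both types carry their
+# time as VisibleString text, e.g. GeneralizedTime('20120101120000Z').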
diff --git a/python/pyasn1/setup.cfg b/python/pyasn1/setup.cfg
new file mode 100644
index 000000000..861a9f554
--- /dev/null
+++ b/python/pyasn1/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/pyasn1/setup.py b/python/pyasn1/setup.py
new file mode 100644
index 000000000..194f0c8ca
--- /dev/null
+++ b/python/pyasn1/setup.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+"""ASN.1 types and codecs
+
+ A pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208).
+"""
+
+import os
+import sys
+
+classifiers = """\
+Development Status :: 5 - Production/Stable
+Environment :: Console
+Intended Audience :: Developers
+Intended Audience :: Education
+Intended Audience :: Information Technology
+Intended Audience :: Science/Research
+Intended Audience :: System Administrators
+Intended Audience :: Telecommunications Industry
+License :: OSI Approved :: BSD License
+Natural Language :: English
+Operating System :: OS Independent
+Programming Language :: Python :: 2
+Programming Language :: Python :: 3
+Topic :: Communications
+Topic :: Security :: Cryptography
+Topic :: Software Development :: Libraries :: Python Modules
+"""
+
+def howto_install_distribute():
+ print("""
+ Error: You need the distribute Python package!
+
+ It's very easy to install: just type (as root on Linux):
+
+ wget http://python-distribute.org/distribute_setup.py
+ python distribute_setup.py
+
+ Then you can build eggs from this package.
+""")
+
+def howto_install_setuptools():
+ print("""
+ Error: You need the setuptools Python package!
+
+ It's very easy to install: just type (as root on Linux):
+
+ wget http://peak.telecommunity.com/dist/ez_setup.py
+ python ez_setup.py
+
+ Then you can build eggs from this package.
+""")
+
+try:
+ from setuptools import setup, Command
+ params = {
+ 'zip_safe': True
+ }
+except ImportError:
+ for arg in sys.argv:
+ if arg.find('egg') != -1:
+ if sys.version_info[0] > 2:
+ howto_install_distribute()
+ else:
+ howto_install_setuptools()
+ sys.exit(1)
+ from distutils.core import setup, Command
+ params = {}
+
+doclines = [ x.strip() for x in __doc__.split('\n') if x ]
+
+params.update( {
+ 'name': 'pyasn1',
+ 'version': open(os.path.join('pyasn1','__init__.py')).read().split('\'')[1],
+ 'description': doclines[0],
+ 'long_description': ' '.join(doclines[1:]),
+ 'maintainer': 'Ilya Etingof <ilya@glas.net>',
+ 'author': 'Ilya Etingof',
+ 'author_email': 'ilya@glas.net',
+ 'url': 'http://sourceforge.net/projects/pyasn1/',
+ 'platforms': ['any'],
+ 'classifiers': [ x for x in classifiers.split('\n') if x ],
+ 'license': 'BSD',
+ 'packages': [ 'pyasn1',
+ 'pyasn1.type',
+ 'pyasn1.compat',
+ 'pyasn1.codec',
+ 'pyasn1.codec.ber',
+ 'pyasn1.codec.cer',
+ 'pyasn1.codec.der' ]
+} )
+
+# handle unittest discovery feature
+if sys.version_info[0:2] < (2, 7) or \
+ sys.version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ unittest = None
+else:
+ import unittest
+
+if unittest:
+ class PyTest(Command):
+ user_options = []
+
+ def initialize_options(self): pass
+ def finalize_options(self): pass
+
+ def run(self):
+ suite = unittest.defaultTestLoader.discover('.')
+ unittest.TextTestRunner(verbosity=2).run(suite)
+
+ params['cmdclass'] = { 'test': PyTest }
+
+setup(**params)
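+
+# Illustrative note (not part of the original file): with the PyTest command
+# registered above, "python setup.py test" discovers and runs the test suite;
+# "python setup.py install" works as with any distutils/setuptools package.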
diff --git a/python/pyasn1/test/__init__.py b/python/pyasn1/test/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/codec/__init__.py b/python/pyasn1/test/codec/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/codec/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/codec/ber/__init__.py b/python/pyasn1/test/codec/ber/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/codec/ber/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/codec/ber/suite.py b/python/pyasn1/test/codec/ber/suite.py
new file mode 100644
index 000000000..796c526b4
--- /dev/null
+++ b/python/pyasn1/test/codec/ber/suite.py
@@ -0,0 +1,22 @@
+from sys import path, version_info
+from os.path import sep
+path.insert(1, path[0]+sep+'ber')
+import test_encoder, test_decoder
+from pyasn1.error import PyAsn1Error
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+loader = unittest.TestLoader()
+for m in (test_encoder, test_decoder):
+ suite.addTest(loader.loadTestsFromModule(m))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
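+
+# Illustrative note (not part of the original file): running this module
+# directly ("python suite.py") loads test_encoder and test_decoder from the
+# sibling directory and runs both under a verbosity-2 text runner.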
diff --git a/python/pyasn1/test/codec/ber/test_decoder.py b/python/pyasn1/test/codec/ber/test_decoder.py
new file mode 100644
index 000000000..36999e84d
--- /dev/null
+++ b/python/pyasn1/test/codec/ber/test_decoder.py
@@ -0,0 +1,535 @@
+from pyasn1.type import tag, namedtype, univ
+from pyasn1.codec.ber import decoder
+from pyasn1.compat.octets import ints2octs, str2octs, null
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class LargeTagDecoderTestCase(unittest.TestCase):
+ def testLargeTag(self):
+ assert decoder.decode(ints2octs((127, 141, 245, 182, 253, 47, 3, 2, 1, 1))) == (1, null)
+
+class IntegerDecoderTestCase(unittest.TestCase):
+ def testPosInt(self):
+ assert decoder.decode(ints2octs((2, 1, 12))) == (12, null)
+ def testNegInt(self):
+ assert decoder.decode(ints2octs((2, 1, 244))) == (-12, null)
+ def testZero(self):
+ assert decoder.decode(ints2octs((2, 0))) == (0, null)
+ def testZeroLong(self):
+ assert decoder.decode(ints2octs((2, 1, 0))) == (0, null)
+ def testMinusOne(self):
+ assert decoder.decode(ints2octs((2, 1, 255))) == (-1, null)
+ def testPosLong(self):
+ assert decoder.decode(
+ ints2octs((2, 9, 0, 255, 255, 255, 255, 255, 255, 255, 255))
+ ) == (0xffffffffffffffff, null)
+ def testNegLong(self):
+ assert decoder.decode(
+ ints2octs((2, 9, 255, 0, 0, 0, 0, 0, 0, 0, 1))
+ ) == (-0xffffffffffffffff, null)
+ def testSpec(self):
+ try:
+ decoder.decode(
+ ints2octs((2, 1, 12)), asn1Spec=univ.Null()
+ )
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong asn1Spec worked out'
+ assert decoder.decode(
+ ints2octs((2, 1, 12)), asn1Spec=univ.Integer()
+ ) == (12, null)
+ def testTagFormat(self):
+ try:
+ decoder.decode(ints2octs((34, 1, 12)))
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class BooleanDecoderTestCase(unittest.TestCase):
+ def testTrue(self):
+ assert decoder.decode(ints2octs((1, 1, 1))) == (1, null)
+ def testTrueNeg(self):
+ assert decoder.decode(ints2octs((1, 1, 255))) == (1, null)
+ def testExtraTrue(self):
+ assert decoder.decode(ints2octs((1, 1, 1, 0, 120, 50, 50))) == (1, ints2octs((0, 120, 50, 50)))
+ def testFalse(self):
+ assert decoder.decode(ints2octs((1, 1, 0))) == (0, null)
+ def testTagFormat(self):
+ try:
+ decoder.decode(ints2octs((33, 1, 1)))
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class BitStringDecoderTestCase(unittest.TestCase):
+ def testDefMode(self):
+ assert decoder.decode(
+ ints2octs((3, 3, 1, 169, 138))
+ ) == ((1,0,1,0,1,0,0,1,1,0,0,0,1,0,1), null)
+ def testIndefMode(self):
+ assert decoder.decode(
+ ints2octs((3, 3, 1, 169, 138))
+ ) == ((1,0,1,0,1,0,0,1,1,0,0,0,1,0,1), null)
+ def testDefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((35, 8, 3, 2, 0, 169, 3, 2, 1, 138))
+ ) == ((1,0,1,0,1,0,0,1,1,0,0,0,1,0,1), null)
+ def testIndefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((35, 128, 3, 2, 0, 169, 3, 2, 1, 138, 0, 0))
+ ) == ((1,0,1,0,1,0,0,1,1,0,0,0,1,0,1), null)
+ def testDefModeChunkedSubst(self):
+ assert decoder.decode(
+ ints2octs((35, 8, 3, 2, 0, 169, 3, 2, 1, 138)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((3, 2, 0, 169, 3, 2, 1, 138)), 8)
+ def testIndefModeChunkedSubst(self):
+ assert decoder.decode(
+ ints2octs((35, 128, 3, 2, 0, 169, 3, 2, 1, 138, 0, 0)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((3, 2, 0, 169, 3, 2, 1, 138, 0, 0)), -1)
+
+class OctetStringDecoderTestCase(unittest.TestCase):
+ def testDefMode(self):
+ assert decoder.decode(
+ ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+ ) == (str2octs('Quick brown fox'), null)
+ def testIndefMode(self):
+ assert decoder.decode(
+ ints2octs((36, 128, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120, 0, 0))
+ ) == (str2octs('Quick brown fox'), null)
+ def testDefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((36, 23, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120))
+ ) == (str2octs('Quick brown fox'), null)
+ def testIndefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((36, 128, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0))
+ ) == (str2octs('Quick brown fox'), null)
+ def testDefModeChunkedSubst(self):
+ assert decoder.decode(
+ ints2octs((36, 23, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120)), 23)
+ def testIndefModeChunkedSubst(self):
+ assert decoder.decode(
+ ints2octs((36, 128, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0)), -1)
+
+class ExpTaggedOctetStringDecoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.o = univ.OctetString(
+ 'Quick brown fox',
+ tagSet=univ.OctetString.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 5)
+ ))
+
+ def testDefMode(self):
+ assert self.o.isSameTypeWith(decoder.decode(
+ ints2octs((101, 17, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+ )[0])
+
+ def testIndefMode(self):
+ v, s = decoder.decode(ints2octs((101, 128, 36, 128, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120, 0, 0, 0, 0)))
+ assert self.o.isSameTypeWith(v)
+ assert not s
+
+ def testDefModeChunked(self):
+ v, s = decoder.decode(ints2octs((101, 25, 36, 23, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120)))
+ assert self.o.isSameTypeWith(v)
+ assert not s
+
+ def testIndefModeChunked(self):
+ v, s = decoder.decode(ints2octs((101, 128, 36, 128, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0, 0, 0)))
+ assert self.o.isSameTypeWith(v)
+ assert not s
+
+ def testDefModeSubst(self):
+ assert decoder.decode(
+ ints2octs((101, 17, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120)), 17)
+
+ def testIndefModeSubst(self):
+ assert decoder.decode(
+ ints2octs((101, 128, 36, 128, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120, 0, 0, 0, 0)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((36, 128, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120, 0, 0, 0, 0)), -1)
+
+class NullDecoderTestCase(unittest.TestCase):
+ def testNull(self):
+ assert decoder.decode(ints2octs((5, 0))) == (null, null)
+ def testTagFormat(self):
+ try:
+ decoder.decode(ints2octs((37, 0)))
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class ObjectIdentifierDecoderTestCase(unittest.TestCase):
+ def testOID(self):
+ assert decoder.decode(
+ ints2octs((6, 6, 43, 6, 0, 191, 255, 126))
+ ) == ((1,3,6,0,0xffffe), null)
+
+ def testEdges1(self):
+ assert decoder.decode(
+ ints2octs((6, 1, 255))
+ ) == ((6,15), null)
+
+ def testEdges2(self):
+ assert decoder.decode(
+ ints2octs((6, 1, 239))
+ ) == ((5,39), null)
+
+ def testEdges3(self):
+ assert decoder.decode(
+ ints2octs((6, 7, 43, 6, 143, 255, 255, 255, 127))
+ ) == ((1, 3, 6, 4294967295), null)
+
+ def testNonLeading0x80(self):
+ assert decoder.decode(
+ ints2octs((6, 5, 85, 4, 129, 128, 0)),
+ ) == ((2, 5, 4, 16384), null)
+
+ def testLeading0x80(self):
+ try:
+ decoder.decode(
+ ints2octs((6, 5, 85, 4, 128, 129, 0))
+ )
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'Leading 0x80 tolerated'
+
+ def testTagFormat(self):
+ try:
+ decoder.decode(ints2octs((38, 1, 239)))
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class RealDecoderTestCase(unittest.TestCase):
+ def testChar(self):
+ assert decoder.decode(
+ ints2octs((9, 7, 3, 49, 50, 51, 69, 49, 49))
+ ) == (univ.Real((123, 10, 11)), null)
+
+ def testBin1(self):
+ assert decoder.decode(
+ ints2octs((9, 4, 128, 245, 4, 77))
+ ) == (univ.Real((1101, 2, -11)), null)
+
+ def testBin2(self):
+ assert decoder.decode(
+ ints2octs((9, 4, 128, 11, 4, 77))
+ ) == (univ.Real((1101, 2, 11)), null)
+
+ def testBin3(self):
+ assert decoder.decode(
+ ints2octs((9, 3, 192, 10, 123))
+ ) == (univ.Real((-123, 2, 10)), null)
+
+ def testPlusInf(self):
+ assert decoder.decode(
+ ints2octs((9, 1, 64))
+ ) == (univ.Real('inf'), null)
+
+ def testMinusInf(self):
+ assert decoder.decode(
+ ints2octs((9, 1, 65))
+ ) == (univ.Real('-inf'), null)
+
+ def testEmpty(self):
+ assert decoder.decode(
+ ints2octs((9, 0))
+ ) == (univ.Real(0.0), null)
+
+ def testTagFormat(self):
+ try:
+ decoder.decode(ints2octs((41, 0)))
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class SequenceDecoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null(null)),
+ namedtype.NamedType('first-name', univ.OctetString(null)),
+ namedtype.NamedType('age', univ.Integer(33)),
+ ))
+ self.s.setComponentByPosition(0, univ.Null(null))
+ self.s.setComponentByPosition(1, univ.OctetString('quick brown'))
+ self.s.setComponentByPosition(2, univ.Integer(1))
+ self.s.setDefaultComponents()
+
+ def testWithOptionalAndDefaultedDefMode(self):
+ assert decoder.decode(
+ ints2octs((48, 18, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1))
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedIndefMode(self):
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0))
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedDefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((48, 24, 5, 0, 36, 17, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 2, 1, 1))
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedIndefModeChunked(self):
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0))
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedDefModeSubst(self):
+ assert decoder.decode(
+ ints2octs((48, 18, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1)), 18)
+
+ def testWithOptionalAndDefaultedIndefModeSubst(self):
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0)),
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((5, 0, 36, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0)), -1)
+
+ def testTagFormat(self):
+ try:
+ decoder.decode(
+ ints2octs((16, 18, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1))
+ )
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'wrong tagFormat worked out'
+
+class GuidedSequenceDecoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null(null)),
+ namedtype.OptionalNamedType('first-name', univ.OctetString(null)),
+ namedtype.DefaultedNamedType('age', univ.Integer(33)),
+ ))
+
+ def __init(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(null))
+ self.s.setDefaultComponents()
+
+ def __initWithOptional(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(null))
+ self.s.setComponentByPosition(1, univ.OctetString('quick brown'))
+ self.s.setDefaultComponents()
+
+ def __initWithDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(null))
+ self.s.setComponentByPosition(2, univ.Integer(1))
+ self.s.setDefaultComponents()
+
+ def __initWithOptionalAndDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(null))
+ self.s.setComponentByPosition(1, univ.OctetString('quick brown'))
+ self.s.setComponentByPosition(2, univ.Integer(1))
+ self.s.setDefaultComponents()
+
+ def testDefMode(self):
+ self.__init()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testIndefMode(self):
+ self.__init()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testDefModeChunked(self):
+ self.__init()
+ assert decoder.decode(
+ ints2octs((48, 2, 5, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testIndefModeChunked(self):
+ self.__init()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalDefMode(self):
+ self.__initWithOptional()
+ assert decoder.decode(
+ ints2octs((48, 15, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110)), asn1Spec=self.s
+ ) == (self.s, null)
+
+    def testWithOptionalIndefMode(self):
+ self.__initWithOptional()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0, 0, 0)),
+ asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalDefModeChunked(self):
+ self.__initWithOptional()
+ assert decoder.decode(
+ ints2octs((48, 21, 5, 0, 36, 17, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110)),
+ asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalIndefModeChunked(self):
+ self.__initWithOptional()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 0, 0, 0, 0)),
+ asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithDefaultedDefMode(self):
+ self.__initWithDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 5, 5, 0, 2, 1, 1)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithDefaultedIndefMode(self):
+ self.__initWithDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 2, 1, 1, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithDefaultedDefModeChunked(self):
+ self.__initWithDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 5, 5, 0, 2, 1, 1)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithDefaultedIndefModeChunked(self):
+ self.__initWithDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 2, 1, 1, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedDefMode(self):
+ self.__initWithOptionalAndDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 18, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedIndefMode(self):
+ self.__initWithOptionalAndDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedDefModeChunked(self):
+ self.__initWithOptionalAndDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 24, 5, 0, 36, 17, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 2, 1, 1)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithOptionalAndDefaultedIndefModeChunked(self):
+ self.__initWithOptionalAndDefaulted()
+ assert decoder.decode(
+ ints2octs((48, 128, 5, 0, 36, 128, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+class ChoiceDecoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null(null)),
+ namedtype.NamedType('number', univ.Integer(0)),
+ namedtype.NamedType('string', univ.OctetString())
+ ))
+
+ def testBySpec(self):
+ self.s.setComponentByPosition(0, univ.Null(null))
+ assert decoder.decode(
+ ints2octs((5, 0)), asn1Spec=self.s
+ ) == (self.s, null)
+
+ def testWithoutSpec(self):
+ self.s.setComponentByPosition(0, univ.Null(null))
+ assert decoder.decode(ints2octs((5, 0))) == (self.s, null)
+ assert decoder.decode(ints2octs((5, 0))) == (univ.Null(null), null)
+
+ def testUndefLength(self):
+ self.s.setComponentByPosition(2, univ.OctetString('abcdefgh'))
+ assert decoder.decode(ints2octs((36, 128, 4, 3, 97, 98, 99, 4, 3, 100, 101, 102, 4, 2, 103, 104, 0, 0)), asn1Spec=self.s) == (self.s, null)
+
+ def testExplicitTag(self):
+ s = self.s.subtype(explicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatConstructed, 4))
+ s.setComponentByPosition(0, univ.Null(null))
+ assert decoder.decode(ints2octs((164, 2, 5, 0)), asn1Spec=s) == (s, null)
+
+ def testExplicitTagUndefLength(self):
+ s = self.s.subtype(explicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatConstructed, 4))
+ s.setComponentByPosition(0, univ.Null(null))
+ assert decoder.decode(ints2octs((164, 128, 5, 0, 0, 0)), asn1Spec=s) == (s, null)
+
+class AnyDecoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Any()
+
+ def testByUntagged(self):
+ assert decoder.decode(
+ ints2octs((4, 3, 102, 111, 120)), asn1Spec=self.s
+ ) == (univ.Any('\004\003fox'), null)
+
+ def testTaggedEx(self):
+ s = univ.Any('\004\003fox').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))
+ assert decoder.decode(ints2octs((164, 5, 4, 3, 102, 111, 120)), asn1Spec=s) == (s, null)
+
+ def testTaggedIm(self):
+ s = univ.Any('\004\003fox').subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))
+ assert decoder.decode(ints2octs((132, 5, 4, 3, 102, 111, 120)), asn1Spec=s) == (s, null)
+
+ def testByUntaggedIndefMode(self):
+ assert decoder.decode(
+ ints2octs((4, 3, 102, 111, 120)), asn1Spec=self.s
+ ) == (univ.Any('\004\003fox'), null)
+
+ def testTaggedExIndefMode(self):
+ s = univ.Any('\004\003fox').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))
+ assert decoder.decode(ints2octs((164, 128, 4, 3, 102, 111, 120, 0, 0)), asn1Spec=s) == (s, null)
+
+ def testTaggedImIndefMode(self):
+ s = univ.Any('\004\003fox').subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))
+ assert decoder.decode(ints2octs((164, 128, 4, 3, 102, 111, 120, 0, 0)), asn1Spec=s) == (s, null)
+
+ def testByUntaggedSubst(self):
+ assert decoder.decode(
+ ints2octs((4, 3, 102, 111, 120)),
+ asn1Spec=self.s,
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((4, 3, 102, 111, 120)), 5)
+
+ def testTaggedExSubst(self):
+ assert decoder.decode(
+ ints2octs((164, 5, 4, 3, 102, 111, 120)),
+ asn1Spec=self.s,
+ substrateFun=lambda a,b,c: (b,c)
+ ) == (ints2octs((164, 5, 4, 3, 102, 111, 120)), 7)
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/ber/test_encoder.py b/python/pyasn1/test/codec/ber/test_encoder.py
new file mode 100644
index 000000000..bfb3f618c
--- /dev/null
+++ b/python/pyasn1/test/codec/ber/test_encoder.py
@@ -0,0 +1,338 @@
+from pyasn1.type import tag, namedtype, univ
+from pyasn1.codec.ber import encoder
+from pyasn1.compat.octets import ints2octs
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
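+# Tag numbers above 30 use the high-tag-number form: the identifier octet
+# 127 (0x7f) means application class, constructed, tag number to follow, and
+# 0xdeadbeaf then arrives as base-128 digits (141, 245, 182, 253, 47) with
+# the continuation bit set on every octet but the last.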
+class LargeTagEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.o = univ.Integer().subtype(
+ value=1, explicitTag=tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0xdeadbeaf)
+ )
+ def testEncoder(self):
+ assert encoder.encode(self.o) == ints2octs((127, 141, 245, 182, 253, 47, 3, 2, 1, 1))
+
+class IntegerEncoderTestCase(unittest.TestCase):
+ def testPosInt(self):
+ assert encoder.encode(univ.Integer(12)) == ints2octs((2, 1, 12))
+
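+    # BER INTEGER content is two's complement, so -12 fits a single content
+    # octet: 256 - 12 = 244.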
+ def testNegInt(self):
+ assert encoder.encode(univ.Integer(-12)) == ints2octs((2, 1, 244))
+
+ def testZero(self):
+ assert encoder.encode(univ.Integer(0)) == ints2octs((2, 1, 0))
+
+ def testCompactZero(self):
+ encoder.IntegerEncoder.supportCompactZero = True
+ substrate = encoder.encode(univ.Integer(0))
+ encoder.IntegerEncoder.supportCompactZero = False
+ assert substrate == ints2octs((2, 0))
+
+ def testMinusOne(self):
+ assert encoder.encode(univ.Integer(-1)) == ints2octs((2, 1, 255))
+
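+    # 0xffffffffffffffff has its top bit set, so a leading 0x00 octet is
+    # needed to keep the two's-complement value positive -- 9 content octets.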
+ def testPosLong(self):
+ assert encoder.encode(
+ univ.Integer(0xffffffffffffffff)
+ ) == ints2octs((2, 9, 0, 255, 255, 255, 255, 255, 255, 255, 255))
+
+ def testNegLong(self):
+ assert encoder.encode(
+ univ.Integer(-0xffffffffffffffff)
+ ) == ints2octs((2, 9, 255, 0, 0, 0, 0, 0, 0, 0, 1))
+
+class BooleanEncoderTestCase(unittest.TestCase):
+ def testTrue(self):
+ assert encoder.encode(univ.Boolean(1)) == ints2octs((1, 1, 1))
+
+ def testFalse(self):
+ assert encoder.encode(univ.Boolean(0)) == ints2octs((1, 1, 0))
+
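+# The first BIT STRING content octet counts unused trailing bits: the 15-bit
+# value below packs into octets 169, 138 with one spare (zero) bit.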
+class BitStringEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.b = univ.BitString((1,0,1,0,1,0,0,1,1,0,0,0,1,0,1))
+
+ def testDefMode(self):
+ assert encoder.encode(self.b) == ints2octs((3, 3, 1, 169, 138))
+
+ def testIndefMode(self):
+ assert encoder.encode(
+ self.b, defMode=0
+ ) == ints2octs((3, 3, 1, 169, 138))
+
+ def testDefModeChunked(self):
+ assert encoder.encode(
+ self.b, maxChunkSize=1
+ ) == ints2octs((35, 8, 3, 2, 0, 169, 3, 2, 1, 138))
+
+ def testIndefModeChunked(self):
+ assert encoder.encode(
+ self.b, defMode=0, maxChunkSize=1
+ ) == ints2octs((35, 128, 3, 2, 0, 169, 3, 2, 1, 138, 0, 0))
+
+ def testEmptyValue(self):
+ assert encoder.encode(univ.BitString(())) == ints2octs((3, 1, 0))
+
+class OctetStringEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.o = univ.OctetString('Quick brown fox')
+
+ def testDefMode(self):
+ assert encoder.encode(self.o) == ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+
+ def testIndefMode(self):
+ assert encoder.encode(
+ self.o, defMode=0
+ ) == ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+
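+    # maxChunkSize switches to the constructed form (tag 36 = 0x24), carrying
+    # the value as a run of primitive OCTET STRING fragments of <= 4 octets.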
+ def testDefModeChunked(self):
+ assert encoder.encode(
+ self.o, maxChunkSize=4
+ ) == ints2octs((36, 23, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120))
+
+ def testIndefModeChunked(self):
+ assert encoder.encode(
+ self.o, defMode=0, maxChunkSize=4
+ ) == ints2octs((36, 128, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0))
+
+class ExpTaggedOctetStringEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.o = univ.OctetString().subtype(
+ value='Quick brown fox',
+ explicitTag=tag.Tag(tag.tagClassApplication,tag.tagFormatSimple,5)
+ )
+ def testDefMode(self):
+ assert encoder.encode(self.o) == ints2octs((101, 17, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+
+ def testIndefMode(self):
+ assert encoder.encode(
+ self.o, defMode=0
+ ) == ints2octs((101, 128, 4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120, 0, 0))
+
+ def testDefModeChunked(self):
+ assert encoder.encode(
+ self.o, defMode=1, maxChunkSize=4
+ ) == ints2octs((101, 25, 36, 23, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120))
+
+ def testIndefModeChunked(self):
+ assert encoder.encode(
+ self.o, defMode=0, maxChunkSize=4
+ ) == ints2octs((101, 128, 36, 128, 4, 4, 81, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 4, 111, 119, 110, 32, 4, 3, 102, 111, 120, 0, 0, 0, 0))
+
+class NullEncoderTestCase(unittest.TestCase):
+ def testNull(self):
+ assert encoder.encode(univ.Null('')) == ints2octs((5, 0))
+
+class ObjectIdentifierEncoderTestCase(unittest.TestCase):
+ def testNull(self):
+ assert encoder.encode(
+ univ.ObjectIdentifier((1,3,6,0,0xffffe))
+ ) == ints2octs((6, 6, 43, 6, 0, 191, 255, 126))
+
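+# A REAL's content starts with an information octet: 3 selects the base-10
+# NR3 character form (the octets below spell '123E11'), while 0x80 selects
+# base-2 binary form -- a one-octet exponent followed by the mantissa.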
+class RealEncoderTestCase(unittest.TestCase):
+ def testChar(self):
+ assert encoder.encode(
+ univ.Real((123, 10, 11))
+ ) == ints2octs((9, 7, 3, 49, 50, 51, 69, 49, 49))
+
+ def testBin1(self):
+ assert encoder.encode(
+ univ.Real((1101, 2, 11))
+ ) == ints2octs((9, 4, 128, 11, 4, 77))
+
+ def testBin2(self):
+ assert encoder.encode(
+ univ.Real((1101, 2, -11))
+ ) == ints2octs((9, 4, 128, 245, 4, 77))
+
+ def testPlusInf(self):
+ assert encoder.encode(univ.Real('inf')) == ints2octs((9, 1, 64))
+
+ def testMinusInf(self):
+ assert encoder.encode(univ.Real('-inf')) == ints2octs((9, 1, 65))
+
+ def testZero(self):
+ assert encoder.encode(univ.Real(0)) == ints2octs((9, 0))
+
+class SequenceEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null('')),
+ namedtype.OptionalNamedType('first-name', univ.OctetString('')),
+ namedtype.DefaultedNamedType('age', univ.Integer(33)),
+ ))
+
+ def __init(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0)
+
+ def __initWithOptional(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByPosition(1, 'quick brown')
+
+ def __initWithDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByPosition(2, 1)
+
+ def __initWithOptionalAndDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(''))
+ self.s.setComponentByPosition(1, univ.OctetString('quick brown'))
+ self.s.setComponentByPosition(2, univ.Integer(1))
+
+ def testDefMode(self):
+ self.__init()
+ assert encoder.encode(self.s) == ints2octs((48, 2, 5, 0))
+
+ def testIndefMode(self):
+ self.__init()
+ assert encoder.encode(
+ self.s, defMode=0
+ ) == ints2octs((48, 128, 5, 0, 0, 0))
+
+ def testDefModeChunked(self):
+ self.__init()
+ assert encoder.encode(
+ self.s, defMode=1, maxChunkSize=4
+ ) == ints2octs((48, 2, 5, 0))
+
+ def testIndefModeChunked(self):
+ self.__init()
+ assert encoder.encode(
+ self.s, defMode=0, maxChunkSize=4
+ ) == ints2octs((48, 128, 5, 0, 0, 0))
+
+ def testWithOptionalDefMode(self):
+ self.__initWithOptional()
+ assert encoder.encode(self.s) == ints2octs((48, 15, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110))
+
+ def testWithOptionalIndefMode(self):
+ self.__initWithOptional()
+ assert encoder.encode(
+ self.s, defMode=0
+ ) == ints2octs((48, 128, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 0, 0))
+
+ def testWithOptionalDefModeChunked(self):
+ self.__initWithOptional()
+ assert encoder.encode(
+ self.s, defMode=1, maxChunkSize=4
+ ) == ints2octs((48, 21, 5, 0, 36, 17, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110))
+
+ def testWithOptionalIndefModeChunked(self):
+ self.__initWithOptional()
+ assert encoder.encode(
+ self.s, defMode=0, maxChunkSize=4
+ ) == ints2octs((48, 128, 5, 0, 36, 128, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 0, 0, 0, 0))
+
+ def testWithDefaultedDefMode(self):
+ self.__initWithDefaulted()
+ assert encoder.encode(self.s) == ints2octs((48, 5, 5, 0, 2, 1, 1))
+
+ def testWithDefaultedIndefMode(self):
+ self.__initWithDefaulted()
+ assert encoder.encode(
+ self.s, defMode=0
+ ) == ints2octs((48, 128, 5, 0, 2, 1, 1, 0, 0))
+
+ def testWithDefaultedDefModeChunked(self):
+ self.__initWithDefaulted()
+ assert encoder.encode(
+ self.s, defMode=1, maxChunkSize=4
+ ) == ints2octs((48, 5, 5, 0, 2, 1, 1))
+
+ def testWithDefaultedIndefModeChunked(self):
+ self.__initWithDefaulted()
+ assert encoder.encode(
+ self.s, defMode=0, maxChunkSize=4
+ ) == ints2octs((48, 128, 5, 0, 2, 1, 1, 0, 0))
+
+ def testWithOptionalAndDefaultedDefMode(self):
+ self.__initWithOptionalAndDefaulted()
+ assert encoder.encode(self.s) == ints2octs((48, 18, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1))
+
+ def testWithOptionalAndDefaultedIndefMode(self):
+ self.__initWithOptionalAndDefaulted()
+ assert encoder.encode(
+ self.s, defMode=0
+ ) == ints2octs((48, 128, 5, 0, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 2, 1, 1, 0, 0))
+
+ def testWithOptionalAndDefaultedDefModeChunked(self):
+ self.__initWithOptionalAndDefaulted()
+ assert encoder.encode(
+ self.s, defMode=1, maxChunkSize=4
+ ) == ints2octs((48, 24, 5, 0, 36, 17, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 2, 1, 1))
+
+ def testWithOptionalAndDefaultedIndefModeChunked(self):
+ self.__initWithOptionalAndDefaulted()
+ assert encoder.encode(
+ self.s, defMode=0, maxChunkSize=4
+ ) == ints2octs((48, 128, 5, 0, 36, 128, 4, 4, 113, 117, 105, 99, 4, 4, 107, 32, 98, 114, 4, 3, 111, 119, 110, 0, 0, 2, 1, 1, 0, 0))
+
+class ChoiceEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null('')),
+ namedtype.NamedType('number', univ.Integer(0)),
+ namedtype.NamedType('string', univ.OctetString())
+ ))
+
+ def testEmpty(self):
+ try:
+ encoder.encode(self.s)
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'encoded unset choice'
+
+ def testFilled(self):
+ self.s.setComponentByPosition(0, univ.Null(''))
+ assert encoder.encode(self.s) == ints2octs((5, 0))
+
+ def testTagged(self):
+ s = self.s.subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,4)
+ )
+ s.setComponentByPosition(0, univ.Null(''))
+ assert encoder.encode(s) == ints2octs((164, 2, 5, 0))
+
+ def testUndefLength(self):
+ self.s.setComponentByPosition(2, univ.OctetString('abcdefgh'))
+ assert encoder.encode(self.s, defMode=False, maxChunkSize=3) == ints2octs((36, 128, 4, 3, 97, 98, 99, 4, 3, 100, 101, 102, 4, 2, 103, 104, 0, 0))
+
+ def testTaggedUndefLength(self):
+ s = self.s.subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,4)
+ )
+ s.setComponentByPosition(2, univ.OctetString('abcdefgh'))
+ assert encoder.encode(s, defMode=False, maxChunkSize=3) == ints2octs((164, 128, 36, 128, 4, 3, 97, 98, 99, 4, 3, 100, 101, 102, 4, 2, 103, 104, 0, 0, 0, 0))
+
+class AnyEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Any(encoder.encode(univ.OctetString('fox')))
+
+ def testUntagged(self):
+ assert encoder.encode(self.s) == ints2octs((4, 3, 102, 111, 120))
+
+ def testTaggedEx(self):
+ s = self.s.subtype(
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)
+ )
+ assert encoder.encode(s) == ints2octs((164, 5, 4, 3, 102, 111, 120))
+
+ def testTaggedIm(self):
+ s = self.s.subtype(
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)
+ )
+ assert encoder.encode(s) == ints2octs((132, 5, 4, 3, 102, 111, 120))
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/cer/__init__.py b/python/pyasn1/test/codec/cer/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/codec/cer/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/codec/cer/suite.py b/python/pyasn1/test/codec/cer/suite.py
new file mode 100644
index 000000000..49d682918
--- /dev/null
+++ b/python/pyasn1/test/codec/cer/suite.py
@@ -0,0 +1,22 @@
+from sys import path, version_info
+from os.path import sep
+path.insert(1, path[0]+sep+'cer')
+import test_encoder, test_decoder
+from pyasn1.error import PyAsn1Error
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+loader = unittest.TestLoader()
+for m in (test_encoder, test_decoder):
+ suite.addTest(loader.loadTestsFromModule(m))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
diff --git a/python/pyasn1/test/codec/cer/test_decoder.py b/python/pyasn1/test/codec/cer/test_decoder.py
new file mode 100644
index 000000000..7195b72e0
--- /dev/null
+++ b/python/pyasn1/test/codec/cer/test_decoder.py
@@ -0,0 +1,31 @@
+from pyasn1.type import univ
+from pyasn1.codec.cer import decoder
+from pyasn1.compat.octets import ints2octs, str2octs, null
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class BooleanDecoderTestCase(unittest.TestCase):
+ def testTrue(self):
+ assert decoder.decode(ints2octs((1, 1, 255))) == (1, null)
+ def testFalse(self):
+ assert decoder.decode(ints2octs((1, 1, 0))) == (0, null)
+
+class OctetStringDecoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert decoder.decode(
+ ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120)),
+ ) == (str2octs('Quick brown fox'), null)
+ def testLongMode(self):
+ assert decoder.decode(
+ ints2octs((36, 128, 4, 130, 3, 232) + (81,)*1000 + (4, 1, 81, 0, 0))
+ ) == (str2octs('Q'*1001), null)
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/cer/test_encoder.py b/python/pyasn1/test/codec/cer/test_encoder.py
new file mode 100644
index 000000000..a4f80aa20
--- /dev/null
+++ b/python/pyasn1/test/codec/cer/test_encoder.py
@@ -0,0 +1,107 @@
+from pyasn1.type import namedtype, univ
+from pyasn1.codec.cer import encoder
+from pyasn1.compat.octets import ints2octs
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
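+# BER accepts any non-zero octet as BOOLEAN TRUE, but CER (like DER)
+# canonicalizes TRUE to the single octet 0xff (255).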
+class BooleanEncoderTestCase(unittest.TestCase):
+ def testTrue(self):
+ assert encoder.encode(univ.Boolean(1)) == ints2octs((1, 1, 255))
+ def testFalse(self):
+ assert encoder.encode(univ.Boolean(0)) == ints2octs((1, 1, 0))
+
+class BitStringEncoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert encoder.encode(
+ univ.BitString((1,0)*501)
+ ) == ints2octs((3, 127, 6) + (170,) * 125 + (128,))
+
+ def testLongMode(self):
+ assert encoder.encode(
+ univ.BitString((1,0)*501)
+ ) == ints2octs((3, 127, 6) + (170,) * 125 + (128,))
+
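+# CER keeps strings of up to 1000 octets in primitive form; longer values
+# become constructed, indefinite-length encodings cut into 1000-octet
+# fragments (the long-form length 130, 3, 232 below reads as 0x03e8 = 1000).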
+class OctetStringEncoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert encoder.encode(
+ univ.OctetString('Quick brown fox')
+ ) == ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+ def testLongMode(self):
+ assert encoder.encode(
+ univ.OctetString('Q'*1001)
+ ) == ints2octs((36, 128, 4, 130, 3, 232) + (81,)*1000 + (4, 1, 81, 0, 0))
+
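+# CER always uses the indefinite length form for constructed types and emits
+# SET components in tag order -- note INTEGER (tag 2) preceding OCTET STRING
+# (tag 4) and NULL (tag 5) in the expected octets below.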
+class SetEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ self.s = univ.Set(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null('')),
+ namedtype.OptionalNamedType('first-name', univ.OctetString('')),
+ namedtype.DefaultedNamedType('age', univ.Integer(33))
+ ))
+
+ def __init(self):
+ self.s.clear()
+        self.s.setComponentByPosition(0)
+
+    def __initWithOptional(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByPosition(1, 'quick brown')
+
+ def __initWithDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByPosition(2, 1)
+
+ def __initWithOptionalAndDefaulted(self):
+ self.s.clear()
+ self.s.setComponentByPosition(0, univ.Null(''))
+ self.s.setComponentByPosition(1, univ.OctetString('quick brown'))
+ self.s.setComponentByPosition(2, univ.Integer(1))
+
+ def testIndefMode(self):
+ self.__init()
+ assert encoder.encode(self.s) == ints2octs((49, 128, 5, 0, 0, 0))
+
+ def testWithOptionalIndefMode(self):
+ self.__initWithOptional()
+ assert encoder.encode(
+ self.s
+ ) == ints2octs((49, 128, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 5, 0, 0, 0))
+
+ def testWithDefaultedIndefMode(self):
+ self.__initWithDefaulted()
+ assert encoder.encode(
+ self.s
+ ) == ints2octs((49, 128, 2, 1, 1, 5, 0, 0, 0))
+
+ def testWithOptionalAndDefaultedIndefMode(self):
+ self.__initWithOptionalAndDefaulted()
+ assert encoder.encode(
+ self.s
+ ) == ints2octs((49, 128, 2, 1, 1, 4, 11, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 5, 0, 0, 0))
+
+class SetWithChoiceEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ c = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('actual', univ.Boolean(0))
+ ))
+ self.s = univ.Set(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null('')),
+ namedtype.NamedType('status', c)
+ ))
+
+ def testIndefMode(self):
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByName('status')
+ self.s.getComponentByName('status').setComponentByPosition(0, 1)
+ assert encoder.encode(self.s) == ints2octs((49, 128, 1, 1, 255, 5, 0, 0, 0))
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/der/__init__.py b/python/pyasn1/test/codec/der/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/codec/der/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/codec/der/suite.py b/python/pyasn1/test/codec/der/suite.py
new file mode 100644
index 000000000..7af83bf94
--- /dev/null
+++ b/python/pyasn1/test/codec/der/suite.py
@@ -0,0 +1,22 @@
+from sys import path, version_info
+from os.path import sep
+path.insert(1, path[0]+sep+'der')
+import test_encoder, test_decoder
+from pyasn1.error import PyAsn1Error
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+loader = unittest.TestLoader()
+for m in (test_encoder, test_decoder):
+ suite.addTest(loader.loadTestsFromModule(m))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
diff --git a/python/pyasn1/test/codec/der/test_decoder.py b/python/pyasn1/test/codec/der/test_decoder.py
new file mode 100644
index 000000000..5c9a1948b
--- /dev/null
+++ b/python/pyasn1/test/codec/der/test_decoder.py
@@ -0,0 +1,20 @@
+from pyasn1.type import univ
+from pyasn1.codec.der import decoder
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class OctetStringDecoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert decoder.decode(
+ '\004\017Quick brown fox'.encode()
+ ) == ('Quick brown fox'.encode(), ''.encode())
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/der/test_encoder.py b/python/pyasn1/test/codec/der/test_encoder.py
new file mode 100644
index 000000000..787da7bec
--- /dev/null
+++ b/python/pyasn1/test/codec/der/test_encoder.py
@@ -0,0 +1,44 @@
+from pyasn1.type import namedtype, univ
+from pyasn1.codec.der import encoder
+from pyasn1.compat.octets import ints2octs
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class OctetStringEncoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert encoder.encode(
+ univ.OctetString('Quick brown fox')
+ ) == ints2octs((4, 15, 81, 117, 105, 99, 107, 32, 98, 114, 111, 119, 110, 32, 102, 111, 120))
+
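+# DER keeps BER's unused-bits prefix: a lone set bit encodes as
+# (3, 2, 7, 128) -- two content octets, 7 unused bits, value 0x80.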
+class BitStringEncoderTestCase(unittest.TestCase):
+ def testShortMode(self):
+ assert encoder.encode(
+ univ.BitString((1,))
+ ) == ints2octs((3, 2, 7, 128))
+
+class SetWithChoiceEncoderTestCase(unittest.TestCase):
+ def setUp(self):
+ c = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('name', univ.OctetString('')),
+ namedtype.NamedType('amount', univ.Integer(0))
+ ))
+ self.s = univ.Set(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('place-holder', univ.Null('')),
+ namedtype.NamedType('status', c)
+ ))
+
+ def testDefMode(self):
+ self.s.setComponentByPosition(0)
+ self.s.setComponentByName('status')
+ self.s.getComponentByName('status').setComponentByPosition(0, 'ann')
+ assert encoder.encode(self.s) == ints2octs((49, 7, 4, 3, 97, 110, 110, 5, 0))
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/codec/suite.py b/python/pyasn1/test/codec/suite.py
new file mode 100644
index 000000000..93ff06381
--- /dev/null
+++ b/python/pyasn1/test/codec/suite.py
@@ -0,0 +1,29 @@
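+# The sys.path inserts below are relative to the top-level test directory,
+# so the per-codec test_* modules resolve when this suite is pulled in by
+# the parent suite.py one level up.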
+from sys import path, version_info
+from os.path import sep
+path.insert(1, path[0]+sep+'codec'+sep+'ber')
+import ber.suite
+path.insert(1, path[0]+sep+'codec'+sep+'cer')
+import cer.suite
+path.insert(1, path[0]+sep+'codec'+sep+'der')
+import der.suite
+from pyasn1.error import PyAsn1Error
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+for m in (
+ ber.suite,
+ cer.suite,
+ der.suite
+ ):
+ suite.addTest(getattr(m, 'suite'))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
diff --git a/python/pyasn1/test/suite.py b/python/pyasn1/test/suite.py
new file mode 100644
index 000000000..b4d80e864
--- /dev/null
+++ b/python/pyasn1/test/suite.py
@@ -0,0 +1,26 @@
+from sys import path, version_info
+from os.path import sep
+path.insert(1, path[0]+sep+'type')
+import type.suite
+path.insert(1, path[0]+sep+'codec')
+import codec.suite
+from pyasn1.error import PyAsn1Error
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+for m in (
+ type.suite,
+ codec.suite
+ ):
+ suite.addTest(getattr(m, 'suite'))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
diff --git a/python/pyasn1/test/type/__init__.py b/python/pyasn1/test/type/__init__.py
new file mode 100644
index 000000000..8c3066b2e
--- /dev/null
+++ b/python/pyasn1/test/type/__init__.py
@@ -0,0 +1 @@
+# This file is necessary to make this directory a package.
diff --git a/python/pyasn1/test/type/suite.py b/python/pyasn1/test/type/suite.py
new file mode 100644
index 000000000..bc4b48685
--- /dev/null
+++ b/python/pyasn1/test/type/suite.py
@@ -0,0 +1,20 @@
+import test_tag, test_constraint, test_namedtype, test_univ
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+suite = unittest.TestSuite()
+loader = unittest.TestLoader()
+for m in (test_tag, test_constraint, test_namedtype, test_univ):
+ suite.addTest(loader.loadTestsFromModule(m))
+
+def runTests(): unittest.TextTestRunner(verbosity=2).run(suite)
+
+if __name__ == '__main__': runTests()
diff --git a/python/pyasn1/test/type/test_constraint.py b/python/pyasn1/test/type/test_constraint.py
new file mode 100644
index 000000000..3457c0fc3
--- /dev/null
+++ b/python/pyasn1/test/type/test_constraint.py
@@ -0,0 +1,280 @@
+from pyasn1.type import constraint, error
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class SingleValueConstraintTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.SingleValueConstraint(1,2)
+ self.c2 = constraint.SingleValueConstraint(3,4)
+
+    def testCmp(self): assert self.c1 == self.c1, 'comparison fails'
+ def testHash(self): assert hash(self.c1) != hash(self.c2), 'hash() fails'
+ def testGoodVal(self):
+ try:
+ self.c1(1)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(4)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ContainedSubtypeConstraintTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ContainedSubtypeConstraint(
+ constraint.SingleValueConstraint(12)
+ )
+
+ def testGoodVal(self):
+ try:
+ self.c1(12)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(4)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ValueRangeConstraintTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ValueRangeConstraint(1,4)
+
+ def testGoodVal(self):
+ try:
+ self.c1(1)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(-5)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ValueSizeConstraintTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ValueSizeConstraint(1,2)
+
+ def testGoodVal(self):
+ try:
+ self.c1('a')
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1('abc')
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class PermittedAlphabetConstraintTestCase(SingleValueConstraintTestCase):
+ def setUp(self):
+ self.c1 = constraint.PermittedAlphabetConstraint('A', 'B', 'C')
+ self.c2 = constraint.PermittedAlphabetConstraint('DEF')
+
+ def testGoodVal(self):
+ try:
+ self.c1('A')
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1('E')
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ConstraintsIntersectionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ConstraintsIntersection(
+ constraint.SingleValueConstraint(4),
+ constraint.ValueRangeConstraint(2, 4)
+ )
+
+ def testCmp1(self):
+ assert constraint.SingleValueConstraint(4) in self.c1, '__cmp__() fails'
+
+ def testCmp2(self):
+ assert constraint.SingleValueConstraint(5) not in self.c1, \
+ '__cmp__() fails'
+
+ def testCmp3(self):
+ c = constraint.ConstraintsUnion(constraint.ConstraintsIntersection(
+ constraint.SingleValueConstraint(4),
+ constraint.ValueRangeConstraint(2, 4)
+ ))
+ assert self.c1 in c, '__cmp__() fails'
+ def testCmp4(self):
+ c = constraint.ConstraintsUnion(
+ constraint.ConstraintsIntersection(constraint.SingleValueConstraint(5))
+ )
+ assert self.c1 not in c, '__cmp__() fails'
+
+ def testGoodVal(self):
+ try:
+ self.c1(4)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(-5)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class InnerTypeConstraintTestCase(unittest.TestCase):
+ def testConst1(self):
+ c = constraint.InnerTypeConstraint(
+ constraint.SingleValueConstraint(4)
+ )
+ try:
+ c(4, 32)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ try:
+ c(5, 32)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+ def testConst2(self):
+ c = constraint.InnerTypeConstraint(
+ (0, constraint.SingleValueConstraint(4), 'PRESENT'),
+ (1, constraint.SingleValueConstraint(4), 'ABSENT')
+ )
+ try:
+ c(4, 0)
+ except error.ValueConstraintError:
+            assert 0, 'constraint check fails'
+ try:
+ c(4, 1)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+ try:
+ c(3, 0)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+# Constraints compositions
+
+class ConstraintsIntersectionRangeTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ConstraintsIntersection(
+ constraint.ValueRangeConstraint(1, 9),
+ constraint.ValueRangeConstraint(2, 5)
+ )
+
+ def testGoodVal(self):
+ try:
+ self.c1(3)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(0)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ConstraintsUnionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ConstraintsUnion(
+ constraint.SingleValueConstraint(5),
+ constraint.ValueRangeConstraint(1, 3)
+ )
+
+ def testGoodVal(self):
+ try:
+ self.c1(2)
+ self.c1(5)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(-5)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+class ConstraintsExclusionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ConstraintsExclusion(
+ constraint.ValueRangeConstraint(2, 4)
+ )
+
+ def testGoodVal(self):
+ try:
+ self.c1(6)
+ except error.ValueConstraintError:
+ assert 0, 'constraint check fails'
+ def testBadVal(self):
+ try:
+ self.c1(2)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint check fails'
+
+# Constraints derivations
+
+class DirectDerivationTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.SingleValueConstraint(5)
+ self.c2 = constraint.ConstraintsUnion(
+ self.c1, constraint.ValueRangeConstraint(1, 3)
+ )
+
+ def testGoodVal(self):
+ assert self.c1.isSuperTypeOf(self.c2), 'isSuperTypeOf failed'
+        assert not self.c1.isSubTypeOf(self.c2), 'isSubTypeOf failed'
+    def testBadVal(self):
+        assert not self.c2.isSuperTypeOf(self.c1), 'isSuperTypeOf failed'
+        assert self.c2.isSubTypeOf(self.c1), 'isSubTypeOf failed'
+
+class IndirectDerivationTestCase(unittest.TestCase):
+ def setUp(self):
+ self.c1 = constraint.ConstraintsIntersection(
+ constraint.ValueRangeConstraint(1, 30)
+ )
+ self.c2 = constraint.ConstraintsIntersection(
+ self.c1, constraint.ValueRangeConstraint(1, 20)
+ )
+ self.c2 = constraint.ConstraintsIntersection(
+ self.c2, constraint.ValueRangeConstraint(1, 10)
+ )
+
+ def testGoodVal(self):
+ assert self.c1.isSuperTypeOf(self.c2), 'isSuperTypeOf failed'
+ assert not self.c1.isSubTypeOf(self.c2) , 'isSubTypeOf failed'
+ def testBadVal(self):
+ assert not self.c2.isSuperTypeOf(self.c1) , 'isSuperTypeOf failed'
+ assert self.c2.isSubTypeOf(self.c1) , 'isSubTypeOf failed'
+
+if __name__ == '__main__': unittest.main()
+
+# how to apply size constraints to constructed types?
diff --git a/python/pyasn1/test/type/test_namedtype.py b/python/pyasn1/test/type/test_namedtype.py
new file mode 100644
index 000000000..3a4f30599
--- /dev/null
+++ b/python/pyasn1/test/type/test_namedtype.py
@@ -0,0 +1,87 @@
+from pyasn1.type import namedtype, univ
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class NamedTypeCaseBase(unittest.TestCase):
+ def setUp(self):
+ self.e = namedtype.NamedType('age', univ.Integer())
+ def testIter(self):
+ n, t = self.e
+ assert n == 'age' or t == univ.Integer(), 'unpack fails'
+
+class NamedTypesCaseBase(unittest.TestCase):
+ def setUp(self):
+ self.e = namedtype.NamedTypes(
+ namedtype.NamedType('first-name', univ.OctetString('')),
+ namedtype.OptionalNamedType('age', univ.Integer(0)),
+ namedtype.NamedType('family-name', univ.OctetString(''))
+ )
+ def testIter(self):
+ for t in self.e:
+ break
+ else:
+ assert 0, '__getitem__() fails'
+
+ def testGetTypeByPosition(self):
+ assert self.e.getTypeByPosition(0) == univ.OctetString(''), \
+ 'getTypeByPosition() fails'
+
+ def testGetNameByPosition(self):
+ assert self.e.getNameByPosition(0) == 'first-name', \
+ 'getNameByPosition() fails'
+
+ def testGetPositionByName(self):
+ assert self.e.getPositionByName('first-name') == 0, \
+ 'getPositionByName() fails'
+
+ def testGetTypesNearPosition(self):
+ assert self.e.getTagMapNearPosition(0).getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString('')
+ }
+ assert self.e.getTagMapNearPosition(1).getPosMap() == {
+ univ.Integer.tagSet: univ.Integer(0),
+ univ.OctetString.tagSet: univ.OctetString('')
+ }
+ assert self.e.getTagMapNearPosition(2).getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString('')
+ }
+
+ def testGetTagMap(self):
+ assert self.e.getTagMap().getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString(''),
+ univ.Integer.tagSet: univ.Integer(0)
+ }
+
+ def testGetTagMapWithDups(self):
+ try:
+ self.e.getTagMap(1)
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'Duped types not noticed'
+
+ def testGetPositionNearType(self):
+ assert self.e.getPositionNearType(univ.OctetString.tagSet, 0) == 0
+ assert self.e.getPositionNearType(univ.Integer.tagSet, 1) == 1
+ assert self.e.getPositionNearType(univ.OctetString.tagSet, 2) == 2
+
+class OrderedNamedTypesCaseBase(unittest.TestCase):
+ def setUp(self):
+ self.e = namedtype.NamedTypes(
+ namedtype.NamedType('first-name', univ.OctetString('')),
+ namedtype.NamedType('age', univ.Integer(0))
+ )
+
+ def testGetTypeByPosition(self):
+ assert self.e.getTypeByPosition(0) == univ.OctetString(''), \
+ 'getTypeByPosition() fails'
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/type/test_tag.py b/python/pyasn1/test/type/test_tag.py
new file mode 100644
index 000000000..78146dca2
--- /dev/null
+++ b/python/pyasn1/test/type/test_tag.py
@@ -0,0 +1,107 @@
+from pyasn1.type import tag
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class TagTestCaseBase(unittest.TestCase):
+ def setUp(self):
+ self.t1 = tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 3)
+ self.t2 = tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 3)
+
+class TagCmpTestCase(TagTestCaseBase):
+ def testCmp(self):
+        assert self.t1 == self.t2, 'tag comparison fails'
+
+    def testHash(self):
+        assert hash(self.t1) == hash(self.t2), 'tag hash comparison fails'
+
+ def testSequence(self):
+ assert self.t1[0] == self.t2[0] and \
+ self.t1[1] == self.t2[1] and \
+ self.t1[2] == self.t2[2], 'tag sequence protocol fails'
+
+class TagSetTestCaseBase(unittest.TestCase):
+ def setUp(self):
+ self.ts1 = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )
+ self.ts2 = tag.initTagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )
+
+class TagSetCmpTestCase(TagSetTestCaseBase):
+ def testCmp(self):
+        assert self.ts1 == self.ts2, 'tag set comparison fails'
+
+    def testHash(self):
+        assert hash(self.ts1) == hash(self.ts2), 'tag set hash comparison fails'
+
+    def testLen(self):
+        assert len(self.ts1) == len(self.ts2), 'tag length comparison fails'
+
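+# Implicit tagging replaces the outermost tag of the set, whereas explicit
+# tagging appends a new outer tag -- necessarily constructed, which is why
+# tagExplicitly() below yields tagFormatConstructed.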
+class TaggingTestSuite(TagSetTestCaseBase):
+ def testImplicitTag(self):
+ t = self.ts1.tagImplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 14)
+ )
+ assert t == tag.TagSet(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 14)
+ ), 'implicit tagging went wrong'
+
+ def testExplicitTag(self):
+ t = self.ts1.tagExplicitly(
+ tag.Tag(tag.tagClassPrivate, tag.tagFormatSimple, 32)
+ )
+ assert t == tag.TagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassPrivate, tag.tagFormatConstructed, 32)
+ ), 'explicit tagging went wrong'
+
+class TagSetAddTestSuite(TagSetTestCaseBase):
+ def testAdd(self):
+ t = self.ts1 + tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+ assert t == tag.TagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
+ ), 'TagSet.__add__() fails'
+
+ def testRadd(self):
+ t = tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2) + self.ts1
+ assert t == tag.TagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ ), 'TagSet.__radd__() fails'
+
+class SuperTagSetTestCase(TagSetTestCaseBase):
+ def testSuperTagCheck1(self):
+ assert self.ts1.isSuperTagSetOf(
+ tag.TagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )), 'isSuperTagSetOf() fails'
+
+ def testSuperTagCheck2(self):
+ assert not self.ts1.isSuperTagSetOf(
+ tag.TagSet(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 13)
+ )), 'isSuperTagSetOf() fails'
+
+ def testSuperTagCheck3(self):
+ assert self.ts1.isSuperTagSetOf(
+ tag.TagSet((), tag.Tag(tag.tagClassUniversal,
+ tag.tagFormatSimple, 12))
+ ), 'isSuperTagSetOf() fails'
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pyasn1/test/type/test_univ.py b/python/pyasn1/test/type/test_univ.py
new file mode 100644
index 000000000..3eedcf26a
--- /dev/null
+++ b/python/pyasn1/test/type/test_univ.py
@@ -0,0 +1,479 @@
+from pyasn1.type import univ, tag, constraint, namedtype, namedval, error
+from pyasn1.compat.octets import str2octs, ints2octs
+from pyasn1.error import PyAsn1Error
+from sys import version_info
+if version_info[0:2] < (2, 7) or \
+ version_info[0:2] in ( (3, 0), (3, 1) ):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ import unittest
+else:
+ import unittest
+
+class IntegerTestCase(unittest.TestCase):
+ def testStr(self): assert str(univ.Integer(1)) in ('1','1L'),'str() fails'
+ def testAnd(self): assert univ.Integer(1) & 0 == 0, '__and__() fails'
+ def testOr(self): assert univ.Integer(1) | 0 == 1, '__or__() fails'
+ def testXor(self): assert univ.Integer(1) ^ 0 == 1, '__xor__() fails'
+ def testRand(self): assert 0 & univ.Integer(1) == 0, '__rand__() fails'
+ def testRor(self): assert 0 | univ.Integer(1) == 1, '__ror__() fails'
+ def testRxor(self): assert 0 ^ univ.Integer(1) == 1, '__rxor__() fails'
+ def testAdd(self): assert univ.Integer(-4) + 6 == 2, '__add__() fails'
+ def testRadd(self): assert 4 + univ.Integer(5) == 9, '__radd__() fails'
+ def testSub(self): assert univ.Integer(3) - 6 == -3, '__sub__() fails'
+ def testRsub(self): assert 6 - univ.Integer(3) == 3, '__rsub__() fails'
+ def testMul(self): assert univ.Integer(3) * -3 == -9, '__mul__() fails'
+ def testRmul(self): assert 2 * univ.Integer(3) == 6, '__rmul__() fails'
+ def testDiv(self): assert univ.Integer(3) / 2 == 1, '__div__() fails'
+ def testRdiv(self): assert 6 / univ.Integer(3) == 2, '__rdiv__() fails'
+ def testMod(self): assert univ.Integer(3) % 2 == 1, '__mod__() fails'
+ def testRmod(self): assert 4 % univ.Integer(3) == 1, '__rmod__() fails'
+ def testPow(self): assert univ.Integer(3) ** 2 == 9, '__pow__() fails'
+ def testRpow(self): assert 2 ** univ.Integer(2) == 4, '__rpow__() fails'
+ def testLshift(self): assert univ.Integer(1) << 1 == 2, '<< fails'
+ def testRshift(self): assert univ.Integer(2) >> 1 == 1, '>> fails'
+ def testInt(self): assert int(univ.Integer(3)) == 3, '__int__() fails'
+ def testLong(self): assert int(univ.Integer(8)) == 8, '__long__() fails'
+ def testFloat(self): assert float(univ.Integer(4))==4.0,'__float__() fails'
+ def testPrettyIn(self): assert univ.Integer('3') == 3, 'prettyIn() fails'
+ def testTag(self):
+ assert univ.Integer().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
+ )
+ def testNamedVals(self):
+ i = univ.Integer(
+ 'asn1', namedValues=univ.Integer.namedValues.clone(('asn1', 1))
+ )
+ assert i == 1, 'named val fails'
+ assert str(i) != 'asn1', 'named val __str__() fails'
+
+class BooleanTestCase(unittest.TestCase):
+ def testTruth(self):
+ assert univ.Boolean(True) and univ.Boolean(1), 'Truth initializer fails'
+ def testFalse(self):
+ assert not univ.Boolean(False) and not univ.Boolean(0), 'False initializer fails'
+ def testStr(self):
+ assert str(univ.Boolean(1)) in ('1', '1L'), 'str() fails'
+ def testTag(self):
+ assert univ.Boolean().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01)
+ )
+ def testConstraints(self):
+ try:
+ univ.Boolean(2)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint fail'
+ def testSubtype(self):
+ assert univ.Integer().subtype(
+ value=1,
+ implicitTag=tag.Tag(tag.tagClassPrivate,tag.tagFormatSimple,2),
+ subtypeSpec=constraint.SingleValueConstraint(1,3)
+ ) == univ.Integer(
+ value=1,
+ tagSet=tag.TagSet(tag.Tag(tag.tagClassPrivate,
+ tag.tagFormatSimple,2)),
+ subtypeSpec=constraint.ConstraintsIntersection(constraint.SingleValueConstraint(1,3))
+ )
+
+class BitStringTestCase(unittest.TestCase):
+ def setUp(self):
+ self.b = univ.BitString(
+ namedValues=namedval.NamedValues(('Active', 0), ('Urgent', 1))
+ )
+ def testSet(self):
+ assert self.b.clone('Active') == (1,)
+ assert self.b.clone("'1010100110001010'B") == (1,0,1,0,1,0,0,1,1,0,0,0,1,0,1,0)
+ assert self.b.clone("'A98A'H") == (1,0,1,0,1,0,0,1,1,0,0,0,1,0,1,0)
+ assert self.b.clone((1,0,1)) == (1,0,1)
+ def testStr(self):
+ assert str(self.b.clone('Urgent,Active')) == '(1, 1)'
+ def testRepr(self):
+ assert repr(self.b.clone('Urgent,Active')) == 'BitString("\'11\'B")'
+ def testTag(self):
+ assert univ.BitString().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
+ )
+ def testLen(self): assert len(self.b.clone("'A98A'H")) == 16
+ def testIter(self):
+ assert self.b.clone("'A98A'H")[0] == 1
+ assert self.b.clone("'A98A'H")[1] == 0
+ assert self.b.clone("'A98A'H")[2] == 1
+
+class OctetStringTestCase(unittest.TestCase):
+ def testInit(self):
+ assert univ.OctetString(str2octs('abcd')) == str2octs('abcd'), '__init__() fails'
+ def testBinStr(self):
+ assert univ.OctetString(binValue="1000010111101110101111000000111011") == ints2octs((133, 238, 188, 14, 192)), 'bin init fails'
+ def testHexStr(self):
+ assert univ.OctetString(hexValue="FA9823C43E43510DE3422") == ints2octs((250, 152, 35, 196, 62, 67, 81, 13, 227, 66, 32)), 'hex init fails'
+ def testTuple(self):
+ assert univ.OctetString((1,2,3,4,5)) == ints2octs((1,2,3,4,5)), 'tuple init failed'
+ def testStr(self):
+ assert str(univ.OctetString('q')) == 'q', '__str__() fails'
+ def testSeq(self):
+ assert univ.OctetString('q')[0] == str2octs('q')[0],'__getitem__() fails'
+ def testAsOctets(self):
+ assert univ.OctetString('abcd').asOctets() == str2octs('abcd'), 'testAsOctets() fails'
+ def testAsInts(self):
+ assert univ.OctetString('abcd').asNumbers() == (97, 98, 99, 100), 'testAsNumbers() fails'
+
+ def testEmpty(self):
+ try:
+ str(univ.OctetString())
+ except PyAsn1Error:
+ pass
+ else:
+ assert 0, 'empty OctetString() not reported'
+
+ def testAdd(self):
+ assert univ.OctetString('') + 'q' == str2octs('q'), '__add__() fails'
+ def testRadd(self):
+ assert 'b' + univ.OctetString('q') == str2octs('bq'), '__radd__() fails'
+ def testMul(self):
+ assert univ.OctetString('a') * 2 == str2octs('aa'), '__mul__() fails'
+ def testRmul(self):
+ assert 2 * univ.OctetString('b') == str2octs('bb'), '__rmul__() fails'
+ def testTag(self):
+ assert univ.OctetString().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
+ )
+
+class Null(unittest.TestCase):
+ def testStr(self): assert str(univ.Null('')) == '', 'str() fails'
+ def testTag(self):
+ assert univ.Null().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
+ )
+ def testConstraints(self):
+ try:
+ univ.Null(2)
+ except error.ValueConstraintError:
+ pass
+ else:
+ assert 0, 'constraint fail'
+
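+# univ.Real keeps values as (mantissa, base, exponent) triples, so -4.1 is
+# represented exactly as (-41, 10, -1), i.e. -41 * 10 ** -1.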
+class RealTestCase(unittest.TestCase):
+ def testStr(self): assert str(univ.Real(1.0)) == '1.0','str() fails'
+ def testRepr(self): assert repr(univ.Real(-4.1)) == 'Real((-41, 10, -1))','repr() fails'
+ def testAdd(self): assert univ.Real(-4.1) + 1.4 == -2.7, '__add__() fails'
+ def testRadd(self): assert 4 + univ.Real(0.5) == 4.5, '__radd__() fails'
+ def testSub(self): assert univ.Real(3.9) - 1.7 == 2.2, '__sub__() fails'
+ def testRsub(self): assert 6.1 - univ.Real(0.1) == 6, '__rsub__() fails'
+ def testMul(self): assert univ.Real(3.0) * -3 == -9, '__mul__() fails'
+ def testRmul(self): assert 2 * univ.Real(3.0) == 6, '__rmul__() fails'
+ def testDiv(self): assert univ.Real(3.0) / 2 == 1.5, '__div__() fails'
+ def testRdiv(self): assert 6 / univ.Real(3.0) == 2, '__rdiv__() fails'
+ def testMod(self): assert univ.Real(3.0) % 2 == 1, '__mod__() fails'
+ def testRmod(self): assert 4 % univ.Real(3.0) == 1, '__rmod__() fails'
+ def testPow(self): assert univ.Real(3.0) ** 2 == 9, '__pow__() fails'
+ def testRpow(self): assert 2 ** univ.Real(2.0) == 4, '__rpow__() fails'
+ def testInt(self): assert int(univ.Real(3.0)) == 3, '__int__() fails'
+ def testLong(self): assert int(univ.Real(8.0)) == 8, '__long__() fails'
+ def testFloat(self): assert float(univ.Real(4.0))==4.0,'__float__() fails'
+ def testPrettyIn(self): assert univ.Real((3,10,0)) == 3, 'prettyIn() fails'
+ # infinite float values
+ def testStrInf(self):
+ assert str(univ.Real('inf')) == 'inf','str() fails'
+ def testReprInf(self):
+ assert repr(univ.Real('inf')) == 'Real(\'inf\')','repr() fails'
+ def testAddInf(self):
+ assert univ.Real('inf') + 1 == float('inf'), '__add__() fails'
+ def testRaddInf(self):
+ assert 1 + univ.Real('inf') == float('inf'), '__radd__() fails'
+ def testIntInf(self):
+ try:
+ assert int(univ.Real('inf'))
+ except OverflowError:
+ pass
+ else:
+ assert 0, '__int__() fails'
+ def testLongInf(self):
+ try:
+ assert int(univ.Real('inf'))
+ except OverflowError:
+ pass
+ else:
+ assert 0, '__long__() fails'
+ def testFloatInf(self):
+ assert float(univ.Real('-inf')) == float('-inf'),'__float__() fails'
+ def testPrettyInInf(self):
+ assert univ.Real(float('inf')) == float('inf'), 'prettyIn() fails'
+ def testPlusInf(self):
+ assert univ.Real('inf').isPlusInfinity(), 'isPlusInfinity failed'
+ def testMinusInf(self):
+ assert univ.Real('-inf').isMinusInfinity(), 'isMinusInfinity failed'
+
+ def testTag(self):
+ assert univ.Real().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
+ )
+
+class ObjectIdentifier(unittest.TestCase):
+ def testStr(self):
+ assert str(univ.ObjectIdentifier((1,3,6))) == '1.3.6'
+ def testEq(self):
+ assert univ.ObjectIdentifier((1,3,6)) == (1,3,6), '__cmp__() fails'
+ def testAdd(self):
+ assert univ.ObjectIdentifier((1,3)) + (6,)==(1,3,6),'__add__() fails'
+ def testRadd(self):
+ assert (1,) + univ.ObjectIdentifier((3,6))==(1,3,6),'__radd__() fails'
+ def testLen(self):
+ assert len(univ.ObjectIdentifier((1,3))) == 2,'__len__() fails'
+ def testPrefix(self):
+ o = univ.ObjectIdentifier('1.3.6')
+ assert o.isPrefixOf((1,3,6)), 'isPrefixOf() fails'
+ assert o.isPrefixOf((1,3,6,1)), 'isPrefixOf() fails'
+ assert not o.isPrefixOf((1,3)), 'isPrefixOf() fails'
+ def testInput(self):
+ assert univ.ObjectIdentifier('1.3.6')==(1,3,6),'prettyIn() fails'
+ def testTag(self):
+ assert univ.ObjectIdentifier().getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
+ )
+
+class SequenceOf(unittest.TestCase):
+ def setUp(self):
+ self.s1 = univ.SequenceOf(
+ componentType=univ.OctetString('')
+ )
+ self.s2 = self.s1.clone()
+ def testTag(self):
+ assert self.s1.getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ ), 'wrong tagSet'
+ def testSeq(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('abc'))
+ assert self.s1[0] == str2octs('abc'), 'set by idx fails'
+ self.s1[0] = 'cba'
+ assert self.s1[0] == str2octs('cba'), 'set by idx fails'
+ def testCmp(self):
+ self.s1.clear()
+ self.s1.setComponentByPosition(0, 'abc')
+ self.s2.clear()
+ self.s2.setComponentByPosition(0, univ.OctetString('abc'))
+ assert self.s1 == self.s2, '__cmp__() fails'
+ def testSubtypeSpec(self):
+ s = self.s1.clone(subtypeSpec=constraint.ConstraintsUnion(
+ constraint.SingleValueConstraint(str2octs('abc'))
+ ))
+ try:
+ s.setComponentByPosition(0, univ.OctetString('abc'))
+ except:
+ assert 0, 'constraint fails'
+ try:
+ s.setComponentByPosition(1, univ.OctetString('Abc'))
+ except:
+ pass
+ else:
+ assert 0, 'constraint fails'
+ def testSizeSpec(self):
+ s = self.s1.clone(sizeSpec=constraint.ConstraintsUnion(
+ constraint.ValueSizeConstraint(1,1)
+ ))
+ s.setComponentByPosition(0, univ.OctetString('abc'))
+ try:
+ s.verifySizeSpec()
+ except:
+ assert 0, 'size spec fails'
+ s.setComponentByPosition(1, univ.OctetString('abc'))
+ try:
+ s.verifySizeSpec()
+ except:
+ pass
+ else:
+ assert 0, 'size spec fails'
+ def testGetComponentTagMap(self):
+ assert self.s1.getComponentTagMap().getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString('')
+ }
+ def testSubtype(self):
+ self.s1.clear()
+ assert self.s1.subtype(
+ implicitTag=tag.Tag(tag.tagClassPrivate,tag.tagFormatSimple,2),
+ subtypeSpec=constraint.SingleValueConstraint(1,3),
+ sizeSpec=constraint.ValueSizeConstraint(0,1)
+ ) == self.s1.clone(
+ tagSet=tag.TagSet(tag.Tag(tag.tagClassPrivate,
+ tag.tagFormatSimple,2)),
+ subtypeSpec=constraint.ConstraintsIntersection(constraint.SingleValueConstraint(1,3)),
+ sizeSpec=constraint.ValueSizeConstraint(0,1)
+ )
+ def testClone(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('abc'))
+ s = self.s1.clone()
+ assert len(s) == 0
+ s = self.s1.clone(cloneValueFlag=1)
+ assert len(s) == 1
+ assert s.getComponentByPosition(0) == self.s1.getComponentByPosition(0)
+
+class Sequence(unittest.TestCase):
+ def setUp(self):
+ self.s1 = univ.Sequence(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('name', univ.OctetString('')),
+ namedtype.OptionalNamedType('nick', univ.OctetString('')),
+ namedtype.DefaultedNamedType('age', univ.Integer(34))
+ ))
+ def testTag(self):
+ assert self.s1.getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
+ ), 'wrong tagSet'
+ def testById(self):
+ self.s1.setComponentByName('name', univ.OctetString('abc'))
+ assert self.s1.getComponentByName('name') == str2octs('abc'), 'set by name fails'
+ def testByKey(self):
+ self.s1['name'] = 'abc'
+ assert self.s1['name'] == str2octs('abc'), 'set by key fails'
+ def testGetNearPosition(self):
+ assert self.s1.getComponentTagMapNearPosition(1).getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString(''),
+ univ.Integer.tagSet: univ.Integer(34)
+ }
+ assert self.s1.getComponentPositionNearType(
+ univ.OctetString.tagSet, 1
+ ) == 1
+ def testGetDefaultComponentByPosition(self):
+ self.s1.clear()
+ assert self.s1.getDefaultComponentByPosition(0) == None
+ assert self.s1.getDefaultComponentByPosition(2) == univ.Integer(34)
+ def testSetDefaultComponents(self):
+ self.s1.clear()
+ assert self.s1.getComponentByPosition(2) == None
+ self.s1.setComponentByPosition(0, univ.OctetString('Ping'))
+ self.s1.setComponentByPosition(1, univ.OctetString('Pong'))
+ self.s1.setDefaultComponents()
+ assert self.s1.getComponentByPosition(2) == 34
+ def testClone(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('abc'))
+ self.s1.setComponentByPosition(1, univ.OctetString('def'))
+ self.s1.setComponentByPosition(2, univ.Integer(123))
+ s = self.s1.clone()
+ assert s.getComponentByPosition(0) != self.s1.getComponentByPosition(0)
+ assert s.getComponentByPosition(1) != self.s1.getComponentByPosition(1)
+ assert s.getComponentByPosition(2) != self.s1.getComponentByPosition(2)
+ s = self.s1.clone(cloneValueFlag=1)
+ assert s.getComponentByPosition(0) == self.s1.getComponentByPosition(0)
+ assert s.getComponentByPosition(1) == self.s1.getComponentByPosition(1)
+ assert s.getComponentByPosition(2) == self.s1.getComponentByPosition(2)
+
+class SetOf(unittest.TestCase):
+ def setUp(self):
+ self.s1 = univ.SetOf(componentType=univ.OctetString(''))
+ def testTag(self):
+ assert self.s1.getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ ), 'wrong tagSet'
+ def testSeq(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('abc'))
+ assert self.s1[0] == str2octs('abc'), 'set by idx fails'
+ self.s1.setComponentByPosition(0, self.s1[0].clone('cba'))
+ assert self.s1[0] == str2octs('cba'), 'set by idx fails'
+
+class Set(unittest.TestCase):
+ def setUp(self):
+ self.s1 = univ.Set(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('name', univ.OctetString('')),
+ namedtype.OptionalNamedType('null', univ.Null('')),
+ namedtype.DefaultedNamedType('age', univ.Integer(34))
+ ))
+ self.s2 = self.s1.clone()
+ def testTag(self):
+ assert self.s1.getTagSet() == tag.TagSet(
+ (),
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
+ ), 'wrong tagSet'
+ def testByTypeWithPythonValue(self):
+ self.s1.setComponentByType(univ.OctetString.tagSet, 'abc')
+ assert self.s1.getComponentByType(
+ univ.OctetString.tagSet
+ ) == str2octs('abc'), 'set by name fails'
+ def testByTypeWithInstance(self):
+ self.s1.setComponentByType(univ.OctetString.tagSet, univ.OctetString('abc'))
+ assert self.s1.getComponentByType(
+ univ.OctetString.tagSet
+ ) == str2octs('abc'), 'set by name fails'
+ def testGetTagMap(self):
+ assert self.s1.getTagMap().getPosMap() == {
+ univ.Set.tagSet: univ.Set()
+ }
+ def testGetComponentTagMap(self):
+ assert self.s1.getComponentTagMap().getPosMap() == {
+ univ.OctetString.tagSet: univ.OctetString(''),
+ univ.Null.tagSet: univ.Null(''),
+ univ.Integer.tagSet: univ.Integer(34)
+ }
+ def testGetPositionByType(self):
+ assert self.s1.getComponentPositionByType(
+ univ.Null().getTagSet()
+ ) == 1
+
+class Choice(unittest.TestCase):
+ def setUp(self):
+ innerComp = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('count', univ.Integer()),
+ namedtype.NamedType('flag', univ.Boolean())
+ ))
+ self.s1 = univ.Choice(componentType=namedtype.NamedTypes(
+ namedtype.NamedType('name', univ.OctetString()),
+ namedtype.NamedType('sex', innerComp)
+ ))
+ def testTag(self):
+ assert self.s1.getTagSet() == tag.TagSet(), 'wrong tagSet'
+ def testOuterByTypeWithPythonValue(self):
+ self.s1.setComponentByType(univ.OctetString.tagSet, 'abc')
+ assert self.s1.getComponentByType(
+ univ.OctetString.tagSet
+ ) == str2octs('abc')
+ def testOuterByTypeWithInstanceValue(self):
+ self.s1.setComponentByType(
+ univ.OctetString.tagSet, univ.OctetString('abc')
+ )
+ assert self.s1.getComponentByType(
+ univ.OctetString.tagSet
+ ) == str2octs('abc')
+ def testInnerByTypeWithPythonValue(self):
+ self.s1.setComponentByType(univ.Integer.tagSet, 123, 1)
+ assert self.s1.getComponentByType(
+ univ.Integer.tagSet, 1
+ ) == 123
+ def testInnerByTypeWithInstanceValue(self):
+ self.s1.setComponentByType(
+ univ.Integer.tagSet, univ.Integer(123), 1
+ )
+ assert self.s1.getComponentByType(
+ univ.Integer.tagSet, 1
+ ) == 123
+ def testCmp(self):
+ self.s1.setComponentByName('name', univ.OctetString('abc'))
+ assert self.s1 == str2octs('abc'), '__cmp__() fails'
+ def testGetComponent(self):
+ self.s1.setComponentByType(univ.OctetString.tagSet, 'abc')
+ assert self.s1.getComponent() == str2octs('abc'), 'getComponent() fails'
+ def testGetName(self):
+ self.s1.setComponentByType(univ.OctetString.tagSet, 'abc')
+ assert self.s1.getName() == 'name', 'getName() fails'
+ def testSetComponentByPosition(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('Jim'))
+ assert self.s1 == str2octs('Jim')
+ def testClone(self):
+ self.s1.setComponentByPosition(0, univ.OctetString('abc'))
+ s = self.s1.clone()
+ assert len(s) == 0
+ s = self.s1.clone(cloneValueFlag=1)
+ assert len(s) == 1
+ assert s.getComponentByPosition(0) == self.s1.getComponentByPosition(0)
+
+if __name__ == '__main__': unittest.main()
diff --git a/python/pylru/pylru.py b/python/pylru/pylru.py
new file mode 100644
index 000000000..e69cadb76
--- /dev/null
+++ b/python/pylru/pylru.py
@@ -0,0 +1,556 @@
+
+# Cache implementation with a Least Recently Used (LRU) replacement policy and
+# a basic dictionary interface.
+
+# Copyright (C) 2006, 2009, 2010, 2011 Jay Hutchinson
+
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+# more details.
+
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc., 51
+# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+
+# The cache is implemented using a combination of a python dictionary (hash
+# table) and a circular doubly linked list. Items in the cache are stored in
+# nodes. These nodes make up the linked list. The list is used to efficiently
+# maintain the order that the items have been used in. The front or head of
+# the list contains the most recently used item, the tail of the list
+# contains the least recently used item. When an item is used it can easily
+# (in a constant amount of time) be moved to the front of the list, thus
+# updating its position in the ordering. These nodes are also placed in the
+# hash table under their associated key. The hash table allows efficient
+# lookup of values by key.
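+
+# A minimal usage sketch (hypothetical keys and values, shown here only to
+# illustrate the dictionary-style interface implemented below):
+#
+#     cache = lrucache(2)
+#     cache['a'] = 1
+#     cache['b'] = 2
+#     cache['a']              # touching 'a' makes it the most recently used
+#     cache['c'] = 3          # evicts 'b', the least recently used item
+#     assert 'b' not in cache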
+
+# Class for the node objects.
+class _dlnode(object):
+ def __init__(self):
+ self.empty = True
+
+
+class lrucache(object):
+
+ def __init__(self, size, callback=None):
+
+ self.callback = callback
+
+ # Create an empty hash table.
+ self.table = {}
+
+ # Initialize the doubly linked list with one empty node. This is an
+ # invariant. The cache size must always be greater than zero. Each
+ # node has a 'prev' and 'next' variable to hold the node that comes
+ # before it and after it respectively. Initially the two variables
+ # each point to the head node itself, creating a circular doubly
+ # linked list of size one. Then the size() method is used to adjust
+ # the list to the desired size.
+
+ self.head = _dlnode()
+ self.head.next = self.head
+ self.head.prev = self.head
+
+ self.listSize = 1
+
+ # Adjust the size
+ self.size(size)
+
+
+ def __len__(self):
+ return len(self.table)
+
+ def clear(self):
+ for node in self.dli():
+ node.empty = True
+ node.key = None
+ node.value = None
+
+ self.table.clear()
+
+
+ def __contains__(self, key):
+ return key in self.table
+
+ # Looks up a value in the cache without affecting cache order.
+ def peek(self, key):
+ # Look up the node
+ node = self.table[key]
+ return node.value
+
+
+ def __getitem__(self, key):
+ # Look up the node
+ node = self.table[key]
+
+        # Update the list ordering. Move this node so that it directly
+        # precedes the head node. Then set the 'head' variable to it. This
+ # makes it the new head of the list.
+ self.mtf(node)
+ self.head = node
+
+ # Return the value.
+ return node.value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # First, see if any value is stored under 'key' in the cache already.
+ # If so we are going to replace that value with the new one.
+ if key in self.table:
+
+ # Lookup the node
+ node = self.table[key]
+
+ # Replace the value.
+ node.value = value
+
+ # Update the list ordering.
+ self.mtf(node)
+ self.head = node
+
+ return
+
+ # Ok, no value is currently stored under 'key' in the cache. We need
+ # to choose a node to place the new item in. There are two cases. If
+ # the cache is full some item will have to be pushed out of the
+ # cache. We want to choose the node with the least recently used
+ # item. This is the node at the tail of the list. If the cache is not
+ # full we want to choose a node that is empty. Because of the way the
+ # list is managed, the empty nodes are always together at the tail
+        # end of the list. Thus, in either case, by choosing the node at the
+ # tail of the list our conditions are satisfied.
+
+        # Since the list is circular, the tail node directly precedes the
+ # 'head' node.
+ node = self.head.prev
+
+ # If the node already contains something we need to remove the old
+ # key from the dictionary.
+ if not node.empty:
+ if self.callback is not None:
+ self.callback(node.key, node.value)
+ del self.table[node.key]
+
+ # Place the new key and value in the node
+ node.empty = False
+ node.key = key
+ node.value = value
+
+ # Add the node to the dictionary under the new key.
+ self.table[key] = node
+
+ # We need to move the node to the head of the list. The node is the
+        # tail node, so it directly precedes the head node due to the list
+ # being circular. Therefore, the ordering is already correct, we just
+ # need to adjust the 'head' variable.
+ self.head = node
+
+
+ def __delitem__(self, key):
+
+ # Lookup the node, then remove it from the hash table.
+ node = self.table[key]
+ del self.table[key]
+
+ node.empty = True
+
+ # Not strictly necessary.
+ node.key = None
+ node.value = None
+
+ # Because this node is now empty we want to reuse it before any
+ # non-empty node. To do that we want to move it to the tail of the
+        # list. We move it so that it directly precedes the 'head' node. This
+ # makes it the tail node. The 'head' is then adjusted. This
+ # adjustment ensures correctness even for the case where the 'node'
+ # is the 'head' node.
+ self.mtf(node)
+ self.head = node.next
+
+ def __iter__(self):
+
+ # Return an iterator that returns the keys in the cache in order from
+ # the most recently to least recently used. Does not modify the cache
+ # order.
+ for node in self.dli():
+ yield node.key
+
+ def items(self):
+
+ # Return an iterator that returns the (key, value) pairs in the cache
+ # in order from the most recently to least recently used. Does not
+ # modify the cache order.
+ for node in self.dli():
+ yield (node.key, node.value)
+
+ def keys(self):
+
+ # Return an iterator that returns the keys in the cache in order from
+ # the most recently to least recently used. Does not modify the cache
+ # order.
+ for node in self.dli():
+ yield node.key
+
+ def values(self):
+
+ # Return an iterator that returns the values in the cache in order
+ # from the most recently to least recently used. Does not modify the
+ # cache order.
+ for node in self.dli():
+ yield node.value
+
+ def size(self, size=None):
+
+ if size is not None:
+ assert size > 0
+ if size > self.listSize:
+ self.addTailNode(size - self.listSize)
+ elif size < self.listSize:
+ self.removeTailNode(self.listSize - size)
+
+ return self.listSize
+
+ # Increases the size of the cache by inserting n empty nodes at the tail
+ # of the list.
+ def addTailNode(self, n):
+ for i in range(n):
+ node = _dlnode()
+ node.next = self.head
+ node.prev = self.head.prev
+
+ self.head.prev.next = node
+ self.head.prev = node
+
+ self.listSize += n
+
+ # Decreases the size of the list by removing n nodes from the tail of the
+ # list.
+ def removeTailNode(self, n):
+ assert self.listSize > n
+ for i in range(n):
+ node = self.head.prev
+ if not node.empty:
+ if self.callback is not None:
+ self.callback(node.key, node.value)
+ del self.table[node.key]
+
+ # Splice the tail node out of the list
+ self.head.prev = node.prev
+ node.prev.next = self.head
+
+ # The next four lines are not strictly necessary.
+ node.prev = None
+ node.next = None
+
+ node.key = None
+ node.value = None
+
+ self.listSize -= n
+
+
+ # This method adjusts the ordering of the doubly linked list so that
+ # 'node' directly precedes the 'head' node. Because of the order of
+ # operations, if 'node' already directly precedes the 'head' node or if
+ # 'node' is the 'head' node the order of the list will be unchanged.
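+    # For example, given the circular list head -> a -> b -> c (-> head),
+    # mtf(a) relinks it to head -> b -> c -> a (-> head), so that 'a' then
+    # directly precedes 'head'. Note that mtf() never updates self.head;
+    # callers adjust the 'head' variable afterwards as needed.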
+ def mtf(self, node):
+ node.prev.next = node.next
+ node.next.prev = node.prev
+
+ node.prev = self.head.prev
+ node.next = self.head.prev.next
+
+ node.next.prev = node
+ node.prev.next = node
+
+ # This method returns an iterator that iterates over the non-empty nodes
+ # in the doubly linked list in order from the most recently to the least
+ # recently used.
+ def dli(self):
+ node = self.head
+ for i in range(len(self.table)):
+ yield node
+ node = node.next
+
+
+
+
+class WriteThroughCacheManager(object):
+ def __init__(self, store, size):
+ self.store = store
+ self.cache = lrucache(size)
+
+ def __len__(self):
+ return len(self.store)
+
+ # Returns/sets the size of the managed cache.
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+ self.store.clear()
+
+ def __contains__(self, key):
+ # Check the cache first. If it is there we can return quickly.
+ if key in self.cache:
+ return True
+
+ # Not in the cache. Might be in the underlying store.
+ if key in self.store:
+ return True
+
+ return False
+
+ def __getitem__(self, key):
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
+ value = self.store[key]
+ self.cache[key] = value
+ return value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # Add the key/value pair to the cache and store.
+ self.cache[key] = value
+ self.store[key] = value
+
+ def __delitem__(self, key):
+        # With write-through behavior the cache and store should be kept
+        # consistent, so delete the key from the store first.
+ del self.store[key]
+ try:
+ # Ok, delete from the store was successful. It might also be in
+ # the cache, try and delete it. If not we catch the KeyError and
+ # ignore it.
+ del self.cache[key]
+ except KeyError:
+ pass
+
+ def __iter__(self):
+ return self.keys()
+
+ def keys(self):
+ return self.store.keys()
+
+ def values(self):
+ return self.store.values()
+
+ def items(self):
+ return self.store.items()
+
+
+
+class WriteBackCacheManager(object):
+ def __init__(self, store, size):
+ self.store = store
+
+ # Create a set to hold the dirty keys.
+ self.dirty = set()
+
+ # Define a callback function to be called by the cache when a
+ # key/value pair is about to be ejected. This callback will check to
+ # see if the key is in the dirty set. If so, then it will update the
+ # store object and remove the key from the dirty set.
+ def callback(key, value):
+ if key in self.dirty:
+ self.store[key] = value
+ self.dirty.remove(key)
+
+ # Create a cache and give it the callback function.
+ self.cache = lrucache(size, callback)
+
+ # Returns/sets the size of the managed cache.
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+ self.dirty.clear()
+ self.store.clear()
+
+ def __contains__(self, key):
+ # Check the cache first, since if it is there we can return quickly.
+ if key in self.cache:
+ return True
+
+ # Not in the cache. Might be in the underlying store.
+ if key in self.store:
+ return True
+
+ return False
+
+ def __getitem__(self, key):
+ # First we try the cache. If successful we just return the value. If
+ # not we catch KeyError and ignore it since that just means the key
+ # was not in the cache.
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ # It wasn't in the cache. Look it up in the store, add the entry to
+ # the cache, and return the value.
+ value = self.store[key]
+ self.cache[key] = value
+ return value
+
+ def get(self, key, default=None):
+ """Get an item - return default (None) if not present"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ # Add the key/value pair to the cache.
+ self.cache[key] = value
+ self.dirty.add(key)
+
+ def __delitem__(self, key):
+
+ found = False
+ try:
+ del self.cache[key]
+ found = True
+ self.dirty.remove(key)
+ except KeyError:
+ pass
+
+ try:
+ del self.store[key]
+ found = True
+ except KeyError:
+ pass
+
+ if not found: # If not found in cache or store, raise error.
+ raise KeyError
+
+
+ def __iter__(self):
+ return self.keys()
+
+ def keys(self):
+ for key in self.store.keys():
+ if key not in self.dirty:
+ yield key
+
+ for key in self.dirty:
+ yield key
+
+
+ def values(self):
+ for key, value in self.items():
+ yield value
+
+
+ def items(self):
+ for key, value in self.store.items():
+ if key not in self.dirty:
+ yield (key, value)
+
+ for key in self.dirty:
+ value = self.cache.peek(key)
+ yield (key, value)
+
+
+
+ def sync(self):
+ # For each dirty key, peek at its value in the cache and update the
+ # store. Doesn't change the cache's order.
+ for key in self.dirty:
+ self.store[key] = self.cache.peek(key)
+ # There are no dirty keys now.
+ self.dirty.clear()
+
+ def flush(self):
+ self.sync()
+ self.cache.clear()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.sync()
+ return False
+
+
+class FunctionCacheManager(object):
+ def __init__(self, func, size):
+ self.func = func
+ self.cache = lrucache(size)
+
+ def size(self, size=None):
+ return self.cache.size(size)
+
+ def clear(self):
+ self.cache.clear()
+
+ def __call__(self, *args, **kwargs):
+ kwtuple = tuple((key, kwargs[key]) for key in sorted(kwargs.keys()))
+ key = (args, kwtuple)
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ value = self.func(*args, **kwargs)
+ self.cache[key] = value
+ return value
+
+
+def lruwrap(store, size, writeback=False):
+ if writeback:
+ return WriteBackCacheManager(store, size)
+ else:
+ return WriteThroughCacheManager(store, size)
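+
+# Example (sketch): put an LRU cache in front of an existing mapping.
+#
+#     store = {}
+#     cached = lruwrap(store, 128)  # write-through: writes reach 'store' at once
+#
+# With writeback=True, writes stay in the cache until sync()/flush() is
+# called; the returned WriteBackCacheManager also works as a context
+# manager that syncs on exit.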
+
+import functools
+
+class lrudecorator(object):
+ def __init__(self, size):
+ self.cache = lrucache(size)
+
+ def __call__(self, func):
+ def wrapper(*args, **kwargs):
+ kwtuple = tuple((key, kwargs[key]) for key in sorted(kwargs.keys()))
+ key = (args, kwtuple)
+ try:
+ return self.cache[key]
+ except KeyError:
+ pass
+
+ value = func(*args, **kwargs)
+ self.cache[key] = value
+ return value
+
+ wrapper.cache = self.cache
+ wrapper.size = self.cache.size
+ wrapper.clear = self.cache.clear
+ return functools.update_wrapper(wrapper, func)
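+
+# Example (sketch): memoize a function with an LRU-bounded cache.
+#
+#     @lrudecorator(100)
+#     def square(x):
+#         return x * x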
diff --git a/python/pylru/test.py b/python/pylru/test.py
new file mode 100644
index 000000000..7a4842fb5
--- /dev/null
+++ b/python/pylru/test.py
@@ -0,0 +1,238 @@
+
+from pylru import *
+import random
+
+# This tests PyLRU by fuzzing it with random operations, then checking the
+# results against another, simpler, LRU cache implementation.
+
+class simplelrucache:
+
+ def __init__(self, size):
+
+ # Initialize the cache as empty.
+ self.cache = []
+ self.size = size
+
+ def __contains__(self, key):
+
+ for x in self.cache:
+ if x[0] == key:
+ return True
+
+ return False
+
+
+ def __getitem__(self, key):
+
+ for i in range(len(self.cache)):
+ x = self.cache[i]
+ if x[0] == key:
+ del self.cache[i]
+ self.cache.append(x)
+ return x[1]
+
+ raise KeyError
+
+
+ def __setitem__(self, key, value):
+
+ for i in range(len(self.cache)):
+ x = self.cache[i]
+ if x[0] == key:
+ x[1] = value
+ del self.cache[i]
+ self.cache.append(x)
+ return
+
+ if len(self.cache) == self.size:
+ self.cache = self.cache[1:]
+
+ self.cache.append([key, value])
+
+
+ def __delitem__(self, key):
+
+ for i in range(len(self.cache)):
+ if self.cache[i][0] == key:
+ del self.cache[i]
+ return
+
+ raise KeyError
+
+    def resize(self, x):
+ assert x > 0
+ self.size = x
+ if x < len(self.cache):
+ del self.cache[:len(self.cache) - x]
+
+
+def test(a, b, c, d, verify):
+
+ for i in range(1000):
+ x = random.randint(0, 512)
+ y = random.randint(0, 512)
+
+ a[x] = y
+ b[x] = y
+ verify(c, d)
+
+ for i in range(1000):
+ x = random.randint(0, 512)
+ if x in a:
+ assert x in b
+ z = a[x]
+ z += b[x]
+ else:
+ assert x not in b
+ verify(c, d)
+
+ for i in range(256):
+ x = random.randint(0, 512)
+ if x in a:
+ assert x in b
+ del a[x]
+ del b[x]
+ else:
+ assert x not in b
+ verify(c, d)
+
+
+def testcache():
+ def verify(a, b):
+ q = []
+ z = a.head
+ for j in range(len(a.table)):
+ q.append([z.key, z.value])
+ z = z.next
+
+ assert q == b.cache[::-1]
+
+ q2 = []
+ for x, y in q:
+ q2.append((x, y))
+
+ assert list(a.items()) == q2
+ assert list(zip(a.keys(), a.values())) == q2
+ assert list(a.keys()) == list(a)
+
+
+ a = lrucache(128)
+ b = simplelrucache(128)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(71)
+ b.resize(71)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(341)
+ b.resize(341)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+ a.size(127)
+ b.resize(127)
+ verify(a, b)
+ test(a, b, a, b, verify)
+
+
+def wraptest():
+
+ def verify(p, x):
+ assert p == x.store
+ for key, value in x.cache.items():
+ assert x.store[key] == value
+
+ tmp = list(x.items())
+ tmp.sort()
+
+ tmp2 = list(p.items())
+ tmp2.sort()
+
+ assert tmp == tmp2
+
+ p = dict()
+ q = dict()
+ x = lruwrap(q, 128)
+
+ test(p, x, p, x, verify)
+
+
+
+def wraptest2():
+
+ def verify(p, x):
+ for key, value in x.store.items():
+ if key not in x.dirty:
+ assert p[key] == value
+
+ for key in x.dirty:
+ assert x.cache.peek(key) == p[key]
+
+ for key, value in x.cache.items():
+ if key not in x.dirty:
+ assert x.store[key] == p[key] == value
+
+ tmp = list(x.items())
+ tmp.sort()
+
+ tmp2 = list(p.items())
+ tmp2.sort()
+
+ assert tmp == tmp2
+
+ p = dict()
+ q = dict()
+ x = lruwrap(q, 128, True)
+
+ test(p, x, p, x, verify)
+
+ x.sync()
+ assert p == q
+
+def wraptest3():
+
+ def verify(p, x):
+ for key, value in x.store.items():
+ if key not in x.dirty:
+ assert p[key] == value
+
+ for key in x.dirty:
+ assert x.cache.peek(key) == p[key]
+
+ for key, value in x.cache.items():
+ if key not in x.dirty:
+ assert x.store[key] == p[key] == value
+
+ p = dict()
+ q = dict()
+ with lruwrap(q, 128, True) as x:
+ test(p, x, p, x, verify)
+
+ assert p == q
+
+
+@lrudecorator(100)
+def square(x):
+ return x*x
+
+def testDecorator():
+ for i in range(1000):
+ x = random.randint(0, 200)
+ assert square(x) == x*x
+
+
+if __name__ == '__main__':
+
+ random.seed()
+
+
+ for i in range(20):
+ testcache()
+ wraptest()
+ wraptest2()
+ wraptest3()
+ testDecorator()
+
+
diff --git a/python/pystache/.gitignore b/python/pystache/.gitignore
new file mode 100644
index 000000000..758d62df9
--- /dev/null
+++ b/python/pystache/.gitignore
@@ -0,0 +1,17 @@
+*.pyc
+.DS_Store
+# Tox support. See: http://pypi.python.org/pypi/tox
+.tox
+# Our tox runs convert the doctests in *.rst files to Python 3 prior to
+# running tests. Ignore these temporary files.
+*.temp2to3.rst
+# The setup.py "prep" command converts *.md to *.temp.rst (via *.temp.md).
+*.temp.md
+*.temp.rst
+# TextMate project file
+*.tmproj
+# Distribution-related folders and files.
+build
+dist
+MANIFEST
+pystache.egg-info
diff --git a/python/pystache/.gitmodules b/python/pystache/.gitmodules
new file mode 100644
index 000000000..c55c8e5e3
--- /dev/null
+++ b/python/pystache/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "ext/spec"]
+ path = ext/spec
+ url = http://github.com/mustache/spec.git
diff --git a/python/pystache/.travis.yml b/python/pystache/.travis.yml
new file mode 100644
index 000000000..00227053a
--- /dev/null
+++ b/python/pystache/.travis.yml
@@ -0,0 +1,14 @@
+language: python
+
+# Travis CI has no plans to support Jython and no longer supports Python 2.5.
+python:
+ - 2.6
+ - 2.7
+ - 3.2
+ - pypy
+
+script:
+ - python setup.py install
+ # Include the spec tests directory for Mustache spec tests and the
+ # project directory for doctests.
+ - pystache-test . ext/spec/specs
diff --git a/python/pystache/HISTORY.md b/python/pystache/HISTORY.md
new file mode 100644
index 000000000..e5b7638ae
--- /dev/null
+++ b/python/pystache/HISTORY.md
@@ -0,0 +1,169 @@
+History
+=======
+
+**Note:** Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+0.5.4 (2014-07-11)
+------------------
+
+- Bugfix: made test with filenames OS agnostic (issue \#162).
+
+0.5.3 (2012-11-03)
+------------------
+
+- Added ability to customize string coercion (e.g. to have None render as
+ `''`) (issue \#130).
+- Added Renderer.render_name() to render a template by name (issue \#122).
+- Added TemplateSpec.template_path to specify an absolute path to a
+ template (issue \#41).
+- Added option of raising errors on missing tags/partials:
+ `Renderer(missing_tags='strict')` (issue \#110).
+- Added support for finding and loading templates by file name in
+ addition to by template name (issue \#127). [xgecko]
+- Added a `parse()` function that yields a printable, pre-compiled
+ parse tree.
+- Added support for rendering pre-compiled templates.
+- Added Python 3.3 to the list of supported versions.
+- Added support for [PyPy](http://pypy.org/) (issue \#125).
+- Added support for [Travis CI](http://travis-ci.org) (issue \#124).
+ [msabramo]
+- Bugfix: `defaults.DELIMITERS` can now be changed at runtime (issue \#135).
+ [bennoleslie]
+- Bugfix: exceptions raised from a property are no longer swallowed
+ when getting a key from a context stack (issue \#110).
+- Bugfix: lambda section values can now return non-ascii, non-unicode
+ strings (issue \#118).
+- Bugfix: allow `test_pystache.py` and `tox` to pass when run from a
+ downloaded sdist (i.e. without the spec test directory).
+- Convert HISTORY and README files from reST to Markdown.
+- More robust handling of byte strings in Python 3.
+- Added Creative Commons license for David Phillips's logo.
+
+0.5.2 (2012-05-03)
+------------------
+
+- Added support for dot notation and version 1.1.2 of the spec (issue
+ \#99). [rbp]
+- Missing partials now render as empty string per latest version of
+ spec (issue \#115).
+- Bugfix: falsey values now coerced to strings using str().
+- Bugfix: lambda return values for sections no longer pushed onto
+ context stack (issue \#113).
+- Bugfix: lists of lambdas for sections were not rendered (issue
+ \#114).
+
+0.5.1 (2012-04-24)
+------------------
+
+- Added support for Python 3.1 and 3.2.
+- Added tox support to test multiple Python versions.
+- Added test script entry point: pystache-test.
+- Added \_\_version\_\_ package attribute.
+- Test harness now supports both YAML and JSON forms of Mustache spec.
+- Test harness no longer requires nose.
+
+0.5.0 (2012-04-03)
+------------------
+
+This version represents a major rewrite and refactoring of the code base
+that also adds features and fixes many bugs. All functionality and
+nearly all unit tests have been preserved. However, some backwards
+incompatible changes to the API have been made.
+
+Below is a selection of some of the changes (not exhaustive).
+
+Highlights:
+
+- Pystache now passes all tests in version 1.0.3 of the [Mustache
+ spec](https://github.com/mustache/spec). [pvande]
+- Removed View class: it is no longer necessary to subclass from View
+ or from any other class to create a view.
+- Replaced Template with Renderer class: template rendering behavior
+ can be modified via the Renderer constructor or by setting
+ attributes on a Renderer instance.
+- Added TemplateSpec class: template rendering can be specified on a
+ per-view basis by subclassing from TemplateSpec.
+- Introduced separation of concerns and removed circular dependencies
+ (e.g. between Template and View classes, cf. [issue
+ \#13](https://github.com/defunkt/pystache/issues/13)).
+- Unicode now used consistently throughout the rendering process.
+- Expanded test coverage: nosetests now runs doctests and \~105 test
+ cases from the Mustache spec (increasing the number of tests from 56
+ to \~315).
+- Added a rudimentary benchmarking script to gauge performance while
+ refactoring.
+- Extensive documentation added (e.g. docstrings).
+
+Other changes:
+
+- Added a command-line interface. [vrde]
+- The main rendering class now accepts a custom partial loader (e.g. a
+ dictionary) and a custom escape function.
+- Non-ascii characters in str strings are now supported while
+ rendering.
+- Added string encoding, file encoding, and errors options for
+ decoding to unicode.
+- Removed the output encoding option.
+- Removed the use of markupsafe.
+
+Bug fixes:
+
+- Context values no longer processed as template strings.
+ [jakearchibald]
+- Whitespace surrounding sections is no longer altered, per the spec.
+ [heliodor]
+- Zeroes now render correctly when using PyPy. [alex]
+- Multiline comments now permitted. [fczuardi]
+- Extensionless template files are now supported.
+- Passing `**kwargs` to `Template()` no longer modifies the context.
+- Passing `**kwargs` to `Template()` with no context no longer raises
+ an exception.
+
+0.4.1 (2012-03-25)
+------------------
+
+- Added support for Python 2.4. [wangtz, jvantuyl]
+
+0.4.0 (2011-01-12)
+------------------
+
+- Add support for nested contexts (within template and view)
+- Add support for inverted lists
+- Decoupled template loading
+
+0.3.1 (2010-05-07)
+------------------
+
+- Fix package
+
+0.3.0 (2010-05-03)
+------------------
+
+- View.template\_path can now hold a list of paths
+- Add {{& blah}} as an alias for {{{ blah }}}
+- Higher Order Sections
+- Inverted sections
+
+0.2.0 (2010-02-15)
+------------------
+
+- Bugfix: Methods returning False or None are not rendered
+- Bugfix: Don't render an empty string when a tag's value is 0.
+ [enaeseth]
+- Add support for using non-callables as View attributes.
+ [joshthecoder]
+- Allow using View instances as attributes. [joshthecoder]
+- Support for Unicode and non-ASCII-encoded bytestring output.
+ [enaeseth]
+- Template file encoding awareness. [enaeseth]
+
+0.1.1 (2009-11-13)
+------------------
+
+- Ensure we're dealing with strings, always
+- Tests can be run by executing the test file directly
+
+0.1.0 (2009-11-12)
+------------------
+
+- First release
diff --git a/python/pystache/LICENSE b/python/pystache/LICENSE
new file mode 100644
index 000000000..42be9d646
--- /dev/null
+++ b/python/pystache/LICENSE
@@ -0,0 +1,22 @@
+Copyright (C) 2012 Chris Jerdonek. All rights reserved.
+
+Copyright (c) 2009 Chris Wanstrath
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/python/pystache/MANIFEST.in b/python/pystache/MANIFEST.in
new file mode 100644
index 000000000..bdc64bf71
--- /dev/null
+++ b/python/pystache/MANIFEST.in
@@ -0,0 +1,13 @@
+include README.md
+include HISTORY.md
+include LICENSE
+include TODO.md
+include setup_description.rst
+include tox.ini
+include test_pystache.py
+# You cannot use package_data, for example, to include data files in a
+# source distribution when using Distribute.
+recursive-include pystache/tests *.mustache *.txt
+# We deliberately exclude the gh/ directory because it contains copies
+# of resources needed only for the web page hosted on GitHub (via the
+# gh-pages branch).
diff --git a/python/pystache/README.md b/python/pystache/README.md
new file mode 100644
index 000000000..54a96088b
--- /dev/null
+++ b/python/pystache/README.md
@@ -0,0 +1,276 @@
+Pystache
+========
+
+<!-- Since PyPI rejects reST long descriptions that contain HTML, -->
+<!-- HTML comments must be removed when converting this file to reST. -->
+<!-- For more information on PyPI's behavior in this regard, see: -->
+<!-- http://docs.python.org/distutils/uploading.html#pypi-package-display -->
+<!-- The Pystache setup script strips 1-line HTML comments prior -->
+<!-- to converting to reST, so all HTML comments should be one line. -->
+<!-- -->
+<!-- We leave the leading brackets empty here. Otherwise, unwanted -->
+<!-- caption text shows up in the reST version converted by pandoc. -->
+![](http://defunkt.github.com/pystache/images/logo_phillips.png "mustachioed, monocled snake by David Phillips")
+
+![](https://secure.travis-ci.org/defunkt/pystache.png "Travis CI current build status")
+
+[Pystache](http://defunkt.github.com/pystache) is a Python
+implementation of [Mustache](http://mustache.github.com/). Mustache is a
+framework-agnostic, logic-free templating system inspired by
+[ctemplate](http://code.google.com/p/google-ctemplate/) and
+[et](http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html).
+Like ctemplate, Mustache "emphasizes separating logic from presentation:
+it is impossible to embed application logic in this template language."
+
+The [mustache(5)](http://mustache.github.com/mustache.5.html) man page
+provides a good introduction to Mustache's syntax. For a more complete
+(and more current) description of Mustache's behavior, see the official
+[Mustache spec](https://github.com/mustache/spec).
+
+Pystache is [semantically versioned](http://semver.org) and can be found
+on [PyPI](http://pypi.python.org/pypi/pystache). This version of
+Pystache passes all tests in [version
+1.1.2](https://github.com/mustache/spec/tree/v1.1.2) of the spec.
+
+
+Requirements
+------------
+
+Pystache is tested with--
+
+- Python 2.4 (requires simplejson [version
+ 2.0.9](http://pypi.python.org/pypi/simplejson/2.0.9) or earlier)
+- Python 2.5 (requires
+ [simplejson](http://pypi.python.org/pypi/simplejson/))
+- Python 2.6
+- Python 2.7
+- Python 3.1
+- Python 3.2
+- Python 3.3
+- [PyPy](http://pypy.org/)
+
+[Distribute](http://packages.python.org/distribute/) (the setuptools fork)
+is recommended over [setuptools](http://pypi.python.org/pypi/setuptools),
+and is required in some cases (e.g. for Python 3 support).
+If you use [pip](http://www.pip-installer.org/), you probably already satisfy
+this requirement.
+
+JSON support is needed only for the command-line interface and to run
+the spec tests. We require simplejson for earlier versions of Python
+since Python's [json](http://docs.python.org/library/json.html) module
+was added in Python 2.6.
+
+For Python 2.4 we require an earlier version of simplejson since
+simplejson stopped officially supporting Python 2.4 in simplejson
+version 2.1.0. Earlier versions of simplejson can be installed manually,
+as follows:
+
+ pip install 'simplejson<2.1.0'
+
+Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+Install It
+----------
+
+ pip install pystache
+
+And test it--
+
+ pystache-test
+
+To install and test from source (e.g. from GitHub), see the Develop
+section.
+
+Use It
+------
+
+ >>> import pystache
+ >>> print pystache.render('Hi {{person}}!', {'person': 'Mom'})
+ Hi Mom!
+
+You can also create dedicated view classes to hold your view logic.
+
+Here's your view class (in .../examples/readme.py):
+
+ class SayHello(object):
+ def to(self):
+ return "Pizza"
+
+Instantiating like so:
+
+ >>> from pystache.tests.examples.readme import SayHello
+ >>> hello = SayHello()
+
+Then your template, say\_hello.mustache (by default in the same
+directory as your class definition):
+
+ Hello, {{to}}!
+
+Pull it together:
+
+ >>> renderer = pystache.Renderer()
+ >>> print renderer.render(hello)
+ Hello, Pizza!
+
+For greater control over rendering (e.g. to specify a custom template
+directory), use the `Renderer` class like above. One can pass attributes
+to the Renderer class constructor or set them on a Renderer instance. To
+customize template loading on a per-view basis, subclass `TemplateSpec`.
+See the docstrings of the
+[Renderer](https://github.com/defunkt/pystache/blob/master/pystache/renderer.py)
+class and
+[TemplateSpec](https://github.com/defunkt/pystache/blob/master/pystache/template_spec.py)
+class for more information.
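+
+For example, a sketch (the `search_dirs` keyword argument is an assumption
+here; see the Renderer docstrings for the exact constructor arguments):
+
+    >>> renderer = pystache.Renderer(search_dirs='examples')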
+
+You can also pre-parse a template:
+
+ >>> parsed = pystache.parse(u"Hey {{#who}}{{.}}!{{/who}}")
+ >>> print parsed
+ [u'Hey ', _SectionNode(key=u'who', index_begin=12, index_end=18, parsed=[_EscapeNode(key=u'.'), u'!'])]
+
+And then:
+
+ >>> print renderer.render(parsed, {'who': 'Pops'})
+ Hey Pops!
+ >>> print renderer.render(parsed, {'who': 'you'})
+ Hey you!
+
+Python 3
+--------
+
+Pystache has supported Python 3 since version 0.5.1. Pystache behaves
+slightly differently between Python 2 and 3, as follows:
+
+- In Python 2, the default html-escape function `cgi.escape()` does
+ not escape single quotes. In Python 3, the default escape function
+ `html.escape()` does escape single quotes.
+- In both Python 2 and 3, the string and file encodings default to
+ `sys.getdefaultencoding()`. However, this function can return
+ different values under Python 2 and 3, even when run from the same
+  system. Check the behavior on your own system, or do
+ not rely on the defaults by passing in the encodings explicitly
+ (e.g. to the `Renderer` class).
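+
+For example, to make escaping explicit rather than relying on the
+version-dependent default, you can pass your own escape function (a sketch;
+the `escape` keyword argument is an assumption here):
+
+    >>> renderer = pystache.Renderer(escape=lambda u: u)  # no HTML escaping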
+
+Unicode
+-------
+
+This section describes how Pystache handles unicode, strings, and
+encodings.
+
+Internally, Pystache uses [only unicode
+strings](http://docs.python.org/howto/unicode.html#tips-for-writing-unicode-aware-programs)
+(`str` in Python 3 and `unicode` in Python 2). For input, Pystache
+accepts both unicode strings and byte strings (`bytes` in Python 3 and
+`str` in Python 2). For output, Pystache's template rendering methods
+return only unicode.
+
+Pystache's `Renderer` class supports a number of attributes to control
+how Pystache converts byte strings to unicode on input. These include
+the `file_encoding`, `string_encoding`, and `decode_errors` attributes.
+
+The `file_encoding` attribute is the encoding the renderer uses to
+convert to unicode any files read from the file system. Similarly,
+`string_encoding` is the encoding the renderer uses to convert any other
+byte strings encountered during the rendering process into unicode (e.g.
+context values that are encoded byte strings).
+
+The `decode_errors` attribute is what the renderer passes as the
+`errors` argument to Python's built-in unicode-decoding function
+(`str()` in Python 3 and `unicode()` in Python 2). The valid values for
+this argument are `strict`, `ignore`, and `replace`.
+
+Each of these attributes can be set via the `Renderer` class's
+constructor using a keyword argument of the same name. See the Renderer
+class's docstrings for further details. In addition, the `file_encoding`
+attribute can be controlled on a per-view basis by subclassing the
+`TemplateSpec` class. When not specified explicitly, these attributes
+default to values set in Pystache's `defaults` module.
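+
+For example, a hypothetical renderer that decodes byte strings as UTF-8,
+replacing undecodable bytes rather than raising an error:
+
+    >>> renderer = pystache.Renderer(string_encoding='utf-8', decode_errors='replace')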
+
+Develop
+-------
+
+To test from a source distribution (without installing)--
+
+ python test_pystache.py
+
+To test Pystache with multiple versions of Python (with a single
+command!), you can use [tox](http://pypi.python.org/pypi/tox):
+
+ pip install 'virtualenv<1.8' # Version 1.8 dropped support for Python 2.4.
+ pip install 'tox<1.4' # Version 1.4 dropped support for Python 2.4.
+ tox
+
+If you do not have all Python versions listed in `tox.ini`--
+
+ tox -e py26,py32 # for example
+
+The source distribution tests also include doctests and tests from the
+Mustache spec. To include tests from the Mustache spec in your test
+runs:
+
+ git submodule init
+ git submodule update
+
+The test harness parses the spec's (more human-readable) yaml files if
+[PyYAML](http://pypi.python.org/pypi/PyYAML) is present. Otherwise, it
+parses the json files. To install PyYAML--
+
+ pip install pyyaml
+
+To run a subset of the tests, you can use
+[nose](http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html):
+
+ pip install nose
+ nosetests --tests pystache/tests/test_context.py:GetValueTests.test_dictionary__key_present
+
+### Using Python 3 with Pystache from source
+
+Pystache is written in Python 2 and must be converted to Python 3 prior to
+using it with Python 3. The installation process (and tox) do this
+automatically.
+
+To convert the code to Python 3 manually (while using Python 3)--
+
+ python setup.py build
+
+This writes the converted code to a subdirectory called `build`.
+By design, Python 3 builds
+[cannot](https://bitbucket.org/tarek/distribute/issue/292/allow-use_2to3-with-python-2)
+be created from Python 2.
+
+To convert the code without using setup.py, you can use
+[2to3](http://docs.python.org/library/2to3.html) as follows (two steps)--
+
+ 2to3 --write --nobackups --no-diffs --doctests_only pystache
+ 2to3 --write --nobackups --no-diffs pystache
+
+This converts the code (and doctests) in place.
+
+To `import pystache` from a source distribution while using Python 3, be
+sure that you are importing from a directory containing a converted
+version of the code (e.g. from the `build` directory after converting),
+and not from the original (unconverted) source directory. Otherwise, you will
+get a syntax error. You can help prevent this by not running the Python
+interpreter from the project directory when importing Pystache while using
+Python 3.
+
+
+Mailing List
+------------
+
+There is a [mailing list](http://librelist.com/browser/pystache/). Note
+that there is a bit of a delay between posting a message and seeing it
+appear in the mailing list archive.
+
+Credits
+-------
+
+ >>> context = { 'author': 'Chris Wanstrath', 'maintainer': 'Chris Jerdonek' }
+ >>> print pystache.render("Author: {{author}}\nMaintainer: {{maintainer}}", context)
+ Author: Chris Wanstrath
+ Maintainer: Chris Jerdonek
+
+Pystache logo by [David Phillips](http://davidphillips.us/) is licensed
+under a [Creative Commons Attribution-ShareAlike 3.0 Unported
+License](http://creativecommons.org/licenses/by-sa/3.0/deed.en_US).
+![](http://i.creativecommons.org/l/by-sa/3.0/88x31.png "Creative
+Commons Attribution-ShareAlike 3.0 Unported License")
diff --git a/python/pystache/TODO.md b/python/pystache/TODO.md
new file mode 100644
index 000000000..cd8241765
--- /dev/null
+++ b/python/pystache/TODO.md
@@ -0,0 +1,16 @@
+TODO
+====
+
+In development branch:
+
+* Figure out a way to suppress center alignment of images in reST output.
+* Add a unit test for the change made in 7ea8e7180c41. This is with regard
+ to not requiring spec tests when running tests from a downloaded sdist.
+* End support for Python 2.4.
+* Add Python 3.3 to tox file (after deprecating 2.4).
+* Turn the benchmarking script at pystache/tests/benchmark.py into a command
+ in pystache/commands, or make it a subcommand of one of the existing
+ commands (i.e. using a command argument).
+* Provide support for logging in at least one of the commands.
+* Make sure command parsing to pystache-test doesn't break with Python 2.4 and earlier.
+* Combine pystache-test with the main command.
diff --git a/python/pystache/gh/images/logo_phillips.png b/python/pystache/gh/images/logo_phillips.png
new file mode 100644
index 000000000..749190136
--- /dev/null
+++ b/python/pystache/gh/images/logo_phillips.png
Binary files differ
diff --git a/python/pystache/pystache/__init__.py b/python/pystache/pystache/__init__.py
new file mode 100644
index 000000000..4cf24344e
--- /dev/null
+++ b/python/pystache/pystache/__init__.py
@@ -0,0 +1,13 @@
+
+"""
+TODO: add a docstring.
+
+"""
+
+# We keep all initialization code in a separate module.
+
+from pystache.init import parse, render, Renderer, TemplateSpec
+
+__all__ = ['parse', 'render', 'Renderer', 'TemplateSpec']
+
+__version__ = '0.5.4' # Also change in setup.py.
diff --git a/python/pystache/pystache/commands/__init__.py b/python/pystache/pystache/commands/__init__.py
new file mode 100644
index 000000000..a0d386a38
--- /dev/null
+++ b/python/pystache/pystache/commands/__init__.py
@@ -0,0 +1,4 @@
+"""
+TODO: add a docstring.
+
+"""
diff --git a/python/pystache/pystache/commands/render.py b/python/pystache/pystache/commands/render.py
new file mode 100644
index 000000000..1a9c309d5
--- /dev/null
+++ b/python/pystache/pystache/commands/render.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+This module provides command-line access to pystache.
+
+Run this script using the -h option for command-line help.
+
+"""
+
+
+try:
+    import json
+except ImportError:
+ # The json module is new in Python 2.6, whereas simplejson is
+ # compatible with earlier versions.
+ try:
+ import simplejson as json
+ except ImportError:
+ # Raise an error with a type different from ImportError as a hack around
+ # this issue:
+ # http://bugs.python.org/issue7559
+ from sys import exc_info
+ ex_type, ex_value, tb = exc_info()
+ new_ex = Exception("%s: %s" % (ex_type.__name__, ex_value))
+ raise new_ex.__class__, new_ex, tb
+
+# The optparse module is deprecated in Python 2.7 in favor of argparse.
+# However, argparse is not available in Python 2.6 and earlier.
+from optparse import OptionParser
+import sys
+
+# We use absolute imports here to allow use of this script from its
+# location in source control (e.g. for development purposes).
+# Otherwise, the following error occurs:
+#
+# ValueError: Attempted relative import in non-package
+#
+from pystache.common import TemplateNotFoundError
+from pystache.renderer import Renderer
+
+
+USAGE = """\
+%prog [-h] template context
+
+Render a mustache template with the given context.
+
+positional arguments:
+ template A filename or template string.
+ context A filename or JSON string."""
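+
+# A hypothetical example invocation (assuming the console script is
+# installed under the name "pystache"):
+#
+#     $ pystache 'Hi {{person}}!' '{"person": "Mom"}'
+#     Hi Mom!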
+
+
+def parse_args(sys_argv, usage):
+ """
+    Parse the command-line arguments and return (template, context).
+
+ """
+ args = sys_argv[1:]
+
+ parser = OptionParser(usage=usage)
+ options, args = parser.parse_args(args)
+
+ template, context = args
+
+ return template, context
+
+
+# TODO: verify whether the setup() method's entry_points argument
+# supports passing arguments to main:
+#
+# http://packages.python.org/distribute/setuptools.html#automatic-script-creation
+#
+def main(sys_argv=sys.argv):
+ template, context = parse_args(sys_argv, USAGE)
+
+ if template.endswith('.mustache'):
+ template = template[:-9]
+
+ renderer = Renderer()
+
+ try:
+ template = renderer.load_template(template)
+ except TemplateNotFoundError:
+ pass
+
+ try:
+ context = json.load(open(context))
+ except IOError:
+ context = json.loads(context)
+
+ rendered = renderer.render(template, context)
+ print rendered
+
+
+if __name__=='__main__':
+ main()
diff --git a/python/pystache/pystache/commands/test.py b/python/pystache/pystache/commands/test.py
new file mode 100644
index 000000000..087245338
--- /dev/null
+++ b/python/pystache/pystache/commands/test.py
@@ -0,0 +1,18 @@
+# coding: utf-8
+
+"""
+This module provides a command to test pystache (unit tests, doctests, etc).
+
+"""
+
+import sys
+
+from pystache.tests.main import main as run_tests
+
+
+def main(sys_argv=sys.argv):
+ run_tests(sys_argv=sys_argv)
+
+
+if __name__=='__main__':
+ main()
diff --git a/python/pystache/pystache/common.py b/python/pystache/pystache/common.py
new file mode 100644
index 000000000..fb266dd8b
--- /dev/null
+++ b/python/pystache/pystache/common.py
@@ -0,0 +1,71 @@
+# coding: utf-8
+
+"""
+Exposes functionality needed throughout the project.
+
+"""
+
+from sys import version_info
+
+def _get_string_types():
+ # TODO: come up with a better solution for this. One of the issues here
+ # is that in Python 3 there is no common base class for unicode strings
+ # and byte strings, and 2to3 seems to convert all of "str", "unicode",
+ # and "basestring" to Python 3's "str".
+ if version_info < (3, ):
+ return basestring
+ # The latter evaluates to "bytes" in Python 3 -- even after conversion by 2to3.
+ return (unicode, type(u"a".encode('utf-8')))
+
+
+_STRING_TYPES = _get_string_types()
+
+
+def is_string(obj):
+ """
+ Return whether the given object is a byte string or unicode string.
+
+ This function is provided for compatibility with both Python 2 and 3
+ when using 2to3.
+
+ """
+ return isinstance(obj, _STRING_TYPES)
+
+
+# This function was designed to be portable across Python versions -- both
+# with older versions and with Python 3 after applying 2to3.
+def read(path):
+ """
+ Return the contents of a text file as a byte string.
+
+ """
+ # Opening in binary mode is necessary for compatibility across Python
+ # 2 and 3. In both Python 2 and 3, open() defaults to opening files in
+ # text mode. However, in Python 2, open() returns file objects whose
+ # read() method returns byte strings (strings of type `str` in Python 2),
+ # whereas in Python 3, the file object returns unicode strings (strings
+ # of type `str` in Python 3).
+ f = open(path, 'rb')
+ # We avoid use of the with keyword for Python 2.4 support.
+ try:
+ return f.read()
+ finally:
+ f.close()
+
+
+class MissingTags(object):
+
+ """Contains the valid values for Renderer.missing_tags."""
+
+ ignore = 'ignore'
+ strict = 'strict'
+
+
+class PystacheError(Exception):
+ """Base class for Pystache exceptions."""
+ pass
+
+
+class TemplateNotFoundError(PystacheError):
+ """An exception raised when a template is not found."""
+ pass
diff --git a/python/pystache/pystache/context.py b/python/pystache/pystache/context.py
new file mode 100644
index 000000000..671591609
--- /dev/null
+++ b/python/pystache/pystache/context.py
@@ -0,0 +1,342 @@
+# coding: utf-8
+
+"""
+Exposes a ContextStack class.
+
+The Mustache spec makes a special distinction between two types of context
+stack elements: hashes and objects. For the purposes of interpreting the
+spec, we define these categories mutually exclusively as follows:
+
+ (1) Hash: an item whose type is a subclass of dict.
+
+ (2) Object: an item that is neither a hash nor an instance of a
+ built-in type.
+
+"""
+
+from pystache.common import PystacheError
+
+
+# This equals '__builtin__' in Python 2 and 'builtins' in Python 3.
+_BUILTIN_MODULE = type(0).__module__
+
+
+# We use this private global variable as a return value to represent a key
+# not being found on lookup. This lets us distinguish between the case
+# of a key's value being None with the case of a key not being found --
+# without having to rely on exceptions (e.g. KeyError) for flow control.
+#
+# TODO: eliminate the need for a private global variable, e.g. by using the
+# preferred Python approach of "easier to ask for forgiveness than permission":
+# http://docs.python.org/glossary.html#term-eafp
+class NotFound(object):
+ pass
+_NOT_FOUND = NotFound()
+
+
+def _get_value(context, key):
+ """
+ Retrieve a key's value from a context item.
+
+ Returns _NOT_FOUND if the key does not exist.
+
+ The ContextStack.get() docstring documents this function's intended behavior.
+
+ """
+ if isinstance(context, dict):
+ # Then we consider the argument a "hash" for the purposes of the spec.
+ #
+ # We do a membership test to avoid using exceptions for flow control
+ # (e.g. catching KeyError).
+ if key in context:
+ return context[key]
+ elif type(context).__module__ != _BUILTIN_MODULE:
+ # Then we consider the argument an "object" for the purposes of
+ # the spec.
+ #
+ # The elif test above lets us avoid treating instances of built-in
+ # types like integers and strings as objects (cf. issue #81).
+ # Instances of user-defined classes on the other hand, for example,
+ # are considered objects by the test above.
+ try:
+ attr = getattr(context, key)
+ except AttributeError:
+ # TODO: distinguish the case of the attribute not existing from
+ # an AttributeError being raised by the call to the attribute.
+ # See the following issue for implementation ideas:
+ # http://bugs.python.org/issue7559
+ pass
+ else:
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ if callable(attr):
+ return attr()
+ return attr
+
+ return _NOT_FOUND
+
+
+class KeyNotFoundError(PystacheError):
+
+ """
+ An exception raised when a key is not found in a context stack.
+
+ """
+
+ def __init__(self, key, details):
+ self.key = key
+ self.details = details
+
+ def __str__(self):
+ return "Key %s not found: %s" % (repr(self.key), self.details)
+
+
+class ContextStack(object):
+
+ """
+ Provides dictionary-like access to a stack of zero or more items.
+
+ Instances of this class are meant to act as the rendering context
+ when rendering Mustache templates in accordance with mustache(5)
+ and the Mustache spec.
+
+ Instances encapsulate a private stack of hashes, objects, and built-in
+ type instances. Querying the stack for the value of a key queries
+ the items in the stack in order from last-added objects to first
+ (last in, first out).
+
+    Caution: this class does not currently support recursive nesting:
+    items in the stack cannot themselves be ContextStack instances.
+
+ See the docstrings of the methods of this class for more details.
+
+ """
+
+ # We reserve keyword arguments for future options (e.g. a "strict=True"
+ # option for enabling a strict mode).
+ def __init__(self, *items):
+ """
+ Construct an instance, and initialize the private stack.
+
+ The *items arguments are the items with which to populate the
+ initial stack. Items in the argument list are added to the
+ stack in order so that, in particular, items at the end of
+ the argument list are queried first when querying the stack.
+
+ Caution: items should not themselves be ContextStack instances, as
+ recursive nesting does not behave as one might expect.
+
+ """
+ self._stack = list(items)
+
+ def __repr__(self):
+ """
+ Return a string representation of the instance.
+
+ For example--
+
+ >>> context = ContextStack({'alpha': 'abc'}, {'numeric': 123})
+ >>> repr(context)
+ "ContextStack({'alpha': 'abc'}, {'numeric': 123})"
+
+ """
+ return "%s%s" % (self.__class__.__name__, tuple(self._stack))
+
+ @staticmethod
+ def create(*context, **kwargs):
+ """
+ Build a ContextStack instance from a sequence of context-like items.
+
+ This factory-style method is more general than the ContextStack class's
+ constructor in that, unlike the constructor, the argument list
+ can itself contain ContextStack instances.
+
+ Here is an example illustrating various aspects of this method:
+
+ >>> obj1 = {'animal': 'cat', 'vegetable': 'carrot', 'mineral': 'copper'}
+ >>> obj2 = ContextStack({'vegetable': 'spinach', 'mineral': 'silver'})
+ >>>
+ >>> context = ContextStack.create(obj1, None, obj2, mineral='gold')
+ >>>
+ >>> context.get('animal')
+ 'cat'
+ >>> context.get('vegetable')
+ 'spinach'
+ >>> context.get('mineral')
+ 'gold'
+
+ Arguments:
+
+ *context: zero or more dictionaries, ContextStack instances, or objects
+ with which to populate the initial context stack. None
+ arguments will be skipped. Items in the *context list are
+ added to the stack in order so that later items in the argument
+ list take precedence over earlier items. This behavior is the
+ same as the constructor's.
+
+ **kwargs: additional key-value data to add to the context stack.
+ As these arguments appear after all items in the *context list,
+ in the case of key conflicts these values take precedence over
+ all items in the *context list. This behavior is the same as
+ the constructor's.
+
+ """
+ items = context
+
+ context = ContextStack()
+
+ for item in items:
+ if item is None:
+ continue
+ if isinstance(item, ContextStack):
+ context._stack.extend(item._stack)
+ else:
+ context.push(item)
+
+ if kwargs:
+ context.push(kwargs)
+
+ return context
+
+ # TODO: add more unit tests for this.
+ # TODO: update the docstring for dotted names.
+ def get(self, name):
+ """
+ Resolve a dotted name against the current context stack.
+
+ This function follows the rules outlined in the section of the
+ spec regarding tag interpolation. This function returns the value
+ as is and does not coerce the return value to a string.
+
+ Arguments:
+
+ name: a dotted or non-dotted name.
+
+        This method queries items in the stack in order from last-added
+        objects to first (last in, first out).  The value returned is
+        the value of the key in the first item that contains the key.
+        If the key is not found in any item in the stack, this method
+        raises a KeyNotFoundError.
+
+ In accordance with the spec, this method queries items in the
+ stack for a key differently depending on whether the item is a
+ hash, object, or neither (as defined in the module docstring):
+
+ (1) Hash: if the item is a hash, then the key's value is the
+ dictionary value of the key. If the dictionary doesn't contain
+ the key, then the key is considered not found.
+
+          (2) Object: if the item is an object, then the method looks for
+ an attribute with the same name as the key. If an attribute
+ with that name exists, the value of the attribute is returned.
+ If the attribute is callable, however (i.e. if the attribute
+ is a method), then the attribute is called with no arguments
+ and that value is returned. If there is no attribute with
+ the same name as the key, then the key is considered not found.
+
+ (3) Neither: if the item is neither a hash nor an object, then
+ the key is considered not found.
+
+ *Caution*:
+
+ Callables are handled differently depending on whether they are
+ dictionary values, as in (1) above, or attributes, as in (2).
+ The former are returned as-is, while the latter are first
+ called and that value returned.
+
+ Here is an example to illustrate:
+
+ >>> def greet():
+ ... return "Hi Bob!"
+ >>>
+ >>> class Greeter(object):
+ ... greet = None
+ >>>
+ >>> dct = {'greet': greet}
+ >>> obj = Greeter()
+ >>> obj.greet = greet
+ >>>
+ >>> dct['greet'] is obj.greet
+ True
+ >>> ContextStack(dct).get('greet') #doctest: +ELLIPSIS
+ <function greet at 0x...>
+ >>> ContextStack(obj).get('greet')
+ 'Hi Bob!'
+
+ TODO: explain the rationale for this difference in treatment.
+
+ """
+ if name == '.':
+ try:
+ return self.top()
+ except IndexError:
+ raise KeyNotFoundError(".", "empty context stack")
+
+ parts = name.split('.')
+
+ try:
+ result = self._get_simple(parts[0])
+ except KeyNotFoundError:
+ raise KeyNotFoundError(name, "first part")
+
+ for part in parts[1:]:
+ # The full context stack is not used to resolve the remaining parts.
+ # From the spec--
+ #
+ # 5) If any name parts were retained in step 1, each should be
+ # resolved against a context stack containing only the result
+ # from the former resolution. If any part fails resolution, the
+ # result should be considered falsey, and should interpolate as
+ # the empty string.
+ #
+ # TODO: make sure we have a test case for the above point.
+ result = _get_value(result, part)
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ if result is _NOT_FOUND:
+ raise KeyNotFoundError(name, "missing %s" % repr(part))
+
+ return result
+
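+    # Editor's note: an illustrative sketch of the dotted-name behavior
+    # that the get() docstring does not yet cover (the names here are
+    # hypothetical):
+    #
+    #   stack = ContextStack({'person': {'name': 'Biff'}})
+    #   stack.get('person.name')   # => 'Biff'
+    #   stack.get('person.age')    # raises KeyNotFoundError: missing 'age'
+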
+ def _get_simple(self, name):
+ """
+ Query the stack for a non-dotted name.
+
+ """
+ for item in reversed(self._stack):
+ result = _get_value(item, name)
+ if result is not _NOT_FOUND:
+ return result
+
+ raise KeyNotFoundError(name, "part missing")
+
+ def push(self, item):
+ """
+ Push an item onto the stack.
+
+ """
+ self._stack.append(item)
+
+ def pop(self):
+ """
+ Pop an item off of the stack, and return it.
+
+ """
+ return self._stack.pop()
+
+ def top(self):
+ """
+ Return the item last added to the stack.
+
+ """
+ return self._stack[-1]
+
+ def copy(self):
+ """
+ Return a copy of this instance.
+
+ """
+ return ContextStack(*self._stack)
diff --git a/python/pystache/pystache/defaults.py b/python/pystache/pystache/defaults.py
new file mode 100644
index 000000000..bcfdf4cd3
--- /dev/null
+++ b/python/pystache/pystache/defaults.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+"""
+This module provides a central location for defining default behavior.
+
+Throughout the package, these defaults take effect only when the user
+does not otherwise specify a value.
+
+"""
+
+try:
+ # Python 3.2 adds html.escape() and deprecates cgi.escape().
+ from html import escape
+except ImportError:
+ from cgi import escape
+
+import os
+import sys
+
+from pystache.common import MissingTags
+
+
+# How to handle encoding errors when decoding strings from str to unicode.
+#
+# This value is passed as the "errors" argument to Python's built-in
+# unicode() function:
+#
+# http://docs.python.org/library/functions.html#unicode
+#
+DECODE_ERRORS = 'strict'
+
+# The name of the encoding to use when converting to unicode any strings of
+# type str encountered during the rendering process.
+STRING_ENCODING = sys.getdefaultencoding()
+
+# The name of the encoding to use when converting file contents to unicode.
+# This default takes precedence over the STRING_ENCODING default for
+# strings that arise from files.
+FILE_ENCODING = sys.getdefaultencoding()
+
+# The delimiters to start with when parsing.
+DELIMITERS = (u'{{', u'}}')
+
+# How to handle missing tags when rendering a template.
+MISSING_TAGS = MissingTags.ignore
+
+# The starting list of directories in which to search for templates when
+# loading a template by file name.
+SEARCH_DIRS = [os.curdir] # i.e. ['.']
+
+# The escape function to apply to strings that require escaping when
+# rendering templates (e.g. for tags enclosed in double braces).
+# Only unicode strings will be passed to this function.
+#
+# The quote=True argument causes double but not single quotes to be escaped
+# in Python 3.1 and earlier, and both double and single quotes to be
+# escaped in Python 3.2 and later:
+#
+# http://docs.python.org/library/cgi.html#cgi.escape
+# http://docs.python.org/dev/library/html.html#html.escape
+#
+TAG_ESCAPE = lambda u: escape(u, quote=True)
+
+# The default template extension, without the leading dot.
+TEMPLATE_EXTENSION = 'mustache'
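+
+
+# Editor's sketch (not part of the original module): these are plain module
+# attributes read when objects are constructed, so assigning to them changes
+# the behavior of subsequently created Renderer, Loader, and parser objects.
+# For example--
+#
+#   import pystache.defaults
+#   from pystache.common import MissingTags
+#   pystache.defaults.MISSING_TAGS = MissingTags.strict  # fail on missing tags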
diff --git a/python/pystache/pystache/init.py b/python/pystache/pystache/init.py
new file mode 100644
index 000000000..38bb1f5a0
--- /dev/null
+++ b/python/pystache/pystache/init.py
@@ -0,0 +1,19 @@
+# encoding: utf-8
+
+"""
+This module contains the initialization logic called by __init__.py.
+
+"""
+
+from pystache.parser import parse
+from pystache.renderer import Renderer
+from pystache.template_spec import TemplateSpec
+
+
+def render(template, context=None, **kwargs):
+ """
+ Return the given template string rendered using the given context.
+
+ """
+ renderer = Renderer()
+ return renderer.render(template, context, **kwargs)
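+
+
+# Usage sketch (editor's illustration):
+#
+# >>> render(u'Hello, {{name}}!', {'name': 'world'})
+# u'Hello, world!'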
diff --git a/python/pystache/pystache/loader.py b/python/pystache/pystache/loader.py
new file mode 100644
index 000000000..d4a7e5310
--- /dev/null
+++ b/python/pystache/pystache/loader.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+
+"""
+This module provides a Loader class for locating and reading templates.
+
+"""
+
+import os
+import sys
+
+from pystache import common
+from pystache import defaults
+from pystache.locator import Locator
+
+
+# We make a function so that the current defaults take effect.
+# TODO: revisit whether this is necessary.
+
+def _make_to_unicode():
+ def to_unicode(s, encoding=None):
+ """
+        Convert a str to unicode using the package encoding defaults.
+
+        Raises a TypeError exception if the given string is already unicode.
+
+ """
+ if encoding is None:
+ encoding = defaults.STRING_ENCODING
+ return unicode(s, encoding, defaults.DECODE_ERRORS)
+ return to_unicode
+
+
+class Loader(object):
+
+ """
+    Loads the template associated with a name or user-defined object.
+
+ All load_*() methods return the template as a unicode string.
+
+ """
+
+ def __init__(self, file_encoding=None, extension=None, to_unicode=None,
+ search_dirs=None):
+ """
+ Construct a template loader instance.
+
+ Arguments:
+
+ extension: the template file extension, without the leading dot.
+ Pass False for no extension (e.g. to use extensionless template
+ files). Defaults to the package default.
+
+ file_encoding: the name of the encoding to use when converting file
+ contents to unicode. Defaults to the package default.
+
+ search_dirs: the list of directories in which to search when loading
+ a template by name or file name. Defaults to the package default.
+
+ to_unicode: the function to use when converting strings of type
+ str to unicode. The function should have the signature:
+
+ to_unicode(s, encoding=None)
+
+ It should accept a string of type str and an optional encoding
+ name and return a string of type unicode. Defaults to calling
+ Python's built-in function unicode() using the package string
+ encoding and decode errors defaults.
+
+ """
+ if extension is None:
+ extension = defaults.TEMPLATE_EXTENSION
+
+ if file_encoding is None:
+ file_encoding = defaults.FILE_ENCODING
+
+ if search_dirs is None:
+ search_dirs = defaults.SEARCH_DIRS
+
+ if to_unicode is None:
+ to_unicode = _make_to_unicode()
+
+ self.extension = extension
+ self.file_encoding = file_encoding
+ # TODO: unit test setting this attribute.
+ self.search_dirs = search_dirs
+ self.to_unicode = to_unicode
+
+ def _make_locator(self):
+ return Locator(extension=self.extension)
+
+ def unicode(self, s, encoding=None):
+ """
+ Convert a string to unicode using the given encoding, and return it.
+
+ This function uses the underlying to_unicode attribute.
+
+ Arguments:
+
+ s: a basestring instance to convert to unicode. Unlike Python's
+ built-in unicode() function, it is okay to pass unicode strings
+ to this function. (Passing a unicode string to Python's unicode()
+ with the encoding argument throws the error, "TypeError: decoding
+ Unicode is not supported.")
+
+ encoding: the encoding to pass to the to_unicode attribute.
+ Defaults to None.
+
+ """
+ if isinstance(s, unicode):
+ return unicode(s)
+
+ return self.to_unicode(s, encoding)
+
+ def read(self, path, encoding=None):
+ """
+ Read the template at the given path, and return it as a unicode string.
+
+ """
+ b = common.read(path)
+
+ if encoding is None:
+ encoding = self.file_encoding
+
+ return self.unicode(b, encoding)
+
+ def load_file(self, file_name):
+ """
+ Find and return the template with the given file name.
+
+ Arguments:
+
+ file_name: the file name of the template.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_file(file_name, self.search_dirs)
+
+ return self.read(path)
+
+ def load_name(self, name):
+ """
+ Find and return the template with the given template name.
+
+ Arguments:
+
+ name: the name of the template.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_name(name, self.search_dirs)
+
+ return self.read(path)
+
+ # TODO: unit-test this method.
+ def load_object(self, obj):
+ """
+        Find and return the template associated with the given object.
+
+        Arguments:
+
+          obj: an instance of a user-defined class.  This loader's
+            search_dirs attribute determines the directories searched.
+
+ """
+ locator = self._make_locator()
+
+ path = locator.find_object(obj, self.search_dirs)
+
+ return self.read(path)
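+
+
+# Usage sketch (editor's illustration; 'hello' is a hypothetical template
+# name that would resolve to ./hello.mustache under the default search
+# directories and extension):
+#
+#   loader = Loader()
+#   template = loader.load_name('hello')  # unicode contents of ./hello.mustache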
diff --git a/python/pystache/pystache/locator.py b/python/pystache/pystache/locator.py
new file mode 100644
index 000000000..30c5b01e0
--- /dev/null
+++ b/python/pystache/pystache/locator.py
@@ -0,0 +1,171 @@
+# coding: utf-8
+
+"""
+This module provides a Locator class for finding template files.
+
+"""
+
+import os
+import re
+import sys
+
+from pystache.common import TemplateNotFoundError
+from pystache import defaults
+
+
+class Locator(object):
+
+ def __init__(self, extension=None):
+ """
+ Construct a template locator.
+
+ Arguments:
+
+ extension: the template file extension, without the leading dot.
+ Pass False for no extension (e.g. to use extensionless template
+ files). Defaults to the package default.
+
+ """
+ if extension is None:
+ extension = defaults.TEMPLATE_EXTENSION
+
+ self.template_extension = extension
+
+ def get_object_directory(self, obj):
+ """
+ Return the directory containing an object's defining class.
+
+ Returns None if there is no such directory, for example if the
+ class was defined in an interactive Python session, or in a
+ doctest that appears in a text file (rather than a Python file).
+
+ """
+ if not hasattr(obj, '__module__'):
+ return None
+
+ module = sys.modules[obj.__module__]
+
+ if not hasattr(module, '__file__'):
+ # TODO: add a unit test for this case.
+ return None
+
+ path = module.__file__
+
+ return os.path.dirname(path)
+
+ def make_template_name(self, obj):
+ """
+ Return the canonical template name for an object instance.
+
+ This method converts Python-style class names (PEP 8's recommended
+        CamelCase, aka CapWords) to lower_case_with_underscores.  Here
+ is an example with code:
+
+ >>> class HelloWorld(object):
+ ... pass
+ >>> hi = HelloWorld()
+ >>>
+ >>> locator = Locator()
+ >>> locator.make_template_name(hi)
+ 'hello_world'
+
+ """
+ template_name = obj.__class__.__name__
+
+ def repl(match):
+ return '_' + match.group(0).lower()
+
+ return re.sub('[A-Z]', repl, template_name)[1:]
+
+ def make_file_name(self, template_name, template_extension=None):
+ """
+ Generate and return the file name for the given template name.
+
+ Arguments:
+
+ template_extension: defaults to the instance's extension.
+
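+        For example (an illustrative sketch; 'hello' is an arbitrary name):
+
+        >>> locator = Locator()
+        >>> locator.make_file_name('hello')
+        'hello.mustache'
+        >>> locator.make_file_name('hello', template_extension=False)
+        'hello'
+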
+ """
+ file_name = template_name
+
+ if template_extension is None:
+ template_extension = self.template_extension
+
+ if template_extension is not False:
+ file_name += os.path.extsep + template_extension
+
+ return file_name
+
+ def _find_path(self, search_dirs, file_name):
+ """
+ Search for the given file, and return the path.
+
+ Returns None if the file is not found.
+
+ """
+ for dir_path in search_dirs:
+ file_path = os.path.join(dir_path, file_name)
+ if os.path.exists(file_path):
+ return file_path
+
+ return None
+
+ def _find_path_required(self, search_dirs, file_name):
+ """
+ Return the path to a template with the given file name.
+
+ """
+ path = self._find_path(search_dirs, file_name)
+
+ if path is None:
+ raise TemplateNotFoundError('File %s not found in dirs: %s' %
+ (repr(file_name), repr(search_dirs)))
+
+ return path
+
+ def find_file(self, file_name, search_dirs):
+ """
+ Return the path to a template with the given file name.
+
+ Arguments:
+
+ file_name: the file name of the template.
+
+ search_dirs: the list of directories in which to search.
+
+ """
+ return self._find_path_required(search_dirs, file_name)
+
+ def find_name(self, template_name, search_dirs):
+ """
+ Return the path to a template with the given name.
+
+ Arguments:
+
+ template_name: the name of the template.
+
+ search_dirs: the list of directories in which to search.
+
+ """
+ file_name = self.make_file_name(template_name)
+
+ return self._find_path_required(search_dirs, file_name)
+
+ def find_object(self, obj, search_dirs, file_name=None):
+ """
+ Return the path to a template associated with the given object.
+
+ """
+ if file_name is None:
+ # TODO: should we define a make_file_name() method?
+ template_name = self.make_template_name(obj)
+ file_name = self.make_file_name(template_name)
+
+ dir_path = self.get_object_directory(obj)
+
+ if dir_path is not None:
+ search_dirs = [dir_path] + search_dirs
+
+ path = self._find_path_required(search_dirs, file_name)
+
+ return path
diff --git a/python/pystache/pystache/parsed.py b/python/pystache/pystache/parsed.py
new file mode 100644
index 000000000..372d96c66
--- /dev/null
+++ b/python/pystache/pystache/parsed.py
@@ -0,0 +1,50 @@
+# coding: utf-8
+
+"""
+Exposes a class that represents a parsed (or compiled) template.
+
+"""
+
+
+class ParsedTemplate(object):
+
+ """
+ Represents a parsed or compiled template.
+
+ An instance wraps a list of unicode strings and node objects. A node
+ object must have a `render(engine, stack)` method that accepts a
+ RenderEngine instance and a ContextStack instance and returns a unicode
+ string.
+
+ """
+
+ def __init__(self):
+ self._parse_tree = []
+
+ def __repr__(self):
+ return repr(self._parse_tree)
+
+ def add(self, node):
+ """
+ Arguments:
+
+ node: a unicode string or node object instance. See the class
+ docstring for information.
+
+ """
+ self._parse_tree.append(node)
+
+ def render(self, engine, context):
+ """
+ Returns: a string of type unicode.
+
+ """
+ # We avoid use of the ternary operator for Python 2.4 support.
+ def get_unicode(node):
+ if type(node) is unicode:
+ return node
+ return node.render(engine, context)
+ parts = map(get_unicode, self._parse_tree)
+ s = ''.join(parts)
+
+ return unicode(s)
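+
+
+# Editor's note (illustrative): ParsedTemplate instances are normally built
+# by pystache.parser.parse(); unicode parts pass through render() unchanged,
+# while node objects are rendered against the given engine and context
+# stack.  For example, parse(u'Hi {{name}}') yields a ParsedTemplate
+# wrapping [u'Hi ', _EscapeNode(key='name')].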
diff --git a/python/pystache/pystache/parser.py b/python/pystache/pystache/parser.py
new file mode 100644
index 000000000..9a4fba235
--- /dev/null
+++ b/python/pystache/pystache/parser.py
@@ -0,0 +1,378 @@
+# coding: utf-8
+
+"""
+Exposes a parse() function to parse template strings.
+
+"""
+
+import re
+
+from pystache import defaults
+from pystache.parsed import ParsedTemplate
+
+
+END_OF_LINE_CHARACTERS = [u'\r', u'\n']
+NON_BLANK_RE = re.compile(ur'^(.)', re.M)
+
+
+# TODO: add some unit tests for this.
+# TODO: add a test case that checks for spurious spaces.
+# TODO: add test cases for delimiters.
+def parse(template, delimiters=None):
+ """
+ Parse a unicode template string and return a ParsedTemplate instance.
+
+ Arguments:
+
+ template: a unicode template string.
+
+ delimiters: a 2-tuple of delimiters. Defaults to the package default.
+
+ Examples:
+
+ >>> parsed = parse(u"Hey {{#who}}{{name}}!{{/who}}")
+ >>> print str(parsed).replace('u', '') # This is a hack to get the test to pass both in Python 2 and 3.
+ ['Hey ', _SectionNode(key='who', index_begin=12, index_end=21, parsed=[_EscapeNode(key='name'), '!'])]
+
+ """
+ if type(template) is not unicode:
+ raise Exception("Template is not unicode: %s" % type(template))
+ parser = _Parser(delimiters)
+ return parser.parse(template)
+
+
+def _compile_template_re(delimiters):
+ """
+ Return a regular expression object (re.RegexObject) instance.
+
+ """
+ # The possible tag type characters following the opening tag,
+ # excluding "=" and "{".
+ tag_types = "!>&/#^"
+
+ # TODO: are we following this in the spec?
+ #
+ # The tag's content MUST be a non-whitespace character sequence
+ # NOT containing the current closing delimiter.
+ #
+ tag = r"""
+ (?P<whitespace>[\ \t]*)
+ %(otag)s \s*
+ (?:
+ (?P<change>=) \s* (?P<delims>.+?) \s* = |
+ (?P<raw>{) \s* (?P<raw_name>.+?) \s* } |
+ (?P<tag>[%(tag_types)s]?) \s* (?P<tag_key>[\s\S]+?)
+ )
+ \s* %(ctag)s
+ """ % {'tag_types': tag_types, 'otag': re.escape(delimiters[0]), 'ctag': re.escape(delimiters[1])}
+
+ return re.compile(tag, re.VERBOSE)
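+
+
+# Illustration (editor's note): with the default {{ }} delimiters, the three
+# alternatives in the pattern above match as follows--
+#
+#   u'{{=<% %>=}}'   -> change='=', delims='<% %>'   (set new delimiters)
+#   u'{{{ title }}}' -> raw='{', raw_name='title'    (unescaped variable)
+#   u'{{# people }}' -> tag='#', tag_key='people'    (section open)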
+
+
+class ParsingError(Exception):
+
+ pass
+
+
+## Node types
+
+def _format(obj, exclude=None):
+ if exclude is None:
+ exclude = []
+ exclude.append('key')
+ attrs = obj.__dict__
+ names = list(set(attrs.keys()) - set(exclude))
+ names.sort()
+ names.insert(0, 'key')
+ args = ["%s=%s" % (name, repr(attrs[name])) for name in names]
+ return "%s(%s)" % (obj.__class__.__name__, ", ".join(args))
+
+
+class _CommentNode(object):
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ return u''
+
+
+class _ChangeNode(object):
+
+ def __init__(self, delimiters):
+ self.delimiters = delimiters
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ return u''
+
+
+class _EscapeNode(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ s = engine.fetch_string(context, self.key)
+ return engine.escape(s)
+
+
+class _LiteralNode(object):
+
+ def __init__(self, key):
+ self.key = key
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ s = engine.fetch_string(context, self.key)
+ return engine.literal(s)
+
+
+class _PartialNode(object):
+
+ def __init__(self, key, indent):
+ self.key = key
+ self.indent = indent
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ template = engine.resolve_partial(self.key)
+ # Indent before rendering.
+ template = re.sub(NON_BLANK_RE, self.indent + ur'\1', template)
+
+ return engine.render(template, context)
+
+
+class _InvertedNode(object):
+
+ def __init__(self, key, parsed_section):
+ self.key = key
+ self.parsed_section = parsed_section
+
+ def __repr__(self):
+ return _format(self)
+
+ def render(self, engine, context):
+ # TODO: is there a bug because we are not using the same
+ # logic as in fetch_string()?
+ data = engine.resolve_context(context, self.key)
+ # Note that lambdas are considered truthy for inverted sections
+ # per the spec.
+ if data:
+ return u''
+ return self.parsed_section.render(engine, context)
+
+
+class _SectionNode(object):
+
+ # TODO: the template_ and parsed_template_ arguments don't both seem
+ # to be necessary. Can we remove one of them? For example, if
+ # callable(data) is True, then the initial parsed_template isn't used.
+ def __init__(self, key, parsed, delimiters, template, index_begin, index_end):
+ self.delimiters = delimiters
+ self.key = key
+ self.parsed = parsed
+ self.template = template
+ self.index_begin = index_begin
+ self.index_end = index_end
+
+ def __repr__(self):
+ return _format(self, exclude=['delimiters', 'template'])
+
+ def render(self, engine, context):
+ values = engine.fetch_section_data(context, self.key)
+
+ parts = []
+ for val in values:
+ if callable(val):
+ # Lambdas special case section rendering and bypass pushing
+ # the data value onto the context stack. From the spec--
+ #
+ # When used as the data value for a Section tag, the
+ # lambda MUST be treatable as an arity 1 function, and
+ # invoked as such (passing a String containing the
+ # unprocessed section contents). The returned value
+ # MUST be rendered against the current delimiters, then
+ # interpolated in place of the section.
+ #
+ # Also see--
+ #
+ # https://github.com/defunkt/pystache/issues/113
+ #
+ # TODO: should we check the arity?
+ val = val(self.template[self.index_begin:self.index_end])
+ val = engine._render_value(val, context, delimiters=self.delimiters)
+ parts.append(val)
+ continue
+
+ context.push(val)
+ parts.append(self.parsed.render(engine, context))
+ context.pop()
+
+ return unicode(''.join(parts))
+
+
+class _Parser(object):
+
+ _delimiters = None
+ _template_re = None
+
+ def __init__(self, delimiters=None):
+ if delimiters is None:
+ delimiters = defaults.DELIMITERS
+
+ self._delimiters = delimiters
+
+ def _compile_delimiters(self):
+ self._template_re = _compile_template_re(self._delimiters)
+
+ def _change_delimiters(self, delimiters):
+ self._delimiters = delimiters
+ self._compile_delimiters()
+
+ def parse(self, template):
+ """
+        Parse a template string.
+
+        This method uses the current tag delimiters.
+
+        Arguments:
+
+          template: a unicode string that is the template to parse.
+
+ Returns:
+
+ a ParsedTemplate instance.
+
+ """
+ self._compile_delimiters()
+
+ start_index = 0
+ content_end_index, parsed_section, section_key = None, None, None
+ parsed_template = ParsedTemplate()
+
+ states = []
+
+ while True:
+ match = self._template_re.search(template, start_index)
+
+ if match is None:
+ break
+
+ match_index = match.start()
+ end_index = match.end()
+
+ matches = match.groupdict()
+
+ # Normalize the matches dictionary.
+ if matches['change'] is not None:
+ matches.update(tag='=', tag_key=matches['delims'])
+ elif matches['raw'] is not None:
+ matches.update(tag='&', tag_key=matches['raw_name'])
+
+ tag_type = matches['tag']
+ tag_key = matches['tag_key']
+ leading_whitespace = matches['whitespace']
+
+ # Standalone (non-interpolation) tags consume the entire line,
+ # both leading whitespace and trailing newline.
+ did_tag_begin_line = match_index == 0 or template[match_index - 1] in END_OF_LINE_CHARACTERS
+ did_tag_end_line = end_index == len(template) or template[end_index] in END_OF_LINE_CHARACTERS
+ is_tag_interpolating = tag_type in ['', '&']
+
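+            # Illustration (editor's note): in u'  {{! note }}\n' the tag
+            # is standalone, so the leading spaces and trailing newline are
+            # all consumed; in u'a {{! note }}\n' it is not, so the space
+            # before the tag is kept as literal text rather than serving as
+            # indentation (which matters only for partials).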
+ if did_tag_begin_line and did_tag_end_line and not is_tag_interpolating:
+ if end_index < len(template):
+ end_index += template[end_index] == '\r' and 1 or 0
+ if end_index < len(template):
+ end_index += template[end_index] == '\n' and 1 or 0
+ elif leading_whitespace:
+ match_index += len(leading_whitespace)
+ leading_whitespace = ''
+
+ # Avoid adding spurious empty strings to the parse tree.
+ if start_index != match_index:
+ parsed_template.add(template[start_index:match_index])
+
+ start_index = end_index
+
+ if tag_type in ('#', '^'):
+ # Cache current state.
+ state = (tag_type, end_index, section_key, parsed_template)
+ states.append(state)
+
+ # Initialize new state
+ section_key, parsed_template = tag_key, ParsedTemplate()
+ continue
+
+ if tag_type == '/':
+ if tag_key != section_key:
+ raise ParsingError("Section end tag mismatch: %s != %s" % (tag_key, section_key))
+
+ # Restore previous state with newly found section data.
+ parsed_section = parsed_template
+
+ (tag_type, section_start_index, section_key, parsed_template) = states.pop()
+ node = self._make_section_node(template, tag_type, tag_key, parsed_section,
+ section_start_index, match_index)
+
+ else:
+ node = self._make_interpolation_node(tag_type, tag_key, leading_whitespace)
+
+ parsed_template.add(node)
+
+ # Avoid adding spurious empty strings to the parse tree.
+ if start_index != len(template):
+ parsed_template.add(template[start_index:])
+
+ return parsed_template
+
+ def _make_interpolation_node(self, tag_type, tag_key, leading_whitespace):
+ """
+ Create and return a non-section node for the parse tree.
+
+ """
+ # TODO: switch to using a dictionary instead of a bunch of ifs and elifs.
+ if tag_type == '!':
+ return _CommentNode()
+
+ if tag_type == '=':
+ delimiters = tag_key.split()
+ self._change_delimiters(delimiters)
+ return _ChangeNode(delimiters)
+
+ if tag_type == '':
+ return _EscapeNode(tag_key)
+
+ if tag_type == '&':
+ return _LiteralNode(tag_key)
+
+ if tag_type == '>':
+ return _PartialNode(tag_key, leading_whitespace)
+
+ raise Exception("Invalid symbol for interpolation tag: %s" % repr(tag_type))
+
+ def _make_section_node(self, template, tag_type, tag_key, parsed_section,
+ section_start_index, section_end_index):
+ """
+ Create and return a section node for the parse tree.
+
+ """
+ if tag_type == '#':
+ return _SectionNode(tag_key, parsed_section, self._delimiters,
+ template, section_start_index, section_end_index)
+
+ if tag_type == '^':
+ return _InvertedNode(tag_key, parsed_section)
+
+ raise Exception("Invalid symbol for section tag: %s" % repr(tag_type))
diff --git a/python/pystache/pystache/renderengine.py b/python/pystache/pystache/renderengine.py
new file mode 100644
index 000000000..c797b1765
--- /dev/null
+++ b/python/pystache/pystache/renderengine.py
@@ -0,0 +1,181 @@
+# coding: utf-8
+
+"""
+Defines a class responsible for rendering logic.
+
+"""
+
+import re
+
+from pystache.common import is_string
+from pystache.parser import parse
+
+
+def context_get(stack, name):
+ """
+ Find and return a name from a ContextStack instance.
+
+ """
+ return stack.get(name)
+
+
+class RenderEngine(object):
+
+ """
+ Provides a render() method.
+
+ This class is meant only for internal use.
+
+ As a rule, the code in this class operates on unicode strings where
+ possible rather than, say, strings of type str or markupsafe.Markup.
+ This means that strings obtained from "external" sources like partials
+ and variable tag values are immediately converted to unicode (or
+ escaped and converted to unicode) before being operated on further.
+ This makes maintaining, reasoning about, and testing the correctness
+ of the code much simpler. In particular, it keeps the implementation
+ of this class independent of the API details of one (or possibly more)
+ unicode subclasses (e.g. markupsafe.Markup).
+
+ """
+
+ # TODO: it would probably be better for the constructor to accept
+ # and set as an attribute a single RenderResolver instance
+ # that encapsulates the customizable aspects of converting
+ # strings and resolving partials and names from context.
+ def __init__(self, literal=None, escape=None, resolve_context=None,
+ resolve_partial=None, to_str=None):
+ """
+ Arguments:
+
+ literal: the function used to convert unescaped variable tag
+ values to unicode, e.g. the value corresponding to a tag
+ "{{{name}}}". The function should accept a string of type
+ str or unicode (or a subclass) and return a string of type
+ unicode (but not a proper subclass of unicode).
+ This class will only pass basestring instances to this
+ function. For example, it will call str() on integer variable
+ values prior to passing them to this function.
+
+ escape: the function used to escape and convert variable tag
+ values to unicode, e.g. the value corresponding to a tag
+ "{{name}}". The function should obey the same properties
+ described above for the "literal" function argument.
+ This function should take care to convert any str
+ arguments to unicode just as the literal function should, as
+ this class will not pass tag values to literal prior to passing
+ them to this function. This allows for more flexibility,
+ for example using a custom escape function that handles
+ incoming strings of type markupsafe.Markup differently
+ from plain unicode strings.
+
+ resolve_context: the function to call to resolve a name against
+ a context stack. The function should accept two positional
+ arguments: a ContextStack instance and a name to resolve.
+
+ resolve_partial: the function to call when loading a partial.
+ The function should accept a template name string and return a
+ template string of type unicode (not a subclass).
+
+ to_str: a function that accepts an object and returns a string (e.g.
+ the built-in function str). This function is used for string
+ coercion whenever a string is required (e.g. for converting None
+ or 0 to a string).
+
+ """
+ self.escape = escape
+ self.literal = literal
+ self.resolve_context = resolve_context
+ self.resolve_partial = resolve_partial
+ self.to_str = to_str
+
+ # TODO: Rename context to stack throughout this module.
+
+ # From the spec:
+ #
+ # When used as the data value for an Interpolation tag, the lambda
+ # MUST be treatable as an arity 0 function, and invoked as such.
+ # The returned value MUST be rendered against the default delimiters,
+ # then interpolated in place of the lambda.
+ #
+ def fetch_string(self, context, name):
+ """
+ Get a value from the given context as a basestring instance.
+
+ """
+ val = self.resolve_context(context, name)
+
+ if callable(val):
+            # _render_value() already returns a string, so return its
+            # result directly.
+ return self._render_value(val(), context)
+
+ if not is_string(val):
+ return self.to_str(val)
+
+ return val
+
+ def fetch_section_data(self, context, name):
+ """
+ Fetch the value of a section as a list.
+
+ """
+ data = self.resolve_context(context, name)
+
+ # From the spec:
+ #
+ # If the data is not of a list type, it is coerced into a list
+ # as follows: if the data is truthy (e.g. `!!data == true`),
+ # use a single-element list containing the data, otherwise use
+ # an empty list.
+ #
+ if not data:
+ data = []
+ else:
+ # The least brittle way to determine whether something
+ # supports iteration is by trying to call iter() on it:
+ #
+ # http://docs.python.org/library/functions.html#iter
+ #
+ # It is not sufficient, for example, to check whether the item
+            # implements __iter__() (the iteration protocol).  There is
+ # also __getitem__() (the sequence protocol). In Python 2,
+ # strings do not implement __iter__(), but in Python 3 they do.
+ try:
+ iter(data)
+ except TypeError:
+ # Then the value does not support iteration.
+ data = [data]
+ else:
+ if is_string(data) or isinstance(data, dict):
+ # Do not treat strings and dicts (which are iterable) as lists.
+ data = [data]
+ # Otherwise, treat the value as a list.
+
+ return data
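+
+    # Illustrative summary of the coercion above (editor's note):
+    #
+    #   None / False / [] / u''  ->  []            section skipped
+    #   {'a': 1}                 ->  [{'a': 1}]    dict pushed once
+    #   u'text'                  ->  [u'text']     string rendered once
+    #   [1, 2]                   ->  [1, 2]        rendered once per item
+    #   non-iterable object      ->  [object]      pushed once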
+
+ def _render_value(self, val, context, delimiters=None):
+ """
+ Render an arbitrary value.
+
+ """
+ if not is_string(val):
+ # In case the template is an integer, for example.
+ val = self.to_str(val)
+ if type(val) is not unicode:
+ val = self.literal(val)
+ return self.render(val, context, delimiters)
+
+ def render(self, template, context_stack, delimiters=None):
+ """
+ Render a unicode template string, and return as unicode.
+
+ Arguments:
+
+ template: a template string of type unicode (but not a proper
+ subclass of unicode).
+
+ context_stack: a ContextStack instance.
+
+ """
+ parsed_template = parse(template, delimiters)
+
+ return parsed_template.render(self, context_stack)
diff --git a/python/pystache/pystache/renderer.py b/python/pystache/pystache/renderer.py
new file mode 100644
index 000000000..ff6a90c64
--- /dev/null
+++ b/python/pystache/pystache/renderer.py
@@ -0,0 +1,460 @@
+# coding: utf-8
+
+"""
+This module provides a Renderer class to render templates.
+
+"""
+
+import sys
+
+from pystache import defaults
+from pystache.common import TemplateNotFoundError, MissingTags, is_string
+from pystache.context import ContextStack, KeyNotFoundError
+from pystache.loader import Loader
+from pystache.parsed import ParsedTemplate
+from pystache.renderengine import context_get, RenderEngine
+from pystache.specloader import SpecLoader
+from pystache.template_spec import TemplateSpec
+
+
+class Renderer(object):
+
+ """
+ A class for rendering mustache templates.
+
+ This class supports several rendering options which are described in
+ the constructor's docstring. Other behavior can be customized by
+ subclassing this class.
+
+ For example, one can pass a string-string dictionary to the constructor
+ to bypass loading partials from the file system:
+
+ >>> partials = {'partial': 'Hello, {{thing}}!'}
+ >>> renderer = Renderer(partials=partials)
+ >>> # We apply print to make the test work in Python 3 after 2to3.
+ >>> print renderer.render('{{>partial}}', {'thing': 'world'})
+ Hello, world!
+
+ To customize string coercion (e.g. to render False values as ''), one can
+ subclass this class. For example:
+
+ class MyRenderer(Renderer):
+ def str_coerce(self, val):
+ if not val:
+ return ''
+ else:
+ return str(val)
+
+ """
+
+ def __init__(self, file_encoding=None, string_encoding=None,
+ decode_errors=None, search_dirs=None, file_extension=None,
+ escape=None, partials=None, missing_tags=None):
+ """
+ Construct an instance.
+
+ Arguments:
+
+ file_encoding: the name of the encoding to use by default when
+ reading template files. All templates are converted to unicode
+ prior to parsing. Defaults to the package default.
+
+ string_encoding: the name of the encoding to use when converting
+ to unicode any byte strings (type str in Python 2) encountered
+ during the rendering process. This name will be passed as the
+ encoding argument to the built-in function unicode().
+ Defaults to the package default.
+
+ decode_errors: the string to pass as the errors argument to the
+ built-in function unicode() when converting byte strings to
+ unicode. Defaults to the package default.
+
+ search_dirs: the list of directories in which to search when
+ loading a template by name or file name. If given a string,
+ the method interprets the string as a single directory.
+ Defaults to the package default.
+
+ file_extension: the template file extension. Pass False for no
+ extension (i.e. to use extensionless template files).
+ Defaults to the package default.
+
+ partials: an object (e.g. a dictionary) for custom partial loading
+ during the rendering process.
+ The object should have a get() method that accepts a string
+ and returns the corresponding template as a string, preferably
+ as a unicode string. If there is no template with that name,
+ the get() method should either return None (as dict.get() does)
+ or raise an exception.
+ If this argument is None, the rendering process will use
+ the normal procedure of locating and reading templates from
+ the file system -- using relevant instance attributes like
+ search_dirs, file_encoding, etc.
+
+ escape: the function used to escape variable tag values when
+ rendering a template. The function should accept a unicode
+ string (or subclass of unicode) and return an escaped string
+ that is again unicode (or a subclass of unicode).
+ This function need not handle strings of type `str` because
+ this class will only pass it unicode strings. The constructor
+ assigns this function to the constructed instance's escape()
+ method.
+ To disable escaping entirely, one can pass `lambda u: u`
+ as the escape function, for example. One may also wish to
+ consider using markupsafe's escape function: markupsafe.escape().
+ This argument defaults to the package default.
+
+ missing_tags: a string specifying how to handle missing tags.
+ If 'strict', an error is raised on a missing tag. If 'ignore',
+ the value of the tag is the empty string. Defaults to the
+ package default.
+
+ """
+ if decode_errors is None:
+ decode_errors = defaults.DECODE_ERRORS
+
+ if escape is None:
+ escape = defaults.TAG_ESCAPE
+
+ if file_encoding is None:
+ file_encoding = defaults.FILE_ENCODING
+
+ if file_extension is None:
+ file_extension = defaults.TEMPLATE_EXTENSION
+
+ if missing_tags is None:
+ missing_tags = defaults.MISSING_TAGS
+
+ if search_dirs is None:
+ search_dirs = defaults.SEARCH_DIRS
+
+ if string_encoding is None:
+ string_encoding = defaults.STRING_ENCODING
+
+ if isinstance(search_dirs, basestring):
+ search_dirs = [search_dirs]
+
+ self._context = None
+ self.decode_errors = decode_errors
+ self.escape = escape
+ self.file_encoding = file_encoding
+ self.file_extension = file_extension
+ self.missing_tags = missing_tags
+ self.partials = partials
+ self.search_dirs = search_dirs
+ self.string_encoding = string_encoding
+
+ # This is an experimental way of giving views access to the current context.
+ # TODO: consider another approach of not giving access via a property,
+ # but instead letting the caller pass the initial context to the
+ # main render() method by reference. This approach would probably
+ # be less likely to be misused.
+ @property
+ def context(self):
+ """
+ Return the current rendering context [experimental].
+
+ """
+ return self._context
+
+ # We could not choose str() as the name because 2to3 renames the unicode()
+ # method of this class to str().
+ def str_coerce(self, val):
+ """
+ Coerce a non-string value to a string.
+
+ This method is called whenever a non-string is encountered during the
+ rendering process when a string is needed (e.g. if a context value
+ for string interpolation is not a string). To customize string
+ coercion, you can override this method.
+
+ """
+ return str(val)
+
+ def _to_unicode_soft(self, s):
+ """
+ Convert a basestring to unicode, preserving any unicode subclass.
+
+ """
+ # We type-check to avoid "TypeError: decoding Unicode is not supported".
+ # We avoid the Python ternary operator for Python 2.4 support.
+ if isinstance(s, unicode):
+ return s
+ return self.unicode(s)
+
+ def _to_unicode_hard(self, s):
+ """
+ Convert a basestring to a string with type unicode (not subclass).
+
+ """
+ return unicode(self._to_unicode_soft(s))
+
+ def _escape_to_unicode(self, s):
+ """
+ Convert a basestring to unicode (preserving any unicode subclass), and escape it.
+
+ Returns a unicode string (not subclass).
+
+ """
+ return unicode(self.escape(self._to_unicode_soft(s)))
+
+ def unicode(self, b, encoding=None):
+ """
+ Convert a byte string to unicode, using string_encoding and decode_errors.
+
+ Arguments:
+
+ b: a byte string.
+
+ encoding: the name of an encoding. Defaults to the string_encoding
+ attribute for this instance.
+
+ Raises:
+
+ TypeError: Because this method calls Python's built-in unicode()
+ function, this method raises the following exception if the
+ given string is already unicode:
+
+ TypeError: decoding Unicode is not supported
+
+ """
+ if encoding is None:
+ encoding = self.string_encoding
+
+ # TODO: Wrap UnicodeDecodeErrors with a message about setting
+ # the string_encoding and decode_errors attributes.
+ return unicode(b, encoding, self.decode_errors)
+
+ def _make_loader(self):
+ """
+ Create a Loader instance using current attributes.
+
+ """
+ return Loader(file_encoding=self.file_encoding, extension=self.file_extension,
+ to_unicode=self.unicode, search_dirs=self.search_dirs)
+
+ def _make_load_template(self):
+ """
+ Return a function that loads a template by name.
+
+ """
+ loader = self._make_loader()
+
+ def load_template(template_name):
+ return loader.load_name(template_name)
+
+ return load_template
+
+ def _make_load_partial(self):
+ """
+ Return a function that loads a partial by name.
+
+ """
+ if self.partials is None:
+ return self._make_load_template()
+
+ # Otherwise, create a function from the custom partial loader.
+ partials = self.partials
+
+ def load_partial(name):
+ # TODO: consider using EAFP here instead.
+ # http://docs.python.org/glossary.html#term-eafp
+ # This would mean requiring that the custom partial loader
+ # raise a KeyError on name not found.
+ template = partials.get(name)
+ if template is None:
+ raise TemplateNotFoundError("Name %s not found in partials: %s" %
+ (repr(name), type(partials)))
+
+ # RenderEngine requires that the return value be unicode.
+ return self._to_unicode_hard(template)
+
+ return load_partial
+
+ def _is_missing_tags_strict(self):
+ """
+ Return whether missing_tags is set to strict.
+
+ """
+ val = self.missing_tags
+
+ if val == MissingTags.strict:
+ return True
+ elif val == MissingTags.ignore:
+ return False
+
+ raise Exception("Unsupported 'missing_tags' value: %s" % repr(val))
+
+ def _make_resolve_partial(self):
+ """
+ Return the resolve_partial function to pass to RenderEngine.__init__().
+
+ """
+ load_partial = self._make_load_partial()
+
+ if self._is_missing_tags_strict():
+ return load_partial
+ # Otherwise, ignore missing tags.
+
+ def resolve_partial(name):
+ try:
+ return load_partial(name)
+ except TemplateNotFoundError:
+ return u''
+
+ return resolve_partial
+
+ def _make_resolve_context(self):
+ """
+ Return the resolve_context function to pass to RenderEngine.__init__().
+
+ """
+ if self._is_missing_tags_strict():
+ return context_get
+ # Otherwise, ignore missing tags.
+
+ def resolve_context(stack, name):
+ try:
+ return context_get(stack, name)
+ except KeyNotFoundError:
+ return u''
+
+ return resolve_context
+
+ def _make_render_engine(self):
+ """
+ Return a RenderEngine instance for rendering.
+
+ """
+ resolve_context = self._make_resolve_context()
+ resolve_partial = self._make_resolve_partial()
+
+ engine = RenderEngine(literal=self._to_unicode_hard,
+ escape=self._escape_to_unicode,
+ resolve_context=resolve_context,
+ resolve_partial=resolve_partial,
+ to_str=self.str_coerce)
+ return engine
+
+ # TODO: add unit tests for this method.
+ def load_template(self, template_name):
+ """
+ Load a template by name from the file system.
+
+ """
+ load_template = self._make_load_template()
+ return load_template(template_name)
+
+ def _render_object(self, obj, *context, **kwargs):
+ """
+ Render the template associated with the given object.
+
+ """
+ loader = self._make_loader()
+
+ # TODO: consider an approach that does not require using an if
+ # block here. For example, perhaps this class's loader can be
+ # a SpecLoader in all cases, and the SpecLoader instance can
+ # check the object's type. Or perhaps Loader and SpecLoader
+ # can be refactored to implement the same interface.
+ if isinstance(obj, TemplateSpec):
+ loader = SpecLoader(loader)
+ template = loader.load(obj)
+ else:
+ template = loader.load_object(obj)
+
+ context = [obj] + list(context)
+
+ return self._render_string(template, *context, **kwargs)
+
+ def render_name(self, template_name, *context, **kwargs):
+ """
+ Render the template with the given name using the given context.
+
+ See the render() docstring for more information.
+
+ """
+ loader = self._make_loader()
+ template = loader.load_name(template_name)
+ return self._render_string(template, *context, **kwargs)
+
+ def render_path(self, template_path, *context, **kwargs):
+ """
+ Render the template at the given path using the given context.
+
+ Read the render() docstring for more information.
+
+ """
+ loader = self._make_loader()
+ template = loader.read(template_path)
+
+ return self._render_string(template, *context, **kwargs)
+
+ def _render_string(self, template, *context, **kwargs):
+ """
+ Render the given template string using the given context.
+
+ """
+ # RenderEngine.render() requires that the template string be unicode.
+ template = self._to_unicode_hard(template)
+
+ render_func = lambda engine, stack: engine.render(template, stack)
+
+ return self._render_final(render_func, *context, **kwargs)
+
+ # All calls to render() should end here because it prepares the
+ # context stack correctly.
+ def _render_final(self, render_func, *context, **kwargs):
+ """
+ Arguments:
+
+ render_func: a function that accepts a RenderEngine and ContextStack
+ instance and returns a template rendering as a unicode string.
+
+ """
+ stack = ContextStack.create(*context, **kwargs)
+ self._context = stack
+
+ engine = self._make_render_engine()
+
+ return render_func(engine, stack)
+
+ def render(self, template, *context, **kwargs):
+ """
+ Render the given template string, view template, or parsed template.
+
+ Returns a unicode string.
+
+ Prior to rendering, this method will convert a template that is a
+ byte string (type str in Python 2) to unicode using the string_encoding
+ and decode_errors attributes. See the constructor docstring for
+ more information.
+
+ Arguments:
+
+ template: a template string that is unicode or a byte string,
+ a ParsedTemplate instance, or another object instance. In the
+            final case, the method first loads the template associated
+            with the object (using a Loader, or a SpecLoader for
+            TemplateSpec instances).  The rendering process also uses
+            the passed object as
+ the first element of the context stack when rendering.
+
+ *context: zero or more dictionaries, ContextStack instances, or objects
+ with which to populate the initial context stack. None
+ arguments are skipped. Items in the *context list are added to
+ the context stack in order so that later items in the argument
+ list take precedence over earlier items.
+
+ **kwargs: additional key-value data to add to the context stack.
+ As these arguments appear after all items in the *context list,
+ in the case of key conflicts these values take precedence over
+ all items in the *context list.
+
+ """
+ if is_string(template):
+ return self._render_string(template, *context, **kwargs)
+ if isinstance(template, ParsedTemplate):
+ render_func = lambda engine, stack: template.render(engine, stack)
+ return self._render_final(render_func, *context, **kwargs)
+ # Otherwise, we assume the template is an object.
+
+ return self._render_object(template, *context, **kwargs)
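+
+
+# Editor's sketch of the missing_tags modes described above (assuming, as
+# in upstream Pystache, that MissingTags.ignore and MissingTags.strict
+# equal the strings 'ignore' and 'strict'):
+#
+# >>> Renderer().render(u'Hi {{person}}!')   # default mode: ignore
+# u'Hi !'
+#
+# With Renderer(missing_tags='strict'), the same call raises
+# pystache.context.KeyNotFoundError: "Key u'person' not found: first part".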
diff --git a/python/pystache/pystache/specloader.py b/python/pystache/pystache/specloader.py
new file mode 100644
index 000000000..3a77d4c52
--- /dev/null
+++ b/python/pystache/pystache/specloader.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+
+"""
+This module supports customized (aka special or specified) template loading.
+
+"""
+
+import os.path
+
+from pystache.loader import Loader
+
+
+# TODO: add test cases for this class.
+class SpecLoader(object):
+
+ """
+ Supports loading custom-specified templates (from TemplateSpec instances).
+
+ """
+
+ def __init__(self, loader=None):
+ if loader is None:
+ loader = Loader()
+
+ self.loader = loader
+
+ def _find_relative(self, spec):
+ """
+ Return the path to the template as a relative (dir, file_name) pair.
+
+ The directory returned is relative to the directory containing the
+        class definition of the given object.  The method returns None
+        for the directory when it cannot be determined without searching
+        the search directories.
+
+ """
+ if spec.template_rel_path is not None:
+ return os.path.split(spec.template_rel_path)
+ # Otherwise, determine the file name separately.
+
+ locator = self.loader._make_locator()
+
+ # We do not use the ternary operator for Python 2.4 support.
+ if spec.template_name is not None:
+ template_name = spec.template_name
+ else:
+ template_name = locator.make_template_name(spec)
+
+ file_name = locator.make_file_name(template_name, spec.template_extension)
+
+ return (spec.template_rel_directory, file_name)
+
+ def _find(self, spec):
+ """
+        Find and return the path to the template associated with the instance.
+
+ """
+ if spec.template_path is not None:
+ return spec.template_path
+
+ dir_path, file_name = self._find_relative(spec)
+
+ locator = self.loader._make_locator()
+
+ if dir_path is None:
+ # Then we need to search for the path.
+ path = locator.find_object(spec, self.loader.search_dirs, file_name=file_name)
+ else:
+ obj_dir = locator.get_object_directory(spec)
+ path = os.path.join(obj_dir, dir_path, file_name)
+
+ return path
+
+ def load(self, spec):
+ """
+        Find and return the template associated with a TemplateSpec instance.
+
+ Returns the template as a unicode string.
+
+ Arguments:
+
+ spec: a TemplateSpec instance.
+
+ """
+ if spec.template is not None:
+ return self.loader.unicode(spec.template, spec.template_encoding)
+
+ path = self._find(spec)
+
+ return self.loader.read(path, spec.template_encoding)
diff --git a/python/pystache/pystache/template_spec.py b/python/pystache/pystache/template_spec.py
new file mode 100644
index 000000000..9e9f454c1
--- /dev/null
+++ b/python/pystache/pystache/template_spec.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+Provides a class to customize template information on a per-view basis.
+
+To customize template properties for a particular view, create that view
+from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
+stands for "special" or "specified" template information.
+
+"""
+
+class TemplateSpec(object):
+
+ """
+ A mixin or interface for specifying custom template information.
+
+ The "spec" in TemplateSpec can be taken to mean that the template
+ information is either "specified" or "special."
+
+ A view should subclass this class only if customized template loading
+ is needed. The following attributes allow one to customize/override
+ template information on a per view basis. A None value means to use
+ default behavior for that value and perform no customization. All
+ attributes are initialized to None.
+
+ Attributes:
+
+ template: the template as a string.
+
+ template_encoding: the encoding used by the template.
+
+ template_extension: the template file extension. Defaults to "mustache".
+ Pass False for no extension (i.e. extensionless template files).
+
+ template_name: the name of the template.
+
+ template_path: absolute path to the template.
+
+ template_rel_directory: the directory containing the template file,
+ relative to the directory containing the module defining the class.
+
+ template_rel_path: the path to the template file, relative to the
+ directory containing the module defining the class.
+
+ """
+
+ template = None
+ template_encoding = None
+ template_extension = None
+ template_name = None
+ template_path = None
+ template_rel_directory = None
+ template_rel_path = None
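+
+
+# Editor's sketch of a typical subclass (the class name and path are
+# hypothetical):
+#
+#   class Simple(TemplateSpec):
+#       template_rel_path = 'templates/simple.mustache'
+#
+# Rendering a Simple instance with Renderer().render() would then read
+# templates/simple.mustache relative to the module defining Simple.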
diff --git a/python/pystache/setup.py b/python/pystache/setup.py
new file mode 100644
index 000000000..0d99aae8f
--- /dev/null
+++ b/python/pystache/setup.py
@@ -0,0 +1,413 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+"""
+This script supports publishing Pystache to PyPI.
+
+This docstring contains instructions to Pystache maintainers on how
+to release a new version of Pystache.
+
+(1) Prepare the release.
+
+Make sure the code is finalized and merged to master. Bump the version
+number in setup.py, update the release date in the HISTORY file, etc.
+
+Generate the reStructuredText long_description using--
+
+ $ python setup.py prep
+
+and be sure this new version is checked in. You must have pandoc installed
+to do this step:
+
+ http://johnmacfarlane.net/pandoc/
+
+It helps to review this auto-generated file on GitHub prior to uploading
+because the long description will be sent to PyPI and appear there after
+publishing. PyPI attempts to convert this string to HTML before displaying
+it on the PyPI project page. If PyPI finds any issues, it will render it
+instead as plain-text, which we do not want.
+
+To check in advance that PyPI will accept and parse the reST file as HTML,
+you can use the rst2html program installed by the docutils package
+(http://docutils.sourceforge.net/). To install docutils:
+
+ $ pip install docutils
+
+To check the file, run the following command and confirm that it reports
+no warnings:
+
+ $ python setup.py --long-description | rst2html.py -v --no-raw > out.html
+
+See here for more information:
+
+ http://docs.python.org/distutils/uploading.html#pypi-package-display
+
+(2) Push to PyPI. To release a new version of Pystache to PyPI--
+
+ http://pypi.python.org/pypi/pystache
+
+create a PyPI user account if you do not already have one. The user account
+will need permissions to push to PyPI. A current "Package Index Owner" of
+Pystache can grant you those permissions.
+
+When you have permissions, run the following:
+
+ python setup.py publish
+
+If you get an error like the following--
+
+ Upload failed (401): You must be identified to edit package information
+
+then add a file called .pypirc to your home directory with the following
+contents:
+
+ [server-login]
+ username: <PyPI username>
+ password: <PyPI password>
+
+as described here, for example:
+
+ http://docs.python.org/release/2.5.2/dist/pypirc.html
+
+(3) Tag the release on GitHub. Here are some commands for tagging.
+
+List current tags:
+
+ git tag -l -n3
+
+Create an annotated tag:
+
+ git tag -a -m "Version 0.5.1" "v0.5.1"
+
+Push a tag to GitHub:
+
+ git push --tags defunkt v0.5.1
+
+"""
+
+import os
+import shutil
+import sys
+
+
+py_version = sys.version_info
+
+# distutils does not seem to support the following setup() arguments.
+# It displays a UserWarning when setup() is passed those options:
+#
+# * entry_points
+# * install_requires
+#
+# distribute works with Python 2.3.5 and above:
+#
+# http://packages.python.org/distribute/setuptools.html#building-and-distributing-packages-with-distribute
+#
+if py_version < (2, 3, 5):
+ # TODO: this might not work yet.
+ import distutils as dist
+ from distutils import core
+ setup = core.setup
+else:
+ import setuptools as dist
+ setup = dist.setup
+
+
+VERSION = '0.5.4' # Also change in pystache/__init__.py.
+
+FILE_ENCODING = 'utf-8'
+
+README_PATH = 'README.md'
+HISTORY_PATH = 'HISTORY.md'
+LICENSE_PATH = 'LICENSE'
+
+RST_DESCRIPTION_PATH = 'setup_description.rst'
+
+TEMP_EXTENSION = '.temp'
+
+PREP_COMMAND = 'prep'
+
+CLASSIFIERS = (
+ 'Development Status :: 4 - Beta',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+)
+
+# Comments in reST begin with two dots.
+RST_LONG_DESCRIPTION_INTRO = """\
+.. Do not edit this file. This file is auto-generated for PyPI by setup.py
+.. using pandoc, so edits should go in the source files rather than here.
+"""
+
+
+def read(path):
+ """
+ Read and return the contents of a text file as a unicode string.
+
+ """
+ # This function implementation was chosen to be compatible across Python 2/3.
+ f = open(path, 'rb')
+ # We avoid use of the with keyword for Python 2.4 support.
+ try:
+ b = f.read()
+ finally:
+ f.close()
+
+ return b.decode(FILE_ENCODING)
+
+
+def write(u, path):
+ """
+ Write a unicode string to a file (as utf-8).
+
+ """
+ print("writing to: %s" % path)
+ # This function implementation was chosen to be compatible across Python 2/3.
+ f = open(path, "wb")
+ try:
+ b = u.encode(FILE_ENCODING)
+ f.write(b)
+ finally:
+ f.close()
+
+
+def make_temp_path(path, new_ext=None):
+ """
+ Arguments:
+
+ new_ext: the new file extension, including the leading dot.
+ Defaults to preserving the existing file extension.
+
+ """
+ root, ext = os.path.splitext(path)
+ if new_ext is None:
+ new_ext = ext
+ temp_path = root + TEMP_EXTENSION + new_ext
+ return temp_path
+
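+# Illustrative examples (not in the original source), given the constants above:
+#   make_temp_path('README.md') -> 'README.temp.md'
+#   make_temp_path('README.md', new_ext='.rst') -> 'README.temp.rst'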
+
+def strip_html_comments(text):
+ """Strip HTML comments from a unicode string."""
+ lines = text.splitlines(True) # preserve line endings.
+
+ # Remove HTML comments (which we only allow to take a special form).
+ new_lines = filter(lambda line: not line.startswith("<!--"), lines)
+
+ return "".join(new_lines)
+
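+# Illustrative example (not in the original source):
+#   strip_html_comments(u"<!-- note -->\nText\n") == u"Text\n"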
+
+# We write the converted file to a temp file to simplify debugging and
+# to avoid removing a valid pre-existing file on failure.
+def convert_md_to_rst(md_path, rst_temp_path):
+ """
+ Convert the contents of a file from Markdown to reStructuredText.
+
+ Returns the converted text as a Unicode string.
+
+ Arguments:
+
+ md_path: a path to a UTF-8 encoded Markdown file to convert.
+
+ rst_temp_path: a temporary path to which to write the converted contents.
+
+ """
+ # Pandoc uses the UTF-8 character encoding for both input and output.
+ command = "pandoc --write=rst --output=%s %s" % (rst_temp_path, md_path)
+ print("converting with pandoc: %s to %s\n-->%s" % (md_path, rst_temp_path,
+ command))
+
+ if os.path.exists(rst_temp_path):
+ os.remove(rst_temp_path)
+
+ os.system(command)
+
+ if not os.path.exists(rst_temp_path):
+ s = ("Error running: %s\n"
+ " Did you install pandoc per the %s docstring?" % (command,
+ __file__))
+ sys.exit(s)
+
+ return read(rst_temp_path)
+
+
+# The long_description needs to be formatted as reStructuredText.
+# See the following for more information:
+#
+# http://docs.python.org/distutils/setupscript.html#additional-meta-data
+# http://docs.python.org/distutils/uploading.html#pypi-package-display
+#
+def make_long_description():
+ """
+ Generate the reST long_description for setup() from source files.
+
+ Returns the generated long_description as a unicode string.
+
+ """
+ readme_path = README_PATH
+
+ # Remove our HTML comments because PyPI does not allow it.
+ # See the setup.py docstring for more info on this.
+ readme_md = strip_html_comments(read(readme_path))
+ history_md = strip_html_comments(read(HISTORY_PATH))
+ license_md = """\
+License
+=======
+
+""" + read(LICENSE_PATH)
+
+ sections = [readme_md, history_md, license_md]
+ md_description = '\n\n'.join(sections)
+
+ # Write the combined Markdown file to a temp path.
+ md_ext = os.path.splitext(readme_path)[1]
+ md_description_path = make_temp_path(RST_DESCRIPTION_PATH, new_ext=md_ext)
+ write(md_description, md_description_path)
+
+ rst_temp_path = make_temp_path(RST_DESCRIPTION_PATH)
+ long_description = convert_md_to_rst(md_path=md_description_path,
+ rst_temp_path=rst_temp_path)
+
+ return "\n".join([RST_LONG_DESCRIPTION_INTRO, long_description])
+
+
+def prep():
+ """Update the reST long_description file."""
+ long_description = make_long_description()
+ write(long_description, RST_DESCRIPTION_PATH)
+
+
+def publish():
+ """Publish this package to PyPI (aka "the Cheeseshop")."""
+ long_description = make_long_description()
+
+ if long_description != read(RST_DESCRIPTION_PATH):
+ print("""\
+Description file not up-to-date: %s
+Run the following command and commit the changes--
+
+ python setup.py %s
+""" % (RST_DESCRIPTION_PATH, PREP_COMMAND))
+ sys.exit()
+
+ print("Description up-to-date: %s" % RST_DESCRIPTION_PATH)
+
+ answer = raw_input("Are you sure you want to publish to PyPI (yes/no)?")
+
+ if answer != "yes":
+ exit("Aborted: nothing published")
+
+ os.system('python setup.py sdist upload')
+
+
+# We use the package simplejson for older Python versions since Python
+# does not contain the module json before 2.6:
+#
+# http://docs.python.org/library/json.html
+#
+# Moreover, simplejson officially stopped supporting Python 2.4 in version 2.1.0:
+#
+# https://github.com/simplejson/simplejson/blob/master/CHANGES.txt
+#
+requires = []
+if py_version < (2, 5):
+ requires.append('simplejson<2.1')
+elif py_version < (2, 6):
+ requires.append('simplejson')
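+# For example (illustrative): under Python 2.4 this yields
+# requires == ['simplejson<2.1']; under 2.5, ['simplejson']; under 2.6+, [].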
+
+INSTALL_REQUIRES = requires
+
+# TODO: decide whether to use find_packages() instead. I'm not sure that
+# find_packages() is available with distutils, for example.
+PACKAGES = [
+ 'pystache',
+ 'pystache.commands',
+ # The following packages are only for testing.
+ 'pystache.tests',
+ 'pystache.tests.data',
+ 'pystache.tests.data.locator',
+ 'pystache.tests.examples',
+]
+
+
+# The purpose of this function is to follow the guidance suggested here:
+#
+# http://packages.python.org/distribute/python3.html#note-on-compatibility-with-setuptools
+#
+# The guidance is for better compatibility when using setuptools (e.g. with
+# earlier versions of Python 2) instead of Distribute, because of new
+# keyword arguments to setup() that setuptools may not recognize.
+def get_extra_args():
+ """
+ Return a dictionary of extra args to pass to setup().
+
+ """
+ extra = {}
+ # TODO: it might be more correct to check whether we are using
+ # Distribute instead of setuptools, since use_2to3 doesn't take
+ # effect when using Python 2, even when using Distribute.
+ if py_version >= (3, ):
+ # Causes 2to3 to be run during the build step.
+ extra['use_2to3'] = True
+
+ return extra
+
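+# For example (illustrative): under Python 3, get_extra_args() returns
+# {'use_2to3': True}; under Python 2 it returns an empty dict.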
+
+def main(sys_argv):
+
+ # TODO: use the logging module instead of printing.
+ # TODO: include the following in a verbose mode.
+ sys.stderr.write("pystache: using: version %s of %s\n" % (repr(dist.__version__), repr(dist)))
+
+ command = sys_argv[-1]
+
+ if command == 'publish':
+ publish()
+ sys.exit()
+ elif command == PREP_COMMAND:
+ prep()
+ sys.exit()
+
+ long_description = read(RST_DESCRIPTION_PATH)
+ template_files = ['*.mustache', '*.txt']
+ extra_args = get_extra_args()
+
+ setup(name='pystache',
+ version=VERSION,
+ license='MIT',
+ description='Mustache for Python',
+ long_description=long_description,
+ author='Chris Wanstrath',
+ author_email='chris@ozmm.org',
+ maintainer='Chris Jerdonek',
+ maintainer_email='chris.jerdonek@gmail.com',
+ url='http://github.com/defunkt/pystache',
+ install_requires=INSTALL_REQUIRES,
+ packages=PACKAGES,
+ package_data = {
+ # Include template files so tests can be run.
+ 'pystache.tests.data': template_files,
+ 'pystache.tests.data.locator': template_files,
+ 'pystache.tests.examples': template_files,
+ },
+ entry_points = {
+ 'console_scripts': [
+ 'pystache=pystache.commands.render:main',
+ 'pystache-test=pystache.commands.test:main',
+ ],
+ },
+ classifiers = CLASSIFIERS,
+ **extra_args
+ )
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/python/pystache/setup_description.rst b/python/pystache/setup_description.rst
new file mode 100644
index 000000000..724c45723
--- /dev/null
+++ b/python/pystache/setup_description.rst
@@ -0,0 +1,513 @@
+.. Do not edit this file. This file is auto-generated for PyPI by setup.py
+.. using pandoc, so edits should go in the source files rather than here.
+
+Pystache
+========
+
+.. figure:: http://defunkt.github.com/pystache/images/logo_phillips.png
+ :alt: mustachioed, monocled snake by David Phillips
+
+.. figure:: https://secure.travis-ci.org/defunkt/pystache.png
+ :alt: Travis CI current build status
+
+`Pystache <http://defunkt.github.com/pystache>`__ is a Python
+implementation of `Mustache <http://mustache.github.com/>`__. Mustache
+is a framework-agnostic, logic-free templating system inspired by
+`ctemplate <http://code.google.com/p/google-ctemplate/>`__ and
+`et <http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html>`__.
+Like ctemplate, Mustache "emphasizes separating logic from presentation:
+it is impossible to embed application logic in this template language."
+
+The `mustache(5) <http://mustache.github.com/mustache.5.html>`__ man
+page provides a good introduction to Mustache's syntax. For a more
+complete (and more current) description of Mustache's behavior, see the
+official `Mustache spec <https://github.com/mustache/spec>`__.
+
+Pystache is `semantically versioned <http://semver.org>`__ and can be
+found on `PyPI <http://pypi.python.org/pypi/pystache>`__. This version
+of Pystache passes all tests in `version
+1.1.2 <https://github.com/mustache/spec/tree/v1.1.2>`__ of the spec.
+
+Requirements
+------------
+
+Pystache is tested with--
+
+- Python 2.4 (requires simplejson `version
+ 2.0.9 <http://pypi.python.org/pypi/simplejson/2.0.9>`__ or earlier)
+- Python 2.5 (requires
+ `simplejson <http://pypi.python.org/pypi/simplejson/>`__)
+- Python 2.6
+- Python 2.7
+- Python 3.1
+- Python 3.2
+- Python 3.3
+- `PyPy <http://pypy.org/>`__
+
+`Distribute <http://packages.python.org/distribute/>`__ (the setuptools
+fork) is recommended over
+`setuptools <http://pypi.python.org/pypi/setuptools>`__, and is required
+in some cases (e.g. for Python 3 support). If you use
+`pip <http://www.pip-installer.org/>`__, you probably already satisfy
+this requirement.
+
+JSON support is needed only for the command-line interface and to run
+the spec tests. We require simplejson for earlier versions of Python
+since Python's `json <http://docs.python.org/library/json.html>`__
+module was added in Python 2.6.
+
+For Python 2.4 we require an earlier version of simplejson since
+simplejson stopped officially supporting Python 2.4 in simplejson
+version 2.1.0. Earlier versions of simplejson can be installed manually,
+as follows:
+
+::
+
+ pip install 'simplejson<2.1.0'
+
+Official support for Python 2.4 will end with Pystache version 0.6.0.
+
+Install It
+----------
+
+::
+
+ pip install pystache
+
+And test it--
+
+::
+
+ pystache-test
+
+To install and test from source (e.g. from GitHub), see the Develop
+section.
+
+Use It
+------
+
+::
+
+ >>> import pystache
+ >>> print pystache.render('Hi {{person}}!', {'person': 'Mom'})
+ Hi Mom!
+
+You can also create dedicated view classes to hold your view logic.
+
+Here's your view class (in .../examples/readme.py):
+
+::
+
+ class SayHello(object):
+ def to(self):
+ return "Pizza"
+
+Instantiating like so:
+
+::
+
+ >>> from pystache.tests.examples.readme import SayHello
+ >>> hello = SayHello()
+
+Then your template, say\_hello.mustache (by default in the same
+directory as your class definition):
+
+::
+
+ Hello, {{to}}!
+
+Pull it together:
+
+::
+
+ >>> renderer = pystache.Renderer()
+ >>> print renderer.render(hello)
+ Hello, Pizza!
+
+For greater control over rendering (e.g. to specify a custom template
+directory), use the ``Renderer`` class like above. One can pass
+attributes to the Renderer class constructor or set them on a Renderer
+instance. To customize template loading on a per-view basis, subclass
+``TemplateSpec``. See the docstrings of the
+`Renderer <https://github.com/defunkt/pystache/blob/master/pystache/renderer.py>`__
+class and
+`TemplateSpec <https://github.com/defunkt/pystache/blob/master/pystache/template_spec.py>`__
+class for more information.
+
+You can also pre-parse a template:
+
+::
+
+ >>> parsed = pystache.parse(u"Hey {{#who}}{{.}}!{{/who}}")
+ >>> print parsed
+ [u'Hey ', _SectionNode(key=u'who', index_begin=12, index_end=18, parsed=[_EscapeNode(key=u'.'), u'!'])]
+
+And then:
+
+::
+
+ >>> print renderer.render(parsed, {'who': 'Pops'})
+ Hey Pops!
+ >>> print renderer.render(parsed, {'who': 'you'})
+ Hey you!
+
+Python 3
+--------
+
+Pystache has supported Python 3 since version 0.5.1. Pystache behaves
+slightly differently between Python 2 and 3, as follows:
+
+- In Python 2, the default html-escape function ``cgi.escape()`` does
+ not escape single quotes. In Python 3, the default escape function
+ ``html.escape()`` does escape single quotes.
+- In both Python 2 and 3, the string and file encodings default to
+ ``sys.getdefaultencoding()``. However, this function can return
+ different values under Python 2 and 3, even when run from the same
+ system. Check your own system for the behavior on your system, or do
+ not rely on the defaults by passing in the encodings explicitly (e.g.
+ to the ``Renderer`` class).
+
+Unicode
+-------
+
+This section describes how Pystache handles unicode, strings, and
+encodings.
+
+Internally, Pystache uses `only unicode
+strings <http://docs.python.org/howto/unicode.html#tips-for-writing-unicode-aware-programs>`__
+(``str`` in Python 3 and ``unicode`` in Python 2). For input, Pystache
+accepts both unicode strings and byte strings (``bytes`` in Python 3 and
+``str`` in Python 2). For output, Pystache's template rendering methods
+return only unicode.
+
+Pystache's ``Renderer`` class supports a number of attributes to control
+how Pystache converts byte strings to unicode on input. These include
+the ``file_encoding``, ``string_encoding``, and ``decode_errors``
+attributes.
+
+The ``file_encoding`` attribute is the encoding the renderer uses to
+convert to unicode any files read from the file system. Similarly,
+``string_encoding`` is the encoding the renderer uses to convert any
+other byte strings encountered during the rendering process into unicode
+(e.g. context values that are encoded byte strings).
+
+The ``decode_errors`` attribute is what the renderer passes as the
+``errors`` argument to Python's built-in unicode-decoding function
+(``str()`` in Python 3 and ``unicode()`` in Python 2). The valid values
+for this argument are ``strict``, ``ignore``, and ``replace``.
+
+Each of these attributes can be set via the ``Renderer`` class's
+constructor using a keyword argument of the same name. See the Renderer
+class's docstrings for further details. In addition, the
+``file_encoding`` attribute can be controlled on a per-view basis by
+subclassing the ``TemplateSpec`` class. When not specified explicitly,
+these attributes default to values set in Pystache's ``defaults``
+module.
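+
+For example, a minimal sketch (assuming the constructor keyword names
+match the attribute names above):
+
+::
+
+    >>> renderer = pystache.Renderer(string_encoding='utf-8', decode_errors='replace')
+    >>> renderer.string_encoding
+    'utf-8'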
+
+Develop
+-------
+
+To test from a source distribution (without installing)--
+
+::
+
+ python test_pystache.py
+
+To test Pystache with multiple versions of Python (with a single
+command!), you can use `tox <http://pypi.python.org/pypi/tox>`__:
+
+::
+
+ pip install 'virtualenv<1.8' # Version 1.8 dropped support for Python 2.4.
+ pip install 'tox<1.4' # Version 1.4 dropped support for Python 2.4.
+ tox
+
+If you do not have all Python versions listed in ``tox.ini``--
+
+::
+
+ tox -e py26,py32 # for example
+
+The source distribution tests also include doctests and tests from the
+Mustache spec. To include tests from the Mustache spec in your test
+runs:
+
+::
+
+ git submodule init
+ git submodule update
+
+The test harness parses the spec's (more human-readable) yaml files if
+`PyYAML <http://pypi.python.org/pypi/PyYAML>`__ is present. Otherwise,
+it parses the json files. To install PyYAML--
+
+::
+
+ pip install pyyaml
+
+To run a subset of the tests, you can use
+`nose <http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html>`__:
+
+::
+
+ pip install nose
+ nosetests --tests pystache/tests/test_context.py:GetValueTests.test_dictionary__key_present
+
+Using Python 3 with Pystache from source
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Pystache is written in Python 2 and must be converted to Python 3 prior
+to using it with Python 3. The installation process (and tox) do this
+automatically.
+
+To convert the code to Python 3 manually (while using Python 3)--
+
+::
+
+ python setup.py build
+
+This writes the converted code to a subdirectory called ``build``. By
+design, Python 3 builds
+`cannot <https://bitbucket.org/tarek/distribute/issue/292/allow-use_2to3-with-python-2>`__
+be created from Python 2.
+
+To convert the code without using setup.py, you can use
+`2to3 <http://docs.python.org/library/2to3.html>`__ as follows (two
+steps)--
+
+::
+
+ 2to3 --write --nobackups --no-diffs --doctests_only pystache
+ 2to3 --write --nobackups --no-diffs pystache
+
+This converts the code (and doctests) in place.
+
+To ``import pystache`` from a source distribution while using Python 3,
+be sure that you are importing from a directory containing a converted
+version of the code (e.g. from the ``build`` directory after
+converting), and not from the original (unconverted) source directory.
+Otherwise, you will get a syntax error. You can help prevent this by not
+running the Python interpreter from the project directory when importing
+Pystache while using Python 3.
+
+Mailing List
+------------
+
+There is a `mailing list <http://librelist.com/browser/pystache/>`__.
+Note that there is a bit of a delay between posting a message and seeing
+it appear in the mailing list archive.
+
+Credits
+-------
+
+::
+
+ >>> context = { 'author': 'Chris Wanstrath', 'maintainer': 'Chris Jerdonek' }
+ >>> print pystache.render("Author: {{author}}\nMaintainer: {{maintainer}}", context)
+ Author: Chris Wanstrath
+ Maintainer: Chris Jerdonek
+
+Pystache logo by `David Phillips <http://davidphillips.us/>`__ is
+licensed under a `Creative Commons Attribution-ShareAlike 3.0 Unported
+License <http://creativecommons.org/licenses/by-sa/3.0/deed.en_US>`__.
+|image0|
+
+History
+=======
+
+**Note:** Official support for Python 2.4 will end with Pystache version
+0.6.0.
+
+0.5.4 (2014-07-11)
+------------------
+
+- Bugfix: made test with filenames OS agnostic (issue #162).
+
+0.5.3 (2012-11-03)
+------------------
+
+- Added ability to customize string coercion (e.g. to have None render
+ as ``''``) (issue #130).
+- Added Renderer.render\_name() to render a template by name (issue
+ #122).
+- Added TemplateSpec.template\_path to specify an absolute path to a
+ template (issue #41).
+- Added option of raising errors on missing tags/partials:
+ ``Renderer(missing_tags='strict')`` (issue #110).
+- Added support for finding and loading templates by file name in
+ addition to by template name (issue #127). [xgecko]
+- Added a ``parse()`` function that yields a printable, pre-compiled
+ parse tree.
+- Added support for rendering pre-compiled templates.
+- Added Python 3.3 to the list of supported versions.
+- Added support for `PyPy <http://pypy.org/>`__ (issue #125).
+- Added support for `Travis CI <http://travis-ci.org>`__ (issue #124).
+ [msabramo]
+- Bugfix: ``defaults.DELIMITERS`` can now be changed at runtime (issue
+ #135). [bennoleslie]
+- Bugfix: exceptions raised from a property are no longer swallowed
+ when getting a key from a context stack (issue #110).
+- Bugfix: lambda section values can now return non-ascii, non-unicode
+ strings (issue #118).
+- Bugfix: allow ``test_pystache.py`` and ``tox`` to pass when run from
+ a downloaded sdist (i.e. without the spec test directory).
+- Convert HISTORY and README files from reST to Markdown.
+- More robust handling of byte strings in Python 3.
+- Added Creative Commons license for David Phillips's logo.
+
+0.5.2 (2012-05-03)
+------------------
+
+- Added support for dot notation and version 1.1.2 of the spec (issue
+ #99). [rbp]
+- Missing partials now render as empty string per latest version of
+ spec (issue #115).
+- Bugfix: falsey values now coerced to strings using str().
+- Bugfix: lambda return values for sections no longer pushed onto
+ context stack (issue #113).
+- Bugfix: lists of lambdas for sections were not rendered (issue #114).
+
+0.5.1 (2012-04-24)
+------------------
+
+- Added support for Python 3.1 and 3.2.
+- Added tox support to test multiple Python versions.
+- Added test script entry point: pystache-test.
+- Added \_\_version\_\_ package attribute.
+- Test harness now supports both YAML and JSON forms of Mustache spec.
+- Test harness no longer requires nose.
+
+0.5.0 (2012-04-03)
+------------------
+
+This version represents a major rewrite and refactoring of the code base
+that also adds features and fixes many bugs. All functionality and
+nearly all unit tests have been preserved. However, some backwards
+incompatible changes to the API have been made.
+
+Below is a selection of some of the changes (not exhaustive).
+
+Highlights:
+
+- Pystache now passes all tests in version 1.0.3 of the `Mustache
+ spec <https://github.com/mustache/spec>`__. [pvande]
+- Removed View class: it is no longer necessary to subclass from View
+ or from any other class to create a view.
+- Replaced Template with Renderer class: template rendering behavior
+ can be modified via the Renderer constructor or by setting attributes
+ on a Renderer instance.
+- Added TemplateSpec class: template rendering can be specified on a
+ per-view basis by subclassing from TemplateSpec.
+- Introduced separation of concerns and removed circular dependencies
+ (e.g. between Template and View classes, cf. `issue
+ #13 <https://github.com/defunkt/pystache/issues/13>`__).
+- Unicode now used consistently throughout the rendering process.
+- Expanded test coverage: nosetests now runs doctests and ~105 test
+ cases from the Mustache spec (increasing the number of tests from 56
+ to ~315).
+- Added a rudimentary benchmarking script to gauge performance while
+ refactoring.
+- Extensive documentation added (e.g. docstrings).
+
+Other changes:
+
+- Added a command-line interface. [vrde]
+- The main rendering class now accepts a custom partial loader (e.g. a
+ dictionary) and a custom escape function.
+- Non-ascii characters in str strings are now supported while
+ rendering.
+- Added string encoding, file encoding, and errors options for decoding
+ to unicode.
+- Removed the output encoding option.
+- Removed the use of markupsafe.
+
+Bug fixes:
+
+- Context values no longer processed as template strings.
+ [jakearchibald]
+- Whitespace surrounding sections is no longer altered, per the spec.
+ [heliodor]
+- Zeroes now render correctly when using PyPy. [alex]
+- Multiline comments now permitted. [fczuardi]
+- Extensionless template files are now supported.
+- Passing ``**kwargs`` to ``Template()`` no longer modifies the
+ context.
+- Passing ``**kwargs`` to ``Template()`` with no context no longer
+ raises an exception.
+
+0.4.1 (2012-03-25)
+------------------
+
+- Added support for Python 2.4. [wangtz, jvantuyl]
+
+0.4.0 (2011-01-12)
+------------------
+
+- Add support for nested contexts (within template and view)
+- Add support for inverted lists
+- Decoupled template loading
+
+0.3.1 (2010-05-07)
+------------------
+
+- Fix package
+
+0.3.0 (2010-05-03)
+------------------
+
+- View.template\_path can now hold a list of paths
+- Add {{& blah}} as an alias for {{{ blah }}}
+- Higher Order Sections
+- Inverted sections
+
+0.2.0 (2010-02-15)
+------------------
+
+- Bugfix: Methods returning False or None are not rendered
+- Bugfix: Don't render an empty string when a tag's value is 0.
+ [enaeseth]
+- Add support for using non-callables as View attributes.
+ [joshthecoder]
+- Allow using View instances as attributes. [joshthecoder]
+- Support for Unicode and non-ASCII-encoded bytestring output.
+ [enaeseth]
+- Template file encoding awareness. [enaeseth]
+
+0.1.1 (2009-11-13)
+------------------
+
+- Ensure we're dealing with strings, always
+- Tests can be run by executing the test file directly
+
+0.1.0 (2009-11-12)
+------------------
+
+- First release
+
+License
+=======
+
+Copyright (C) 2012 Chris Jerdonek. All rights reserved.
+
+Copyright (c) 2009 Chris Wanstrath
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+.. |image0| image:: http://i.creativecommons.org/l/by-sa/3.0/88x31.png
diff --git a/python/pystache/test_pystache.py b/python/pystache/test_pystache.py
new file mode 100644
index 000000000..9a1a3ca26
--- /dev/null
+++ b/python/pystache/test_pystache.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+"""
+Runs project tests.
+
+This script is a substitute for running--
+
+ python -m pystache.commands.test
+
+It is useful in Python 2.4 because the -m flag does not accept subpackages
+in Python 2.4:
+
+ http://docs.python.org/using/cmdline.html#cmdoption-m
+
+"""
+
+import sys
+
+from pystache.commands import test
+from pystache.tests.main import FROM_SOURCE_OPTION
+
+
+def main(sys_argv=sys.argv):
+ sys.argv.insert(1, FROM_SOURCE_OPTION)
+ test.main()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/pystache/tox.ini b/python/pystache/tox.ini
new file mode 100644
index 000000000..d1eaebfbf
--- /dev/null
+++ b/python/pystache/tox.ini
@@ -0,0 +1,36 @@
+# A tox configuration file to test across multiple Python versions.
+#
+# http://pypi.python.org/pypi/tox
+#
+[tox]
+# Tox 1.4 drops py24 and adds py33. In the current version, we want to
+# support 2.4, so we can't simultaneously support 3.3.
+envlist = py24,py25,py26,py27,py27-yaml,py27-noargs,py31,py32,pypy
+
+[testenv]
+# Change the working directory so that we don't import the pystache located
+# in the original location.
+changedir =
+ {envbindir}
+commands =
+ pystache-test {toxinidir}
+
+# Check that the spec tests work with PyYAML.
+[testenv:py27-yaml]
+basepython =
+ python2.7
+deps =
+ PyYAML
+changedir =
+ {envbindir}
+commands =
+ pystache-test {toxinidir}
+
+# Check that pystache-test works from an install with no arguments.
+[testenv:py27-noargs]
+basepython =
+ python2.7
+changedir =
+ {envbindir}
+commands =
+ pystache-test
diff --git a/python/pytest/.coveragerc b/python/pytest/.coveragerc
new file mode 100644
index 000000000..27db64e09
--- /dev/null
+++ b/python/pytest/.coveragerc
@@ -0,0 +1,7 @@
+[run]
+omit =
+ # standalonetemplate is read dynamically and tested by test_genscript
+ *standalonetemplate.py
+ # oldinterpret could be removed, as it is no longer used in py26+
+ *oldinterpret.py
+ vendored_packages
diff --git a/python/pytest/AUTHORS b/python/pytest/AUTHORS
new file mode 100644
index 000000000..f4a21b22d
--- /dev/null
+++ b/python/pytest/AUTHORS
@@ -0,0 +1,91 @@
+Holger Krekel, holger at merlinux eu
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Abhijeet Kasurde
+Anatoly Bubenkoff
+Andreas Zeidler
+Andy Freeland
+Anthon van der Neut
+Armin Rigo
+Aron Curzon
+Aviv Palivoda
+Benjamin Peterson
+Bob Ippolito
+Brian Dorsey
+Brian Okken
+Brianna Laugher
+Bruno Oliveira
+Carl Friedrich Bolz
+Charles Cloud
+Chris Lamb
+Christian Theunert
+Christian Tismer
+Christopher Gilling
+Daniel Grana
+Daniel Hahler
+Daniel Nuri
+Dave Hunt
+David Mohr
+David Vierra
+Edison Gustavo Muenz
+Eduardo Schettino
+Endre Galaczi
+Elizaveta Shashkova
+Eric Hunsberger
+Eric Siegerman
+Erik M. Bray
+Florian Bruhin
+Floris Bruynooghe
+Gabriel Reis
+Georgy Dyuldin
+Graham Horler
+Grig Gheorghiu
+Guido Wesdorp
+Harald Armin Massa
+Ian Bicking
+Jaap Broekhuizen
+Jan Balster
+Janne Vanhala
+Jason R. Coombs
+John Towler
+Joshua Bronson
+Jurko Gospodnetić
+Katarzyna Jachim
+Kevin Cox
+Lee Kamentsky
+Lukas Bednar
+Maciek Fijalkowski
+Maho
+Marc Schlaich
+Mark Abramowitz
+Markus Unterwaditzer
+Martijn Faassen
+Martin Prusse
+Matt Bachmann
+Michael Aquilina
+Michael Birtwell
+Michael Droettboom
+Nicolas Delaby
+Pieter Mulder
+Piotr Banaszkiewicz
+Punyashloka Biswal
+Quentin Pradet
+Ralf Schmitt
+Raphael Pierzina
+Ronny Pfannschmidt
+Ross Lawley
+Ryan Wooden
+Samuele Pedroni
+Tom Viner
+Trevor Bekolay
+Wouter van Ackooy
+David Díaz-Barquero
+Eric Hunsberger
+Simon Gomizelj
+Russel Winder
+Ben Webb
+Alexei Kozlenok
+Cal Leeming
+Feng Ma
diff --git a/python/pytest/LICENSE b/python/pytest/LICENSE
new file mode 100644
index 000000000..9e27bd784
--- /dev/null
+++ b/python/pytest/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2004-2016 Holger Krekel and others
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/python/pytest/MANIFEST.in b/python/pytest/MANIFEST.in
new file mode 100644
index 000000000..266a9184d
--- /dev/null
+++ b/python/pytest/MANIFEST.in
@@ -0,0 +1,34 @@
+include CHANGELOG.rst
+include LICENSE
+include AUTHORS
+
+include README.rst
+include CONTRIBUTING.rst
+
+include tox.ini
+include setup.py
+
+include .coveragerc
+
+include plugin-test.sh
+include requirements-docs.txt
+include runtox.py
+
+recursive-include bench *.py
+recursive-include extra *.py
+
+graft testing
+graft doc
+
+exclude _pytest/impl
+
+graft _pytest/vendored_packages
+
+recursive-exclude * *.pyc *.pyo
+
+exclude appveyor/install.ps1
+exclude appveyor.yml
+exclude appveyor
+
+exclude ISSUES.txt
+exclude HOWTORELEASE.rst
diff --git a/python/pytest/PKG-INFO b/python/pytest/PKG-INFO
new file mode 100644
index 000000000..7b801be0d
--- /dev/null
+++ b/python/pytest/PKG-INFO
@@ -0,0 +1,133 @@
+Metadata-Version: 1.1
+Name: pytest
+Version: 2.9.2
+Summary: pytest: simple powerful testing with Python
+Home-page: http://pytest.org
+Author: Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others
+Author-email: holger at merlinux.eu
+License: MIT license
+Description: .. image:: http://pytest.org/latest/_static/pytest1.png
+ :target: http://pytest.org
+ :align: center
+ :alt: pytest
+
+ ------
+
+ .. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+ .. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+ .. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest
+ .. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest
+ .. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/pytest
+
+ The ``pytest`` framework makes it easy to write small tests, yet
+ scales to support complex functional testing for applications and libraries.
+
+ An example of a simple test:
+
+ .. code-block:: python
+
+ # content of test_sample.py
+ def func(x):
+ return x + 1
+
+ def test_answer():
+ assert func(3) == 5
+
+
+ To execute it::
+
+ $ py.test
+ ======= test session starts ========
+ platform linux -- Python 3.4.3, pytest-2.8.5, py-1.4.31, pluggy-0.3.1
+ collected 1 items
+
+ test_sample.py F
+
+ ======= FAILURES ========
+ _______ test_answer ________
+
+ def test_answer():
+ > assert func(3) == 5
+ E assert 4 == 5
+ E + where 4 = func(3)
+
+ test_sample.py:5: AssertionError
+ ======= 1 failed in 0.12 seconds ========
+
+ Due to ``py.test``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://pytest.org/latest/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+ Features
+ --------
+
+ - Detailed info on failing `assert statements <http://pytest.org/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
+
+ - `Auto-discovery
+ <http://pytest.org/latest/goodpractices.html#python-test-discovery>`_
+ of test modules and functions;
+
+ - `Modular fixtures <http://pytest.org/latest/fixture.html>`_ for
+ managing small or parametrized long-lived test resources;
+
+ - Can run `unittest <http://pytest.org/latest/unittest.html>`_ (or trial),
+ `nose <http://pytest.org/latest/nose.html>`_ test suites out of the box;
+
+ - Python2.6+, Python3.2+, PyPy-2.3, Jython-2.5 (untested);
+
+ - Rich plugin architecture, with 150+ `external plugins <http://pytest.org/latest/plugins.html#installing-external-plugins-searching>`_ and a thriving community;
+
+
+ Documentation
+ -------------
+
+ For full documentation, including installation, tutorials and PDF documents, please see http://pytest.org.
+
+
+ Bugs/Requests
+ -------------
+
+ Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+ Changelog
+ ---------
+
+ Consult the `Changelog <http://pytest.org/latest/changelog.html>`_ page for fixes and enhancements of each version.
+
+
+ License
+ -------
+
+ Copyright Holger Krekel and others, 2004-2016.
+
+ Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+ .. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
+
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/python/pytest/README.rst b/python/pytest/README.rst
new file mode 100644
index 000000000..68fc92211
--- /dev/null
+++ b/python/pytest/README.rst
@@ -0,0 +1,102 @@
+.. image:: http://pytest.org/latest/_static/pytest1.png
+ :target: http://pytest.org
+ :align: center
+ :alt: pytest
+
+------
+
+.. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest
+.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest
+.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/pytest
+
+The ``pytest`` framework makes it easy to write small tests, yet
+scales to support complex functional testing for applications and libraries.
+
+An example of a simple test:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def func(x):
+ return x + 1
+
+ def test_answer():
+ assert func(3) == 5
+
+
+To execute it::
+
+ $ py.test
+ ======= test session starts ========
+ platform linux -- Python 3.4.3, pytest-2.8.5, py-1.4.31, pluggy-0.3.1
+ collected 1 items
+
+ test_sample.py F
+
+ ======= FAILURES ========
+ _______ test_answer ________
+
+ def test_answer():
+ > assert func(3) == 5
+ E assert 4 == 5
+ E + where 4 = func(3)
+
+ test_sample.py:5: AssertionError
+ ======= 1 failed in 0.12 seconds ========
+
+Due to ``py.test``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://pytest.org/latest/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+Features
+--------
+
+- Detailed info on failing `assert statements <http://pytest.org/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
+
+- `Auto-discovery
+ <http://pytest.org/latest/goodpractices.html#python-test-discovery>`_
+ of test modules and functions;
+
+- `Modular fixtures <http://pytest.org/latest/fixture.html>`_ for
+ managing small or parametrized long-lived test resources;
+
+- Can run `unittest <http://pytest.org/latest/unittest.html>`_ (or trial),
+ `nose <http://pytest.org/latest/nose.html>`_ test suites out of the box;
+
+- Python2.6+, Python3.2+, PyPy-2.3, Jython-2.5 (untested);
+
+- Rich plugin architecture, with 150+ `external plugins <http://pytest.org/latest/plugins.html#installing-external-plugins-searching>`_ and a thriving community;
+
+
+Documentation
+-------------
+
+For full documentation, including installation, tutorials and PDF documents, please see http://pytest.org.
+
+
+Bugs/Requests
+-------------
+
+Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+Changelog
+---------
+
+Consult the `Changelog <http://pytest.org/latest/changelog.html>`_ page for fixes and enhancements of each version.
+
+
+License
+-------
+
+Copyright Holger Krekel and others, 2004-2016.
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
diff --git a/python/pytest/_pytest/__init__.py b/python/pytest/_pytest/__init__.py
new file mode 100644
index 000000000..23dac6d05
--- /dev/null
+++ b/python/pytest/_pytest/__init__.py
@@ -0,0 +1,2 @@
+#
+__version__ = '2.9.2'
diff --git a/python/pytest/_pytest/_argcomplete.py b/python/pytest/_pytest/_argcomplete.py
new file mode 100644
index 000000000..955855a96
--- /dev/null
+++ b/python/pytest/_pytest/_argcomplete.py
@@ -0,0 +1,101 @@
+
+"""allow bash-completion for argparse with argcomplete if installed
+needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
+to find the magic string, so _ARGCOMPLETE env. var is never set, and
+this does not need special code.
+
+argcomplete does not support python 2.5 (although the changes for that
+are minor).
+
+Function try_argcomplete(parser) should be called directly before
+the call to ArgumentParser.parse_args().
+
+The filescompleter is what you normally would use on the positional
+arguments specification, in order to get "dirname/" after "dirn<TAB>"
+instead of the default "dirname ":
+
+ optparser.add_argument(Config._file_or_dir, nargs='*'
+ ).completer=filescompleter
+
+Other, application specific, completers should go in the file
+doing the add_argument calls as they need to be specified as .completer
+attributes as well. (If argcomplete is not installed, the function the
+attribute points to will not be used).
+
+SPEEDUP
+=======
+The generic argcomplete script for bash-completion
+(/etc/bash_completion.d/python-argcomplete.sh)
+uses a python program to inspect the startup script generated by pip.
+You can speed up completion somewhat by changing this script to include
+ # PYTHON_ARGCOMPLETE_OK
+so that the python-argcomplete-check-easy-install-script does not
+need to be called to find the entry point of the code and see if that is
+marked with PYTHON_ARGCOMPLETE_OK.
+
+INSTALL/DEBUGGING
+=================
+To include this support in another application that has setup.py generated
+scripts:
+- add the line:
+ # PYTHON_ARGCOMPLETE_OK
+ near the top of the main python entry point
+- include in the file calling parse_args():
+ from _argcomplete import try_argcomplete, filescompleter
+ then call try_argcomplete just before parse_args(), and optionally add
+ filescompleter to the positional arguments' add_argument()
+If things do not work right away:
+- switch on argcomplete debugging with (also helpful when doing custom
+ completers):
+ export _ARC_DEBUG=1
+- run:
+ python-argcomplete-check-easy-install-script $(which appname)
+ echo $?
+ will echo 0 if the magic line has been found, 1 if not
+- sometimes it helps to find early on errors using:
+ _ARGCOMPLETE=1 _ARC_DEBUG=1 appname
+ which should throw a KeyError: 'COMPLINE' (which is properly set by the
+ global argcomplete script).
+"""
+
+import sys
+import os
+from glob import glob
+
+class FastFilesCompleter:
+ 'Fast file completer class'
+ def __init__(self, directories=True):
+ self.directories = directories
+
+ def __call__(self, prefix, **kwargs):
+ """only called on non option completions"""
+ if os.path.sep in prefix[1:]:
+ prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
+ else:
+ prefix_dir = 0
+ completion = []
+ globbed = []
+ if '*' not in prefix and '?' not in prefix:
+ if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash
+ globbed.extend(glob(prefix + '.*'))
+ prefix += '*'
+ globbed.extend(glob(prefix))
+ for x in sorted(globbed):
+ if os.path.isdir(x):
+ x += '/'
+ # append stripping the prefix (like bash, not like compgen)
+ completion.append(x[prefix_dir:])
+ return completion
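+
+ # Illustrative behaviour (not in the original source): in a directory
+ # containing "setup.py" and a subdirectory "testing",
+ #   FastFilesCompleter()('se') would return ['setup.py']
+ #   FastFilesCompleter()('te') would return ['testing/']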
+
+if os.environ.get('_ARGCOMPLETE'):
+ try:
+ import argcomplete.completers
+ except ImportError:
+ sys.exit(-1)
+ filescompleter = FastFilesCompleter()
+
+ def try_argcomplete(parser):
+ argcomplete.autocomplete(parser)
+else:
+ def try_argcomplete(parser): pass
+ filescompleter = None
diff --git a/python/pytest/_pytest/_code/__init__.py b/python/pytest/_pytest/_code/__init__.py
new file mode 100644
index 000000000..c046b9716
--- /dev/null
+++ b/python/pytest/_pytest/_code/__init__.py
@@ -0,0 +1,12 @@
+""" python inspection/code generation API """
+from .code import Code # noqa
+from .code import ExceptionInfo # noqa
+from .code import Frame # noqa
+from .code import Traceback # noqa
+from .code import getrawcode # noqa
+from .code import patch_builtins # noqa
+from .code import unpatch_builtins # noqa
+from .source import Source # noqa
+from .source import compile_ as compile # noqa
+from .source import getfslineno # noqa
+
diff --git a/python/pytest/_pytest/_code/_py2traceback.py b/python/pytest/_pytest/_code/_py2traceback.py
new file mode 100644
index 000000000..a830d9899
--- /dev/null
+++ b/python/pytest/_pytest/_code/_py2traceback.py
@@ -0,0 +1,81 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubclass(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ if isinstance(badline, bytes): # python 2 only
+ badline = badline.decode('utf-8', 'replace')
+ lines.append(u' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+ # non-space whitespace (likes tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
diff --git a/python/pytest/_pytest/_code/code.py b/python/pytest/_pytest/_code/code.py
new file mode 100644
index 000000000..8995cc1f7
--- /dev/null
+++ b/python/pytest/_pytest/_code/code.py
@@ -0,0 +1,805 @@
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+
+import py
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from ._py2traceback import format_exception_only
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" %(rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a _pytest._code.Source object for the full source file of the code
+ """
+ from _pytest._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a _pytest._code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ import _pytest._code
+ return _pytest._code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+ # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ import _pytest._code
+ if self.code.fullsource is None:
+ return _pytest._code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+ 'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals)
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ import _pytest._code
+ return _pytest._code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ _pytest._code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+ locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ from _pytest.assertion.reinterpret import reinterpret
+ if self.exprinfo is None:
+ source = py.builtin._totext(self.statement).strip()
+ x = reinterpret(source, self.frame, should_fail=True)
+ if not py.builtin._istext(x):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from _pytest._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+ name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+ by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackEntries which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
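+
+ # Illustrative usage (not in the original source), e.g. to drop entries
+ # from third-party code:
+ #   traceback = traceback.filter(lambda x: 'site-packages' not in str(x.path))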
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackEntry where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ import _pytest._code
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (_pytest._code.Traceback instance)
+ self.traceback = _pytest._code.Traceback(self.tb)
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ _pytest._code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
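+
+ # Illustrative example: with assertion rewriting active, a failing
+ # `assert 1 == 2` gives exconly() == "AssertionError: assert 1 == 2"
+ # while exconly(tryshort=True) yields just "assert 1 == 2".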
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
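+
+ # Usage sketch (illustrative only): render a long-style report including
+ # local variables, then print it:
+ #
+ #     repr_info = excinfo.getrepr(style="long", showlocals=True)
+ #     print(str(repr_info))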
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return unicode(loc)
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ import _pytest._code
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = _pytest._code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
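+
+ # Illustrative output sketch: context lines get a plain space prefix, the
+ # failing statement gets the flow_marker and the exception text gets the
+ # fail_marker, roughly:
+ #
+ #         x = 1
+ #     >   assert x == 2
+ #     E   assert 1 == 2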
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # py.std.pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ import _pytest._code
+ source = self._getentrysource(entry)
+ if source is None:
+ source = _pytest._code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if is_recursion_error(excinfo):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+ # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from _pytest.assertion import reinterpret
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = reinterpret.AssertionError
+ if compile:
+ import _pytest._code
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = _pytest._code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
+if sys.version_info[:2] >= (3, 5): # RecursionError introduced in 3.5
+ def is_recursion_error(excinfo):
+ return excinfo.errisinstance(RecursionError) # noqa
+else:
+ def is_recursion_error(excinfo):
+ if not excinfo.errisinstance(RuntimeError):
+ return False
+ try:
+ return "maximum recursion depth exceeded" in str(excinfo.value)
+ except UnicodeError:
+ return False
diff --git a/python/pytest/_pytest/_code/source.py b/python/pytest/_pytest/_code/source.py
new file mode 100644
index 000000000..a1521f8a2
--- /dev/null
+++ b/python/pytest/_pytest/_code/source.py
@@ -0,0 +1,421 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
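+ # Construction sketch (illustrative only): Source accepts strings, lists
+ # of lines, other Source objects or live functions, e.g.
+ #
+ #     s = Source("if x:\n    pass")
+ #     assert s.lines == ["if x:", "    pass"]
+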
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given line number (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region which containing the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
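+ # Illustrative example: for the two-line statement
+ #
+ #     s = Source("x = (1 +\n     2)")
+ #
+ # s.getstatement(1) returns a Source holding both lines, since line 1
+ # belongs to the statement that starts on line 0.
+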
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ if sys.version_info[0] >= 3:
+ # XXX py3's inspect.getsourcefile() checks for a module
+ # and a pep302 __loader__ ... we don't have a module
+ # at code compile-time so we need to fake it here
+ m = ModuleType("_pycodecompile_pseudo_module")
+ py.std.inspect.modulesbyfile[filename] = None
+ py.std.sys.modules[None] = m
+ m.__loader__ = 1
+ py.std.linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
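+# Usage sketch (illustrative only): compile a snippet while keeping its
+# source retrievable for later traceback rendering:
+#
+#     co = compile_("assert 2 + 2 == 4")
+#     py.builtin.exec_(co)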
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ import _pytest._code
+ try:
+ code = _pytest._code.Code(obj)
+ except TypeError:
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = py.std.inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ import _pytest._code
+ obj = _pytest._code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
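+# Worked example (illustrative): for a module whose statements start on
+# 0-based lines 0, 1 and 2, e.g.
+#
+#     x = 1        # line 0
+#     if x:        # line 1
+#         y = 2    # line 2
+#
+# get_statement_startend2(1, node) returns (1, 2): the statement at line 1
+# spans up to the next recorded statement lineno.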
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ if sys.version_info < (2,7):
+ content += "\n"
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+ # by using the BlockFinder helper which inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region which containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/python/pytest/_pytest/_pluggy.py b/python/pytest/_pytest/_pluggy.py
new file mode 100644
index 000000000..87d32cf8d
--- /dev/null
+++ b/python/pytest/_pytest/_pluggy.py
@@ -0,0 +1,11 @@
+"""
+imports symbols from vendored "pluggy" if available, otherwise
+falls back to importing "pluggy" from the default namespace.
+"""
+
+try:
+ from _pytest.vendored_packages.pluggy import * # noqa
+ from _pytest.vendored_packages.pluggy import __version__ # noqa
+except ImportError:
+ from pluggy import * # noqa
+ from pluggy import __version__ # noqa
diff --git a/python/pytest/_pytest/assertion/__init__.py b/python/pytest/_pytest/assertion/__init__.py
new file mode 100644
index 000000000..6921deb2a
--- /dev/null
+++ b/python/pytest/_pytest/assertion/__init__.py
@@ -0,0 +1,176 @@
+"""
+support for presenting detailed information in failing assertions.
+"""
+import py
+import os
+import sys
+from _pytest.monkeypatch import monkeypatch
+from _pytest.assertion import util
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption('--assert',
+ action="store",
+ dest="assertmode",
+ choices=("rewrite", "reinterp", "plain",),
+ default="rewrite",
+ metavar="MODE",
+ help="""control assertion debugging tools. 'plain'
+ performs no assertion debugging. 'reinterp'
+ reinterprets assert statements after they failed
+ to provide assertion expression information.
+ 'rewrite' (the default) rewrites assert
+ statements in test modules on import to
+ provide assert expression information. """)
+ group.addoption('--no-assert',
+ action="store_true",
+ default=False,
+ dest="noassert",
+ help="DEPRECATED equivalent to --assert=plain")
+ group.addoption('--nomagic', '--no-magic',
+ action="store_true",
+ default=False,
+ help="DEPRECATED equivalent to --assert=plain")
+
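+# Usage sketch (illustrative only): the mode is chosen on the command line,
+# e.g.
+#
+#     pytest --assert=rewrite   # default: rewrite asserts on import
+#     pytest --assert=plain     # no assertion debugging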
+
+class AssertionState:
+ """State for the assertion plugin."""
+
+ def __init__(self, config, mode):
+ self.mode = mode
+ self.trace = config.trace.root.get("assertion")
+
+
+def pytest_configure(config):
+ mode = config.getvalue("assertmode")
+ if config.getvalue("noassert") or config.getvalue("nomagic"):
+ mode = "plain"
+ if mode == "rewrite":
+ try:
+ import ast # noqa
+ except ImportError:
+ mode = "reinterp"
+ else:
+ # Both Jython and CPython 2.6.0 have AST bugs that make the
+ # assertion rewriting hook malfunction.
+ if (sys.platform.startswith('java') or
+ sys.version_info[:3] == (2, 6, 0)):
+ mode = "reinterp"
+ if mode != "plain":
+ _load_modules(mode)
+ m = monkeypatch()
+ config._cleanup.append(m.undo)
+ m.setattr(py.builtin.builtins, 'AssertionError',
+ reinterpret.AssertionError) # noqa
+ hook = None
+ if mode == "rewrite":
+ hook = rewrite.AssertionRewritingHook() # noqa
+ sys.meta_path.insert(0, hook)
+ warn_about_missing_assertion(mode)
+ config._assertstate = AssertionState(config, mode)
+ config._assertstate.hook = hook
+ config._assertstate.trace("configured with mode set to %r" % (mode,))
+ def undo():
+ hook = config._assertstate.hook
+ if hook is not None and hook in sys.meta_path:
+ sys.meta_path.remove(hook)
+ config.add_cleanup(undo)
+
+
+def pytest_collection(session):
+ # this hook is only called when test modules are collected
+ # so for example not in the master process of pytest-xdist
+ # (which does not collect test modules)
+ hook = session.config._assertstate.hook
+ if hook is not None:
+ hook.set_session(session)
+
+
+def _running_on_ci():
+ """Check if we're currently running on a CI system."""
+ env_vars = ['CI', 'BUILD_NUMBER']
+ return any(var in os.environ for var in env_vars)
+
+
+def pytest_runtest_setup(item):
+ """Setup the pytest_assertrepr_compare hook
+
+ The newinterpret and rewrite modules will use util._reprcompare if
+ it exists to use custom reporting via the
+ pytest_assertrepr_compare hook. This sets up this custom
+ comparison for the test.
+ """
+ def callbinrepr(op, left, right):
+ """Call the pytest_assertrepr_compare hook and prepare the result
+
+ This uses the first result from the hook and then ensures the
+ following:
+ * Overly verbose explanations are dropped unless -vv was used or
+ running on a CI.
+ * Embedded newlines are escaped to help util.format_explanation()
+ later.
+ * If the rewrite mode is used embedded %-characters are replaced
+ to protect later % formatting.
+
+ The result can be formatted by util.format_explanation() for
+ pretty printing.
+ """
+ hook_result = item.ihook.pytest_assertrepr_compare(
+ config=item.config, op=op, left=left, right=right)
+ for new_expl in hook_result:
+ if new_expl:
+ if (sum(len(p) for p in new_expl[1:]) > 80*8 and
+ item.config.option.verbose < 2 and
+ not _running_on_ci()):
+ show_max = 10
+ truncated_lines = len(new_expl) - show_max
+ new_expl[show_max:] = [py.builtin._totext(
+ 'Detailed information truncated (%d more lines)'
+ ', use "-vv" to show' % truncated_lines)]
+ new_expl = [line.replace("\n", "\\n") for line in new_expl]
+ res = py.builtin._totext("\n~").join(new_expl)
+ if item.config.getvalue("assertmode") == "rewrite":
+ res = res.replace("%", "%%")
+ return res
+ util._reprcompare = callbinrepr
+
+
+def pytest_runtest_teardown(item):
+ util._reprcompare = None
+
+
+def pytest_sessionfinish(session):
+ hook = session.config._assertstate.hook
+ if hook is not None:
+ hook.session = None
+
+
+def _load_modules(mode):
+ """Lazily import assertion related code."""
+ global rewrite, reinterpret
+ from _pytest.assertion import reinterpret # noqa
+ if mode == "rewrite":
+ from _pytest.assertion import rewrite # noqa
+
+
+def warn_about_missing_assertion(mode):
+ try:
+ assert False
+ except AssertionError:
+ pass
+ else:
+ if mode == "rewrite":
+ specifically = ("assertions which are not in test modules "
+ "will be ignored")
+ else:
+ specifically = "failing tests may report as passing"
+
+ sys.stderr.write("WARNING: " + specifically +
+ " because assert statements are not executed "
+ "by the underlying Python interpreter "
+ "(are you using python -O?)\n")
+
+
+# Expose this plugin's implementation for the pytest_assertrepr_compare hook
+pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/python/pytest/_pytest/assertion/reinterpret.py b/python/pytest/_pytest/assertion/reinterpret.py
new file mode 100644
index 000000000..f4262c3ac
--- /dev/null
+++ b/python/pytest/_pytest/assertion/reinterpret.py
@@ -0,0 +1,407 @@
+"""
+Find intermediate evaluation results in assert statements through the builtin AST.
+"""
+import ast
+import sys
+
+import _pytest._code
+import py
+from _pytest.assertion import util
+u = py.builtin._totext
+
+
+class AssertionError(util.BuiltinAssertionError):
+ def __init__(self, *args):
+ util.BuiltinAssertionError.__init__(self, *args)
+ if args:
+ # on Python2.6 we get len(args)==2 for: assert 0, (x,y)
+ # on Python2.7 and above we always get len(args) == 1
+ # with args[0] being the (x,y) tuple.
+ if len(args) > 1:
+ toprint = args
+ else:
+ toprint = args[0]
+ try:
+ self.msg = u(toprint)
+ except Exception:
+ self.msg = u(
+ "<[broken __repr__] %s at %0xd>"
+ % (toprint.__class__, id(toprint)))
+ else:
+ f = _pytest._code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+
+if sys.platform.startswith("java"):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+try:
+ _Starred = ast.Starred
+except AttributeError:
+ # Python 2. Define a dummy class so isinstance() will always be False.
+ class _Starred(object): pass
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def reinterpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --assert=plain)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = _pytest._code.Frame(sys._getframe(1))
+ return reinterpret(offending_line, frame)
+
+def getfailure(e):
+ explanation = util.format_explanation(e.explanation)
+ value = e.cause[1]
+ if str(value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.cause[0].__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to gleam useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = None
+ if local is None or not self.frame.is_true(local):
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not self.frame.is_true(result):
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ if util._reprcompare is not None:
+ res = util._reprcompare(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ if isinstance(arg, _Starred):
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ else:
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ if keyword.arg:
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ else:
+ arg_name = "__exprinfo_kwds"
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+
+ ns[arg_name] = arg_result
+
+ if getattr(call, 'starargs', None):
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+
+ if getattr(call, 'kwargs', None):
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except AttributeError:
+ # Maybe the attribute name needs to be mangled?
+ if not attr.attr.startswith("__") or attr.attr.endswith("__"):
+ raise
+ source = "getattr(__exprinfo_expr.__class__, '__name__', '')"
+ co = self._compile(source)
+ class_name = self.frame.eval(co, __exprinfo_expr=source_result)
+ mangled_attr = "_" + class_name + attr.attr
+ source = "__exprinfo_expr.%s" % (mangled_attr,)
+ co = self._compile(source)
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = None
+ if from_instance is None or self.frame.is_true(from_instance):
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ explanation = "assert %s" % (test_explanation,)
+ if not self.frame.is_true(test_result):
+ try:
+ raise util.BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
+
diff --git a/python/pytest/_pytest/assertion/rewrite.py b/python/pytest/_pytest/assertion/rewrite.py
new file mode 100644
index 000000000..14b8e49db
--- /dev/null
+++ b/python/pytest/_pytest/assertion/rewrite.py
@@ -0,0 +1,885 @@
+"""Rewrite assertion AST to produce nice error messages"""
+
+import ast
+import errno
+import itertools
+import imp
+import marshal
+import os
+import re
+import struct
+import sys
+import types
+
+import py
+from _pytest.assertion import util
+
+
+# pytest caches rewritten pycs in __pycache__.
+if hasattr(imp, "get_tag"):
+ PYTEST_TAG = imp.get_tag() + "-PYTEST"
+else:
+ if hasattr(sys, "pypy_version_info"):
+ impl = "pypy"
+ elif sys.platform == "java":
+ impl = "jython"
+ else:
+ impl = "cpython"
+ ver = sys.version_info
+ PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
+ del ver, impl
+
+PYC_EXT = ".py" + (__debug__ and "c" or "o")
+PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
+
+REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
+ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
+
+if sys.version_info >= (3,5):
+ ast_Call = ast.Call
+else:
+ ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
+
+
+class AssertionRewritingHook(object):
+ """PEP302 Import hook which rewrites asserts."""
+
+ def __init__(self):
+ self.session = None
+ self.modules = {}
+ self._register_with_pkg_resources()
+
+ def set_session(self, session):
+ self.fnpats = session.config.getini("python_files")
+ self.session = session
+
+ def find_module(self, name, path=None):
+ if self.session is None:
+ return None
+ sess = self.session
+ state = sess.config._assertstate
+ state.trace("find_module called for: %s" % name)
+ names = name.rsplit(".", 1)
+ lastname = names[-1]
+ pth = None
+ if path is not None:
+ # Starting with Python 3.3, path is a _NamespacePath(), which
+ # causes problems if not converted to list.
+ path = list(path)
+ if len(path) == 1:
+ pth = path[0]
+ if pth is None:
+ try:
+ fd, fn, desc = imp.find_module(lastname, path)
+ except ImportError:
+ return None
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ if tp == imp.PY_COMPILED:
+ if hasattr(imp, "source_from_cache"):
+ fn = imp.source_from_cache(fn)
+ else:
+ fn = fn[:-1]
+ elif tp != imp.PY_SOURCE:
+ # Don't know what this is.
+ return None
+ else:
+ fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
+ fn_pypath = py.path.local(fn)
+ # Is this a test file?
+ if not sess.isinitpath(fn):
+ # We have to be very careful here because imports in this code can
+ # trigger a cycle.
+ self.session = None
+ try:
+ for pat in self.fnpats:
+ if fn_pypath.fnmatch(pat):
+ state.trace("matched test file %r" % (fn,))
+ break
+ else:
+ return None
+ finally:
+ self.session = sess
+ else:
+ state.trace("matched test file (was specified on cmdline): %r" %
+ (fn,))
+ # The requested module looks like a test file, so rewrite it. This is
+ # the most magical part of the process: load the source, rewrite the
+ # asserts, and load the rewritten source. We also cache the rewritten
+ # module code in a special pyc. We must be aware of the possibility of
+ # concurrent pytest processes rewriting and loading pycs. To avoid
+ # tricky race conditions, we maintain the following invariant: The
+ # cached pyc is always a complete, valid pyc. Operations on it must be
+ # atomic. POSIX's atomic rename comes in handy.
+ write = not sys.dont_write_bytecode
+ cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
+ if write:
+ try:
+ os.mkdir(cache_dir)
+ except OSError:
+ e = sys.exc_info()[1].errno
+ if e == errno.EEXIST:
+ # Either the __pycache__ directory already exists (the
+ # common case) or it's blocked by a non-dir node. In the
+ # latter case, we'll ignore it in _write_pyc.
+ pass
+ elif e in [errno.ENOENT, errno.ENOTDIR]:
+ # One of the path components was not a directory, likely
+ # because we're in a zip file.
+ write = False
+ elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
+ state.trace("read only directory: %r" % fn_pypath.dirname)
+ write = False
+ else:
+ raise
+ cache_name = fn_pypath.basename[:-3] + PYC_TAIL
+ pyc = os.path.join(cache_dir, cache_name)
+ # Notice that even if we're in a read-only directory, I'm going
+ # to check for a cached pyc. This may not be optimal...
+ co = _read_pyc(fn_pypath, pyc, state.trace)
+ if co is None:
+ state.trace("rewriting %r" % (fn,))
+ source_stat, co = _rewrite_test(state, fn_pypath)
+ if co is None:
+ # Probably a SyntaxError in the test.
+ return None
+ if write:
+ _make_rewritten_pyc(state, source_stat, pyc, co)
+ else:
+ state.trace("found cached rewritten pyc for %r" % (fn,))
+ self.modules[name] = co, pyc
+ return self
+
+ def load_module(self, name):
+ # If there is an existing module object named 'fullname' in
+ # sys.modules, the loader must use that existing module. (Otherwise,
+ # the reload() builtin will not work correctly.)
+ if name in sys.modules:
+ return sys.modules[name]
+
+ co, pyc = self.modules.pop(name)
+ # I wish I could just call imp.load_compiled here, but __file__ has to
+ # be set properly. In Python 3.2+, this all would be handled correctly
+ # by load_compiled.
+ mod = sys.modules[name] = imp.new_module(name)
+ try:
+ mod.__file__ = co.co_filename
+ # Normally, this attribute is 3.2+.
+ mod.__cached__ = pyc
+ mod.__loader__ = self
+ py.builtin.exec_(co, mod.__dict__)
+ except:
+ del sys.modules[name]
+ raise
+ return sys.modules[name]
+
+
+
+ def is_package(self, name):
+ try:
+ fd, fn, desc = imp.find_module(name)
+ except ImportError:
+ return False
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ return tp == imp.PKG_DIRECTORY
+
+ @classmethod
+ def _register_with_pkg_resources(cls):
+ """
+ Ensure package resources can be loaded from this loader. May be called
+ multiple times, as the operation is idempotent.
+ """
+ try:
+ import pkg_resources
+ # access an attribute in case a deferred importer is present
+ pkg_resources.__name__
+ except ImportError:
+ return
+
+ # Since pytest tests are always located in the file system, the
+ # DefaultProvider is appropriate.
+ pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
+
+ def get_data(self, pathname):
+ """Optional PEP302 get_data API.
+ """
+ with open(pathname, 'rb') as f:
+ return f.read()
+
+
+def _write_pyc(state, co, source_stat, pyc):
+ # Technically, we don't have to have the same pyc format as
+ # (C)Python, since these "pycs" should never be seen by builtin
+ # import. However, there's little reason to deviate, and I hope
+ # sometime to be able to use imp.load_compiled to load them. (See
+ # the comment in load_module above.)
+ try:
+ fp = open(pyc, "wb")
+ except IOError:
+ err = sys.exc_info()[1].errno
+ state.trace("error writing pyc file at %s: errno=%s" %(pyc, err))
+ # we ignore any failure to write the cache file
+ # there are many reasons, permission-denied, __pycache__ being a
+ # file etc.
+ return False
+ try:
+ fp.write(imp.get_magic())
+ mtime = int(source_stat.mtime)
+ size = source_stat.size & 0xFFFFFFFF
+ fp.write(struct.pack("<ll", mtime, size))
+ marshal.dump(co, fp)
+ finally:
+ fp.close()
+ return True
+
+RN = "\r\n".encode("utf-8")
+N = "\n".encode("utf-8")
+
+cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
+BOM_UTF8 = '\xef\xbb\xbf'
+
+def _rewrite_test(state, fn):
+ """Try to read and rewrite *fn* and return the code object."""
+ try:
+ stat = fn.stat()
+ source = fn.read("rb")
+ except EnvironmentError:
+ return None, None
+ if ASCII_IS_DEFAULT_ENCODING:
+ # ASCII is the default encoding in Python 2. Without a coding
+ # declaration, Python 2 will complain about any bytes in the file
+ # outside the ASCII range. Sadly, this behavior does not extend to
+ # compile() or ast.parse(), which prefer to interpret the bytes as
+ # latin-1. (At least they properly handle explicit coding cookies.) To
+ # preserve this error behavior, we could force ast.parse() to use ASCII
+ # as the encoding by inserting a coding cookie. Unfortunately, that
+ # messes up line numbers. Thus, we have to check ourselves if anything
+ # is outside the ASCII range in the case no encoding is explicitly
+ # declared. For more context, see issue #269. Yay for Python 3 which
+ # gets this right.
+ end1 = source.find("\n")
+ end2 = source.find("\n", end1 + 1)
+ if (not source.startswith(BOM_UTF8) and
+ cookie_re.match(source[0:end1]) is None and
+ cookie_re.match(source[end1 + 1:end2]) is None):
+ if hasattr(state, "_indecode"):
+ # encodings imported us again, so don't rewrite.
+ return None, None
+ state._indecode = True
+ try:
+ try:
+ source.decode("ascii")
+ except UnicodeDecodeError:
+ # Let it fail in real import.
+ return None, None
+ finally:
+ del state._indecode
+ # On Python versions which are not 2.7 and less than or equal to 3.1, the
+ # parser expects *nix newlines.
+ if REWRITE_NEWLINES:
+ source = source.replace(RN, N) + N
+ try:
+ tree = ast.parse(source)
+ except SyntaxError:
+ # Let this pop up again in the real import.
+ state.trace("failed to parse: %r" % (fn,))
+ return None, None
+ rewrite_asserts(tree)
+ try:
+ co = compile(tree, fn.strpath, "exec")
+ except SyntaxError:
+ # It's possible that this error is from some bug in the
+ # assertion rewriting, but I don't know of a fast way to tell.
+ state.trace("failed to compile: %r" % (fn,))
+ return None, None
+ return stat, co
+
+def _make_rewritten_pyc(state, source_stat, pyc, co):
+ """Try to dump rewritten code to *pyc*."""
+ if sys.platform.startswith("win"):
+ # Windows grants exclusive access to open files and doesn't have atomic
+ # rename, so just write into the final file.
+ _write_pyc(state, co, source_stat, pyc)
+ else:
+ # When not on windows, assume rename is atomic. Dump the code object
+ # into a file specific to this process and atomically replace it.
+ proc_pyc = pyc + "." + str(os.getpid())
+ if _write_pyc(state, co, source_stat, proc_pyc):
+ os.rename(proc_pyc, pyc)
+
+def _read_pyc(source, pyc, trace=lambda x: None):
+ """Possibly read a pytest pyc containing rewritten code.
+
+ Return rewritten code if successful or None if not.
+ """
+ try:
+ fp = open(pyc, "rb")
+ except IOError:
+ return None
+ with fp:
+ try:
+ mtime = int(source.mtime())
+ size = source.size()
+ data = fp.read(12)
+ except EnvironmentError as e:
+ trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
+ return None
+ # Check for invalid or out of date pyc file.
+ if (len(data) != 12 or data[:4] != imp.get_magic() or
+ struct.unpack("<ll", data[4:]) != (mtime, size)):
+ trace('_read_pyc(%s): invalid or out of date pyc' % source)
+ return None
+ try:
+ co = marshal.load(fp)
+ except Exception as e:
+ trace('_read_pyc(%s): marshal.load error %s' % (source, e))
+ return None
+ if not isinstance(co, types.CodeType):
+ trace('_read_pyc(%s): not a code object' % source)
+ return None
+ return co
+
+
+def rewrite_asserts(mod):
+ """Rewrite the assert statements in mod."""
+ AssertionRewriter().run(mod)
+
+
+def _saferepr(obj):
+ """Get a safe repr of an object for assertion error messages.
+
+ The assertion formatting (util.format_explanation()) requires
+ newlines to be escaped since they are a special character for it.
+ Normally assertion.util.format_explanation() does this but a custom
+ repr may itself contain one of the special escape sequences;
+ in particular '\n{' and '\n}' are likely to be present in JSON reprs.
+
+ """
+ repr = py.io.saferepr(obj)
+ if py.builtin._istext(repr):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ return repr.replace(t("\n"), t("\\n"))
+
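+# Illustrative example ("Weird" is a hypothetical class): a repr containing
+# a real newline is escaped so the explanation markers stay unambiguous:
+#
+#     class Weird(object):
+#         def __repr__(self):
+#             return "a\nb"
+#
+#     _saferepr(Weird())   # -> 'a\\nb' (escaped, no real newline)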
+
+from _pytest.assertion.util import format_explanation as _format_explanation # noqa
+
+def _format_assertmsg(obj):
+ """Format the custom assertion message given.
+
+ For strings this simply replaces newlines with '\n~' so that
+ util.format_explanation() will preserve them instead of escaping
+ newlines. For other objects py.io.saferepr() is used first.
+
+ """
+    # reprlib appears to have a bug: if a string contains a newline it
+    # gets escaped, but if an object has a .__repr__() which contains
+    # newlines it does not get escaped. In either case we want to
+    # preserve the newline.
+ if py.builtin._istext(obj) or py.builtin._isbytes(obj):
+ s = obj
+ is_repr = False
+ else:
+ s = py.io.saferepr(obj)
+ is_repr = True
+ if py.builtin._istext(s):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
+ if is_repr:
+ s = s.replace(t("\\n"), t("\n~"))
+ return s
+
+def _should_repr_global_name(obj):
+ return not hasattr(obj, "__name__") and not py.builtin.callable(obj)
+
+def _format_boolop(explanations, is_or):
+ explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
+ if py.builtin._istext(explanation):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ return explanation.replace(t('%'), t('%%'))
+
+def _call_reprcompare(ops, results, expls, each_obj):
+ for i, res, expl in zip(range(len(ops)), results, expls):
+ try:
+ done = not res
+ except Exception:
+ done = True
+ if done:
+ break
+ if util._reprcompare is not None:
+ custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
+ if custom is not None:
+ return custom
+ return expl
+
+
+unary_map = {
+ ast.Not: "not %s",
+ ast.Invert: "~%s",
+ ast.USub: "-%s",
+ ast.UAdd: "+%s"
+}
+
+binop_map = {
+ ast.BitOr: "|",
+ ast.BitXor: "^",
+ ast.BitAnd: "&",
+ ast.LShift: "<<",
+ ast.RShift: ">>",
+ ast.Add: "+",
+ ast.Sub: "-",
+ ast.Mult: "*",
+ ast.Div: "/",
+ ast.FloorDiv: "//",
+ ast.Mod: "%%", # escaped for string formatting
+ ast.Eq: "==",
+ ast.NotEq: "!=",
+ ast.Lt: "<",
+ ast.LtE: "<=",
+ ast.Gt: ">",
+ ast.GtE: ">=",
+ ast.Pow: "**",
+ ast.Is: "is",
+ ast.IsNot: "is not",
+ ast.In: "in",
+ ast.NotIn: "not in"
+}
+# Python 3.5+ compatibility
+try:
+ binop_map[ast.MatMult] = "@"
+except AttributeError:
+ pass
+
+# Python 3.4+ compatibility
+if hasattr(ast, "NameConstant"):
+ _NameConstant = ast.NameConstant
+else:
+ def _NameConstant(c):
+ return ast.Name(str(c), ast.Load())
+
+
+def set_location(node, lineno, col_offset):
+ """Set node location information recursively."""
+ def _fix(node, lineno, col_offset):
+ if "lineno" in node._attributes:
+ node.lineno = lineno
+ if "col_offset" in node._attributes:
+ node.col_offset = col_offset
+ for child in ast.iter_child_nodes(node):
+ _fix(child, lineno, col_offset)
+ _fix(node, lineno, col_offset)
+ return node
+
+
+class AssertionRewriter(ast.NodeVisitor):
+ """Assertion rewriting implementation.
+
+    The main entrypoint is to call .run() with an ast.Module instance;
+    this will then find all the assert statements and re-write them to
+ provide intermediate values and a detailed assertion error. See
+ http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
+ for an overview of how this works.
+
+ The entry point here is .run() which will iterate over all the
+ statements in an ast.Module and for each ast.Assert statement it
+ finds call .visit() with it. Then .visit_Assert() takes over and
+ is responsible for creating new ast statements to replace the
+ original assert statement: it re-writes the test of an assertion
+ to provide intermediate values and replace it with an if statement
+ which raises an assertion error with a detailed explanation in
+ case the expression is false.
+
+ For this .visit_Assert() uses the visitor pattern to visit all the
+ AST nodes of the ast.Assert.test field, each visit call returning
+    an AST node and the corresponding explanation string. During this,
+    state is kept in several instance attributes:
+
+ :statements: All the AST statements which will replace the assert
+ statement.
+
+ :variables: This is populated by .variable() with each variable
+ used by the statements so that they can all be set to None at
+ the end of the statements.
+
+ :variable_counter: Counter to create new unique variables needed
+ by statements. Variables are created using .variable() and
+ have the form of "@py_assert0".
+
+ :on_failure: The AST statements which will be executed if the
+       assertion test fails. This is the code which constructs the
+       failure message and raises the AssertionError.
+
+ :explanation_specifiers: A dict filled by .explanation_param()
+ with %-formatting placeholders and their corresponding
+ expressions to use in the building of an assertion message.
+ This is used by .pop_format_context() to build a message.
+
+ :stack: A stack of the explanation_specifiers dicts maintained by
+ .push_format_context() and .pop_format_context() which allows
+       building another %-formatted string while one is already being built.
+
+ This state is reset on every new assert statement visited and used
+ by the other visitors.
+
+ """
+
+ def run(self, mod):
+ """Find all assert statements in *mod* and rewrite them."""
+ if not mod.body:
+ # Nothing to do.
+ return
+ # Insert some special imports at the top of the module but after any
+ # docstrings and __future__ imports.
+ aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
+ ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
+ expect_docstring = True
+ pos = 0
+ lineno = 0
+ for item in mod.body:
+ if (expect_docstring and isinstance(item, ast.Expr) and
+ isinstance(item.value, ast.Str)):
+ doc = item.value.s
+ if "PYTEST_DONT_REWRITE" in doc:
+ # The module has disabled assertion rewriting.
+ return
+ lineno += len(doc) - 1
+ expect_docstring = False
+ elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
+ item.module != "__future__"):
+ lineno = item.lineno
+ break
+ pos += 1
+ imports = [ast.Import([alias], lineno=lineno, col_offset=0)
+ for alias in aliases]
+ mod.body[pos:pos] = imports
+ # Collect asserts.
+ nodes = [mod]
+ while nodes:
+ node = nodes.pop()
+ for name, field in ast.iter_fields(node):
+ if isinstance(field, list):
+ new = []
+ for i, child in enumerate(field):
+ if isinstance(child, ast.Assert):
+ # Transform assert.
+ new.extend(self.visit(child))
+ else:
+ new.append(child)
+ if isinstance(child, ast.AST):
+ nodes.append(child)
+ setattr(node, name, new)
+ elif (isinstance(field, ast.AST) and
+ # Don't recurse into expressions as they can't contain
+ # asserts.
+ not isinstance(field, ast.expr)):
+ nodes.append(field)
+
+ def variable(self):
+ """Get a new variable."""
+ # Use a character invalid in python identifiers to avoid clashing.
+ name = "@py_assert" + str(next(self.variable_counter))
+ self.variables.append(name)
+ return name
+
+ def assign(self, expr):
+ """Give *expr* a name."""
+ name = self.variable()
+ self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
+ return ast.Name(name, ast.Load())
+
+ def display(self, expr):
+ """Call py.io.saferepr on the expression."""
+ return self.helper("saferepr", expr)
+
+ def helper(self, name, *args):
+ """Call a helper in this module."""
+ py_name = ast.Name("@pytest_ar", ast.Load())
+ attr = ast.Attribute(py_name, "_" + name, ast.Load())
+ return ast_Call(attr, list(args), [])
+
+ def builtin(self, name):
+ """Return the builtin called *name*."""
+ builtin_name = ast.Name("@py_builtins", ast.Load())
+ return ast.Attribute(builtin_name, name, ast.Load())
+
+ def explanation_param(self, expr):
+ """Return a new named %-formatting placeholder for expr.
+
+ This creates a %-formatting placeholder for expr in the
+ current formatting context, e.g. ``%(py0)s``. The placeholder
+ and expr are placed in the current format context so that it
+ can be used on the next call to .pop_format_context().
+
+ """
+ specifier = "py" + str(next(self.variable_counter))
+ self.explanation_specifiers[specifier] = expr
+ return "%(" + specifier + ")s"
+
+ def push_format_context(self):
+ """Create a new formatting context.
+
+ The format context is used for when an explanation wants to
+ have a variable value formatted in the assertion message. In
+ this case the value required can be added using
+ .explanation_param(). Finally .pop_format_context() is used
+ to format a string of %-formatted values as added by
+ .explanation_param().
+
+ """
+ self.explanation_specifiers = {}
+ self.stack.append(self.explanation_specifiers)
+
+ def pop_format_context(self, expl_expr):
+ """Format the %-formatted string with current format context.
+
+ The expl_expr should be an ast.Str instance constructed from
+ the %-placeholders created by .explanation_param(). This will
+ add the required code to format said string to .on_failure and
+ return the ast.Name instance of the formatted string.
+
+ """
+ current = self.stack.pop()
+ if self.stack:
+ self.explanation_specifiers = self.stack[-1]
+ keys = [ast.Str(key) for key in current.keys()]
+ format_dict = ast.Dict(keys, list(current.values()))
+ form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
+ name = "@py_format" + str(next(self.variable_counter))
+ self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
+ return ast.Name(name, ast.Load())
+
+ def generic_visit(self, node):
+ """Handle expressions we don't have custom code for."""
+ assert isinstance(node, ast.expr)
+ res = self.assign(node)
+ return res, self.explanation_param(self.display(res))
+
+ def visit_Assert(self, assert_):
+ """Return the AST statements to replace the ast.Assert instance.
+
+ This re-writes the test of an assertion to provide
+ intermediate values and replace it with an if statement which
+ raises an assertion error with a detailed explanation in case
+ the expression is false.
+
+ """
+ self.statements = []
+ self.variables = []
+ self.variable_counter = itertools.count()
+ self.stack = []
+ self.on_failure = []
+ self.push_format_context()
+ # Rewrite assert into a bunch of statements.
+ top_condition, explanation = self.visit(assert_.test)
+ # Create failure message.
+ body = self.on_failure
+ negation = ast.UnaryOp(ast.Not(), top_condition)
+ self.statements.append(ast.If(negation, body, []))
+ if assert_.msg:
+ assertmsg = self.helper('format_assertmsg', assert_.msg)
+ explanation = "\n>assert " + explanation
+ else:
+ assertmsg = ast.Str("")
+ explanation = "assert " + explanation
+ template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
+ msg = self.pop_format_context(template)
+ fmt = self.helper("format_explanation", msg)
+ err_name = ast.Name("AssertionError", ast.Load())
+ exc = ast_Call(err_name, [fmt], [])
+ if sys.version_info[0] >= 3:
+ raise_ = ast.Raise(exc, None)
+ else:
+ raise_ = ast.Raise(exc, None, None)
+ body.append(raise_)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store())
+ for name in self.variables]
+ clear = ast.Assign(variables, _NameConstant(None))
+ self.statements.append(clear)
+ # Fix line numbers.
+ for stmt in self.statements:
+ set_location(stmt, assert_.lineno, assert_.col_offset)
+ return self.statements
+
+ def visit_Name(self, name):
+ # Display the repr of the name if it's a local variable or
+ # _should_repr_global_name() thinks it's acceptable.
+ locs = ast_Call(self.builtin("locals"), [], [])
+ inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
+ dorepr = self.helper("should_repr_global_name", name)
+ test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
+ expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
+ return name, self.explanation_param(expr)
+
+ def visit_BoolOp(self, boolop):
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
+ self.push_format_context()
+        # Process each operand, short-circuiting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+                # cond was set at the end of the previous loop iteration
+                self.on_failure.append(ast.If(cond, fail_inner, []))  # noqa
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast_Call(app, [expl_format], [])
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
+ expl = self.pop_format_context(expl_template)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_res, operand_expl = self.visit(unary.operand)
+ res = self.assign(ast.UnaryOp(unary.op, operand_res))
+ return res, pattern % (operand_expl,)
+
+ def visit_BinOp(self, binop):
+ symbol = binop_map[binop.op.__class__]
+ left_expr, left_expl = self.visit(binop.left)
+ right_expr, right_expl = self.visit(binop.right)
+ explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
+ res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
+ return res, explanation
+
+ def visit_Call_35(self, call):
+ """
+        visit `ast.Call` nodes on Python 3.5 and newer
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ arg_expls.append(expl)
+ new_args.append(res)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ if keyword.arg:
+ arg_expls.append(keyword.arg + "=" + expl)
+            else:  # **kwargs arguments appear as ast.keyword nodes with .arg of None
+ arg_expls.append("**" + expl)
+
+ expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+ def visit_Starred(self, starred):
+ # From Python 3.5, a Starred node can appear in a function call
+ res, expl = self.visit(starred.value)
+ return starred, '*' + expl
+
+ def visit_Call_legacy(self, call):
+ """
+        visit `ast.Call` nodes on Python 3.4 and below
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ new_star = new_kwarg = None
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ new_args.append(res)
+ arg_expls.append(expl)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ arg_expls.append(keyword.arg + "=" + expl)
+ if call.starargs:
+ new_star, expl = self.visit(call.starargs)
+ arg_expls.append("*" + expl)
+ if call.kwargs:
+ new_kwarg, expl = self.visit(call.kwargs)
+ arg_expls.append("**" + expl)
+ expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs,
+ new_star, new_kwarg)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+    # The ast.Call signature changed in Python 3.5; conditionally pick
+    # which method is exposed as visit_Call depending on the version.
+ if sys.version_info >= (3, 5):
+ visit_Call = visit_Call_35
+ else:
+ visit_Call = visit_Call_legacy
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ value, value_expl = self.visit(attr.value)
+ res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
+ res_expl = self.explanation_param(self.display(res))
+ pat = "%s\n{%s = %s.%s\n}"
+ expl = pat % (res_expl, res_expl, value_expl, attr.attr)
+ return res, expl
+
+ def visit_Compare(self, comp):
+ self.push_format_context()
+ left_res, left_expl = self.visit(comp.left)
+ res_variables = [self.variable() for i in range(len(comp.ops))]
+ load_names = [ast.Name(v, ast.Load()) for v in res_variables]
+ store_names = [ast.Name(v, ast.Store()) for v in res_variables]
+ it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
+ expls = []
+ syms = []
+ results = [left_res]
+ for i, op, next_operand in it:
+ next_res, next_expl = self.visit(next_operand)
+ results.append(next_res)
+ sym = binop_map[op.__class__]
+ syms.append(ast.Str(sym))
+ expl = "%s %s %s" % (left_expl, sym, next_expl)
+ expls.append(ast.Str(expl))
+ res_expr = ast.Compare(left_res, [op], [next_res])
+ self.statements.append(ast.Assign([store_names[i]], res_expr))
+ left_res, left_expl = next_res, next_expl
+ # Use pytest.assertion.util._reprcompare if that's available.
+ expl_call = self.helper("call_reprcompare",
+ ast.Tuple(syms, ast.Load()),
+ ast.Tuple(load_names, ast.Load()),
+ ast.Tuple(expls, ast.Load()),
+ ast.Tuple(results, ast.Load()))
+ if len(comp.ops) > 1:
+ res = ast.BoolOp(ast.And(), load_names)
+ else:
+ res = load_names[0]
+ return res, self.explanation_param(self.pop_format_context(expl_call))
diff --git a/python/pytest/_pytest/assertion/util.py b/python/pytest/_pytest/assertion/util.py
new file mode 100644
index 000000000..f2f23efea
--- /dev/null
+++ b/python/pytest/_pytest/assertion/util.py
@@ -0,0 +1,332 @@
+"""Utilities for assertion debugging"""
+import pprint
+
+import _pytest._code
+import py
+try:
+ from collections import Sequence
+except ImportError:
+ Sequence = list
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+u = py.builtin._totext
+
+# The _reprcompare attribute on the util module is used by the new assertion
+# interpretation code and assertion rewriter to detect this plugin was
+# loaded and in turn call the hooks defined here as part of the
+# DebugInterpreter.
+_reprcompare = None
+
+
+# the re-encoding is needed for python2 repr
+# with non-ascii characters (see issue 877 and 1379)
+def ecu(s):
+ try:
+ return u(s, 'utf-8', 'replace')
+ except TypeError:
+ return s
+
+
+def format_explanation(explanation):
+ """This formats an explanation
+
+    Normally all embedded newlines are escaped; however, there are
+    three exceptions: \n{, \n} and \n~. The first two are intended to
+    cover nested explanations, see function and attribute explanations
+    for examples (.visit_Call(), .visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ explanation = ecu(explanation)
+ explanation = _collapse_false(explanation)
+ lines = _split_explanation(explanation)
+ result = _format_lines(lines)
+ return u('\n').join(result)
+
+
+def _collapse_false(explanation):
+ """Collapse expansions of False
+
+ So this strips out any "assert False\n{where False = ...\n}"
+ blocks.
+ """
+ where = 0
+ while True:
+ start = where = explanation.find("False\n{False = ", where)
+ if where == -1:
+ break
+ level = 0
+ prev_c = explanation[start]
+ for i, c in enumerate(explanation[start:]):
+ if prev_c + c == "\n{":
+ level += 1
+ elif prev_c + c == "\n}":
+ level -= 1
+ if not level:
+ break
+ prev_c = c
+ else:
+ raise AssertionError("unbalanced braces: %r" % (explanation,))
+ end = start + i
+ where = end
+ if explanation[end - 1] == '\n':
+ explanation = (explanation[:start] + explanation[start+15:end-1] +
+ explanation[end+1:])
+ where -= 17
+ return explanation
+
+
+def _split_explanation(explanation):
+ """Return a list of individual lines in the explanation
+
+ This will return a list of lines split on '\n{', '\n}' and '\n~'.
+ Any other newlines will be escaped and appear in the line as the
+ literal '\n' characters.
+ """
+ raw_lines = (explanation or u('')).split('\n')
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l and l[0] in ['{', '}', '~', '>']:
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+ return lines
+
+
+def _format_lines(lines):
+ """Format the individual lines
+
+ This will replace the '{', '}' and '~' characters of our mini
+ formatting language with the proper 'where ...', 'and ...' and ' +
+ ...' text, taking care of indentation along the way.
+
+ Return a list of formatted lines.
+ """
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = u('and ')
+ else:
+ s = u('where ')
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line[0] in ['~', '>']
+ stack[-1] += 1
+ indent = len(stack) if line.startswith('~') else len(stack) - 1
+ result.append(u(' ')*indent + line[1:])
+ assert len(stack) == 1
+ return result
+
+
+# Provide basestring in python3
+try:
+ basestring = basestring
+except NameError:
+ basestring = str
+
+
+def assertrepr_compare(config, op, left, right):
+ """Return specialised explanations for some operators/operands"""
+    width = 80 - 15 - len(op) - 2  # 15 chars indentation, one space on each side of op
+ left_repr = py.io.saferepr(left, maxsize=int(width/2))
+ right_repr = py.io.saferepr(right, maxsize=width-len(left_repr))
+
+ summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr))
+
+ issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and
+ not isinstance(x, basestring))
+ istext = lambda x: isinstance(x, basestring)
+ isdict = lambda x: isinstance(x, dict)
+ isset = lambda x: isinstance(x, (set, frozenset))
+
+ def isiterable(obj):
+ try:
+ iter(obj)
+ return not istext(obj)
+ except TypeError:
+ return False
+
+ verbose = config.getoption('verbose')
+ explanation = None
+ try:
+ if op == '==':
+ if istext(left) and istext(right):
+ explanation = _diff_text(left, right, verbose)
+ else:
+ if issequence(left) and issequence(right):
+ explanation = _compare_eq_sequence(left, right, verbose)
+ elif isset(left) and isset(right):
+ explanation = _compare_eq_set(left, right, verbose)
+ elif isdict(left) and isdict(right):
+ explanation = _compare_eq_dict(left, right, verbose)
+ if isiterable(left) and isiterable(right):
+ expl = _compare_eq_iterable(left, right, verbose)
+ if explanation is not None:
+ explanation.extend(expl)
+ else:
+ explanation = expl
+ elif op == 'not in':
+ if istext(left) and istext(right):
+ explanation = _notin_text(left, right, verbose)
+ except Exception:
+ explanation = [
+ u('(pytest_assertion plugin: representation of details failed. '
+ 'Probably an object has a faulty __repr__.)'),
+ u(_pytest._code.ExceptionInfo())]
+
+ if not explanation:
+ return None
+
+ return [summary] + explanation
+
+
+def _diff_text(left, right, verbose=False):
+ """Return the explanation for the diff between text or bytes
+
+ Unless --verbose is used this will skip leading and trailing
+ characters which are identical to keep the diff minimal.
+
+ If the input are bytes they will be safely converted to text.
+ """
+ from difflib import ndiff
+ explanation = []
+ if isinstance(left, py.builtin.bytes):
+ left = u(repr(left)[1:-1]).replace(r'\n', '\n')
+ if isinstance(right, py.builtin.bytes):
+ right = u(repr(right)[1:-1]).replace(r'\n', '\n')
+ if not verbose:
+ i = 0 # just in case left or right has zero length
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation = [u('Skipping %s identical leading '
+ 'characters in diff, use -v to show') % i]
+ left = left[i:]
+ right = right[i:]
+ if len(left) == len(right):
+ for i in range(len(left)):
+ if left[-i] != right[-i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation += [u('Skipping %s identical trailing '
+ 'characters in diff, use -v to show') % i]
+ left = left[:-i]
+ right = right[:-i]
+ explanation += [line.strip('\n')
+ for line in ndiff(left.splitlines(),
+ right.splitlines())]
+ return explanation
+
+
+def _compare_eq_iterable(left, right, verbose=False):
+ if not verbose:
+ return [u('Use -v to get the full diff')]
+ # dynamic import to speedup pytest
+ import difflib
+
+ try:
+ left_formatting = pprint.pformat(left).splitlines()
+ right_formatting = pprint.pformat(right).splitlines()
+ explanation = [u('Full diff:')]
+ except Exception:
+        # hack: PrettyPrinter.pformat() in python 2 fails when formatting
+        # items that can't be sorted(), i.e. calling sorted() on a list
+        # would raise. See issue #718.
+        # As a workaround, the full diff is generated by using the repr()
+        # string of each item of each container.
+ left_formatting = sorted(repr(x) for x in left)
+ right_formatting = sorted(repr(x) for x in right)
+ explanation = [u('Full diff (fallback to calling repr on each item):')]
+ explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting))
+ return explanation
+
+
+def _compare_eq_sequence(left, right, verbose=False):
+ explanation = []
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ explanation += [u('At index %s diff: %r != %r')
+ % (i, left[i], right[i])]
+ break
+ if len(left) > len(right):
+ explanation += [u('Left contains more items, first extra item: %s')
+ % py.io.saferepr(left[len(right)],)]
+ elif len(left) < len(right):
+ explanation += [
+ u('Right contains more items, first extra item: %s') %
+ py.io.saferepr(right[len(left)],)]
+ return explanation
+
+
+def _compare_eq_set(left, right, verbose=False):
+ explanation = []
+ diff_left = left - right
+ diff_right = right - left
+ if diff_left:
+ explanation.append(u('Extra items in the left set:'))
+ for item in diff_left:
+ explanation.append(py.io.saferepr(item))
+ if diff_right:
+ explanation.append(u('Extra items in the right set:'))
+ for item in diff_right:
+ explanation.append(py.io.saferepr(item))
+ return explanation
+
+
+def _compare_eq_dict(left, right, verbose=False):
+ explanation = []
+ common = set(left).intersection(set(right))
+ same = dict((k, left[k]) for k in common if left[k] == right[k])
+ if same and not verbose:
+ explanation += [u('Omitting %s identical items, use -v to show') %
+ len(same)]
+ elif same:
+ explanation += [u('Common items:')]
+ explanation += pprint.pformat(same).splitlines()
+ diff = set(k for k in common if left[k] != right[k])
+ if diff:
+ explanation += [u('Differing items:')]
+ for k in diff:
+ explanation += [py.io.saferepr({k: left[k]}) + ' != ' +
+ py.io.saferepr({k: right[k]})]
+ extra_left = set(left) - set(right)
+ if extra_left:
+ explanation.append(u('Left contains more items:'))
+ explanation.extend(pprint.pformat(
+ dict((k, left[k]) for k in extra_left)).splitlines())
+ extra_right = set(right) - set(left)
+ if extra_right:
+ explanation.append(u('Right contains more items:'))
+ explanation.extend(pprint.pformat(
+ dict((k, right[k]) for k in extra_right)).splitlines())
+ return explanation
+
+
+def _notin_text(term, text, verbose=False):
+ index = text.find(term)
+ head = text[:index]
+ tail = text[index+len(term):]
+ correct_text = head + tail
+ diff = _diff_text(correct_text, text, verbose)
+ newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)]
+ for line in diff:
+ if line.startswith(u('Skipping')):
+ continue
+ if line.startswith(u('- ')):
+ continue
+ if line.startswith(u('+ ')):
+ newdiff.append(u(' ') + line[2:])
+ else:
+ newdiff.append(line)
+ return newdiff
diff --git a/python/pytest/_pytest/cacheprovider.py b/python/pytest/_pytest/cacheprovider.py
new file mode 100755
index 000000000..0657001f2
--- /dev/null
+++ b/python/pytest/_pytest/cacheprovider.py
@@ -0,0 +1,245 @@
+"""
+merged implementation of the cache provider
+
+the name "cache" was deliberately not chosen, to ensure pluggy
+automatically ignores the external pytest-cache plugin
+"""
+
+import py
+import pytest
+import json
+from os.path import sep as _sep, altsep as _altsep
+
+
+class Cache(object):
+ def __init__(self, config):
+ self.config = config
+ self._cachedir = config.rootdir.join(".cache")
+ self.trace = config.trace.root.get("cache")
+ if config.getvalue("cacheclear"):
+ self.trace("clearing cachedir")
+ if self._cachedir.check():
+ self._cachedir.remove()
+ self._cachedir.mkdir()
+
+ def makedir(self, name):
+ """ return a directory path object with the given name. If the
+ directory does not yet exist, it will be created. You can use it
+ to manage files likes e. g. store/retrieve database
+ dumps across test sessions.
+
+ :param name: must be a string not containing a ``/`` separator.
+ Make sure the name contains your plugin or application
+ identifiers to prevent clashes with other cache users.
+ """
+ if _sep in name or _altsep is not None and _altsep in name:
+ raise ValueError("name is not allowed to contain path separators")
+ return self._cachedir.ensure_dir("d", name)
+
+ def _getvaluepath(self, key):
+ return self._cachedir.join('v', *key.split('/'))
+
+ def get(self, key, default):
+ """ return cached value for the given key. If no value
+ was yet cached or the value cannot be read, the specified
+ default is returned.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param default: must be provided in case of a cache-miss or
+ invalid cache values.
+
+ """
+ path = self._getvaluepath(key)
+ if path.check():
+ try:
+ with path.open("r") as f:
+ return json.load(f)
+ except ValueError:
+ self.trace("cache-invalid at %s" % (path,))
+ return default
+
+ def set(self, key, value):
+ """ save value for the given key.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+        :param value: must be composed of basic python types,
+               including nested types like e.g. lists of
+               dictionaries.
+ """
+ path = self._getvaluepath(key)
+ try:
+ path.dirpath().ensure_dir()
+ except (py.error.EEXIST, py.error.EACCES):
+ self.config.warn(
+ code='I9', message='could not create cache path %s' % (path,)
+ )
+ return
+ try:
+ f = path.open('w')
+ except py.error.ENOTDIR:
+ self.config.warn(
+ code='I9', message='cache could not write path %s' % (path,))
+ else:
+ with f:
+ self.trace("cache-write %s: %r" % (key, value,))
+ json.dump(value, f, indent=2, sort_keys=True)
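+
+    # Illustrative usage (hypothetical key and value):
+    #   config.cache.set("myplugin/lastrun", {"passed": 10})
+    #   config.cache.get("myplugin/lastrun", {})  # -> {"passed": 10}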
+
+
+class LFPlugin:
+ """ Plugin which implements the --lf (run last-failing) option """
+ def __init__(self, config):
+ self.config = config
+ active_keys = 'lf', 'failedfirst'
+ self.active = any(config.getvalue(key) for key in active_keys)
+ if self.active:
+ self.lastfailed = config.cache.get("cache/lastfailed", {})
+ else:
+ self.lastfailed = {}
+
+ def pytest_report_header(self):
+ if self.active:
+ if not self.lastfailed:
+ mode = "run all (no recorded failures)"
+ else:
+ mode = "rerun last %d failures%s" % (
+ len(self.lastfailed),
+ " first" if self.config.getvalue("failedfirst") else "")
+ return "run-last-failure: %s" % mode
+
+ def pytest_runtest_logreport(self, report):
+ if report.failed and "xfail" not in report.keywords:
+ self.lastfailed[report.nodeid] = True
+ elif not report.failed:
+ if report.when == "call":
+ self.lastfailed.pop(report.nodeid, None)
+
+ def pytest_collectreport(self, report):
+ passed = report.outcome in ('passed', 'skipped')
+ if passed:
+ if report.nodeid in self.lastfailed:
+ self.lastfailed.pop(report.nodeid)
+ self.lastfailed.update(
+ (item.nodeid, True)
+ for item in report.result)
+ else:
+ self.lastfailed[report.nodeid] = True
+
+ def pytest_collection_modifyitems(self, session, config, items):
+ if self.active and self.lastfailed:
+ previously_failed = []
+ previously_passed = []
+ for item in items:
+ if item.nodeid in self.lastfailed:
+ previously_failed.append(item)
+ else:
+ previously_passed.append(item)
+ if not previously_failed and previously_passed:
+ # running a subset of all tests with recorded failures outside
+ # of the set of tests currently executing
+ pass
+ elif self.config.getvalue("failedfirst"):
+ items[:] = previously_failed + previously_passed
+ else:
+ items[:] = previously_failed
+ config.hook.pytest_deselected(items=previously_passed)
+
+ def pytest_sessionfinish(self, session):
+ config = self.config
+ if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
+ return
+ prev_failed = config.cache.get("cache/lastfailed", None) is not None
+ if (session.testscollected and prev_failed) or self.lastfailed:
+ config.cache.set("cache/lastfailed", self.lastfailed)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption(
+ '--lf', '--last-failed', action='store_true', dest="lf",
+ help="rerun only the tests that failed "
+ "at the last run (or all if none failed)")
+ group.addoption(
+ '--ff', '--failed-first', action='store_true', dest="failedfirst",
+ help="run all tests but run the last failures first. "
+ "This may re-order tests and thus lead to "
+ "repeated fixture setup/teardown")
+ group.addoption(
+ '--cache-show', action='store_true', dest="cacheshow",
+ help="show cache contents, don't perform collection or tests")
+ group.addoption(
+ '--cache-clear', action='store_true', dest="cacheclear",
+ help="remove all cache contents at start of test run.")
+
+
+def pytest_cmdline_main(config):
+ if config.option.cacheshow:
+ from _pytest.main import wrap_session
+ return wrap_session(config, cacheshow)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_configure(config):
+ config.cache = Cache(config)
+ config.pluginmanager.register(LFPlugin(config), "lfplugin")
+
+
+@pytest.fixture
+def cache(request):
+ """
+ Return a cache object that can persist state between testing sessions.
+
+ cache.get(key, default)
+ cache.set(key, value)
+
+ Keys must be a ``/`` separated value, where the first part is usually the
+ name of your plugin or application to avoid clashes with other cache users.
+
+ Values can be any object handled by the json stdlib module.
+ """
+ return request.config.cache
+
+
+def pytest_report_header(config):
+ if config.option.verbose:
+ relpath = py.path.local().bestrelpath(config.cache._cachedir)
+ return "cachedir: %s" % relpath
+
+
+def cacheshow(config, session):
+ from pprint import pprint
+ tw = py.io.TerminalWriter()
+ tw.line("cachedir: " + str(config.cache._cachedir))
+ if not config.cache._cachedir.check():
+ tw.line("cache is empty")
+ return 0
+ dummy = object()
+ basedir = config.cache._cachedir
+ vdir = basedir.join("v")
+ tw.sep("-", "cache values")
+ for valpath in vdir.visit(lambda x: x.isfile()):
+ key = valpath.relto(vdir).replace(valpath.sep, "/")
+ val = config.cache.get(key, dummy)
+ if val is dummy:
+ tw.line("%s contains unreadable content, "
+ "will be ignored" % key)
+ else:
+ tw.line("%s contains:" % key)
+ stream = py.io.TextIO()
+ pprint(val, stream=stream)
+ for line in stream.getvalue().splitlines():
+ tw.line(" " + line)
+
+ ddir = basedir.join("d")
+ if ddir.isdir() and ddir.listdir():
+ tw.sep("-", "cache directories")
+        for p in ddir.visit():
+ if p.isfile():
+ key = p.relto(basedir)
+ tw.line("%s is a file of length %d" % (
+ key, p.size()))
+ return 0
diff --git a/python/pytest/_pytest/capture.py b/python/pytest/_pytest/capture.py
new file mode 100644
index 000000000..3895a714a
--- /dev/null
+++ b/python/pytest/_pytest/capture.py
@@ -0,0 +1,472 @@
+"""
+per-test stdout/stderr capturing mechanism.
+
+"""
+from __future__ import with_statement
+
+import sys
+import os
+from tempfile import TemporaryFile
+
+import py
+import pytest
+
+from py.io import TextIO
+unicode = py.builtin.text
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ '--capture', action="store",
+ default="fd" if hasattr(os, "dup") else "sys",
+ metavar="method", choices=['fd', 'sys', 'no'],
+ help="per-test capturing method: one of fd|sys|no.")
+ group._addoption(
+ '-s', action="store_const", const="no", dest="capture",
+ help="shortcut for --capture=no.")
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_load_initial_conftests(early_config, parser, args):
+ _readline_workaround()
+ ns = early_config.known_args_namespace
+ pluginmanager = early_config.pluginmanager
+ capman = CaptureManager(ns.capture)
+ pluginmanager.register(capman, "capturemanager")
+
+ # make sure that capturemanager is properly reset at final shutdown
+ early_config.add_cleanup(capman.reset_capturings)
+
+ # make sure logging does not raise exceptions at the end
+ def silence_logging_at_shutdown():
+ if "logging" in sys.modules:
+ sys.modules["logging"].raiseExceptions = False
+ early_config.add_cleanup(silence_logging_at_shutdown)
+
+ # finally trigger conftest loading but while capturing (issue93)
+ capman.init_capturings()
+ outcome = yield
+ out, err = capman.suspendcapture()
+ if outcome.excinfo is not None:
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+class CaptureManager:
+ def __init__(self, method):
+ self._method = method
+
+ def _getcapture(self, method):
+ if method == "fd":
+ return MultiCapture(out=True, err=True, Capture=FDCapture)
+ elif method == "sys":
+ return MultiCapture(out=True, err=True, Capture=SysCapture)
+ elif method == "no":
+ return MultiCapture(out=False, err=False, in_=False)
+ else:
+ raise ValueError("unknown capturing method: %r" % method)
+
+ def init_capturings(self):
+ assert not hasattr(self, "_capturing")
+ self._capturing = self._getcapture(self._method)
+ self._capturing.start_capturing()
+
+ def reset_capturings(self):
+ cap = self.__dict__.pop("_capturing", None)
+ if cap is not None:
+ cap.pop_outerr_to_orig()
+ cap.stop_capturing()
+
+ def resumecapture(self):
+ self._capturing.resume_capturing()
+
+ def suspendcapture(self, in_=False):
+ self.deactivate_funcargs()
+ cap = getattr(self, "_capturing", None)
+ if cap is not None:
+ try:
+ outerr = cap.readouterr()
+ finally:
+ cap.suspend_capturing(in_=in_)
+ return outerr
+
+ def activate_funcargs(self, pyfuncitem):
+ capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None)
+ if capfuncarg is not None:
+ capfuncarg._start()
+ self._capfuncarg = capfuncarg
+
+ def deactivate_funcargs(self):
+ capfuncarg = self.__dict__.pop("_capfuncarg", None)
+ if capfuncarg is not None:
+ capfuncarg.close()
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_make_collect_report(self, collector):
+ if isinstance(collector, pytest.File):
+ self.resumecapture()
+ outcome = yield
+ out, err = self.suspendcapture()
+ rep = outcome.get_result()
+ if out:
+ rep.sections.append(("Captured stdout", out))
+ if err:
+ rep.sections.append(("Captured stderr", err))
+ else:
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item):
+ self.resumecapture()
+ yield
+ self.suspendcapture_item(item, "setup")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item):
+ self.resumecapture()
+ self.activate_funcargs(item)
+ yield
+ #self.deactivate_funcargs() called from suspendcapture()
+ self.suspendcapture_item(item, "call")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item):
+ self.resumecapture()
+ yield
+ self.suspendcapture_item(item, "teardown")
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_keyboard_interrupt(self, excinfo):
+ self.reset_capturings()
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_internalerror(self, excinfo):
+ self.reset_capturings()
+
+ def suspendcapture_item(self, item, when):
+ out, err = self.suspendcapture()
+ item.add_report_section(when, "stdout", out)
+ item.add_report_section(when, "stderr", err)
+
+error_capsysfderror = "cannot use capsys and capfd at the same time"
+
+
+@pytest.fixture
+def capsys(request):
+ """enables capturing of writes to sys.stdout/sys.stderr and makes
+ captured output available via ``capsys.readouterr()`` method calls
+ which return a ``(out, err)`` tuple.
+ """
+ if "capfd" in request._funcargs:
+ raise request.raiseerror(error_capsysfderror)
+ request.node._capfuncarg = c = CaptureFixture(SysCapture)
+ return c
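+
+# Illustrative use from a test function (hypothetical example):
+#
+#     def test_print(capsys):
+#         print("hello")
+#         out, err = capsys.readouterr()
+#         assert out == "hello\n"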
+
+@pytest.fixture
+def capfd(request):
+ """enables capturing of writes to file descriptors 1 and 2 and makes
+ captured output available via ``capfd.readouterr()`` method calls
+ which return a ``(out, err)`` tuple.
+ """
+ if "capsys" in request._funcargs:
+ request.raiseerror(error_capsysfderror)
+ if not hasattr(os, 'dup'):
+ pytest.skip("capfd funcarg needs os.dup")
+ request.node._capfuncarg = c = CaptureFixture(FDCapture)
+ return c
+
+
+class CaptureFixture:
+ def __init__(self, captureclass):
+ self.captureclass = captureclass
+
+ def _start(self):
+ self._capture = MultiCapture(out=True, err=True, in_=False,
+ Capture=self.captureclass)
+ self._capture.start_capturing()
+
+ def close(self):
+ cap = self.__dict__.pop("_capture", None)
+ if cap is not None:
+ self._outerr = cap.pop_outerr_to_orig()
+ cap.stop_capturing()
+
+ def readouterr(self):
+ try:
+ return self._capture.readouterr()
+ except AttributeError:
+ return self._outerr
+
+
+def safe_text_dupfile(f, mode, default_encoding="UTF8"):
+ """ return a open text file object that's a duplicate of f on the
+ FD-level if possible.
+ """
+ encoding = getattr(f, "encoding", None)
+ try:
+ fd = f.fileno()
+ except Exception:
+ if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"):
+ # we seem to have a text stream, let's just use it
+ return f
+ else:
+ newfd = os.dup(fd)
+ if "b" not in mode:
+ mode += "b"
+ f = os.fdopen(newfd, mode, 0) # no buffering
+ return EncodedFile(f, encoding or default_encoding)
+
+
+class EncodedFile(object):
+ errors = "strict" # possibly needed by py3 code (issue555)
+ def __init__(self, buffer, encoding):
+ self.buffer = buffer
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding, "replace")
+ self.buffer.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(object.__getattribute__(self, "buffer"), name)
+
+
+class MultiCapture(object):
+ out = err = in_ = None
+
+ def __init__(self, out=True, err=True, in_=True, Capture=None):
+ if in_:
+ self.in_ = Capture(0)
+ if out:
+ self.out = Capture(1)
+ if err:
+ self.err = Capture(2)
+
+ def start_capturing(self):
+ if self.in_:
+ self.in_.start()
+ if self.out:
+ self.out.start()
+ if self.err:
+ self.err.start()
+
+ def pop_outerr_to_orig(self):
+ """ pop current snapshot out/err capture and flush to orig streams. """
+ out, err = self.readouterr()
+ if out:
+ self.out.writeorg(out)
+ if err:
+ self.err.writeorg(err)
+ return out, err
+
+ def suspend_capturing(self, in_=False):
+ if self.out:
+ self.out.suspend()
+ if self.err:
+ self.err.suspend()
+ if in_ and self.in_:
+ self.in_.suspend()
+ self._in_suspended = True
+
+ def resume_capturing(self):
+ if self.out:
+ self.out.resume()
+ if self.err:
+ self.err.resume()
+ if hasattr(self, "_in_suspended"):
+ self.in_.resume()
+ del self._in_suspended
+
+ def stop_capturing(self):
+ """ stop capturing and reset capturing streams """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already stopped")
+ self._reset = True
+ if self.out:
+ self.out.done()
+ if self.err:
+ self.err.done()
+ if self.in_:
+ self.in_.done()
+
+ def readouterr(self):
+ """ return snapshot unicode value of stdout/stderr capturings. """
+ return (self.out.snap() if self.out is not None else "",
+ self.err.snap() if self.err is not None else "")
+
+
+class NoCapture:
+    __init__ = start = done = suspend = resume = lambda *args: None
+
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None):
+ self.targetfd = targetfd
+ try:
+ self.targetfd_save = os.dup(self.targetfd)
+ except OSError:
+ self.start = lambda: None
+ self.done = lambda: None
+ else:
+ if targetfd == 0:
+ assert not tmpfile, "cannot set tmpfile with stdin"
+ tmpfile = open(os.devnull, "r")
+ self.syscapture = SysCapture(targetfd)
+ else:
+ if tmpfile is None:
+ f = TemporaryFile()
+ with f:
+ tmpfile = safe_text_dupfile(f, mode="wb+")
+ if targetfd in patchsysdict:
+ self.syscapture = SysCapture(targetfd, tmpfile)
+ else:
+ self.syscapture = NoCapture()
+ self.tmpfile = tmpfile
+ self.tmpfile_fd = tmpfile.fileno()
+
+ def __repr__(self):
+ return "<FDCapture %s oldfd=%s>" % (self.targetfd, self.targetfd_save)
+
+ def start(self):
+ """ Start capturing on targetfd using memorized tmpfile. """
+ try:
+ os.fstat(self.targetfd_save)
+ except (AttributeError, OSError):
+ raise ValueError("saved filedescriptor not valid anymore")
+ os.dup2(self.tmpfile_fd, self.targetfd)
+ self.syscapture.start()
+
+ def snap(self):
+ f = self.tmpfile
+ f.seek(0)
+ res = f.read()
+ if res:
+ enc = getattr(f, "encoding", None)
+ if enc and isinstance(res, bytes):
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+ return ''
+
+ def done(self):
+ """ stop capturing, restore streams, return original capture file,
+ seeked to position zero. """
+ targetfd_save = self.__dict__.pop("targetfd_save")
+ os.dup2(targetfd_save, self.targetfd)
+ os.close(targetfd_save)
+ self.syscapture.done()
+ self.tmpfile.close()
+
+ def suspend(self):
+ self.syscapture.suspend()
+ os.dup2(self.targetfd_save, self.targetfd)
+
+ def resume(self):
+ self.syscapture.resume()
+ os.dup2(self.tmpfile_fd, self.targetfd)
+
+ def writeorg(self, data):
+ """ write to original file descriptor. """
+ if py.builtin._istext(data):
+ data = data.encode("utf8") # XXX use encoding of original stream
+ os.write(self.targetfd_save, data)
+
+
+class SysCapture:
+ def __init__(self, fd, tmpfile=None):
+ name = patchsysdict[fd]
+ self._old = getattr(sys, name)
+ self.name = name
+ if tmpfile is None:
+ if name == "stdin":
+ tmpfile = DontReadFromInput()
+ else:
+ tmpfile = TextIO()
+ self.tmpfile = tmpfile
+
+ def start(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def snap(self):
+ f = self.tmpfile
+ res = f.getvalue()
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+ def done(self):
+ setattr(sys, self.name, self._old)
+ del self._old
+ self.tmpfile.close()
+
+ def suspend(self):
+ setattr(sys, self.name, self._old)
+
+ def resume(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def writeorg(self, data):
+ self._old.write(data)
+ self._old.flush()
+
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+
+ encoding = None
+
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+
+ def isatty(self):
+ return False
+
+ def close(self):
+ pass
+
+
+def _readline_workaround():
+ """
+ Ensure readline is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+    Pdb uses readline support where available; when not running from the
+    Python prompt, the readline module is not imported until the pdb REPL is
+    entered. If running py.test with the --pdb option this means the readline
+    module is not imported until after I/O capture has been started.
+
+    This is a problem for pyreadline, which is often used to implement readline
+    support on Windows, as it does not attach to the correct handles for stdout
+    and/or stdin if they have been redirected by the FDCapture mechanism. This
+    workaround ensures that readline is imported before I/O capture is set up
+    so that it can attach to the actual stdin/out for the console.
+
+ See https://github.com/pytest-dev/pytest/pull/1281
+ """
+
+ if not sys.platform.startswith('win32'):
+ return
+ try:
+ import readline # noqa
+ except ImportError:
+ pass
diff --git a/python/pytest/_pytest/config.py b/python/pytest/_pytest/config.py
new file mode 100644
index 000000000..9a308df2b
--- /dev/null
+++ b/python/pytest/_pytest/config.py
@@ -0,0 +1,1192 @@
+""" command line options, ini-file and conftest.py processing. """
+import argparse
+import shlex
+import traceback
+import types
+import warnings
+
+import py
+# DON'T import pytest here because it causes import cycle troubles
+import sys
+import os
+import _pytest._code
+import _pytest.hookspec # the extension point definitions
+from _pytest._pluggy import PluginManager, HookimplMarker, HookspecMarker
+
+hookimpl = HookimplMarker("pytest")
+hookspec = HookspecMarker("pytest")
+
+# pytest startup
+#
+
+
+class ConftestImportFailure(Exception):
+ def __init__(self, path, excinfo):
+ Exception.__init__(self, path, excinfo)
+ self.path = path
+ self.excinfo = excinfo
+
+
+def main(args=None, plugins=None):
+ """ return exit code, after performing an in-process test run.
+
+ :arg args: list of command line arguments.
+
+ :arg plugins: list of plugin objects to be auto-registered during
+ initialization.
+ """
+ try:
+ try:
+ config = _prepareconfig(args, plugins)
+ except ConftestImportFailure as e:
+ tw = py.io.TerminalWriter(sys.stderr)
+ for line in traceback.format_exception(*e.excinfo):
+ tw.line(line.rstrip(), red=True)
+ tw.line("ERROR: could not load %s\n" % (e.path), red=True)
+ return 4
+ else:
+ try:
+ config.pluginmanager.check_pending()
+ return config.hook.pytest_cmdline_main(config=config)
+ finally:
+ config._ensure_unconfigure()
+ except UsageError as e:
+ for msg in e.args:
+ sys.stderr.write("ERROR: %s\n" %(msg,))
+ return 4
+
+class cmdline: # compatibility namespace
+ main = staticmethod(main)
+
+class UsageError(Exception):
+ """ error in pytest usage or invocation"""
+
+_preinit = []
+
+default_plugins = (
+ "mark main terminal runner python pdb unittest capture skipping "
+ "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion genscript "
+ "junitxml resultlog doctest cacheprovider").split()
+
+builtin_plugins = set(default_plugins)
+builtin_plugins.add("pytester")
+
+
+def _preloadplugins():
+ assert not _preinit
+ _preinit.append(get_config())
+
+def get_config():
+ if _preinit:
+ return _preinit.pop(0)
+ # subsequent calls to main will create a fresh instance
+ pluginmanager = PytestPluginManager()
+ config = Config(pluginmanager)
+ for spec in default_plugins:
+ pluginmanager.import_plugin(spec)
+ return config
+
+def get_plugin_manager():
+ """
+ Obtain a new instance of the
+ :py:class:`_pytest.config.PytestPluginManager`, with default plugins
+ already loaded.
+
+    This function can be used for integration with other tools, e.g.
+    hooking into pytest to run tests from an IDE.
+ """
+ return get_config().pluginmanager
+
+def _prepareconfig(args=None, plugins=None):
+ if args is None:
+ args = sys.argv[1:]
+ elif isinstance(args, py.path.local):
+ args = [str(args)]
+ elif not isinstance(args, (tuple, list)):
+ if not isinstance(args, str):
+ raise ValueError("not a string or argument list: %r" % (args,))
+ args = shlex.split(args, posix=sys.platform != "win32")
+ config = get_config()
+ pluginmanager = config.pluginmanager
+ try:
+ if plugins:
+ for plugin in plugins:
+ if isinstance(plugin, py.builtin._basestring):
+ pluginmanager.consider_pluginarg(plugin)
+ else:
+ pluginmanager.register(plugin)
+ return pluginmanager.hook.pytest_cmdline_parse(
+ pluginmanager=pluginmanager, args=args)
+ except BaseException:
+ config._ensure_unconfigure()
+ raise
+
+
+class PytestPluginManager(PluginManager):
+ """
+ Overwrites :py:class:`pluggy.PluginManager` to add pytest-specific
+ functionality:
+
+    * loading plugins from the command line, the ``PYTEST_PLUGINS`` env
+      variable and the ``pytest_plugins`` global variable found in plugins
+      being loaded;
+ * ``conftest.py`` loading during start-up;
+ """
+ def __init__(self):
+ super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_")
+ self._conftest_plugins = set()
+
+ # state related to local conftest plugins
+ self._path2confmods = {}
+ self._conftestpath2mod = {}
+ self._confcutdir = None
+ self._noconftest = False
+
+ self.add_hookspecs(_pytest.hookspec)
+ self.register(self)
+ if os.environ.get('PYTEST_DEBUG'):
+ err = sys.stderr
+ encoding = getattr(err, 'encoding', 'utf8')
+ try:
+ err = py.io.dupfile(err, encoding=encoding)
+ except Exception:
+ pass
+ self.trace.root.setwriter(err.write)
+ self.enable_tracing()
+
+ def addhooks(self, module_or_class):
+ """
+ .. deprecated:: 2.8
+
+ Use :py:meth:`pluggy.PluginManager.add_hookspecs` instead.
+ """
+ warning = dict(code="I2",
+ fslocation=_pytest._code.getfslineno(sys._getframe(1)),
+ nodeid=None,
+ message="use pluginmanager.add_hookspecs instead of "
+ "deprecated addhooks() method.")
+ self._warn(warning)
+ return self.add_hookspecs(module_or_class)
+
+ def parse_hookimpl_opts(self, plugin, name):
+ # pytest hooks are always prefixed with pytest_
+ # so we avoid accessing possibly non-readable attributes
+ # (see issue #1073)
+ if not name.startswith("pytest_"):
+ return
+ # ignore some historic special names which can not be hooks anyway
+ if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
+ return
+
+ method = getattr(plugin, name)
+ opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
+ if opts is not None:
+ for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
+ opts.setdefault(name, hasattr(method, name))
+ return opts
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ opts = super(PytestPluginManager, self).parse_hookspec_opts(
+ module_or_class, name)
+ if opts is None:
+ method = getattr(module_or_class, name)
+ if name.startswith("pytest_"):
+ opts = {"firstresult": hasattr(method, "firstresult"),
+ "historic": hasattr(method, "historic")}
+ return opts
+
+ def _verify_hook(self, hook, hookmethod):
+ super(PytestPluginManager, self)._verify_hook(hook, hookmethod)
+ if "__multicall__" in hookmethod.argnames:
+ fslineno = _pytest._code.getfslineno(hookmethod.function)
+ warning = dict(code="I1",
+ fslocation=fslineno,
+ nodeid=None,
+ message="%r hook uses deprecated __multicall__ "
+ "argument" % (hook.name))
+ self._warn(warning)
+
+ def register(self, plugin, name=None):
+ ret = super(PytestPluginManager, self).register(plugin, name)
+ if ret:
+ self.hook.pytest_plugin_registered.call_historic(
+ kwargs=dict(plugin=plugin, manager=self))
+ return ret
+
+ def getplugin(self, name):
+        # support deprecated naming because plugins (e.g. xdist) use it
+ return self.get_plugin(name)
+
+ def hasplugin(self, name):
+ """Return True if the plugin with the given name is registered."""
+ return bool(self.get_plugin(name))
+
+ def pytest_configure(self, config):
+ # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
+ # we should remove tryfirst/trylast as markers
+ config.addinivalue_line("markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.")
+ config.addinivalue_line("markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.")
+
+ def _warn(self, message):
+ kwargs = message if isinstance(message, dict) else {
+ 'code': 'I1',
+ 'message': message,
+ 'fslocation': None,
+ 'nodeid': None,
+ }
+ self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
+
+ #
+ # internal API for local conftest plugin handling
+ #
+ def _set_initial_conftests(self, namespace):
+ """ load initial conftest files given a preparsed "namespace".
+ As conftest files may add their own command line options
+ which have arguments ('--my-opt somepath') we might get some
+ false positives. All builtin and 3rd party plugins will have
+ been loaded, however, so common options will not confuse our logic
+ here.
+ """
+ current = py.path.local()
+ self._confcutdir = current.join(namespace.confcutdir, abs=True) \
+ if namespace.confcutdir else None
+ self._noconftest = namespace.noconftest
+ testpaths = namespace.file_or_dir
+ foundanchor = False
+ for path in testpaths:
+ path = str(path)
+ # remove node-id syntax
+ i = path.find("::")
+ if i != -1:
+ path = path[:i]
+ anchor = current.join(path, abs=1)
+ if exists(anchor): # we found some file object
+ self._try_load_conftest(anchor)
+ foundanchor = True
+ if not foundanchor:
+ self._try_load_conftest(current)
+
+ def _try_load_conftest(self, anchor):
+ self._getconftestmodules(anchor)
+ # let's also consider test* subdirs
+ if anchor.check(dir=1):
+ for x in anchor.listdir("test*"):
+ if x.check(dir=1):
+ self._getconftestmodules(x)
+
+ def _getconftestmodules(self, path):
+ if self._noconftest:
+ return []
+ try:
+ return self._path2confmods[path]
+ except KeyError:
+ if path.isfile():
+ clist = self._getconftestmodules(path.dirpath())
+ else:
+ # XXX these days we may rather want to use config.rootdir
+ # and allow users to opt into looking into the rootdir parent
+ # directories instead of requiring to specify confcutdir
+ clist = []
+ for parent in path.parts():
+ if self._confcutdir and self._confcutdir.relto(parent):
+ continue
+ conftestpath = parent.join("conftest.py")
+ if conftestpath.isfile():
+ mod = self._importconftest(conftestpath)
+ clist.append(mod)
+
+ self._path2confmods[path] = clist
+ return clist
+
+ def _rget_with_confmod(self, name, path):
+ modules = self._getconftestmodules(path)
+ for mod in reversed(modules):
+ try:
+ return mod, getattr(mod, name)
+ except AttributeError:
+ continue
+ raise KeyError(name)
+
+ def _importconftest(self, conftestpath):
+ try:
+ return self._conftestpath2mod[conftestpath]
+ except KeyError:
+ pkgpath = conftestpath.pypkgpath()
+ if pkgpath is None:
+ _ensure_removed_sysmodule(conftestpath.purebasename)
+ try:
+ mod = conftestpath.pyimport()
+ except Exception:
+ raise ConftestImportFailure(conftestpath, sys.exc_info())
+
+ self._conftest_plugins.add(mod)
+ self._conftestpath2mod[conftestpath] = mod
+ dirpath = conftestpath.dirpath()
+ if dirpath in self._path2confmods:
+ for path, mods in self._path2confmods.items():
+ if path and path.relto(dirpath) or path == dirpath:
+ assert mod not in mods
+ mods.append(mod)
+ self.trace("loaded conftestmodule %r" %(mod))
+ self.consider_conftest(mod)
+ return mod
+
+ #
+ # API for bootstrapping plugin loading
+ #
+ #
+
+ def consider_preparse(self, args):
+ for opt1, opt2 in zip(args, args[1:]):
+ if opt1 == "-p":
+ self.consider_pluginarg(opt2)
+
+ def consider_pluginarg(self, arg):
+ if arg.startswith("no:"):
+ name = arg[3:]
+ self.set_blocked(name)
+ if not name.startswith("pytest_"):
+ self.set_blocked("pytest_" + name)
+ else:
+ self.import_plugin(arg)
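+ # Illustrative command lines (plugin names are hypothetical):
+ #   pytest -p myplugin     -> import_plugin("myplugin")
+ #   pytest -p no:doctest   -> blocks the builtin doctest plugin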
+
+ def consider_conftest(self, conftestmodule):
+ if self.register(conftestmodule, name=conftestmodule.__file__):
+ self.consider_module(conftestmodule)
+
+ def consider_env(self):
+ self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
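+ # e.g. PYTEST_PLUGINS=pluginA,pluginB (names illustrative) imports and
+ # registers both modules at startup via _import_plugin_specs.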
+
+ def consider_module(self, mod):
+ self._import_plugin_specs(getattr(mod, "pytest_plugins", None))
+
+ def _import_plugin_specs(self, spec):
+ if spec:
+ if isinstance(spec, str):
+ spec = spec.split(",")
+ for import_spec in spec:
+ self.import_plugin(import_spec)
+
+ def import_plugin(self, modname):
+ # most often modname refers to builtin modules, e.g. "pytester",
+ # "terminal" or "capture". Those plugins are registered under their
+ # basename for historic purposes but must be imported with the
+ # _pytest prefix.
+ assert isinstance(modname, str)
+ if self.get_plugin(modname) is not None:
+ return
+ if modname in builtin_plugins:
+ importspec = "_pytest." + modname
+ else:
+ importspec = modname
+ try:
+ __import__(importspec)
+ except ImportError as e:
+ new_exc = ImportError('Error importing plugin "%s": %s' % (modname, e))
+ # copy over name and path attributes
+ for attr in ('name', 'path'):
+ if hasattr(e, attr):
+ setattr(new_exc, attr, getattr(e, attr))
+ raise new_exc
+ except Exception as e:
+ import pytest
+ if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
+ raise
+ self._warn("skipped plugin %r: %s" %((modname, e.msg)))
+ else:
+ mod = sys.modules[importspec]
+ self.register(mod, modname)
+ self.consider_module(mod)
+
+
+class Parser:
+ """ Parser for command line arguments and ini-file values.
+
+ :ivar extra_info: dict of generic param -> value to display in case
+ there's an error processing the command line arguments.
+ """
+
+ def __init__(self, usage=None, processopt=None):
+ self._anonymous = OptionGroup("custom options", parser=self)
+ self._groups = []
+ self._processopt = processopt
+ self._usage = usage
+ self._inidict = {}
+ self._ininames = []
+ self.extra_info = {}
+
+ def processoption(self, option):
+ if self._processopt:
+ if option.dest:
+ self._processopt(option)
+
+ def getgroup(self, name, description="", after=None):
+ """ get (or create) a named option Group.
+
+ :name: name of the option group.
+ :description: long description for --help output.
+ :after: name of other group, used for ordering --help output.
+
+ The returned group object has an ``addoption`` method with the same
+ signature as :py:func:`parser.addoption
+ <_pytest.config.Parser.addoption>` but will be shown in the
+ respective group in the output of ``pytest --help``.
+ """
+ for group in self._groups:
+ if group.name == name:
+ return group
+ group = OptionGroup(name, description, parser=self)
+ i = 0
+ for i, grp in enumerate(self._groups):
+ if grp.name == after:
+ break
+ self._groups.insert(i+1, group)
+ return group
+
+ def addoption(self, *opts, **attrs):
+ """ register a command line option.
+
+ :opts: option names, can be short or long options.
+ :attrs: same attributes which the ``add_option()`` function of the
+ `argparse library
+ <http://docs.python.org/2/library/argparse.html>`_
+ accepts.
+
+ After command line parsing options are available on the pytest config
+ object via ``config.option.NAME`` where ``NAME`` is usually set
+ by passing a ``dest`` attribute, for example
+ ``addoption("--long", dest="NAME", ...)``.
+ """
+ self._anonymous.addoption(*opts, **attrs)
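+ # A minimal sketch of plugin usage (the option name is illustrative):
+ #
+ #     def pytest_addoption(parser):
+ #         parser.addoption("--runslow", action="store_true",
+ #                          default=False, help="also run slow tests")
+ #
+ # After parsing, the value is available as
+ # config.getoption("--runslow") or config.option.runslow.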
+
+ def parse(self, args, namespace=None):
+ from _pytest._argcomplete import try_argcomplete
+ self.optparser = self._getparser()
+ try_argcomplete(self.optparser)
+ return self.optparser.parse_args([str(x) for x in args], namespace=namespace)
+
+ def _getparser(self):
+ from _pytest._argcomplete import filescompleter
+ optparser = MyOptionParser(self, self.extra_info)
+ groups = self._groups + [self._anonymous]
+ for group in groups:
+ if group.options:
+ desc = group.description or group.name
+ arggroup = optparser.add_argument_group(desc)
+ for option in group.options:
+ n = option.names()
+ a = option.attrs()
+ arggroup.add_argument(*n, **a)
+ # bash like autocompletion for dirs (appending '/')
+ optparser.add_argument(FILE_OR_DIR, nargs='*').completer = filescompleter
+ return optparser
+
+ def parse_setoption(self, args, option, namespace=None):
+ parsedoption = self.parse(args, namespace=namespace)
+ for name, value in parsedoption.__dict__.items():
+ setattr(option, name, value)
+ return getattr(parsedoption, FILE_OR_DIR)
+
+ def parse_known_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments at this
+ point.
+ """
+ return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
+
+ def parse_known_and_unknown_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments, and
+ the remaining arguments unknown at this point.
+ """
+ optparser = self._getparser()
+ args = [str(x) for x in args]
+ return optparser.parse_known_args(args, namespace=namespace)
+
+ def addini(self, name, help, type=None, default=None):
+ """ register an ini-file option.
+
+ :name: name of the ini-variable
+ :type: type of the variable, can be ``pathlist``, ``args``, ``linelist``
+ or ``bool``.
+ :default: default value if no ini-file option exists but is queried.
+
+ The value of ini-variables can be retrieved via a call to
+ :py:func:`config.getini(name) <_pytest.config.Config.getini>`.
+ """
+ assert type in (None, "pathlist", "args", "linelist", "bool")
+ self._inidict[name] = (help, type, default)
+ self._ininames.append(name)
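+ # Sketch (names illustrative), e.g. from a plugin's pytest_addoption hook:
+ #
+ #     parser.addini("api_timeout", "timeout for API tests", default="10")
+ #
+ # With `api_timeout = 30` in the ini file, config.getini("api_timeout")
+ # returns the string "30"; with no type given, values are passed
+ # through unchanged (see Config._getini below).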
+
+
+class ArgumentError(Exception):
+ """
+ Raised if an Argument instance is created with invalid or
+ inconsistent arguments.
+ """
+
+ def __init__(self, msg, option):
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self):
+ if self.option_id:
+ return "option %s: %s" % (self.option_id, self.msg)
+ else:
+ return self.msg
+
+
+class Argument:
+ """class that mimics the necessary behaviour of optparse.Option """
+ _typ_map = {
+ 'int': int,
+ 'string': str,
+ }
+ # enable after some grace period for plugin writers
+ TYPE_WARN = False
+
+ def __init__(self, *names, **attrs):
+ """store parms in private vars for use in add_argument"""
+ self._attrs = attrs
+ self._short_opts = []
+ self._long_opts = []
+ self.dest = attrs.get('dest')
+ if self.TYPE_WARN:
+ try:
+ help = attrs['help']
+ if '%default' in help:
+ warnings.warn(
+ 'pytest now uses argparse. "%default" should be'
+ ' changed to "%(default)s" ',
+ FutureWarning,
+ stacklevel=3)
+ except KeyError:
+ pass
+ try:
+ typ = attrs['type']
+ except KeyError:
+ pass
+ else:
+ # this might raise a keyerror as well, don't want to catch that
+ if isinstance(typ, py.builtin._basestring):
+ if typ == 'choice':
+ if self.TYPE_WARN:
+ warnings.warn(
+ 'type argument to addoption() is a string %r.'
+ ' For parsearg this is optional and when supplied '
+ ' should be a type.'
+ ' (options: %s)' % (typ, names),
+ FutureWarning,
+ stacklevel=3)
+ # argparse expects a type here take it from
+ # the type of the first element
+ attrs['type'] = type(attrs['choices'][0])
+ else:
+ if self.TYPE_WARN:
+ warnings.warn(
+ 'type argument to addoption() is a string %r.'
+ ' For parsearg this should be a type.'
+ ' (options: %s)' % (typ, names),
+ FutureWarning,
+ stacklevel=3)
+ attrs['type'] = Argument._typ_map[typ]
+ # used in test_parseopt -> test_parse_defaultgetter
+ self.type = attrs['type']
+ else:
+ self.type = typ
+ try:
+ # attribute existence is tested in Config._processopt
+ self.default = attrs['default']
+ except KeyError:
+ pass
+ self._set_opt_strings(names)
+ if not self.dest:
+ if self._long_opts:
+ self.dest = self._long_opts[0][2:].replace('-', '_')
+ else:
+ try:
+ self.dest = self._short_opts[0][1:]
+ except IndexError:
+ raise ArgumentError(
+ 'need a long or short option', self)
+
+ def names(self):
+ return self._short_opts + self._long_opts
+
+ def attrs(self):
+ # update any attributes set by processopt
+ attrs = 'default dest help'.split()
+ if self.dest:
+ attrs.append(self.dest)
+ for attr in attrs:
+ try:
+ self._attrs[attr] = getattr(self, attr)
+ except AttributeError:
+ pass
+ if self._attrs.get('help'):
+ a = self._attrs['help']
+ a = a.replace('%default', '%(default)s')
+ #a = a.replace('%prog', '%(prog)s')
+ self._attrs['help'] = a
+ return self._attrs
+
+ def _set_opt_strings(self, opts):
+ """directly from optparse
+
+ might not be necessary as this is passed to argparse later on"""
+ for opt in opts:
+ if len(opt) < 2:
+ raise ArgumentError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt, self)
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise ArgumentError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self)
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise ArgumentError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self)
+ self._long_opts.append(opt)
+
+ def __repr__(self):
+ retval = 'Argument('
+ if self._short_opts:
+ retval += '_short_opts: ' + repr(self._short_opts) + ', '
+ if self._long_opts:
+ retval += '_long_opts: ' + repr(self._long_opts) + ', '
+ retval += 'dest: ' + repr(self.dest) + ', '
+ if hasattr(self, 'type'):
+ retval += 'type: ' + repr(self.type) + ', '
+ if hasattr(self, 'default'):
+ retval += 'default: ' + repr(self.default) + ', '
+ if retval[-2:] == ', ': # always long enough to test ("Argument(" )
+ retval = retval[:-2]
+ retval += ')'
+ return retval
+
+
+class OptionGroup:
+ def __init__(self, name, description="", parser=None):
+ self.name = name
+ self.description = description
+ self.options = []
+ self.parser = parser
+
+ def addoption(self, *optnames, **attrs):
+ """ add an option to this group.
+
+ if a shortened version of a long option is specified it will
+ be suppressed in the help. addoption('--twowords', '--two-words')
+ results in help showing '--two-words' only, but --twowords gets
+ accepted **and** the automatic destination is in args.twowords
+ """
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=False)
+
+ def _addoption(self, *optnames, **attrs):
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=True)
+
+ def _addoption_instance(self, option, shortupper=False):
+ if not shortupper:
+ for opt in option._short_opts:
+ if opt[0] == '-' and opt[1].islower():
+ raise ValueError("lowercase shortoptions reserved")
+ if self.parser:
+ self.parser.processoption(option)
+ self.options.append(option)
+
+
+class MyOptionParser(argparse.ArgumentParser):
+ def __init__(self, parser, extra_info=None):
+ if not extra_info:
+ extra_info = {}
+ self._parser = parser
+ argparse.ArgumentParser.__init__(self, usage=parser._usage,
+ add_help=False, formatter_class=DropShorterLongHelpFormatter)
+ # extra_info is a dict of (param -> value) to display if there's
+ # a usage error to provide more contextual information to the user
+ self.extra_info = extra_info
+
+ def parse_args(self, args=None, namespace=None):
+ """allow splitting of positional arguments"""
+ args, argv = self.parse_known_args(args, namespace)
+ if argv:
+ for arg in argv:
+ if arg and arg[0] == '-':
+ lines = ['unrecognized arguments: %s' % (' '.join(argv))]
+ for k, v in sorted(self.extra_info.items()):
+ lines.append(' %s: %s' % (k, v))
+ self.error('\n'.join(lines))
+ getattr(args, FILE_OR_DIR).extend(argv)
+ return args
+
+
+class DropShorterLongHelpFormatter(argparse.HelpFormatter):
+ """shorten help for long options that differ only in extra hyphens
+
+ - collapse **long** options that are the same except for extra hyphens
+ - special action attribute map_long_option allows suppressing additional
+ long options
+ - shortcut if there are only two options and one of them is a short one
+ - cache result on action object as this is called at least 2 times
+ """
+ def _format_action_invocation(self, action):
+ orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
+ if orgstr and orgstr[0] != '-': # only optional arguments
+ return orgstr
+ res = getattr(action, '_formatted_action_invocation', None)
+ if res:
+ return res
+ options = orgstr.split(', ')
+ if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
+ # a shortcut for '-h, --help' or '--abc', '-a'
+ action._formatted_action_invocation = orgstr
+ return orgstr
+ return_list = []
+ option_map = getattr(action, 'map_long_option', {})
+ if option_map is None:
+ option_map = {}
+ short_long = {}
+ for option in options:
+ if len(option) == 2 or option[2] == ' ':
+ continue
+ if not option.startswith('--'):
+ raise ArgumentError('long optional argument without "--": [%s]'
+ % (option), self)
+ xxoption = option[2:]
+ if xxoption.split()[0] not in option_map:
+ shortened = xxoption.replace('-', '')
+ if shortened not in short_long or \
+ len(short_long[shortened]) < len(xxoption):
+ short_long[shortened] = xxoption
+ # now short_long has been filled out to the longest with dashes
+ # **and** we keep the right option ordering from add_argument
+ for option in options:
+ if len(option) == 2 or option[2] == ' ':
+ return_list.append(option)
+ if option[2:] == short_long.get(option.replace('-', '')):
+ return_list.append(option.replace(' ', '='))
+ action._formatted_action_invocation = ', '.join(return_list)
+ return action._formatted_action_invocation
+
+
+
+def _ensure_removed_sysmodule(modname):
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+class CmdOptions(object):
+ """ holds cmdline options as attributes."""
+ def __init__(self, values=()):
+ self.__dict__.update(values)
+ def __repr__(self):
+ return "<CmdOptions %r>" %(self.__dict__,)
+ def copy(self):
+ return CmdOptions(self.__dict__)
+
+class Notset:
+ def __repr__(self):
+ return "<NOTSET>"
+
+notset = Notset()
+FILE_OR_DIR = 'file_or_dir'
+
+class Config(object):
+ """ access to configuration values, pluginmanager and plugin hooks. """
+
+ def __init__(self, pluginmanager):
+ #: access to command line option as attributes.
+ #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
+ self.option = CmdOptions()
+ _a = FILE_OR_DIR
+ self._parser = Parser(
+ usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
+ processopt=self._processopt,
+ )
+ #: a pluginmanager instance
+ self.pluginmanager = pluginmanager
+ self.trace = self.pluginmanager.trace.root.get("config")
+ self.hook = self.pluginmanager.hook
+ self._inicache = {}
+ self._opt2dest = {}
+ self._cleanup = []
+ self._warn = self.pluginmanager._warn
+ self.pluginmanager.register(self, "pytestconfig")
+ self._configured = False
+ def do_setns(dic):
+ import pytest
+ setns(pytest, dic)
+ self.hook.pytest_namespace.call_historic(do_setns, {})
+ self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
+
+ def add_cleanup(self, func):
+ """ Add a function to be called when the config object gets out of
+ use (usually coinciding with pytest_unconfigure)."""
+ self._cleanup.append(func)
+
+ def _do_configure(self):
+ assert not self._configured
+ self._configured = True
+ self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+ def _ensure_unconfigure(self):
+ if self._configured:
+ self._configured = False
+ self.hook.pytest_unconfigure(config=self)
+ self.hook.pytest_configure._call_history = []
+ while self._cleanup:
+ fin = self._cleanup.pop()
+ fin()
+
+ def warn(self, code, message, fslocation=None):
+ """ generate a warning for this test session. """
+ self.hook.pytest_logwarning.call_historic(kwargs=dict(
+ code=code, message=message,
+ fslocation=fslocation, nodeid=None))
+
+ def get_terminal_writer(self):
+ return self.pluginmanager.get_plugin("terminalreporter")._tw
+
+ def pytest_cmdline_parse(self, pluginmanager, args):
+ # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
+ self.parse(args)
+ return self
+
+ def notify_exception(self, excinfo, option=None):
+ if option and option.fulltrace:
+ style = "long"
+ else:
+ style = "native"
+ excrepr = excinfo.getrepr(funcargs=True,
+ showlocals=getattr(option, 'showlocals', False),
+ style=style,
+ )
+ res = self.hook.pytest_internalerror(excrepr=excrepr,
+ excinfo=excinfo)
+ if not py.builtin.any(res):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" %line)
+ sys.stderr.flush()
+
+ def cwd_relative_nodeid(self, nodeid):
+ # nodeids are relative to the rootpath; compute relative to cwd
+ if self.invocation_dir != self.rootdir:
+ fullpath = self.rootdir.join(nodeid)
+ nodeid = self.invocation_dir.bestrelpath(fullpath)
+ return nodeid
+
+ @classmethod
+ def fromdictargs(cls, option_dict, args):
+ """ constructor useable for subprocesses. """
+ config = get_config()
+ config.option.__dict__.update(option_dict)
+ config.parse(args, addopts=False)
+ for x in config.option.plugins:
+ config.pluginmanager.consider_pluginarg(x)
+ return config
+
+ def _processopt(self, opt):
+ for name in opt._short_opts + opt._long_opts:
+ self._opt2dest[name] = opt.dest
+
+ if hasattr(opt, 'default') and opt.dest:
+ if not hasattr(self.option, opt.dest):
+ setattr(self.option, opt.dest, opt.default)
+
+ @hookimpl(trylast=True)
+ def pytest_load_initial_conftests(self, early_config):
+ self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
+
+ def _initini(self, args):
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(args, namespace=self.option.copy())
+ r = determine_setup(ns.inifilename, ns.file_or_dir + unknown_args)
+ self.rootdir, self.inifile, self.inicfg = r
+ self._parser.extra_info['rootdir'] = self.rootdir
+ self._parser.extra_info['inifile'] = self.inifile
+ self.invocation_dir = py.path.local()
+ self._parser.addini('addopts', 'extra command line options', 'args')
+ self._parser.addini('minversion', 'minimally required pytest version')
+
+ def _preparse(self, args, addopts=True):
+ self._initini(args)
+ if addopts:
+ args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args
+ args[:] = self.getini("addopts") + args
+ self._checkversion()
+ self.pluginmanager.consider_preparse(args)
+ try:
+ self.pluginmanager.load_setuptools_entrypoints("pytest11")
+ except ImportError as e:
+ self.warn("I2", "could not load setuptools entry import: %s" % (e,))
+ self.pluginmanager.consider_env()
+ self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy())
+ if self.known_args_namespace.confcutdir is None and self.inifile:
+ confcutdir = py.path.local(self.inifile).dirname
+ self.known_args_namespace.confcutdir = confcutdir
+ try:
+ self.hook.pytest_load_initial_conftests(early_config=self,
+ args=args, parser=self._parser)
+ except ConftestImportFailure:
+ e = sys.exc_info()[1]
+ if ns.help or ns.version:
+ # we don't want to prevent --help/--version to work
+ # so just let it pass and print a warning at the end
+ self._warn("could not load initial conftests (%s)\n" % e.path)
+ else:
+ raise
+
+ def _checkversion(self):
+ import pytest
+ minver = self.inicfg.get('minversion', None)
+ if minver:
+ ver = minver.split(".")
+ myver = pytest.__version__.split(".")
+ if myver < ver:
+ raise pytest.UsageError(
+ "%s:%d: requires pytest-%s, actual pytest-%s'" %(
+ self.inicfg.config.path, self.inicfg.lineof('minversion'),
+ minver, pytest.__version__))
+
+ def parse(self, args, addopts=True):
+ # parse given cmdline arguments into this config object.
+ assert not hasattr(self, 'args'), (
+ "can only parse cmdline args at most once per Config object")
+ self._origargs = args
+ self.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=self.pluginmanager))
+ self._preparse(args, addopts=addopts)
+ # XXX deprecated hook:
+ self.hook.pytest_cmdline_preparse(config=self, args=args)
+ args = self._parser.parse_setoption(args, self.option, namespace=self.option)
+ if not args:
+ cwd = os.getcwd()
+ if cwd == self.rootdir:
+ args = self.getini('testpaths')
+ if not args:
+ args = [cwd]
+ self.args = args
+
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+ declared but might not yet be set, in which case the line becomes
+ the first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
+ def getini(self, name):
+ """ return configuration value from an :ref:`ini file <inifiles>`. If the
+ specified name hasn't been registered through a prior
+ :py:func:`parser.addini <pytest.config.Parser.addini>`
+ call (usually from a plugin), a ValueError is raised. """
+ try:
+ return self._inicache[name]
+ except KeyError:
+ self._inicache[name] = val = self._getini(name)
+ return val
+
+ def _getini(self, name):
+ try:
+ description, type, default = self._parser._inidict[name]
+ except KeyError:
+ raise ValueError("unknown configuration value: %r" %(name,))
+ try:
+ value = self.inicfg[name]
+ except KeyError:
+ if default is not None:
+ return default
+ if type is None:
+ return ''
+ return []
+ if type == "pathlist":
+ dp = py.path.local(self.inicfg.config.path).dirpath()
+ l = []
+ for relpath in shlex.split(value):
+ l.append(dp.join(relpath, abs=True))
+ return l
+ elif type == "args":
+ return shlex.split(value)
+ elif type == "linelist":
+ return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+ elif type == "bool":
+ return bool(_strtobool(value.strip()))
+ else:
+ assert type is None
+ return value
+
+ def _getconftest_pathlist(self, name, path):
+ try:
+ mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
+ except KeyError:
+ return None
+ modpath = py.path.local(mod.__file__).dirpath()
+ l = []
+ for relroot in relroots:
+ if not isinstance(relroot, py.path.local):
+ relroot = relroot.replace("/", py.path.local.sep)
+ relroot = modpath.join(relroot, abs=True)
+ l.append(relroot)
+ return l
+
+ def getoption(self, name, default=notset, skip=False):
+ """ return command line option value.
+
+ :arg name: name of the option. You may also specify
+ the literal ``--OPT`` option instead of the "dest" option name.
+ :arg default: default value if no option of that name exists.
+ :arg skip: if True, raise pytest.skip if the option does not exist
+ or has a None value.
+ """
+ name = self._opt2dest.get(name, name)
+ try:
+ val = getattr(self.option, name)
+ if val is None and skip:
+ raise AttributeError(name)
+ return val
+ except AttributeError:
+ if default is not notset:
+ return default
+ if skip:
+ import pytest
+ pytest.skip("no %r option found" %(name,))
+ raise ValueError("no option named %r" % (name,))
+
+ def getvalue(self, name, path=None):
+ """ (deprecated, use getoption()) """
+ return self.getoption(name)
+
+ def getvalueorskip(self, name, path=None):
+ """ (deprecated, use getoption(skip=True)) """
+ return self.getoption(name, skip=True)
+
+def exists(path, ignore=EnvironmentError):
+ try:
+ return path.check()
+ except ignore:
+ return False
+
+def getcfg(args, inibasenames):
+ args = [x for x in args if not str(x).startswith("-")]
+ if not args:
+ args = [py.path.local()]
+ for arg in args:
+ arg = py.path.local(arg)
+ for base in arg.parts(reverse=True):
+ for inibasename in inibasenames:
+ p = base.join(inibasename)
+ if exists(p):
+ iniconfig = py.iniconfig.IniConfig(p)
+ if 'pytest' in iniconfig.sections:
+ return base, p, iniconfig['pytest']
+ elif inibasename == "pytest.ini":
+ # allowed to be empty
+ return base, p, {}
+ return None, None, None
+
+
+def get_common_ancestor(args):
+ # args are what we get after early command line parsing (usually
+ # strings, but can be py.path.local objects as well)
+ common_ancestor = None
+ for arg in args:
+ if str(arg)[0] == "-":
+ continue
+ p = py.path.local(arg)
+ if common_ancestor is None:
+ common_ancestor = p
+ else:
+ if p.relto(common_ancestor) or p == common_ancestor:
+ continue
+ elif common_ancestor.relto(p):
+ common_ancestor = p
+ else:
+ shared = p.common(common_ancestor)
+ if shared is not None:
+ common_ancestor = shared
+ if common_ancestor is None:
+ common_ancestor = py.path.local()
+ elif not common_ancestor.isdir():
+ common_ancestor = common_ancestor.dirpath()
+ return common_ancestor
+
+
+def determine_setup(inifile, args):
+ if inifile:
+ iniconfig = py.iniconfig.IniConfig(inifile)
+ try:
+ inicfg = iniconfig["pytest"]
+ except KeyError:
+ inicfg = None
+ rootdir = get_common_ancestor(args)
+ else:
+ ancestor = get_common_ancestor(args)
+ rootdir, inifile, inicfg = getcfg(
+ [ancestor], ["pytest.ini", "tox.ini", "setup.cfg"])
+ if rootdir is None:
+ for rootdir in ancestor.parts(reverse=True):
+ if rootdir.join("setup.py").exists():
+ break
+ else:
+ rootdir = ancestor
+ return rootdir, inifile, inicfg or {}
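+ # Example (assumed project layout): for args ["tests/test_x.py"] with a
+ # tox.ini containing a [pytest] section in the project root, rootdir is
+ # the directory holding that tox.ini; with no ini file at all, the
+ # closest ancestor containing a setup.py is used, else the common
+ # ancestor itself.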
+
+
+def setns(obj, dic):
+ import pytest
+ for name, value in dic.items():
+ if isinstance(value, dict):
+ mod = getattr(obj, name, None)
+ if mod is None:
+ modname = "pytest.%s" % name
+ mod = types.ModuleType(modname)
+ sys.modules[modname] = mod
+ mod.__all__ = []
+ setattr(obj, name, mod)
+ obj.__all__.append(name)
+ setns(mod, value)
+ else:
+ setattr(obj, name, value)
+ obj.__all__.append(name)
+ #if obj != pytest:
+ # pytest.__all__.append(name)
+ setattr(pytest, name, value)
+
+
+def create_terminal_writer(config, *args, **kwargs):
+ """Create a TerminalWriter instance configured according to the options
+ in the config object. All code that requires a TerminalWriter object
+ and has access to a config object should use this function.
+ """
+ tw = py.io.TerminalWriter(*args, **kwargs)
+ if config.option.color == 'yes':
+ tw.hasmarkup = True
+ if config.option.color == 'no':
+ tw.hasmarkup = False
+ return tw
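+ # Sketch of intended use from a plugin:
+ #
+ #     tw = create_terminal_writer(config)
+ #     tw.line("building report...", bold=True)
+ #
+ # so that --color=yes/--color=no is honored consistently.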
+
+
+def _strtobool(val):
+ """Convert a string representation of truth to true (1) or false (0).
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+
+ .. note:: copied from distutils.util
+ """
+ val = val.lower()
+ if val in ('y', 'yes', 't', 'true', 'on', '1'):
+ return 1
+ elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+ return 0
+ else:
+ raise ValueError("invalid truth value %r" % (val,))
diff --git a/python/pytest/_pytest/doctest.py b/python/pytest/_pytest/doctest.py
new file mode 100644
index 000000000..a57f7a494
--- /dev/null
+++ b/python/pytest/_pytest/doctest.py
@@ -0,0 +1,290 @@
+""" discover and run doctests in modules and test files."""
+from __future__ import absolute_import
+
+import traceback
+
+import pytest
+from _pytest._code.code import TerminalRepr, ReprFileLocation, ExceptionInfo
+from _pytest.python import FixtureRequest
+
+
+
+def pytest_addoption(parser):
+ parser.addini('doctest_optionflags', 'option flags for doctests',
+ type="args", default=["ELLIPSIS"])
+ group = parser.getgroup("collect")
+ group.addoption("--doctest-modules",
+ action="store_true", default=False,
+ help="run doctests in all .py modules",
+ dest="doctestmodules")
+ group.addoption("--doctest-glob",
+ action="append", default=[], metavar="pat",
+ help="doctests file matching pattern, default: test*.txt",
+ dest="doctestglob")
+ group.addoption("--doctest-ignore-import-errors",
+ action="store_true", default=False,
+ help="ignore doctest ImportErrors",
+ dest="doctest_ignore_import_errors")
+
+
+def pytest_collect_file(path, parent):
+ config = parent.config
+ if path.ext == ".py":
+ if config.option.doctestmodules:
+ return DoctestModule(path, parent)
+ elif _is_doctest(config, path, parent):
+ return DoctestTextfile(path, parent)
+
+
+def _is_doctest(config, path, parent):
+ if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path):
+ return True
+ globs = config.getoption("doctestglob") or ['test*.txt']
+ for glob in globs:
+ if path.check(fnmatch=glob):
+ return True
+ return False
+
+
+class ReprFailDoctest(TerminalRepr):
+
+ def __init__(self, reprlocation, lines):
+ self.reprlocation = reprlocation
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+ self.reprlocation.toterminal(tw)
+
+
+class DoctestItem(pytest.Item):
+
+ def __init__(self, name, parent, runner=None, dtest=None):
+ super(DoctestItem, self).__init__(name, parent)
+ self.runner = runner
+ self.dtest = dtest
+ self.obj = None
+ self.fixture_request = None
+
+ def setup(self):
+ if self.dtest is not None:
+ self.fixture_request = _setup_fixtures(self)
+ globs = dict(getfixture=self.fixture_request.getfuncargvalue)
+ self.dtest.globs.update(globs)
+
+ def runtest(self):
+ _check_all_skipped(self.dtest)
+ self.runner.run(self.dtest)
+
+ def repr_failure(self, excinfo):
+ import doctest
+ if excinfo.errisinstance((doctest.DocTestFailure,
+ doctest.UnexpectedException)):
+ doctestfailure = excinfo.value
+ example = doctestfailure.example
+ test = doctestfailure.test
+ filename = test.filename
+ if test.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ message = excinfo.type.__name__
+ reprlocation = ReprFileLocation(filename, lineno, message)
+ checker = _get_checker()
+ REPORT_UDIFF = doctest.REPORT_UDIFF
+ if lineno is not None:
+ lines = doctestfailure.test.docstring.splitlines(False)
+ # add line numbers to the left of the error message
+ lines = ["%03d %s" % (i + test.lineno + 1, x)
+ for (i, x) in enumerate(lines)]
+ # trim docstring error lines to 10
+ lines = lines[example.lineno - 9:example.lineno + 1]
+ else:
+ lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
+ indent = '>>>'
+ for line in example.source.splitlines():
+ lines.append('??? %s %s' % (indent, line))
+ indent = '...'
+ if excinfo.errisinstance(doctest.DocTestFailure):
+ lines += checker.output_difference(example,
+ doctestfailure.got, REPORT_UDIFF).split("\n")
+ else:
+ inner_excinfo = ExceptionInfo(excinfo.value.exc_info)
+ lines += ["UNEXPECTED EXCEPTION: %s" %
+ repr(inner_excinfo.value)]
+ lines += traceback.format_exception(*excinfo.value.exc_info)
+ return ReprFailDoctest(reprlocation, lines)
+ else:
+ return super(DoctestItem, self).repr_failure(excinfo)
+
+ def reportinfo(self):
+ return self.fspath, None, "[doctest] %s" % self.name
+
+
+def _get_flag_lookup():
+ import doctest
+ return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
+ DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
+ NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
+ ELLIPSIS=doctest.ELLIPSIS,
+ IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
+ COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
+ ALLOW_UNICODE=_get_allow_unicode_flag(),
+ ALLOW_BYTES=_get_allow_bytes_flag(),
+ )
+
+
+def get_optionflags(parent):
+ optionflags_str = parent.config.getini("doctest_optionflags")
+ flag_lookup_table = _get_flag_lookup()
+ flag_acc = 0
+ for flag in optionflags_str:
+ flag_acc |= flag_lookup_table[flag]
+ return flag_acc
+
+
+class DoctestTextfile(DoctestItem, pytest.Module):
+
+ def runtest(self):
+ import doctest
+ fixture_request = _setup_fixtures(self)
+
+ # inspired by doctest.testfile; ideally we would use it directly,
+ # but it doesn't support passing a custom checker
+ text = self.fspath.read()
+ filename = str(self.fspath)
+ name = self.fspath.basename
+ globs = dict(getfixture=fixture_request.getfuncargvalue)
+ if '__name__' not in globs:
+ globs['__name__'] = '__main__'
+
+ optionflags = get_optionflags(self)
+ runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
+ checker=_get_checker())
+
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(text, globs, name, filename, 0)
+ _check_all_skipped(test)
+ runner.run(test)
+
+
+def _check_all_skipped(test):
+ """raises pytest.skip() if all examples in the given DocTest have the SKIP
+ option set.
+ """
+ import doctest
+ all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
+ if all_skipped:
+ pytest.skip('all tests skipped by +SKIP option')
+
+
+class DoctestModule(pytest.Module):
+ def collect(self):
+ import doctest
+ if self.fspath.basename == "conftest.py":
+ module = self.config.pluginmanager._importconftest(self.fspath)
+ else:
+ try:
+ module = self.fspath.pyimport()
+ except ImportError:
+ if self.config.getvalue('doctest_ignore_import_errors'):
+ pytest.skip('unable to import module %r' % self.fspath)
+ else:
+ raise
+ # uses internal doctest module parsing mechanism
+ finder = doctest.DocTestFinder()
+ optionflags = get_optionflags(self)
+ runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
+ checker=_get_checker())
+ for test in finder.find(module, module.__name__):
+ if test.examples: # skip empty doctests
+ yield DoctestItem(test.name, self, runner, test)
+
+
+def _setup_fixtures(doctest_item):
+ """
+ Used by DoctestTextfile and DoctestItem to set up fixture information.
+ """
+ def func():
+ pass
+
+ doctest_item.funcargs = {}
+ fm = doctest_item.session._fixturemanager
+ doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func,
+ cls=None, funcargs=False)
+ fixture_request = FixtureRequest(doctest_item)
+ fixture_request._fillfixtures()
+ return fixture_request
+
+
+def _get_checker():
+ """
+ Returns a doctest.OutputChecker subclass that takes into account the
+ ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES
+ to strip b'' prefixes.
+ Useful when the same doctest should run in Python 2 and Python 3.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ if hasattr(_get_checker, 'LiteralsOutputChecker'):
+ return _get_checker.LiteralsOutputChecker()
+
+ import doctest
+ import re
+
+ class LiteralsOutputChecker(doctest.OutputChecker):
+ """
+ Copied from doctest_nose_plugin.py from the nltk project:
+ https://github.com/nltk/nltk
+
+ Further extended to also support byte literals.
+ """
+
+ _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+ _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+
+ def check_output(self, want, got, optionflags):
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ if res:
+ return True
+
+ allow_unicode = optionflags & _get_allow_unicode_flag()
+ allow_bytes = optionflags & _get_allow_bytes_flag()
+ if not allow_unicode and not allow_bytes:
+ return False
+
+ else: # pragma: no cover
+ def remove_prefixes(regex, txt):
+ return re.sub(regex, r'\1\2', txt)
+
+ if allow_unicode:
+ want = remove_prefixes(self._unicode_literal_re, want)
+ got = remove_prefixes(self._unicode_literal_re, got)
+ if allow_bytes:
+ want = remove_prefixes(self._bytes_literal_re, want)
+ got = remove_prefixes(self._bytes_literal_re, got)
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ return res
+
+ _get_checker.LiteralsOutputChecker = LiteralsOutputChecker
+ return _get_checker.LiteralsOutputChecker()
+
+
+def _get_allow_unicode_flag():
+ """
+ Registers and returns the ALLOW_UNICODE flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_UNICODE')
+
+
+def _get_allow_bytes_flag():
+ """
+ Registers and returns the ALLOW_BYTES flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_BYTES')
diff --git a/python/pytest/_pytest/genscript.py b/python/pytest/_pytest/genscript.py
new file mode 100755
index 000000000..d2962d8fc
--- /dev/null
+++ b/python/pytest/_pytest/genscript.py
@@ -0,0 +1,132 @@
+""" (deprecated) generate a single-file self-contained version of pytest """
+import os
+import sys
+import pkgutil
+
+import py
+import _pytest
+
+
+
+def find_toplevel(name):
+ for syspath in sys.path:
+ base = py.path.local(syspath)
+ lib = base/name
+ if lib.check(dir=1):
+ return lib
+ mod = base.join("%s.py" % name)
+ if mod.check(file=1):
+ return mod
+ raise LookupError(name)
+
+def pkgname(toplevel, rootpath, path):
+ parts = path.parts()[len(rootpath.parts()):]
+ return '.'.join([toplevel] + [x.purebasename for x in parts])
+
+def pkg_to_mapping(name):
+ toplevel = find_toplevel(name)
+ name2src = {}
+ if toplevel.check(file=1): # module
+ name2src[toplevel.purebasename] = toplevel.read()
+ else: # package
+ for pyfile in toplevel.visit('*.py'):
+ pkg = pkgname(name, toplevel, pyfile)
+ name2src[pkg] = pyfile.read()
+ # with wheels py source code might not be installed
+ # and the resulting genscript is useless, just bail out.
+ assert name2src, "no source code found for %r at %r" %(name, toplevel)
+ return name2src
+
+def compress_mapping(mapping):
+ import base64, pickle, zlib
+ data = pickle.dumps(mapping, 2)
+ data = zlib.compress(data, 9)
+ data = base64.encodestring(data)
+ data = data.decode('ascii')
+ return data
+
+
+def compress_packages(names):
+ mapping = {}
+ for name in names:
+ mapping.update(pkg_to_mapping(name))
+ return compress_mapping(mapping)
+
+def generate_script(entry, packages):
+ data = compress_packages(packages)
+ tmpl = py.path.local(__file__).dirpath().join('standalonetemplate.py')
+ exe = tmpl.read()
+ exe = exe.replace('@SOURCES@', data)
+ exe = exe.replace('@ENTRY@', entry)
+ return exe
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption("--genscript", action="store", default=None,
+ dest="genscript", metavar="path",
+ help="create standalone pytest script at given target path.")
+
+def pytest_cmdline_main(config):
+ import _pytest.config
+ genscript = config.getvalue("genscript")
+ if genscript:
+ tw = _pytest.config.create_terminal_writer(config)
+ tw.line("WARNING: usage of genscript is deprecated.",
+ red=True)
+ deps = ['py', '_pytest', 'pytest'] # pluggy is vendored
+ if sys.version_info < (2,7):
+ deps.append("argparse")
+ tw.line("generated script will run on python2.6-python3.3++")
+ else:
+ tw.line("WARNING: generated script will not run on python2.6 "
+ "due to 'argparse' dependency. Use python2.6 "
+ "to generate a python2.6 compatible script", red=True)
+ script = generate_script(
+ 'import pytest; raise SystemExit(pytest.cmdline.main())',
+ deps,
+ )
+ genscript = py.path.local(genscript)
+ genscript.write(script)
+ tw.line("generated pytest standalone script: %s" % genscript,
+ bold=True)
+ return 0
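+ # Illustrative invocation: `pytest --genscript=runtests.py` writes a
+ # self-contained runtests.py, printing the deprecation warning above.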
+
+
+def pytest_namespace():
+ return {'freeze_includes': freeze_includes}
+
+
+def freeze_includes():
+ """
+ Returns a list of module names used by py.test that should be
+ included by cx_freeze.
+ """
+ result = list(_iter_all_modules(py))
+ result += list(_iter_all_modules(_pytest))
+ return result
+
+
+def _iter_all_modules(package, prefix=''):
+ """
+ Iterates over the names of all modules that can be found in the given
+ package, recursively.
+
+ Example:
+ _iter_all_modules(_pytest) ->
+ ['_pytest.assertion.newinterpret',
+ '_pytest.capture',
+ '_pytest.core',
+ ...
+ ]
+ """
+ if type(package) is not str:
+ path, prefix = package.__path__[0], package.__name__ + '.'
+ else:
+ path = package
+ for _, name, is_package in pkgutil.iter_modules([path]):
+ if is_package:
+ for m in _iter_all_modules(os.path.join(path, name), prefix=name + '.'):
+ yield prefix + m
+ else:
+ yield prefix + name
diff --git a/python/pytest/_pytest/helpconfig.py b/python/pytest/_pytest/helpconfig.py
new file mode 100644
index 000000000..1df0c56ac
--- /dev/null
+++ b/python/pytest/_pytest/helpconfig.py
@@ -0,0 +1,139 @@
+""" version info, help messages, tracing configuration. """
+import py
+import pytest
+import os, sys
+
+def pytest_addoption(parser):
+ group = parser.getgroup('debugconfig')
+ group.addoption('--version', action="store_true",
+ help="display pytest lib version and import information.")
+ group._addoption("-h", "--help", action="store_true", dest="help",
+ help="show help message and configuration info")
+ group._addoption('-p', action="append", dest="plugins", default = [],
+ metavar="name",
+ help="early-load given plugin (multi-allowed). "
+ "To avoid loading of plugins, use the `no:` prefix, e.g. "
+ "`no:doctest`.")
+ group.addoption('--traceconfig', '--trace-config',
+ action="store_true", default=False,
+ help="trace considerations of conftest.py files."),
+ group.addoption('--debug',
+ action="store_true", dest="debug", default=False,
+ help="store internal tracing debug information in 'pytestdebug.log'.")
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_cmdline_parse():
+ outcome = yield
+ config = outcome.get_result()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ debugfile = open(path, 'w')
+ debugfile.write("versions pytest-%s, py-%s, "
+ "python-%s\ncwd=%s\nargs=%s\n\n" %(
+ pytest.__version__, py.__version__,
+ ".".join(map(str, sys.version_info)),
+ os.getcwd(), config._origargs))
+ config.trace.root.setwriter(debugfile.write)
+ undo_tracing = config.pluginmanager.enable_tracing()
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+ def unset_tracing():
+ debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" %
+ debugfile.name)
+ config.trace.root.setwriter(None)
+ undo_tracing()
+ config.add_cleanup(unset_tracing)
+
+def pytest_cmdline_main(config):
+ if config.option.version:
+ p = py.path.local(pytest.__file__)
+ sys.stderr.write("This is pytest version %s, imported from %s\n" %
+ (pytest.__version__, p))
+ plugininfo = getpluginversioninfo(config)
+ if plugininfo:
+ for line in plugininfo:
+ sys.stderr.write(line + "\n")
+ return 0
+ elif config.option.help:
+ config._do_configure()
+ showhelp(config)
+ config._ensure_unconfigure()
+ return 0
+
+def showhelp(config):
+ reporter = config.pluginmanager.get_plugin('terminalreporter')
+ tw = reporter._tw
+ tw.write(config._parser.optparser.format_help())
+ tw.line()
+ tw.line()
+ #tw.sep( "=", "config file settings")
+ tw.line("[pytest] ini-options in the next "
+ "pytest.ini|tox.ini|setup.cfg file:")
+ tw.line()
+
+ for name in config._parser._ininames:
+ help, type, default = config._parser._inidict[name]
+ if type is None:
+ type = "string"
+ spec = "%s (%s)" % (name, type)
+ line = " %-24s %s" %(spec, help)
+ tw.line(line[:tw.fullwidth])
+
+ tw.line()
+ tw.line("environment variables:")
+ vars = [
+ ("PYTEST_ADDOPTS", "extra command line options"),
+ ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
+ ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals")
+ ]
+ for name, help in vars:
+ tw.line(" %-24s %s" % (name, help))
+ tw.line()
+ tw.line()
+
+ tw.line("to see available markers type: py.test --markers")
+ tw.line("to see available fixtures type: py.test --fixtures")
+ tw.line("(shown according to specified file_or_dir or current dir "
+ "if not specified)")
+
+ for warningreport in reporter.stats.get('warnings', []):
+ tw.line("warning : " + warningreport.message, red=True)
+ return
+
+
+conftest_options = [
+ ('pytest_plugins', 'list of plugin names to load'),
+]
+
+def getpluginversioninfo(config):
+ lines = []
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ lines.append("setuptools registered plugins:")
+ for plugin, dist in plugininfo:
+ loc = getattr(plugin, '__file__', repr(plugin))
+ content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
+ lines.append(" " + content)
+ return lines
+
+def pytest_report_header(config):
+ lines = []
+ if config.option.debug or config.option.traceconfig:
+ lines.append("using: pytest-%s pylib-%s" %
+ (pytest.__version__,py.__version__))
+
+ verinfo = getpluginversioninfo(config)
+ if verinfo:
+ lines.extend(verinfo)
+
+ if config.option.traceconfig:
+ lines.append("active plugins:")
+ items = config.pluginmanager.list_name_plugin()
+ for name, plugin in items:
+ if hasattr(plugin, '__file__'):
+ r = plugin.__file__
+ else:
+ r = repr(plugin)
+ lines.append(" %-20s: %s" %(name, r))
+ return lines
diff --git a/python/pytest/_pytest/hookspec.py b/python/pytest/_pytest/hookspec.py
new file mode 100644
index 000000000..60e9b47d2
--- /dev/null
+++ b/python/pytest/_pytest/hookspec.py
@@ -0,0 +1,295 @@
+""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
+
+from _pytest._pluggy import HookspecMarker
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager):
+ """called at plugin registration time to allow adding new hooks via a call to
+ pluginmanager.add_hookspecs(module_or_class, prefix)."""
+
+
+@hookspec(historic=True)
+def pytest_namespace():
+ """return dict of name->object to be made globally available in
+ the pytest namespace. This hook is called at plugin registration
+ time.
+ """
+
+@hookspec(historic=True)
+def pytest_plugin_registered(plugin, manager):
+ """ a new pytest plugin got registered. """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser):
+ """register argparse-style options and ini-style config values,
+ called once at the beginning of a test run.
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how py.test
+ :ref:`discovers plugins during startup <pluginorder>`.
+
+ :arg parser: To add command line options, call
+ :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.
+ To add ini-file values call :py:func:`parser.addini(...)
+ <_pytest.config.Parser.addini>`.
+
+ Options can later be accessed through the
+ :py:class:`config <_pytest.config.Config>` object, respectively:
+
+ - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to
+ retrieve the value of a command line option.
+
+ - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve
+ a value read from an ini-style file.
+
+ The config object is passed around on many internal objects via the ``.config``
+ attribute or can be retrieved as the ``pytestconfig`` fixture or accessed
+ via (deprecated) ``pytest.config``.
+ """
+
+@hookspec(historic=True)
+def pytest_configure(config):
+ """ called after command line options have been parsed
+ and all plugins and initial conftest files been loaded.
+ This hook is called for every plugin.
+ """
+
+# -------------------------------------------------------------------------
+# Bootstrapping hooks called for plugins registered early enough:
+# internal and 3rd party plugins as well as directly
+# discoverable conftest.py local plugins.
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_cmdline_parse(pluginmanager, args):
+ """return initialized config object, parsing the specified args. """
+
+def pytest_cmdline_preparse(config, args):
+ """(deprecated) modify command line arguments before option parsing. """
+
+@hookspec(firstresult=True)
+def pytest_cmdline_main(config):
+ """ called for performing the main command line action. The default
+ implementation will invoke the configure hooks and runtest_mainloop. """
+
+def pytest_load_initial_conftests(early_config, parser, args):
+ """ implements the loading of initial conftest files ahead
+ of command line option parsing. """
+
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_collection(session):
+ """ perform the collection protocol for the given session. """
+
+def pytest_collection_modifyitems(session, config, items):
+ """ called after collection has been performed, may filter or re-order
+ the items in-place."""
+
+def pytest_collection_finish(session):
+ """ called after collection has been performed and modified. """
+
+@hookspec(firstresult=True)
+def pytest_ignore_collect(path, config):
+ """ return True to prevent considering this path for collection.
+ This hook is consulted for all files and directories prior to calling
+ more specific hooks.
+ """
+
+@hookspec(firstresult=True)
+def pytest_collect_directory(path, parent):
+ """ called before traversing a directory for collection files. """
+
+def pytest_collect_file(path, parent):
+ """ return collection Node or None for the given path. Any new node
+ needs to have the specified ``parent`` as a parent."""
+
+# logging hooks for collection
+def pytest_collectstart(collector):
+ """ collector starts collecting. """
+
+def pytest_itemcollected(item):
+ """ we just collected a test item. """
+
+def pytest_collectreport(report):
+ """ collector finished collecting. """
+
+def pytest_deselected(items):
+ """ called for test items deselected by keyword. """
+
+@hookspec(firstresult=True)
+def pytest_make_collect_report(collector):
+ """ perform ``collector.collect()`` and return a CollectReport. """
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makemodule(path, parent):
+ """ return a Module collector or None for the given path.
+ This hook will be called for each matching test module path.
+ The pytest_collect_file hook needs to be used if you want to
+ create test modules for files that do not match as a test module.
+ """
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ """ return custom item/collector for a python object in a module, or None. """
+
+@hookspec(firstresult=True)
+def pytest_pyfunc_call(pyfuncitem):
+ """ call underlying test function. """
+
+def pytest_generate_tests(metafunc):
+ """ generate (multiple) parametrized calls to a test function."""
+
+# -------------------------------------------------------------------------
+# generic runtest related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_runtestloop(session):
+ """ called for performing the main runtest loop
+ (after collection finished). """
+
+def pytest_itemstart(item, node):
+ """ (deprecated, use pytest_runtest_logstart). """
+
+@hookspec(firstresult=True)
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nextitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
+
+ :return boolean: True if no further hook implementations should be invoked.
+ """
+
+def pytest_runtest_logstart(nodeid, location):
+ """ signal the start of running a single test item. """
+
+def pytest_runtest_setup(item):
+ """ called before ``pytest_runtest_call(item)``. """
+
+def pytest_runtest_call(item):
+ """ called to execute the test ``item``. """
+
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nextitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(item, call):
+ """ return a :py:class:`_pytest.runner.TestReport` object
+ for the given :py:class:`pytest.Item` and
+ :py:class:`_pytest.runner.CallInfo`.
+ """
+
+def pytest_runtest_logreport(report):
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+def pytest_sessionstart(session):
+ """ before session.main() is called. """
+
+def pytest_sessionfinish(session, exitstatus):
+ """ whole test run finishes. """
+
+def pytest_unconfigure(config):
+ """ called before test process is exited. """
+
+
+# -------------------------------------------------------------------------
+# hooks for customising the assert methods
+# -------------------------------------------------------------------------
+
+def pytest_assertrepr_compare(config, op, left, right):
+ """return explanation for comparisons in failing assert expressions.
+
+ Return None for no custom explanation, otherwise return a list
+ of strings. The strings will be joined by newlines but any newlines
+ *in* a string will be escaped. Note that all but the first line will
+ be indented slightly; the intention is for the first line to be a summary.
+ """
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from _pytest_terminal)
+# -------------------------------------------------------------------------
+
+def pytest_report_header(config, startdir):
+ """ return a string to be displayed as header info for terminal reporting."""
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(report):
+ """ return result-category, shortletter and verbose word for reporting."""
+
+def pytest_terminal_summary(terminalreporter):
+ """ add additional section in terminal summary reporting. """
+
+
+@hookspec(historic=True)
+def pytest_logwarning(message, code, nodeid, fslocation):
+ """ process a warning specified by a message, a code string,
+ a nodeid and fslocation (both of which may be None
+ if the warning is not tied to a particular node/location)."""
+
+# -------------------------------------------------------------------------
+# doctest hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_doctest_prepare_content(content):
+ """ return processed content for a given doctest"""
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+def pytest_internalerror(excrepr, excinfo):
+ """ called for internal errors. """
+
+def pytest_keyboard_interrupt(excinfo):
+ """ called for keyboard interrupt. """
+
+def pytest_exception_interact(node, call, report):
+ """called when an exception was raised which can potentially be
+ interactively handled.
+
+ This hook is only called if an exception was raised
+ that is not an internal exception like ``skip.Exception``.
+ """
+
+def pytest_enter_pdb(config):
+ """ called upon pdb.set_trace(), can be used by plugins to take special
+ action just before the python debugger enters in interactive mode.
+
+ :arg config: pytest config object
+ :type config: _pytest.config.Config
+ """
diff --git a/python/pytest/_pytest/junitxml.py b/python/pytest/_pytest/junitxml.py
new file mode 100644
index 000000000..f4de1343e
--- /dev/null
+++ b/python/pytest/_pytest/junitxml.py
@@ -0,0 +1,387 @@
+"""
+report test results in JUnit-XML format,
+for use with Jenkins and build integration servers.
+
+Based on initial code from Ross Lawley.
+"""
+# Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
+# src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+
+import py
+import os
+import re
+import sys
+import time
+import pytest
+
+# Python 2.X and 3.X compatibility
+if sys.version_info[0] < 3:
+ from codecs import open
+else:
+ unichr = chr
+ unicode = str
+ long = int
+
+
+class Junit(py.xml.Namespace):
+ pass
+
+# We need to get the subset of the invalid unicode ranges according to
+# XML 1.0 which are valid in this python build. Hence we calculate
+# this dynamically instead of hardcoding it. The spec range of valid
+# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
+# | [#x10000-#x10FFFF]
+_legal_chars = (0x09, 0x0A, 0x0D)
+_legal_ranges = (
+ (0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [
+ unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges if low < sys.maxunicode
+]
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') % unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+
+_py_ext_re = re.compile(r"\.py$")
+
+
+def bin_xml_escape(arg):
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode('#x%02X') % i
+ else:
+ return unicode('#x%04X') % i
+
+ return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))
+
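+# Sketch of the intended behaviour (assumed example values): characters
+# outside the legal XML ranges are replaced by a visible escape, e.g.
+#     bin_xml_escape(u"ok\x01")  ->  raw markup equal to u"ok#x01"
+#     bin_xml_escape(u"a<b")     ->  raw markup equal to u"a&lt;b"
+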
+
+class _NodeReporter(object):
+ def __init__(self, nodeid, xml):
+
+ self.id = nodeid
+ self.xml = xml
+ self.add_stats = self.xml.add_stats
+ self.duration = 0
+ self.properties = []
+ self.nodes = []
+ self.testcase = None
+ self.attrs = {}
+
+ def append(self, node):
+ self.xml.add_stats(type(node).__name__)
+ self.nodes.append(node)
+
+ def add_property(self, name, value):
+ self.properties.append((str(name), bin_xml_escape(value)))
+
+ def make_properties_node(self):
+ """Return a Junit node containing custom properties, if any.
+ """
+ if self.properties:
+ return Junit.properties([
+ Junit.property(name=name, value=value)
+ for name, value in self.properties
+ ])
+ return ''
+
+ def record_testreport(self, testreport):
+ assert not self.testcase
+ names = mangle_test_address(testreport.nodeid)
+ classnames = names[:-1]
+ if self.xml.prefix:
+ classnames.insert(0, self.xml.prefix)
+ attrs = {
+ "classname": ".".join(classnames),
+ "name": bin_xml_escape(names[-1]),
+ "file": testreport.location[0],
+ }
+ if testreport.location[1] is not None:
+ attrs["line"] = testreport.location[1]
+ self.attrs = attrs
+
+ def to_xml(self):
+ testcase = Junit.testcase(time=self.duration, **self.attrs)
+ testcase.append(self.make_properties_node())
+ for node in self.nodes:
+ testcase.append(node)
+ return testcase
+
+ def _add_simple(self, kind, message, data=None):
+ data = bin_xml_escape(data)
+ node = kind(data, message=message)
+ self.append(node)
+
+ def _write_captured_output(self, report):
+ for capname in ('out', 'err'):
+ allcontent = ""
+ for name, content in report.get_sections("Captured std%s" %
+ capname):
+ allcontent += content
+ if allcontent:
+ tag = getattr(Junit, 'system-' + capname)
+ self.append(tag(bin_xml_escape(allcontent)))
+
+ def append_pass(self, report):
+ self.add_stats('passed')
+ self._write_captured_output(report)
+
+ def append_failure(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ if hasattr(report, "wasxfail"):
+ self._add_simple(
+ Junit.skipped,
+ "xfail-marked test passes unexpectedly")
+ else:
+ if hasattr(report.longrepr, "reprcrash"):
+ message = report.longrepr.reprcrash.message
+ elif isinstance(report.longrepr, (unicode, str)):
+ message = report.longrepr
+ else:
+ message = str(report.longrepr)
+ message = bin_xml_escape(message)
+ fail = Junit.failure(message=message)
+ fail.append(bin_xml_escape(report.longrepr))
+ self.append(fail)
+ self._write_captured_output(report)
+
+ def append_collect_error(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ self.append(Junit.error(bin_xml_escape(report.longrepr),
+ message="collection failure"))
+
+ def append_collect_skipped(self, report):
+ self._add_simple(
+ Junit.skipped, "collection skipped", report.longrepr)
+
+ def append_error(self, report):
+ self._add_simple(
+ Junit.error, "test setup failure", report.longrepr)
+ self._write_captured_output(report)
+
+ def append_skipped(self, report):
+ if hasattr(report, "wasxfail"):
+ self._add_simple(
+ Junit.skipped, "expected test failure", report.wasxfail
+ )
+ else:
+ filename, lineno, skipreason = report.longrepr
+ if skipreason.startswith("Skipped: "):
+ skipreason = bin_xml_escape(skipreason[9:])
+ self.append(
+ Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason),
+ type="pytest.skip",
+ message=skipreason))
+ self._write_captured_output(report)
+
+ def finalize(self):
+ data = self.to_xml().unicode(indent=0)
+ self.__dict__.clear()
+ self.to_xml = lambda: py.xml.raw(data)
+
+
+@pytest.fixture
+def record_xml_property(request):
+ """Fixture that adds extra xml properties to the tag for the calling test.
+ The fixture is callable with (name, value), with value being automatically
+ xml-encoded.
+ """
+ request.node.warn(
+ code='C3',
+ message='record_xml_property is an experimental feature',
+ )
+ xml = getattr(request.config, "_xml", None)
+ if xml is not None:
+ node_reporter = xml.node_reporter(request.node.nodeid)
+ return node_reporter.add_property
+ else:
+ def add_property_noop(name, value):
+ pass
+
+ return add_property_noop
+
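+# Usage sketch for the fixture above (hypothetical test module):
+#
+#     def test_function(record_xml_property):
+#         record_xml_property("example_key", 1)
+#
+# which adds <property name="example_key" value="1"/> under the test's
+# <testcase> element in the generated XML.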
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group.addoption(
+ '--junitxml', '--junit-xml',
+ action="store",
+ dest="xmlpath",
+ metavar="path",
+ default=None,
+ help="create junit-xml style report file at given path.")
+ group.addoption(
+ '--junitprefix', '--junit-prefix',
+ action="store",
+ metavar="str",
+ default=None,
+ help="prepend prefix to classnames in junit-xml output")
+
+
+def pytest_configure(config):
+ xmlpath = config.option.xmlpath
+ # prevent opening xmllog on slave nodes (xdist)
+ if xmlpath and not hasattr(config, 'slaveinput'):
+ config._xml = LogXML(xmlpath, config.option.junitprefix)
+ config.pluginmanager.register(config._xml)
+
+
+def pytest_unconfigure(config):
+ xml = getattr(config, '_xml', None)
+ if xml:
+ del config._xml
+ config.pluginmanager.unregister(xml)
+
+
+def mangle_test_address(address):
+ path, possible_open_bracket, params = address.partition('[')
+ names = path.split("::")
+ try:
+ names.remove('()')
+ except ValueError:
+ pass
+ # convert file path to dotted path
+ names[0] = names[0].replace("/", '.')
+ names[0] = _py_ext_re.sub("", names[0])
+ # put any params back
+ names[-1] += possible_open_bracket + params
+ return names
+
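+# For example (a sketch):
+#     mangle_test_address("tests/test_foo.py::TestClass::()::test_bar[1]")
+# returns ["tests.test_foo", "TestClass", "test_bar[1]"] -- the file path is
+# dotted, the "()" instance marker is dropped, and the params are re-attached.
+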
+
+class LogXML(object):
+ def __init__(self, logfile, prefix):
+ logfile = os.path.expanduser(os.path.expandvars(logfile))
+ self.logfile = os.path.normpath(os.path.abspath(logfile))
+ self.prefix = prefix
+ self.stats = dict.fromkeys([
+ 'error',
+ 'passed',
+ 'failure',
+ 'skipped',
+ ], 0)
+ self.node_reporters = {} # nodeid -> _NodeReporter
+ self.node_reporters_ordered = []
+
+ def finalize(self, report):
+ nodeid = getattr(report, 'nodeid', report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, 'node', None)
+ reporter = self.node_reporters.pop((nodeid, slavenode))
+ if reporter is not None:
+ reporter.finalize()
+
+ def node_reporter(self, report):
+ nodeid = getattr(report, 'nodeid', report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, 'node', None)
+
+ key = nodeid, slavenode
+
+ if key in self.node_reporters:
+            # TODO: breaks for --dist=each
+ return self.node_reporters[key]
+ reporter = _NodeReporter(nodeid, self)
+ self.node_reporters[key] = reporter
+ self.node_reporters_ordered.append(reporter)
+ return reporter
+
+ def add_stats(self, key):
+ if key in self.stats:
+ self.stats[key] += 1
+
+ def _opentestcase(self, report):
+ reporter = self.node_reporter(report)
+ reporter.record_testreport(report)
+ return reporter
+
+ def pytest_runtest_logreport(self, report):
+ """handle a setup/call/teardown report, generating the appropriate
+ xml tags as necessary.
+
+ note: due to plugins like xdist, this hook may be called in interlaced
+ order with reports from other nodes. for example:
+
+ usual call order:
+ -> setup node1
+ -> call node1
+ -> teardown node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+
+ possible call order in xdist:
+ -> setup node1
+ -> call node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+ -> teardown node1
+ """
+ if report.passed:
+ if report.when == "call": # ignore setup/teardown
+ reporter = self._opentestcase(report)
+ reporter.append_pass(report)
+ elif report.failed:
+ reporter = self._opentestcase(report)
+ if report.when == "call":
+ reporter.append_failure(report)
+ else:
+ reporter.append_error(report)
+ elif report.skipped:
+ reporter = self._opentestcase(report)
+ reporter.append_skipped(report)
+ self.update_testcase_duration(report)
+ if report.when == "teardown":
+ self.finalize(report)
+
+ def update_testcase_duration(self, report):
+ """accumulates total duration for nodeid from given report and updates
+ the Junit.testcase with the new total if already created.
+ """
+ reporter = self.node_reporter(report)
+ reporter.duration += getattr(report, 'duration', 0.0)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ reporter = self._opentestcase(report)
+ if report.failed:
+ reporter.append_collect_error(report)
+ else:
+ reporter.append_collect_skipped(report)
+
+ def pytest_internalerror(self, excrepr):
+ reporter = self.node_reporter('internal')
+ reporter.attrs.update(classname="pytest", name='internal')
+ reporter._add_simple(Junit.error, 'internal error', excrepr)
+
+ def pytest_sessionstart(self):
+ self.suite_start_time = time.time()
+
+ def pytest_sessionfinish(self):
+ dirname = os.path.dirname(os.path.abspath(self.logfile))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(self.logfile, 'w', encoding='utf-8')
+ suite_stop_time = time.time()
+ suite_time_delta = suite_stop_time - self.suite_start_time
+
+ numtests = self.stats['passed'] + self.stats['failure'] + self.stats['skipped']
+
+ logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+ logfile.write(Junit.testsuite(
+ [x.to_xml() for x in self.node_reporters_ordered],
+ name="pytest",
+ errors=self.stats['error'],
+ failures=self.stats['failure'],
+ skips=self.stats['skipped'],
+ tests=numtests,
+ time="%.3f" % suite_time_delta, ).unicode(indent=0))
+ logfile.close()
+
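+    # The generated file has roughly this shape (illustrative sketch):
+    #   <?xml version="1.0" encoding="utf-8"?>
+    #   <testsuite errors="0" failures="1" name="pytest" skips="0"
+    #              tests="3" time="0.123">
+    #     <testcase classname="test_foo" file="test_foo.py" name="test_bar"
+    #               time="0.002">...</testcase>
+    #   </testsuite>
+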
+ def pytest_terminal_summary(self, terminalreporter):
+ terminalreporter.write_sep("-",
+ "generated xml file: %s" % (self.logfile))
diff --git a/python/pytest/_pytest/main.py b/python/pytest/_pytest/main.py
new file mode 100644
index 000000000..8654d7af6
--- /dev/null
+++ b/python/pytest/_pytest/main.py
@@ -0,0 +1,744 @@
+""" core implementation of testing process: init, session, runtest loop. """
+import imp
+import os
+import re
+import sys
+
+import _pytest
+import _pytest._code
+import py
+import pytest
+try:
+ from collections import MutableMapping as MappingMixin
+except ImportError:
+ from UserDict import DictMixin as MappingMixin
+
+from _pytest.runner import collect_one_node
+
+tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
+
+# exitcodes for the command line
+EXIT_OK = 0
+EXIT_TESTSFAILED = 1
+EXIT_INTERRUPTED = 2
+EXIT_INTERNALERROR = 3
+EXIT_USAGEERROR = 4
+EXIT_NOTESTSCOLLECTED = 5
+
+name_re = re.compile(r"^[a-zA-Z_]\w*$")
+
+def pytest_addoption(parser):
+ parser.addini("norecursedirs", "directory patterns to avoid for recursion",
+ type="args", default=['.*', 'CVS', '_darcs', '{arch}', '*.egg'])
+ parser.addini("testpaths", "directories to search for tests when no files or directories are given in the command line.",
+ type="args", default=[])
+ #parser.addini("dirpatterns",
+ # "patterns specifying possible locations of test files",
+ # type="linelist", default=["**/test_*.txt",
+ # "**/test_*.py", "**/*_test.py"]
+ #)
+ group = parser.getgroup("general", "running and selection options")
+ group._addoption('-x', '--exitfirst', action="store_true", default=False,
+ dest="exitfirst",
+               help="exit instantly on first error or failed test.")
+ group._addoption('--maxfail', metavar="num",
+ action="store", type=int, dest="maxfail", default=0,
+ help="exit after first num failures or errors.")
+ group._addoption('--strict', action="store_true",
+ help="run pytest in strict mode, warnings become errors.")
+ group._addoption("-c", metavar="file", type=str, dest="inifilename",
+ help="load configuration from `file` instead of trying to locate one of the implicit configuration files.")
+
+ group = parser.getgroup("collect", "collection")
+ group.addoption('--collectonly', '--collect-only', action="store_true",
+        help="only collect tests, don't execute them.")
+ group.addoption('--pyargs', action="store_true",
+ help="try to interpret all arguments as python packages.")
+ group.addoption("--ignore", action="append", metavar="path",
+ help="ignore path during collection (multi-allowed).")
+ # when changing this to --conf-cut-dir, config.py Conftest.setinitial
+ # needs upgrading as well
+ group.addoption('--confcutdir', dest="confcutdir", default=None,
+ metavar="dir",
+ help="only load conftest.py's relative to specified dir.")
+ group.addoption('--noconftest', action="store_true",
+ dest="noconftest", default=False,
+ help="Don't load any conftest.py files.")
+
+ group = parser.getgroup("debugconfig",
+ "test session debugging and configuration")
+ group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
+ help="base temporary directory for this test run.")
+
+
+def pytest_namespace():
+ collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
+ return dict(collect=collect)
+
+def pytest_configure(config):
+    pytest.config = config # compatibility
+ if config.option.exitfirst:
+ config.option.maxfail = 1
+
+def wrap_session(config, doit):
+ """Skeleton command line program"""
+ session = Session(config)
+ session.exitstatus = EXIT_OK
+ initstate = 0
+ try:
+ try:
+ config._do_configure()
+ initstate = 1
+ config.hook.pytest_sessionstart(session=session)
+ initstate = 2
+ session.exitstatus = doit(config, session) or 0
+ except pytest.UsageError:
+ raise
+ except KeyboardInterrupt:
+ excinfo = _pytest._code.ExceptionInfo()
+ config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+ session.exitstatus = EXIT_INTERRUPTED
+ except:
+ excinfo = _pytest._code.ExceptionInfo()
+ config.notify_exception(excinfo, config.option)
+ session.exitstatus = EXIT_INTERNALERROR
+ if excinfo.errisinstance(SystemExit):
+ sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+
+ finally:
+ excinfo = None # Explicitly break reference cycle.
+ session.startdir.chdir()
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(
+ session=session,
+ exitstatus=session.exitstatus)
+ config._ensure_unconfigure()
+ return session.exitstatus
+
+def pytest_cmdline_main(config):
+ return wrap_session(config, _main)
+
+def _main(config, session):
+ """ default command line protocol for initialization, session,
+ running tests and reporting. """
+ config.hook.pytest_collection(session=session)
+ config.hook.pytest_runtestloop(session=session)
+
+ if session.testsfailed:
+ return EXIT_TESTSFAILED
+ elif session.testscollected == 0:
+ return EXIT_NOTESTSCOLLECTED
+
+def pytest_collection(session):
+ return session.perform_collect()
+
+def pytest_runtestloop(session):
+ if session.config.option.collectonly:
+ return True
+
+ def getnextitem(i):
+        # this is a function so that python2 does not keep
+        # sys.exc_info set when calling into a test;
+        # python2 keeps sys.exc_info until the frame is left
+ try:
+ return session.items[i+1]
+ except IndexError:
+ return None
+
+ for i, item in enumerate(session.items):
+ nextitem = getnextitem(i)
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
+ if session.shouldstop:
+ raise session.Interrupted(session.shouldstop)
+ return True
+
+def pytest_ignore_collect(path, config):
+ p = path.dirpath()
+ ignore_paths = config._getconftest_pathlist("collect_ignore", path=p)
+ ignore_paths = ignore_paths or []
+ excludeopt = config.getoption("ignore")
+ if excludeopt:
+ ignore_paths.extend([py.path.local(x) for x in excludeopt])
+ return path in ignore_paths
+
+class FSHookProxy:
+ def __init__(self, fspath, pm, remove_mods):
+ self.fspath = fspath
+ self.pm = pm
+ self.remove_mods = remove_mods
+
+ def __getattr__(self, name):
+ x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+ self.__dict__[name] = x
+ return x
+
+def compatproperty(name):
+ def fget(self):
+ # deprecated - use pytest.name
+ return getattr(pytest, name)
+
+ return property(fget)
+
+class NodeKeywords(MappingMixin):
+ def __init__(self, node):
+ self.node = node
+ self.parent = node.parent
+ self._markers = {node.name: True}
+
+ def __getitem__(self, key):
+ try:
+ return self._markers[key]
+ except KeyError:
+ if self.parent is None:
+ raise
+ return self.parent.keywords[key]
+
+ def __setitem__(self, key, value):
+ self._markers[key] = value
+
+ def __delitem__(self, key):
+ raise ValueError("cannot delete key in keywords dict")
+
+ def __iter__(self):
+ seen = set(self._markers)
+ if self.parent is not None:
+ seen.update(self.parent.keywords)
+ return iter(seen)
+
+ def __len__(self):
+        return len(self.keys())
+
+ def keys(self):
+ return list(self)
+
+ def __repr__(self):
+ return "<NodeKeywords for node %s>" % (self.node, )
+
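+# Lookup sketch (assumed example): keyword access first consults the node's
+# own markers and then walks up the parents, so for a typical test item
+#     item.keywords["test_foo"]   -> True   (the item's own name)
+#     item.keywords["TestClass"]  -> True   (resolved via parent.keywords)
+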
+
+class Node(object):
+    """ base class for Collector and Item, the components of the test
+    collection tree. Collector subclasses have children; Items are
+    terminal nodes."""
+
+ def __init__(self, name, parent=None, config=None, session=None):
+ #: a unique name within the scope of the parent node
+ self.name = name
+
+ #: the parent collector node.
+ self.parent = parent
+
+ #: the pytest config object
+ self.config = config or parent.config
+
+ #: the session this node is part of
+ self.session = session or parent.session
+
+ #: filesystem path where this node was collected from (can be None)
+ self.fspath = getattr(parent, 'fspath', None)
+
+ #: keywords/markers collected from all scopes
+ self.keywords = NodeKeywords(self)
+
+ #: allow adding of extra keywords to use for matching
+ self.extra_keyword_matches = set()
+
+ # used for storing artificial fixturedefs for direct parametrization
+ self._name2pseudofixturedef = {}
+
+ @property
+ def ihook(self):
+ """ fspath sensitive hook proxy used to call pytest hooks"""
+ return self.session.gethookproxy(self.fspath)
+
+ Module = compatproperty("Module")
+ Class = compatproperty("Class")
+ Instance = compatproperty("Instance")
+ Function = compatproperty("Function")
+ File = compatproperty("File")
+ Item = compatproperty("Item")
+
+ def _getcustomclass(self, name):
+ cls = getattr(self, name)
+ if cls != getattr(pytest, name):
+ py.log._apiwarn("2.0", "use of node.%s is deprecated, "
+ "use pytest_pycollect_makeitem(...) to create custom "
+ "collection nodes" % name)
+ return cls
+
+ def __repr__(self):
+ return "<%s %r>" %(self.__class__.__name__,
+ getattr(self, 'name', None))
+
+ def warn(self, code, message):
+ """ generate a warning with the given code and message for this
+ item. """
+ assert isinstance(code, str)
+ fslocation = getattr(self, "location", None)
+ if fslocation is None:
+ fslocation = getattr(self, "fspath", None)
+ else:
+ fslocation = "%s:%s" % fslocation[:2]
+
+ self.ihook.pytest_logwarning.call_historic(kwargs=dict(
+ code=code, message=message,
+ nodeid=self.nodeid, fslocation=fslocation))
+
+ # methods for ordering nodes
+ @property
+ def nodeid(self):
+ """ a ::-separated string denoting its collection tree address. """
+ try:
+ return self._nodeid
+ except AttributeError:
+ self._nodeid = x = self._makeid()
+ return x
+
+ def _makeid(self):
+ return self.parent.nodeid + "::" + self.name
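+
+    # nodeid sketch: a test function inside a class typically ends up with
+    # an id like "testing/test_config.py::TestConfig::()::test_basic",
+    # built by joining each parent's contribution with "::" (the "()" part
+    # denotes the class instance).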
+
+ def __hash__(self):
+ return hash(self.nodeid)
+
+ def setup(self):
+ pass
+
+ def teardown(self):
+ pass
+
+ def _memoizedcall(self, attrname, function):
+ exattrname = "_ex_" + attrname
+ failure = getattr(self, exattrname, None)
+ if failure is not None:
+ py.builtin._reraise(failure[0], failure[1], failure[2])
+ if hasattr(self, attrname):
+ return getattr(self, attrname)
+ try:
+ res = function()
+ except py.builtin._sysex:
+ raise
+ except:
+ failure = sys.exc_info()
+ setattr(self, exattrname, failure)
+ raise
+ setattr(self, attrname, res)
+ return res
+
+ def listchain(self):
+ """ return list of all parent collectors up to self,
+ starting from root of collection tree. """
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
+
+ def add_marker(self, marker):
+ """ dynamically add a marker object to the node.
+
+ ``marker`` can be a string or pytest.mark.* instance.
+ """
+ from _pytest.mark import MarkDecorator
+ if isinstance(marker, py.builtin._basestring):
+ marker = MarkDecorator(marker)
+ elif not isinstance(marker, MarkDecorator):
+            raise ValueError("marker %r is not a string or pytest.mark.* "
+                             "instance" % (marker,))
+ self.keywords[marker.name] = marker
+
+ def get_marker(self, name):
+ """ get a marker object from this node or None if
+ the node doesn't have a marker with that name. """
+ val = self.keywords.get(name, None)
+ if val is not None:
+ from _pytest.mark import MarkInfo, MarkDecorator
+ if isinstance(val, (MarkDecorator, MarkInfo)):
+ return val
+
+ def listextrakeywords(self):
+ """ Return a set of all extra keywords in self and any parents."""
+ extra_keywords = set()
+ item = self
+ for item in self.listchain():
+ extra_keywords.update(item.extra_keyword_matches)
+ return extra_keywords
+
+ def listnames(self):
+ return [x.name for x in self.listchain()]
+
+ def addfinalizer(self, fin):
+ """ register a function to be called when this node is finalized.
+
+ This method can only be called when this node is active
+ in a setup chain, for example during self.setup().
+ """
+ self.session._setupstate.addfinalizer(fin, self)
+
+ def getparent(self, cls):
+ """ get the next parent node (including ourself)
+ which is an instance of the given class"""
+ current = self
+ while current and not isinstance(current, cls):
+ current = current.parent
+ return current
+
+ def _prunetraceback(self, excinfo):
+ pass
+
+ def _repr_failure_py(self, excinfo, style=None):
+ fm = self.session._fixturemanager
+ if excinfo.errisinstance(fm.FixtureLookupError):
+ return excinfo.value.formatrepr()
+ tbfilter = True
+ if self.config.option.fulltrace:
+ style="long"
+ else:
+ self._prunetraceback(excinfo)
+ tbfilter = False # prunetraceback already does it
+ if style == "auto":
+ style = "long"
+ # XXX should excinfo.getrepr record all data and toterminal() process it?
+ if style is None:
+ if self.config.option.tbstyle == "short":
+ style = "short"
+ else:
+ style = "long"
+
+ return excinfo.getrepr(funcargs=True,
+ showlocals=self.config.option.showlocals,
+ style=style, tbfilter=tbfilter)
+
+ repr_failure = _repr_failure_py
+
+class Collector(Node):
+ """ Collector instances create children through collect()
+ and thus iteratively build a tree.
+ """
+
+ class CollectError(Exception):
+ """ an error during collection, contains a custom message. """
+
+ def collect(self):
+ """ returns a list of children (items and collectors)
+ for this collection node.
+ """
+ raise NotImplementedError("abstract")
+
+ def repr_failure(self, excinfo):
+ """ represent a collection failure. """
+ if excinfo.errisinstance(self.CollectError):
+ exc = excinfo.value
+ return str(exc.args[0])
+ return self._repr_failure_py(excinfo, style="short")
+
+ def _memocollect(self):
+ """ internal helper method to cache results of calling collect(). """
+ return self._memoizedcall('_collected', lambda: list(self.collect()))
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, 'fspath'):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=self.fspath)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
+ excinfo.traceback = ntraceback.filter()
+
+class FSCollector(Collector):
+ def __init__(self, fspath, parent=None, config=None, session=None):
+ fspath = py.path.local(fspath) # xxx only for test_resultlog.py?
+ name = fspath.basename
+ if parent is not None:
+ rel = fspath.relto(parent.fspath)
+ if rel:
+ name = rel
+ name = name.replace(os.sep, "/")
+ super(FSCollector, self).__init__(name, parent, config, session)
+ self.fspath = fspath
+
+ def _makeid(self):
+ relpath = self.fspath.relto(self.config.rootdir)
+ if os.sep != "/":
+ relpath = relpath.replace(os.sep, "/")
+ return relpath
+
+class File(FSCollector):
+ """ base class for collecting tests from a file. """
+
+class Item(Node):
+ """ a basic test invocation item. Note that for a single function
+ there might be multiple test invocation items.
+ """
+ nextitem = None
+
+ def __init__(self, name, parent=None, config=None, session=None):
+ super(Item, self).__init__(name, parent, config, session)
+ self._report_sections = []
+
+ def add_report_section(self, when, key, content):
+ if content:
+ self._report_sections.append((when, key, content))
+
+ def reportinfo(self):
+ return self.fspath, None, ""
+
+ @property
+ def location(self):
+ try:
+ return self._location
+ except AttributeError:
+ location = self.reportinfo()
+ # bestrelpath is a quite slow function
+ cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
+ try:
+ fspath = cache[location[0]]
+ except KeyError:
+ fspath = self.session.fspath.bestrelpath(location[0])
+ cache[location[0]] = fspath
+ location = (fspath, location[1], str(location[2]))
+ self._location = location
+ return location
+
+class NoMatch(Exception):
+    """ raised if matching cannot locate matching names. """
+
+class Interrupted(KeyboardInterrupt):
+ """ signals an interrupted test run. """
+ __module__ = 'builtins' # for py3
+
+class Session(FSCollector):
+ Interrupted = Interrupted
+
+ def __init__(self, config):
+ FSCollector.__init__(self, config.rootdir, parent=None,
+ config=config, session=self)
+ self._fs2hookproxy = {}
+ self.testsfailed = 0
+ self.testscollected = 0
+ self.shouldstop = False
+ self.trace = config.trace.root.get("collection")
+ self._norecursepatterns = config.getini("norecursedirs")
+ self.startdir = py.path.local()
+ self.config.pluginmanager.register(self, name="session")
+
+ def _makeid(self):
+ return ""
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_collectstart(self):
+ if self.shouldstop:
+ raise self.Interrupted(self.shouldstop)
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_runtest_logreport(self, report):
+ if report.failed and not hasattr(report, 'wasxfail'):
+ self.testsfailed += 1
+ maxfail = self.config.getvalue("maxfail")
+ if maxfail and self.testsfailed >= maxfail:
+ self.shouldstop = "stopping after %d failures" % (
+ self.testsfailed)
+ pytest_collectreport = pytest_runtest_logreport
+
+ def isinitpath(self, path):
+ return path in self._initialpaths
+
+ def gethookproxy(self, fspath):
+ try:
+ return self._fs2hookproxy[fspath]
+ except KeyError:
+ # check if we have the common case of running
+            # hooks with all conftest.py files
+ pm = self.config.pluginmanager
+ my_conftestmodules = pm._getconftestmodules(fspath)
+ remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+ if remove_mods:
+ # one or more conftests are not in use at this fspath
+ proxy = FSHookProxy(fspath, pm, remove_mods)
+ else:
+                # all plugins are active for this fspath
+ proxy = self.config.hook
+
+ self._fs2hookproxy[fspath] = proxy
+ return proxy
+
+ def perform_collect(self, args=None, genitems=True):
+ hook = self.config.hook
+ try:
+ items = self._perform_collect(args, genitems)
+ hook.pytest_collection_modifyitems(session=self,
+ config=self.config, items=items)
+ finally:
+ hook.pytest_collection_finish(session=self)
+ self.testscollected = len(items)
+ return items
+
+ def _perform_collect(self, args, genitems):
+ if args is None:
+ args = self.config.args
+ self.trace("perform_collect", self, args)
+ self.trace.root.indent += 1
+ self._notfound = []
+ self._initialpaths = set()
+ self._initialparts = []
+ self.items = items = []
+ for arg in args:
+ parts = self._parsearg(arg)
+ self._initialparts.append(parts)
+ self._initialpaths.add(parts[0])
+ rep = collect_one_node(self)
+ self.ihook.pytest_collectreport(report=rep)
+ self.trace.root.indent -= 1
+ if self._notfound:
+ errors = []
+ for arg, exc in self._notfound:
+ line = "(no name %r in any of %r)" % (arg, exc.args[0])
+ errors.append("not found: %s\n%s" % (arg, line))
+ #XXX: test this
+ raise pytest.UsageError(*errors)
+ if not genitems:
+ return rep.result
+ else:
+ if rep.passed:
+ for node in rep.result:
+ self.items.extend(self.genitems(node))
+ return items
+
+ def collect(self):
+ for parts in self._initialparts:
+ arg = "::".join(map(str, parts))
+ self.trace("processing argument", arg)
+ self.trace.root.indent += 1
+ try:
+ for x in self._collect(arg):
+ yield x
+ except NoMatch:
+ # we are inside a make_report hook so
+ # we cannot directly pass through the exception
+ self._notfound.append((arg, sys.exc_info()[1]))
+
+ self.trace.root.indent -= 1
+
+ def _collect(self, arg):
+ names = self._parsearg(arg)
+ path = names.pop(0)
+ if path.check(dir=1):
+ assert not names, "invalid arg %r" %(arg,)
+ for path in path.visit(fil=lambda x: x.check(file=1),
+ rec=self._recurse, bf=True, sort=True):
+ for x in self._collectfile(path):
+ yield x
+ else:
+ assert path.check(file=1)
+ for x in self.matchnodes(self._collectfile(path), names):
+ yield x
+
+ def _collectfile(self, path):
+ ihook = self.gethookproxy(path)
+ if not self.isinitpath(path):
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return ()
+ return ihook.pytest_collect_file(path=path, parent=self)
+
+ def _recurse(self, path):
+ ihook = self.gethookproxy(path.dirpath())
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return
+ for pat in self._norecursepatterns:
+ if path.check(fnmatch=pat):
+ return False
+ ihook = self.gethookproxy(path)
+ ihook.pytest_collect_directory(path=path, parent=self)
+ return True
+
+ def _tryconvertpyarg(self, x):
+ mod = None
+ path = [os.path.abspath('.')] + sys.path
+ for name in x.split('.'):
+ # ignore anything that's not a proper name here
+ # else something like --pyargs will mess up '.'
+ # since imp.find_module will actually sometimes work for it
+ # but it's supposed to be considered a filesystem path
+ # not a package
+ if name_re.match(name) is None:
+ return x
+ try:
+ fd, mod, type_ = imp.find_module(name, path)
+ except ImportError:
+ return x
+ else:
+ if fd is not None:
+ fd.close()
+
+ if type_[2] != imp.PKG_DIRECTORY:
+ path = [os.path.dirname(mod)]
+ else:
+ path = [mod]
+ return mod
+
+ def _parsearg(self, arg):
+        """ return a [fspath, name1, name2, ...] list after checking that the file exists. """
+ arg = str(arg)
+ if self.config.option.pyargs:
+ arg = self._tryconvertpyarg(arg)
+ parts = str(arg).split("::")
+ relpath = parts[0].replace("/", os.sep)
+ path = self.config.invocation_dir.join(relpath, abs=True)
+ if not path.check():
+ if self.config.option.pyargs:
+ msg = "file or package not found: "
+ else:
+ msg = "file not found: "
+ raise pytest.UsageError(msg + arg)
+ parts[0] = path
+ return parts
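+
+    # Sketch (assumed example): _parsearg("tests/test_x.py::TestA::test_b")
+    # returns [local("tests/test_x.py"), "TestA", "test_b"], i.e. the first
+    # element is resolved to a py.path.local object.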
+
+ def matchnodes(self, matching, names):
+ self.trace("matchnodes", matching, names)
+ self.trace.root.indent += 1
+ nodes = self._matchnodes(matching, names)
+ num = len(nodes)
+ self.trace("matchnodes finished -> ", num, "nodes")
+ self.trace.root.indent -= 1
+ if num == 0:
+ raise NoMatch(matching, names[:1])
+ return nodes
+
+ def _matchnodes(self, matching, names):
+ if not matching or not names:
+ return matching
+ name = names[0]
+ assert name
+ nextnames = names[1:]
+ resultnodes = []
+ for node in matching:
+ if isinstance(node, pytest.Item):
+ if not names:
+ resultnodes.append(node)
+ continue
+ assert isinstance(node, pytest.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ has_matched = False
+ for x in rep.result:
+ # TODO: remove parametrized workaround once collection structure contains parametrization
+ if x.name == name or x.name.split("[")[0] == name:
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ has_matched = True
+ # XXX accept IDs that don't have "()" for class instances
+ if not has_matched and len(rep.result) == 1 and x.name == "()":
+ nextnames.insert(0, name)
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ node.ihook.pytest_collectreport(report=rep)
+ return resultnodes
+
+ def genitems(self, node):
+ self.trace("genitems", node)
+ if isinstance(node, pytest.Item):
+ node.ihook.pytest_itemcollected(item=node)
+ yield node
+ else:
+ assert isinstance(node, pytest.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ for subnode in rep.result:
+ for x in self.genitems(subnode):
+ yield x
+ node.ihook.pytest_collectreport(report=rep)
diff --git a/python/pytest/_pytest/mark.py b/python/pytest/_pytest/mark.py
new file mode 100644
index 000000000..d8b60def3
--- /dev/null
+++ b/python/pytest/_pytest/mark.py
@@ -0,0 +1,311 @@
+""" generic mechanism for marking and selecting python functions. """
+import inspect
+
+
+class MarkerError(Exception):
+
+ """Error in use of a pytest marker/attribute."""
+
+
+def pytest_namespace():
+ return {'mark': MarkGenerator()}
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ '-k',
+ action="store", dest="keyword", default='', metavar="EXPRESSION",
+ help="only run tests which match the given substring expression. "
+ "An expression is a python evaluatable expression "
+ "where all names are substring-matched against test names "
+ "and their parent classes. Example: -k 'test_method or test "
+ "other' matches all test functions and classes whose name "
+ "contains 'test_method' or 'test_other'. "
+ "Additionally keywords are matched to classes and functions "
+ "containing extra names in their 'extra_keyword_matches' set, "
+ "as well as functions which have names assigned directly to them."
+ )
+
+ group._addoption(
+ "-m",
+ action="store", dest="markexpr", default="", metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'."
+ )
+
+ group.addoption(
+ "--markers", action="store_true",
+ help="show markers (builtin, plugin and per-project ones)."
+ )
+
+ parser.addini("markers", "markers for test functions", 'linelist')
+
+
+def pytest_cmdline_main(config):
+ import _pytest.config
+ if config.option.markers:
+ config._do_configure()
+ tw = _pytest.config.create_terminal_writer(config)
+ for line in config.getini("markers"):
+ name, rest = line.split(":", 1)
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config._ensure_unconfigure()
+ return 0
+pytest_cmdline_main.tryfirst = True
+
+
+def pytest_collection_modifyitems(items, config):
+ keywordexpr = config.option.keyword.lstrip()
+ matchexpr = config.option.markexpr
+ if not keywordexpr and not matchexpr:
+ return
+ # pytest used to allow "-" for negating
+ # but today we just allow "-" at the beginning, use "not" instead
+    # we probably remove "-" altogether soon
+ if keywordexpr.startswith("-"):
+ keywordexpr = "not " + keywordexpr[1:]
+ selectuntil = False
+ if keywordexpr[-1:] == ":":
+ selectuntil = True
+ keywordexpr = keywordexpr[:-1]
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if keywordexpr and not matchkeyword(colitem, keywordexpr):
+ deselected.append(colitem)
+ else:
+ if selectuntil:
+ keywordexpr = None
+ if matchexpr:
+ if not matchmark(colitem, matchexpr):
+ deselected.append(colitem)
+ continue
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+class MarkMapping:
+ """Provides a local mapping for markers where item access
+ resolves to True if the marker is present. """
+ def __init__(self, keywords):
+ mymarks = set()
+ for key, value in keywords.items():
+ if isinstance(value, MarkInfo) or isinstance(value, MarkDecorator):
+ mymarks.add(key)
+ self._mymarks = mymarks
+
+ def __getitem__(self, name):
+ return name in self._mymarks
+
+
+class KeywordMapping:
+ """Provides a local mapping for keywords.
+ Given a list of names, map any substring of one of these names to True.
+ """
+ def __init__(self, names):
+ self._names = names
+
+ def __getitem__(self, subname):
+ for name in self._names:
+ if subname in name:
+ return True
+ return False
+
+
+def matchmark(colitem, markexpr):
+ """Tries to match on any marker names, attached to the given colitem."""
+ return eval(markexpr, {}, MarkMapping(colitem.keywords))
+
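+# Sketch of -m evaluation (assumed example): for an item marked with
+# @pytest.mark.slow, matchmark(item, "slow and not network") evaluates the
+# expression against MarkMapping, i.e. True and not False -> True.
+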
+
+def matchkeyword(colitem, keywordexpr):
+ """Tries to match given keyword expression to given collector item.
+
+ Will match on the name of colitem, including the names of its parents.
+ Only matches names of items which are either a :class:`Class` or a
+ :class:`Function`.
+ Additionally, matches on names in the 'extra_keyword_matches' set of
+ any item, as well as names directly assigned to test functions.
+ """
+ mapped_names = set()
+
+ # Add the names of the current item and any parent items
+ import pytest
+ for item in colitem.listchain():
+ if not isinstance(item, pytest.Instance):
+ mapped_names.add(item.name)
+
+ # Add the names added as extra keywords to current or parent items
+ for name in colitem.listextrakeywords():
+ mapped_names.add(name)
+
+ # Add the names attached to the current function through direct assignment
+ if hasattr(colitem, 'function'):
+ for name in colitem.function.__dict__:
+ mapped_names.add(name)
+
+ mapping = KeywordMapping(mapped_names)
+ if " " not in keywordexpr:
+ # special case to allow for simple "-k pass" and "-k 1.3"
+ return mapping[keywordexpr]
+ elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
+ return not mapping[keywordexpr[4:]]
+ return eval(keywordexpr, {}, mapping)
+
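+# Sketch of -k evaluation (assumed example): with mapped names
+# {"test_one", "TestUi"}, matchkeyword(item, "one or http") is True because
+# KeywordMapping substring-matches "one" against "test_one".
+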
+
+def pytest_configure(config):
+ import pytest
+ if config.option.strict:
+ pytest.mark._config = config
+
+
+class MarkGenerator:
+ """ Factory for :class:`MarkDecorator` objects - exposed as
+ a ``pytest.mark`` singleton instance. Example::
+
+ import pytest
+ @pytest.mark.slowtest
+ def test_function():
+ pass
+
+ will set a 'slowtest' :class:`MarkInfo` object
+ on the ``test_function`` object. """
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError("Marker name must NOT start with underscore")
+ if hasattr(self, '_config'):
+ self._check(name)
+ return MarkDecorator(name)
+
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+        self._markers = markers = set()
+        for line in self._config.getini("markers"):
+            beginning = line.split(":", 1)
+            x = beginning[0].split("(", 1)[0]
+            markers.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
+def istestfunc(func):
+ return hasattr(func, "__call__") and \
+ getattr(func, "__name__", "<lambda>") != "<lambda>"
+
+class MarkDecorator:
+ """ A decorator for test functions and test classes. When applied
+ it will create :class:`MarkInfo` objects which may be
+ :ref:`retrieved by hooks as item keywords <excontrolskip>`.
+ MarkDecorator instances are often created like this::
+
+ mark1 = pytest.mark.NAME # simple MarkDecorator
+ mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator
+
+ and can then be applied as decorators to test functions::
+
+ @mark2
+ def test_function():
+ pass
+
+ When a MarkDecorator instance is called it does the following:
+ 1. If called with a single class as its only positional argument and no
+ additional keyword arguments, it attaches itself to the class so it
+ gets applied automatically to all test cases found in that class.
+ 2. If called with a single function as its only positional argument and
+ no additional keyword arguments, it attaches a MarkInfo object to the
+ function, containing all the arguments already stored internally in
+ the MarkDecorator.
+ 3. When called in any other case, it performs a 'fake construction' call,
+ i.e. it returns a new MarkDecorator instance with the original
+ MarkDecorator's content updated with the arguments passed to this
+ call.
+
+    Note: The rules above imply that a MarkDecorator cannot store a single
+    function or class reference as its only positional argument with no
+    additional keywords, because such a call is interpreted as decoration.
+
+ """
+ def __init__(self, name, args=None, kwargs=None):
+ self.name = name
+ self.args = args or ()
+ self.kwargs = kwargs or {}
+
+ @property
+ def markname(self):
+ return self.name # for backward-compat (2.4.1 had this attr)
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ name = d.pop('name')
+ return "<MarkDecorator %r %r>" % (name, d)
+
+ def __call__(self, *args, **kwargs):
+ """ if passed a single callable argument: decorate it with mark info.
+ otherwise add *args/**kwargs in-place to mark information. """
+ if args and not kwargs:
+ func = args[0]
+ is_class = inspect.isclass(func)
+ if len(args) == 1 and (istestfunc(func) or is_class):
+ if is_class:
+ if hasattr(func, 'pytestmark'):
+ mark_list = func.pytestmark
+ if not isinstance(mark_list, list):
+ mark_list = [mark_list]
+ # always work on a copy to avoid updating pytestmark
+ # from a superclass by accident
+ mark_list = mark_list + [self]
+ func.pytestmark = mark_list
+ else:
+ func.pytestmark = [self]
+ else:
+ holder = getattr(func, self.name, None)
+ if holder is None:
+ holder = MarkInfo(
+ self.name, self.args, self.kwargs
+ )
+ setattr(func, self.name, holder)
+ else:
+ holder.add(self.args, self.kwargs)
+ return func
+ kw = self.kwargs.copy()
+ kw.update(kwargs)
+ args = self.args + args
+ return self.__class__(self.name, args=args, kwargs=kw)
+
+
+class MarkInfo:
+ """ Marking object created by :class:`MarkDecorator` instances. """
+ def __init__(self, name, args, kwargs):
+ #: name of attribute
+ self.name = name
+ #: positional argument list, empty if none specified
+ self.args = args
+ #: keyword argument dictionary, empty if nothing specified
+ self.kwargs = kwargs.copy()
+ self._arglist = [(args, kwargs.copy())]
+
+ def __repr__(self):
+ return "<MarkInfo %r args=%r kwargs=%r>" % (
+ self.name, self.args, self.kwargs
+ )
+
+ def add(self, args, kwargs):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._arglist.append((args, kwargs))
+ self.args += args
+ self.kwargs.update(kwargs)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ for args, kwargs in self._arglist:
+ yield MarkInfo(self.name, args, kwargs)
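+
+# Accumulation sketch (assumed example): applying the same mark twice,
+#
+#     @pytest.mark.foo("a")
+#     @pytest.mark.foo("b")
+#     def test_x(): pass
+#
+# leaves test_x.foo.args == ("b", "a") (decorators apply bottom-up) and
+# iterating over test_x.foo yields one MarkInfo per marking call.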
diff --git a/python/pytest/_pytest/monkeypatch.py b/python/pytest/_pytest/monkeypatch.py
new file mode 100644
index 000000000..d4c169d37
--- /dev/null
+++ b/python/pytest/_pytest/monkeypatch.py
@@ -0,0 +1,254 @@
+""" monkeypatching and mocking functionality. """
+
+import os, sys
+import re
+
+from py.builtin import _basestring
+
+RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$")
+
+
+def pytest_funcarg__monkeypatch(request):
+ """The returned ``monkeypatch`` funcarg provides these
+ helper methods to modify objects, dictionaries or os.environ::
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=False)
+        monkeypatch.delenv(name, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+ All modifications will be undone after the requesting
+ test function has finished. The ``raising``
+ parameter determines if a KeyError or AttributeError
+ will be raised if the set/deletion operation has no target.
+ """
+ mpatch = monkeypatch()
+ request.addfinalizer(mpatch.undo)
+ return mpatch
+
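+# Usage sketch (hypothetical test, assuming ``os`` is imported):
+#
+#     def test_getcwd(monkeypatch):
+#         monkeypatch.setattr(os, "getcwd", lambda: "/")
+#         assert os.getcwd() == "/"
+#
+# the patch is reverted automatically when the test finishes.
+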
+
+def resolve(name):
+ # simplified from zope.dottedname
+ parts = name.split('.')
+
+ used = parts.pop(0)
+ found = __import__(used)
+ for part in parts:
+ used += '.' + part
+ try:
+ found = getattr(found, part)
+ except AttributeError:
+ pass
+ else:
+ continue
+ # we use explicit un-nesting of the handling block in order
+ # to avoid nested exceptions on python 3
+ try:
+ __import__(used)
+ except ImportError as ex:
+ # str is used for py2 vs py3
+ expected = str(ex).split()[-1]
+ if expected == used:
+ raise
+ else:
+ raise ImportError(
+ 'import error in %s: %s' % (used, ex)
+ )
+ found = annotated_getattr(found, part, used)
+ return found
+
+
+def annotated_getattr(obj, name, ann):
+ try:
+ obj = getattr(obj, name)
+ except AttributeError:
+ raise AttributeError(
+ '%r object at %s has no attribute %r' % (
+ type(obj).__name__, ann, name
+ )
+ )
+ return obj
+
+
+def derive_importpath(import_path, raising):
+ if not isinstance(import_path, _basestring) or "." not in import_path:
+ raise TypeError("must be absolute import path string, not %r" %
+ (import_path,))
+ module, attr = import_path.rsplit('.', 1)
+ target = resolve(module)
+ if raising:
+ annotated_getattr(target, attr, ann=module)
+ return attr, target
+
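+# Sketch (assumed example): derive_importpath("os.path.sep", raising=True)
+# imports "os.path", verifies that the "sep" attribute exists, and returns
+# ("sep", <module os.path>).
+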
+
+class Notset:
+ def __repr__(self):
+ return "<notset>"
+
+
+notset = Notset()
+
+
+class monkeypatch:
+ """ Object keeping a record of setattr/item/env/syspath changes. """
+
+ def __init__(self):
+ self._setattr = []
+ self._setitem = []
+ self._cwd = None
+ self._savesyspath = None
+
+ def setattr(self, target, name, value=notset, raising=True):
+ """ Set attribute value on target, memorizing the old value.
+ By default raise AttributeError if the attribute did not exist.
+
+ For convenience you can specify a string as ``target`` which
+ will be interpreted as a dotted import path, with the last part
+ being the attribute name. Example:
+ ``monkeypatch.setattr("os.getcwd", lambda x: "/")``
+ would set the ``getcwd`` function of the ``os`` module.
+
+ The ``raising`` value determines if the setattr should fail
+ if the attribute is not already present (defaults to True
+ which means it will raise).
+ """
+ __tracebackhide__ = True
+ import inspect
+
+ if value is notset:
+ if not isinstance(target, _basestring):
+ raise TypeError("use setattr(target, name, value) or "
+ "setattr(target, value) with target being a dotted "
+ "import string")
+ value = name
+ name, target = derive_importpath(target, raising)
+
+ oldval = getattr(target, name, notset)
+ if raising and oldval is notset:
+ raise AttributeError("%r has no attribute %r" % (target, name))
+
+ # avoid class descriptors like staticmethod/classmethod
+ if inspect.isclass(target):
+ oldval = target.__dict__.get(name, notset)
+ self._setattr.append((target, name, oldval))
+ setattr(target, name, value)
+
+ def delattr(self, target, name=notset, raising=True):
+        """ Delete attribute ``name`` from ``target``, by default raise
+        AttributeError if the attribute did not previously exist.
+
+ If no ``name`` is specified and ``target`` is a string
+ it will be interpreted as a dotted import path with the
+ last part being the attribute name.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ attribute is missing.
+ """
+ __tracebackhide__ = True
+ if name is notset:
+ if not isinstance(target, _basestring):
+ raise TypeError("use delattr(target, name) or "
+ "delattr(target) with target being a dotted "
+ "import string")
+ name, target = derive_importpath(target, raising)
+
+ if not hasattr(target, name):
+ if raising:
+ raise AttributeError(name)
+ else:
+ self._setattr.append((target, name, getattr(target, name, notset)))
+ delattr(target, name)
+
+ def setitem(self, dic, name, value):
+ """ Set dictionary entry ``name`` to value. """
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ dic[name] = value
+
+ def delitem(self, dic, name, raising=True):
+ """ Delete ``name`` from dict. Raise KeyError if it doesn't exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ key is missing.
+ """
+ if name not in dic:
+ if raising:
+ raise KeyError(name)
+ else:
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ del dic[name]
+
+ def setenv(self, name, value, prepend=None):
+ """ Set environment variable ``name`` to ``value``. If ``prepend``
+ is a character, read the current environment variable value
+ and prepend the ``value`` adjoined with the ``prepend`` character."""
+ value = str(value)
+ if prepend and name in os.environ:
+ value = value + prepend + os.environ[name]
+ self.setitem(os.environ, name, value)
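+
+    # Sketch (assumed example): with PATH set to "/usr/bin",
+    #     monkeypatch.setenv("PATH", "/tmp", prepend=os.pathsep)
+    # leaves PATH equal to "/tmp:/usr/bin" on POSIX systems.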
+
+ def delenv(self, name, raising=True):
+        """ Delete ``name`` from the environment. Raise KeyError if it does
+        not exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ environment variable is missing.
+ """
+ self.delitem(os.environ, name, raising=raising)
+
+ def syspath_prepend(self, path):
+ """ Prepend ``path`` to ``sys.path`` list of import locations. """
+ if self._savesyspath is None:
+ self._savesyspath = sys.path[:]
+ sys.path.insert(0, str(path))
+
+ def chdir(self, path):
+ """ Change the current working directory to the specified path.
+ Path can be a string or a py.path.local object.
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
+ def undo(self):
+ """ Undo previous changes. This call consumes the
+ undo stack. Calling it a second time has no effect unless
+ you do more monkeypatching after the undo call.
+
+ There is generally no need to call `undo()`, since it is
+ called automatically during tear-down.
+
+ Note that the same `monkeypatch` fixture is used across a
+ single test function invocation. If `monkeypatch` is used both by
+ the test function itself and one of the test fixtures,
+ calling `undo()` will undo all of the changes made in
+ both functions.
+ """
+ for obj, name, value in reversed(self._setattr):
+ if value is not notset:
+ setattr(obj, name, value)
+ else:
+ delattr(obj, name)
+ self._setattr[:] = []
+ for dictionary, name, value in reversed(self._setitem):
+ if value is notset:
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
+ else:
+ dictionary[name] = value
+ self._setitem[:] = []
+ if self._savesyspath is not None:
+ sys.path[:] = self._savesyspath
+ self._savesyspath = None
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
diff --git a/python/pytest/_pytest/nose.py b/python/pytest/_pytest/nose.py
new file mode 100644
index 000000000..038746868
--- /dev/null
+++ b/python/pytest/_pytest/nose.py
@@ -0,0 +1,71 @@
+""" run test suites written for nose. """
+
+import sys
+
+import py
+import pytest
+from _pytest import unittest
+
+
+def get_skip_exceptions():
+ skip_classes = set()
+ for module_name in ('unittest', 'unittest2', 'nose'):
+ mod = sys.modules.get(module_name)
+ if hasattr(mod, 'SkipTest'):
+ skip_classes.add(mod.SkipTest)
+ return tuple(skip_classes)
+
+
+def pytest_runtest_makereport(item, call):
+ if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()):
+ # let's substitute the excinfo with a pytest.skip one
+ call2 = call.__class__(lambda:
+ pytest.skip(str(call.excinfo.value)), call.when)
+ call.excinfo = call2.excinfo
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_runtest_setup(item):
+ if is_potential_nosetest(item):
+ if isinstance(item.parent, pytest.Generator):
+ gen = item.parent
+ if not hasattr(gen, '_nosegensetup'):
+ call_optional(gen.obj, 'setup')
+ if isinstance(gen.parent, pytest.Instance):
+ call_optional(gen.parent.obj, 'setup')
+ gen._nosegensetup = True
+ if not call_optional(item.obj, 'setup'):
+ # call module level setup if there is no object level one
+ call_optional(item.parent.obj, 'setup')
+ #XXX this implies we only call teardown when setup worked
+ item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item)
+
+def teardown_nose(item):
+ if is_potential_nosetest(item):
+ if not call_optional(item.obj, 'teardown'):
+ call_optional(item.parent.obj, 'teardown')
+ #if hasattr(item.parent, '_nosegensetup'):
+ # #call_optional(item._nosegensetup, 'teardown')
+ # del item.parent._nosegensetup
+
+
+def pytest_make_collect_report(collector):
+ if isinstance(collector, pytest.Generator):
+ call_optional(collector.obj, 'setup')
+
+
+def is_potential_nosetest(item):
+ # extra check needed since we do not do nose style setup/teardown
+ # on direct unittest style classes
+ return isinstance(item, pytest.Function) and \
+ not isinstance(item, unittest.TestCaseFunction)
+
+
+def call_optional(obj, name):
+ method = getattr(obj, name, None)
+ isfixture = hasattr(method, "_pytestfixturefunction")
+ if method is not None and not isfixture and py.builtin.callable(method):
+        # if there are any problems, let the exception propagate
+        # rather than silently ignoring it
+ method()
+ return True
diff --git a/python/pytest/_pytest/pastebin.py b/python/pytest/_pytest/pastebin.py
new file mode 100644
index 000000000..4ec62d022
--- /dev/null
+++ b/python/pytest/_pytest/pastebin.py
@@ -0,0 +1,92 @@
+""" submit failure or test session information to a pastebin service. """
+import pytest
+import sys
+import tempfile
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group._addoption('--pastebin', metavar="mode",
+ action='store', dest="pastebin", default=None,
+ choices=['failed', 'all'],
+ help="send failed|all info to bpaste.net pastebin service.")
+
+@pytest.hookimpl(trylast=True)
+def pytest_configure(config):
+ import py
+ if config.option.pastebin == "all":
+ tr = config.pluginmanager.getplugin('terminalreporter')
+ # if no terminal reporter plugin is present, nothing we can do here;
+ # this can happen when this function executes in a slave node
+ # when using pytest-xdist, for example
+ if tr is not None:
+ # pastebin file will be utf-8 encoded binary file
+ config._pastebinfile = tempfile.TemporaryFile('w+b')
+ oldwrite = tr._tw.write
+ def tee_write(s, **kwargs):
+ oldwrite(s, **kwargs)
+ if py.builtin._istext(s):
+ s = s.encode('utf-8')
+ config._pastebinfile.write(s)
+ tr._tw.write = tee_write
+
+def pytest_unconfigure(config):
+ if hasattr(config, '_pastebinfile'):
+ # get terminal contents and delete file
+ config._pastebinfile.seek(0)
+ sessionlog = config._pastebinfile.read()
+ config._pastebinfile.close()
+ del config._pastebinfile
+ # undo our patching in the terminal reporter
+ tr = config.pluginmanager.getplugin('terminalreporter')
+ del tr._tw.__dict__['write']
+ # write summary
+ tr.write_sep("=", "Sending information to Paste Service")
+ pastebinurl = create_new_paste(sessionlog)
+ tr.write_line("pastebin session-log: %s\n" % pastebinurl)
+
+def create_new_paste(contents):
+ """
+ Creates a new paste using bpaste.net service.
+
+ :contents: paste contents as utf-8 encoded bytes
+ :returns: url to the pasted contents
+ """
+ import re
+ if sys.version_info < (3, 0):
+ from urllib import urlopen, urlencode
+ else:
+ from urllib.request import urlopen
+ from urllib.parse import urlencode
+
+ params = {
+ 'code': contents,
+ 'lexer': 'python3' if sys.version_info[0] == 3 else 'python',
+ 'expiry': '1week',
+ }
+ url = 'https://bpaste.net'
+ response = urlopen(url, data=urlencode(params).encode('ascii')).read()
+ m = re.search(r'href="/raw/(\w+)"', response.decode('utf-8'))
+ if m:
+ return '%s/show/%s' % (url, m.group(1))
+ else:
+ return 'bad response: ' + response
+
+def pytest_terminal_summary(terminalreporter):
+ import _pytest.config
+ if terminalreporter.config.option.pastebin != "failed":
+ return
+ tr = terminalreporter
+ if 'failed' in tr.stats:
+ terminalreporter.write_sep("=", "Sending information to Paste Service")
+ for rep in terminalreporter.stats.get('failed'):
+ try:
+ msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
+ except AttributeError:
+ msg = tr._getfailureheadline(rep)
+ tw = _pytest.config.create_terminal_writer(terminalreporter.config, stringio=True)
+ rep.toterminal(tw)
+ s = tw.stringio.getvalue()
+ assert len(s)
+ pastebinurl = create_new_paste(s)
+ tr.write_line("%s --> %s" %(msg, pastebinurl))
diff --git a/python/pytest/_pytest/pdb.py b/python/pytest/_pytest/pdb.py
new file mode 100644
index 000000000..84c920d17
--- /dev/null
+++ b/python/pytest/_pytest/pdb.py
@@ -0,0 +1,109 @@
+""" interactive debugging with PDB, the Python Debugger. """
+from __future__ import absolute_import
+import pdb
+import sys
+
+import pytest
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption('--pdb',
+ action="store_true", dest="usepdb", default=False,
+ help="start the interactive Python debugger on errors.")
+
+def pytest_namespace():
+ return {'set_trace': pytestPDB().set_trace}
+
+def pytest_configure(config):
+ if config.getvalue("usepdb"):
+ config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
+
+ old = (pdb.set_trace, pytestPDB._pluginmanager)
+ def fin():
+ pdb.set_trace, pytestPDB._pluginmanager = old
+ pytestPDB._config = None
+ pdb.set_trace = pytest.set_trace
+ pytestPDB._pluginmanager = config.pluginmanager
+ pytestPDB._config = config
+ config._cleanup.append(fin)
+
+class pytestPDB:
+ """ Pseudo PDB that defers to the real pdb. """
+ _pluginmanager = None
+ _config = None
+
+ def set_trace(self):
+ """ invoke PDB set_trace debugging, dropping any IO capturing. """
+ import _pytest.config
+ frame = sys._getframe().f_back
+ if self._pluginmanager is not None:
+ capman = self._pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspendcapture(in_=True)
+ tw = _pytest.config.create_terminal_writer(self._config)
+ tw.line()
+ tw.sep(">", "PDB set_trace (IO-capturing turned off)")
+ self._pluginmanager.hook.pytest_enter_pdb(config=self._config)
+ pdb.Pdb().set_trace(frame)
+
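+# Hedged usage note: pytest_namespace() above exposes this method as
+# ``pytest.set_trace``, so inside a test one can write (illustrative):
+#
+#   def test_debug():
+#       pytest.set_trace()  # IO capturing is suspended, then pdb starts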
+
+class PdbInvoke:
+ def pytest_exception_interact(self, node, call, report):
+ capman = node.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ out, err = capman.suspendcapture(in_=True)
+ sys.stdout.write(out)
+ sys.stdout.write(err)
+ _enter_pdb(node, call.excinfo, report)
+
+ def pytest_internalerror(self, excrepr, excinfo):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" %line)
+ sys.stderr.flush()
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+
+
+def _enter_pdb(node, excinfo, rep):
+ # XXX we re-use the TerminalReporter's terminalwriter
+ # because this seems to avoid some encoding related troubles
+ # for not completely clear reasons.
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
+ tw.line()
+ tw.sep(">", "traceback")
+ rep.toterminal(tw)
+ tw.sep(">", "entering PDB")
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+ rep._pdbshown = True
+ return rep
+
+
+def _postmortem_traceback(excinfo):
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ from doctest import UnexpectedException
+ if isinstance(excinfo.value, UnexpectedException):
+ return excinfo.value.exc_info[2]
+ else:
+ return excinfo._excinfo[2]
+
+
+def _find_last_non_hidden_frame(stack):
+ i = max(0, len(stack) - 1)
+ while i and stack[i][0].f_locals.get("__tracebackhide__", False):
+ i -= 1
+ return i
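+# A small behavioural sketch (stand-in objects, not from the original
+# module): frames whose f_locals set __tracebackhide__ are skipped from
+# the end of the stack so the debugger starts at user code.
+#
+#   class _Frame:
+#       def __init__(self, hide):
+#           self.f_locals = {"__tracebackhide__": hide}
+#   stack = [(_Frame(False), 1), (_Frame(True), 2)]
+#   assert _find_last_non_hidden_frame(stack) == 0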
+
+
+def post_mortem(t):
+ class Pdb(pdb.Pdb):
+ def get_stack(self, f, t):
+ stack, i = pdb.Pdb.get_stack(self, f, t)
+ if f is None:
+ i = _find_last_non_hidden_frame(stack)
+ return stack, i
+ p = Pdb()
+ p.reset()
+ p.interaction(None, t)
diff --git a/python/pytest/_pytest/pytester.py b/python/pytest/_pytest/pytester.py
new file mode 100644
index 000000000..faed7f581
--- /dev/null
+++ b/python/pytest/_pytest/pytester.py
@@ -0,0 +1,1110 @@
+""" (disabled by default) support for testing pytest and pytest plugins. """
+import codecs
+import gc
+import os
+import platform
+import re
+import subprocess
+import sys
+import time
+import traceback
+from fnmatch import fnmatch
+
+from py.builtin import print_
+
+from _pytest._code import Source
+import py
+import pytest
+from _pytest.main import Session, EXIT_OK
+
+
+def pytest_addoption(parser):
+ # group = parser.getgroup("pytester", "pytester (self-tests) options")
+ parser.addoption('--lsof',
+ action="store_true", dest="lsof", default=False,
+ help=("run FD checks if lsof is available"))
+
+ parser.addoption('--runpytest', default="inprocess", dest="runpytest",
+ choices=("inprocess", "subprocess", ),
+ help=("run pytest sub runs in tests using an 'inprocess' "
+ "or 'subprocess' (python -m main) method"))
+
+
+def pytest_configure(config):
+ # This might be called multiple times. Only take the first.
+ global _pytest_fullpath
+ try:
+ _pytest_fullpath
+ except NameError:
+ _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
+ _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
+
+ if config.getvalue("lsof"):
+ checker = LsofFdLeakChecker()
+ if checker.matching_platform():
+ config.pluginmanager.register(checker)
+
+
+class LsofFdLeakChecker(object):
+ def get_open_files(self):
+ out = self._exec_lsof()
+ open_files = self._parse_lsof_output(out)
+ return open_files
+
+ def _exec_lsof(self):
+ pid = os.getpid()
+ return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
+
+ def _parse_lsof_output(self, out):
+ def isopen(line):
+ return line.startswith('f') and ("deleted" not in line and
+ 'mem' not in line and "txt" not in line and 'cwd' not in line)
+
+ open_files = []
+
+ for line in out.split("\n"):
+ if isopen(line):
+ fields = line.split('\0')
+ fd = fields[0][1:]
+ filename = fields[1][1:]
+ if filename.startswith('/'):
+ open_files.append((fd, filename))
+
+ return open_files
+
+ def matching_platform(self):
+ try:
+ py.process.cmdexec("lsof -v")
+ except (py.process.cmdexec.Error, UnicodeDecodeError):
+ # cmdexec may raise UnicodeDecodeError on Windows systems
+ # with a locale other than English:
+ # https://bitbucket.org/pytest-dev/py/issues/66
+ return False
+ else:
+ return True
+
+ @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_runtest_item(self, item):
+ lines1 = self.get_open_files()
+ yield
+ if hasattr(sys, "pypy_version_info"):
+ gc.collect()
+ lines2 = self.get_open_files()
+
+ new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1])
+ leaked_files = [t for t in lines2 if t[0] in new_fds]
+ if leaked_files:
+ error = []
+ error.append("***** %s FD leakage detected" % len(leaked_files))
+ error.extend([str(f) for f in leaked_files])
+ error.append("*** Before:")
+ error.extend([str(f) for f in lines1])
+ error.append("*** After:")
+ error.extend([str(f) for f in lines2])
+ error.append(error[0])
+ error.append("*** function %s:%s: %s " % item.location)
+ pytest.fail("\n".join(error), pytrace=False)
+
+
+# XXX copied from execnet's conftest.py - needs to be merged
+winpymap = {
+ 'python2.7': r'C:\Python27\python.exe',
+ 'python2.6': r'C:\Python26\python.exe',
+ 'python3.1': r'C:\Python31\python.exe',
+ 'python3.2': r'C:\Python32\python.exe',
+ 'python3.3': r'C:\Python33\python.exe',
+ 'python3.4': r'C:\Python34\python.exe',
+ 'python3.5': r'C:\Python35\python.exe',
+}
+
+def getexecutable(name, cache={}):
+ try:
+ return cache[name]
+ except KeyError:
+ executable = py.path.local.sysfind(name)
+ if executable:
+ if name == "jython":
+ import subprocess
+ popen = subprocess.Popen([str(executable), "--version"],
+ universal_newlines=True, stderr=subprocess.PIPE)
+ out, err = popen.communicate()
+ if not err or "2.5" not in err:
+ executable = None
+ if "2.5.2" in err:
+ executable = None # http://bugs.jython.org/issue1790
+ cache[name] = executable
+ return executable
+
+@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
+ 'pypy', 'pypy3'])
+def anypython(request):
+ name = request.param
+ executable = getexecutable(name)
+ if executable is None:
+ if sys.platform == "win32":
+ executable = winpymap.get(name, None)
+ if executable:
+ executable = py.path.local(executable)
+ if executable.check():
+ return executable
+ pytest.skip("no suitable %s found" % (name,))
+ return executable
+
+# used at least by pytest-xdist plugin
+@pytest.fixture
+def _pytest(request):
+ """ Return a helper which offers a gethookrecorder(hook)
+ method which returns a HookRecorder instance which helps
+ to make assertions about called hooks.
+ """
+ return PytestArg(request)
+
+class PytestArg:
+ def __init__(self, request):
+ self.request = request
+
+ def gethookrecorder(self, hook):
+ hookrecorder = HookRecorder(hook._pm)
+ self.request.addfinalizer(hookrecorder.finish_recording)
+ return hookrecorder
+
+
+def get_public_names(l):
+ """Only return names from iterator l without a leading underscore."""
+ return [x for x in l if x[0] != "_"]
+
+
+class ParsedCall:
+ def __init__(self, name, kwargs):
+ self.__dict__.update(kwargs)
+ self._name = name
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ del d['_name']
+ return "<ParsedCall %r(**%r)>" %(self._name, d)
+
+
+class HookRecorder:
+ """Record all hooks called in a plugin manager.
+
+ This wraps all the hook calls in the plugin manager, recording
+ each call before propagating the normal calls.
+
+ """
+
+ def __init__(self, pluginmanager):
+ self._pluginmanager = pluginmanager
+ self.calls = []
+
+ def before(hook_name, hook_impls, kwargs):
+ self.calls.append(ParsedCall(hook_name, kwargs))
+
+ def after(outcome, hook_name, hook_impls, kwargs):
+ pass
+
+ self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+ def finish_recording(self):
+ self._undo_wrapping()
+
+ def getcalls(self, names):
+ if isinstance(names, str):
+ names = names.split()
+ return [call for call in self.calls if call._name in names]
+
+ def assert_contains(self, entries):
+ __tracebackhide__ = True
+ i = 0
+ entries = list(entries)
+ backlocals = sys._getframe(1).f_locals
+ while entries:
+ name, check = entries.pop(0)
+ for ind, call in enumerate(self.calls[i:]):
+ if call._name == name:
+ print_("NAMEMATCH", name, call)
+ if eval(check, backlocals, call.__dict__):
+ print_("CHECKERMATCH", repr(check), "->", call)
+ else:
+ print_("NOCHECKERMATCH", repr(check), "-", call)
+ continue
+ i += ind + 1
+ break
+ print_("NONAMEMATCH", name, "with", call)
+ else:
+ pytest.fail("could not find %r check %r" % (name, check))
+
+ def popcall(self, name):
+ __tracebackhide__ = True
+ for i, call in enumerate(self.calls):
+ if call._name == name:
+ del self.calls[i]
+ return call
+ lines = ["could not find call %r, in:" % (name,)]
+ lines.extend([" %s" % str(x) for x in self.calls])
+ pytest.fail("\n".join(lines))
+
+ def getcall(self, name):
+ l = self.getcalls(name)
+ assert len(l) == 1, (name, l)
+ return l[0]
+
+ # functionality for test reports
+
+ def getreports(self,
+ names="pytest_runtest_logreport pytest_collectreport"):
+ return [x.report for x in self.getcalls(names)]
+
+ def matchreport(self, inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport", when=None):
+ """ return a testreport whose dotted import path matches """
+ l = []
+ for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
+ if when and getattr(rep, 'when', None) != when:
+ continue
+ if not inamepart or inamepart in rep.nodeid.split("::"):
+ l.append(rep)
+ if not l:
+ raise ValueError("could not find test report matching %r: "
+ "no test reports at all!" % (inamepart,))
+ if len(l) > 1:
+ raise ValueError(
+ "found 2 or more testreports matching %r: %s" %(inamepart, l))
+ return l[0]
+
+ def getfailures(self,
+ names='pytest_runtest_logreport pytest_collectreport'):
+ return [rep for rep in self.getreports(names) if rep.failed]
+
+ def getfailedcollections(self):
+ return self.getfailures('pytest_collectreport')
+
+ def listoutcomes(self):
+ passed = []
+ skipped = []
+ failed = []
+ for rep in self.getreports(
+ "pytest_collectreport pytest_runtest_logreport"):
+ if rep.passed:
+ if getattr(rep, "when", None) == "call":
+ passed.append(rep)
+ elif rep.skipped:
+ skipped.append(rep)
+ elif rep.failed:
+ failed.append(rep)
+ return passed, skipped, failed
+
+ def countoutcomes(self):
+ return [len(x) for x in self.listoutcomes()]
+
+ def assertoutcome(self, passed=0, skipped=0, failed=0):
+ realpassed, realskipped, realfailed = self.listoutcomes()
+ assert passed == len(realpassed)
+ assert skipped == len(realskipped)
+ assert failed == len(realfailed)
+
+ def clear(self):
+ self.calls[:] = []
+
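+# Hedged usage sketch (names illustrative): attach a HookRecorder to a
+# plugin manager, run some pytest code, then assert on the outcomes.
+#
+#   rec = HookRecorder(config.pluginmanager)
+#   ...  # run tests
+#   rep = rec.matchreport("test_ok")
+#   assert rep.passed
+#   rec.finish_recording()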
+
+@pytest.fixture
+def linecomp(request):
+ return LineComp()
+
+
+def pytest_funcarg__LineMatcher(request):
+ return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+ return Testdir(request, tmpdir_factory)
+
+
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+class RunResult:
+ """The result of running a command.
+
+ Attributes:
+
+ :ret: The return value.
+ :outlines: List of lines captured from stdout.
+ :errlines: List of lines captured from stderr.
+ :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+ reconstruct stdout or the commonly used
+ ``stdout.fnmatch_lines()`` method.
+ :stderr: :py:class:`LineMatcher` of stderr.
+ :duration: Duration in seconds.
+
+ """
+ def __init__(self, ret, outlines, errlines, duration):
+ self.ret = ret
+ self.outlines = outlines
+ self.errlines = errlines
+ self.stdout = LineMatcher(outlines)
+ self.stderr = LineMatcher(errlines)
+ self.duration = duration
+
+ def parseoutcomes(self):
+ """ Return a dictionary of outcomestring->num from parsing
+ the terminal output that the test process produced."""
+ for line in reversed(self.outlines):
+ if 'seconds' in line:
+ outcomes = rex_outcome.findall(line)
+ if outcomes:
+ d = {}
+ for num, cat in outcomes:
+ d[cat] = int(num)
+ return d
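+ # For example (hypothetical output), a line such as
+ # "1 failed, 2 passed in 0.12 seconds" yields d["failed"] == 1
+ # and d["passed"] == 2.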
+
+ def assert_outcomes(self, passed=0, skipped=0, failed=0):
+ """ assert that the specified outcomes appear with the respective
+ numbers (0 means it didn't occur) in the text output from a test run."""
+ d = self.parseoutcomes()
+ assert passed == d.get("passed", 0)
+ assert skipped == d.get("skipped", 0)
+ assert failed == d.get("failed", 0)
+
+
+
+class Testdir:
+ """Temporary test directory with tools to test/run py.test itself.
+
+ This is based on the ``tmpdir`` fixture but provides a number of
+ methods which aid with testing py.test itself. Unless
+ :py:meth:`chdir` is used all methods will use :py:attr:`tmpdir` as
+ current working directory.
+
+ Attributes:
+
+ :tmpdir: The :py:class:`py.path.local` instance of the temporary
+ directory.
+
+ :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+ :py:meth:`runpytest`. Initially this is an empty list but
+ plugins can be added to the list. The type of items to add to
+ the list depend on the method which uses them so refer to them
+ for details.
+
+ """
+
+ def __init__(self, request, tmpdir_factory):
+ self.request = request
+ # XXX remove duplication with tmpdir plugin
+ basetmp = tmpdir_factory.ensuretemp("testdir")
+ name = request.function.__name__
+ for i in range(100):
+ try:
+ tmpdir = basetmp.mkdir(name + str(i))
+ except py.error.EEXIST:
+ continue
+ break
+ self.tmpdir = tmpdir
+ self.plugins = []
+ self._savesyspath = (list(sys.path), list(sys.meta_path))
+ self._savemodulekeys = set(sys.modules)
+ self.chdir() # always chdir
+ self.request.addfinalizer(self.finalize)
+ method = self.request.config.getoption("--runpytest")
+ if method == "inprocess":
+ self._runpytest_method = self.runpytest_inprocess
+ elif method == "subprocess":
+ self._runpytest_method = self.runpytest_subprocess
+
+ def __repr__(self):
+ return "<Testdir %r>" % (self.tmpdir,)
+
+ def finalize(self):
+ """Clean up global state artifacts.
+
+ Some methods modify the global interpreter state and this
+ tries to clean this up. It does not remove the temporary
+ directory however so it can be looked at after the test run
+ has finished.
+
+ """
+ sys.path[:], sys.meta_path[:] = self._savesyspath
+ if hasattr(self, '_olddir'):
+ self._olddir.chdir()
+ self.delete_loaded_modules()
+
+ def delete_loaded_modules(self):
+ """Delete modules that have been loaded during a test.
+
+ This allows the interpreter to catch module changes in case
+ the module is re-imported.
+ """
+ for name in set(sys.modules).difference(self._savemodulekeys):
+ # it seems zope.interfaces is keeping some state
+ # (used by twisted related tests)
+ if name != "zope.interface":
+ del sys.modules[name]
+
+ def make_hook_recorder(self, pluginmanager):
+ """Create a new :py:class:`HookRecorder` for a PluginManager."""
+ assert not hasattr(pluginmanager, "reprec")
+ pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
+ self.request.addfinalizer(reprec.finish_recording)
+ return reprec
+
+ def chdir(self):
+ """Cd into the temporary directory.
+
+ This is done automatically upon instantiation.
+
+ """
+ old = self.tmpdir.chdir()
+ if not hasattr(self, '_olddir'):
+ self._olddir = old
+
+ def _makefile(self, ext, args, kwargs):
+ items = list(kwargs.items())
+ if args:
+ source = py.builtin._totext("\n").join(
+ map(py.builtin._totext, args)) + py.builtin._totext("\n")
+ basename = self.request.function.__name__
+ items.insert(0, (basename, source))
+ ret = None
+ for name, value in items:
+ p = self.tmpdir.join(name).new(ext=ext)
+ source = Source(value)
+ def my_totext(s, encoding="utf-8"):
+ if py.builtin._isbytes(s):
+ s = py.builtin._totext(s, encoding=encoding)
+ return s
+ source_unicode = "\n".join([my_totext(line) for line in source.lines])
+ source = py.builtin._totext(source_unicode)
+ content = source.strip().encode("utf-8") # + "\n"
+ #content = content.rstrip() + "\n"
+ p.write(content, "wb")
+ if ret is None:
+ ret = p
+ return ret
+
+ def makefile(self, ext, *args, **kwargs):
+ """Create a new file in the testdir.
+
+ ext: The extension the file should use, including the dot.
+ E.g. ".py".
+
+ args: All args will be treated as strings and joined using
+ newlines. The result will be written as contents to the
+ file. The name of the file will be based on the test
+ function requesting this fixture.
+ E.g. "testdir.makefile('.txt', 'line1', 'line2')"
+
+ kwargs: Each keyword is the name of a file, while the value of
+ it will be written as contents of the file.
+ E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
+
+ """
+ return self._makefile(ext, args, kwargs)
+
+ def makeconftest(self, source):
+ """Write a contest.py file with 'source' as contents."""
+ return self.makepyfile(conftest=source)
+
+ def makeini(self, source):
+ """Write a tox.ini file with 'source' as contents."""
+ return self.makefile('.ini', tox=source)
+
+ def getinicfg(self, source):
+ """Return the pytest section from the tox.ini config file."""
+ p = self.makeini(source)
+ return py.iniconfig.IniConfig(p)['pytest']
+
+ def makepyfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .py extension."""
+ return self._makefile('.py', args, kwargs)
+
+ def maketxtfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .txt extension."""
+ return self._makefile('.txt', args, kwargs)
+
+ def syspathinsert(self, path=None):
+ """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
+
+ This is undone automatically after the test.
+ """
+ if path is None:
+ path = self.tmpdir
+ sys.path.insert(0, str(path))
+ # a call to syspathinsert() usually means that the caller
+ # wants to import some dynamically created files.
+ # with python3 we thus invalidate import caches.
+ self._possibly_invalidate_import_caches()
+
+ def _possibly_invalidate_import_caches(self):
+ # invalidate caches if we can (py33 and above)
+ try:
+ import importlib
+ except ImportError:
+ pass
+ else:
+ if hasattr(importlib, "invalidate_caches"):
+ importlib.invalidate_caches()
+
+ def mkdir(self, name):
+ """Create a new (sub)directory."""
+ return self.tmpdir.mkdir(name)
+
+ def mkpydir(self, name):
+ """Create a new python package.
+
+ This creates a (sub)directory with an empty ``__init__.py``
+ file so that it is recognised as a python package.
+
+ """
+ p = self.mkdir(name)
+ p.ensure("__init__.py")
+ return p
+
+ Session = Session
+ def getnode(self, config, arg):
+ """Return the collection node of a file.
+
+ :param config: :py:class:`_pytest.config.Config` instance, see
+ :py:meth:`parseconfig` and :py:meth:`parseconfigure` to
+ create the configuration.
+
+ :param arg: A :py:class:`py.path.local` instance of the file.
+
+ """
+ session = Session(config)
+ assert '::' not in str(arg)
+ p = py.path.local(arg)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([str(p)], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def getpathnode(self, path):
+ """Return the collection node of a file.
+
+ This is like :py:meth:`getnode` but uses
+ :py:meth:`parseconfigure` to create the (configured) py.test
+ Config instance.
+
+ :param path: A :py:class:`py.path.local` instance of the file.
+
+ """
+ config = self.parseconfigure(path)
+ session = Session(config)
+ x = session.fspath.bestrelpath(path)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([x], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def genitems(self, colitems):
+ """Generate all test items from a collection node.
+
+ This recurses into the collection node and returns a list of
+ all the test items contained within.
+
+ """
+ session = colitems[0].session
+ result = []
+ for colitem in colitems:
+ result.extend(session.genitems(colitem))
+ return result
+
+ def runitem(self, source):
+ """Run the "test_func" Item.
+
+ The calling test instance (the class which contains the test
+ method) must provide a ``.getrunner()`` method which should
+ return a runner which can run the test protocol for a single
+ item, like e.g. :py:func:`_pytest.runner.runtestprotocol`.
+
+ """
+ # used from runner functional tests
+ item = self.getitem(source)
+ # the test class where we are called from wants to provide the runner
+ testclassinstance = self.request.instance
+ runner = testclassinstance.getrunner()
+ return runner(item)
+
+ def inline_runsource(self, source, *cmdlineargs):
+ """Run a test module in process using ``pytest.main()``.
+
+ This run writes "source" into a temporary file and runs
+ ``pytest.main()`` on it, returning a :py:class:`HookRecorder`
+ instance for the result.
+
+ :param source: The source code of the test module.
+
+ :param cmdlineargs: Any extra command line arguments to use.
+
+ :return: :py:class:`HookRecorder` instance of the result.
+
+ """
+ p = self.makepyfile(source)
+ l = list(cmdlineargs) + [p]
+ return self.inline_run(*l)
+
+ def inline_genitems(self, *args):
+ """Run ``pytest.main(['--collectonly'])`` in-process.
+
+ Retuns a tuple of the collected items and a
+ :py:class:`HookRecorder` instance.
+
+ This runs the :py:func:`pytest.main` function to run all of
+ py.test inside the test process itself like
+ :py:meth:`inline_run`. However the return value is a tuple of
+ the collection items and a :py:class:`HookRecorder` instance.
+
+ """
+ rec = self.inline_run("--collect-only", *args)
+ items = [x.item for x in rec.getcalls("pytest_itemcollected")]
+ return items, rec
+
+ def inline_run(self, *args, **kwargs):
+ """Run ``pytest.main()`` in-process, returning a HookRecorder.
+
+ This runs the :py:func:`pytest.main` function to run all of
+ py.test inside the test process itself. This means it can
+ return a :py:class:`HookRecorder` instance which gives more
+ detailed results from the run than can be obtained by matching
+ stdout/stderr from :py:meth:`runpytest`.
+
+ :param args: Any command line arguments to pass to
+ :py:func:`pytest.main`.
+
+ :param plugin: (keyword-only) Extra plugin instances the
+ ``pytest.main()`` instance should use.
+
+ :return: A :py:class:`HookRecorder` instance.
+
+ """
+ rec = []
+ class Collect:
+ def pytest_configure(x, config):
+ rec.append(self.make_hook_recorder(config.pluginmanager))
+
+ plugins = kwargs.get("plugins") or []
+ plugins.append(Collect())
+ ret = pytest.main(list(args), plugins=plugins)
+ self.delete_loaded_modules()
+ if len(rec) == 1:
+ reprec = rec.pop()
+ else:
+ class reprec:
+ pass
+ reprec.ret = ret
+
+ # typically we reraise keyboard interrupts from the child run
+ # because it's our user requesting interruption of the testing
+ if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
+ calls = reprec.getcalls("pytest_keyboard_interrupt")
+ if calls and calls[-1].excinfo.type == KeyboardInterrupt:
+ raise KeyboardInterrupt()
+ return reprec
+
+ def runpytest_inprocess(self, *args, **kwargs):
+ """ Return result of running pytest in-process, providing a similar
+ interface to what self.runpytest() provides. """
+ if kwargs.get("syspathinsert"):
+ self.syspathinsert()
+ now = time.time()
+ capture = py.io.StdCapture()
+ try:
+ try:
+ reprec = self.inline_run(*args, **kwargs)
+ except SystemExit as e:
+ class reprec:
+ ret = e.args[0]
+ except Exception:
+ traceback.print_exc()
+ class reprec:
+ ret = 3
+ finally:
+ out, err = capture.reset()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ res = RunResult(reprec.ret,
+ out.split("\n"), err.split("\n"),
+ time.time()-now)
+ res.reprec = reprec
+ return res
+
+ def runpytest(self, *args, **kwargs):
+ """ Run pytest inline or in a subprocess, depending on the command line
+ option "--runpytest" and return a :py:class:`RunResult`.
+
+ """
+ args = self._ensure_basetemp(args)
+ return self._runpytest_method(*args, **kwargs)
+
+ def _ensure_basetemp(self, args):
+ args = [str(x) for x in args]
+ for x in args:
+ if str(x).startswith('--basetemp'):
+ #print ("basedtemp exists: %s" %(args,))
+ break
+ else:
+ args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
+ #print ("added basetemp: %s" %(args,))
+ return args
+
+ def parseconfig(self, *args):
+ """Return a new py.test Config instance from given commandline args.
+
+ This invokes the py.test bootstrapping code in _pytest.config
+ to create a new :py:class:`_pytest.core.PluginManager` and
+ call the pytest_cmdline_parse hook to create a new
+ :py:class:`_pytest.config.Config` instance.
+
+ If :py:attr:`plugins` has been populated they should be plugin
+ modules which will be registered with the PluginManager.
+
+ """
+ args = self._ensure_basetemp(args)
+
+ import _pytest.config
+ config = _pytest.config._prepareconfig(args, self.plugins)
+ # we don't know what the test will do with this half-setup config
+ # object and thus we make sure it gets unconfigured properly in any
+ # case (otherwise capturing could still be active, for example)
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def parseconfigure(self, *args):
+ """Return a new py.test configured Config instance.
+
+ This returns a new :py:class:`_pytest.config.Config` instance
+ like :py:meth:`parseconfig`, but also calls the
+ pytest_configure hook.
+
+ """
+ config = self.parseconfig(*args)
+ config._do_configure()
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def getitem(self, source, funcname="test_func"):
+ """Return the test item for a test function.
+
+ This writes the source to a python file and runs py.test's
+ collection on the resulting module, returning the test item
+ for the requested function name.
+
+ :param source: The module source.
+
+ :param funcname: The name of the test function for which the
+ Item must be returned.
+
+ """
+ items = self.getitems(source)
+ for item in items:
+ if item.name == funcname:
+ return item
+ assert 0, "%r item not found in module:\n%s\nitems: %s" %(
+ funcname, source, items)
+
+ def getitems(self, source):
+ """Return all test items collected from the module.
+
+ This writes the source to a python file and runs py.test's
+ collection on the resulting module, returning all test items
+ contained within.
+
+ """
+ modcol = self.getmodulecol(source)
+ return self.genitems([modcol])
+
+ def getmodulecol(self, source, configargs=(), withinit=False):
+ """Return the module collection node for ``source``.
+
+ This writes ``source`` to a file using :py:meth:`makepyfile`
+ and then runs the py.test collection on it, returning the
+ collection node for the test module.
+
+ :param source: The source code of the module to collect.
+
+ :param configargs: Any extra arguments to pass to
+ :py:meth:`parseconfigure`.
+
+ :param withinit: Whether to also write an ``__init__.py`` file
+ to the temporary directory to ensure it is a package.
+
+ """
+ kw = {self.request.function.__name__: Source(source).strip()}
+ path = self.makepyfile(**kw)
+ if withinit:
+ self.makepyfile(__init__ = "#")
+ self.config = config = self.parseconfigure(path, *configargs)
+ node = self.getnode(config, path)
+ return node
+
+ def collect_by_name(self, modcol, name):
+ """Return the collection node for name from the module collection.
+
+ This will search a module collection node for a collection
+ node matching the given name.
+
+ :param modcol: A module collection node, see
+ :py:meth:`getmodulecol`.
+
+ :param name: The name of the node to return.
+
+ """
+ for colitem in modcol._memocollect():
+ if colitem.name == name:
+ return colitem
+
+ def popen(self, cmdargs, stdout, stderr, **kw):
+ """Invoke subprocess.Popen.
+
+ This calls subprocess.Popen, making sure the current working
+ directory is on the PYTHONPATH.
+
+ You probably want to use :py:meth:`run` instead.
+
+ """
+ env = os.environ.copy()
+ env['PYTHONPATH'] = os.pathsep.join(filter(None, [
+ str(os.getcwd()), env.get('PYTHONPATH', '')]))
+ kw['env'] = env
+ return subprocess.Popen(cmdargs,
+ stdout=stdout, stderr=stderr, **kw)
+
+ def run(self, *cmdargs):
+ """Run a command with arguments.
+
+ Run a process using subprocess.Popen saving the stdout and
+ stderr.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ return self._run(*cmdargs)
+
+ def _run(self, *cmdargs):
+ cmdargs = [str(x) for x in cmdargs]
+ p1 = self.tmpdir.join("stdout")
+ p2 = self.tmpdir.join("stderr")
+ print_("running:", ' '.join(cmdargs))
+ print_(" in:", str(py.path.local()))
+ f1 = codecs.open(str(p1), "w", encoding="utf8")
+ f2 = codecs.open(str(p2), "w", encoding="utf8")
+ try:
+ now = time.time()
+ popen = self.popen(cmdargs, stdout=f1, stderr=f2,
+ close_fds=(sys.platform != "win32"))
+ ret = popen.wait()
+ finally:
+ f1.close()
+ f2.close()
+ f1 = codecs.open(str(p1), "r", encoding="utf8")
+ f2 = codecs.open(str(p2), "r", encoding="utf8")
+ try:
+ out = f1.read().splitlines()
+ err = f2.read().splitlines()
+ finally:
+ f1.close()
+ f2.close()
+ self._dump_lines(out, sys.stdout)
+ self._dump_lines(err, sys.stderr)
+ return RunResult(ret, out, err, time.time()-now)
+
+ def _dump_lines(self, lines, fp):
+ try:
+ for line in lines:
+ py.builtin.print_(line, file=fp)
+ except UnicodeEncodeError:
+ print("couldn't print to %s because of encoding" % (fp,))
+
+ def _getpytestargs(self):
+ # we cannot use "(sys.executable,script)"
+ # because on Windows the script is e.g. a py.test.exe
+ return (sys.executable, _pytest_fullpath,) # noqa
+
+ def runpython(self, script):
+ """Run a python script using sys.executable as interpreter.
+
+ Returns a :py:class:`RunResult`.
+ """
+ return self.run(sys.executable, script)
+
+ def runpython_c(self, command):
+ """Run python -c "command", return a :py:class:`RunResult`."""
+ return self.run(sys.executable, "-c", command)
+
+ def runpytest_subprocess(self, *args, **kwargs):
+ """Run py.test as a subprocess with given arguments.
+
+ Any plugins added to the :py:attr:`plugins` list will be added
+ using the ``-p`` command line option. Additionally
+ ``--basetemp`` is used to put any temporary files and directories
+ in a numbered directory prefixed with "runpytest-" so they do
+ not conflict with the normal numbered pytest location for
+ temporary files and directories.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ p = py.path.local.make_numbered_dir(prefix="runpytest-",
+ keep=None, rootdir=self.tmpdir)
+ args = ('--basetemp=%s' % p, ) + args
+ #for x in args:
+ # if '--confcutdir' in str(x):
+ # break
+ #else:
+ # pass
+ # args = ('--confcutdir=.',) + args
+ plugins = [x for x in self.plugins if isinstance(x, str)]
+ if plugins:
+ args = ('-p', plugins[0]) + args
+ args = self._getpytestargs() + args
+ return self.run(*args)
+
+ def spawn_pytest(self, string, expect_timeout=10.0):
+ """Run py.test using pexpect.
+
+ This makes sure to use the right py.test and sets up the
+ temporary directory locations.
+
+ The pexpect child is returned.
+
+ """
+ basetemp = self.tmpdir.mkdir("pexpect")
+ invoke = " ".join(map(str, self._getpytestargs()))
+ cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
+ return self.spawn(cmd, expect_timeout=expect_timeout)
+
+ def spawn(self, cmd, expect_timeout=10.0):
+ """Run a command using pexpect.
+
+ The pexpect child is returned.
+ """
+ pexpect = pytest.importorskip("pexpect", "3.0")
+ if hasattr(sys, 'pypy_version_info') and '64' in platform.machine():
+ pytest.skip("pypy-64 bit not supported")
+ if sys.platform == "darwin":
+ pytest.xfail("pexpect does not work reliably on darwin?!")
+ if sys.platform.startswith("freebsd"):
+ pytest.xfail("pexpect does not work reliably on freebsd")
+ logfile = self.tmpdir.join("spawn.out").open("wb")
+ child = pexpect.spawn(cmd, logfile=logfile)
+ self.request.addfinalizer(logfile.close)
+ child.timeout = expect_timeout
+ return child
+
+def getdecoded(out):
+ try:
+ return out.decode("utf-8")
+ except UnicodeDecodeError:
+ return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
+ py.io.saferepr(out),)
+
+
+class LineComp:
+ def __init__(self):
+ self.stringio = py.io.TextIO()
+
+ def assert_contains_lines(self, lines2):
+ """ assert that lines2 are contained (linearly) in lines1.
+ return a list of extralines found.
+ """
+ __tracebackhide__ = True
+ val = self.stringio.getvalue()
+ self.stringio.truncate(0)
+ self.stringio.seek(0)
+ lines1 = val.split("\n")
+ return LineMatcher(lines1).fnmatch_lines(lines2)
+
+
+class LineMatcher:
+ """Flexible matching of text.
+
+ This is a convenience class to test large texts like the output of
+ commands.
+
+ The constructor takes a list of lines without their trailing
+ newlines, i.e. ``text.splitlines()``.
+
+ """
+
+ def __init__(self, lines):
+ self.lines = lines
+
+ def str(self):
+ """Return the entire original text."""
+ return "\n".join(self.lines)
+
+ def _getlines(self, lines2):
+ if isinstance(lines2, str):
+ lines2 = Source(lines2)
+ if isinstance(lines2, Source):
+ lines2 = lines2.strip().lines
+ return lines2
+
+ def fnmatch_lines_random(self, lines2):
+ """Check lines exist in the output.
+
+ The argument is a list of lines which have to occur in the
+ output, in any order. Each line can contain glob wildcards.
+
+ """
+ lines2 = self._getlines(lines2)
+ for line in lines2:
+ for x in self.lines:
+ if line == x or fnmatch(x, line):
+ print_("matched: ", repr(line))
+ break
+ else:
+ raise ValueError("line %r not found in output" % line)
+
+ def get_lines_after(self, fnline):
+ """Return all lines following the given line in the text.
+
+ The given line can contain glob wildcards.
+ """
+ for i, line in enumerate(self.lines):
+ if fnline == line or fnmatch(line, fnline):
+ return self.lines[i+1:]
+ raise ValueError("line %r not found in output" % fnline)
+
+ def fnmatch_lines(self, lines2):
+ """Search the text for matching lines.
+
+ The argument is a list of lines which have to match and can
+ use glob wildcards. If they do not match, pytest.fail() is
+ called. The matches and non-matches are also printed on
+ stdout.
+
+ """
+ def show(arg1, arg2):
+ py.builtin.print_(arg1, arg2, file=sys.stderr)
+ lines2 = self._getlines(lines2)
+ lines1 = self.lines[:]
+ nextline = None
+ extralines = []
+ __tracebackhide__ = True
+ for line in lines2:
+ nomatchprinted = False
+ while lines1:
+ nextline = lines1.pop(0)
+ if line == nextline:
+ show("exact match:", repr(line))
+ break
+ elif fnmatch(nextline, line):
+ show("fnmatch:", repr(line))
+ show(" with:", repr(nextline))
+ break
+ else:
+ if not nomatchprinted:
+ show("nomatch:", repr(line))
+ nomatchprinted = True
+ show(" and:", repr(nextline))
+ extralines.append(nextline)
+ else:
+ pytest.fail("remains unmatched: %r, see stderr" % (line,))
diff --git a/python/pytest/_pytest/python.py b/python/pytest/_pytest/python.py
new file mode 100644
index 000000000..21d78aea3
--- /dev/null
+++ b/python/pytest/_pytest/python.py
@@ -0,0 +1,2300 @@
+""" Python test discovery, setup and run of test functions. """
+import fnmatch
+import functools
+import inspect
+import re
+import types
+import sys
+
+import py
+import pytest
+from _pytest._code.code import TerminalRepr
+from _pytest.mark import MarkDecorator, MarkerError
+
+try:
+ import enum
+except ImportError: # pragma: no cover
+ # Only available in Python 3.4+ or as a backport
+ enum = None
+
+import _pytest
+import _pytest._pluggy as pluggy
+
+cutdir2 = py.path.local(_pytest.__file__).dirpath()
+cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
+
+
+NoneType = type(None)
+NOTSET = object()
+isfunction = inspect.isfunction
+isclass = inspect.isclass
+callable = py.builtin.callable
+# used to work around a python2 exception info leak
+exc_clear = getattr(sys, 'exc_clear', lambda: None)
+# The type of re.compile objects is not exposed in Python.
+REGEX_TYPE = type(re.compile(''))
+
+_PY3 = sys.version_info > (3, 0)
+_PY2 = not _PY3
+
+
+if hasattr(inspect, 'signature'):
+ def _format_args(func):
+ return str(inspect.signature(func))
+else:
+ def _format_args(func):
+ return inspect.formatargspec(*inspect.getargspec(func))
+
+if sys.version_info[:2] == (2, 6):
+ def isclass(object):
+ """ Return true if the object is a class. Overrides inspect.isclass for
+ python 2.6 because it will return True for objects which always return
+ something on __getattr__ calls (see #1035).
+ Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc
+ """
+ return isinstance(object, (type, types.ClassType))
+
+def _has_positional_arg(func):
+ return func.__code__.co_argcount
+
+
+def filter_traceback(entry):
+ # entry.path might sometimes return a str object when the entry
+ # points to dynamically generated code
+ # see https://bitbucket.org/pytest-dev/py/issues/71
+ raw_filename = entry.frame.code.raw.co_filename
+ is_generated = '<' in raw_filename and '>' in raw_filename
+ if is_generated:
+ return False
+ # entry.path might point to a non-existing file, in which case it will
+ # also return a str object. see #1133
+ p = py.path.local(entry.path)
+ return p != cutdir1 and not p.relto(cutdir2)
+
+
+def get_real_func(obj):
+ """ gets the real function object of the (possibly) wrapped object by
+ functools.wraps or functools.partial.
+ """
+ while hasattr(obj, "__wrapped__"):
+ obj = obj.__wrapped__
+ if isinstance(obj, functools.partial):
+ obj = obj.func
+ return obj
+
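+# A minimal sketch (not from the original module): get_real_func() follows
+# __wrapped__ attributes and functools.partial objects, e.g.
+#
+#   import functools
+#   def f(): pass
+#   @functools.wraps(f)
+#   def g(): pass
+#   assert get_real_func(g) is f
+#   assert get_real_func(functools.partial(f)) is f
+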
+def getfslineno(obj):
+ # xxx let decorators etc specify a sane ordering
+ obj = get_real_func(obj)
+ if hasattr(obj, 'place_as'):
+ obj = obj.place_as
+ fslineno = _pytest._code.getfslineno(obj)
+ assert isinstance(fslineno[1], int), obj
+ return fslineno
+
+def getimfunc(func):
+ try:
+ return func.__func__
+ except AttributeError:
+ try:
+ return func.im_func
+ except AttributeError:
+ return func
+
+def safe_getattr(object, name, default):
+ """ Like getattr but return default upon any Exception.
+
+ Attribute access can potentially fail for 'evil' Python objects.
+ See issue214
+ """
+ try:
+ return getattr(object, name, default)
+ except Exception:
+ return default
+
+
+class FixtureFunctionMarker:
+ def __init__(self, scope, params,
+ autouse=False, yieldctx=False, ids=None):
+ self.scope = scope
+ self.params = params
+ self.autouse = autouse
+ self.yieldctx = yieldctx
+ self.ids = ids
+
+ def __call__(self, function):
+ if isclass(function):
+ raise ValueError(
+ "class fixtures not supported (may be in the future)")
+ function._pytestfixturefunction = self
+ return function
+
+
+def fixture(scope="function", params=None, autouse=False, ids=None):
+ """ (return a) decorator to mark a fixture factory function.
+
+ This decorator can be used (with or without parameters) to define
+ a fixture function. The name of the fixture function can later be
+ referenced to cause its invocation ahead of running tests: test
+ modules or classes can use the pytest.mark.usefixtures(fixturename)
+ marker. Test functions can directly use fixture names as input
+ arguments in which case the fixture instance returned from the fixture
+ function will be injected.
+
+ :arg scope: the scope for which this fixture is shared, one of
+ "function" (default), "class", "module", "session".
+
+ :arg params: an optional list of parameters which will cause multiple
+ invocations of the fixture function and all of the tests
+ using it.
+
+ :arg autouse: if True, the fixture func is activated for all tests that
+ can see it. If False (the default) then an explicit
+ reference is needed to activate the fixture.
+
+ :arg ids: list of string ids each corresponding to the params
+ so that they are part of the test id. If no ids are provided
+ they will be generated automatically from the params.
+
+ """
+ if callable(scope) and params is None and autouse == False:
+ # direct decoration
+ return FixtureFunctionMarker(
+ "function", params, autouse)(scope)
+ if params is not None and not isinstance(params, (list, tuple)):
+ params = list(params)
+ return FixtureFunctionMarker(scope, params, autouse, ids=ids)
+
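+# Hedged usage sketch (illustrative names): a parametrized, module-scoped
+# fixture runs each test that uses it once per parameter:
+#
+#   @fixture(scope="module", params=[1, 2])
+#   def number(request):
+#       return request.param
+#
+#   def test_positive(number):
+#       assert number > 0
+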
+def yield_fixture(scope="function", params=None, autouse=False, ids=None):
+ """ (return a) decorator to mark a yield-fixture factory function
+ (EXPERIMENTAL).
+
+ This takes the same arguments as :py:func:`pytest.fixture` but
+ expects a fixture function to use a ``yield`` instead of a ``return``
+ statement to provide a fixture. See
+ http://pytest.org/en/latest/yieldfixture.html for more info.
+ """
+ if callable(scope) and params is None and autouse == False:
+ # direct decoration
+ return FixtureFunctionMarker(
+ "function", params, autouse, yieldctx=True)(scope)
+ else:
+ return FixtureFunctionMarker(scope, params, autouse,
+ yieldctx=True, ids=ids)
+
+defaultfuncargprefixmarker = fixture()
+
+def pyobj_property(name):
+ def get(self):
+ node = self.getparent(getattr(pytest, name))
+ if node is not None:
+ return node.obj
+ doc = "python %s object this node was collected from (can be None)." % (
+ name.lower(),)
+ return property(get, None, None, doc)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption('--fixtures', '--funcargs',
+ action="store_true", dest="showfixtures", default=False,
+ help="show available fixtures, sorted by plugin appearance")
+ parser.addini("usefixtures", type="args", default=[],
+ help="list of default fixtures to be used with this project")
+ parser.addini("python_files", type="args",
+ default=['test_*.py', '*_test.py'],
+ help="glob-style file patterns for Python test module discovery")
+ parser.addini("python_classes", type="args", default=["Test",],
+ help="prefixes or glob names for Python test class discovery")
+ parser.addini("python_functions", type="args", default=["test",],
+ help="prefixes or glob names for Python test function and "
+ "method discovery")
+
+ group.addoption("--import-mode", default="prepend",
+ choices=["prepend", "append"], dest="importmode",
+ help="prepend/append to sys.path when importing test modules, "
+ "default is to prepend.")
+
+
+def pytest_cmdline_main(config):
+ if config.option.showfixtures:
+ showfixtures(config)
+ return 0
+
+
+def pytest_generate_tests(metafunc):
+ # those alternative spellings are common - raise a specific error to alert
+ # the user
+ alt_spellings = ['parameterize', 'parametrise', 'parameterise']
+ for attr in alt_spellings:
+ if hasattr(metafunc.function, attr):
+ msg = "{0} has '{1}', spelling should be 'parametrize'"
+ raise MarkerError(msg.format(metafunc.function.__name__, attr))
+ try:
+ markers = metafunc.function.parametrize
+ except AttributeError:
+ return
+ for marker in markers:
+ metafunc.parametrize(*marker.args, **marker.kwargs)
+
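+# Hedged illustration: the hook above expands a parametrize marker into one
+# call per argument set (standard pytest usage),
+#
+#   @pytest.mark.parametrize("arg1", [1, 2])
+#   def test_arg(arg1):
+#       assert arg1 in (1, 2)
+#
+# while misspellings such as ``parameterize`` raise MarkerError instead of
+# being silently ignored.
+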
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in different arguments in turn. argvalues generally "
+ "needs to be a list of values if argnames specifies only one name "
+ "or a list of tuples of values if argnames specifies multiple names. "
+ "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
+ "decorated test function, one with arg1=1 and another with arg1=2."
+ "see http://pytest.org/latest/parametrize.html for more info and "
+ "examples."
+ )
+ config.addinivalue_line("markers",
+ "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
+ "all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures "
+ )
+
+def pytest_sessionstart(session):
+ session._fixturemanager = FixtureManager(session)
+
+@pytest.hookimpl(trylast=True)
+def pytest_namespace():
+ raises.Exception = pytest.fail.Exception
+ return {
+ 'fixture': fixture,
+ 'yield_fixture': yield_fixture,
+ 'raises' : raises,
+ 'collect': {
+ 'Module': Module, 'Class': Class, 'Instance': Instance,
+ 'Function': Function, 'Generator': Generator,
+ '_fillfuncargs': fillfixtures}
+ }
+
+@fixture(scope="session")
+def pytestconfig(request):
+ """ the pytest config object with access to command line opts."""
+ return request.config
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_pyfunc_call(pyfuncitem):
+ testfunction = pyfuncitem.obj
+ if pyfuncitem._isyieldedfunction():
+ testfunction(*pyfuncitem._args)
+ else:
+ funcargs = pyfuncitem.funcargs
+ testargs = {}
+ for arg in pyfuncitem._fixtureinfo.argnames:
+ testargs[arg] = funcargs[arg]
+ testfunction(**testargs)
+ return True
+
+def pytest_collect_file(path, parent):
+ ext = path.ext
+ if ext == ".py":
+ if not parent.session.isinitpath(path):
+ for pat in parent.config.getini('python_files'):
+ if path.fnmatch(pat):
+ break
+ else:
+ return
+ ihook = parent.session.gethookproxy(path)
+ return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
+
+def pytest_pycollect_makemodule(path, parent):
+ return Module(path, parent)
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ outcome = yield
+ res = outcome.get_result()
+ if res is not None:
+ raise StopIteration
+ # nothing was collected elsewhere, let's do it here
+ if isclass(obj):
+ if collector.istestclass(obj, name):
+ Class = collector._getcustomclass("Class")
+ outcome.force_result(Class(name, parent=collector))
+ elif collector.istestfunction(obj, name):
+ # mock seems to store unbound methods (issue473), normalize it
+ obj = getattr(obj, "__func__", obj)
+ # We need to try and unwrap the function if it's a functools.partial
+ # or wrapped via functools.wraps.
+ # We mustn't if it's been wrapped with mock.patch (python 2 only)
+ if not (isfunction(obj) or isfunction(get_real_func(obj))):
+ collector.warn(code="C2", message=
+ "cannot collect %r because it is not a function."
+ % name, )
+ elif getattr(obj, "__test__", True):
+ if is_generator(obj):
+ res = Generator(name, parent=collector)
+ else:
+ res = list(collector._genfunctions(name, obj))
+ outcome.force_result(res)
+
+def is_generator(func):
+ try:
+ return _pytest._code.getrawcode(func).co_flags & 32 # generator function
+ except AttributeError: # builtin functions have no bytecode
+ # assume them to not be generators
+ return False
+
+class PyobjContext(object):
+ module = pyobj_property("Module")
+ cls = pyobj_property("Class")
+ instance = pyobj_property("Instance")
+
+class PyobjMixin(PyobjContext):
+ def obj():
+ def fget(self):
+ try:
+ return self._obj
+ except AttributeError:
+ self._obj = obj = self._getobj()
+ return obj
+ def fset(self, value):
+ self._obj = value
+ return property(fget, fset, None, "underlying python object")
+ obj = obj()
+
+ def _getobj(self):
+ return getattr(self.parent.obj, self.name)
+
+ def getmodpath(self, stopatmodule=True, includemodule=False):
+ """ return python path relative to the containing module. """
+ chain = self.listchain()
+ chain.reverse()
+ parts = []
+ for node in chain:
+ if isinstance(node, Instance):
+ continue
+ name = node.name
+ if isinstance(node, Module):
+ assert name.endswith(".py")
+ name = name[:-3]
+ if stopatmodule:
+ if includemodule:
+ parts.append(name)
+ break
+ parts.append(name)
+ parts.reverse()
+ s = ".".join(parts)
+ return s.replace(".[", "[")
+
+ def _getfslineno(self):
+ return getfslineno(self.obj)
+
+ def reportinfo(self):
+ # XXX caching?
+ obj = self.obj
+ compat_co_firstlineno = getattr(obj, 'compat_co_firstlineno', None)
+ if isinstance(compat_co_firstlineno, int):
+ # nose compatibility
+ fspath = sys.modules[obj.__module__].__file__
+ if fspath.endswith(".pyc"):
+ fspath = fspath[:-1]
+ lineno = compat_co_firstlineno
+ else:
+ fspath, lineno = getfslineno(obj)
+ modpath = self.getmodpath()
+ assert isinstance(lineno, int)
+ return fspath, lineno, modpath
+
+class PyCollector(PyobjMixin, pytest.Collector):
+
+ def funcnamefilter(self, name):
+ return self._matches_prefix_or_glob_option('python_functions', name)
+
+ def isnosetest(self, obj):
+ """ Look for the __test__ attribute, which is applied by the
+ @nose.tools.istest decorator
+ """
+ # We explicitly check for "is True" here to not mistakenly treat
+ # classes with a custom __getattr__ returning something truthy (like a
+ # function) as test classes.
+ return safe_getattr(obj, '__test__', False) is True
+
+ def classnamefilter(self, name):
+ return self._matches_prefix_or_glob_option('python_classes', name)
+
+ def istestfunction(self, obj, name):
+ return (
+ (self.funcnamefilter(name) or self.isnosetest(obj)) and
+ safe_getattr(obj, "__call__", False) and getfixturemarker(obj) is None
+ )
+
+ def istestclass(self, obj, name):
+ return self.classnamefilter(name) or self.isnosetest(obj)
+
+ def _matches_prefix_or_glob_option(self, option_name, name):
+ """
+ checks if the given name matches the prefix or glob-pattern defined
+ in ini configuration.
+ """
+ for option in self.config.getini(option_name):
+ if name.startswith(option):
+ return True
+ # check that name looks like a glob-string before calling fnmatch
+ # because this is called for every name in each collected module,
+ # and fnmatch is somewhat expensive to call
+ elif ('*' in option or '?' in option or '[' in option) and \
+ fnmatch.fnmatch(name, option):
+ return True
+ return False
+
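+ # Hedged examples (hypothetical ini values): with python_classes = Test
+ # the name "TestLogin" matches by prefix, while python_classes = *Suite
+ # matches "LoginSuite" via fnmatch.
+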
+ def collect(self):
+ if not getattr(self.obj, "__test__", True):
+ return []
+
+ # NB. we avoid random getattrs and peek in the __dict__ instead
+ # (XXX originally introduced from a PyPy need, still true?)
+ dicts = [getattr(self.obj, '__dict__', {})]
+ for basecls in inspect.getmro(self.obj.__class__):
+ dicts.append(basecls.__dict__)
+ seen = {}
+ l = []
+ for dic in dicts:
+ for name, obj in list(dic.items()):
+ if name in seen:
+ continue
+ seen[name] = True
+ res = self.makeitem(name, obj)
+ if res is None:
+ continue
+ if not isinstance(res, list):
+ res = [res]
+ l.extend(res)
+ l.sort(key=lambda item: item.reportinfo()[:2])
+ return l
+
+ def makeitem(self, name, obj):
+ #assert self.ihook.fspath == self.fspath, self
+ return self.ihook.pytest_pycollect_makeitem(
+ collector=self, name=name, obj=obj)
+
+ def _genfunctions(self, name, funcobj):
+ module = self.getparent(Module).obj
+ clscol = self.getparent(Class)
+ cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
+ fm = self.session._fixturemanager
+ fixtureinfo = fm.getfixtureinfo(self, funcobj, cls)
+ metafunc = Metafunc(funcobj, fixtureinfo, self.config,
+ cls=cls, module=module)
+ methods = []
+ if hasattr(module, "pytest_generate_tests"):
+ methods.append(module.pytest_generate_tests)
+ if hasattr(cls, "pytest_generate_tests"):
+ methods.append(cls().pytest_generate_tests)
+ if methods:
+ self.ihook.pytest_generate_tests.call_extra(methods,
+ dict(metafunc=metafunc))
+ else:
+ self.ihook.pytest_generate_tests(metafunc=metafunc)
+
+ Function = self._getcustomclass("Function")
+ if not metafunc._calls:
+ yield Function(name, parent=self, fixtureinfo=fixtureinfo)
+ else:
+ # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
+ add_funcarg_pseudo_fixture_def(self, metafunc, fm)
+
+ for callspec in metafunc._calls:
+ subname = "%s[%s]" %(name, callspec.id)
+ yield Function(name=subname, parent=self,
+ callspec=callspec, callobj=funcobj,
+ fixtureinfo=fixtureinfo,
+ keywords={callspec.id:True})
+
+def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
+ # this function will transform all collected calls to a function
+ # if they use direct funcargs (i.e. direct parametrization)
+ # because we want later test execution to be able to rely on
+ # an existing FixtureDef structure for all arguments.
+ # XXX we can probably avoid this algorithm if we modify CallSpec2
+ # to directly care for creating the fixturedefs within its methods.
+ if not metafunc._calls[0].funcargs:
+ return # this function call does not have direct parametrization
+ # collect funcargs of all callspecs into a list of values
+ arg2params = {}
+ arg2scope = {}
+ for callspec in metafunc._calls:
+ for argname, argvalue in callspec.funcargs.items():
+ assert argname not in callspec.params
+ callspec.params[argname] = argvalue
+ arg2params_list = arg2params.setdefault(argname, [])
+ callspec.indices[argname] = len(arg2params_list)
+ arg2params_list.append(argvalue)
+ if argname not in arg2scope:
+ scopenum = callspec._arg2scopenum.get(argname,
+ scopenum_function)
+ arg2scope[argname] = scopes[scopenum]
+ callspec.funcargs.clear()
+
+ # register artificial FixtureDef's so that later at test execution
+ # time we can rely on a proper FixtureDef to exist for fixture setup.
+ arg2fixturedefs = metafunc._arg2fixturedefs
+ for argname, valuelist in arg2params.items():
+ # if we have a scope that is higher than function we need
+ # to make sure we only ever create an according fixturedef on
+ # a per-scope basis. We thus store and cache the fixturedef on the
+ # node related to the scope.
+ scope = arg2scope[argname]
+ node = None
+ if scope != "function":
+ node = get_scope_node(collector, scope)
+ if node is None:
+ assert scope == "class" and isinstance(collector, Module)
+ # use module-level collector for class-scope (for now)
+ node = collector
+ if node and argname in node._name2pseudofixturedef:
+ arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
+ else:
+ fixturedef = FixtureDef(fixturemanager, '', argname,
+ get_direct_param_fixture_func,
+ arg2scope[argname],
+ valuelist, False, False)
+ arg2fixturedefs[argname] = [fixturedef]
+ if node is not None:
+ node._name2pseudofixturedef[argname] = fixturedef
+
+
+def get_direct_param_fixture_func(request):
+ return request.param
+
+class FuncFixtureInfo:
+ def __init__(self, argnames, names_closure, name2fixturedefs):
+ self.argnames = argnames
+ self.names_closure = names_closure
+ self.name2fixturedefs = name2fixturedefs
+
+
+def _marked(func, mark):
+ """ Returns True if :func: is already marked with :mark:, False otherwise.
+ This can happen if marker is applied to class and the test file is
+ invoked more than once.
+ """
+ try:
+ func_mark = getattr(func, mark.name)
+ except AttributeError:
+ return False
+ return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs
+
+
+def transfer_markers(funcobj, cls, mod):
+ # XXX this should rather be code in the mark plugin or the mark
+ # plugin should merge with the python plugin.
+ for holder in (cls, mod):
+ try:
+ pytestmark = holder.pytestmark
+ except AttributeError:
+ continue
+ if isinstance(pytestmark, list):
+ for mark in pytestmark:
+ if not _marked(funcobj, mark):
+ mark(funcobj)
+ else:
+ if not _marked(funcobj, pytestmark):
+ pytestmark(funcobj)
+
+class Module(pytest.File, PyCollector):
+ """ Collector for test classes and functions. """
+ def _getobj(self):
+ return self._memoizedcall('_obj', self._importtestmodule)
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Module, self).collect()
+
+ def _importtestmodule(self):
+ # we assume we are only called once per module
+ importmode = self.config.getoption("--import-mode")
+ try:
+ mod = self.fspath.pyimport(ensuresyspath=importmode)
+ except SyntaxError:
+ raise self.CollectError(
+ _pytest._code.ExceptionInfo().getrepr(style="short"))
+ except self.fspath.ImportMismatchError:
+ e = sys.exc_info()[1]
+ raise self.CollectError(
+ "import file mismatch:\n"
+ "imported module %r has this __file__ attribute:\n"
+ " %s\n"
+ "which is not the same as the test file we want to collect:\n"
+ " %s\n"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules"
+ % e.args
+ )
+ #print "imported test module", mod
+ self.config.pluginmanager.consider_module(mod)
+ return mod
+
+ def setup(self):
+ setup_module = xunitsetup(self.obj, "setUpModule")
+ if setup_module is None:
+ setup_module = xunitsetup(self.obj, "setup_module")
+ if setup_module is not None:
+ #XXX: nose compat hack, move to nose plugin
+ # if it takes a positional arg, it's probably a pytest style one
+ # so we pass the current module object
+ if _has_positional_arg(setup_module):
+ setup_module(self.obj)
+ else:
+ setup_module()
+ fin = getattr(self.obj, 'tearDownModule', None)
+ if fin is None:
+ fin = getattr(self.obj, 'teardown_module', None)
+ if fin is not None:
+ #XXX: nose compat hack, move to nose plugin
+ # if it takes a positional arg, it's probably a pytest style one
+ # so we pass the current module object
+ if _has_positional_arg(fin):
+ finalizer = lambda: fin(self.obj)
+ else:
+ finalizer = fin
+ self.addfinalizer(finalizer)
+
+
+class Class(PyCollector):
+ """ Collector for test methods. """
+ def collect(self):
+ if hasinit(self.obj):
+ self.warn("C1", "cannot collect test class %r because it has a "
+ "__init__ constructor" % self.obj.__name__)
+ return []
+ return [self._getcustomclass("Instance")(name="()", parent=self)]
+
+ def setup(self):
+ setup_class = xunitsetup(self.obj, 'setup_class')
+ if setup_class is not None:
+ setup_class = getattr(setup_class, 'im_func', setup_class)
+ setup_class = getattr(setup_class, '__func__', setup_class)
+ setup_class(self.obj)
+
+ fin_class = getattr(self.obj, 'teardown_class', None)
+ if fin_class is not None:
+ fin_class = getattr(fin_class, 'im_func', fin_class)
+ fin_class = getattr(fin_class, '__func__', fin_class)
+ self.addfinalizer(lambda: fin_class(self.obj))
+
+class Instance(PyCollector):
+ def _getobj(self):
+ obj = self.parent.obj()
+ return obj
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Instance, self).collect()
+
+ def newinstance(self):
+ self.obj = self._getobj()
+ return self.obj
+
+class FunctionMixin(PyobjMixin):
+ """ mixin for the code common to Function and Generator.
+ """
+
+ def setup(self):
+ """ perform setup for this test function. """
+ if hasattr(self, '_preservedparent'):
+ obj = self._preservedparent
+ elif isinstance(self.parent, Instance):
+ obj = self.parent.newinstance()
+ self.obj = self._getobj()
+ else:
+ obj = self.parent.obj
+ if inspect.ismethod(self.obj):
+ setup_name = 'setup_method'
+ teardown_name = 'teardown_method'
+ else:
+ setup_name = 'setup_function'
+ teardown_name = 'teardown_function'
+ setup_func_or_method = xunitsetup(obj, setup_name)
+ if setup_func_or_method is not None:
+ setup_func_or_method(self.obj)
+ fin = getattr(obj, teardown_name, None)
+ if fin is not None:
+ self.addfinalizer(lambda: fin(self.obj))
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, '_obj') and not self.config.option.fulltrace:
+ code = _pytest._code.Code(get_real_func(self.obj))
+ path, firstlineno = code.path, code.firstlineno
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(path=path)
+ if ntraceback == traceback:
+ #ntraceback = ntraceback.cut(excludepath=cutdir2)
+ ntraceback = ntraceback.filter(filter_traceback)
+ if not ntraceback:
+ ntraceback = traceback
+
+ excinfo.traceback = ntraceback.filter()
+ # issue364: mark all but first and last frames to
+ # only show a single-line message for each frame
+ if self.config.option.tbstyle == "auto":
+ if len(excinfo.traceback) > 2:
+ for entry in excinfo.traceback[1:-1]:
+ entry.set_repr_style('short')
+
+ def _repr_failure_py(self, excinfo, style="long"):
+ if excinfo.errisinstance(pytest.fail.Exception):
+ if not excinfo.value.pytrace:
+ return py._builtin._totext(excinfo.value)
+ return super(FunctionMixin, self)._repr_failure_py(excinfo,
+ style=style)
+
+ def repr_failure(self, excinfo, outerr=None):
+ assert outerr is None, "XXX outerr usage is deprecated"
+ style = self.config.option.tbstyle
+ if style == "auto":
+ style = "long"
+ return self._repr_failure_py(excinfo, style=style)
+
+
+class Generator(FunctionMixin, PyCollector):
+ def collect(self):
+        # test generators are seen as collectors, but they also
+        # invoke setup/teardown like ordinary test functions
+        # (induced by the common "test_*" naming shared with normal tests)
+ self.session._setupstate.prepare(self)
+ # see FunctionMixin.setup and test_setupstate_is_preserved_134
+ self._preservedparent = self.parent.obj
+ l = []
+ seen = {}
+ for i, x in enumerate(self.obj()):
+ name, call, args = self.getcallargs(x)
+ if not callable(call):
+ raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
+ if name is None:
+ name = "[%d]" % i
+ else:
+ name = "['%s']" % name
+ if name in seen:
+ raise ValueError("%r generated tests with non-unique name %r" %(self, name))
+ seen[name] = True
+ l.append(self.Function(name, self, args=args, callobj=call))
+ return l
+
+ def getcallargs(self, obj):
+ if not isinstance(obj, (tuple, list)):
+ obj = (obj,)
+        # explicit naming
+ if isinstance(obj[0], py.builtin._basestring):
+ name = obj[0]
+ obj = obj[1:]
+ else:
+ name = None
+ call, args = obj[0], obj[1:]
+ return name, call, args
+
+
+def hasinit(obj):
+ init = getattr(obj, '__init__', None)
+ if init:
+ if init != object.__init__:
+ return True
+
+
+
+def fillfixtures(function):
+ """ fill missing funcargs for a test function. """
+ try:
+ request = function._request
+ except AttributeError:
+ # XXX this special code path is only expected to execute
+ # with the oejskit plugin. It uses classes with funcargs
+ # and we thus have to work a bit to allow this.
+ fm = function.session._fixturemanager
+ fi = fm.getfixtureinfo(function.parent, function.obj, None)
+ function._fixtureinfo = fi
+ request = function._request = FixtureRequest(function)
+ request._fillfixtures()
+ # prune out funcargs for jstests
+ newfuncargs = {}
+ for name in fi.argnames:
+ newfuncargs[name] = function.funcargs[name]
+ function.funcargs = newfuncargs
+ else:
+ request._fillfixtures()
+
+
+_notexists = object()
+
+class CallSpec2(object):
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = _notexists
+ self._globalid_args = set()
+ self._globalparam = _notexists
+ self._arg2scopenum = {} # used for sorting parametrized resources
+ self.keywords = {}
+ self.indices = {}
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs.keywords.update(self.keywords)
+ cs.indices.update(self.indices)
+ cs._arg2scopenum.update(self._arg2scopenum)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" %(arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is _notexists:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum,
+ param_index):
+ for arg,val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ valtype_for_arg = valtypes[arg]
+ getattr(self, valtype_for_arg)[arg] = val
+ self.indices[arg] = param_index
+ self._arg2scopenum[arg] = scopenum
+ self._idlist.append(id)
+ self.keywords.update(keywords)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not _notexists:
+ self._idlist.append(id)
+ if param is not _notexists:
+ assert self._globalparam is _notexists
+ self._globalparam = param
+ for arg in funcargs:
+ self._arg2scopenum[arg] = scopenum_function
+
+
+class FuncargnamesCompatAttr:
+ """ helper class so that Metafunc, Function and FixtureRequest
+ don't need to each define the "funcargnames" compatibility attribute.
+ """
+ @property
+ def funcargnames(self):
+ """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
+ return self.fixturenames
+
+class Metafunc(FuncargnamesCompatAttr):
+ """
+ Metafunc objects are passed to the ``pytest_generate_tests`` hook.
+ They help to inspect a test function and to generate tests according to
+ test configuration or values specified in the class or module where a
+ test function is defined.
+
+ :ivar fixturenames: set of fixture names required by the test function
+
+ :ivar function: underlying python test function
+
+    :ivar cls: class object in which the test function is defined, or ``None``.
+
+    :ivar module: the module object in which the test function is defined.
+
+ :ivar config: access to the :class:`_pytest.config.Config` object for the
+ test session.
+
+ :ivar funcargnames:
+ .. deprecated:: 2.3
+ Use ``fixturenames`` instead.
+ """
+ def __init__(self, function, fixtureinfo, config, cls=None, module=None):
+ self.config = config
+ self.module = module
+ self.function = function
+ self.fixturenames = fixtureinfo.names_closure
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs
+ self.cls = cls
+ self._calls = []
+ self._ids = py.builtin.set()
+
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None,
+ scope=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+        during the collection phase. If you need to set up expensive resources,
+        consider setting ``indirect`` so that this happens at test setup time
+        rather than at collection time.
+
+ :arg argnames: a comma-separated string denoting one or more argument
+ names, or a list/tuple of argument strings.
+
+ :arg argvalues: The list of argvalues determines how often a
+ test is invoked with different argument values. If only one
+ argname was specified argvalues is a list of values. If N
+ argnames were specified, argvalues must be a list of N-tuples,
+ where each tuple-element specifies a value for its respective
+ argname.
+
+        :arg indirect: a list of argument names (a subset of argnames), or a
+            boolean. If True, it is treated as the list of all argnames.
+            Each argvalue corresponding to an argname in this list will
+            be passed as request.param to its respective fixture
+            function so that it can perform more expensive setups during the
+            setup phase of a test rather than at collection time.
+
+ :arg ids: list of string ids, or a callable.
+            If strings, each corresponds to one entry of argvalues and becomes
+            part of the test id.
+            If callable, it should take one argument (a single argvalue) and
+            return a string or None. If it returns None, the automatically
+            generated id for that argument will be used.
+ If no ids are provided they will be generated automatically from
+ the argvalues.
+
+ :arg scope: if specified it denotes the scope of the parameters.
+ The scope is used for grouping tests by parameter instances.
+            It also overrides any scope defined on the fixture function itself,
+            allowing a dynamic scope to be set from test context or configuration.
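+
+        A typical usage sketch (the test function and its values are
+        illustrative)::
+
+            @pytest.mark.parametrize("input,expected", [
+                ("3+5", 8),
+                ("2+4", 6),
+            ])
+            def test_eval(input, expected):
+                assert eval(input) == expected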
+ """
+
+ # individual parametrized argument sets can be wrapped in a series
+ # of markers in which case we unwrap the values and apply the mark
+ # at Function init
+ newkeywords = {}
+ unwrapped_argvalues = []
+ for i, argval in enumerate(argvalues):
+ while isinstance(argval, MarkDecorator):
+ newmark = MarkDecorator(argval.markname,
+ argval.args[:-1], argval.kwargs)
+ newmarks = newkeywords.setdefault(i, {})
+ newmarks[newmark.markname] = newmark
+ argval = argval.args[-1]
+ unwrapped_argvalues.append(argval)
+ argvalues = unwrapped_argvalues
+
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ if len(argnames) == 1:
+ argvalues = [(val,) for val in argvalues]
+ if not argvalues:
+ argvalues = [(_notexists,) * len(argnames)]
+            # we passed an empty list to parametrize, skip that test
+ #
+ fs, lineno = getfslineno(self.function)
+ newmark = pytest.mark.skip(
+ reason="got empty parameter set %r, function %s at %s:%d" % (
+ argnames, self.function.__name__, fs, lineno))
+ newmarks = newkeywords.setdefault(0, {})
+ newmarks[newmark.markname] = newmark
+
+
+ if scope is None:
+ scope = "function"
+ scopenum = scopes.index(scope)
+ valtypes = {}
+ for arg in argnames:
+ if arg not in self.fixturenames:
+ raise ValueError("%r uses no fixture %r" %(self.function, arg))
+
+ if indirect is True:
+ valtypes = dict.fromkeys(argnames, "params")
+ elif indirect is False:
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ elif isinstance(indirect, (tuple, list)):
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ for arg in indirect:
+ if arg not in argnames:
+ raise ValueError("indirect given to %r: fixture %r doesn't exist" %(
+ self.function, arg))
+ valtypes[arg] = "params"
+ idfn = None
+ if callable(ids):
+ idfn = ids
+ ids = None
+ if ids and len(ids) != len(argvalues):
+ raise ValueError('%d tests specified with %d ids' %(
+ len(argvalues), len(ids)))
+ if not ids:
+ ids = idmaker(argnames, argvalues, idfn)
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ for param_index, valset in enumerate(argvalues):
+ assert len(valset) == len(argnames)
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti(valtypes, argnames, valset, ids[param_index],
+ newkeywords.get(param_index, {}), scopenum,
+ param_index)
+ newcalls.append(newcallspec)
+ self._calls = newcalls
+
+ def addcall(self, funcargs=None, id=_notexists, param=_notexists):
+ """ (deprecated, use parametrize) Add a new call to the underlying
+ test function during the collection phase of a test run. Note that
+ request.addcall() is called during the test collection phase prior and
+        addcall() is invoked during the test collection phase, prior to and
+        independently of actual test execution. You should only use addcall()
+
+ :arg funcargs: argument keyword dictionary used when invoking
+ the test function.
+
+ :arg id: used for reporting and identification purposes. If you
+ don't supply an `id` an automatic unique id will be generated.
+
+ :arg param: a parameter which will be exposed to a later fixture function
+ invocation through the ``request.param`` attribute.
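+
+        A usage sketch inside a ``pytest_generate_tests`` hook (names and
+        values are illustrative)::
+
+            def pytest_generate_tests(metafunc):
+                if "arg1" in metafunc.fixturenames:
+                    metafunc.addcall(funcargs=dict(arg1=10))
+                    metafunc.addcall(funcargs=dict(arg1=20))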
+ """
+ assert funcargs is None or isinstance(funcargs, dict)
+ if funcargs is not None:
+ for name in funcargs:
+ if name not in self.fixturenames:
+ pytest.fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
+ if id is None:
+ raise ValueError("id=None not allowed")
+ if id is _notexists:
+ id = len(self._calls)
+ id = str(id)
+ if id in self._ids:
+ raise ValueError("duplicate id %r" % id)
+ self._ids.add(id)
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+
+if _PY3:
+ import codecs
+
+ def _escape_bytes(val):
+ """
+ If val is pure ascii, returns it as a str(), otherwise escapes
+ into a sequence of escaped bytes:
+ b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6'
+
+ note:
+          the obvious "val.decode('unicode-escape')" will return
+          valid unicode for any utf-8 sequences it finds, but we
+          want escaped bytes for every non-ascii byte, even those that
+          happen to form a valid utf-8 string.
+ """
+ if val:
+ # source: http://goo.gl/bGsnwC
+ encoded_bytes, _ = codecs.escape_encode(val)
+ return encoded_bytes.decode('ascii')
+ else:
+ # empty bytes crashes codecs.escape_encode (#1087)
+ return ''
+else:
+ def _escape_bytes(val):
+ """
+ In py2 bytes and str are the same type, so return it unchanged if it
+ is a full ascii string, otherwise escape it into its binary form.
+ """
+ try:
+ return val.decode('ascii')
+ except UnicodeDecodeError:
+ return val.encode('string-escape')
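+
+# e.g. either variant maps _escape_bytes(b'ascii') to 'ascii', while a
+# non-ascii byte string such as b'\xc3\xb4' is escaped to '\\xc3\\xb4'.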
+
+
+def _idval(val, argname, idx, idfn):
+ if idfn:
+ try:
+ s = idfn(val)
+ if s:
+ return s
+ except Exception:
+ pass
+
+ if isinstance(val, bytes):
+ return _escape_bytes(val)
+ elif isinstance(val, (float, int, str, bool, NoneType)):
+ return str(val)
+ elif isinstance(val, REGEX_TYPE):
+ return _escape_bytes(val.pattern) if isinstance(val.pattern, bytes) else val.pattern
+ elif enum is not None and isinstance(val, enum.Enum):
+ return str(val)
+ elif isclass(val) and hasattr(val, '__name__'):
+ return val.__name__
+ elif _PY2 and isinstance(val, unicode):
+ # special case for python 2: if a unicode string is
+ # convertible to ascii, return it as an str() object instead
+ try:
+ return str(val)
+ except UnicodeError:
+ # fallthrough
+ pass
+ return str(argname)+str(idx)
+
+def _idvalset(idx, valset, argnames, idfn):
+ this_id = [_idval(val, argname, idx, idfn)
+ for val, argname in zip(valset, argnames)]
+ return "-".join(this_id)
+
+def idmaker(argnames, argvalues, idfn=None):
+ ids = [_idvalset(valindex, valset, argnames, idfn)
+ for valindex, valset in enumerate(argvalues)]
+ if len(set(ids)) < len(ids):
+ # user may have provided a bad idfn which means the ids are not unique
+ ids = [str(i) + testid for i, testid in enumerate(ids)]
+ return ids
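+
+# e.g. idmaker(("a", "b"), [(1, "x"), (2, "y")]) -> ["1-x", "2-y"]; if a
+# user-supplied idfn collapses all sets to the same string, the index
+# prefix makes them unique again: ["0same", "1same"].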
+
+def showfixtures(config):
+ from _pytest.main import wrap_session
+ return wrap_session(config, _showfixtures_main)
+
+def _showfixtures_main(config, session):
+ import _pytest.config
+ session.perform_collect()
+ curdir = py.path.local()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ fm = session._fixturemanager
+
+ available = []
+ for argname, fixturedefs in fm._arg2fixturedefs.items():
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ for fixturedef in fixturedefs:
+ loc = getlocation(fixturedef.func, curdir)
+ available.append((len(fixturedef.baseid),
+ fixturedef.func.__module__,
+ curdir.bestrelpath(loc),
+ fixturedef.argname, fixturedef))
+
+ available.sort()
+ currentmodule = None
+ for baseid, module, bestrel, argname, fixturedef in available:
+ if currentmodule != module:
+ if not module.startswith("_pytest."):
+ tw.line()
+ tw.sep("-", "fixtures defined from %s" %(module,))
+ currentmodule = module
+ if verbose <= 0 and argname[0] == "_":
+ continue
+ if verbose > 0:
+ funcargspec = "%s -- %s" %(argname, bestrel,)
+ else:
+ funcargspec = argname
+ tw.line(funcargspec, green=True)
+ loc = getlocation(fixturedef.func, curdir)
+ doc = fixturedef.func.__doc__ or ""
+ if doc:
+ for line in doc.strip().split("\n"):
+ tw.line(" " + line.strip())
+ else:
+ tw.line(" %s: no docstring available" %(loc,),
+ red=True)
+
+def getlocation(function, curdir):
+ import inspect
+ fn = py.path.local(inspect.getfile(function))
+ lineno = py.builtin._getcode(function).co_firstlineno
+ if fn.relto(curdir):
+ fn = fn.relto(curdir)
+ return "%s:%d" %(fn, lineno+1)
+
+# builtin pytest.raises helper
+
+def raises(expected_exception, *args, **kwargs):
+ """ assert that a code block/function call raises ``expected_exception``
+ and raise a failure exception otherwise.
+
+ This helper produces a ``ExceptionInfo()`` object (see below).
+
+ If using Python 2.5 or above, you may use this function as a
+ context manager::
+    # requested scope and which we haven't seen yet. We slice the input
+    # items list into items_before, items_same and items_other
+    # (plus an updated ignore set).
+
+ .. note::
+
+ When using ``pytest.raises`` as a context manager, it's worthwhile to
+ note that normal context manager rules apply and that the exception
+ raised *must* be the final line in the scope of the context manager.
+        Lines of code after that, within the scope of the context manager,
+        will not be executed. For example::
+
+        >>> with raises(OSError) as exc_info:
+        ...     assert 1 == 1  # this will execute as expected
+        ...     raise OSError(errno.EEXIST, 'directory exists')
+        ...     assert exc_info.value.errno == errno.EEXIST  # this will not execute
+
+        Instead, the following approach must be taken (note the difference in
+        scope)::
+
+        >>> with raises(OSError) as exc_info:
+        ...     assert 1 == 1  # this will execute as expected
+        ...     raise OSError(errno.EEXIST, 'directory exists')
+
+        >>> assert exc_info.value.errno == errno.EEXIST  # this will now execute
+
+ Or you can specify a callable by passing a to-be-called lambda::
+
+ >>> raises(ZeroDivisionError, lambda: 1/0)
+ <ExceptionInfo ...>
+
+ or you can specify an arbitrary callable with arguments::
+
+ >>> def f(x): return 1/x
+ ...
+ >>> raises(ZeroDivisionError, f, 0)
+ <ExceptionInfo ...>
+ >>> raises(ZeroDivisionError, f, x=0)
+ <ExceptionInfo ...>
+
+ A third possibility is to use a string to be executed::
+
+ >>> raises(ZeroDivisionError, "f(0)")
+ <ExceptionInfo ...>
+
+ .. autoclass:: _pytest._code.ExceptionInfo
+ :members:
+
+ .. note::
+ Similar to caught exception objects in Python, explicitly clearing
+ local references to returned ``ExceptionInfo`` objects can
+ help the Python interpreter speed up its garbage collection.
+
+ Clearing those references breaks a reference cycle
+ (``ExceptionInfo`` --> caught exception --> frame stack raising
+ the exception --> current frame stack --> local variables -->
+ ``ExceptionInfo``) which makes Python keep all objects referenced
+ from that cycle (including all local variables in the current
+ frame) alive until the next cyclic garbage collection run. See the
+ official Python ``try`` statement documentation for more detailed
+ information.
+
+ """
+ __tracebackhide__ = True
+ if expected_exception is AssertionError:
+        # we want to catch an AssertionError
+ # replace our subclass with the builtin one
+ # see https://github.com/pytest-dev/pytest/issues/176
+ from _pytest.assertion.util import BuiltinAssertionError \
+ as expected_exception
+ msg = ("exceptions must be old-style classes or"
+ " derived from BaseException, not %s")
+ if isinstance(expected_exception, tuple):
+ for exc in expected_exception:
+ if not isclass(exc):
+ raise TypeError(msg % type(exc))
+ elif not isclass(expected_exception):
+ raise TypeError(msg % type(expected_exception))
+
+ if not args:
+ return RaisesContext(expected_exception)
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+ #print "raises frame scope: %r" % frame.f_locals
+ try:
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+            # XXX didn't f_globals == f_locals mean something special?
+            # that property is destroyed here ...
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ else:
+ func = args[0]
+ try:
+ func(*args[1:], **kwargs)
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ pytest.fail("DID NOT RAISE {0}".format(expected_exception))
+
+class RaisesContext(object):
+ def __init__(self, expected_exception):
+ self.expected_exception = expected_exception
+ self.excinfo = None
+
+ def __enter__(self):
+ self.excinfo = object.__new__(_pytest._code.ExceptionInfo)
+ return self.excinfo
+
+ def __exit__(self, *tp):
+ __tracebackhide__ = True
+ if tp[0] is None:
+ pytest.fail("DID NOT RAISE")
+ if sys.version_info < (2, 7):
+ # py26: on __exit__() exc_value often does not contain the
+ # exception value.
+ # http://bugs.python.org/issue7853
+ if not isinstance(tp[1], BaseException):
+ exc_type, value, traceback = tp
+ tp = exc_type, exc_type(value), traceback
+ self.excinfo.__init__(tp)
+ return issubclass(self.excinfo.type, self.expected_exception)
+
+#
+# the basic pytest Function item
+#
+
+class Function(FunctionMixin, pytest.Item, FuncargnamesCompatAttr):
+ """ a Function Item is responsible for setting up and executing a
+ Python test function.
+ """
+ _genid = None
+ def __init__(self, name, parent, args=None, config=None,
+ callspec=None, callobj=NOTSET, keywords=None, session=None,
+ fixtureinfo=None):
+ super(Function, self).__init__(name, parent, config=config,
+ session=session)
+ self._args = args
+ if callobj is not NOTSET:
+ self.obj = callobj
+
+ self.keywords.update(self.obj.__dict__)
+ if callspec:
+ self.callspec = callspec
+ self.keywords.update(callspec.keywords)
+ if keywords:
+ self.keywords.update(keywords)
+
+ if fixtureinfo is None:
+ fixtureinfo = self.session._fixturemanager.getfixtureinfo(
+ self.parent, self.obj, self.cls,
+ funcargs=not self._isyieldedfunction())
+ self._fixtureinfo = fixtureinfo
+ self.fixturenames = fixtureinfo.names_closure
+ self._initrequest()
+
+ def _initrequest(self):
+ self.funcargs = {}
+ if self._isyieldedfunction():
+ assert not hasattr(self, "callspec"), (
+ "yielded functions (deprecated) cannot have funcargs")
+ else:
+ if hasattr(self, "callspec"):
+ callspec = self.callspec
+ assert not callspec.funcargs
+ self._genid = callspec.id
+ if hasattr(callspec, "param"):
+ self.param = callspec.param
+ self._request = FixtureRequest(self)
+
+ @property
+ def function(self):
+ "underlying python 'function' object"
+ return getattr(self.obj, 'im_func', self.obj)
+
+ def _getobj(self):
+ name = self.name
+ i = name.find("[") # parametrization
+ if i != -1:
+ name = name[:i]
+ return getattr(self.parent.obj, name)
+
+ @property
+ def _pyfuncitem(self):
+ "(compatonly) for code expecting pytest-2.2 style request objects"
+ return self
+
+ def _isyieldedfunction(self):
+ return getattr(self, "_args", None) is not None
+
+ def runtest(self):
+ """ execute the underlying test function. """
+ self.ihook.pytest_pyfunc_call(pyfuncitem=self)
+
+ def setup(self):
+ super(Function, self).setup()
+ fillfixtures(self)
+
+
+scope2props = dict(session=())
+scope2props["module"] = ("fspath", "module")
+scope2props["class"] = scope2props["module"] + ("cls",)
+scope2props["instance"] = scope2props["class"] + ("instance", )
+scope2props["function"] = scope2props["instance"] + ("function", "keywords")
+
+def scopeproperty(name=None, doc=None):
+ def decoratescope(func):
+ scopename = name or func.__name__
+ def provide(self):
+ if func.__name__ in scope2props[self.scope]:
+ return func(self)
+ raise AttributeError("%s not available in %s-scoped context" % (
+ scopename, self.scope))
+ return property(provide, None, None, func.__doc__)
+ return decoratescope
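+
+# e.g. request.module (defined below via @scopeproperty) is available on
+# module-or-narrower scoped requests, while accessing it on a
+# session-scoped request raises AttributeError.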
+
+
+class FixtureRequest(FuncargnamesCompatAttr):
+ """ A request for a fixture from a test or fixture function.
+
+ A request object gives access to the requesting test context
+ and has an optional ``param`` attribute in case
+ the fixture is parametrized indirectly.
+ """
+
+ def __init__(self, pyfuncitem):
+ self._pyfuncitem = pyfuncitem
+ #: fixture for which this request is being performed
+ self.fixturename = None
+ #: Scope string, one of "function", "class", "module", "session"
+ self.scope = "function"
+ self._funcargs = {}
+ self._fixturedefs = {}
+ fixtureinfo = pyfuncitem._fixtureinfo
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
+ self._arg2index = {}
+ self.fixturenames = fixtureinfo.names_closure
+ self._fixturemanager = pyfuncitem.session._fixturemanager
+
+ @property
+ def node(self):
+ """ underlying collection node (depends on current request scope)"""
+ return self._getscopeitem(self.scope)
+
+
+ def _getnextfixturedef(self, argname):
+ fixturedefs = self._arg2fixturedefs.get(argname, None)
+ if fixturedefs is None:
+            # we arrive here because of a dynamic call to
+            # getfuncargvalue(argname) which was naturally
+            # not known at parsing/collection time
+ fixturedefs = self._fixturemanager.getfixturedefs(
+ argname, self._pyfuncitem.parent.nodeid)
+ self._arg2fixturedefs[argname] = fixturedefs
+ # fixturedefs list is immutable so we maintain a decreasing index
+ index = self._arg2index.get(argname, 0) - 1
+ if fixturedefs is None or (-index > len(fixturedefs)):
+ raise FixtureLookupError(argname, self)
+ self._arg2index[argname] = index
+ return fixturedefs[index]
+
+ @property
+ def config(self):
+ """ the pytest config object associated with this request. """
+ return self._pyfuncitem.config
+
+
+ @scopeproperty()
+ def function(self):
+ """ test function object if the request has a per-function scope. """
+ return self._pyfuncitem.obj
+
+ @scopeproperty("class")
+ def cls(self):
+ """ class (can be None) where the test function was collected. """
+ clscol = self._pyfuncitem.getparent(pytest.Class)
+ if clscol:
+ return clscol.obj
+
+ @property
+ def instance(self):
+ """ instance (can be None) on which test function was collected. """
+ # unittest support hack, see _pytest.unittest.TestCaseFunction
+ try:
+ return self._pyfuncitem._testcase
+ except AttributeError:
+ function = getattr(self, "function", None)
+ if function is not None:
+ return py.builtin._getimself(function)
+
+ @scopeproperty()
+ def module(self):
+ """ python module object where the test function was collected. """
+ return self._pyfuncitem.getparent(pytest.Module).obj
+
+ @scopeproperty()
+ def fspath(self):
+ """ the file system path of the test module which collected this test. """
+ return self._pyfuncitem.fspath
+
+ @property
+ def keywords(self):
+ """ keywords/markers dictionary for the underlying node. """
+ return self.node.keywords
+
+ @property
+ def session(self):
+ """ pytest session object. """
+ return self._pyfuncitem.session
+
+ def addfinalizer(self, finalizer):
+ """ add finalizer/teardown function to be called after the
+ last test within the requesting test context finished
+ execution. """
+ # XXX usually this method is shadowed by fixturedef specific ones
+ self._addfinalizer(finalizer, scope=self.scope)
+
+ def _addfinalizer(self, finalizer, scope):
+ colitem = self._getscopeitem(scope)
+ self._pyfuncitem.session._setupstate.addfinalizer(
+ finalizer=finalizer, colitem=colitem)
+
+ def applymarker(self, marker):
+ """ Apply a marker to a single test function invocation.
+ This method is useful if you don't want to have a keyword/marker
+ on all function invocations.
+
+ :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
+ created by a call to ``pytest.mark.NAME(...)``.
+ """
+ try:
+ self.node.keywords[marker.markname] = marker
+ except AttributeError:
+ raise ValueError(marker)
+
+ def raiseerror(self, msg):
+ """ raise a FixtureLookupError with the given message. """
+ raise self._fixturemanager.FixtureLookupError(None, self, msg)
+
+ def _fillfixtures(self):
+ item = self._pyfuncitem
+ fixturenames = getattr(item, "fixturenames", self.fixturenames)
+ for argname in fixturenames:
+ if argname not in item.funcargs:
+ item.funcargs[argname] = self.getfuncargvalue(argname)
+
+ def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
+ """ (deprecated) Return a testing resource managed by ``setup`` &
+ ``teardown`` calls. ``scope`` and ``extrakey`` determine when the
+        ``teardown`` function will be called so that subsequent calls to
+        ``setup`` will recreate the resource. With pytest-2.3 you often
+ do not need ``cached_setup()`` as you can directly declare a scope
+ on a fixture function and register a finalizer through
+ ``request.addfinalizer()``.
+
+ :arg teardown: function receiving a previously setup resource.
+ :arg setup: a no-argument function creating a resource.
+ :arg scope: a string value out of ``function``, ``class``, ``module``
+ or ``session`` indicating the caching lifecycle of the resource.
+ :arg extrakey: added to internal caching key of (funcargname, scope).
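+
+        A usage sketch in the pre-2.3 funcarg style (``setup_db`` and
+        ``teardown_db`` are hypothetical helpers)::
+
+            def pytest_funcarg__db(request):
+                return request.cached_setup(
+                    setup=setup_db, teardown=teardown_db, scope="session")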
+ """
+ if not hasattr(self.config, '_setupcache'):
+ self.config._setupcache = {} # XXX weakref?
+ cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
+ cache = self.config._setupcache
+ try:
+ val = cache[cachekey]
+ except KeyError:
+ self._check_scope(self.fixturename, self.scope, scope)
+ val = setup()
+ cache[cachekey] = val
+ if teardown is not None:
+ def finalizer():
+ del cache[cachekey]
+ teardown(val)
+ self._addfinalizer(finalizer, scope=scope)
+ return val
+
+ def getfuncargvalue(self, argname):
+ """ Dynamically retrieve a named fixture function argument.
+
+        As of pytest-2.3, it is easier and usually better to access other
+        fixture values by stating them as input arguments of the fixture
+        function. If you can only decide whether to use another fixture at
+        test setup time, you may use this function to retrieve it inside a
+        fixture function body.
+ """
+ return self._get_active_fixturedef(argname).cached_result[0]
+
+ def _get_active_fixturedef(self, argname):
+ try:
+ return self._fixturedefs[argname]
+ except KeyError:
+ try:
+ fixturedef = self._getnextfixturedef(argname)
+ except FixtureLookupError:
+ if argname == "request":
+ class PseudoFixtureDef:
+ cached_result = (self, [0], None)
+ scope = "function"
+ return PseudoFixtureDef
+ raise
+ # remove indent to prevent the python3 exception
+ # from leaking into the call
+ result = self._getfuncargvalue(fixturedef)
+ self._funcargs[argname] = result
+ self._fixturedefs[argname] = fixturedef
+ return fixturedef
+
+ def _get_fixturestack(self):
+ current = self
+ l = []
+ while 1:
+ fixturedef = getattr(current, "_fixturedef", None)
+ if fixturedef is None:
+ l.reverse()
+ return l
+ l.append(fixturedef)
+ current = current._parent_request
+
+ def _getfuncargvalue(self, fixturedef):
+ # prepare a subrequest object before calling fixture function
+ # (latter managed by fixturedef)
+ argname = fixturedef.argname
+ funcitem = self._pyfuncitem
+ scope = fixturedef.scope
+ try:
+ param = funcitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ param = NOTSET
+ param_index = 0
+ else:
+ # indices might not be set if old-style metafunc.addcall() was used
+ param_index = funcitem.callspec.indices.get(argname, 0)
+ # if a parametrize invocation set a scope it will override
+ # the static scope defined with the fixture function
+ paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
+ if paramscopenum is not None:
+ scope = scopes[paramscopenum]
+
+ subrequest = SubRequest(self, scope, param, param_index, fixturedef)
+
+ # check if a higher-level scoped fixture accesses a lower level one
+ subrequest._check_scope(argname, self.scope, scope)
+
+ # clear sys.exc_info before invoking the fixture (python bug?)
+        # if it's not explicitly cleared it will leak into the call
+ exc_clear()
+ try:
+ # call the fixture function
+ val = fixturedef.execute(request=subrequest)
+ finally:
+ # if fixture function failed it might have registered finalizers
+ self.session._setupstate.addfinalizer(fixturedef.finish,
+ subrequest.node)
+ return val
+
+ def _check_scope(self, argname, invoking_scope, requested_scope):
+ if argname == "request":
+ return
+ if scopemismatch(invoking_scope, requested_scope):
+ # try to report something helpful
+ lines = self._factorytraceback()
+ pytest.fail("ScopeMismatch: You tried to access the %r scoped "
+ "fixture %r with a %r scoped request object, "
+ "involved factories\n%s" %(
+ (requested_scope, argname, invoking_scope, "\n".join(lines))),
+ pytrace=False)
+
+ def _factorytraceback(self):
+ lines = []
+ for fixturedef in self._get_fixturestack():
+ factory = fixturedef.func
+ fs, lineno = getfslineno(factory)
+ p = self._pyfuncitem.session.fspath.bestrelpath(fs)
+ args = _format_args(factory)
+ lines.append("%s:%d: def %s%s" %(
+ p, lineno, factory.__name__, args))
+ return lines
+
+ def _getscopeitem(self, scope):
+ if scope == "function":
+ # this might also be a non-function Item despite its attribute name
+ return self._pyfuncitem
+ node = get_scope_node(self._pyfuncitem, scope)
+ if node is None and scope == "class":
+ # fallback to function item itself
+ node = self._pyfuncitem
+ assert node
+ return node
+
+ def __repr__(self):
+ return "<FixtureRequest for %r>" %(self.node)
+
+
+class SubRequest(FixtureRequest):
+ """ a sub request for handling getting a fixture from a
+ test function/fixture. """
+ def __init__(self, request, scope, param, param_index, fixturedef):
+ self._parent_request = request
+ self.fixturename = fixturedef.argname
+ if param is not NOTSET:
+ self.param = param
+ self.param_index = param_index
+ self.scope = scope
+ self._fixturedef = fixturedef
+ self.addfinalizer = fixturedef.addfinalizer
+ self._pyfuncitem = request._pyfuncitem
+ self._funcargs = request._funcargs
+ self._fixturedefs = request._fixturedefs
+ self._arg2fixturedefs = request._arg2fixturedefs
+ self._arg2index = request._arg2index
+ self.fixturenames = request.fixturenames
+ self._fixturemanager = request._fixturemanager
+
+ def __repr__(self):
+ return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
+
+
+class ScopeMismatchError(Exception):
+    """ A fixture function tries to use a different fixture function
+    which has a lower scope (e.g. a session-scoped one calls a
+    function-scoped one).
+ """
+
+scopes = "session module class function".split()
+scopenum_function = scopes.index("function")
+def scopemismatch(currentscope, newscope):
+ return scopes.index(newscope) > scopes.index(currentscope)
+
+
+class FixtureLookupError(LookupError):
+ """ could not return a requested Fixture (missing or invalid). """
+ def __init__(self, argname, request, msg=None):
+ self.argname = argname
+ self.request = request
+ self.fixturestack = request._get_fixturestack()
+ self.msg = msg
+
+ def formatrepr(self):
+ tblines = []
+ addline = tblines.append
+ stack = [self.request._pyfuncitem.obj]
+ stack.extend(map(lambda x: x.func, self.fixturestack))
+ msg = self.msg
+ if msg is not None:
+            # the last fixture raised an error, let's present
+            # it at the requesting side
+ stack = stack[:-1]
+ for function in stack:
+ fspath, lineno = getfslineno(function)
+ try:
+ lines, _ = inspect.getsourcelines(get_real_func(function))
+ except (IOError, IndexError):
+ error_msg = "file %s, line %s: source code not available"
+ addline(error_msg % (fspath, lineno+1))
+ else:
+ addline("file %s, line %s" % (fspath, lineno+1))
+ for i, line in enumerate(lines):
+ line = line.rstrip()
+ addline(" " + line)
+ if line.lstrip().startswith('def'):
+ break
+
+ if msg is None:
+ fm = self.request._fixturemanager
+ available = []
+ for name, fixturedef in fm._arg2fixturedefs.items():
+ parentid = self.request._pyfuncitem.parent.nodeid
+ faclist = list(fm._matchfactories(fixturedef, parentid))
+ if faclist:
+ available.append(name)
+ msg = "fixture %r not found" % (self.argname,)
+ msg += "\n available fixtures: %s" %(", ".join(available),)
+ msg += "\n use 'py.test --fixtures [testpath]' for help on them."
+
+ return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
+
+class FixtureLookupErrorRepr(TerminalRepr):
+ def __init__(self, filename, firstlineno, tblines, errorstring, argname):
+ self.tblines = tblines
+ self.errorstring = errorstring
+ self.filename = filename
+ self.firstlineno = firstlineno
+ self.argname = argname
+
+ def toterminal(self, tw):
+ #tw.line("FixtureLookupError: %s" %(self.argname), red=True)
+ for tbline in self.tblines:
+ tw.line(tbline.rstrip())
+ for line in self.errorstring.split("\n"):
+ tw.line(" " + line.strip(), red=True)
+ tw.line()
+ tw.line("%s:%d" % (self.filename, self.firstlineno+1))
+
+class FixtureManager:
+ """
+    pytest fixture definitions and information are stored and managed
+    by this class.
+
+ During collection fm.parsefactories() is called multiple times to parse
+ fixture function definitions into FixtureDef objects and internal
+ data structures.
+
+ During collection of test functions, metafunc-mechanics instantiate
+ a FuncFixtureInfo object which is cached per node/func-name.
+ This FuncFixtureInfo object is later retrieved by Function nodes
+ which themselves offer a fixturenames attribute.
+
+ The FuncFixtureInfo object holds information about fixtures and FixtureDefs
+ relevant for a particular function. An initial list of fixtures is
+ assembled like this:
+
+ - ini-defined usefixtures
+ - autouse-marked fixtures along the collection chain up from the function
+ - usefixtures markers at module/class/function level
+ - test function funcargs
+
+ Subsequently the funcfixtureinfo.fixturenames attribute is computed
+    as the closure of the fixtures needed to set up the initial fixtures,
+    i.e. fixtures needed by fixture functions themselves are appended
+ to the fixturenames list.
+
+    During the test setup phase all fixturenames are instantiated,
+    retrieved via a lookup in their FuncFixtureInfo.
+ """
+
+ _argprefix = "pytest_funcarg__"
+ FixtureLookupError = FixtureLookupError
+ FixtureLookupErrorRepr = FixtureLookupErrorRepr
+
+ def __init__(self, session):
+ self.session = session
+ self.config = session.config
+ self._arg2fixturedefs = {}
+ self._holderobjseen = set()
+ self._arg2finish = {}
+ self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
+ session.config.pluginmanager.register(self, "funcmanage")
+
+
+ def getfixtureinfo(self, node, func, cls, funcargs=True):
+ if funcargs and not hasattr(node, "nofuncargs"):
+ if cls is not None:
+ startindex = 1
+ else:
+ startindex = None
+ argnames = getfuncargnames(func, startindex)
+ else:
+ argnames = ()
+ usefixtures = getattr(func, "usefixtures", None)
+ initialnames = argnames
+ if usefixtures is not None:
+ initialnames = usefixtures.args + initialnames
+ fm = node.session._fixturemanager
+ names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames,
+ node)
+ return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs)
+
+ def pytest_plugin_registered(self, plugin):
+ nodeid = None
+ try:
+ p = py.path.local(plugin.__file__)
+ except AttributeError:
+ pass
+ else:
+ # construct the base nodeid which is later used to check
+ # what fixtures are visible for particular tests (as denoted
+ # by their test id)
+ if p.basename.startswith("conftest.py"):
+ nodeid = p.dirpath().relto(self.config.rootdir)
+ if p.sep != "/":
+ nodeid = nodeid.replace(p.sep, "/")
+ self.parsefactories(plugin, nodeid)
+
+ def _getautousenames(self, nodeid):
+ """ return a tuple of fixture names to be used. """
+ autousenames = []
+ for baseid, basenames in self._nodeid_and_autousenames:
+ if nodeid.startswith(baseid):
+ if baseid:
+ i = len(baseid)
+ nextchar = nodeid[i:i+1]
+ if nextchar and nextchar not in ":/":
+ continue
+ autousenames.extend(basenames)
+ # make sure autousenames are sorted by scope, scopenum 0 is session
+ autousenames.sort(
+ key=lambda x: self._arg2fixturedefs[x][-1].scopenum)
+ return autousenames
+
+ def getfixtureclosure(self, fixturenames, parentnode):
+        # collect the closure of all fixtures, starting with the given
+        # fixturenames as the initial set. As we have to visit all
+        # factory definitions anyway, we also return an arg2fixturedefs
+ # mapping so that the caller can reuse it and does not have
+ # to re-discover fixturedefs again for each fixturename
+ # (discovering matching fixtures for a given name/node is expensive)
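+        # e.g. if a test requires "db" and the "db" fixture itself requires
+        # "tmpdir", the returned closure is the autouse names followed by
+        # ["db", "tmpdir"].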
+
+ parentid = parentnode.nodeid
+ fixturenames_closure = self._getautousenames(parentid)
+ def merge(otherlist):
+ for arg in otherlist:
+ if arg not in fixturenames_closure:
+ fixturenames_closure.append(arg)
+ merge(fixturenames)
+ arg2fixturedefs = {}
+ lastlen = -1
+ while lastlen != len(fixturenames_closure):
+ lastlen = len(fixturenames_closure)
+ for argname in fixturenames_closure:
+ if argname in arg2fixturedefs:
+ continue
+ fixturedefs = self.getfixturedefs(argname, parentid)
+ if fixturedefs:
+ arg2fixturedefs[argname] = fixturedefs
+ merge(fixturedefs[-1].argnames)
+ return fixturenames_closure, arg2fixturedefs
+
+ def pytest_generate_tests(self, metafunc):
+ for argname in metafunc.fixturenames:
+ faclist = metafunc._arg2fixturedefs.get(argname)
+ if faclist:
+ fixturedef = faclist[-1]
+ if fixturedef.params is not None:
+ func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]])
+ # skip directly parametrized arguments
+ argnames = func_params[0]
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ if argname not in func_params and argname not in argnames:
+ metafunc.parametrize(argname, fixturedef.params,
+ indirect=True, scope=fixturedef.scope,
+ ids=fixturedef.ids)
+ else:
+ continue # will raise FixtureLookupError at setup time
+
+ def pytest_collection_modifyitems(self, items):
+ # separate parametrized setups
+ items[:] = reorder_items(items)
+
+ def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
+ if nodeid is not NOTSET:
+ holderobj = node_or_obj
+ else:
+ holderobj = node_or_obj.obj
+ nodeid = node_or_obj.nodeid
+ if holderobj in self._holderobjseen:
+ return
+ self._holderobjseen.add(holderobj)
+ autousenames = []
+ for name in dir(holderobj):
+ obj = getattr(holderobj, name, None)
+ # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
+ # or are "@pytest.fixture" marked
+ marker = getfixturemarker(obj)
+ if marker is None:
+ if not name.startswith(self._argprefix):
+ continue
+ if not callable(obj):
+ continue
+ marker = defaultfuncargprefixmarker
+ name = name[len(self._argprefix):]
+ elif not isinstance(marker, FixtureFunctionMarker):
+ # magic globals with __getattr__ might have got us a wrong
+ # fixture attribute
+ continue
+ else:
+ assert not name.startswith(self._argprefix)
+ fixturedef = FixtureDef(self, nodeid, name, obj,
+ marker.scope, marker.params,
+ yieldctx=marker.yieldctx,
+ unittest=unittest, ids=marker.ids)
+ faclist = self._arg2fixturedefs.setdefault(name, [])
+ if fixturedef.has_location:
+ faclist.append(fixturedef)
+ else:
+ # fixturedefs with no location are at the front
+ # so this inserts the current fixturedef after the
+ # existing fixturedefs from external plugins but
+ # before the fixturedefs provided in conftests.
+ i = len([f for f in faclist if not f.has_location])
+ faclist.insert(i, fixturedef)
+ if marker.autouse:
+ autousenames.append(name)
+ if autousenames:
+ self._nodeid_and_autousenames.append((nodeid or '', autousenames))
+
+ def getfixturedefs(self, argname, nodeid):
+ try:
+ fixturedefs = self._arg2fixturedefs[argname]
+ except KeyError:
+ return None
+ else:
+ return tuple(self._matchfactories(fixturedefs, nodeid))
+
+ def _matchfactories(self, fixturedefs, nodeid):
+ for fixturedef in fixturedefs:
+ if nodeid.startswith(fixturedef.baseid):
+ yield fixturedef
+
+
+def fail_fixturefunc(fixturefunc, msg):
+ fs, lineno = getfslineno(fixturefunc)
+ location = "%s:%s" % (fs, lineno+1)
+ source = _pytest._code.Source(fixturefunc)
+ pytest.fail(msg + ":\n\n" + str(source.indent()) + "\n" + location,
+ pytrace=False)
+
+def call_fixture_func(fixturefunc, request, kwargs, yieldctx):
+ if yieldctx:
+ if not is_generator(fixturefunc):
+ fail_fixturefunc(fixturefunc,
+ msg="yield_fixture requires yield statement in function")
+ iter = fixturefunc(**kwargs)
+ next = getattr(iter, "__next__", None)
+ if next is None:
+ next = getattr(iter, "next")
+ res = next()
+ def teardown():
+ try:
+ next()
+ except StopIteration:
+ pass
+ else:
+ fail_fixturefunc(fixturefunc,
+ "yield_fixture function has more than one 'yield'")
+ request.addfinalizer(teardown)
+ else:
+ if is_generator(fixturefunc):
+ fail_fixturefunc(fixturefunc,
+ msg="pytest.fixture functions cannot use ``yield``. "
+ "Instead write and return an inner function/generator "
+ "and let the consumer call and iterate over it.")
+ res = fixturefunc(**kwargs)
+ return res
+
+class FixtureDef:
+ """ A container for a factory definition. """
+ def __init__(self, fixturemanager, baseid, argname, func, scope, params,
+ yieldctx, unittest=False, ids=None):
+ self._fixturemanager = fixturemanager
+ self.baseid = baseid or ''
+ self.has_location = baseid is not None
+ self.func = func
+ self.argname = argname
+ self.scope = scope
+ self.scopenum = scopes.index(scope or "function")
+ self.params = params
+ startindex = unittest and 1 or None
+ self.argnames = getfuncargnames(func, startindex=startindex)
+ self.yieldctx = yieldctx
+ self.unittest = unittest
+ self.ids = ids
+ self._finalizer = []
+
+ def addfinalizer(self, finalizer):
+ self._finalizer.append(finalizer)
+
+ def finish(self):
+ try:
+ while self._finalizer:
+ func = self._finalizer.pop()
+ func()
+ finally:
+ # even if finalization fails, we invalidate
+ # the cached fixture value
+ if hasattr(self, "cached_result"):
+ del self.cached_result
+
+ def execute(self, request):
+ # get required arguments and register our own finish()
+ # with their finalization
+ kwargs = {}
+ for argname in self.argnames:
+ fixturedef = request._get_active_fixturedef(argname)
+ result, arg_cache_key, exc = fixturedef.cached_result
+ request._check_scope(argname, request.scope, fixturedef.scope)
+ kwargs[argname] = result
+ if argname != "request":
+ fixturedef.addfinalizer(self.finish)
+
+ my_cache_key = request.param_index
+ cached_result = getattr(self, "cached_result", None)
+ if cached_result is not None:
+ result, cache_key, err = cached_result
+ if my_cache_key == cache_key:
+ if err is not None:
+ py.builtin._reraise(*err)
+ else:
+ return result
+ # we have a previous but differently parametrized fixture instance
+ # so we need to tear it down before creating a new one
+ self.finish()
+ assert not hasattr(self, "cached_result")
+
+ fixturefunc = self.func
+
+ if self.unittest:
+ if request.instance is not None:
+ # bind the unbound method to the TestCase instance
+ fixturefunc = self.func.__get__(request.instance)
+ else:
+ # the fixture function needs to be bound to the actual
+ # request.instance so that code working with "self" behaves
+ # as expected.
+ if request.instance is not None:
+ fixturefunc = getimfunc(self.func)
+ if fixturefunc != self.func:
+ fixturefunc = fixturefunc.__get__(request.instance)
+
+ try:
+ result = call_fixture_func(fixturefunc, request, kwargs,
+ self.yieldctx)
+ except Exception:
+ self.cached_result = (None, my_cache_key, sys.exc_info())
+ raise
+ self.cached_result = (result, my_cache_key, None)
+ return result
+
+ def __repr__(self):
+ return ("<FixtureDef name=%r scope=%r baseid=%r >" %
+ (self.argname, self.scope, self.baseid))
+
+def num_mock_patch_args(function):
+ """ return number of arguments used up by mock arguments (if any) """
+ patchings = getattr(function, "patchings", None)
+ if not patchings:
+ return 0
+ mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
+ if mock is not None:
+ return len([p for p in patchings
+ if not p.attribute_name and p.new is mock.DEFAULT])
+ return len(patchings)
+
+
+def getfuncargnames(function, startindex=None):
+ # XXX merge with main.py's varnames
+ #assert not isclass(function)
+ realfunction = function
+ while hasattr(realfunction, "__wrapped__"):
+ realfunction = realfunction.__wrapped__
+ if startindex is None:
+ startindex = inspect.ismethod(function) and 1 or 0
+ if realfunction != function:
+ startindex += num_mock_patch_args(function)
+ function = realfunction
+ if isinstance(function, functools.partial):
+ argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0]
+ partial = function
+ argnames = argnames[len(partial.args):]
+ if partial.keywords:
+ for kw in partial.keywords:
+ argnames.remove(kw)
+ else:
+ argnames = inspect.getargs(_pytest._code.getrawcode(function))[0]
+ defaults = getattr(function, 'func_defaults',
+ getattr(function, '__defaults__', None)) or ()
+ numdefaults = len(defaults)
+ if numdefaults:
+ return tuple(argnames[startindex:-numdefaults])
+ return tuple(argnames[startindex:])
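+
+# e.g. for "def f(self, x, y=3)", getfuncargnames(f, startindex=1) returns
+# ("x",): startindex skips the bound "self" and defaulted arguments are
+# stripped from the right.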
+
+# algorithm for sorting on a per-parametrized resource setup basis
+# it is called for scopenum==0 (session) first and performs sorting
+# down to the lower scopes so as to minimize the number of "high scope"
+# setups and teardowns
+
+def reorder_items(items):
+ argkeys_cache = {}
+ for scopenum in range(0, scopenum_function):
+ argkeys_cache[scopenum] = d = {}
+ for item in items:
+ keys = set(get_parametrized_fixture_keys(item, scopenum))
+ if keys:
+ d[item] = keys
+ return reorder_items_atscope(items, set(), argkeys_cache, 0)
+
+def reorder_items_atscope(items, ignore, argkeys_cache, scopenum):
+ if scopenum >= scopenum_function or len(items) < 3:
+ return items
+ items_done = []
+ while 1:
+ items_before, items_same, items_other, newignore = \
+ slice_items(items, ignore, argkeys_cache[scopenum])
+ items_before = reorder_items_atscope(
+ items_before, ignore, argkeys_cache,scopenum+1)
+ if items_same is None:
+ # nothing to reorder in this scope
+ assert items_other is None
+ return items_done + items_before
+ items_done.extend(items_before)
+ items = items_same + items_other
+ ignore = newignore
+
+
+def slice_items(items, ignore, scoped_argkeys_cache):
+ # we pick the first item which uses a fixture instance in the
+ # requested scope and which we haven't seen yet. We slice the input
+ # items list into a list of items_nomatch, items_same and
+ # items_other
+ if scoped_argkeys_cache: # do we need to do work at all?
+ it = iter(items)
+ # first find a slicing key
+ for i, item in enumerate(it):
+ argkeys = scoped_argkeys_cache.get(item)
+ if argkeys is not None:
+ argkeys = argkeys.difference(ignore)
+ if argkeys: # found a slicing key
+ slicing_argkey = argkeys.pop()
+ items_before = items[:i]
+ items_same = [item]
+ items_other = []
+ # now slice the remainder of the list
+ for item in it:
+ argkeys = scoped_argkeys_cache.get(item)
+ if argkeys and slicing_argkey in argkeys and \
+ slicing_argkey not in ignore:
+ items_same.append(item)
+ else:
+ items_other.append(item)
+ newignore = ignore.copy()
+ newignore.add(slicing_argkey)
+ return (items_before, items_same, items_other, newignore)
+ return items, None, None, None
+
+def get_parametrized_fixture_keys(item, scopenum):
+ """ return list of keys for all parametrized arguments which match
+ the specified scope. """
+ assert scopenum < scopenum_function # function
+ try:
+ cs = item.callspec
+ except AttributeError:
+ pass
+ else:
+        # cs.indices.items() yields argnames in arbitrary order, but
+        # then again different functions (items) can change the order of
+        # arguments anyway, so it probably doesn't matter much
+ for argname, param_index in cs.indices.items():
+ if cs._arg2scopenum[argname] != scopenum:
+ continue
+ if scopenum == 0: # session
+ key = (argname, param_index)
+ elif scopenum == 1: # module
+ key = (argname, param_index, item.fspath)
+ elif scopenum == 2: # class
+ key = (argname, param_index, item.fspath, item.cls)
+ yield key
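+
+# e.g. an item parametrized with a session-scoped argname "db" at
+# param_index 0 yields the key ("db", 0); reorder_items above groups
+# items sharing such a key together.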
+
+
+def xunitsetup(obj, name):
+ meth = getattr(obj, name, None)
+ if getfixturemarker(meth) is None:
+ return meth
+
+def getfixturemarker(obj):
+    """ return the fixture marker, or None if it doesn't exist or
+    accessing it raised an exception."""
+ try:
+ return getattr(obj, "_pytestfixturefunction", None)
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ # some objects raise errors like request (from flask import request)
+ # we don't expect them to be fixture functions
+ return None
+
+scopename2class = {
+ 'class': Class,
+ 'module': Module,
+ 'function': pytest.Item,
+}
+def get_scope_node(node, scope):
+ cls = scopename2class.get(scope)
+ if cls is None:
+ if scope == "session":
+ return node.session
+ raise ValueError("unknown scope")
+ return node.getparent(cls)
diff --git a/python/pytest/_pytest/recwarn.py b/python/pytest/_pytest/recwarn.py
new file mode 100644
index 000000000..a89474c03
--- /dev/null
+++ b/python/pytest/_pytest/recwarn.py
@@ -0,0 +1,221 @@
+""" recording warnings during test function execution. """
+
+import inspect
+
+import _pytest._code
+import py
+import sys
+import warnings
+import pytest
+
+
+@pytest.yield_fixture
+def recwarn(request):
+ """Return a WarningsRecorder instance that provides these methods:
+
+    * ``pop(category=Warning)``: return the first warning matching the category.
+ * ``clear()``: clear list of warnings
+
+ See http://docs.python.org/library/warnings.html for information
+ on warning categories.
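+
+    A usage sketch (the warning text is illustrative)::
+
+        def test_hello(recwarn):
+            warnings.warn("deprecated", DeprecationWarning)
+            w = recwarn.pop(DeprecationWarning)
+            assert issubclass(w.category, DeprecationWarning)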
+ """
+ wrec = WarningsRecorder()
+ with wrec:
+ warnings.simplefilter('default')
+ yield wrec
+
+
+def pytest_namespace():
+ return {'deprecated_call': deprecated_call,
+ 'warns': warns}
+
+
+def deprecated_call(func=None, *args, **kwargs):
+ """ assert that calling ``func(*args, **kwargs)`` triggers a
+ ``DeprecationWarning`` or ``PendingDeprecationWarning``.
+
+ This function can be used as a context manager::
+
+ >>> with deprecated_call():
+ ... myobject.deprecated_method()
+
+ Note: we cannot use WarningsRecorder here because it is still subject
+ to the mechanism that prevents warnings of the same type from being
+ triggered twice for the same module. See #1190.
+ """
+ if not func:
+ return WarningsChecker(expected_warning=DeprecationWarning)
+
+ categories = []
+
+ def warn_explicit(message, category, *args, **kwargs):
+ categories.append(category)
+ old_warn_explicit(message, category, *args, **kwargs)
+
+ def warn(message, category=None, *args, **kwargs):
+ if isinstance(message, Warning):
+ categories.append(message.__class__)
+ else:
+ categories.append(category)
+ old_warn(message, category, *args, **kwargs)
+
+ old_warn = warnings.warn
+ old_warn_explicit = warnings.warn_explicit
+ warnings.warn_explicit = warn_explicit
+ warnings.warn = warn
+ try:
+ ret = func(*args, **kwargs)
+ finally:
+ warnings.warn_explicit = old_warn_explicit
+ warnings.warn = old_warn
+ deprecation_categories = (DeprecationWarning, PendingDeprecationWarning)
+ if not any(issubclass(c, deprecation_categories) for c in categories):
+ __tracebackhide__ = True
+ raise AssertionError("%r did not produce DeprecationWarning" % (func,))
+ return ret
+
+
+def warns(expected_warning, *args, **kwargs):
+ """Assert that code raises a particular class of warning.
+
+    Specifically, the input @expected_warning can be a warning class or
+    tuple of warning classes, and the code must raise that warning
+    (if a single class) or one of those warnings (if a tuple).
+
+ This helper produces a list of ``warnings.WarningMessage`` objects,
+ one for each warning raised.
+
+ This function can be used as a context manager, or any of the other ways
+ ``pytest.raises`` can be used::
+
+ >>> with warns(RuntimeWarning):
+ ... warnings.warn("my warning", RuntimeWarning)
+ """
+ wcheck = WarningsChecker(expected_warning)
+ if not args:
+ return wcheck
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+
+ with wcheck:
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+ else:
+ func = args[0]
+ with wcheck:
+ return func(*args[1:], **kwargs)
+
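+# Usage sketch (hypothetical): a tuple of warning classes is accepted as
+# well as a single class:
+#
+#     with warns((RuntimeWarning, UserWarning)):
+#         warnings.warn("either kind passes", UserWarning)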
+
+class RecordedWarning(object):
+ def __init__(self, message, category, filename, lineno, file, line):
+ self.message = message
+ self.category = category
+ self.filename = filename
+ self.lineno = lineno
+ self.file = file
+ self.line = line
+
+
+class WarningsRecorder(object):
+ """A context manager to record raised warnings.
+
+ Adapted from `warnings.catch_warnings`.
+ """
+
+ def __init__(self, module=None):
+ self._module = sys.modules['warnings'] if module is None else module
+ self._entered = False
+ self._list = []
+
+ @property
+ def list(self):
+ """The list of recorded warnings."""
+ return self._list
+
+ def __getitem__(self, i):
+ """Get a recorded warning by index."""
+ return self._list[i]
+
+ def __iter__(self):
+ """Iterate through the recorded warnings."""
+ return iter(self._list)
+
+ def __len__(self):
+ """The number of recorded warnings."""
+ return len(self._list)
+
+ def pop(self, cls=Warning):
+ """Pop the first recorded warning, raise exception if not exists."""
+ for i, w in enumerate(self._list):
+ if issubclass(w.category, cls):
+ return self._list.pop(i)
+ __tracebackhide__ = True
+ raise AssertionError("%r not found in warning list" % cls)
+
+ def clear(self):
+ """Clear the list of recorded warnings."""
+ self._list[:] = []
+
+ def __enter__(self):
+ if self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._entered = True
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._showwarning = self._module.showwarning
+
+ def showwarning(message, category, filename, lineno,
+ file=None, line=None):
+ self._list.append(RecordedWarning(
+ message, category, filename, lineno, file, line))
+
+ # still perform old showwarning functionality
+ self._showwarning(
+ message, category, filename, lineno, file=file, line=line)
+
+ self._module.showwarning = showwarning
+
+ # allow the same warning to be raised more than once
+
+ self._module.simplefilter('always')
+ return self
+
+ def __exit__(self, *exc_info):
+ if not self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ self._module.filters = self._filters
+ self._module.showwarning = self._showwarning
+
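+# Usage sketch (hypothetical): the recorder also works standalone,
+# outside the recwarn fixture:
+#
+#     rec = WarningsRecorder()
+#     with rec:
+#         warnings.warn("once", UserWarning)
+#         warnings.warn("once", UserWarning)
+#     assert len(rec) == 2   # the 'always' filter records duplicates too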
+
+class WarningsChecker(WarningsRecorder):
+ def __init__(self, expected_warning=None, module=None):
+ super(WarningsChecker, self).__init__(module=module)
+
+ msg = ("exceptions must be old-style classes or "
+ "derived from Warning, not %s")
+ if isinstance(expected_warning, tuple):
+ for exc in expected_warning:
+ if not inspect.isclass(exc):
+ raise TypeError(msg % type(exc))
+ elif inspect.isclass(expected_warning):
+ expected_warning = (expected_warning,)
+ elif expected_warning is not None:
+ raise TypeError(msg % type(expected_warning))
+
+ self.expected_warning = expected_warning
+
+ def __exit__(self, *exc_info):
+ super(WarningsChecker, self).__exit__(*exc_info)
+
+ # only check if we're not currently handling an exception
+ if all(a is None for a in exc_info):
+ if self.expected_warning is not None:
+ if not any(r.category in self.expected_warning for r in self):
+ __tracebackhide__ = True
+ pytest.fail("DID NOT WARN")
diff --git a/python/pytest/_pytest/resultlog.py b/python/pytest/_pytest/resultlog.py
new file mode 100644
index 000000000..3670f0214
--- /dev/null
+++ b/python/pytest/_pytest/resultlog.py
@@ -0,0 +1,104 @@
+""" log machine-parseable test session result information in a plain
+text file.
+"""
+
+import py
+import os
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "resultlog plugin options")
+ group.addoption('--resultlog', '--result-log', action="store",
+ metavar="path", default=None,
+ help="path for machine-readable result log.")
+
+def pytest_configure(config):
+ resultlog = config.option.resultlog
+ # prevent opening resultlog on slave nodes (xdist)
+ if resultlog and not hasattr(config, 'slaveinput'):
+ dirname = os.path.dirname(os.path.abspath(resultlog))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(resultlog, 'w', 1) # line buffered
+ config._resultlog = ResultLog(config, logfile)
+ config.pluginmanager.register(config._resultlog)
+
+def pytest_unconfigure(config):
+ resultlog = getattr(config, '_resultlog', None)
+ if resultlog:
+ resultlog.logfile.close()
+ del config._resultlog
+ config.pluginmanager.unregister(resultlog)
+
+def generic_path(item):
+ chain = item.listchain()
+ gpath = [chain[0].name]
+ fspath = chain[0].fspath
+ fspart = False
+ for node in chain[1:]:
+ newfspath = node.fspath
+ if newfspath == fspath:
+ if fspart:
+ gpath.append(':')
+ fspart = False
+ else:
+ gpath.append('.')
+ else:
+ gpath.append('/')
+ fspart = True
+ name = node.name
+ if name[0] in '([':
+ gpath.pop()
+ gpath.append(name)
+ fspath = newfspath
+ return ''.join(gpath)
+
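+# Example (illustrative): for a method test_one on class TestX in
+# test_mod.py, the rendered path looks roughly like
+#
+#     <session>/test_mod.py:TestX().test_one
+#
+# where '/' marks a filesystem boundary, ':' the first non-filesystem
+# part and '.' the remaining parts.
+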
+class ResultLog(object):
+ def __init__(self, config, logfile):
+ self.config = config
+ self.logfile = logfile # preferably line buffered
+
+ def write_log_entry(self, testpath, lettercode, longrepr):
+ py.builtin.print_("%s %s" % (lettercode, testpath), file=self.logfile)
+ for line in longrepr.splitlines():
+ py.builtin.print_(" %s" % line, file=self.logfile)
+
+ def log_outcome(self, report, lettercode, longrepr):
+ testpath = getattr(report, 'nodeid', None)
+ if testpath is None:
+ testpath = report.fspath
+ self.write_log_entry(testpath, lettercode, longrepr)
+
+ def pytest_runtest_logreport(self, report):
+ if report.when != "call" and report.passed:
+ return
+ res = self.config.hook.pytest_report_teststatus(report=report)
+ code = res[1]
+ if code == 'x':
+ longrepr = str(report.longrepr)
+ elif code == 'X':
+ longrepr = ''
+ elif report.passed:
+ longrepr = ""
+ elif report.failed:
+ longrepr = str(report.longrepr)
+ elif report.skipped:
+ longrepr = str(report.longrepr[2])
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ if report.failed:
+ code = "F"
+ longrepr = str(report.longrepr)
+ else:
+ assert report.skipped
+ code = "S"
+ longrepr = "%s:%d: %s" % report.longrepr
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_internalerror(self, excrepr):
+ reprcrash = getattr(excrepr, 'reprcrash', None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
+ self.write_log_entry(path, '!', str(excrepr))
diff --git a/python/pytest/_pytest/runner.py b/python/pytest/_pytest/runner.py
new file mode 100644
index 000000000..cde94c8c8
--- /dev/null
+++ b/python/pytest/_pytest/runner.py
@@ -0,0 +1,515 @@
+""" basic collect and runtest protocol implementations """
+import bdb
+import sys
+from time import time
+
+import py
+import pytest
+from _pytest._code.code import TerminalRepr, ExceptionInfo
+
+
+def pytest_namespace():
+ return {
+ 'fail' : fail,
+ 'skip' : skip,
+ 'importorskip' : importorskip,
+ 'exit' : exit,
+ }
+
+#
+# pytest plugin hooks
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption('--durations',
+ action="store", type=int, default=None, metavar="N",
+ help="show N slowest setup/test durations (N=0 for all)."),
+
+def pytest_terminal_summary(terminalreporter):
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, 'duration'):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" %
+ (rep.duration, rep.when, nodeid))
+
+def pytest_sessionstart(session):
+ session._setupstate = SetupState()
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
+
+class NodeInfo:
+ def __init__(self, location):
+ self.location = location
+
+def pytest_runtest_protocol(item, nextitem):
+ item.ihook.pytest_runtest_logstart(
+ nodeid=item.nodeid, location=item.location,
+ )
+ runtestprotocol(item, nextitem=nextitem)
+ return True
+
+def runtestprotocol(item, log=True, nextitem=None):
+ hasrequest = hasattr(item, "_request")
+ if hasrequest and not item._request:
+ item._initrequest()
+ rep = call_and_report(item, "setup", log)
+ reports = [rep]
+ if rep.passed:
+ reports.append(call_and_report(item, "call", log))
+ reports.append(call_and_report(item, "teardown", log,
+ nextitem=nextitem))
+ # after all teardown hooks have been called
+ # want funcargs and request info to go away
+ if hasrequest:
+ item._request = False
+ item.funcargs = None
+ return reports
+
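+# Note: runtestprotocol always produces "setup" and "teardown" reports;
+# the "call" report is added only when setup passed.
+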
+def pytest_runtest_setup(item):
+ item.session._setupstate.prepare(item)
+
+def pytest_runtest_call(item):
+ try:
+ item.runtest()
+ except Exception:
+ # Store trace info to allow postmortem debugging
+ type, value, tb = sys.exc_info()
+ tb = tb.tb_next # Skip *this* frame
+ sys.last_type = type
+ sys.last_value = value
+ sys.last_traceback = tb
+ del tb # Get rid of it in this namespace
+ raise
+
+def pytest_runtest_teardown(item, nextitem):
+ item.session._setupstate.teardown_exact(item, nextitem)
+
+def pytest_report_teststatus(report):
+ if report.when in ("setup", "teardown"):
+ if report.failed:
+ # category, shortletter, verbose-word
+ return "error", "E", "ERROR"
+ elif report.skipped:
+ return "skipped", "s", "SKIPPED"
+ else:
+ return "", "", ""
+
+
+#
+# Implementation
+
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
+ hook = item.ihook
+ report = hook.pytest_runtest_makereport(item=item, call=call)
+ if log:
+ hook.pytest_runtest_logreport(report=report)
+ if check_interactive_exception(call, report):
+ hook.pytest_exception_interact(node=item, call=call, report=report)
+ return report
+
+def check_interactive_exception(call, report):
+ return call.excinfo and not (
+ hasattr(report, "wasxfail") or
+ call.excinfo.errisinstance(skip.Exception) or
+ call.excinfo.errisinstance(bdb.BdbQuit))
+
+def call_runtest_hook(item, when, **kwds):
+ hookname = "pytest_runtest_" + when
+ ihook = getattr(item.ihook, hookname)
+ return CallInfo(lambda: ihook(item=item, **kwds), when=when)
+
+class CallInfo:
+ """ Result/Exception info a function invocation. """
+ #: None or ExceptionInfo object.
+ excinfo = None
+ def __init__(self, func, when):
+ #: context of invocation: one of "setup", "call",
+ #: "teardown", "memocollect"
+ self.when = when
+ self.start = time()
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ self.stop = time()
+ raise
+ except:
+ self.excinfo = ExceptionInfo()
+ self.stop = time()
+
+ def __repr__(self):
+ if self.excinfo:
+ status = "exception: %s" % str(self.excinfo.value)
+ else:
+ status = "result: %r" % (self.result,)
+ return "<CallInfo when=%r %s>" % (self.when, status)
+
+def getslaveinfoline(node):
+ try:
+ return node._slaveinfocache
+ except AttributeError:
+ d = node.slaveinfo
+ ver = "%s.%s.%s" % d['version_info'][:3]
+ node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
+ d['id'], d['sysplatform'], ver, d['executable'])
+ return s
+
+class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ def toterminal(self, out):
+ if hasattr(self, 'node'):
+ out.line(getslaveinfoline(self.node))
+
+ longrepr = self.longrepr
+ if longrepr is None:
+ return
+
+ if hasattr(longrepr, 'toterminal'):
+ longrepr.toterminal(out)
+ else:
+ try:
+ out.line(longrepr)
+ except UnicodeEncodeError:
+ out.line("<unprintable longrepr>")
+
+ def get_sections(self, prefix):
+ for name, content in self.sections:
+ if name.startswith(prefix):
+ yield prefix, content
+
+ passed = property(lambda x: x.outcome == "passed")
+ failed = property(lambda x: x.outcome == "failed")
+ skipped = property(lambda x: x.outcome == "skipped")
+
+ @property
+ def fspath(self):
+ return self.nodeid.split("::")[0]
+
+def pytest_runtest_makereport(item, call):
+ when = call.when
+ duration = call.stop-call.start
+ keywords = dict([(x,1) for x in item.keywords])
+ excinfo = call.excinfo
+ sections = []
+ if not call.excinfo:
+ outcome = "passed"
+ longrepr = None
+ else:
+ if not isinstance(excinfo, ExceptionInfo):
+ outcome = "failed"
+ longrepr = excinfo
+ elif excinfo.errisinstance(pytest.skip.Exception):
+ outcome = "skipped"
+ r = excinfo._getreprcrash()
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ if call.when == "call":
+ longrepr = item.repr_failure(excinfo)
+ else: # exception in setup or teardown
+ longrepr = item._repr_failure_py(excinfo,
+ style=item.config.option.tbstyle)
+ for rwhen, key, content in item._report_sections:
+ sections.append(("Captured %s %s" %(key, rwhen), content))
+ return TestReport(item.nodeid, item.location,
+ keywords, outcome, longrepr, when,
+ sections, duration)
+
+class TestReport(BaseReport):
+ """ Basic test report object (also used for setup and teardown calls if
+ they fail).
+ """
+ def __init__(self, nodeid, location, keywords, outcome,
+ longrepr, when, sections=(), duration=0, **extra):
+ #: normalized collection node id
+ self.nodeid = nodeid
+
+ #: a (filesystempath, lineno, domaininfo) tuple indicating the
+ #: actual location of a test item - it might be different from the
+ #: collected one e.g. if a method is inherited from a different module.
+ self.location = location
+
+ #: a name -> value dictionary containing all keywords and
+ #: markers associated with a test invocation.
+ self.keywords = keywords
+
+ #: test outcome, always one of "passed", "failed", "skipped".
+ self.outcome = outcome
+
+ #: None or a failure representation.
+ self.longrepr = longrepr
+
+ #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
+ self.when = when
+
+ #: list of (secname, data) extra information which needs to
+        #: be marshallable
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
+ def __repr__(self):
+ return "<TestReport %r when=%r outcome=%r>" % (
+ self.nodeid, self.when, self.outcome)
+
+class TeardownErrorReport(BaseReport):
+ outcome = "failed"
+ when = "teardown"
+ def __init__(self, longrepr, **extra):
+ self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
+
+def pytest_make_collect_report(collector):
+ call = CallInfo(collector._memocollect, "memocollect")
+ longrepr = None
+ if not call.excinfo:
+ outcome = "passed"
+ else:
+ from _pytest import nose
+ skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
+ if call.excinfo.errisinstance(skip_exceptions):
+ outcome = "skipped"
+ r = collector._repr_failure_py(call.excinfo, "line").reprcrash
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ errorinfo = collector.repr_failure(call.excinfo)
+ if not hasattr(errorinfo, "toterminal"):
+ errorinfo = CollectErrorRepr(errorinfo)
+ longrepr = errorinfo
+ rep = CollectReport(collector.nodeid, outcome, longrepr,
+ getattr(call, 'result', None))
+ rep.call = call # see collect_one_node
+ return rep
+
+
+class CollectReport(BaseReport):
+ def __init__(self, nodeid, outcome, longrepr, result,
+ sections=(), **extra):
+ self.nodeid = nodeid
+ self.outcome = outcome
+ self.longrepr = longrepr
+ self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
+
+ @property
+ def location(self):
+ return (self.fspath, None, self.fspath)
+
+ def __repr__(self):
+ return "<CollectReport %r lenresult=%s outcome=%r>" % (
+ self.nodeid, len(self.result), self.outcome)
+
+class CollectErrorRepr(TerminalRepr):
+ def __init__(self, msg):
+ self.longrepr = msg
+ def toterminal(self, out):
+ out.line(self.longrepr, red=True)
+
+class SetupState(object):
+ """ shared state for setting up/tearing down test items or collectors. """
+ def __init__(self):
+ self.stack = []
+ self._finalizers = {}
+
+ def addfinalizer(self, finalizer, colitem):
+ """ attach a finalizer to the given colitem.
+ if colitem is None, this will add a finalizer that
+ is called at the end of teardown_all().
+ """
+ assert colitem and not isinstance(colitem, tuple)
+ assert py.builtin.callable(finalizer)
+ #assert colitem in self.stack # some unit tests don't setup stack :/
+ self._finalizers.setdefault(colitem, []).append(finalizer)
+
+ def _pop_and_teardown(self):
+ colitem = self.stack.pop()
+ self._teardown_with_finalization(colitem)
+
+ def _callfinalizers(self, colitem):
+ finalizers = self._finalizers.pop(colitem, None)
+ exc = None
+ while finalizers:
+ fin = finalizers.pop()
+ try:
+ fin()
+ except Exception:
+ # XXX Only first exception will be seen by user,
+ # ideally all should be reported.
+ if exc is None:
+ exc = sys.exc_info()
+ if exc:
+ py.builtin._reraise(*exc)
+
+ def _teardown_with_finalization(self, colitem):
+ self._callfinalizers(colitem)
+ if hasattr(colitem, "teardown"):
+ colitem.teardown()
+ for colitem in self._finalizers:
+ assert colitem is None or colitem in self.stack \
+ or isinstance(colitem, tuple)
+
+ def teardown_all(self):
+ while self.stack:
+ self._pop_and_teardown()
+ for key in list(self._finalizers):
+ self._teardown_with_finalization(key)
+ assert not self._finalizers
+
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
+ self._pop_and_teardown()
+
+ def prepare(self, colitem):
+ """ setup objects along the collector chain to the test-method
+ and teardown previously setup objects."""
+ needed_collectors = colitem.listchain()
+ self._teardown_towards(needed_collectors)
+
+ # check if the last collection node has raised an error
+ for col in self.stack:
+ if hasattr(col, '_prepare_exc'):
+ py.builtin._reraise(*col._prepare_exc)
+ for col in needed_collectors[len(self.stack):]:
+ self.stack.append(col)
+ try:
+ col.setup()
+ except Exception:
+ col._prepare_exc = sys.exc_info()
+ raise
+
+def collect_one_node(collector):
+ ihook = collector.ihook
+ ihook.pytest_collectstart(collector=collector)
+ rep = ihook.pytest_make_collect_report(collector=collector)
+ call = rep.__dict__.pop("call", None)
+ if call and check_interactive_exception(call, rep):
+ ihook.pytest_exception_interact(node=collector, call=call, report=rep)
+ return rep
+
+
+# =============================================================
+# Test OutcomeExceptions and helpers for creating them.
+
+
+class OutcomeException(Exception):
+ """ OutcomeException and its subclass instances indicate and
+ contain info about test and collection outcomes.
+ """
+ def __init__(self, msg=None, pytrace=True):
+ Exception.__init__(self, msg)
+ self.msg = msg
+ self.pytrace = pytrace
+
+ def __repr__(self):
+ if self.msg:
+ val = self.msg
+ if isinstance(val, bytes):
+ val = py._builtin._totext(val, errors='replace')
+ return val
+ return "<%s instance>" %(self.__class__.__name__,)
+ __str__ = __repr__
+
+class Skipped(OutcomeException):
+ # XXX hackish: on 3k we fake to live in the builtins
+ # in order to have Skipped exception printing shorter/nicer
+ __module__ = 'builtins'
+
+class Failed(OutcomeException):
+ """ raised from an explicit call to pytest.fail() """
+ __module__ = 'builtins'
+
+class Exit(KeyboardInterrupt):
+ """ raised for immediate program exits (no tracebacks/summaries)"""
+ def __init__(self, msg="unknown reason"):
+ self.msg = msg
+ KeyboardInterrupt.__init__(self, msg)
+
+# exposed helper methods
+
+def exit(msg):
+ """ exit testing process as if KeyboardInterrupt was triggered. """
+ __tracebackhide__ = True
+ raise Exit(msg)
+
+exit.Exception = Exit
+
+def skip(msg=""):
+ """ skip an executing test with the given message. Note: it's usually
+ better to use the pytest.mark.skipif marker to declare a test to be
+ skipped under certain conditions like mismatching platforms or
+ dependencies. See the pytest_skipping plugin for details.
+ """
+ __tracebackhide__ = True
+ raise Skipped(msg=msg)
+skip.Exception = Skipped
+
+def fail(msg="", pytrace=True):
+ """ explicitly fail an currently-executing test with the given Message.
+
+ :arg pytrace: if false the msg represents the full failure information
+ and no python traceback will be reported.
+ """
+ __tracebackhide__ = True
+ raise Failed(msg=msg, pytrace=pytrace)
+fail.Exception = Failed
+
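+# Usage sketch (hypothetical, inside a test or fixture; has_network is
+# an assumed helper):
+#
+#     if not has_network():
+#         skip("network unavailable")
+#     if result != expected:
+#         fail("got %r, expected %r" % (result, expected), pytrace=False)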
+
+def importorskip(modname, minversion=None):
+ """ return imported module if it has at least "minversion" as its
+    __version__ attribute. If no minversion is specified, a skip
+    is only triggered if the module cannot be imported.
+ """
+ __tracebackhide__ = True
+ compile(modname, '', 'eval') # to catch syntaxerrors
+ try:
+ __import__(modname)
+ except ImportError:
+ skip("could not import %r" %(modname,))
+ mod = sys.modules[modname]
+ if minversion is None:
+ return mod
+ verattr = getattr(mod, '__version__', None)
+ if minversion is not None:
+ try:
+ from pkg_resources import parse_version as pv
+ except ImportError:
+ skip("we have a required version for %r but can not import "
+ "no pkg_resources to parse version strings." %(modname,))
+ if verattr is None or pv(verattr) < pv(minversion):
+ skip("module %r has __version__ %r, required is: %r" %(
+ modname, verattr, minversion))
+ return mod
+
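+# Usage sketch (hypothetical): skip dependent tests unless lxml >= 3.0
+# is importable:
+#
+#     lxml = importorskip("lxml", minversion="3.0")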
diff --git a/python/pytest/_pytest/skipping.py b/python/pytest/_pytest/skipping.py
new file mode 100644
index 000000000..18e038d2c
--- /dev/null
+++ b/python/pytest/_pytest/skipping.py
@@ -0,0 +1,361 @@
+""" support for skip/xfail functions and markers. """
+import os
+import sys
+import traceback
+
+import py
+import pytest
+from _pytest.mark import MarkInfo, MarkDecorator
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption('--runxfail',
+ action="store_true", dest="runxfail", default=False,
+ help="run tests even if they are marked xfail")
+
+ parser.addini("xfail_strict", "default for the strict parameter of xfail "
+ "markers when not given explicitly (default: "
+ "False)",
+ default=False,
+ type="bool")
+
+
+def pytest_configure(config):
+ if config.option.runxfail:
+ old = pytest.xfail
+ config._cleanup.append(lambda: setattr(pytest, "xfail", old))
+ def nop(*args, **kwargs):
+ pass
+ nop.Exception = XFailed
+ setattr(pytest, "xfail", nop)
+
+ config.addinivalue_line("markers",
+ "skip(reason=None): skip the given test function with an optional reason. "
+ "Example: skip(reason=\"no way of currently testing this\") skips the "
+ "test."
+ )
+ config.addinivalue_line("markers",
+ "skipif(condition): skip the given test function if eval(condition) "
+ "results in a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. see "
+ "http://pytest.org/latest/skipping.html"
+ )
+ config.addinivalue_line("markers",
+ "xfail(condition, reason=None, run=True, raises=None, strict=False): "
+ "mark the the test function as an expected failure if eval(condition) "
+ "has a True value. Optionally specify a reason for better reporting "
+ "and run=False if you don't even want to execute the test function. "
+ "If only specific exception(s) are expected, you can list them in "
+ "raises, and if the test fails in other ways, it will be reported as "
+ "a true failure. See http://pytest.org/latest/skipping.html"
+ )
+
+
+def pytest_namespace():
+ return dict(xfail=xfail)
+
+
+class XFailed(pytest.fail.Exception):
+ """ raised from an explicit call to pytest.xfail() """
+
+
+def xfail(reason=""):
+ """ xfail an executing test or setup functions with the given reason."""
+ __tracebackhide__ = True
+ raise XFailed(reason)
+xfail.Exception = XFailed
+
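+# Usage sketch (hypothetical; backend_version is an assumed helper):
+# imperative xfail from inside a test, complementing the declarative
+# @pytest.mark.xfail marker:
+#
+#     def test_flaky_backend():
+#         if backend_version() < (2, 0):
+#             pytest.xfail("known broken on backend < 2.0")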
+
+class MarkEvaluator:
+ def __init__(self, item, name):
+ self.item = item
+ self.name = name
+
+ @property
+ def holder(self):
+ return self.item.keywords.get(self.name)
+
+ def __bool__(self):
+ return bool(self.holder)
+ __nonzero__ = __bool__
+
+ def wasvalid(self):
+ return not hasattr(self, 'exc')
+
+ def invalidraise(self, exc):
+ raises = self.get('raises')
+ if not raises:
+ return
+ return not isinstance(exc, raises)
+
+ def istrue(self):
+ try:
+ return self._istrue()
+ except Exception:
+ self.exc = sys.exc_info()
+ if isinstance(self.exc[1], SyntaxError):
+ msg = [" " * (self.exc[1].offset + 4) + "^",]
+ msg.append("SyntaxError: invalid syntax")
+ else:
+ msg = traceback.format_exception_only(*self.exc[:2])
+ pytest.fail("Error evaluating %r expression\n"
+ " %s\n"
+ "%s"
+ %(self.name, self.expr, "\n".join(msg)),
+ pytrace=False)
+
+ def _getglobals(self):
+ d = {'os': os, 'sys': sys, 'config': self.item.config}
+ func = self.item.obj
+ try:
+ d.update(func.__globals__)
+ except AttributeError:
+ d.update(func.func_globals)
+ return d
+
+ def _istrue(self):
+ if hasattr(self, 'result'):
+ return self.result
+ if self.holder:
+ d = self._getglobals()
+ if self.holder.args or 'condition' in self.holder.kwargs:
+ self.result = False
+ # "holder" might be a MarkInfo or a MarkDecorator; only
+ # MarkInfo keeps track of all parameters it received in an
+ # _arglist attribute
+ if hasattr(self.holder, '_arglist'):
+ arglist = self.holder._arglist
+ else:
+ arglist = [(self.holder.args, self.holder.kwargs)]
+ for args, kwargs in arglist:
+ if 'condition' in kwargs:
+ args = (kwargs['condition'],)
+ for expr in args:
+ self.expr = expr
+ if isinstance(expr, py.builtin._basestring):
+ result = cached_eval(self.item.config, expr, d)
+ else:
+ if "reason" not in kwargs:
+ # XXX better be checked at collection time
+ msg = "you need to specify reason=STRING " \
+ "when using booleans as conditions."
+ pytest.fail(msg)
+ result = bool(expr)
+ if result:
+ self.result = True
+ self.reason = kwargs.get('reason', None)
+ self.expr = expr
+ return self.result
+ else:
+ self.result = True
+ return getattr(self, 'result', False)
+
+ def get(self, attr, default=None):
+ return self.holder.kwargs.get(attr, default)
+
+ def getexplanation(self):
+ expl = getattr(self, 'reason', None) or self.get('reason', None)
+ if not expl:
+ if not hasattr(self, 'expr'):
+ return ""
+ else:
+ return "condition: " + str(self.expr)
+ return expl
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_setup(item):
+ # Check if skip or skipif are specified as pytest marks
+
+ skipif_info = item.keywords.get('skipif')
+ if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
+ eval_skipif = MarkEvaluator(item, 'skipif')
+ if eval_skipif.istrue():
+ item._evalskip = eval_skipif
+ pytest.skip(eval_skipif.getexplanation())
+
+ skip_info = item.keywords.get('skip')
+ if isinstance(skip_info, (MarkInfo, MarkDecorator)):
+ item._evalskip = True
+ if 'reason' in skip_info.kwargs:
+ pytest.skip(skip_info.kwargs['reason'])
+ elif skip_info.args:
+ pytest.skip(skip_info.args[0])
+ else:
+ pytest.skip("unconditional skip")
+
+ item._evalxfail = MarkEvaluator(item, 'xfail')
+ check_xfail_no_run(item)
+
+
+@pytest.mark.hookwrapper
+def pytest_pyfunc_call(pyfuncitem):
+ check_xfail_no_run(pyfuncitem)
+ outcome = yield
+ passed = outcome.excinfo is None
+ if passed:
+ check_strict_xfail(pyfuncitem)
+
+
+def check_xfail_no_run(item):
+ """check xfail(run=False)"""
+ if not item.config.option.runxfail:
+ evalxfail = item._evalxfail
+ if evalxfail.istrue():
+ if not evalxfail.get('run', True):
+ pytest.xfail("[NOTRUN] " + evalxfail.getexplanation())
+
+
+def check_strict_xfail(pyfuncitem):
+ """check xfail(strict=True) for the given PASSING test"""
+ evalxfail = pyfuncitem._evalxfail
+ if evalxfail.istrue():
+ strict_default = pyfuncitem.config.getini('xfail_strict')
+ is_strict_xfail = evalxfail.get('strict', strict_default)
+ if is_strict_xfail:
+ del pyfuncitem._evalxfail
+ explanation = evalxfail.getexplanation()
+ pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_makereport(item, call):
+ outcome = yield
+ rep = outcome.get_result()
+ evalxfail = getattr(item, '_evalxfail', None)
+ evalskip = getattr(item, '_evalskip', None)
+    # unittest special case, see setting of _unexpectedsuccess
+ if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
+ # we need to translate into how pytest encodes xpass
+ rep.wasxfail = "reason: " + repr(item._unexpectedsuccess)
+ rep.outcome = "failed"
+ elif item.config.option.runxfail:
+        pass   # don't interfere
+ elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
+ rep.wasxfail = "reason: " + call.excinfo.value.msg
+ rep.outcome = "skipped"
+ elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
+ evalxfail.istrue():
+ if call.excinfo:
+ if evalxfail.invalidraise(call.excinfo.value):
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "skipped"
+ rep.wasxfail = evalxfail.getexplanation()
+ elif call.when == "call":
+ rep.outcome = "failed" # xpass outcome
+ rep.wasxfail = evalxfail.getexplanation()
+ elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
+ # skipped by mark.skipif; change the location of the failure
+ # to point to the item definition, otherwise it will display
+ # the location of where the skip exception was raised within pytest
+ filename, line, reason = rep.longrepr
+ filename, line = item.location[:2]
+ rep.longrepr = filename, line, reason
+
+# called by terminalreporter progress reporting
+def pytest_report_teststatus(report):
+ if hasattr(report, "wasxfail"):
+ if report.skipped:
+ return "xfailed", "x", "xfail"
+ elif report.failed:
+ return "xpassed", "X", ("XPASS", {'yellow': True})
+
+# called by the terminalreporter instance/plugin
+def pytest_terminal_summary(terminalreporter):
+ tr = terminalreporter
+ if not tr.reportchars:
+ #for name in "xfailed skipped failed xpassed":
+ # if not tr.stats.get(name, 0):
+ # tr.write_line("HINT: use '-r' option to see extra "
+ # "summary info about tests")
+ # break
+ return
+
+ lines = []
+ for char in tr.reportchars:
+ if char == "x":
+ show_xfailed(terminalreporter, lines)
+ elif char == "X":
+ show_xpassed(terminalreporter, lines)
+ elif char in "fF":
+ show_simple(terminalreporter, lines, 'failed', "FAIL %s")
+ elif char in "sS":
+ show_skipped(terminalreporter, lines)
+ elif char == "E":
+ show_simple(terminalreporter, lines, 'error', "ERROR %s")
+ elif char == 'p':
+ show_simple(terminalreporter, lines, 'passed', "PASSED %s")
+
+ if lines:
+ tr._tw.sep("=", "short test summary info")
+ for line in lines:
+ tr._tw.line(line)
+
+def show_simple(terminalreporter, lines, stat, format):
+ failed = terminalreporter.stats.get(stat)
+ if failed:
+ for rep in failed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ lines.append(format %(pos,))
+
+def show_xfailed(terminalreporter, lines):
+ xfailed = terminalreporter.stats.get("xfailed")
+ if xfailed:
+ for rep in xfailed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XFAIL %s" % (pos,))
+ if reason:
+ lines.append(" " + str(reason))
+
+def show_xpassed(terminalreporter, lines):
+ xpassed = terminalreporter.stats.get("xpassed")
+ if xpassed:
+ for rep in xpassed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XPASS %s %s" %(pos, reason))
+
+def cached_eval(config, expr, d):
+ if not hasattr(config, '_evalcache'):
+ config._evalcache = {}
+ try:
+ return config._evalcache[expr]
+ except KeyError:
+ import _pytest._code
+ exprcode = _pytest._code.compile(expr, mode="eval")
+ config._evalcache[expr] = x = eval(exprcode, d)
+ return x
+
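+# Example (illustrative): skipif/xfail condition strings are compiled and
+# evaluated once per config:
+#
+#     cached_eval(item.config, 'sys.platform == "win32"', globals_dict)
+#
+# repeated calls with the same expression hit config._evalcache.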
+
+def folded_skips(skipped):
+ d = {}
+ for event in skipped:
+ key = event.longrepr
+ assert len(key) == 3, (event, key)
+ d.setdefault(key, []).append(event)
+ l = []
+ for key, events in d.items():
+ l.append((len(events),) + key)
+ return l
+
+def show_skipped(terminalreporter, lines):
+ tr = terminalreporter
+ skipped = tr.stats.get('skipped', [])
+ if skipped:
+ #if not tr.hasopt('skipped'):
+ # tr.write_line(
+ # "%d skipped tests, specify -rs for more info" %
+ # len(skipped))
+ # return
+ fskips = folded_skips(skipped)
+ if fskips:
+ #tr.write_sep("_", "skipped test summary")
+ for num, fspath, lineno, reason in fskips:
+ if reason.startswith("Skipped: "):
+ reason = reason[9:]
+ lines.append("SKIP [%d] %s:%d: %s" %
+ (num, fspath, lineno, reason))
diff --git a/python/pytest/_pytest/standalonetemplate.py b/python/pytest/_pytest/standalonetemplate.py
new file mode 100755
index 000000000..484d5d1b2
--- /dev/null
+++ b/python/pytest/_pytest/standalonetemplate.py
@@ -0,0 +1,89 @@
+#! /usr/bin/env python
+
+# Hi There!
+# You may be wondering what this giant blob of binary data here is, you might
+# even be worried that we're up to something nefarious (good for you for being
+# paranoid!). This is a base64 encoding of a zip file, this zip file contains
+# a fully functional basic pytest script.
+#
+# Pytest is a thing that tests packages, pytest itself is a package that some-
+# one might want to install, especially if they're looking to run tests inside
+# some package they want to install. Pytest has a lot of code to collect and
+# execute tests, and other such sort of "tribal knowledge" that has been en-
+# coded in its code base. Because of this we basically include a basic copy
+# of pytest inside this blob. We do this because it lets you, as a maintainer
+# or application developer, give people who don't deal with python much an
+# easy way to run tests without installing the complete pytest package.
+#
+# If you're wondering how this is created: you can create it yourself if you
+# have a complete pytest installation by using this command on the command-
+# line: ``py.test --genscript=runtests.py``.
+
+sources = """
+@SOURCES@"""
+
+import sys
+import base64
+import zlib
+
+class DictImporter(object):
+ def __init__(self, sources):
+ self.sources = sources
+
+ def find_module(self, fullname, path=None):
+ if fullname == "argparse" and sys.version_info >= (2,7):
+ # we were generated with <python2.7 (which pulls in argparse)
+ # but we are running now on a stdlib which has it, so use that.
+ return None
+ if fullname in self.sources:
+ return self
+ if fullname + '.__init__' in self.sources:
+ return self
+ return None
+
+ def load_module(self, fullname):
+ # print "load_module:", fullname
+ from types import ModuleType
+ try:
+ s = self.sources[fullname]
+ is_pkg = False
+ except KeyError:
+ s = self.sources[fullname + '.__init__']
+ is_pkg = True
+
+ co = compile(s, fullname, 'exec')
+ module = sys.modules.setdefault(fullname, ModuleType(fullname))
+ module.__file__ = "%s/%s" % (__file__, fullname)
+ module.__loader__ = self
+ if is_pkg:
+ module.__path__ = [fullname]
+
+ do_exec(co, module.__dict__) # noqa
+ return sys.modules[fullname]
+
+ def get_source(self, name):
+ res = self.sources.get(name)
+ if res is None:
+ res = self.sources.get(name + '.__init__')
+ return res
+
+if __name__ == "__main__":
+ try:
+ import pkg_resources # noqa
+ except ImportError:
+ sys.stderr.write("ERROR: setuptools not installed\n")
+ sys.exit(2)
+ if sys.version_info >= (3, 0):
+ exec("def do_exec(co, loc): exec(co, loc)\n")
+ import pickle
+ sources = sources.encode("ascii") # ensure bytes
+ sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
+ else:
+ import cPickle as pickle
+ exec("def do_exec(co, loc): exec co in loc\n")
+ sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
+
+ importer = DictImporter(sources)
+ sys.meta_path.insert(0, importer)
+ entry = "@ENTRY@"
+ do_exec(entry, locals()) # noqa
diff --git a/python/pytest/_pytest/terminal.py b/python/pytest/_pytest/terminal.py
new file mode 100644
index 000000000..825f553ef
--- /dev/null
+++ b/python/pytest/_pytest/terminal.py
@@ -0,0 +1,593 @@
+""" terminal reporting of the full testing process.
+
+This is a good source for looking at the various reporting hooks.
+"""
+from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, \
+ EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED
+import pytest
+import py
+import sys
+import time
+import platform
+
+import _pytest._pluggy as pluggy
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group._addoption('-v', '--verbose', action="count",
+ dest="verbose", default=0, help="increase verbosity."),
+ group._addoption('-q', '--quiet', action="count",
+ dest="quiet", default=0, help="decrease verbosity."),
+ group._addoption('-r',
+ action="store", dest="reportchars", default=None, metavar="chars",
+ help="show extra test summary info as specified by chars (f)ailed, "
+ "(E)error, (s)skipped, (x)failed, (X)passed (w)pytest-warnings "
+ "(p)passed, (P)passed with output, (a)all except pP.")
+ group._addoption('-l', '--showlocals',
+ action="store_true", dest="showlocals", default=False,
+ help="show locals in tracebacks (disabled by default).")
+ group._addoption('--report',
+ action="store", dest="report", default=None, metavar="opts",
+ help="(deprecated, use -r)")
+ group._addoption('--tb', metavar="style",
+ action="store", dest="tbstyle", default='auto',
+ choices=['auto', 'long', 'short', 'no', 'line', 'native'],
+ help="traceback print mode (auto/long/short/line/native/no).")
+ group._addoption('--fulltrace', '--full-trace',
+ action="store_true", default=False,
+ help="don't cut any tracebacks (default is to cut).")
+ group._addoption('--color', metavar="color",
+ action="store", dest="color", default='auto',
+ choices=['yes', 'no', 'auto'],
+ help="color terminal output (yes/no/auto).")
+
+def pytest_configure(config):
+ config.option.verbose -= config.option.quiet
+ reporter = TerminalReporter(config, sys.stdout)
+ config.pluginmanager.register(reporter, 'terminalreporter')
+ if config.option.debug or config.option.traceconfig:
+ def mywriter(tags, args):
+ msg = " ".join(map(str, args))
+ reporter.write_line("[traceconfig] " + msg)
+ config.trace.root.setprocessor("pytest:config", mywriter)
+
+def getreportopt(config):
+ reportopts = ""
+ optvalue = config.option.report
+ if optvalue:
+ py.builtin.print_("DEPRECATED: use -r instead of --report option.",
+ file=sys.stderr)
+ if optvalue:
+ for setting in optvalue.split(","):
+ setting = setting.strip()
+ if setting == "skipped":
+ reportopts += "s"
+ elif setting == "xfailed":
+ reportopts += "x"
+ reportchars = config.option.reportchars
+ if reportchars:
+ for char in reportchars:
+ if char not in reportopts and char != 'a':
+ reportopts += char
+ elif char == 'a':
+ reportopts = 'fEsxXw'
+ return reportopts
+
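+# Example (illustrative): "-r fx" yields the reportopts string "fx",
+# while "-r a" expands to "fEsxXw" (everything except passed variants).
+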
+def pytest_report_teststatus(report):
+ if report.passed:
+ letter = "."
+ elif report.skipped:
+ letter = "s"
+ elif report.failed:
+ letter = "F"
+ if report.when != "call":
+ letter = "f"
+ return report.outcome, letter, report.outcome.upper()
+
+class WarningReport:
+ def __init__(self, code, message, nodeid=None, fslocation=None):
+ self.code = code
+ self.message = message
+ self.nodeid = nodeid
+ self.fslocation = fslocation
+
+
+class TerminalReporter:
+ def __init__(self, config, file=None):
+ import _pytest.config
+ self.config = config
+ self.verbosity = self.config.option.verbose
+ self.showheader = self.verbosity >= 0
+ self.showfspath = self.verbosity >= 0
+ self.showlongtestinfo = self.verbosity > 0
+ self._numcollected = 0
+
+ self.stats = {}
+ self.startdir = py.path.local()
+ if file is None:
+ file = sys.stdout
+ self._tw = self.writer = _pytest.config.create_terminal_writer(config,
+ file)
+ self.currentfspath = None
+ self.reportchars = getreportopt(config)
+ self.hasmarkup = self._tw.hasmarkup
+ self.isatty = file.isatty()
+
+ def hasopt(self, char):
+ char = {'xfailed': 'x', 'skipped': 's'}.get(char, char)
+ return char in self.reportchars
+
+ def write_fspath_result(self, nodeid, res):
+ fspath = self.config.rootdir.join(nodeid.split("::")[0])
+ if fspath != self.currentfspath:
+ self.currentfspath = fspath
+ fspath = self.startdir.bestrelpath(fspath)
+ self._tw.line()
+ self._tw.write(fspath + " ")
+ self._tw.write(res)
+
+ def write_ensure_prefix(self, prefix, extra="", **kwargs):
+ if self.currentfspath != prefix:
+ self._tw.line()
+ self.currentfspath = prefix
+ self._tw.write(prefix)
+ if extra:
+ self._tw.write(extra, **kwargs)
+ self.currentfspath = -2
+
+ def ensure_newline(self):
+ if self.currentfspath:
+ self._tw.line()
+ self.currentfspath = None
+
+ def write(self, content, **markup):
+ self._tw.write(content, **markup)
+
+ def write_line(self, line, **markup):
+ if not py.builtin._istext(line):
+ line = py.builtin.text(line, errors="replace")
+ self.ensure_newline()
+ self._tw.line(line, **markup)
+
+ def rewrite(self, line, **markup):
+ line = str(line)
+ self._tw.write("\r" + line, **markup)
+
+ def write_sep(self, sep, title=None, **markup):
+ self.ensure_newline()
+ self._tw.sep(sep, title, **markup)
+
+ def section(self, title, sep="=", **kw):
+ self._tw.sep(sep, title, **kw)
+
+ def line(self, msg, **kw):
+ self._tw.line(msg, **kw)
+
+ def pytest_internalerror(self, excrepr):
+ for line in py.builtin.text(excrepr).split("\n"):
+ self.write_line("INTERNALERROR> " + line)
+ return 1
+
+ def pytest_logwarning(self, code, fslocation, message, nodeid):
+ warnings = self.stats.setdefault("warnings", [])
+ if isinstance(fslocation, tuple):
+ fslocation = "%s:%d" % fslocation
+ warning = WarningReport(code=code, fslocation=fslocation,
+ message=message, nodeid=nodeid)
+ warnings.append(warning)
+
+ def pytest_plugin_registered(self, plugin):
+ if self.config.option.traceconfig:
+ msg = "PLUGIN registered: %s" % (plugin,)
+            # XXX this event may happen during setup/teardown time, when
+            #     output capturing is active; writing via self.write_line
+            #     then garbles the captured output
+ self.write_line(msg)
+
+ def pytest_deselected(self, items):
+ self.stats.setdefault('deselected', []).extend(items)
+
+ def pytest_runtest_logstart(self, nodeid, location):
+ # ensure that the path is printed before the
+ # 1st test of a module starts running
+ if self.showlongtestinfo:
+ line = self._locationline(nodeid, *location)
+ self.write_ensure_prefix(line, "")
+ elif self.showfspath:
+ fsid = nodeid.split("::")[0]
+ self.write_fspath_result(fsid, "")
+
+ def pytest_runtest_logreport(self, report):
+ rep = report
+ res = self.config.hook.pytest_report_teststatus(report=rep)
+ cat, letter, word = res
+ self.stats.setdefault(cat, []).append(rep)
+ self._tests_ran = True
+ if not letter and not word:
+ # probably passed setup/teardown
+ return
+ if self.verbosity <= 0:
+ if not hasattr(rep, 'node') and self.showfspath:
+ self.write_fspath_result(rep.nodeid, letter)
+ else:
+ self._tw.write(letter)
+ else:
+ if isinstance(word, tuple):
+ word, markup = word
+ else:
+ if rep.passed:
+ markup = {'green':True}
+ elif rep.failed:
+ markup = {'red':True}
+ elif rep.skipped:
+ markup = {'yellow':True}
+ line = self._locationline(rep.nodeid, *rep.location)
+ if not hasattr(rep, 'node'):
+ self.write_ensure_prefix(line, word, **markup)
+ #self._tw.write(word, **markup)
+ else:
+ self.ensure_newline()
+ if hasattr(rep, 'node'):
+ self._tw.write("[%s] " % rep.node.gateway.id)
+ self._tw.write(word, **markup)
+ self._tw.write(" " + line)
+ self.currentfspath = -2
+
+ def pytest_collection(self):
+ if not self.isatty and self.config.option.verbose >= 1:
+ self.write("collecting ... ", bold=True)
+
+ def pytest_collectreport(self, report):
+ if report.failed:
+ self.stats.setdefault("error", []).append(report)
+ elif report.skipped:
+ self.stats.setdefault("skipped", []).append(report)
+ items = [x for x in report.result if isinstance(x, pytest.Item)]
+ self._numcollected += len(items)
+ if self.isatty:
+ #self.write_fspath_result(report.nodeid, 'E')
+ self.report_collect()
+
+ def report_collect(self, final=False):
+ if self.config.option.verbose < 0:
+ return
+
+ errors = len(self.stats.get('error', []))
+ skipped = len(self.stats.get('skipped', []))
+ if final:
+ line = "collected "
+ else:
+ line = "collecting "
+ line += str(self._numcollected) + " items"
+ if errors:
+ line += " / %d errors" % errors
+ if skipped:
+ line += " / %d skipped" % skipped
+ if self.isatty:
+ if final:
+ line += " \n"
+ self.rewrite(line, bold=True)
+ else:
+ self.write_line(line)
+
+ def pytest_collection_modifyitems(self):
+ self.report_collect(True)
+
+ @pytest.hookimpl(trylast=True)
+ def pytest_sessionstart(self, session):
+ self._sessionstarttime = time.time()
+ if not self.showheader:
+ return
+ self.write_sep("=", "test session starts", bold=True)
+ verinfo = platform.python_version()
+ msg = "platform %s -- Python %s" % (sys.platform, verinfo)
+ if hasattr(sys, 'pypy_version_info'):
+ verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
+ msg += ", pytest-%s, py-%s, pluggy-%s" % (
+ pytest.__version__, py.__version__, pluggy.__version__)
+ if self.verbosity > 0 or self.config.option.debug or \
+ getattr(self.config.option, 'pastebin', None):
+ msg += " -- " + str(sys.executable)
+ self.write_line(msg)
+ lines = self.config.hook.pytest_report_header(
+ config=self.config, startdir=self.startdir)
+ lines.reverse()
+ for line in flatten(lines):
+ self.write_line(line)
+
+ def pytest_report_header(self, config):
+ inifile = ""
+ if config.inifile:
+ inifile = config.rootdir.bestrelpath(config.inifile)
+ lines = ["rootdir: %s, inifile: %s" %(config.rootdir, inifile)]
+
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+
+ lines.append(
+ "plugins: %s" % ", ".join(_plugin_nameversions(plugininfo)))
+ return lines
+
+ def pytest_collection_finish(self, session):
+ if self.config.option.collectonly:
+ self._printcollecteditems(session.items)
+ if self.stats.get('failed'):
+ self._tw.sep("!", "collection failures")
+ for rep in self.stats.get('failed'):
+ rep.toterminal(self._tw)
+ return 1
+ return 0
+ if not self.showheader:
+ return
+ #for i, testarg in enumerate(self.config.args):
+ # self.write_line("test path %d: %s" %(i+1, testarg))
+
+ def _printcollecteditems(self, items):
+ # to print out items and their parent collectors
+ # we take care to leave out Instances aka ()
+ # because later versions are going to get rid of them anyway
+ if self.config.option.verbose < 0:
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split('::', 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
+ return
+ stack = []
+ indent = ""
+ for item in items:
+ needed_collectors = item.listchain()[1:] # strip root node
+ while stack:
+ if stack == needed_collectors[:len(stack)]:
+ break
+ stack.pop()
+ for col in needed_collectors[len(stack):]:
+ stack.append(col)
+ #if col.name == "()":
+ # continue
+ indent = (len(stack) - 1) * " "
+ self._tw.line("%s%s" % (indent, col))
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_sessionfinish(self, exitstatus):
+ outcome = yield
+ outcome.get_result()
+ self._tw.line("")
+ summary_exit_codes = (
+ EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, EXIT_USAGEERROR,
+ EXIT_NOTESTSCOLLECTED)
+ if exitstatus in summary_exit_codes:
+ self.config.hook.pytest_terminal_summary(terminalreporter=self)
+ self.summary_errors()
+ self.summary_failures()
+ self.summary_warnings()
+ self.summary_passes()
+ if exitstatus == EXIT_INTERRUPTED:
+ self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
+ self.summary_deselected()
+ self.summary_stats()
+
+ def pytest_keyboard_interrupt(self, excinfo):
+ self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+
+ def pytest_unconfigure(self):
+ if hasattr(self, '_keyboardinterrupt_memo'):
+ self._report_keyboardinterrupt()
+
+ def _report_keyboardinterrupt(self):
+ excrepr = self._keyboardinterrupt_memo
+ msg = excrepr.reprcrash.message
+ self.write_sep("!", msg)
+ if "KeyboardInterrupt" in msg:
+ if self.config.option.fulltrace:
+ excrepr.toterminal(self._tw)
+ else:
+ self._tw.line("to show a full traceback on KeyboardInterrupt use --fulltrace", yellow=True)
+ excrepr.reprcrash.toterminal(self._tw)
+
+ def _locationline(self, nodeid, fspath, lineno, domain):
+ def mkrel(nodeid):
+ line = self.config.cwd_relative_nodeid(nodeid)
+ if domain and line.endswith(domain):
+ line = line[:-len(domain)]
+ l = domain.split("[")
+ l[0] = l[0].replace('.', '::') # don't replace '.' in params
+ line += "[".join(l)
+ return line
+ # collect_fspath comes from testid which has a "/"-normalized path
+
+ if fspath:
+ res = mkrel(nodeid).replace("::()", "") # parens-normalization
+ if nodeid.split("::")[0] != fspath.replace("\\", "/"):
+ res += " <- " + self.startdir.bestrelpath(fspath)
+ else:
+ res = "[location]"
+ return res + " "
+
+ def _getfailureheadline(self, rep):
+ if hasattr(rep, 'location'):
+ fspath, lineno, domain = rep.location
+ return domain
+ else:
+ return "test session" # XXX?
+
+ def _getcrashline(self, rep):
+ try:
+ return str(rep.longrepr.reprcrash)
+ except AttributeError:
+ try:
+ return str(rep.longrepr)[:50]
+ except AttributeError:
+ return ""
+
+ #
+ # summaries for sessionfinish
+ #
+ def getreports(self, name):
+ l = []
+ for x in self.stats.get(name, []):
+ if not hasattr(x, '_pdbshown'):
+ l.append(x)
+ return l
+
+ def summary_warnings(self):
+ if self.hasopt("w"):
+ warnings = self.stats.get("warnings")
+ if not warnings:
+ return
+ self.write_sep("=", "pytest-warning summary")
+ for w in warnings:
+ self._tw.line("W%s %s %s" % (w.code,
+ w.fslocation, w.message))
+
+ def summary_passes(self):
+ if self.config.option.tbstyle != "no":
+ if self.hasopt("P"):
+ reports = self.getreports('passed')
+ if not reports:
+ return
+ self.write_sep("=", "PASSES")
+ for rep in reports:
+ msg = self._getfailureheadline(rep)
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def summary_failures(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports('failed')
+ if not reports:
+ return
+ self.write_sep("=", "FAILURES")
+ for rep in reports:
+ if self.config.option.tbstyle == "line":
+ line = self._getcrashline(rep)
+ self.write_line(line)
+ else:
+ msg = self._getfailureheadline(rep)
+ markup = {'red': True, 'bold': True}
+ self.write_sep("_", msg, **markup)
+ self._outrep_summary(rep)
+
+ def summary_errors(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports('error')
+ if not reports:
+ return
+ self.write_sep("=", "ERRORS")
+            for rep in reports:
+ msg = self._getfailureheadline(rep)
+ if not hasattr(rep, 'when'):
+ # collect
+ msg = "ERROR collecting " + msg
+ elif rep.when == "setup":
+ msg = "ERROR at setup of " + msg
+ elif rep.when == "teardown":
+ msg = "ERROR at teardown of " + msg
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ for secname, content in rep.sections:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_stats(self):
+ session_duration = time.time() - self._sessionstarttime
+ (line, color) = build_summary_stats_line(self.stats)
+ msg = "%s in %.2f seconds" % (line, session_duration)
+ markup = {color: True, 'bold': True}
+
+ if self.verbosity >= 0:
+ self.write_sep("=", msg, **markup)
+ if self.verbosity == -1:
+ self.write_line(msg, **markup)
+
+ def summary_deselected(self):
+ if 'deselected' in self.stats:
+ l = []
+ k = self.config.option.keyword
+ if k:
+ l.append("-k%s" % k)
+ m = self.config.option.markexpr
+ if m:
+ l.append("-m %r" % m)
+ if l:
+ self.write_sep("=", "%d tests deselected by %r" % (
+ len(self.stats['deselected']), " ".join(l)), bold=True)
+
+def repr_pythonversion(v=None):
+ if v is None:
+ v = sys.version_info
+ try:
+ return "%s.%s.%s-%s-%s" % v
+ except (TypeError, ValueError):
+ return str(v)
+
+def flatten(l):
+ for x in l:
+ if isinstance(x, (list, tuple)):
+ for y in flatten(x):
+ yield y
+ else:
+ yield x
+
+def build_summary_stats_line(stats):
+ keys = ("failed passed skipped deselected "
+ "xfailed xpassed warnings error").split()
+ key_translation = {'warnings': 'pytest-warnings'}
+ unknown_key_seen = False
+ for key in stats.keys():
+ if key not in keys:
+ if key: # setup/teardown reports have an empty key, ignore them
+ keys.append(key)
+ unknown_key_seen = True
+ parts = []
+ for key in keys:
+ val = stats.get(key, None)
+ if val:
+ key_name = key_translation.get(key, key)
+ parts.append("%d %s" % (len(val), key_name))
+
+ if parts:
+ line = ", ".join(parts)
+ else:
+ line = "no tests ran"
+
+ if 'failed' in stats or 'error' in stats:
+ color = 'red'
+ elif 'warnings' in stats or unknown_key_seen:
+ color = 'yellow'
+ elif 'passed' in stats:
+ color = 'green'
+ else:
+ color = 'yellow'
+
+ return (line, color)
+
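+# Example (illustrative): stats like {"passed": [r1, r2], "skipped": [r3]}
+# produce ("2 passed, 1 skipped", "green"); any "failed" or "error" key
+# switches the color to red.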
+
+def _plugin_nameversions(plugininfo):
+ l = []
+ for plugin, dist in plugininfo:
+ # gets us name and version!
+ name = '{dist.project_name}-{dist.version}'.format(dist=dist)
+ # questionable convenience, but it keeps things short
+ if name.startswith("pytest-"):
+ name = name[7:]
+ # we decided to print python package names
+ # they can have more than one plugin
+ if name not in l:
+ l.append(name)
+ return l
diff --git a/python/pytest/_pytest/tmpdir.py b/python/pytest/_pytest/tmpdir.py
new file mode 100644
index 000000000..ebc48dbe5
--- /dev/null
+++ b/python/pytest/_pytest/tmpdir.py
@@ -0,0 +1,123 @@
+""" support for providing temporary directories to test functions. """
+import re
+
+import pytest
+import py
+from _pytest.monkeypatch import monkeypatch
+
+
+class TempdirFactory:
+ """Factory for temporary directories under the common base temp directory.
+
+ The base directory can be configured using the ``--basetemp`` option.
+ """
+
+ def __init__(self, config):
+ self.config = config
+ self.trace = config.trace.get("tmpdir")
+
+ def ensuretemp(self, string, dir=1):
+ """ (deprecated) return temporary directory path with
+ the given string as the trailing part. It is usually
+            better to use the 'tmpdir' function argument, which
+            provides a unique, empty, per-test-invocation directory.
+        """
+ #py.log._apiwarn(">1.1", "use tmpdir function argument")
+ return self.getbasetemp().ensure(string, dir=dir)
+
+ def mktemp(self, basename, numbered=True):
+ """Create a subdirectory of the base temporary directory and return it.
+        If ``numbered``, ensure the directory is unique by appending a
+        number suffix greater than any existing one.
+ """
+ basetemp = self.getbasetemp()
+ if not numbered:
+ p = basetemp.mkdir(basename)
+ else:
+ p = py.path.local.make_numbered_dir(prefix=basename,
+ keep=0, rootdir=basetemp, lock_timeout=None)
+ self.trace("mktemp", p)
+ return p
+
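+    # Example (illustrative): with numbered=True, successive
+    # mktemp("foo") calls create basetemp/foo0, basetemp/foo1, ...;
+    # with numbered=False the directory is created as-is and a second
+    # call with the same name fails.
+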
+ def getbasetemp(self):
+ """ return base temporary directory. """
+ try:
+ return self._basetemp
+ except AttributeError:
+ basetemp = self.config.option.basetemp
+ if basetemp:
+ basetemp = py.path.local(basetemp)
+ if basetemp.check():
+ basetemp.remove()
+ basetemp.mkdir()
+ else:
+ temproot = py.path.local.get_temproot()
+ user = get_user()
+ if user:
+ # use a sub-directory in the temproot to speed-up
+ # make_numbered_dir() call
+ rootdir = temproot.join('pytest-of-%s' % user)
+ else:
+ rootdir = temproot
+ rootdir.ensure(dir=1)
+ basetemp = py.path.local.make_numbered_dir(prefix='pytest-',
+ rootdir=rootdir)
+ self._basetemp = t = basetemp.realpath()
+ self.trace("new basetemp", t)
+ return t
+
+ def finish(self):
+ self.trace("finish")
+
+
+def get_user():
+ """Return the current user name, or None if getuser() does not work
+ in the current environment (see #1010).
+ """
+ import getpass
+ try:
+ return getpass.getuser()
+ except (ImportError, KeyError):
+ return None
+
+# backward compatibility
+TempdirHandler = TempdirFactory
+
+
+def pytest_configure(config):
+ """Create a TempdirFactory and attach it to the config object.
+
+ This is to comply with existing plugins which expect the handler to be
+ available at pytest_configure time, but ideally should be moved entirely
+ to the tmpdir_factory session fixture.
+ """
+ mp = monkeypatch()
+ t = TempdirFactory(config)
+ config._cleanup.extend([mp.undo, t.finish])
+ mp.setattr(config, '_tmpdirhandler', t, raising=False)
+ mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False)
+
+
+@pytest.fixture(scope='session')
+def tmpdir_factory(request):
+ """Return a TempdirFactory instance for the test session.
+ """
+ return request.config._tmpdirhandler
+
+
+@pytest.fixture
+def tmpdir(request, tmpdir_factory):
+ """return a temporary directory path object
+ which is unique to each test function invocation,
+    created as a subdirectory of the base temporary
+ directory. The returned object is a `py.path.local`_
+ path object.
+ """
+ name = request.node.name
+    name = re.sub(r"[\W]", "_", name)
+ MAXVAL = 30
+ if len(name) > MAXVAL:
+ name = name[:MAXVAL]
+ x = tmpdir_factory.mktemp(name, numbered=True)
+ return x
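+
+# Illustrative usage (a hypothetical test function, not part of this module):
+#
+#   def test_create_file(tmpdir):
+#       p = tmpdir.join("hello.txt")
+#       p.write("content")
+#       assert p.read() == "content"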
diff --git a/python/pytest/_pytest/unittest.py b/python/pytest/_pytest/unittest.py
new file mode 100644
index 000000000..8120e94fb
--- /dev/null
+++ b/python/pytest/_pytest/unittest.py
@@ -0,0 +1,205 @@
+""" discovery and running of std-library "unittest" style tests. """
+from __future__ import absolute_import
+
+import sys
+import traceback
+
+import pytest
+# for transferring markers
+import _pytest._code
+from _pytest.python import transfer_markers
+from _pytest.skipping import MarkEvaluator
+
+
+def pytest_pycollect_makeitem(collector, name, obj):
+ # has unittest been imported and is obj a subclass of its TestCase?
+ try:
+ if not issubclass(obj, sys.modules["unittest"].TestCase):
+ return
+ except Exception:
+ return
+ # yes, so let's collect it
+ return UnitTestCase(name, parent=collector)
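+
+# Illustrative example of what gets collected (hypothetical user code):
+#
+#   import unittest
+#   class TestExample(unittest.TestCase):   # collected as a UnitTestCase
+#       def test_one(self):                 # collected as a TestCaseFunction
+#           self.assertTrue(True)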
+
+
+class UnitTestCase(pytest.Class):
+    # marker for fixturemanager.getfixtureinfo()
+ # to declare that our children do not support funcargs
+ nofuncargs = True
+
+ def setup(self):
+ cls = self.obj
+ if getattr(cls, '__unittest_skip__', False):
+ return # skipped
+ setup = getattr(cls, 'setUpClass', None)
+ if setup is not None:
+ setup()
+ teardown = getattr(cls, 'tearDownClass', None)
+ if teardown is not None:
+ self.addfinalizer(teardown)
+ super(UnitTestCase, self).setup()
+
+ def collect(self):
+ from unittest import TestLoader
+ cls = self.obj
+ if not getattr(cls, "__test__", True):
+ return
+ self.session._fixturemanager.parsefactories(self, unittest=True)
+ loader = TestLoader()
+ module = self.getparent(pytest.Module).obj
+ foundsomething = False
+ for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ funcobj = getattr(x, 'im_func', x)
+ transfer_markers(funcobj, cls, module)
+ yield TestCaseFunction(name, parent=self)
+ foundsomething = True
+
+ if not foundsomething:
+ runtest = getattr(self.obj, 'runTest', None)
+ if runtest is not None:
+ ut = sys.modules.get("twisted.trial.unittest", None)
+ if ut is None or runtest != ut.TestCase.runTest:
+ yield TestCaseFunction('runTest', parent=self)
+
+
+class TestCaseFunction(pytest.Function):
+ _excinfo = None
+
+ def setup(self):
+ self._testcase = self.parent.obj(self.name)
+ self._fix_unittest_skip_decorator()
+ self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, 'setup_method'):
+ self._testcase.setup_method(self._obj)
+ if hasattr(self, "_request"):
+ self._request._fillfixtures()
+
+ def _fix_unittest_skip_decorator(self):
+ """
+        The @unittest.skip decorator calls functools.wraps(self._testcase).
+        The call to functools.wraps() fails unless self._testcase
+        has a __name__ attribute. This is usually supplied automatically
+        if the test is a function or method, but we need to add it
+        manually here.
+
+ See issue #1169
+ """
+ if sys.version_info[0] == 2:
+ setattr(self._testcase, "__name__", self.name)
+
+ def teardown(self):
+ if hasattr(self._testcase, 'teardown_method'):
+ self._testcase.teardown_method(self._obj)
+
+ def startTest(self, testcase):
+ pass
+
+ def _addexcinfo(self, rawexcinfo):
+ # unwrap potential exception info (see twisted trial support below)
+ rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo)
+ try:
+ excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
+ except TypeError:
+ try:
+ try:
+ l = traceback.format_exception(*rawexcinfo)
+ l.insert(0, "NOTE: Incompatible Exception Representation, "
+ "displaying natively:\n\n")
+ pytest.fail("".join(l), pytrace=False)
+ except (pytest.fail.Exception, KeyboardInterrupt):
+ raise
+ except:
+ pytest.fail("ERROR: Unknown Incompatible Exception "
+ "representation:\n%r" %(rawexcinfo,), pytrace=False)
+ except KeyboardInterrupt:
+ raise
+ except pytest.fail.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ self.__dict__.setdefault('_excinfo', []).append(excinfo)
+
+    def addError(self, testcase, rawexcinfo):
+        self._addexcinfo(rawexcinfo)
+
+    def addFailure(self, testcase, rawexcinfo):
+        self._addexcinfo(rawexcinfo)
+
+ def addSkip(self, testcase, reason):
+ try:
+ pytest.skip(reason)
+ except pytest.skip.Exception:
+ self._evalskip = MarkEvaluator(self, 'SkipTest')
+ self._evalskip.result = True
+ self._addexcinfo(sys.exc_info())
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
+ try:
+ pytest.xfail(str(reason))
+ except pytest.xfail.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
+ def addSuccess(self, testcase):
+ pass
+
+ def stopTest(self, testcase):
+ pass
+
+ def runtest(self):
+ self._testcase(result=self)
+
+ def _prunetraceback(self, excinfo):
+ pytest.Function._prunetraceback(self, excinfo)
+        traceback = excinfo.traceback.filter(
+            lambda x: not x.frame.f_globals.get('__unittest'))
+ if traceback:
+ excinfo.traceback = traceback
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item, call):
+ if isinstance(item, TestCaseFunction):
+ if item._excinfo:
+ call.excinfo = item._excinfo.pop(0)
+ try:
+ del call.result
+ except AttributeError:
+ pass
+
+# twisted trial support
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item):
+ if isinstance(item, TestCaseFunction) and \
+ 'twisted.trial.unittest' in sys.modules:
+ ut = sys.modules['twisted.python.failure']
+ Failure__init__ = ut.Failure.__init__
+ check_testcase_implements_trial_reporter()
+ def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
+ captureVars=None):
+ if exc_value is None:
+ self._rawexcinfo = sys.exc_info()
+ else:
+ if exc_type is None:
+ exc_type = type(exc_value)
+ self._rawexcinfo = (exc_type, exc_value, exc_tb)
+ try:
+ Failure__init__(self, exc_value, exc_type, exc_tb,
+ captureVars=captureVars)
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
+ ut.Failure.__init__ = excstore
+ yield
+ ut.Failure.__init__ = Failure__init__
+ else:
+ yield
+
+
+def check_testcase_implements_trial_reporter(done=[]):
+ if done:
+ return
+ from zope.interface import classImplements
+ from twisted.trial.itrial import IReporter
+ classImplements(TestCaseFunction, IReporter)
+ done.append(1)
diff --git a/python/pytest/_pytest/vendored_packages/README.md b/python/pytest/_pytest/vendored_packages/README.md
new file mode 100644
index 000000000..eab7c714f
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/README.md
@@ -0,0 +1,13 @@
+This directory vendors the `pluggy` module.
+
+For a more detailed discussion of the reasons for vendoring this
+package, please see [this issue](https://github.com/pytest-dev/pytest/issues/944).
+
+To update the current version, execute:
+
+```
+$ pip install -U pluggy==<version> --no-compile --target=_pytest/vendored_packages
+```
+
+And commit the modified files. The `pluggy-<version>.dist-info` directory
+created by `pip` should be ignored.
diff --git a/python/pytest/_pytest/vendored_packages/__init__.py b/python/pytest/_pytest/vendored_packages/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/__init__.py
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/DESCRIPTION.rst b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/DESCRIPTION.rst
new file mode 100644
index 000000000..aa3bbf812
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/DESCRIPTION.rst
@@ -0,0 +1,10 @@
+Plugin registration and hook calling for Python
+===============================================
+
+This is the plugin manager as used by pytest but stripped
+of pytest specific details.
+
+During the 0.x series this plugin does not have much documentation
+except extensive docstrings in the pluggy.py module.
+
+
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/METADATA b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/METADATA
new file mode 100644
index 000000000..ec81f0a6b
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/METADATA
@@ -0,0 +1,39 @@
+Metadata-Version: 2.0
+Name: pluggy
+Version: 0.3.1
+Summary: plugin and hook calling mechanisms for python
+Home-page: UNKNOWN
+Author: Holger Krekel
+Author-email: holger at merlinux.eu
+License: MIT license
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+
+Plugin registration and hook calling for Python
+===============================================
+
+This is the plugin manager as used by pytest but stripped
+of pytest specific details.
+
+During the 0.x series this plugin does not have much documentation
+except extensive docstrings in the pluggy.py module.
+
+
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/RECORD b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/RECORD
new file mode 100644
index 000000000..9626673c4
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/RECORD
@@ -0,0 +1,8 @@
+pluggy.py,sha256=v_RfWzyW6DPU1cJu_EFoL_OHq3t13qloVdR6UaMCXQA,29862
+pluggy-0.3.1.dist-info/top_level.txt,sha256=xKSCRhai-v9MckvMuWqNz16c1tbsmOggoMSwTgcpYHE,7
+pluggy-0.3.1.dist-info/pbr.json,sha256=xX3s6__wOcAyF-AZJX1sdZyW6PUXT-FkfBlM69EEUCg,47
+pluggy-0.3.1.dist-info/RECORD,,
+pluggy-0.3.1.dist-info/metadata.json,sha256=nLKltOT78dMV-00uXD6Aeemp4xNsz2q59j6ORSDeLjw,1027
+pluggy-0.3.1.dist-info/METADATA,sha256=1b85Ho2u4iK30M099k7axMzcDDhLcIMb-A82JUJZnSo,1334
+pluggy-0.3.1.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+pluggy-0.3.1.dist-info/DESCRIPTION.rst,sha256=P5Akh1EdIBR6CeqtV2P8ZwpGSpZiTKPw0NyS7jEiD-g,306
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/WHEEL b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/WHEEL
new file mode 100644
index 000000000..9dff69d86
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/metadata.json b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/metadata.json
new file mode 100644
index 000000000..426a3a7ad
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/metadata.json
@@ -0,0 +1 @@
+{"license": "MIT license", "name": "pluggy", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "plugin and hook calling mechanisms for python", "platform": "unix", "version": "0.3.1", "extensions": {"python.details": {"document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "holger at merlinux.eu", "name": "Holger Krekel"}]}}, "classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5"]} \ No newline at end of file
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/pbr.json b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/pbr.json
new file mode 100644
index 000000000..d6b798640
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/pbr.json
@@ -0,0 +1 @@
+{"is_release": false, "git_version": "7d4c9cd"} \ No newline at end of file
diff --git a/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/top_level.txt b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/top_level.txt
new file mode 100644
index 000000000..11bdb5c1f
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+pluggy
diff --git a/python/pytest/_pytest/vendored_packages/pluggy.py b/python/pytest/_pytest/vendored_packages/pluggy.py
new file mode 100644
index 000000000..2f848b23d
--- /dev/null
+++ b/python/pytest/_pytest/vendored_packages/pluggy.py
@@ -0,0 +1,777 @@
+"""
+PluginManager, basic initialization and tracing.
+
+pluggy is the crystallized core of plugin management as used
+by some 150 plugins for pytest.
+
+Pluggy uses semantic versioning. Breaking changes are only foreseen for
+Major releases (incremented X in "X.Y.Z"). If you want to use pluggy in
+your project you should thus use a dependency restriction like
+"pluggy>=0.1.0,<1.0" to avoid surprises.
+
+pluggy is concerned with hook specification, hook implementations and hook
+calling. For any given hook specification a hook call invokes up to N implementations.
+A hook implementation can influence its position and type of execution:
+if marked "tryfirst" or "trylast" it will be executed as early or as late
+as possible. If marked "hookwrapper" an implementation
+can wrap all calls to non-hookwrapper implementations. A hookwrapper
+can thus execute some code before and after the execution of other hooks.
+
+Hook specification is done by way of a regular python function where
+both the function name and the names of all its arguments are significant.
+Each hook implementation function is verified against the original specification
+function, including the names of all its arguments. To allow for hook specifications
+to evolve over the lifetime of a project, hook implementations may
+accept fewer arguments than the specification defines. One can thus add new
+arguments and semantics to a hook specification, typically without breaking
+existing hook implementations.
+
+The chosen approach is meant to let a hook designer think carefully about
+which objects are needed by an extension writer. By contrast, subclass-based
+extension mechanisms often expose a lot more state and behaviour than needed,
+thus restricting future developments.
+
+Pluggy currently consists of functionality for:
+
+- a way to register new hook specifications. Without a hook
+ specification no hook calling can be performed.
+
+- a registry of plugins which contain hook implementation functions. It
+ is possible to register plugins for which a hook specification is not yet
+ known and validate all hooks when the system is in a more referentially
+  consistent state. Setting an "optionalhook" attribute on a hook
+  implementation will avoid PluginValidationError if a specification
+  is missing. This allows optional integration between plugins.
+
+- a "hook" relay object from which you can launch 1:N calls to
+ registered hook implementation functions
+
+- a mechanism for ordering hook implementation functions
+
+- mechanisms for two different types of 1:N calls: "firstresult", where
+  the call stops as soon as the first implementation returns a non-None
+  result, and the (default) mode which guarantees that all hook
+  implementations are called and their non-None results collected.
+
+- mechanisms for "historic" extension points such that all newly
+ registered functions will receive all hook calls that happened
+ before their registration.
+
+- a mechanism for discovering plugin objects registered via
+  setuptools entry points.
+
+- a simple tracing mechanism, including tracing of plugin calls and
+ their arguments.
+
+"""
+import sys
+import inspect
+
+__version__ = '0.3.1'
+__all__ = ["PluginManager", "PluginValidationError",
+ "HookspecMarker", "HookimplMarker"]
+
+_py3 = sys.version_info > (3, 0)
+
+
+class HookspecMarker:
+ """ Decorator helper class for marking functions as hook specifications.
+
+ You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.add_hookspecs later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, firstresult=False, historic=False):
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to add_hookspecs(). If passed no
+ function, returns a decorator which can be applied to a function
+ later using the attributes supplied.
+
+ If firstresult is True the 1:N hook call (N being the number of registered
+ hook implementation functions) will stop at I<=N when the I'th function
+ returns a non-None result.
+
+ If historic is True calls to a hook will be memorized and replayed
+ on later registered plugins.
+
+ """
+ def setattr_hookspec_opts(func):
+ if historic and firstresult:
+ raise ValueError("cannot have a historic firstresult hook")
+ setattr(func, self.project_name + "_spec",
+ dict(firstresult=firstresult, historic=historic))
+ return func
+
+ if function is not None:
+ return setattr_hookspec_opts(function)
+ else:
+ return setattr_hookspec_opts
+
+
+class HookimplMarker:
+ """ Decorator helper class for marking functions as hook implementations.
+
+    You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.register later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, hookwrapper=False, optionalhook=False,
+ tryfirst=False, trylast=False):
+
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to register(). If passed no function,
+ returns a decorator which can be applied to a function later using
+ the attributes supplied.
+
+ If optionalhook is True a missing matching hook specification will not result
+ in an error (by default it is an error if no matching spec is found).
+
+ If tryfirst is True this hook implementation will run as early as possible
+        in the chain of N hook implementations for a specification.
+
+ If trylast is True this hook implementation will run as late as possible
+ in the chain of N hook implementations.
+
+        If hookwrapper is True the hook implementation needs to execute exactly
+        one "yield". The code before the yield is run early, before any non-hookwrapper
+        function is run. The code after the yield is run after all non-hookwrapper
+        functions have run. The yield receives an ``_CallOutcome`` object representing
+ the exception or result outcome of the inner calls (including other hookwrapper
+ calls).
+
+ """
+ def setattr_hookimpl_opts(func):
+ setattr(func, self.project_name + "_impl",
+ dict(hookwrapper=hookwrapper, optionalhook=optionalhook,
+ tryfirst=tryfirst, trylast=trylast))
+ return func
+
+ if function is None:
+ return setattr_hookimpl_opts
+ else:
+ return setattr_hookimpl_opts(function)
+
+
+def normalize_hookimpl_opts(opts):
+ opts.setdefault("tryfirst", False)
+ opts.setdefault("trylast", False)
+ opts.setdefault("hookwrapper", False)
+ opts.setdefault("optionalhook", False)
+
+
+class _TagTracer:
+ def __init__(self):
+ self._tag2proc = {}
+ self.writer = None
+ self.indent = 0
+
+ def get(self, name):
+ return _TagTracerSub(self, (name,))
+
+ def format_message(self, tags, args):
+ if isinstance(args[-1], dict):
+ extra = args[-1]
+ args = args[:-1]
+ else:
+ extra = {}
+
+ content = " ".join(map(str, args))
+ indent = " " * self.indent
+
+ lines = [
+ "%s%s [%s]\n" % (indent, content, ":".join(tags))
+ ]
+
+ for name, value in extra.items():
+ lines.append("%s %s: %s\n" % (indent, name, value))
+ return lines
+
+ def processmessage(self, tags, args):
+ if self.writer is not None and args:
+ lines = self.format_message(tags, args)
+ self.writer(''.join(lines))
+ try:
+ self._tag2proc[tags](tags, args)
+ except KeyError:
+ pass
+
+ def setwriter(self, writer):
+ self.writer = writer
+
+ def setprocessor(self, tags, processor):
+ if isinstance(tags, str):
+ tags = tuple(tags.split(":"))
+ else:
+ assert isinstance(tags, tuple)
+ self._tag2proc[tags] = processor
+
+
+class _TagTracerSub:
+ def __init__(self, root, tags):
+ self.root = root
+ self.tags = tags
+
+ def __call__(self, *args):
+ self.root.processmessage(self.tags, args)
+
+ def setmyprocessor(self, processor):
+ self.root.setprocessor(self.tags, processor)
+
+ def get(self, name):
+ return self.__class__(self.root, self.tags + (name,))
+
+
+def _raise_wrapfail(wrap_controller, msg):
+ co = wrap_controller.gi_code
+ raise RuntimeError("wrap_controller at %r %s:%d %s" %
+ (co.co_name, co.co_filename, co.co_firstlineno, msg))
+
+
+def _wrapped_call(wrap_controller, func):
+    """ Wrap a call to a function with a generator which needs to yield
+ exactly once. The yield point will trigger calling the wrapped function
+ and return its _CallOutcome to the yield point. The generator then needs
+ to finish (raise StopIteration) in order for the wrapped call to complete.
+ """
+ try:
+ next(wrap_controller) # first yield
+ except StopIteration:
+ _raise_wrapfail(wrap_controller, "did not yield")
+ call_outcome = _CallOutcome(func)
+ try:
+ wrap_controller.send(call_outcome)
+ _raise_wrapfail(wrap_controller, "has second yield")
+ except StopIteration:
+ pass
+ return call_outcome.get_result()
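+
+# Illustrative example (hypothetical controller generator):
+#
+#   def controller():
+#       # code here runs before the wrapped call
+#       outcome = yield              # receives the _CallOutcome of func
+#       # code here runs after the wrapped call
+#
+#   _wrapped_call(controller(), lambda: 42)  # returns 42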
+
+
+class _CallOutcome:
+ """ Outcome of a function call, either an exception or a proper result.
+ Calling the ``get_result`` method will return the result or reraise
+ the exception raised when the function was called. """
+ excinfo = None
+
+ def __init__(self, func):
+ try:
+ self.result = func()
+ except BaseException:
+ self.excinfo = sys.exc_info()
+
+ def force_result(self, result):
+ self.result = result
+ self.excinfo = None
+
+ def get_result(self):
+ if self.excinfo is None:
+ return self.result
+ else:
+ ex = self.excinfo
+ if _py3:
+ raise ex[1].with_traceback(ex[2])
+ _reraise(*ex) # noqa
+
+if not _py3:
+ exec("""
+def _reraise(cls, val, tb):
+ raise cls, val, tb
+""")
+
+
+class _TracedHookExecution:
+ def __init__(self, pluginmanager, before, after):
+ self.pluginmanager = pluginmanager
+ self.before = before
+ self.after = after
+ self.oldcall = pluginmanager._inner_hookexec
+ assert not isinstance(self.oldcall, _TracedHookExecution)
+ self.pluginmanager._inner_hookexec = self
+
+ def __call__(self, hook, hook_impls, kwargs):
+ self.before(hook.name, hook_impls, kwargs)
+ outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs))
+ self.after(outcome, hook.name, hook_impls, kwargs)
+ return outcome.get_result()
+
+ def undo(self):
+ self.pluginmanager._inner_hookexec = self.oldcall
+
+
+class PluginManager(object):
+    """ Core PluginManager class which manages registration
+    of plugin objects and 1:N hook calling.
+
+    You can register new hook specifications by calling
+    ``add_hookspecs(module_or_class)``.
+    You can register plugin objects (which contain hook implementations)
+    by calling ``register(plugin)``. The PluginManager is initialized
+    with a project_name and an optional implprefix; functions whose
+    names start with the implprefix are recognized as hook
+    implementations even without an explicit marker.
+
+ For debugging purposes you can call ``enable_tracing()``
+ which will subsequently send debug information to the trace helper.
+ """
+
+ def __init__(self, project_name, implprefix=None):
+ """ if implprefix is given implementation functions
+ will be recognized if their name matches the implprefix. """
+ self.project_name = project_name
+ self._name2plugin = {}
+ self._plugin2hookcallers = {}
+ self._plugin_distinfo = []
+ self.trace = _TagTracer().get("pluginmanage")
+ self.hook = _HookRelay(self.trace.root.get("hook"))
+ self._implprefix = implprefix
+ self._inner_hookexec = lambda hook, methods, kwargs: \
+ _MultiCall(methods, kwargs, hook.spec_opts).execute()
+
+ def _hookexec(self, hook, methods, kwargs):
+ # called from all hookcaller instances.
+ # enable_tracing will set its own wrapping function at self._inner_hookexec
+ return self._inner_hookexec(hook, methods, kwargs)
+
+ def register(self, plugin, name=None):
+ """ Register a plugin and return its canonical name or None if the name
+ is blocked from registering. Raise a ValueError if the plugin is already
+ registered. """
+ plugin_name = name or self.get_canonical_name(plugin)
+
+ if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
+ if self._name2plugin.get(plugin_name, -1) is None:
+ return # blocked plugin, return None to indicate no registration
+ raise ValueError("Plugin already registered: %s=%s\n%s" %
+ (plugin_name, plugin, self._name2plugin))
+
+ # XXX if an error happens we should make sure no state has been
+ # changed at point of return
+ self._name2plugin[plugin_name] = plugin
+
+ # register matching hook implementations of the plugin
+ self._plugin2hookcallers[plugin] = hookcallers = []
+ for name in dir(plugin):
+ hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
+ if hookimpl_opts is not None:
+ normalize_hookimpl_opts(hookimpl_opts)
+ method = getattr(plugin, name)
+ hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
+ hook = getattr(self.hook, name, None)
+ if hook is None:
+ hook = _HookCaller(name, self._hookexec)
+ setattr(self.hook, name, hook)
+ elif hook.has_spec():
+ self._verify_hook(hook, hookimpl)
+ hook._maybe_apply_history(hookimpl)
+ hook._add_hookimpl(hookimpl)
+ hookcallers.append(hook)
+ return plugin_name
+
+ def parse_hookimpl_opts(self, plugin, name):
+ method = getattr(plugin, name)
+ res = getattr(method, self.project_name + "_impl", None)
+ if res is not None and not isinstance(res, dict):
+ # false positive
+ res = None
+ elif res is None and self._implprefix and name.startswith(self._implprefix):
+ res = {}
+ return res
+
+ def unregister(self, plugin=None, name=None):
+ """ unregister a plugin object and all its contained hook implementations
+ from internal data structures. """
+ if name is None:
+ assert plugin is not None, "one of name or plugin needs to be specified"
+ name = self.get_name(plugin)
+
+ if plugin is None:
+ plugin = self.get_plugin(name)
+
+ # if self._name2plugin[name] == None registration was blocked: ignore
+ if self._name2plugin.get(name):
+ del self._name2plugin[name]
+
+ for hookcaller in self._plugin2hookcallers.pop(plugin, []):
+ hookcaller._remove_plugin(plugin)
+
+ return plugin
+
+ def set_blocked(self, name):
+ """ block registrations of the given name, unregister if already registered. """
+ self.unregister(name=name)
+ self._name2plugin[name] = None
+
+ def is_blocked(self, name):
+        """ return True if the name blocks registering plugins of that name. """
+ return name in self._name2plugin and self._name2plugin[name] is None
+
+ def add_hookspecs(self, module_or_class):
+ """ add new hook specifications defined in the given module_or_class.
+ Functions are recognized if they have been decorated accordingly. """
+ names = []
+ for name in dir(module_or_class):
+ spec_opts = self.parse_hookspec_opts(module_or_class, name)
+ if spec_opts is not None:
+ hc = getattr(self.hook, name, None)
+ if hc is None:
+ hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
+ setattr(self.hook, name, hc)
+ else:
+ # plugins registered this hook without knowing the spec
+ hc.set_specification(module_or_class, spec_opts)
+ for hookfunction in (hc._wrappers + hc._nonwrappers):
+ self._verify_hook(hc, hookfunction)
+ names.append(name)
+
+ if not names:
+ raise ValueError("did not find any %r hooks in %r" %
+ (self.project_name, module_or_class))
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ method = getattr(module_or_class, name)
+ return getattr(method, self.project_name + "_spec", None)
+
+ def get_plugins(self):
+ """ return the set of registered plugins. """
+ return set(self._plugin2hookcallers)
+
+ def is_registered(self, plugin):
+ """ Return True if the plugin is already registered. """
+ return plugin in self._plugin2hookcallers
+
+ def get_canonical_name(self, plugin):
+ """ Return canonical name for a plugin object. Note that a plugin
+ may be registered under a different name which was specified
+ by the caller of register(plugin, name). To obtain the name
+        of a registered plugin use ``get_name(plugin)`` instead."""
+ return getattr(plugin, "__name__", None) or str(id(plugin))
+
+ def get_plugin(self, name):
+ """ Return a plugin or None for the given name. """
+ return self._name2plugin.get(name)
+
+ def get_name(self, plugin):
+ """ Return name for registered plugin or None if not registered. """
+ for name, val in self._name2plugin.items():
+ if plugin == val:
+ return name
+
+ def _verify_hook(self, hook, hookimpl):
+ if hook.is_historic() and hookimpl.hookwrapper:
+ raise PluginValidationError(
+ "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" %
+ (hookimpl.plugin_name, hook.name))
+
+ for arg in hookimpl.argnames:
+ if arg not in hook.argnames:
+ raise PluginValidationError(
+ "Plugin %r\nhook %r\nargument %r not available\n"
+ "plugin definition: %s\n"
+ "available hookargs: %s" %
+ (hookimpl.plugin_name, hook.name, arg,
+ _formatdef(hookimpl.function), ", ".join(hook.argnames)))
+
+ def check_pending(self):
+ """ Verify that all hooks which have not been verified against
+ a hook specification are optional, otherwise raise PluginValidationError"""
+ for name in self.hook.__dict__:
+ if name[0] != "_":
+ hook = getattr(self.hook, name)
+ if not hook.has_spec():
+ for hookimpl in (hook._wrappers + hook._nonwrappers):
+ if not hookimpl.optionalhook:
+ raise PluginValidationError(
+ "unknown hook %r in plugin %r" %
+ (name, hookimpl.plugin))
+
+ def load_setuptools_entrypoints(self, entrypoint_name):
+        """ Load plugins by querying the specified setuptools entrypoint name.
+ Return the number of loaded plugins. """
+ from pkg_resources import iter_entry_points, DistributionNotFound
+ for ep in iter_entry_points(entrypoint_name):
+ # is the plugin registered or blocked?
+ if self.get_plugin(ep.name) or self.is_blocked(ep.name):
+ continue
+ try:
+ plugin = ep.load()
+ except DistributionNotFound:
+ continue
+ self.register(plugin, name=ep.name)
+ self._plugin_distinfo.append((plugin, ep.dist))
+ return len(self._plugin_distinfo)
+
+ def list_plugin_distinfo(self):
+        """ return list of (plugin, distinfo) tuples for all setuptools-registered
+        plugins. """
+ return list(self._plugin_distinfo)
+
+ def list_name_plugin(self):
+ """ return list of name/plugin pairs. """
+ return list(self._name2plugin.items())
+
+ def get_hookcallers(self, plugin):
+ """ get all hook callers for the specified plugin. """
+ return self._plugin2hookcallers.get(plugin)
+
+ def add_hookcall_monitoring(self, before, after):
+ """ add before/after tracing functions for all hooks
+ and return an undo function which, when called,
+ will remove the added tracers.
+
+        ``before(hook_name, hook_impls, kwargs)`` will be called ahead
+        of all hook calls and receives the hook name, a list
+        of HookImpl instances and the keyword arguments for the hook call.
+
+        ``after(outcome, hook_name, hook_impls, kwargs)`` receives the
+        same arguments as ``before`` but also a :py:class:`_CallOutcome` object
+        which represents the result of the overall hook call.
+ """
+ return _TracedHookExecution(self, before, after).undo
+
+ def enable_tracing(self):
+ """ enable tracing of hook calls and return an undo function. """
+ hooktrace = self.hook._trace
+
+ def before(hook_name, methods, kwargs):
+ hooktrace.root.indent += 1
+ hooktrace(hook_name, kwargs)
+
+ def after(outcome, hook_name, methods, kwargs):
+ if outcome.excinfo is None:
+ hooktrace("finish", hook_name, "-->", outcome.result)
+ hooktrace.root.indent -= 1
+
+ return self.add_hookcall_monitoring(before, after)
+
+ def subset_hook_caller(self, name, remove_plugins):
+ """ Return a new _HookCaller instance for the named method
+ which manages calls to all registered plugins except the
+ ones from remove_plugins. """
+ orig = getattr(self.hook, name)
+ plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
+ if plugins_to_remove:
+ hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class,
+ orig.spec_opts)
+ for hookimpl in (orig._wrappers + orig._nonwrappers):
+ plugin = hookimpl.plugin
+ if plugin not in plugins_to_remove:
+ hc._add_hookimpl(hookimpl)
+ # we also keep track of this hook caller so it
+ # gets properly removed on plugin unregistration
+ self._plugin2hookcallers.setdefault(plugin, []).append(hc)
+ return hc
+ return orig
+
+
+class _MultiCall:
+ """ execute a call into multiple python functions/methods. """
+
+ # XXX note that the __multicall__ argument is supported only
+ # for pytest compatibility reasons. It was never officially
+ # supported there and is explicitly deprecated since 2.8
+ # so we can remove it soon, allowing to avoid the below recursion
+ # in execute() and simplify/speed up the execute loop.
+
+ def __init__(self, hook_impls, kwargs, specopts={}):
+ self.hook_impls = hook_impls
+ self.kwargs = kwargs
+ self.kwargs["__multicall__"] = self
+ self.specopts = specopts
+
+ def execute(self):
+ all_kwargs = self.kwargs
+ self.results = results = []
+ firstresult = self.specopts.get("firstresult")
+
+ while self.hook_impls:
+ hook_impl = self.hook_impls.pop()
+ args = [all_kwargs[argname] for argname in hook_impl.argnames]
+ if hook_impl.hookwrapper:
+ return _wrapped_call(hook_impl.function(*args), self.execute)
+ res = hook_impl.function(*args)
+ if res is not None:
+ if firstresult:
+ return res
+ results.append(res)
+
+ if not firstresult:
+ return results
+
+ def __repr__(self):
+ status = "%d meths" % (len(self.hook_impls),)
+ if hasattr(self, "results"):
+ status = ("%d results, " % len(self.results)) + status
+ return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs)
+
+
+def varnames(func, startindex=None):
+ """ return argument name tuple for a function, method, class or callable.
+
+ In case of a class, its "__init__" method is considered.
+    For methods the "self" parameter is not included unless you are passing
+    an unbound method with Python 3 (which has no support for unbound methods)
+ """
+ cache = getattr(func, "__dict__", {})
+ try:
+ return cache["_varnames"]
+ except KeyError:
+ pass
+ if inspect.isclass(func):
+ try:
+ func = func.__init__
+ except AttributeError:
+ return ()
+ startindex = 1
+ else:
+ if not inspect.isfunction(func) and not inspect.ismethod(func):
+ func = getattr(func, '__call__', func)
+ if startindex is None:
+ startindex = int(inspect.ismethod(func))
+
+ try:
+ rawcode = func.__code__
+ except AttributeError:
+ return ()
+ try:
+ x = rawcode.co_varnames[startindex:rawcode.co_argcount]
+ except AttributeError:
+ x = ()
+ else:
+ defaults = func.__defaults__
+ if defaults:
+ x = x[:-len(defaults)]
+ try:
+ cache["_varnames"] = x
+ except TypeError:
+ pass
+ return x
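+
+# Illustrative example (hypothetical function): given
+#
+#   def f(a, b, c=1): pass
+#
+# varnames(f) returns ('a', 'b') -- arguments with defaults are excluded.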
+
+
+class _HookRelay:
+ """ hook holder object for performing 1:N hook calls where N is the number
+ of registered plugins.
+
+ """
+
+ def __init__(self, trace):
+ self._trace = trace
+
+
+class _HookCaller(object):
+ def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
+ self.name = name
+ self._wrappers = []
+ self._nonwrappers = []
+ self._hookexec = hook_execute
+ if specmodule_or_class is not None:
+ assert spec_opts is not None
+ self.set_specification(specmodule_or_class, spec_opts)
+
+ def has_spec(self):
+ return hasattr(self, "_specmodule_or_class")
+
+ def set_specification(self, specmodule_or_class, spec_opts):
+ assert not self.has_spec()
+ self._specmodule_or_class = specmodule_or_class
+ specfunc = getattr(specmodule_or_class, self.name)
+ argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class))
+ assert "self" not in argnames # sanity check
+ self.argnames = ["__multicall__"] + list(argnames)
+ self.spec_opts = spec_opts
+ if spec_opts.get("historic"):
+ self._call_history = []
+
+ def is_historic(self):
+ return hasattr(self, "_call_history")
+
+ def _remove_plugin(self, plugin):
+ def remove(wrappers):
+ for i, method in enumerate(wrappers):
+ if method.plugin == plugin:
+ del wrappers[i]
+ return True
+ if remove(self._wrappers) is None:
+ if remove(self._nonwrappers) is None:
+ raise ValueError("plugin %r not found" % (plugin,))
+
+ def _add_hookimpl(self, hookimpl):
+ if hookimpl.hookwrapper:
+ methods = self._wrappers
+ else:
+ methods = self._nonwrappers
+
+ if hookimpl.trylast:
+ methods.insert(0, hookimpl)
+ elif hookimpl.tryfirst:
+ methods.append(hookimpl)
+ else:
+ # find last non-tryfirst method
+ i = len(methods) - 1
+ while i >= 0 and methods[i].tryfirst:
+ i -= 1
+ methods.insert(i + 1, hookimpl)
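+        # note: _MultiCall.execute() pops implementations from the end of
+        # the list, so tryfirst methods (appended) run first and trylast
+        # methods (inserted at index 0) run last.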
+
+ def __repr__(self):
+ return "<_HookCaller %r>" % (self.name,)
+
+ def __call__(self, **kwargs):
+ assert not self.is_historic()
+ return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+
+ def call_historic(self, proc=None, kwargs=None):
+ self._call_history.append((kwargs or {}, proc))
+ # historizing hooks don't return results
+ self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+
+ def call_extra(self, methods, kwargs):
+ """ Call the hook with some additional temporarily participating
+ methods using the specified kwargs as call parameters. """
+ old = list(self._nonwrappers), list(self._wrappers)
+ for method in methods:
+ opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
+ hookimpl = HookImpl(None, "<temp>", method, opts)
+ self._add_hookimpl(hookimpl)
+ try:
+ return self(**kwargs)
+ finally:
+ self._nonwrappers, self._wrappers = old
+
+ def _maybe_apply_history(self, method):
+ if self.is_historic():
+ for kwargs, proc in self._call_history:
+ res = self._hookexec(self, [method], kwargs)
+ if res and proc is not None:
+ proc(res[0])
+
+
+class HookImpl:
+ def __init__(self, plugin, plugin_name, function, hook_impl_opts):
+ self.function = function
+ self.argnames = varnames(self.function)
+ self.plugin = plugin
+ self.opts = hook_impl_opts
+ self.plugin_name = plugin_name
+ self.__dict__.update(hook_impl_opts)
+
+
+class PluginValidationError(Exception):
+ """ plugin failed validation. """
+
+
+if hasattr(inspect, 'signature'):
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ str(inspect.signature(func))
+ )
+else:
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ inspect.formatargspec(*inspect.getargspec(func))
+ )
diff --git a/python/pytest/pytest.py b/python/pytest/pytest.py
new file mode 100644
index 000000000..e376e417e
--- /dev/null
+++ b/python/pytest/pytest.py
@@ -0,0 +1,28 @@
+# PYTHON_ARGCOMPLETE_OK
+"""
+pytest: unit and functional testing with Python.
+"""
+__all__ = [
+ 'main',
+ 'UsageError',
+ 'cmdline',
+ 'hookspec',
+ 'hookimpl',
+ '__version__',
+]
+
+if __name__ == '__main__': # if run as a script or by 'python -m pytest'
+ # we trigger the below "else" condition by the following import
+ import pytest
+ raise SystemExit(pytest.main())
+
+# else we are imported
+
+from _pytest.config import (
+ main, UsageError, _preloadplugins, cmdline,
+ hookspec, hookimpl
+)
+from _pytest import __version__
+
+_preloadplugins() # to populate pytest.* namespace so help(pytest) works
+
diff --git a/python/pytest/setup.cfg b/python/pytest/setup.cfg
new file mode 100644
index 000000000..bec4469e0
--- /dev/null
+++ b/python/pytest/setup.cfg
@@ -0,0 +1,19 @@
+[build_sphinx]
+source-dir = doc/en/
+build-dir = doc/build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = doc/en/build/html
+
+[bdist_wheel]
+universal = 1
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/pytest/setup.py b/python/pytest/setup.py
new file mode 100644
index 000000000..7cdcdfb99
--- /dev/null
+++ b/python/pytest/setup.py
@@ -0,0 +1,122 @@
+import os, sys
+import setuptools
+import pkg_resources
+from setuptools import setup, Command
+
+classifiers = ['Development Status :: 6 - Mature',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Testing',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities'] + [
+ ('Programming Language :: Python :: %s' % x) for x in
+ '2 2.6 2.7 3 3.2 3.3 3.4 3.5'.split()]
+
+with open('README.rst') as fd:
+ long_description = fd.read()
+
+def get_version():
+ p = os.path.join(os.path.dirname(
+ os.path.abspath(__file__)), "_pytest", "__init__.py")
+ with open(p) as f:
+ for line in f.readlines():
+ if "__version__" in line:
+ return line.strip().split("=")[-1].strip(" '")
+ raise ValueError("could not read version")
+
+
+def has_environment_marker_support():
+ """
+    Tests whether setuptools supports PEP-426 environment markers.
+
+    The first known release to support them is 0.7 (the earliest release on PyPI
+    seems to be 0.7.2, so we test against that), see: http://pythonhosted.org/setuptools/history.html#id142
+
+ References:
+
+ * https://wheel.readthedocs.io/en/latest/index.html#defining-conditional-dependencies
+ * https://www.python.org/dev/peps/pep-0426/#environment-markers
+ """
+ try:
+ return pkg_resources.parse_version(setuptools.__version__) >= pkg_resources.parse_version('0.7.2')
+ except Exception as exc:
+        sys.stderr.write("Could not test setuptools version: %s\n" % exc)
+ return False
+
+
+def main():
+ install_requires = ['py>=1.4.29'] # pluggy is vendored in _pytest.vendored_packages
+ extras_require = {}
+ if has_environment_marker_support():
+ extras_require[':python_version=="2.6" or python_version=="3.0" or python_version=="3.1"'] = ['argparse']
+ extras_require[':sys_platform=="win32"'] = ['colorama']
+ else:
+ if sys.version_info < (2, 7) or (3,) <= sys.version_info < (3, 2):
+ install_requires.append('argparse')
+ if sys.platform == 'win32':
+ install_requires.append('colorama')
+
+ setup(
+ name='pytest',
+ description='pytest: simple powerful testing with Python',
+ long_description=long_description,
+ version=get_version(),
+ url='http://pytest.org',
+ license='MIT license',
+ platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
+ author='Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others',
+ author_email='holger at merlinux.eu',
+ entry_points=make_entry_points(),
+ classifiers=classifiers,
+ cmdclass={'test': PyTest},
+ # the following should be enabled for release
+ install_requires=install_requires,
+ extras_require=extras_require,
+ packages=['_pytest', '_pytest.assertion', '_pytest._code', '_pytest.vendored_packages'],
+ py_modules=['pytest'],
+ zip_safe=False,
+ )
+
+
+def cmdline_entrypoints(versioninfo, platform, basename):
+ target = 'pytest:main'
+ if platform.startswith('java'):
+ points = {'py.test-jython': target}
+ else:
+ if basename.startswith('pypy'):
+ points = {'py.test-%s' % basename: target}
+ else: # cpython
+ points = {'py.test-%s.%s' % versioninfo[:2] : target}
+ points['py.test'] = target
+ return points
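+
+# e.g. on CPython 3.5 this yields
+#   {'py.test': 'pytest:main', 'py.test-3.5': 'pytest:main'}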
+
+
+def make_entry_points():
+ basename = os.path.basename(sys.executable)
+ points = cmdline_entrypoints(sys.version_info, sys.platform, basename)
+ keys = list(points.keys())
+ keys.sort()
+ l = ['%s = %s' % (x, points[x]) for x in keys]
+ return {'console_scripts': l}
+
+
+class PyTest(Command):
+ user_options = []
+ def initialize_options(self):
+ pass
+ def finalize_options(self):
+ pass
+ def run(self):
+ import subprocess
+ PPATH = [x for x in os.environ.get('PYTHONPATH', '').split(':') if x]
+ PPATH.insert(0, os.getcwd())
+ os.environ['PYTHONPATH'] = ':'.join(PPATH)
+ errno = subprocess.call([sys.executable, 'pytest.py', '--ignore=doc'])
+ raise SystemExit(errno)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python/pytoml/PKG-INFO b/python/pytoml/PKG-INFO
new file mode 100644
index 000000000..844436f95
--- /dev/null
+++ b/python/pytoml/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: pytoml
+Version: 0.1.10
+Summary: A parser for TOML-0.4.0
+Home-page: https://github.com/avakar/pytoml
+Author: Martin Vejnár
+Author-email: avakar@ratatanek.cz
+License: MIT
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/python/pytoml/pytoml/__init__.py b/python/pytoml/pytoml/__init__.py
new file mode 100644
index 000000000..222a1967f
--- /dev/null
+++ b/python/pytoml/pytoml/__init__.py
@@ -0,0 +1,3 @@
+from .core import TomlError
+from .parser import load, loads
+from .writer import dump, dumps
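+
+# Illustrative round-trip (hypothetical snippet, not part of the package):
+#
+#   import pytoml as toml
+#   obj = toml.loads('a = 1\n[tbl]\nb = "x"\n')
+#   assert obj == {'a': 1, 'tbl': {'b': 'x'}}
+#   text = toml.dumps(obj)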
diff --git a/python/pytoml/pytoml/core.py b/python/pytoml/pytoml/core.py
new file mode 100644
index 000000000..0fcada48c
--- /dev/null
+++ b/python/pytoml/pytoml/core.py
@@ -0,0 +1,13 @@
+class TomlError(RuntimeError):
+ def __init__(self, message, line, col, filename):
+ RuntimeError.__init__(self, message, line, col, filename)
+ self.message = message
+ self.line = line
+ self.col = col
+ self.filename = filename
+
+ def __str__(self):
+ return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message)
+
+ def __repr__(self):
+ return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename)
diff --git a/python/pytoml/pytoml/parser.py b/python/pytoml/pytoml/parser.py
new file mode 100644
index 000000000..d4c4ac187
--- /dev/null
+++ b/python/pytoml/pytoml/parser.py
@@ -0,0 +1,366 @@
+import string, re, sys, datetime
+from .core import TomlError
+
+if sys.version_info[0] == 2:
+ _chr = unichr
+else:
+ _chr = chr
+
+def load(fin, translate=lambda t, x, v: v):
+ return loads(fin.read(), translate=translate, filename=fin.name)
+
+def loads(s, filename='<string>', translate=lambda t, x, v: v):
+ if isinstance(s, bytes):
+ s = s.decode('utf-8')
+
+ s = s.replace('\r\n', '\n')
+
+ root = {}
+ tables = {}
+ scope = root
+
+ src = _Source(s, filename=filename)
+ ast = _p_toml(src)
+
+ def error(msg):
+ raise TomlError(msg, pos[0], pos[1], filename)
+
+ def process_value(v):
+ kind, text, value, pos = v
+ if kind == 'str' and value.startswith('\n'):
+ value = value[1:]
+ if kind == 'array':
+ if value and any(k != value[0][0] for k, t, v, p in value[1:]):
+ error('array-type-mismatch')
+ value = [process_value(item) for item in value]
+ elif kind == 'table':
+ value = dict([(k, process_value(value[k])) for k in value])
+ return translate(kind, text, value)
+
+ for kind, value, pos in ast:
+ if kind == 'kv':
+ k, v = value
+ if k in scope:
+ error('duplicate_keys. Key "{0}" was used more than once.'.format(k))
+ scope[k] = process_value(v)
+ else:
+ is_table_array = (kind == 'table_array')
+ cur = tables
+ for name in value[:-1]:
+ if isinstance(cur.get(name), list):
+ d, cur = cur[name][-1]
+ else:
+ d, cur = cur.setdefault(name, (None, {}))
+
+ scope = {}
+ name = value[-1]
+ if name not in cur:
+ if is_table_array:
+ cur[name] = [(scope, {})]
+ else:
+ cur[name] = (scope, {})
+ elif isinstance(cur[name], list):
+ if not is_table_array:
+ error('table_type_mismatch')
+ cur[name].append((scope, {}))
+ else:
+ if is_table_array:
+ error('table_type_mismatch')
+ old_scope, next_table = cur[name]
+ if old_scope is not None:
+ error('duplicate_tables')
+ cur[name] = (scope, next_table)
+
+ def merge_tables(scope, tables):
+ if scope is None:
+ scope = {}
+ for k in tables:
+ if k in scope:
+ error('key_table_conflict')
+ v = tables[k]
+ if isinstance(v, list):
+ scope[k] = [merge_tables(sc, tbl) for sc, tbl in v]
+ else:
+ scope[k] = merge_tables(v[0], v[1])
+ return scope
+
+ return merge_tables(root, tables)
+
+class _Source:
+ def __init__(self, s, filename=None):
+ self.s = s
+ self._pos = (1, 1)
+ self._last = None
+ self._filename = filename
+ self.backtrack_stack = []
+
+ def last(self):
+ return self._last
+
+ def pos(self):
+ return self._pos
+
+ def fail(self):
+ return self._expect(None)
+
+ def consume_dot(self):
+ if self.s:
+ self._last = self.s[0]
+            self.s = self.s[1:]
+ self._advance(self._last)
+ return self._last
+ return None
+
+ def expect_dot(self):
+ return self._expect(self.consume_dot())
+
+ def consume_eof(self):
+ if not self.s:
+ self._last = ''
+ return True
+ return False
+
+ def expect_eof(self):
+ return self._expect(self.consume_eof())
+
+ def consume(self, s):
+ if self.s.startswith(s):
+ self.s = self.s[len(s):]
+ self._last = s
+ self._advance(s)
+ return True
+ return False
+
+ def expect(self, s):
+ return self._expect(self.consume(s))
+
+ def consume_re(self, re):
+ m = re.match(self.s)
+ if m:
+ self.s = self.s[len(m.group(0)):]
+ self._last = m
+ self._advance(m.group(0))
+ return m
+ return None
+
+ def expect_re(self, re):
+ return self._expect(self.consume_re(re))
+
+ def __enter__(self):
+ self.backtrack_stack.append((self.s, self._pos))
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.backtrack_stack.pop()
+ else:
+ self.s, self._pos = self.backtrack_stack.pop()
+ return type == TomlError
+
+ def commit(self):
+ self.backtrack_stack[-1] = (self.s, self._pos)
+
+ def _expect(self, r):
+ if not r:
+ raise TomlError('msg', self._pos[0], self._pos[1], self._filename)
+ return r
+
+ def _advance(self, s):
+ suffix_pos = s.rfind('\n')
+ if suffix_pos == -1:
+ self._pos = (self._pos[0], self._pos[1] + len(s))
+ else:
+ self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos)
+
+_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*')
+def _p_ews(s):
+ s.expect_re(_ews_re)
+
+_ws_re = re.compile(r'[ \t]*')
+def _p_ws(s):
+ s.expect_re(_ws_re)
+
+_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'',
+ '\\': '\\', '/': '/', 'f': '\f' }
+
+_basicstr_re = re.compile(r'[^"\\\000-\037]*')
+_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})')
+_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})')
+_escapes_re = re.compile('[bnrt"\'\\\\/f]')
+_newline_esc_re = re.compile('\n[ \t\n]*')
+def _p_basicstr_content(s, content=_basicstr_re):
+ res = []
+ while True:
+ res.append(s.expect_re(content).group(0))
+ if not s.consume('\\'):
+ break
+ if s.consume_re(_newline_esc_re):
+ pass
+ elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re):
+ res.append(_chr(int(s.last().group(1), 16)))
+ else:
+ s.expect_re(_escapes_re)
+ res.append(_escapes[s.last().group(0)])
+ return ''.join(res)
+
+_key_re = re.compile(r'[0-9a-zA-Z-_]+')
+def _p_key(s):
+ with s:
+ s.expect('"')
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return r
+ return s.expect_re(_key_re).group(0)
+
+_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?')
+_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
+
+_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*')
+_litstr_re = re.compile(r"[^'\000-\037]*")
+_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*")
+def _p_value(s):
+ pos = s.pos()
+
+ if s.consume('true'):
+ return 'bool', s.last(), True, pos
+ if s.consume('false'):
+ return 'bool', s.last(), False, pos
+
+ if s.consume('"'):
+ if s.consume('""'):
+ r = _p_basicstr_content(s, _basicstr_ml_re)
+ s.expect('"""')
+ else:
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return 'str', r, r, pos
+
+ if s.consume('\''):
+ if s.consume('\'\''):
+ r = s.expect_re(_litstr_ml_re).group(0)
+ s.expect('\'\'\'')
+ else:
+ r = s.expect_re(_litstr_re).group(0)
+ s.expect('\'')
+ return 'str', r, r, pos
+
+ if s.consume_re(_datetime_re):
+ m = s.last()
+ s0 = m.group(0)
+ r = map(int, m.groups()[:6])
+ if m.group(7):
+ micro = float(m.group(7))
+ else:
+ micro = 0
+
+        if m.group(8):
+            # the offset minutes must carry the same sign as the hours part
+            sign = -1 if m.group(8).startswith('-') else 1
+            g = int(m.group(8), 10) * 60 + sign * int(m.group(9), 10)
+            tz = _TimeZone(datetime.timedelta(0, g * 60))
+        else:
+            tz = _TimeZone(datetime.timedelta(0, 0))
+
+ y, m, d, H, M, S = r
+ dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz)
+ return 'datetime', s0, dt, pos
+
+ if s.consume_re(_float_re):
+ m = s.last().group(0)
+ r = m.replace('_','')
+ if '.' in m or 'e' in m or 'E' in m:
+ return 'float', m, float(r), pos
+ else:
+ return 'int', m, int(r, 10), pos
+
+ if s.consume('['):
+ items = []
+ with s:
+ while True:
+ _p_ews(s)
+ items.append(_p_value(s))
+ s.commit()
+ _p_ews(s)
+ s.expect(',')
+ s.commit()
+ _p_ews(s)
+ s.expect(']')
+ return 'array', None, items, pos
+
+ if s.consume('{'):
+ _p_ws(s)
+ items = {}
+ if not s.consume('}'):
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s)
+ _p_ws(s)
+ while s.consume(','):
+ _p_ws(s)
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s)
+ _p_ws(s)
+ s.expect('}')
+ return 'table', None, items, pos
+
+ s.fail()
+
+def _p_stmt(s):
+ pos = s.pos()
+    if s.consume('['):
+ is_array = s.consume('[')
+ _p_ws(s)
+ keys = [_p_key(s)]
+ _p_ws(s)
+ while s.consume('.'):
+ _p_ws(s)
+ keys.append(_p_key(s))
+ _p_ws(s)
+ s.expect(']')
+ if is_array:
+ s.expect(']')
+ return 'table_array' if is_array else 'table', keys, pos
+
+ key = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ value = _p_value(s)
+ return 'kv', (key, value), pos
+
+_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*')
+def _p_toml(s):
+ stmts = []
+ _p_ews(s)
+ with s:
+ stmts.append(_p_stmt(s))
+ while True:
+ s.commit()
+ s.expect_re(_stmtsep_re)
+ stmts.append(_p_stmt(s))
+ _p_ews(s)
+ s.expect_eof()
+ return stmts
+
+class _TimeZone(datetime.tzinfo):
+ def __init__(self, offset):
+ self._offset = offset
+
+ def utcoffset(self, dt):
+ return self._offset
+
+ def dst(self, dt):
+ return None
+
+ def tzname(self, dt):
+ m = self._offset.total_seconds() // 60
+ if m < 0:
+ res = '-'
+ m = -m
+ else:
+ res = '+'
+        h = int(m // 60)
+        m = int(m - h * 60)
+        # zero-padded HHMM, e.g. '+0530'
+        return '{}{:02d}{:02d}'.format(res, h, m)
diff --git a/python/pytoml/pytoml/writer.py b/python/pytoml/pytoml/writer.py
new file mode 100644
index 000000000..2c9f7c69d
--- /dev/null
+++ b/python/pytoml/pytoml/writer.py
@@ -0,0 +1,120 @@
+from __future__ import unicode_literals
+import io, datetime, sys
+
+if sys.version_info[0] == 3:
+ long = int
+ unicode = str
+
+
+def dumps(obj, sort_keys=False):
+ fout = io.StringIO()
+ dump(fout, obj, sort_keys=sort_keys)
+ return fout.getvalue()
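+
+# Illustrative example (assuming insertion-ordered dicts, Python 3.7+):
+#   dumps({'a': 1, 'tbl': {'b': 'x'}}) == 'a = 1\n\n[tbl]\nb = "x"\n'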
+
+
+_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'}
+
+
+def _escape_string(s):
+ res = []
+ start = 0
+
+ def flush():
+ if start != i:
+ res.append(s[start:i])
+ return i + 1
+
+ i = 0
+ while i < len(s):
+ c = s[i]
+ if c in '"\\\n\r\t\b\f':
+ start = flush()
+ res.append('\\' + _escapes[c])
+ elif ord(c) < 0x20:
+ start = flush()
+ res.append('\\u%04x' % ord(c))
+ i += 1
+
+ flush()
+ return '"' + ''.join(res) + '"'
+
+
+def _escape_id(s):
+ if any(not c.isalnum() and c not in '-_' for c in s):
+ return _escape_string(s)
+ return s
+
+
+def _format_list(v):
+ return '[{0}]'.format(', '.join(_format_value(obj) for obj in v))
+
+# Formula from:
+# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds
+# Once support for py26 is dropped, this can be replaced by td.total_seconds()
+def _total_seconds(td):
+ return ((td.microseconds
+ + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6)
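+# e.g. _total_seconds(datetime.timedelta(hours=1, seconds=30)) == 3630.0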
+
+def _format_value(v):
+ if isinstance(v, bool):
+ return 'true' if v else 'false'
+ if isinstance(v, int) or isinstance(v, long):
+ return unicode(v)
+ if isinstance(v, float):
+ return '{0:.17f}'.format(v)
+ elif isinstance(v, unicode) or isinstance(v, bytes):
+ return _escape_string(v)
+ elif isinstance(v, datetime.datetime):
+ offs = v.utcoffset()
+ offs = _total_seconds(offs) // 60 if offs is not None else 0
+
+ if offs == 0:
+ suffix = 'Z'
+ else:
+ if offs > 0:
+ suffix = '+'
+ else:
+ suffix = '-'
+ offs = -offs
+            suffix = '{0}{1:02d}:{2:02d}'.format(suffix, int(offs // 60), int(offs % 60))
+
+ if v.microsecond:
+ return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix
+ else:
+ return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix
+ elif isinstance(v, list):
+ return _format_list(v)
+ else:
+ raise RuntimeError(v)
+
+
+def dump(fout, obj, sort_keys=False):
+ tables = [((), obj, False)]
+
+ while tables:
+ if sort_keys:
+ tables.sort(key=lambda tup: tup[0], reverse=True)
+ name, table, is_array = tables.pop()
+ if name:
+ section_name = '.'.join(_escape_id(c) for c in name)
+ if is_array:
+ fout.write('[[{0}]]\n'.format(section_name))
+ else:
+ fout.write('[{0}]\n'.format(section_name))
+
+ table_keys = sorted(table.keys()) if sort_keys else table.keys()
+ for k in table_keys:
+ v = table[k]
+ if isinstance(v, dict):
+ tables.append((name + (k,), v, False))
+ elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v):
+ tables.extend((name + (k,), d, True) for d in reversed(v))
+ elif v is None:
+ # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344
+ fout.write(
+ '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k)))
+ else:
+ fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v)))
+
+ if tables:
+ fout.write('\n')
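+
+# Usage sketch (illustrative; exercises ``dumps`` defined above):
+#
+#     >>> dumps({'a': 1, 'b': {'c': 'x'}}, sort_keys=True)
+#     'a = 1\n\n[b]\nc = "x"\n'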
diff --git a/python/pytoml/setup.cfg b/python/pytoml/setup.cfg
new file mode 100644
index 000000000..b14b0bc3d
--- /dev/null
+++ b/python/pytoml/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/pytoml/setup.py b/python/pytoml/setup.py
new file mode 100644
index 000000000..98c08a540
--- /dev/null
+++ b/python/pytoml/setup.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+from setuptools import setup
+
+setup(
+ name='pytoml',
+ version='0.1.10',
+
+ description='A parser for TOML-0.4.0',
+ author='Martin Vejnár',
+ author_email='avakar@ratatanek.cz',
+ url='https://github.com/avakar/pytoml',
+ license='MIT',
+
+ packages=['pytoml'],
+ )
diff --git a/python/pytoml/test/test.py b/python/pytoml/test/test.py
new file mode 100644
index 000000000..53fcd229d
--- /dev/null
+++ b/python/pytoml/test/test.py
@@ -0,0 +1,100 @@
+import os, json, sys, io, traceback, argparse
+import pytoml as toml
+
+# Formula from:
+# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds
+# Once support for py26 is dropped, this can be replaced by td.total_seconds()
+def _total_seconds(td):
+ return ((td.microseconds
+ + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6)
+
+def _testbench_literal(type, text, value):
+ if type == 'table':
+ return value
+ if type == 'array':
+ return { 'type': 'array', 'value': value }
+ if type == 'datetime':
+ offs = _total_seconds(value.tzinfo.utcoffset(value)) // 60
+        offs = 'Z' if offs == 0 else '{}{:02}:{:02}'.format('-' if offs < 0 else '+', int(abs(offs)) // 60, int(abs(offs)) % 60)
+ v = '{0:04}-{1:02}-{2:02}T{3:02}:{4:02}:{5:02}{6}'.format(value.year, value.month, value.day, value.hour, value.minute, value.second, offs)
+ return { 'type': 'datetime', 'value': v }
+ if type == 'bool':
+ return { 'type': 'bool', 'value': 'true' if value else 'false' }
+ if type == 'float':
+ return { 'type': 'float', 'value': value }
+ if type == 'str':
+ return { 'type': 'string', 'value': value }
+ if type == 'int':
+ return { 'type': 'integer', 'value': str(value) }
+
+def adjust_bench(v):
+ if isinstance(v, dict):
+ if v.get('type') == 'float':
+ v['value'] = float(v['value'])
+ return v
+ return dict([(k, adjust_bench(v[k])) for k in v])
+ if isinstance(v, list):
+ return [adjust_bench(v) for v in v]
+ return v
+
+def _main():
+ ap = argparse.ArgumentParser()
+ ap.add_argument('-d', '--dir', action='append')
+ ap.add_argument('testcase', nargs='*')
+ args = ap.parse_args()
+
+ if not args.dir:
+ args.dir = [os.path.join(os.path.split(__file__)[0], 'toml-test/tests')]
+
+ succeeded = []
+ failed = []
+
+ for path in args.dir:
+ if not os.path.isdir(path):
+ print('error: not a dir: {0}'.format(path))
+ return 2
+ for top, dirnames, fnames in os.walk(path):
+ for fname in fnames:
+ if not fname.endswith('.toml'):
+ continue
+
+ if args.testcase and not any(arg in fname for arg in args.testcase):
+ continue
+
+ parse_error = None
+ try:
+ with open(os.path.join(top, fname), 'rb') as fin:
+ parsed = toml.load(fin)
+ except toml.TomlError:
+ parsed = None
+ parse_error = sys.exc_info()
+ else:
+ dumped = toml.dumps(parsed)
+ parsed2 = toml.loads(dumped)
+ if parsed != parsed2:
+                        failed.append((fname, None, None, None))
+ continue
+
+ with open(os.path.join(top, fname), 'rb') as fin:
+ parsed = toml.load(fin, translate=_testbench_literal)
+
+ try:
+ with io.open(os.path.join(top, fname[:-5] + '.json'), 'rt', encoding='utf-8') as fin:
+ bench = json.load(fin)
+ except IOError:
+ bench = None
+
+ if parsed != adjust_bench(bench):
+ failed.append((fname, parsed, bench, parse_error))
+ else:
+ succeeded.append(fname)
+
+ for f, parsed, bench, e in failed:
+ print('failed: {}\n{}\n{}'.format(f, json.dumps(parsed, indent=4), json.dumps(bench, indent=4)))
+ if e:
+ traceback.print_exception(*e)
+ print('succeeded: {0}'.format(len(succeeded)))
+ return 1 if failed or not succeeded else 0
+
+if __name__ == '__main__':
+ sys.exit(_main())
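+
+# Usage sketch (the suite path is illustrative):
+#
+#     python test/test.py -d path/to/toml-test/tests
+#     python test/test.py float    # run only cases whose file name contains 'float'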
diff --git a/python/pyyaml/CHANGES b/python/pyyaml/CHANGES
new file mode 100644
index 000000000..938dc4690
--- /dev/null
+++ b/python/pyyaml/CHANGES
@@ -0,0 +1,147 @@
+
+For a complete Mercurial changelog, see
+'https://bitbucket.org/xi/pyyaml/commits'.
+
+3.11 (2014-03-26)
+-----------------
+
+* Source and binary distributions are rebuilt against the latest
+ versions of Cython and LibYAML.
+
+3.10 (2011-05-30)
+-----------------
+
+* Do not try to build LibYAML bindings on platforms other than CPython
+  (Thanks to olt(at)bogosoft(dot)com).
+* Clear cyclic references in the parser and the emitter
+  (Thanks to kristjan(at)ccpgames(dot)com).
+* Dropped support for Python 2.3 and 2.4.
+
+3.09 (2009-08-31)
+-----------------
+
+* Fixed an obscure scanner error that was not reported when there is
+  no line break at the end of the stream (Thanks to Ingy).
+* Fixed use of uninitialized memory when emitting anchors with
+  LibYAML bindings (Thanks to cegner(at)yahoo-inc(dot)com).
+* Fixed emitting incorrect BOM characters for UTF-16 (Thanks to
+  Valentin Nechayev).
+* Fixed the emitter for folded scalars not respecting the preferred
+  line width (Thanks to Ingy).
+* Fixed a subtle ordering issue with emitting '%TAG' directives
+  (Thanks to Andrey Somov).
+* Fixed performance regression with LibYAML bindings.
+
+
+3.08 (2008-12-31)
+-----------------
+
+* Python 3 support (Thanks to Erick Tryzelaar).
+* Use Cython instead of Pyrex to build LibYAML bindings.
+* Refactored support for unicode and byte input/output streams.
+
+
+3.07 (2008-12-29)
+-----------------
+
+* The emitter learned to use an optional indentation indicator
+  for block scalars; thus scalars with leading whitespace
+  can now be represented in a literal or folded style.
+* The test suite is now included in the source distribution.
+ To run the tests, type 'python setup.py test'.
+* Refactored the test suite: dropped unittest in favor of
+ a custom test appliance.
+* Fixed the path resolver in CDumper.
+* Forced an explicit document end indicator when there is
+ a possibility of parsing ambiguity.
+* More setup.py improvements: the package should be usable
+ when any combination of setuptools, Pyrex and LibYAML
+ is installed.
+* Windows binary packages are built against LibYAML-0.1.2.
+* Minor typos and corrections (Thanks to Ingy dot Net
+  and Andrey Somov).
+
+
+3.06 (2008-10-03)
+-----------------
+
+* setup.py checks whether LibYAML is installed and if so, builds
+ and installs LibYAML bindings. To force or disable installation
+ of LibYAML bindings, use '--with-libyaml' or '--without-libyaml'
+ respectively.
+* The source distribution includes compiled Pyrex sources so
+ building LibYAML bindings no longer requires Pyrex installed.
+* 'yaml.load()' raises an exception if the input stream contains
+ more than one YAML document.
+* Fixed exceptions produced by LibYAML bindings.
+* Fixed a dot '.' character being recognized as !!float.
+* Fixed Python 2.3 compatibility issue in constructing !!timestamp values.
+* Windows binary packages are built against the LibYAML stable branch.
+* Added attributes 'yaml.__version__' and 'yaml.__with_libyaml__'.
+
+
+3.05 (2007-05-13)
+-----------------
+
+* Windows binary packages were built with LibYAML trunk.
+* Fixed a bug that prevented processing a live stream of YAML documents in
+  a timely manner (Thanks edward(at)sweetbytes(dot)net).
+* Fixed a bug when the path in add_path_resolver contains boolean values
+ (Thanks jstroud(at)mbi(dot)ucla(dot)edu).
+* Fixed loss of microsecond precision in timestamps
+ (Thanks edemaine(at)mit(dot)edu).
+* Fixed loading an empty YAML stream.
+* Allowed immutable subclasses of YAMLObject.
+* Made the encoding of the unicode->str conversion explicit so that
+ the conversion does not depend on the default Python encoding.
+* Forced emitting float values in a YAML compatible form.
+
+
+3.04 (2006-08-20)
+-----------------
+
+* Include experimental LibYAML bindings.
+* Fully support recursive structures.
+* Sort dictionary keys. Mapping node values are now represented
+ as lists of pairs instead of dictionaries. No longer check
+ for duplicate mapping keys as it didn't work correctly anyway.
+* Fix invalid output of single-quoted scalars in cases when a single
+  quote is not escaped when preceded by whitespaces or line breaks.
+* To make porting easier, rewrite Parser not using generators.
+* Fix handling of unexpected block mapping values.
+* Fix a bug in Representer.represent_object: copy_reg.dispatch_table
+ was not correctly handled.
+* Fix a bug when a block scalar is incorrectly emitted in the simple
+ key context.
+* Hold references to the objects being represented.
+* Make Representer not try to guess !!pairs when a list is represented.
+* Fix timestamp constructing and representing.
+* Fix the 'N' plain scalar being incorrectly recognized as !!bool.
+
+
+3.03 (2006-06-19)
+-----------------
+
+* Fix Python 2.5 compatibility issues.
+* Fix numerous bugs in the float handling.
+* Fix scanning some ill-formed documents.
+* Other minor fixes.
+
+
+3.02 (2006-05-15)
+-----------------
+
+* Fix win32 installer. Apparently bdist_wininst does not work well
+ under Linux.
+* Fix a bug in add_path_resolver.
+* Add the yaml-highlight example. Try to run on a color terminal:
+ `python yaml_hl.py <any_document.yaml`.
+
+
+3.01 (2006-05-07)
+-----------------
+
+* Initial release. The version number reflects the codename
+  of the project (PyYAML 3000) and differentiates it from
+  the abandoned PyYaml module.
+
diff --git a/python/pyyaml/LICENSE b/python/pyyaml/LICENSE
new file mode 100644
index 000000000..050ced23f
--- /dev/null
+++ b/python/pyyaml/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2006 Kirill Simonov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/python/pyyaml/PKG-INFO b/python/pyyaml/PKG-INFO
new file mode 100644
index 000000000..e74f9f73c
--- /dev/null
+++ b/python/pyyaml/PKG-INFO
@@ -0,0 +1,36 @@
+Metadata-Version: 1.1
+Name: PyYAML
+Version: 3.11
+Summary: YAML parser and emitter for Python
+Home-page: http://pyyaml.org/wiki/PyYAML
+Author: Kirill Simonov
+Author-email: xi@resolvent.net
+License: MIT
+Download-URL: http://pyyaml.org/download/pyyaml/PyYAML-3.11.tar.gz
+Description: YAML is a data serialization format designed for human readability
+ and interaction with scripting languages. PyYAML is a YAML parser
+ and emitter for Python.
+
+ PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
+ support, capable extension API, and sensible error messages. PyYAML
+        supports standard YAML tags and provides Python-specific tags that
+        allow representing arbitrary Python objects.
+
+ PyYAML is applicable for a broad range of tasks from complex
+        configuration files to object serialization and persistence.
+Platform: Any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup
diff --git a/python/pyyaml/README b/python/pyyaml/README
new file mode 100644
index 000000000..c1edf1387
--- /dev/null
+++ b/python/pyyaml/README
@@ -0,0 +1,35 @@
+PyYAML - The next generation YAML parser and emitter for Python.
+
+To install, type 'python setup.py install'.
+
+By default, the setup.py script checks whether LibYAML is installed
+and if so, builds and installs LibYAML bindings. To skip the check
+and force installation of LibYAML bindings, use the option '--with-libyaml':
+'python setup.py --with-libyaml install'. To disable the check and
+skip building and installing LibYAML bindings, use '--without-libyaml':
+'python setup.py --without-libyaml install'.
+
+When LibYAML bindings are installed, you may use the fast LibYAML-based
+parser and emitter as follows:
+
+ >>> yaml.load(stream, Loader=yaml.CLoader)
+ >>> yaml.dump(data, Dumper=yaml.CDumper)
+
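+When it is not known in advance whether the bindings are available,
+a guarded import keeps code portable (a minimal sketch):
+
+    >>> try:
+    ...     from yaml import CLoader as Loader
+    ... except ImportError:
+    ...     from yaml import Loader
+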
+PyYAML includes a comprehensive test suite. To run the tests,
+type 'python setup.py test'.
+
+For more information, check the PyYAML homepage:
+'http://pyyaml.org/wiki/PyYAML'.
+
+For PyYAML tutorial and reference, see:
+'http://pyyaml.org/wiki/PyYAMLDocumentation'.
+
+Post your questions and opinions to the YAML-Core mailing list:
+'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
+
+Submit bug reports and feature requests to the PyYAML bug tracker:
+'http://pyyaml.org/newticket?component=pyyaml'.
+
+PyYAML is written by Kirill Simonov <xi@resolvent.net>. It is released
+under the MIT license. See the file LICENSE for more details.
+
diff --git a/python/pyyaml/examples/pygments-lexer/example.yaml b/python/pyyaml/examples/pygments-lexer/example.yaml
new file mode 100644
index 000000000..9c0ed9d08
--- /dev/null
+++ b/python/pyyaml/examples/pygments-lexer/example.yaml
@@ -0,0 +1,302 @@
+
+#
+# Examples from the Preview section of the YAML specification
+# (http://yaml.org/spec/1.2/#Preview)
+#
+
+# Sequence of scalars
+---
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+
+# Mapping scalars to scalars
+---
+hr: 65 # Home runs
+avg: 0.278 # Batting average
+rbi: 147 # Runs Batted In
+
+# Mapping scalars to sequences
+---
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+
+# Sequence of mappings
+---
+-
+ name: Mark McGwire
+ hr: 65
+ avg: 0.278
+-
+ name: Sammy Sosa
+ hr: 63
+ avg: 0.288
+
+# Sequence of sequences
+---
+- [name , hr, avg ]
+- [Mark McGwire, 65, 0.278]
+- [Sammy Sosa , 63, 0.288]
+
+# Mapping of mappings
+---
+Mark McGwire: {hr: 65, avg: 0.278}
+Sammy Sosa: {
+ hr: 63,
+ avg: 0.288
+ }
+
+# Two documents in a stream
+--- # Ranking of 1998 home runs
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+--- # Team ranking
+- Chicago Cubs
+- St Louis Cardinals
+
+# Documents with the end indicator
+---
+time: 20:03:20
+player: Sammy Sosa
+action: strike (miss)
+...
+---
+time: 20:03:47
+player: Sammy Sosa
+action: grand slam
+...
+
+# Comments
+---
+hr: # 1998 hr ranking
+ - Mark McGwire
+ - Sammy Sosa
+rbi:
+ # 1998 rbi ranking
+ - Sammy Sosa
+ - Ken Griffey
+
+# Anchors and aliases
+---
+hr:
+ - Mark McGwire
+ # Following node labeled SS
+ - &SS Sammy Sosa
+rbi:
+ - *SS # Subsequent occurrence
+ - Ken Griffey
+
+# Mapping between sequences
+---
+? - Detroit Tigers
+ - Chicago cubs
+:
+ - 2001-07-23
+? [ New York Yankees,
+ Atlanta Braves ]
+: [ 2001-07-02, 2001-08-12,
+ 2001-08-14 ]
+
+# Inline nested mapping
+---
+# products purchased
+- item : Super Hoop
+ quantity: 1
+- item : Basketball
+ quantity: 4
+- item : Big Shoes
+ quantity: 1
+
+# Literal scalars
+--- | # ASCII art
+ \//||\/||
+ // || ||__
+
+# Folded scalars
+--- >
+ Mark McGwire's
+ year was crippled
+ by a knee injury.
+
+# Preserved indented block in a folded scalar
+---
+>
+ Sammy Sosa completed another
+ fine season with great stats.
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+ What a year!
+
+# Indentation determines scope
+---
+name: Mark McGwire
+accomplishment: >
+ Mark set a major league
+ home run record in 1998.
+stats: |
+ 65 Home Runs
+ 0.278 Batting Average
+
+# Quoted scalars
+---
+unicode: "Sosa did fine.\u263A"
+control: "\b1998\t1999\t2000\n"
+hex esc: "\x0d\x0a is \r\n"
+single: '"Howdy!" he cried.'
+quoted: ' # not a ''comment''.'
+tie-fighter: '|\-*-/|'
+
+# Multi-line flow scalars
+---
+plain:
+ This unquoted scalar
+ spans many lines.
+quoted: "So does this
+ quoted scalar.\n"
+
+# Integers
+---
+canonical: 12345
+decimal: +12_345
+sexagesimal: 3:25:45
+octal: 014
+hexadecimal: 0xC
+
+# Floating point
+---
+canonical: 1.23015e+3
+exponential: 12.3015e+02
+sexagesimal: 20:30.15
+fixed: 1_230.15
+negative infinity: -.inf
+not a number: .NaN
+
+# Miscellaneous
+---
+null: ~
+true: boolean
+false: boolean
+string: '12345'
+
+# Timestamps
+---
+canonical: 2001-12-15T02:59:43.1Z
+iso8601: 2001-12-14t21:59:43.10-05:00
+spaced: 2001-12-14 21:59:43.10 -5
+date: 2002-12-14
+
+# Various explicit tags
+---
+not-date: !!str 2002-04-28
+picture: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X
+ 17unp5WZmZgAAAOfn515eXv
+ Pz7Y6OjuDg4J+fn5OTk6enp
+ 56enmleECcgggoBADs=
+application specific tag: !something |
+ The semantics of the tag
+ above may be different for
+ different documents.
+
+# Global tags
+%TAG ! tag:clarkevans.com,2002:
+--- !shape
+ # Use the ! handle for presenting
+ # tag:clarkevans.com,2002:circle
+- !circle
+ center: &ORIGIN {x: 73, y: 129}
+ radius: 7
+- !line
+ start: *ORIGIN
+ finish: { x: 89, y: 102 }
+- !label
+ start: *ORIGIN
+ color: 0xFFEEBB
+ text: Pretty vector drawing.
+
+# Unordered sets
+--- !!set
+# sets are represented as a
+# mapping where each key is
+# associated with the empty string
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+
+# Ordered mappings
+--- !!omap
+# ordered maps are represented as
+# a sequence of mappings, with
+# each mapping having one key
+- Mark McGwire: 65
+- Sammy Sosa: 63
+- Ken Griffy: 58
+
+# Full length example
+--- !<tag:clarkevans.com,2002:invoice>
+invoice: 34843
+date : 2001-01-23
+bill-to: &id001
+ given : Chris
+ family : Dumars
+ address:
+ lines: |
+ 458 Walkman Dr.
+ Suite #292
+ city : Royal Oak
+ state : MI
+ postal : 48046
+ship-to: *id001
+product:
+ - sku : BL394D
+ quantity : 4
+ description : Basketball
+ price : 450.00
+ - sku : BL4438H
+ quantity : 1
+ description : Super Hoop
+ price : 2392.00
+tax : 251.42
+total: 4443.52
+comments:
+ Late afternoon is best.
+ Backup contact is Nancy
+ Billsmer @ 338-4338.
+
+# Another full-length example
+---
+Time: 2001-11-23 15:01:42 -5
+User: ed
+Warning:
+ This is an error message
+ for the log file
+---
+Time: 2001-11-23 15:02:31 -5
+User: ed
+Warning:
+ A slightly different error
+ message.
+---
+Date: 2001-11-23 15:03:17 -5
+User: ed
+Fatal:
+ Unknown variable "bar"
+Stack:
+ - file: TopClass.py
+ line: 23
+ code: |
+ x = MoreObject("345\n")
+ - file: MoreClass.py
+ line: 58
+ code: |-
+ foo = bar
+
diff --git a/python/pyyaml/examples/pygments-lexer/yaml.py b/python/pyyaml/examples/pygments-lexer/yaml.py
new file mode 100644
index 000000000..1ce9dac9e
--- /dev/null
+++ b/python/pyyaml/examples/pygments-lexer/yaml.py
@@ -0,0 +1,431 @@
+
+"""
+yaml.py
+
+Lexer for YAML, a human-friendly data serialization language
+(http://yaml.org/).
+
+Written by Kirill Simonov <xi@resolvent.net>.
+
+License: Whatever suitable for inclusion into the Pygments package.
+"""
+
+from pygments.lexer import \
+ ExtendedRegexLexer, LexerContext, include, bygroups
+from pygments.token import \
+ Text, Comment, Punctuation, Name, Literal
+
+__all__ = ['YAMLLexer']
+
+
+class YAMLLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
+ super(YAMLLexerContext, self).__init__(*args, **kwds)
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+def something(TokenClass):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def reset_indent(TokenClass):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def save_indent(TokenClass, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), TokenClass, text
+ if extra:
+ yield match.start()+len(text), TokenClass.Error, extra
+ context.pos = match.end()
+ return callback
+
+def set_indent(TokenClass, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def set_block_scalar_indent(TokenClass):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def parse_block_scalar_empty_line(IndentTokenClass, ContentTokenClass):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), IndentTokenClass, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), IndentTokenClass, indentation
+ yield (match.start()+context.block_scalar_indent,
+ ContentTokenClass, content)
+ context.pos = match.end()
+ return callback
+
+def parse_block_scalar_indent(TokenClass):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+def parse_plain_scalar_indent(TokenClass):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), TokenClass, text
+ context.pos = match.end()
+ return callback
+
+
+class YAMLLexer(ExtendedRegexLexer):
+ """Lexer for the YAML language."""
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ tokens = {
+
+ # the root rules
+ 'root': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Directive),
+ 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Directive),
+ 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)',
+ reset_indent(Punctuation.Document), 'block-line'),
+ # indentation spaces
+ (r'[ ]*(?![ \t\n\r\f\v]|$)',
+ save_indent(Text.Indent, start=True),
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text.Blank),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
+ (r'\n', Text.Break, '#pop:2'),
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
+ bygroups(Text.Blank, Literal.Version), 'ignored-line'),
+ ],
+
+        # the %TAG directive
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
+ r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
+ bygroups(Text.Blank, Name.Type, Text.Blank, Name.Type),
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
+ (r'[ ]*$', something(Text.Blank), '#pop:2'),
+            # whitespace preceding block collection indicators
+ (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text.Indent)),
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+            # the beginning of a block line
+ (r'[ ]*', save_indent(Text.Indent), '#pop'),
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
+ (r'[ ]*(?=#|$)', something(Text.Blank), '#pop'),
+ # whitespaces separating tokens
+ (r'[ ]+', Text.Blank),
+            # tags, anchors and aliases
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
+ something(Literal.Scalar.Plain),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors' : [
+ # a full-form tag
+ (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Name.Type),
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
+ (r'!(?:[0-9A-Za-z_-]+)?'
+ r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Name.Type),
+ # an anchor
+ (r'&[0-9A-Za-z_-]+', Name.Anchor),
+ # an alias
+ (r'\*[0-9A-Za-z_-]+', Name.Alias),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', Literal.Scalar.Flow.Quote, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', Literal.Scalar.Flow.Quote, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
+ (r'[ ]+', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
+ something(Literal.Scalar.Plain),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
+ (r'\n', Text.Break),
+ # empty line
+ (r'^[ ]+$',
+ parse_block_scalar_empty_line(Text.Indent,
+ Literal.Scalar.Block)),
+ # indentation spaces (we may leave the state here)
+ (r'^[ ]*', parse_block_scalar_indent(Text.Indent)),
+ # line content
+ (r'[^\n\r\f\v]+', Literal.Scalar.Block),
+ ],
+
+    # the header of a literal or folded scalar
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+|[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Flow),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', Literal.Scalar.Flow.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\']+', Literal.Scalar.Flow),
+ # the closing quote
+ (r'\'', Literal.Scalar.Flow.Quote, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', Literal.Scalar.Flow.Escape),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ Literal.Scalar.Flow.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\"\\]+', Literal.Scalar.Flow),
+ # the closing quote
+ (r'"', Literal.Scalar.Flow.Quote, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
+ (r'^[ ]+$', Text.Blank),
+ # line breaks
+ (r'\n+', Text.Break),
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Punctuation.Document), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
+ (r'^[ ]*', parse_plain_scalar_indent(Text.Indent), '#pop'),
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
+ (r'[ ]*(?=:[ ]|:$)', something(Text.Blank), '#pop'),
+ # the scalar ends with whitespaces followed by a comment
+ (r'[ ]+(?=#)', Text.Blank, '#pop'),
+ # trailing whitespaces are ignored
+ (r'[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break, 'plain-scalar-in-block-context-new-line'),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+',
+ Literal.Scalar.Plain),
+ ],
+
+    # a plain scalar in the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
+ (r'[ ]*(?=[,:?\[\]{}])', something(Text.Blank), '#pop'),
+ # the scalar ends with a comment
+ (r'[ ]+(?=#)', Text.Blank, '#pop'),
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+|[ ]+$', Text.Blank),
+ # line breaks are ignored
+ (r'\n+', Text.Break),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v,:?\[\]{}]+', Literal.Scalar.Plain),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YAMLLexerContext(text, 0)
+ return super(YAMLLexer, self).get_tokens_unprocessed(text, context)
+
+
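+# Usage sketch (assumes this file is importable as a module named
+# ``yaml_lexer`` (the name is hypothetical) alongside a standard
+# Pygments install):
+#
+#     from pygments import highlight
+#     from pygments.formatters import TerminalFormatter
+#     from yaml_lexer import YAMLLexer
+#
+#     print(highlight('key: value\n', YAMLLexer(), TerminalFormatter()))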
diff --git a/python/pyyaml/examples/yaml-highlight/yaml_hl.cfg b/python/pyyaml/examples/yaml-highlight/yaml_hl.cfg
new file mode 100644
index 000000000..69bb84776
--- /dev/null
+++ b/python/pyyaml/examples/yaml-highlight/yaml_hl.cfg
@@ -0,0 +1,115 @@
+%YAML 1.1
+---
+
+ascii:
+
+ header: "\e[0;1;30;40m"
+
+ footer: "\e[0m"
+
+ tokens:
+ stream-start:
+ stream-end:
+ directive: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ document-start: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ document-end: { start: "\e[35m", end: "\e[0;1;30;40m" }
+ block-sequence-start:
+ block-mapping-start:
+ block-end:
+ flow-sequence-start: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-mapping-start: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-sequence-end: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-mapping-end: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ key: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ value: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ block-entry: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ flow-entry: { start: "\e[33m", end: "\e[0;1;30;40m" }
+ alias: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ anchor: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ tag: { start: "\e[32m", end: "\e[0;1;30;40m" }
+ scalar: { start: "\e[36m", end: "\e[0;1;30;40m" }
+
+ replaces:
+ - "\r\n": "\n"
+ - "\r": "\n"
+ - "\n": "\n"
+ - "\x85": "\n"
+ - "\u2028": "\n"
+ - "\u2029": "\n"
+
+html: &html
+
+ tokens:
+ stream-start:
+ stream-end:
+ directive: { start: <code class="directive_token">, end: </code> }
+ document-start: { start: <code class="document_start_token">, end: </code> }
+ document-end: { start: <code class="document_end_token">, end: </code> }
+ block-sequence-start:
+ block-mapping-start:
+ block-end:
+ flow-sequence-start: { start: <code class="delimiter_token">, end: </code> }
+ flow-mapping-start: { start: <code class="delimiter_token">, end: </code> }
+ flow-sequence-end: { start: <code class="delimiter_token">, end: </code> }
+ flow-mapping-end: { start: <code class="delimiter_token">, end: </code> }
+ key: { start: <code class="delimiter_token">, end: </code> }
+ value: { start: <code class="delimiter_token">, end: </code> }
+ block-entry: { start: <code class="delimiter_token">, end: </code> }
+ flow-entry: { start: <code class="delimiter_token">, end: </code> }
+ alias: { start: <code class="anchor_token">, end: </code> }
+ anchor: { start: <code class="anchor_token">, end: </code> }
+ tag: { start: <code class="tag_token">, end: </code> }
+ scalar: { start: <code class="scalar_token">, end: </code> }
+
+ events:
+ stream-start: { start: <pre class="yaml_stream"> }
+ stream-end: { end: </pre> }
+ document-start: { start: <span class="document"> }
+ document-end: { end: </span> }
+ sequence-start: { start: <span class="sequence"> }
+ sequence-end: { end: </span> }
+ mapping-start: { start: <span class="mapping"> }
+ mapping-end: { end: </span> }
+ scalar: { start: <span class="scalar">, end: </span> }
+
+ replaces:
+ - "\r\n": "\n"
+ - "\r": "\n"
+ - "\n": "\n"
+ - "\x85": "\n"
+ - "\u2028": "\n"
+ - "\u2029": "\n"
+ - "&": "&amp;"
+ - "<": "&lt;"
+ - ">": "&gt;"
+
+html-page:
+
+ header: |
+ <html>
+ <head>
+ <title>A YAML stream</title>
+ <style type="text/css">
+ .document { background: #FFF }
+ .sequence { background: #EEF }
+ .mapping { background: #EFE }
+ .scalar { background: #FEE }
+ .directive_token { color: #C0C }
+ .document_start_token { color: #C0C; font-weight: bold }
+ .document_end_token { color: #C0C; font-weight: bold }
+ .delimiter_token { color: #600; font-weight: bold }
+ .anchor_token { color: #090 }
+ .tag_token { color: #090 }
+ .scalar_token { color: #000 }
+ .yaml_stream { color: #999 }
+ </style>
+ <body>
+
+ footer: |
+ </body>
+ </html>
+
+ <<: *html
+
+
+# vim: ft=yaml
diff --git a/python/pyyaml/examples/yaml-highlight/yaml_hl.py b/python/pyyaml/examples/yaml-highlight/yaml_hl.py
new file mode 100755
index 000000000..d6f7bf4eb
--- /dev/null
+++ b/python/pyyaml/examples/yaml-highlight/yaml_hl.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+
+import yaml, codecs, sys, os.path, optparse
+
+class Style:
+
+ def __init__(self, header=None, footer=None,
+ tokens=None, events=None, replaces=None):
+ self.header = header
+ self.footer = footer
+ self.replaces = replaces
+ self.substitutions = {}
+ for domain, Class in [(tokens, 'Token'), (events, 'Event')]:
+ if not domain:
+ continue
+ for key in domain:
+ name = ''.join([part.capitalize() for part in key.split('-')])
+ cls = getattr(yaml, '%s%s' % (name, Class))
+ value = domain[key]
+ if not value:
+ continue
+ start = value.get('start')
+ end = value.get('end')
+ if start:
+ self.substitutions[cls, -1] = start
+ if end:
+ self.substitutions[cls, +1] = end
+
+ def __setstate__(self, state):
+ self.__init__(**state)
+
+yaml.add_path_resolver(u'tag:yaml.org,2002:python/object:__main__.Style',
+ [None], dict)
+yaml.add_path_resolver(u'tag:yaml.org,2002:pairs',
+ [None, u'replaces'], list)
+
+class YAMLHighlight:
+
+ def __init__(self, options):
+ config = yaml.load(file(options.config, 'rb').read())
+ self.style = config[options.style]
+ if options.input:
+ self.input = file(options.input, 'rb')
+ else:
+ self.input = sys.stdin
+ if options.output:
+ self.output = file(options.output, 'wb')
+ else:
+ self.output = sys.stdout
+
+ def highlight(self):
+ input = self.input.read()
+ if input.startswith(codecs.BOM_UTF16_LE):
+ input = unicode(input, 'utf-16-le')
+ elif input.startswith(codecs.BOM_UTF16_BE):
+ input = unicode(input, 'utf-16-be')
+ else:
+ input = unicode(input, 'utf-8')
+ substitutions = self.style.substitutions
+ tokens = yaml.scan(input)
+ events = yaml.parse(input)
+ markers = []
+ number = 0
+ for token in tokens:
+ number += 1
+ if token.start_mark.index != token.end_mark.index:
+ cls = token.__class__
+ if (cls, -1) in substitutions:
+ markers.append([token.start_mark.index, +2, number, substitutions[cls, -1]])
+ if (cls, +1) in substitutions:
+ markers.append([token.end_mark.index, -2, number, substitutions[cls, +1]])
+ number = 0
+ for event in events:
+ number += 1
+ cls = event.__class__
+ if (cls, -1) in substitutions:
+ markers.append([event.start_mark.index, +1, number, substitutions[cls, -1]])
+ if (cls, +1) in substitutions:
+ markers.append([event.end_mark.index, -1, number, substitutions[cls, +1]])
+ markers.sort()
+ markers.reverse()
+ chunks = []
+ position = len(input)
+ for index, weight1, weight2, substitution in markers:
+ if index < position:
+ chunk = input[index:position]
+ for substring, replacement in self.style.replaces:
+ chunk = chunk.replace(substring, replacement)
+ chunks.append(chunk)
+ position = index
+ chunks.append(substitution)
+ chunks.reverse()
+ result = u''.join(chunks)
+ if self.style.header:
+ self.output.write(self.style.header)
+ self.output.write(result.encode('utf-8'))
+ if self.style.footer:
+ self.output.write(self.style.footer)
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser()
+ parser.add_option('-s', '--style', dest='style', default='ascii',
+ help="specify the highlighting style", metavar='STYLE')
+ parser.add_option('-c', '--config', dest='config',
+ default=os.path.join(os.path.dirname(sys.argv[0]), 'yaml_hl.cfg'),
+ help="set an alternative configuration file", metavar='CONFIG')
+ parser.add_option('-i', '--input', dest='input', default=None,
+ help="set the input file (default: stdin)", metavar='FILE')
+ parser.add_option('-o', '--output', dest='output', default=None,
+ help="set the output file (default: stdout)", metavar='FILE')
+ (options, args) = parser.parse_args()
+ hl = YAMLHighlight(options)
+ hl.highlight()
+
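+# Usage sketch (file names are illustrative; the script is Python 2
+# as written):
+#
+#     python yaml_hl.py < some_document.yaml
+#     python yaml_hl.py -s html-page -i in.yaml -o out.html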
diff --git a/python/pyyaml/ext/_yaml.c b/python/pyyaml/ext/_yaml.c
new file mode 100644
index 000000000..f4a5bc524
--- /dev/null
+++ b/python/pyyaml/ext/_yaml.c
@@ -0,0 +1,22190 @@
+/* Generated by Cython 0.20.1 on Wed Mar 26 13:55:16 2014 */
+
+#define PY_SSIZE_T_CLEAN
+#ifndef CYTHON_USE_PYLONG_INTERNALS
+#ifdef PYLONG_BITS_IN_DIGIT
+#define CYTHON_USE_PYLONG_INTERNALS 0
+#else
+#include "pyconfig.h"
+#ifdef PYLONG_BITS_IN_DIGIT
+#define CYTHON_USE_PYLONG_INTERNALS 1
+#else
+#define CYTHON_USE_PYLONG_INTERNALS 0
+#endif
+#endif
+#endif
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02040000
+ #error Cython requires Python 2.4+.
+#else
+#define CYTHON_ABI "0_20_1"
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+#define CYTHON_COMPILING_IN_PYPY 1
+#define CYTHON_COMPILING_IN_CPYTHON 0
+#else
+#define CYTHON_COMPILING_IN_PYPY 0
+#define CYTHON_COMPILING_IN_CPYTHON 1
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+#define Py_OptimizeFlag 0
+#endif
+#if PY_VERSION_HEX < 0x02050000
+ typedef int Py_ssize_t;
+ #define PY_SSIZE_T_MAX INT_MAX
+ #define PY_SSIZE_T_MIN INT_MIN
+ #define PY_FORMAT_SIZE_T ""
+ #define CYTHON_FORMAT_SSIZE_T ""
+ #define PyInt_FromSsize_t(z) PyInt_FromLong(z)
+ #define PyInt_AsSsize_t(o) __Pyx_PyInt_As_int(o)
+ #define PyNumber_Index(o) ((PyNumber_Check(o) && !PyFloat_Check(o)) ? PyNumber_Int(o) : \
+ (PyErr_Format(PyExc_TypeError, \
+ "expected index value, got %.200s", Py_TYPE(o)->tp_name), \
+ (PyObject*)0))
+ #define __Pyx_PyIndex_Check(o) (PyNumber_Check(o) && !PyFloat_Check(o) && \
+ !PyComplex_Check(o))
+ #define PyIndex_Check __Pyx_PyIndex_Check
+ #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message)
+ #define __PYX_BUILD_PY_SSIZE_T "i"
+#else
+ #define __PYX_BUILD_PY_SSIZE_T "n"
+ #define CYTHON_FORMAT_SSIZE_T "z"
+ #define __Pyx_PyIndex_Check PyIndex_Check
+#endif
+#if PY_VERSION_HEX < 0x02060000
+ #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
+ #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
+ #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
+ #define PyVarObject_HEAD_INIT(type, size) \
+ PyObject_HEAD_INIT(type) size,
+ #define PyType_Modified(t)
+ typedef struct {
+ void *buf;
+ PyObject *obj;
+ Py_ssize_t len;
+ Py_ssize_t itemsize;
+ int readonly;
+ int ndim;
+ char *format;
+ Py_ssize_t *shape;
+ Py_ssize_t *strides;
+ Py_ssize_t *suboffsets;
+ void *internal;
+ } Py_buffer;
+ #define PyBUF_SIMPLE 0
+ #define PyBUF_WRITABLE 0x0001
+ #define PyBUF_FORMAT 0x0004
+ #define PyBUF_ND 0x0008
+ #define PyBUF_STRIDES (0x0010 | PyBUF_ND)
+ #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES)
+ #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES)
+ #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES)
+ #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES)
+ #define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_FORMAT | PyBUF_WRITABLE)
+ #define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_FORMAT | PyBUF_WRITABLE)
+ typedef int (*getbufferproc)(PyObject *, Py_buffer *, int);
+ typedef void (*releasebufferproc)(PyObject *, Py_buffer *);
+#endif
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#if PY_VERSION_HEX < 0x02060000
+ #define PyUnicode_FromString(s) PyUnicode_Decode(s, strlen(s), "UTF-8", "strict")
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define Py_TPFLAGS_CHECKTYPES 0
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3)
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#if PY_VERSION_HEX < 0x02060000
+ #define Py_TPFLAGS_HAVE_VERSION_TAG 0
+#endif
+#if PY_VERSION_HEX < 0x02060000 && !defined(Py_TPFLAGS_IS_ABSTRACT)
+ #define Py_TPFLAGS_IS_ABSTRACT 0
+#endif
+#if PY_VERSION_HEX < 0x030400a1 && !defined(Py_TPFLAGS_HAVE_FINALIZE)
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ? \
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+#endif
+#if PY_VERSION_HEX < 0x02060000
+ #define PyBytesObject PyStringObject
+ #define PyBytes_Type PyString_Type
+ #define PyBytes_Check PyString_Check
+ #define PyBytes_CheckExact PyString_CheckExact
+ #define PyBytes_FromString PyString_FromString
+ #define PyBytes_FromStringAndSize PyString_FromStringAndSize
+ #define PyBytes_FromFormat PyString_FromFormat
+ #define PyBytes_DecodeEscape PyString_DecodeEscape
+ #define PyBytes_AsString PyString_AsString
+ #define PyBytes_AsStringAndSize PyString_AsStringAndSize
+ #define PyBytes_Size PyString_Size
+ #define PyBytes_AS_STRING PyString_AS_STRING
+ #define PyBytes_GET_SIZE PyString_GET_SIZE
+ #define PyBytes_Repr PyString_Repr
+ #define PyBytes_Concat PyString_Concat
+ #define PyBytes_ConcatAndDel PyString_ConcatAndDel
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj) || \
+ PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#if PY_VERSION_HEX < 0x02060000
+ #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type)
+ #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type)
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300)
+ #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b)
+ #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value)
+ #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b)
+#else
+ #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \
+ (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \
+ (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \
+ (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0)))
+ #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \
+ (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+ (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \
+ (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1)))
+ #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \
+ (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+ (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \
+ (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1)))
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
+#endif
+#if PY_VERSION_HEX < 0x02050000
+ #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n)))
+ #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a))
+ #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n)))
+#else
+ #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n))
+ #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a))
+ #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n))
+#endif
+#if PY_VERSION_HEX < 0x02050000
+ #define __Pyx_NAMESTR(n) ((char *)(n))
+ #define __Pyx_DOCSTR(n) ((char *)(n))
+#else
+ #define __Pyx_NAMESTR(n) (n)
+ #define __Pyx_DOCSTR(n) (n)
+#endif
+#ifndef CYTHON_INLINE
+ #if defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ /* Initialize NaN. The sign is irrelevant, an exponent with all bits 1 and
+ a nonzero mantissa means NaN. If the first bit in the mantissa is 1, it is
+ a quiet NaN. */
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+#define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#define __PYX_HAVE___yaml
+#define __PYX_HAVE_API___yaml
+#include "_yaml.h"
+#ifdef _OPENMP
+#include <omp.h>
+#endif /* _OPENMP */
+
+#ifdef PYREX_WITHOUT_ASSERTIONS
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+typedef struct {PyObject **p; char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) ( \
+ (sizeof(type) < sizeof(Py_ssize_t)) || \
+ (sizeof(type) > sizeof(Py_ssize_t) && \
+ likely(v < (type)PY_SSIZE_T_MAX || \
+ v == (type)PY_SSIZE_T_MAX) && \
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN || \
+ v == (type)PY_SSIZE_T_MIN))) || \
+ (sizeof(type) == sizeof(Py_ssize_t) && \
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX || \
+ v == (type)PY_SSIZE_T_MAX))) )
+static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyObject_AsSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromUString(s) __Pyx_PyObject_FromString((char*)s)
+#define __Pyx_PyBytes_FromUString(s) __Pyx_PyBytes_FromString((char*)s)
+#define __Pyx_PyByteArray_FromUString(s) __Pyx_PyByteArray_FromString((char*)s)
+#define __Pyx_PyStr_FromUString(s) __Pyx_PyStr_FromString((char*)s)
+#define __Pyx_PyUnicode_FromUString(s) __Pyx_PyUnicode_FromString((char*)s)
+#if PY_MAJOR_VERSION < 3
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u)
+{
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return u_end - u - 1;
+}
+#else
+#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen
+#endif
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None)
+#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False))
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x);
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys = NULL;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ sys = PyImport_ImportModule("sys");
+ if (sys == NULL) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ if (default_encoding == NULL) goto bad;
+ if (strcmp(PyBytes_AsString(default_encoding), "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ const char* default_encoding_c = PyBytes_AS_STRING(default_encoding);
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (ascii_chars_u == NULL) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (ascii_chars_b == NULL || strncmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ }
+ Py_XDECREF(sys);
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return 0;
+bad:
+ Py_XDECREF(sys);
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
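+/* When the module is built with c_string_encoding=ascii, the check above
+   runs at import time: it decodes the 128 ASCII code points, re-encodes
+   them with sys.getdefaultencoding(), and raises ValueError unless the
+   round trip is byte-identical, i.e. unless the default encoding is a
+   superset of ASCII. */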
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys = NULL;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (sys == NULL) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ if (default_encoding == NULL) goto bad;
+ default_encoding_c = PyBytes_AS_STRING(default_encoding);
+ /* +1 for the terminating NUL byte that strcpy writes. */
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(sys);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(sys);
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+#ifdef __GNUC__
+ /* Test for GCC > 2.95 */
+ #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+ #else /* __GNUC__ > 2 ... */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+ #endif /* __GNUC__ > 2 ... */
+#else /* __GNUC__ */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
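+/* likely()/unlikely() map to GCC's __builtin_expect (available since
+   GCC 2.96) so the compiler can lay out the expected path first; on other
+   compilers they compile away to the bare expression. */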
+
+static PyObject *__pyx_m;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+ "_yaml.pyx",
+};
+
+/*--- Type declarations ---*/
+struct __pyx_obj_5_yaml_Mark;
+struct __pyx_obj_5_yaml_CParser;
+struct __pyx_obj_5_yaml_CEmitter;
+
+/* "_yaml.pyx":64
+ * MappingNode = yaml.nodes.MappingNode
+ *
+ * cdef class Mark: # <<<<<<<<<<<<<<
+ * cdef readonly object name
+ * cdef readonly int index
+ */
+struct __pyx_obj_5_yaml_Mark {
+ PyObject_HEAD
+ PyObject *name;
+ int index;
+ int line;
+ int column;
+ PyObject *buffer;
+ PyObject *pointer;
+};
+
+
+/* "_yaml.pyx":247
+ * # self.style = style
+ *
+ * cdef class CParser: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_parser_t parser
+ */
+struct __pyx_obj_5_yaml_CParser {
+ PyObject_HEAD
+ struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtab;
+ yaml_parser_t parser;
+ yaml_event_t parsed_event;
+ PyObject *stream;
+ PyObject *stream_name;
+ PyObject *current_token;
+ PyObject *current_event;
+ PyObject *anchors;
+ PyObject *stream_cache;
+ int stream_cache_len;
+ int stream_cache_pos;
+ int unicode_source;
+};
+
+
+/* "_yaml.pyx":935
+ * return 1
+ *
+ * cdef class CEmitter: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_emitter_t emitter
+ */
+struct __pyx_obj_5_yaml_CEmitter {
+ PyObject_HEAD
+ struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtab;
+ yaml_emitter_t emitter;
+ PyObject *stream;
+ int document_start_implicit;
+ int document_end_implicit;
+ PyObject *use_version;
+ PyObject *use_tags;
+ PyObject *serialized_nodes;
+ PyObject *anchors;
+ int last_alias_id;
+ int closed;
+ int dump_unicode;
+ PyObject *use_encoding;
+};
+
+
+
+/* "_yaml.pyx":247
+ * # self.style = style
+ *
+ * cdef class CParser: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_parser_t parser
+ */
+
+struct __pyx_vtabstruct_5_yaml_CParser {
+ PyObject *(*_parser_error)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_scan)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_token_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *);
+ PyObject *(*_parse)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_event_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *);
+ PyObject *(*_compose_document)(struct __pyx_obj_5_yaml_CParser *);
+ PyObject *(*_compose_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *);
+ PyObject *(*_compose_scalar_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ PyObject *(*_compose_sequence_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ PyObject *(*_compose_mapping_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *);
+ int (*_parse_next_event)(struct __pyx_obj_5_yaml_CParser *);
+};
+static struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtabptr_5_yaml_CParser;
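+/* Cython dispatches cdef methods through a per-class "vtable": a struct of
+   C function pointers stored on the type and reached via the __pyx_vtab
+   field of each instance. The entries above are CParser's internal
+   scanning, parsing and composing routines. */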
+
+
+/* "_yaml.pyx":935
+ * return 1
+ *
+ * cdef class CEmitter: # <<<<<<<<<<<<<<
+ *
+ * cdef yaml_emitter_t emitter
+ */
+
+struct __pyx_vtabstruct_5_yaml_CEmitter {
+ PyObject *(*_emitter_error)(struct __pyx_obj_5_yaml_CEmitter *);
+ int (*_object_to_event)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *);
+ int (*_anchor_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *);
+ int (*_serialize_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *);
+};
+static struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtabptr_5_yaml_CEmitter;
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil) \
+ if (acquire_gil) { \
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \
+ PyGILState_Release(__pyx_gilstate_save); \
+ } else { \
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil) \
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext() \
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif /* CYTHON_REFNANNY */
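+/* CYTHON_REFNANNY is Cython's reference-count debugging layer: when
+   enabled, every INCREF/DECREF is routed through an API struct imported
+   from the refnanny module so leaks can be traced to a source line; when
+   disabled (the default), the macros collapse to plain Py_INCREF/Py_DECREF
+   or to nothing. */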
+#define __Pyx_XDECREF_SET(r, v) do { \
+ PyObject *tmp = (PyObject *) r; \
+ r = v; __Pyx_XDECREF(tmp); \
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do { \
+ PyObject *tmp = (PyObject *) r; \
+ r = v; __Pyx_DECREF(tmp); \
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_getattro))
+ return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_getattr))
+ return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+ return PyObject_GetAttr(obj, attr_name);
+}
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
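+/* __Pyx_PyObject_GetAttrStr is a fast path for attribute lookup with an
+   interned string name: it calls the type's tp_getattro slot directly,
+   skipping the extra checks in PyObject_GetAttr, and falls back to the
+   old tp_getattr slot on Python 2. */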
+
+static PyObject *__Pyx_GetBuiltinName(PyObject *name); /*proto*/
+
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/
+
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/
+
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \
+ const char* function_name); /*proto*/
+
+static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); /*proto*/
+#else
+#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
+#endif
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/
+
+static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); /*proto*/
+
+static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); /*proto*/
+
+static CYTHON_INLINE int __Pyx_PySequence_Contains(PyObject* item, PyObject* seq, int eq) {
+ int result = PySequence_Contains(seq, item);
+ return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
+}
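+/* Shared helper for both `in` and `not in`: a negative result propagates
+   the error unchanged, otherwise the result is compared against
+   (eq == Py_EQ), so passing Py_NE yields the negated test. */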
+
+static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname);
+
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {
+ PyListObject* L = (PyListObject*) list;
+ Py_ssize_t len = Py_SIZE(list);
+ if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {
+ Py_INCREF(x);
+ PyList_SET_ITEM(list, len, x);
+ Py_SIZE(list) = len+1;
+ return 0;
+ }
+ return PyList_Append(list, x);
+}
+#else
+#define __Pyx_PyList_Append(L,x) PyList_Append(L,x)
+#endif
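+/* Fast in-place append: when the list has spare capacity and is more than
+   half full, the item is stored with PyList_SET_ITEM and Py_SIZE is bumped
+   directly, skipping the PyList_Append call; otherwise the generic path
+   handles resizing. The bitwise & between the two likely() conditions
+   evaluates both without a branch, presumably as a micro-optimization. */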
+
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o,n,NULL)
+static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_setattro))
+ return tp->tp_setattro(obj, attr_name, value);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_setattr))
+ return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value);
+#endif
+ return PyObject_SetAttr(obj, attr_name, value);
+}
+#else
+#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n)
+#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v)
+#endif
+
+static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); /*proto*/
+
+static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/
+
+#include <string.h>
+
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/
+
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/
+
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals
+#else
+#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals
+#endif
+
+#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \
+ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) : \
+ (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) : \
+ __Pyx_GetItemInt_Generic(o, to_py_func(i))))
+#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \
+ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \
+ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck) \
+ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ? \
+ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) : \
+ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL))
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck);
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
+ int is_list, int wraparound, int boundscheck);
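+/* The __Pyx_GetItemInt* macros specialize indexing on the C integer type:
+   when the index fits in Py_ssize_t they call a fast typed getter with
+   compile-time flags for list/tuple layout, wraparound (negative indices)
+   and bounds checking; otherwise the index is converted with to_py_func
+   and looked up generically (or an IndexError is raised outright for the
+   list- and tuple-only variants). */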
+
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
+
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
+
+static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/
+
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/
+
+static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
+
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
+
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/
+
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+static int __Pyx_check_binary_version(void);
+
+typedef struct {
+ int code_line;
+ PyCodeObject* code_object;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
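+/* The code-object cache maps a C source line to the PyCodeObject fabricated
+   for it, kept sorted by code_line and searched by bisection (per
+   __pyx_bisect_code_objects), so __Pyx_AddTraceback below can reuse code
+   objects when synthesizing Python-level tracebacks for C code. */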
+
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename); /*proto*/
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+
+
+/* Module declarations from '_yaml' */
+static PyTypeObject *__pyx_ptype_5_yaml_Mark = 0;
+static PyTypeObject *__pyx_ptype_5_yaml_CParser = 0;
+static PyTypeObject *__pyx_ptype_5_yaml_CEmitter = 0;
+static int __pyx_f_5_yaml_input_handler(void *, char *, int, int *); /*proto*/
+static int __pyx_f_5_yaml_output_handler(void *, char *, int); /*proto*/
+#define __Pyx_MODULE_NAME "_yaml"
+int __pyx_module_is_main__yaml = 0;
+
+/* Implementation of '_yaml' */
+static PyObject *__pyx_builtin_MemoryError;
+static PyObject *__pyx_builtin_AttributeError;
+static PyObject *__pyx_builtin_TypeError;
+static PyObject *__pyx_builtin_ValueError;
+static PyObject *__pyx_pf_5_yaml_get_version_string(CYTHON_UNUSED PyObject *__pyx_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_2get_version(CYTHON_UNUSED PyObject *__pyx_self); /* proto */
+static int __pyx_pf_5_yaml_4Mark___init__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v_name, int __pyx_v_index, int __pyx_v_line, int __pyx_v_column, PyObject *__pyx_v_buffer, PyObject *__pyx_v_pointer); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_2get_snippet(CYTHON_UNUSED struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4__str__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self); /* proto */
+static int __pyx_pf_5_yaml_7CParser___init__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_stream); /* proto */
+static void __pyx_pf_5_yaml_7CParser_2__dealloc__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_6raw_scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_8get_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_10peek_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_12check_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_14raw_parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_16get_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_18peek_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_20check_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_22check_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_24get_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_7CParser_26get_single_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self); /* proto */
+static int __pyx_pf_5_yaml_8CEmitter___init__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_canonical, PyObject *__pyx_v_indent, PyObject *__pyx_v_width, PyObject *__pyx_v_allow_unicode, PyObject *__pyx_v_line_break, PyObject *__pyx_v_encoding, PyObject *__pyx_v_explicit_start, PyObject *__pyx_v_explicit_end, PyObject *__pyx_v_version, PyObject *__pyx_v_tags); /* proto */
+static void __pyx_pf_5_yaml_8CEmitter_2__dealloc__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_6emit(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_8open(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_10close(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self); /* proto */
+static PyObject *__pyx_pf_5_yaml_8CEmitter_12serialize(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node); /* proto */
+static PyObject *__pyx_tp_new_5_yaml_Mark(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static PyObject *__pyx_tp_new_5_yaml_CParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static PyObject *__pyx_tp_new_5_yaml_CEmitter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
+static char __pyx_k__3[] = "?";
+static char __pyx_k__6[] = "";
+static char __pyx_k__7[] = "'";
+static char __pyx_k__8[] = "\"";
+static char __pyx_k__9[] = "|";
+static char __pyx_k_TAG[] = "TAG";
+static char __pyx_k__10[] = ">";
+static char __pyx_k__17[] = "\r";
+static char __pyx_k__18[] = "\n";
+static char __pyx_k__19[] = "\r\n";
+static char __pyx_k_tag[] = "tag";
+static char __pyx_k_YAML[] = "YAML";
+static char __pyx_k_file[] = "<file>";
+static char __pyx_k_line[] = "line";
+static char __pyx_k_main[] = "__main__";
+static char __pyx_k_name[] = "name";
+static char __pyx_k_read[] = "read";
+static char __pyx_k_tags[] = "tags";
+static char __pyx_k_test[] = "__test__";
+static char __pyx_k_yaml[] = "yaml";
+static char __pyx_k_class[] = "__class__";
+static char __pyx_k_error[] = "error";
+static char __pyx_k_index[] = "index";
+static char __pyx_k_major[] = "major";
+static char __pyx_k_minor[] = "minor";
+static char __pyx_k_nodes[] = "nodes";
+static char __pyx_k_patch[] = "patch";
+static char __pyx_k_style[] = "style";
+static char __pyx_k_utf_8[] = "utf-8";
+static char __pyx_k_value[] = "value";
+static char __pyx_k_width[] = "width";
+static char __pyx_k_write[] = "write";
+static char __pyx_k_anchor[] = "anchor";
+static char __pyx_k_buffer[] = "buffer";
+static char __pyx_k_column[] = "column";
+static char __pyx_k_events[] = "events";
+static char __pyx_k_id_03d[] = "id%03d";
+static char __pyx_k_import[] = "__import__";
+static char __pyx_k_indent[] = "indent";
+static char __pyx_k_parser[] = "parser";
+static char __pyx_k_reader[] = "reader";
+static char __pyx_k_stream[] = "stream";
+static char __pyx_k_strict[] = "strict";
+static char __pyx_k_tokens[] = "tokens";
+static char __pyx_k_yaml_2[] = "_yaml";
+static char __pyx_k_emitter[] = "emitter";
+static char __pyx_k_pointer[] = "pointer";
+static char __pyx_k_resolve[] = "resolve";
+static char __pyx_k_scanner[] = "scanner";
+static char __pyx_k_version[] = "version";
+static char __pyx_k_KeyToken[] = "KeyToken";
+static char __pyx_k_TagToken[] = "TagToken";
+static char __pyx_k_composer[] = "composer";
+static char __pyx_k_encoding[] = "encoding";
+static char __pyx_k_end_mark[] = "end_mark";
+static char __pyx_k_explicit[] = "explicit";
+static char __pyx_k_implicit[] = "implicit";
+static char __pyx_k_TypeError[] = "TypeError";
+static char __pyx_k_YAMLError[] = "YAMLError";
+static char __pyx_k_canonical[] = "canonical";
+static char __pyx_k_utf_16_be[] = "utf-16-be";
+static char __pyx_k_utf_16_le[] = "utf-16-le";
+static char __pyx_k_AliasEvent[] = "AliasEvent";
+static char __pyx_k_AliasToken[] = "AliasToken";
+static char __pyx_k_ScalarNode[] = "ScalarNode";
+static char __pyx_k_ValueError[] = "ValueError";
+static char __pyx_k_ValueToken[] = "ValueToken";
+static char __pyx_k_flow_style[] = "flow_style";
+static char __pyx_k_line_break[] = "line_break";
+static char __pyx_k_pyx_vtable[] = "__pyx_vtable__";
+static char __pyx_k_serializer[] = "serializer";
+static char __pyx_k_start_mark[] = "start_mark";
+static char __pyx_k_AnchorToken[] = "AnchorToken";
+static char __pyx_k_MappingNode[] = "MappingNode";
+static char __pyx_k_MemoryError[] = "MemoryError";
+static char __pyx_k_ParserError[] = "ParserError";
+static char __pyx_k_ReaderError[] = "ReaderError";
+static char __pyx_k_ScalarEvent[] = "ScalarEvent";
+static char __pyx_k_ScalarToken[] = "ScalarToken";
+static char __pyx_k_byte_string[] = "<byte string>";
+static char __pyx_k_constructor[] = "constructor";
+static char __pyx_k_get_version[] = "get_version";
+static char __pyx_k_representer[] = "representer";
+static char __pyx_k_EmitterError[] = "EmitterError";
+static char __pyx_k_ScannerError[] = "ScannerError";
+static char __pyx_k_SequenceNode[] = "SequenceNode";
+static char __pyx_k_explicit_end[] = "explicit_end";
+static char __pyx_k_BlockEndToken[] = "BlockEndToken";
+static char __pyx_k_ComposerError[] = "ComposerError";
+static char __pyx_k_allow_unicode[] = "allow_unicode";
+static char __pyx_k_too_many_tags[] = "too many tags";
+static char __pyx_k_AttributeError[] = "AttributeError";
+static char __pyx_k_DirectiveToken[] = "DirectiveToken";
+static char __pyx_k_FlowEntryToken[] = "FlowEntryToken";
+static char __pyx_k_StreamEndEvent[] = "StreamEndEvent";
+static char __pyx_k_StreamEndToken[] = "StreamEndToken";
+static char __pyx_k_explicit_start[] = "explicit_start";
+static char __pyx_k_unicode_string[] = "<unicode string>";
+static char __pyx_k_BlockEntryToken[] = "BlockEntryToken";
+static char __pyx_k_MappingEndEvent[] = "MappingEndEvent";
+static char __pyx_k_SerializerError[] = "SerializerError";
+static char __pyx_k_ascend_resolver[] = "ascend_resolver";
+static char __pyx_k_invalid_event_s[] = "invalid event %s";
+static char __pyx_k_no_parser_error[] = "no parser error";
+static char __pyx_k_ConstructorError[] = "ConstructorError";
+static char __pyx_k_DocumentEndEvent[] = "DocumentEndEvent";
+static char __pyx_k_DocumentEndToken[] = "DocumentEndToken";
+static char __pyx_k_RepresenterError[] = "RepresenterError";
+static char __pyx_k_SequenceEndEvent[] = "SequenceEndEvent";
+static char __pyx_k_StreamStartEvent[] = "StreamStartEvent";
+static char __pyx_k_StreamStartToken[] = "StreamStartToken";
+static char __pyx_k_descend_resolver[] = "descend_resolver";
+static char __pyx_k_no_emitter_error[] = "no emitter error";
+static char __pyx_k_second_occurence[] = "second occurrence";
+static char __pyx_k_MappingStartEvent[] = "MappingStartEvent";
+static char __pyx_k_DocumentStartEvent[] = "DocumentStartEvent";
+static char __pyx_k_DocumentStartToken[] = "DocumentStartToken";
+static char __pyx_k_SequenceStartEvent[] = "SequenceStartEvent";
+static char __pyx_k_get_version_string[] = "get_version_string";
+static char __pyx_k_unknown_event_type[] = "unknown event type";
+static char __pyx_k_unknown_token_type[] = "unknown token type";
+static char __pyx_k_FlowMappingEndToken[] = "FlowMappingEndToken";
+static char __pyx_k_FlowSequenceEndToken[] = "FlowSequenceEndToken";
+static char __pyx_k_in_s_line_d_column_d[] = " in \"%s\", line %d, column %d";
+static char __pyx_k_serializer_is_closed[] = "serializer is closed";
+static char __pyx_k_tag_must_be_a_string[] = "tag must be a string";
+static char __pyx_k_FlowMappingStartToken[] = "FlowMappingStartToken";
+static char __pyx_k_found_undefined_alias[] = "found undefined alias";
+static char __pyx_k_BlockMappingStartToken[] = "BlockMappingStartToken";
+static char __pyx_k_FlowSequenceStartToken[] = "FlowSequenceStartToken";
+static char __pyx_k_value_must_be_a_string[] = "value must be a string";
+static char __pyx_k_BlockSequenceStartToken[] = "BlockSequenceStartToken";
+static char __pyx_k_anchor_must_be_a_string[] = "anchor must be a string";
+static char __pyx_k_serializer_is_not_opened[] = "serializer is not opened";
+static char __pyx_k_a_string_value_is_expected[] = "a string value is expected";
+static char __pyx_k_but_found_another_document[] = "but found another document";
+static char __pyx_k_tag_handle_must_be_a_string[] = "tag handle must be a string";
+static char __pyx_k_tag_prefix_must_be_a_string[] = "tag prefix must be a string";
+static char __pyx_k_serializer_is_already_opened[] = "serializer is already opened";
+static char __pyx_k_root_src_pyyaml_ext__yaml_pyx[] = "/root/src/pyyaml/ext/_yaml.pyx";
+static char __pyx_k_a_string_or_stream_input_is_requ[] = "a string or stream input is required";
+static char __pyx_k_expected_a_single_document_in_th[] = "expected a single document in the stream";
+static char __pyx_k_found_duplicate_anchor_first_occ[] = "found duplicate anchor; first occurrence";
+static PyObject *__pyx_n_s_AliasEvent;
+static PyObject *__pyx_n_s_AliasToken;
+static PyObject *__pyx_n_s_AnchorToken;
+static PyObject *__pyx_n_s_AttributeError;
+static PyObject *__pyx_n_s_BlockEndToken;
+static PyObject *__pyx_n_s_BlockEntryToken;
+static PyObject *__pyx_n_s_BlockMappingStartToken;
+static PyObject *__pyx_n_s_BlockSequenceStartToken;
+static PyObject *__pyx_n_s_ComposerError;
+static PyObject *__pyx_n_s_ConstructorError;
+static PyObject *__pyx_n_s_DirectiveToken;
+static PyObject *__pyx_n_s_DocumentEndEvent;
+static PyObject *__pyx_n_s_DocumentEndToken;
+static PyObject *__pyx_n_s_DocumentStartEvent;
+static PyObject *__pyx_n_s_DocumentStartToken;
+static PyObject *__pyx_n_s_EmitterError;
+static PyObject *__pyx_n_s_FlowEntryToken;
+static PyObject *__pyx_n_s_FlowMappingEndToken;
+static PyObject *__pyx_n_s_FlowMappingStartToken;
+static PyObject *__pyx_n_s_FlowSequenceEndToken;
+static PyObject *__pyx_n_s_FlowSequenceStartToken;
+static PyObject *__pyx_n_s_KeyToken;
+static PyObject *__pyx_n_s_MappingEndEvent;
+static PyObject *__pyx_n_s_MappingNode;
+static PyObject *__pyx_n_s_MappingStartEvent;
+static PyObject *__pyx_n_s_MemoryError;
+static PyObject *__pyx_n_s_ParserError;
+static PyObject *__pyx_n_s_ReaderError;
+static PyObject *__pyx_n_s_RepresenterError;
+static PyObject *__pyx_n_s_ScalarEvent;
+static PyObject *__pyx_n_s_ScalarNode;
+static PyObject *__pyx_n_s_ScalarToken;
+static PyObject *__pyx_n_s_ScannerError;
+static PyObject *__pyx_n_s_SequenceEndEvent;
+static PyObject *__pyx_n_s_SequenceNode;
+static PyObject *__pyx_n_s_SequenceStartEvent;
+static PyObject *__pyx_n_s_SerializerError;
+static PyObject *__pyx_n_s_StreamEndEvent;
+static PyObject *__pyx_n_s_StreamEndToken;
+static PyObject *__pyx_n_s_StreamStartEvent;
+static PyObject *__pyx_n_s_StreamStartToken;
+static PyObject *__pyx_n_u_TAG;
+static PyObject *__pyx_n_s_TagToken;
+static PyObject *__pyx_n_s_TypeError;
+static PyObject *__pyx_n_s_ValueError;
+static PyObject *__pyx_n_s_ValueToken;
+static PyObject *__pyx_n_u_YAML;
+static PyObject *__pyx_n_s_YAMLError;
+static PyObject *__pyx_kp_s__10;
+static PyObject *__pyx_kp_u__10;
+static PyObject *__pyx_kp_s__17;
+static PyObject *__pyx_kp_s__18;
+static PyObject *__pyx_kp_s__19;
+static PyObject *__pyx_kp_s__3;
+static PyObject *__pyx_kp_u__3;
+static PyObject *__pyx_kp_u__6;
+static PyObject *__pyx_kp_s__7;
+static PyObject *__pyx_kp_u__7;
+static PyObject *__pyx_kp_s__8;
+static PyObject *__pyx_kp_u__8;
+static PyObject *__pyx_kp_s__9;
+static PyObject *__pyx_kp_u__9;
+static PyObject *__pyx_kp_s_a_string_or_stream_input_is_requ;
+static PyObject *__pyx_kp_u_a_string_or_stream_input_is_requ;
+static PyObject *__pyx_kp_s_a_string_value_is_expected;
+static PyObject *__pyx_kp_u_a_string_value_is_expected;
+static PyObject *__pyx_n_s_allow_unicode;
+static PyObject *__pyx_n_s_anchor;
+static PyObject *__pyx_kp_s_anchor_must_be_a_string;
+static PyObject *__pyx_kp_u_anchor_must_be_a_string;
+static PyObject *__pyx_n_s_ascend_resolver;
+static PyObject *__pyx_n_s_buffer;
+static PyObject *__pyx_kp_s_but_found_another_document;
+static PyObject *__pyx_kp_u_but_found_another_document;
+static PyObject *__pyx_kp_s_byte_string;
+static PyObject *__pyx_kp_u_byte_string;
+static PyObject *__pyx_n_s_canonical;
+static PyObject *__pyx_n_s_class;
+static PyObject *__pyx_n_s_column;
+static PyObject *__pyx_n_s_composer;
+static PyObject *__pyx_n_s_constructor;
+static PyObject *__pyx_n_s_descend_resolver;
+static PyObject *__pyx_n_s_emitter;
+static PyObject *__pyx_n_s_encoding;
+static PyObject *__pyx_n_u_encoding;
+static PyObject *__pyx_n_s_end_mark;
+static PyObject *__pyx_n_s_error;
+static PyObject *__pyx_n_s_events;
+static PyObject *__pyx_kp_s_expected_a_single_document_in_th;
+static PyObject *__pyx_kp_u_expected_a_single_document_in_th;
+static PyObject *__pyx_n_s_explicit;
+static PyObject *__pyx_n_s_explicit_end;
+static PyObject *__pyx_n_s_explicit_start;
+static PyObject *__pyx_kp_s_file;
+static PyObject *__pyx_kp_u_file;
+static PyObject *__pyx_n_s_flow_style;
+static PyObject *__pyx_kp_s_found_duplicate_anchor_first_occ;
+static PyObject *__pyx_kp_u_found_duplicate_anchor_first_occ;
+static PyObject *__pyx_kp_s_found_undefined_alias;
+static PyObject *__pyx_kp_u_found_undefined_alias;
+static PyObject *__pyx_n_s_get_version;
+static PyObject *__pyx_n_s_get_version_string;
+static PyObject *__pyx_kp_u_id_03d;
+static PyObject *__pyx_n_s_implicit;
+static PyObject *__pyx_n_s_import;
+static PyObject *__pyx_kp_s_in_s_line_d_column_d;
+static PyObject *__pyx_n_s_indent;
+static PyObject *__pyx_n_s_index;
+static PyObject *__pyx_kp_s_invalid_event_s;
+static PyObject *__pyx_kp_u_invalid_event_s;
+static PyObject *__pyx_n_s_line;
+static PyObject *__pyx_n_s_line_break;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_major;
+static PyObject *__pyx_n_s_minor;
+static PyObject *__pyx_n_s_name;
+static PyObject *__pyx_kp_s_no_emitter_error;
+static PyObject *__pyx_kp_u_no_emitter_error;
+static PyObject *__pyx_kp_s_no_parser_error;
+static PyObject *__pyx_kp_u_no_parser_error;
+static PyObject *__pyx_n_s_nodes;
+static PyObject *__pyx_n_s_parser;
+static PyObject *__pyx_n_s_patch;
+static PyObject *__pyx_n_s_pointer;
+static PyObject *__pyx_n_s_pyx_vtable;
+static PyObject *__pyx_n_s_read;
+static PyObject *__pyx_n_s_reader;
+static PyObject *__pyx_n_s_representer;
+static PyObject *__pyx_n_s_resolve;
+static PyObject *__pyx_kp_s_root_src_pyyaml_ext__yaml_pyx;
+static PyObject *__pyx_n_s_scanner;
+static PyObject *__pyx_kp_s_second_occurence;
+static PyObject *__pyx_kp_u_second_occurence;
+static PyObject *__pyx_n_s_serializer;
+static PyObject *__pyx_kp_s_serializer_is_already_opened;
+static PyObject *__pyx_kp_u_serializer_is_already_opened;
+static PyObject *__pyx_kp_s_serializer_is_closed;
+static PyObject *__pyx_kp_u_serializer_is_closed;
+static PyObject *__pyx_kp_s_serializer_is_not_opened;
+static PyObject *__pyx_kp_u_serializer_is_not_opened;
+static PyObject *__pyx_n_s_start_mark;
+static PyObject *__pyx_n_s_stream;
+static PyObject *__pyx_n_s_style;
+static PyObject *__pyx_n_s_tag;
+static PyObject *__pyx_kp_s_tag_handle_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_handle_must_be_a_string;
+static PyObject *__pyx_kp_s_tag_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_must_be_a_string;
+static PyObject *__pyx_kp_s_tag_prefix_must_be_a_string;
+static PyObject *__pyx_kp_u_tag_prefix_must_be_a_string;
+static PyObject *__pyx_n_s_tags;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_tokens;
+static PyObject *__pyx_kp_s_too_many_tags;
+static PyObject *__pyx_kp_u_too_many_tags;
+static PyObject *__pyx_kp_s_unicode_string;
+static PyObject *__pyx_kp_u_unicode_string;
+static PyObject *__pyx_kp_s_unknown_event_type;
+static PyObject *__pyx_kp_u_unknown_event_type;
+static PyObject *__pyx_kp_s_unknown_token_type;
+static PyObject *__pyx_kp_u_unknown_token_type;
+static PyObject *__pyx_kp_s_utf_16_be;
+static PyObject *__pyx_kp_u_utf_16_be;
+static PyObject *__pyx_kp_s_utf_16_le;
+static PyObject *__pyx_kp_u_utf_16_le;
+static PyObject *__pyx_kp_u_utf_8;
+static PyObject *__pyx_n_s_value;
+static PyObject *__pyx_kp_s_value_must_be_a_string;
+static PyObject *__pyx_kp_u_value_must_be_a_string;
+static PyObject *__pyx_n_s_version;
+static PyObject *__pyx_n_s_width;
+static PyObject *__pyx_n_s_write;
+static PyObject *__pyx_n_s_yaml;
+static PyObject *__pyx_n_s_yaml_2;
+static PyObject *__pyx_int_0;
+static PyObject *__pyx_int_1;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_tuple__2;
+static PyObject *__pyx_tuple__4;
+static PyObject *__pyx_tuple__5;
+static PyObject *__pyx_tuple__11;
+static PyObject *__pyx_tuple__12;
+static PyObject *__pyx_tuple__13;
+static PyObject *__pyx_tuple__14;
+static PyObject *__pyx_tuple__15;
+static PyObject *__pyx_tuple__16;
+static PyObject *__pyx_tuple__20;
+static PyObject *__pyx_tuple__21;
+static PyObject *__pyx_tuple__22;
+static PyObject *__pyx_tuple__23;
+static PyObject *__pyx_tuple__24;
+static PyObject *__pyx_tuple__25;
+static PyObject *__pyx_tuple__26;
+static PyObject *__pyx_tuple__27;
+static PyObject *__pyx_tuple__28;
+static PyObject *__pyx_tuple__29;
+static PyObject *__pyx_tuple__30;
+static PyObject *__pyx_tuple__31;
+static PyObject *__pyx_tuple__32;
+static PyObject *__pyx_tuple__33;
+static PyObject *__pyx_tuple__34;
+static PyObject *__pyx_tuple__35;
+static PyObject *__pyx_tuple__36;
+static PyObject *__pyx_tuple__37;
+static PyObject *__pyx_tuple__38;
+static PyObject *__pyx_tuple__39;
+static PyObject *__pyx_tuple__40;
+static PyObject *__pyx_tuple__41;
+static PyObject *__pyx_tuple__42;
+static PyObject *__pyx_tuple__43;
+static PyObject *__pyx_tuple__44;
+static PyObject *__pyx_tuple__45;
+static PyObject *__pyx_tuple__46;
+static PyObject *__pyx_tuple__47;
+static PyObject *__pyx_tuple__48;
+static PyObject *__pyx_tuple__49;
+static PyObject *__pyx_tuple__50;
+static PyObject *__pyx_tuple__51;
+static PyObject *__pyx_tuple__52;
+static PyObject *__pyx_tuple__53;
+static PyObject *__pyx_tuple__54;
+static PyObject *__pyx_tuple__55;
+static PyObject *__pyx_tuple__56;
+static PyObject *__pyx_tuple__57;
+static PyObject *__pyx_tuple__58;
+static PyObject *__pyx_tuple__59;
+static PyObject *__pyx_tuple__60;
+static PyObject *__pyx_tuple__61;
+static PyObject *__pyx_tuple__62;
+static PyObject *__pyx_tuple__63;
+static PyObject *__pyx_tuple__64;
+static PyObject *__pyx_tuple__65;
+static PyObject *__pyx_tuple__66;
+static PyObject *__pyx_tuple__67;
+static PyObject *__pyx_tuple__68;
+static PyObject *__pyx_tuple__69;
+static PyObject *__pyx_tuple__70;
+static PyObject *__pyx_tuple__71;
+static PyObject *__pyx_tuple__72;
+static PyObject *__pyx_tuple__74;
+static PyObject *__pyx_codeobj__73;
+static PyObject *__pyx_codeobj__75;
+
+/* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_1get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyMethodDef __pyx_mdef_5_yaml_1get_version_string = {__Pyx_NAMESTR("get_version_string"), (PyCFunction)__pyx_pw_5_yaml_1get_version_string, METH_NOARGS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pw_5_yaml_1get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_version_string (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_get_version_string(__pyx_self);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_get_version_string(CYTHON_UNUSED PyObject *__pyx_self) {
+ char *__pyx_v_value;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_version_string", 0);
+
+ /* "_yaml.pyx":6
+ * def get_version_string():
+ * cdef char *value
+ * value = yaml_get_version_string() # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * return value
+ */
+ __pyx_v_value = yaml_get_version_string();
+
+ /* "_yaml.pyx":7
+ * cdef char *value
+ * value = yaml_get_version_string()
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return value
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":8
+ * value = yaml_get_version_string()
+ * if PY_MAJOR_VERSION < 3:
+ * return value # <<<<<<<<<<<<<<
+ * else:
+ * return PyUnicode_FromString(value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_PyBytes_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 8; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":10
+ * return value
+ * else:
+ * return PyUnicode_FromString(value) # <<<<<<<<<<<<<<
+ *
+ * def get_version():
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 10; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_AddTraceback("_yaml.get_version_string", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_3get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyMethodDef __pyx_mdef_5_yaml_3get_version = {__Pyx_NAMESTR("get_version"), (PyCFunction)__pyx_pw_5_yaml_3get_version, METH_NOARGS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pw_5_yaml_3get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_version (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_2get_version(__pyx_self);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_2get_version(CYTHON_UNUSED PyObject *__pyx_self) {
+ int __pyx_v_major;
+ int __pyx_v_minor;
+ int __pyx_v_patch;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_version", 0);
+
+ /* "_yaml.pyx":14
+ * def get_version():
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch) # <<<<<<<<<<<<<<
+ * return (major, minor, patch)
+ *
+ */
+ yaml_get_version((&__pyx_v_major), (&__pyx_v_minor), (&__pyx_v_patch));
+
+ /* "_yaml.pyx":15
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ * return (major, minor, patch) # <<<<<<<<<<<<<<
+ *
+ * #Mark = yaml.error.Mark
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_major); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_minor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_patch); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.get_version", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":72
+ * cdef readonly pointer
+ *
+ * def __init__(self, object name, int index, int line, int column, # <<<<<<<<<<<<<<
+ * object buffer, object pointer):
+ * self.name = name
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_4Mark_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_4Mark_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_name = 0;
+ int __pyx_v_index;
+ int __pyx_v_line;
+ int __pyx_v_column;
+ PyObject *__pyx_v_buffer = 0;
+ PyObject *__pyx_v_pointer = 0;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_name,&__pyx_n_s_index,&__pyx_n_s_line,&__pyx_n_s_column,&__pyx_n_s_buffer,&__pyx_n_s_pointer,0};
+ PyObject* values[6] = {0,0,0,0,0,0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_name)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ case 1:
+ if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_index)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ case 2:
+ if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_line)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ case 3:
+ if (likely((values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_column)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 3); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ case 4:
+ if (likely((values[4] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_buffer)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 4); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ case 5:
+ if (likely((values[5] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pointer)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 5); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 6) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ }
+ __pyx_v_name = values[0];
+ __pyx_v_index = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_index == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_v_line = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_line == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_v_column = __Pyx_PyInt_As_int(values[3]); if (unlikely((__pyx_v_column == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_v_buffer = values[4];
+ __pyx_v_pointer = values[5];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.Mark.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_4Mark___init__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self), __pyx_v_name, __pyx_v_index, __pyx_v_line, __pyx_v_column, __pyx_v_buffer, __pyx_v_pointer);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static int __pyx_pf_5_yaml_4Mark___init__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self, PyObject *__pyx_v_name, int __pyx_v_index, int __pyx_v_line, int __pyx_v_column, PyObject *__pyx_v_buffer, PyObject *__pyx_v_pointer) {
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__", 0);
+
+ /* "_yaml.pyx":74
+ * def __init__(self, object name, int index, int line, int column,
+ * object buffer, object pointer):
+ * self.name = name # <<<<<<<<<<<<<<
+ * self.index = index
+ * self.line = line
+ */
+ __Pyx_INCREF(__pyx_v_name);
+ __Pyx_GIVEREF(__pyx_v_name);
+ __Pyx_GOTREF(__pyx_v_self->name);
+ __Pyx_DECREF(__pyx_v_self->name);
+ __pyx_v_self->name = __pyx_v_name;
+
+ /* "_yaml.pyx":75
+ * object buffer, object pointer):
+ * self.name = name
+ * self.index = index # <<<<<<<<<<<<<<
+ * self.line = line
+ * self.column = column
+ */
+ __pyx_v_self->index = __pyx_v_index;
+
+ /* "_yaml.pyx":76
+ * self.name = name
+ * self.index = index
+ * self.line = line # <<<<<<<<<<<<<<
+ * self.column = column
+ * self.buffer = buffer
+ */
+ __pyx_v_self->line = __pyx_v_line;
+
+ /* "_yaml.pyx":77
+ * self.index = index
+ * self.line = line
+ * self.column = column # <<<<<<<<<<<<<<
+ * self.buffer = buffer
+ * self.pointer = pointer
+ */
+ __pyx_v_self->column = __pyx_v_column;
+
+ /* "_yaml.pyx":78
+ * self.line = line
+ * self.column = column
+ * self.buffer = buffer # <<<<<<<<<<<<<<
+ * self.pointer = pointer
+ *
+ */
+ __Pyx_INCREF(__pyx_v_buffer);
+ __Pyx_GIVEREF(__pyx_v_buffer);
+ __Pyx_GOTREF(__pyx_v_self->buffer);
+ __Pyx_DECREF(__pyx_v_self->buffer);
+ __pyx_v_self->buffer = __pyx_v_buffer;
+
+ /* "_yaml.pyx":79
+ * self.column = column
+ * self.buffer = buffer
+ * self.pointer = pointer # <<<<<<<<<<<<<<
+ *
+ * def get_snippet(self):
+ */
+ __Pyx_INCREF(__pyx_v_pointer);
+ __Pyx_GIVEREF(__pyx_v_pointer);
+ __Pyx_GOTREF(__pyx_v_self->pointer);
+ __Pyx_DECREF(__pyx_v_self->pointer);
+ __pyx_v_self->pointer = __pyx_v_pointer;
+
+ /* "_yaml.pyx":72
+ * cdef readonly pointer
+ *
+ * def __init__(self, object name, int index, int line, int column, # <<<<<<<<<<<<<<
+ * object buffer, object pointer):
+ * self.name = name
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":81
+ * self.pointer = pointer
+ *
+ * def get_snippet(self): # <<<<<<<<<<<<<<
+ * return None
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_3get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_3get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_snippet (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_2get_snippet(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_2get_snippet(CYTHON_UNUSED struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_snippet", 0);
+
+ /* "_yaml.pyx":82
+ *
+ * def get_snippet(self):
+ * return None # <<<<<<<<<<<<<<
+ *
+ * def __str__(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_None);
+ __pyx_r = Py_None;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":81
+ * self.pointer = pointer
+ *
+ * def get_snippet(self): # <<<<<<<<<<<<<<
+ * return None
+ *
+ */
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":84
+ * return None
+ *
+ * def __str__(self): # <<<<<<<<<<<<<<
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_5__str__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_5__str__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__str__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4__str__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4__str__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_v_where = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__str__", 0);
+
+ /* "_yaml.pyx":86
+ * def __str__(self):
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1) # <<<<<<<<<<<<<<
+ * return where
+ *
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_long((__pyx_v_self->line + 1)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyInt_From_long((__pyx_v_self->column + 1)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->name);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->name);
+ __Pyx_GIVEREF(__pyx_v_self->name);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_in_s_line_d_column_d, __pyx_t_3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_where = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":87
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ * return where # <<<<<<<<<<<<<<
+ *
+ * #class YAMLError(Exception):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_where);
+ __pyx_r = __pyx_v_where;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":84
+ * return None
+ *
+ * def __str__(self): # <<<<<<<<<<<<<<
+ * where = " in \"%s\", line %d, column %d" \
+ * % (self.name, self.line+1, self.column+1)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.Mark.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_where);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
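+
+/* The temporaries above implement the Python expression
+ *     " in \"%s\", line %d, column %d" % (self.name, self.line+1, self.column+1)
+ * box the two ints, pack a 3-tuple, then apply __Pyx_PyString_Format.
+ * With hypothetical values Mark('doc.yaml', 0, 1, 2, None, None),
+ * str(mark) would yield ' in "doc.yaml", line 2, column 3'.
+ */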
+
+/* "_yaml.pyx":65
+ *
+ * cdef class Mark:
+ * cdef readonly object name # <<<<<<<<<<<<<<
+ * cdef readonly int index
+ * cdef readonly int line
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_4name_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_4name_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4name___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->name);
+ __pyx_r = __pyx_v_self->name;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":66
+ * cdef class Mark:
+ * cdef readonly object name
+ * cdef readonly int index # <<<<<<<<<<<<<<
+ * cdef readonly int line
+ * cdef readonly int column
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_5index_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_5index_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_5index___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 66; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.index.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":67
+ * cdef readonly object name
+ * cdef readonly int index
+ * cdef readonly int line # <<<<<<<<<<<<<<
+ * cdef readonly int column
+ * cdef readonly buffer
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_4line_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_4line_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_4line___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->line); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 67; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":68
+ * cdef readonly int index
+ * cdef readonly int line
+ * cdef readonly int column # <<<<<<<<<<<<<<
+ * cdef readonly buffer
+ * cdef readonly pointer
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_6column_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_6column_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_6column___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->column); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.Mark.column.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":69
+ * cdef readonly int line
+ * cdef readonly int column
+ * cdef readonly buffer # <<<<<<<<<<<<<<
+ * cdef readonly pointer
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_6buffer_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_6buffer_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_6buffer___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->buffer);
+ __pyx_r = __pyx_v_self->buffer;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":70
+ * cdef readonly int column
+ * cdef readonly buffer
+ * cdef readonly pointer # <<<<<<<<<<<<<<
+ *
+ * def __init__(self, object name, int index, int line, int column,
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_4Mark_7pointer_1__get__(PyObject *__pyx_v_self); /*proto*/
+static PyObject *__pyx_pw_5_yaml_4Mark_7pointer_1__get__(PyObject *__pyx_v_self) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_4Mark_7pointer___get__(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(struct __pyx_obj_5_yaml_Mark *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__get__", 0);
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->pointer);
+ __pyx_r = __pyx_v_self->pointer;
+ goto __pyx_L0;
+
+ /* function exit code */
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
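+
+/* Each `cdef readonly` attribute of Mark gets one generated property
+ * getter above. The object-typed slots (name, buffer, pointer) just
+ * INCREF and return, so those getters cannot fail; the int slots
+ * (index, line, column) must be boxed through __Pyx_PyInt_From_int,
+ * which can raise, hence their extra __pyx_L1_error path.
+ */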
+
+/* "_yaml.pyx":262
+ * cdef int unicode_source
+ *
+ * def __init__(self, stream): # <<<<<<<<<<<<<<
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_7CParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_7CParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_stream = 0;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_stream,0};
+ PyObject* values[1] = {0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_stream)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 262; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 1) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ }
+ __pyx_v_stream = values[0];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 262; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_7CParser___init__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_stream);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
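+
+/* The wrapper above is Cython's generated argument unpacking for
+ * `def __init__(self, stream)`: a positional argument is read straight
+ * out of the args tuple, the keyword path (note the fall-through
+ * switch) lets `stream` be passed by name, and any arity mismatch
+ * funnels into __Pyx_RaiseArgtupleInvalid.
+ */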
+
+static int __pyx_pf_5_yaml_7CParser___init__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_stream) {
+ PyObject *__pyx_v_is_readable = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_t_6;
+ PyObject *__pyx_t_7 = NULL;
+ PyObject *__pyx_t_8 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__init__", 0);
+ __Pyx_INCREF(__pyx_v_stream);
+
+ /* "_yaml.pyx":264
+ * def __init__(self, stream):
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT
+ */
+ __pyx_t_1 = ((yaml_parser_initialize((&__pyx_v_self->parser)) == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":265
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":266
+ * if yaml_parser_initialize(&self.parser) == 0:
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT # <<<<<<<<<<<<<<
+ * is_readable = 1
+ * try:
+ */
+ __pyx_v_self->parsed_event.type = YAML_NO_EVENT;
+
+ /* "_yaml.pyx":267
+ * raise MemoryError
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1 # <<<<<<<<<<<<<<
+ * try:
+ * stream.read
+ */
+ __Pyx_INCREF(__pyx_int_1);
+ __pyx_v_is_readable = __pyx_int_1;
+
+ /* "_yaml.pyx":268
+ * self.parsed_event.type = YAML_NO_EVENT
+ * is_readable = 1
+ * try: # <<<<<<<<<<<<<<
+ * stream.read
+ * except AttributeError:
+ */
+ {
+ __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
+ __Pyx_XGOTREF(__pyx_t_2);
+ __Pyx_XGOTREF(__pyx_t_3);
+ __Pyx_XGOTREF(__pyx_t_4);
+ /*try:*/ {
+
+ /* "_yaml.pyx":269
+ * is_readable = 1
+ * try:
+ * stream.read # <<<<<<<<<<<<<<
+ * except AttributeError:
+ * is_readable = 0
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_stream, __pyx_n_s_read); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 269; __pyx_clineno = __LINE__; goto __pyx_L4_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ }
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ goto __pyx_L11_try_end;
+ __pyx_L4_error:;
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":270
+ * try:
+ * stream.read
+ * except AttributeError: # <<<<<<<<<<<<<<
+ * is_readable = 0
+ * self.unicode_source = 0
+ */
+ __pyx_t_6 = PyErr_ExceptionMatches(__pyx_builtin_AttributeError);
+ if (__pyx_t_6) {
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_7, &__pyx_t_8) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L6_except_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_GOTREF(__pyx_t_8);
+
+ /* "_yaml.pyx":271
+ * stream.read
+ * except AttributeError:
+ * is_readable = 0 # <<<<<<<<<<<<<<
+ * self.unicode_source = 0
+ * if is_readable:
+ */
+ __Pyx_INCREF(__pyx_int_0);
+ __Pyx_DECREF_SET(__pyx_v_is_readable, __pyx_int_0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ goto __pyx_L5_exception_handled;
+ }
+ goto __pyx_L6_except_error;
+ __pyx_L6_except_error:;
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
+ goto __pyx_L1_error;
+ __pyx_L5_exception_handled:;
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
+ __pyx_L11_try_end:;
+ }
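+
+ /* The block above compiles `try: stream.read / except AttributeError:
+ * is_readable = 0`. __Pyx_ExceptionSave snapshots any in-flight
+ * exception state, the attribute lookup runs, and on AttributeError
+ * the handler swallows it and restores the saved state through
+ * __Pyx_ExceptionReset; any other exception escapes to __pyx_L1_error.
+ * The lookup itself is just a duck-typing probe for a file-like
+ * object.
+ */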
+
+ /* "_yaml.pyx":272
+ * except AttributeError:
+ * is_readable = 0
+ * self.unicode_source = 0 # <<<<<<<<<<<<<<
+ * if is_readable:
+ * self.stream = stream
+ */
+ __pyx_v_self->unicode_source = 0;
+
+ /* "_yaml.pyx":273
+ * is_readable = 0
+ * self.unicode_source = 0
+ * if is_readable: # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * try:
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_is_readable); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 273; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":274
+ * self.unicode_source = 0
+ * if is_readable:
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * try:
+ * self.stream_name = stream.name
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":275
+ * if is_readable:
+ * self.stream = stream
+ * try: # <<<<<<<<<<<<<<
+ * self.stream_name = stream.name
+ * except AttributeError:
+ */
+ {
+ __Pyx_ExceptionSave(&__pyx_t_4, &__pyx_t_3, &__pyx_t_2);
+ __Pyx_XGOTREF(__pyx_t_4);
+ __Pyx_XGOTREF(__pyx_t_3);
+ __Pyx_XGOTREF(__pyx_t_2);
+ /*try:*/ {
+
+ /* "_yaml.pyx":276
+ * self.stream = stream
+ * try:
+ * self.stream_name = stream.name # <<<<<<<<<<<<<<
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_stream, __pyx_n_s_name); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L15_error;}
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_GIVEREF(__pyx_t_8);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_t_8;
+ __pyx_t_8 = 0;
+ }
+ __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+ goto __pyx_L22_try_end;
+ __pyx_L15_error:;
+ __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+
+ /* "_yaml.pyx":277
+ * try:
+ * self.stream_name = stream.name
+ * except AttributeError: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<file>'
+ */
+ __pyx_t_6 = PyErr_ExceptionMatches(__pyx_builtin_AttributeError);
+ if (__pyx_t_6) {
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_7, &__pyx_t_5) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L17_except_error;}
+ __Pyx_GOTREF(__pyx_t_8);
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":278
+ * self.stream_name = stream.name
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<file>'
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":279
+ * except AttributeError:
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<file>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<file>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_file);
+ __Pyx_GIVEREF(__pyx_kp_s_file);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_file;
+ goto __pyx_L25;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":281
+ * self.stream_name = '<file>'
+ * else:
+ * self.stream_name = u'<file>' # <<<<<<<<<<<<<<
+ * self.stream_cache = None
+ * self.stream_cache_len = 0
+ */
+ __Pyx_INCREF(__pyx_kp_u_file);
+ __Pyx_GIVEREF(__pyx_kp_u_file);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_file;
+ }
+ __pyx_L25:;
+ __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ goto __pyx_L16_exception_handled;
+ }
+ goto __pyx_L17_except_error;
+ __pyx_L17_except_error:;
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_3, __pyx_t_2);
+ goto __pyx_L1_error;
+ __pyx_L16_exception_handled:;
+ __Pyx_XGIVEREF(__pyx_t_4);
+ __Pyx_XGIVEREF(__pyx_t_3);
+ __Pyx_XGIVEREF(__pyx_t_2);
+ __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_3, __pyx_t_2);
+ __pyx_L22_try_end:;
+ }
+
+ /* "_yaml.pyx":282
+ * else:
+ * self.stream_name = u'<file>'
+ * self.stream_cache = None # <<<<<<<<<<<<<<
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->stream_cache);
+ __Pyx_DECREF(__pyx_v_self->stream_cache);
+ __pyx_v_self->stream_cache = Py_None;
+
+ /* "_yaml.pyx":283
+ * self.stream_name = u'<file>'
+ * self.stream_cache = None
+ * self.stream_cache_len = 0 # <<<<<<<<<<<<<<
+ * self.stream_cache_pos = 0
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ */
+ __pyx_v_self->stream_cache_len = 0;
+
+ /* "_yaml.pyx":284
+ * self.stream_cache = None
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0 # <<<<<<<<<<<<<<
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ * else:
+ */
+ __pyx_v_self->stream_cache_pos = 0;
+
+ /* "_yaml.pyx":285
+ * self.stream_cache_len = 0
+ * self.stream_cache_pos = 0
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self) # <<<<<<<<<<<<<<
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0:
+ */
+ yaml_parser_set_input((&__pyx_v_self->parser), __pyx_f_5_yaml_input_handler, ((void *)__pyx_v_self));
+ goto __pyx_L14;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":287
+ * yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0: # <<<<<<<<<<<<<<
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = ((PyUnicode_CheckExact(__pyx_v_stream) != 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":288
+ * else:
+ * if PyUnicode_CheckExact(stream) != 0:
+ * stream = PyUnicode_AsUTF8String(stream) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<unicode string>'
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_stream); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 288; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_stream, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":289
+ * if PyUnicode_CheckExact(stream) != 0:
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<unicode string>'
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":290
+ * stream = PyUnicode_AsUTF8String(stream)
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<unicode string>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<unicode string>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_unicode_string);
+ __Pyx_GIVEREF(__pyx_kp_s_unicode_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_unicode_string;
+ goto __pyx_L27;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":292
+ * self.stream_name = '<unicode string>'
+ * else:
+ * self.stream_name = u'<unicode string>' # <<<<<<<<<<<<<<
+ * self.unicode_source = 1
+ * else:
+ */
+ __Pyx_INCREF(__pyx_kp_u_unicode_string);
+ __Pyx_GIVEREF(__pyx_kp_u_unicode_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_unicode_string;
+ }
+ __pyx_L27:;
+
+ /* "_yaml.pyx":293
+ * else:
+ * self.stream_name = u'<unicode string>'
+ * self.unicode_source = 1 # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_self->unicode_source = 1;
+ goto __pyx_L26;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":295
+ * self.unicode_source = 1
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * self.stream_name = '<byte string>'
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":296
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * self.stream_name = '<byte string>' # <<<<<<<<<<<<<<
+ * else:
+ * self.stream_name = u'<byte string>'
+ */
+ __Pyx_INCREF(__pyx_kp_s_byte_string);
+ __Pyx_GIVEREF(__pyx_kp_s_byte_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_s_byte_string;
+ goto __pyx_L28;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":298
+ * self.stream_name = '<byte string>'
+ * else:
+ * self.stream_name = u'<byte string>' # <<<<<<<<<<<<<<
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_INCREF(__pyx_kp_u_byte_string);
+ __Pyx_GIVEREF(__pyx_kp_u_byte_string);
+ __Pyx_GOTREF(__pyx_v_self->stream_name);
+ __Pyx_DECREF(__pyx_v_self->stream_name);
+ __pyx_v_self->stream_name = __pyx_kp_u_byte_string;
+ }
+ __pyx_L28:;
+ }
+ __pyx_L26:;
+
+ /* "_yaml.pyx":299
+ * else:
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required")
+ */
+ __pyx_t_1 = ((PyString_CheckExact(__pyx_v_stream) == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":300
+ * self.stream_name = u'<byte string>'
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string or stream input is required")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":301
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":303
+ * raise TypeError("a string or stream input is required")
+ * else:
+ * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":304
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":305
+ * raise TypeError(u"a string or stream input is required")
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) # <<<<<<<<<<<<<<
+ * self.current_token = None
+ * self.current_event = None
+ */
+ yaml_parser_set_input_string((&__pyx_v_self->parser), PyString_AS_STRING(__pyx_v_stream), PyString_GET_SIZE(__pyx_v_stream));
+ }
+ __pyx_L14:;
+
+ /* "_yaml.pyx":306
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None # <<<<<<<<<<<<<<
+ * self.current_event = None
+ * self.anchors = {}
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = Py_None;
+
+ /* "_yaml.pyx":307
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ * self.current_token = None
+ * self.current_event = None # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ *
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = Py_None;
+
+ /* "_yaml.pyx":308
+ * self.current_token = None
+ * self.current_event = None
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ *
+ * def __dealloc__(self):
+ */
+ __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 308; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":262
+ * cdef int unicode_source
+ *
+ * def __init__(self, stream): # <<<<<<<<<<<<<<
+ * cdef is_readable
+ * if yaml_parser_initialize(&self.parser) == 0:
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_AddTraceback("_yaml.CParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = -1;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_is_readable);
+ __Pyx_XDECREF(__pyx_v_stream);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
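+
+/* Summary of the constructor above: if `stream` exposes .read it is
+ * treated as a file-like object and libyaml pulls data on demand
+ * through the input_handler callback (yaml_parser_set_input);
+ * otherwise a unicode string is UTF-8-encoded first, and the resulting
+ * byte string is handed to libyaml in one piece via
+ * yaml_parser_set_input_string. Hypothetical calls exercising the two
+ * paths:
+ *     CParser(open('doc.yaml', 'rb'))   # readable stream -> callback input
+ *     CParser(u'a: 1')                  # unicode -> encoded, whole-buffer input
+ */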
+
+/* "_yaml.pyx":310
+ * self.anchors = {}
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+/* Python wrapper */
+static void __pyx_pw_5_yaml_7CParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/
+static void __pyx_pw_5_yaml_7CParser_3__dealloc__(PyObject *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
+ __pyx_pf_5_yaml_7CParser_2__dealloc__(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+static void __pyx_pf_5_yaml_7CParser_2__dealloc__(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__", 0);
+
+ /* "_yaml.pyx":311
+ *
+ * def __dealloc__(self):
+ * yaml_parser_delete(&self.parser) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ *
+ */
+ yaml_parser_delete((&__pyx_v_self->parser));
+
+ /* "_yaml.pyx":312
+ * def __dealloc__(self):
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ *
+ * def dispose(self):
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":310
+ * self.anchors = {}
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_parser_delete(&self.parser)
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
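+
+/* __dealloc__ tears down what __init__ acquired: yaml_parser_delete
+ * frees the libyaml parser state and yaml_event_delete releases
+ * whatever event is still held in parsed_event.
+ */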
+
+/* "_yaml.pyx":314
+ * yaml_event_delete(&self.parsed_event)
+ *
+ * def dispose(self): # <<<<<<<<<<<<<<
+ * pass
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_4dispose(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose", 0);
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
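+
+/* dispose() is an intentional no-op; the pure-Python parser uses it to
+ * break reference cycles, and the C parser seemingly keeps the method
+ * only so both implementations expose the same interface.
+ */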
+
+/* "_yaml.pyx":317
+ * pass
+ *
+ * cdef object _parser_error(self): # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__parser_error(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_context_mark = NULL;
+ PyObject *__pyx_v_problem_mark = NULL;
+ PyObject *__pyx_v_context = NULL;
+ PyObject *__pyx_v_problem = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_parser_error", 0);
+
+ /* "_yaml.pyx":327
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \ # <<<<<<<<<<<<<<
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None
+ */
+ switch (__pyx_v_self->parser.error) {
+
+ /* "_yaml.pyx":318
+ *
+ * cdef object _parser_error(self):
+ * if self.parser.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ */
+ case YAML_MEMORY_ERROR:
+
+ /* "_yaml.pyx":319
+ * cdef object _parser_error(self):
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError # <<<<<<<<<<<<<<
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_builtin_MemoryError);
+ __pyx_r = __pyx_builtin_MemoryError;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":320
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ */
+ case YAML_READER_ERROR:
+
+ /* "_yaml.pyx":321
+ * return MemoryError
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, '?', self.parser.problem)
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":322
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_ReaderError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":323
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, '?', self.parser.problem) # <<<<<<<<<<<<<<
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 323; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 323; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":322
+ * elif self.parser.error == YAML_READER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ */
+ __pyx_t_6 = PyTuple_New(5); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_kp_s__3);
+ PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_kp_s__3);
+ __Pyx_GIVEREF(__pyx_kp_s__3);
+ PyTuple_SET_ITEM(__pyx_t_6, 4, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_r = __pyx_t_5;
+ __pyx_t_5 = 0;
+ goto __pyx_L0;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":325
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ReaderError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":326
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset,
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) # <<<<<<<<<<<<<<
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ * or self.parser.error == YAML_PARSER_ERROR:
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 326; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 326; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":325
+ * self.parser.problem_value, '?', self.parser.problem)
+ * else:
+ * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<<
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ */
+ __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_kp_u__3);
+ PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_kp_u__3);
+ __Pyx_GIVEREF(__pyx_kp_u__3);
+ PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __pyx_t_6 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ }
+ break;
+
+ /* "_yaml.pyx":328
+ * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ * or self.parser.error == YAML_PARSER_ERROR: # <<<<<<<<<<<<<<
+ * context_mark = None
+ * problem_mark = None
+ */
+ case YAML_SCANNER_ERROR:
+ case YAML_PARSER_ERROR:
+
+ /* "_yaml.pyx":329
+ * elif self.parser.error == YAML_SCANNER_ERROR \
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None # <<<<<<<<<<<<<<
+ * problem_mark = None
+ * if self.parser.context != NULL:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_context_mark = Py_None;
+
+ /* "_yaml.pyx":330
+ * or self.parser.error == YAML_PARSER_ERROR:
+ * context_mark = None
+ * problem_mark = None # <<<<<<<<<<<<<<
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name,
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_problem_mark = Py_None;
+
+ /* "_yaml.pyx":331
+ * context_mark = None
+ * problem_mark = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index,
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.context != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":333
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_self->parser.context_mark.index); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 333; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":334
+ * context_mark = Mark(self.stream_name,
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL:
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parser.context_mark.line); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 334; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":335
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None) # <<<<<<<<<<<<<<
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name,
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parser.context_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 335; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":332
+ * problem_mark = None
+ * if self.parser.context != NULL:
+ * context_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parser.context_mark.index,
+ * self.parser.context_mark.line,
+ */
+ __pyx_t_2 = PyTuple_New(6); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 332; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_2, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_2, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_4 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_2, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 332; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF_SET(__pyx_v_context_mark, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":336
+ * self.parser.context_mark.line,
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL: # <<<<<<<<<<<<<<
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index,
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.problem != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":338
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None)
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_mark.index); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 338; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":339
+ * problem_mark = Mark(self.stream_name,
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.column, None, None)
+ * context = None
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 339; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":340
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None) # <<<<<<<<<<<<<<
+ * context = None
+ * if self.parser.context != NULL:
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parser.problem_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 340; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":337
+ * self.parser.context_mark.column, None, None)
+ * if self.parser.problem != NULL:
+ * problem_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parser.problem_mark.index,
+ * self.parser.problem_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 337; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_5 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 337; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF_SET(__pyx_v_problem_mark, __pyx_t_3);
+ __pyx_t_3 = 0;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":341
+ * self.parser.problem_mark.line,
+ * self.parser.problem_mark.column, None, None)
+ * context = None # <<<<<<<<<<<<<<
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_context = Py_None;
+
+ /* "_yaml.pyx":342
+ * self.parser.problem_mark.column, None, None)
+ * context = None
+ * if self.parser.context != NULL: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * context = self.parser.context
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.context != NULL) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":343
+ * context = None
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * context = self.parser.context
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":344
+ * if self.parser.context != NULL:
+ * if PY_MAJOR_VERSION < 3:
+ * context = self.parser.context # <<<<<<<<<<<<<<
+ * else:
+ * context = PyUnicode_FromString(self.parser.context)
+ */
+ __pyx_t_3 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 344; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_context, __pyx_t_3);
+ __pyx_t_3 = 0;
+ goto __pyx_L7;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":346
+ * context = self.parser.context
+ * else:
+ * context = PyUnicode_FromString(self.parser.context) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.parser.problem
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 346; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_context, __pyx_t_3);
+ __pyx_t_3 = 0;
+ }
+ __pyx_L7:;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":347
+ * else:
+ * context = PyUnicode_FromString(self.parser.context)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.parser.problem
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":348
+ * context = PyUnicode_FromString(self.parser.context)
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.parser.problem # <<<<<<<<<<<<<<
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem)
+ */
+ __pyx_t_3 = __Pyx_PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 348; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_problem = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L8;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":350
+ * problem = self.parser.problem
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem) # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_SCANNER_ERROR:
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 350; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_problem = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L8:;
+
+ /* "_yaml.pyx":351
+ * else:
+ * problem = PyUnicode_FromString(self.parser.problem)
+ * if self.parser.error == YAML_SCANNER_ERROR: # <<<<<<<<<<<<<<
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_self->parser.error == YAML_SCANNER_ERROR) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":352
+ * problem = PyUnicode_FromString(self.parser.problem)
+ * if self.parser.error == YAML_SCANNER_ERROR:
+ * return ScannerError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<<
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScannerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_context);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_context);
+ __Pyx_GIVEREF(__pyx_v_context);
+ __Pyx_INCREF(__pyx_v_context_mark);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_context_mark);
+ __Pyx_GIVEREF(__pyx_v_context_mark);
+ __Pyx_INCREF(__pyx_v_problem);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_problem);
+ __Pyx_GIVEREF(__pyx_v_problem);
+ __Pyx_INCREF(__pyx_v_problem_mark);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_problem_mark);
+ __Pyx_GIVEREF(__pyx_v_problem_mark);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":354
+ * return ScannerError(context, context_mark, problem, problem_mark)
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error")
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_ParserError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_context);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_context);
+ __Pyx_GIVEREF(__pyx_v_context);
+ __Pyx_INCREF(__pyx_v_context_mark);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_context_mark);
+ __Pyx_GIVEREF(__pyx_v_context_mark);
+ __Pyx_INCREF(__pyx_v_problem);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_problem);
+ __Pyx_GIVEREF(__pyx_v_problem);
+ __Pyx_INCREF(__pyx_v_problem_mark);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_problem_mark);
+ __Pyx_GIVEREF(__pyx_v_problem_mark);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":355
+ * else:
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no parser error")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":356
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no parser error")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":358
+ * raise ValueError("no parser error")
+ * else:
+ * raise ValueError(u"no parser error") # <<<<<<<<<<<<<<
+ *
+ * def raw_scan(self):
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":317
+ * pass
+ *
+ * cdef object _parser_error(self): # <<<<<<<<<<<<<<
+ * if self.parser.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("_yaml.CParser._parser_error", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_context_mark);
+ __Pyx_XDECREF(__pyx_v_problem_mark);
+ __Pyx_XDECREF(__pyx_v_context);
+ __Pyx_XDECREF(__pyx_v_problem);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
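+
+/* A note on the generated exit pattern, repeated at the end of every
+ * function in this file: __pyx_L1_error drops whatever temporaries
+ * (__pyx_t_N) are still live, records the failing C and Python line
+ * via __Pyx_AddTraceback, and reports failure by returning 0/NULL;
+ * __pyx_L0 is the normal path that decrefs locals, hands ownership of
+ * the result back to the caller (__Pyx_XGIVEREF), and closes the
+ * refnanny context. __Pyx_GOTREF/__Pyx_GIVEREF are refnanny-only
+ * bookkeeping and compile to nothing in release builds. */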
+
+/* "_yaml.pyx":360
+ * raise ValueError(u"no parser error")
+ *
+ * def raw_scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * cdef int done
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_7raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_7raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("raw_scan (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_6raw_scan(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
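+
+/* Cython splits each def method in two: a thin Python-callable wrapper
+ * (__pyx_pw_*) that merely casts self to the extension-type struct,
+ * and the implementation function (__pyx_pf_*) holding the body. */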
+
+static PyObject *__pyx_pf_5_yaml_7CParser_6raw_scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_token_t __pyx_v_token;
+ int __pyx_v_done;
+ int __pyx_v_count;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("raw_scan", 0);
+
+ /* "_yaml.pyx":364
+ * cdef int done
+ * cdef int count
+ * count = 0 # <<<<<<<<<<<<<<
+ * done = 0
+ * while done == 0:
+ */
+ __pyx_v_count = 0;
+
+ /* "_yaml.pyx":365
+ * cdef int count
+ * count = 0
+ * done = 0 # <<<<<<<<<<<<<<
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+ __pyx_v_done = 0;
+
+ /* "_yaml.pyx":366
+ * count = 0
+ * done = 0
+ * while done == 0: # <<<<<<<<<<<<<<
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ */
+ while (1) {
+ __pyx_t_1 = ((__pyx_v_done == 0) != 0);
+ if (!__pyx_t_1) break;
+
+ /* "_yaml.pyx":367
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_scan((&__pyx_v_self->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 367; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":368
+ * while done == 0:
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * if token.type == YAML_NO_TOKEN:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 368; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":369
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * if token.type == YAML_NO_TOKEN:
+ * done = 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":370
+ * error = self._parser_error()
+ * raise error
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_token.type == YAML_NO_TOKEN) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":371
+ * raise error
+ * if token.type == YAML_NO_TOKEN:
+ * done = 1 # <<<<<<<<<<<<<<
+ * else:
+ * count = count+1
+ */
+ __pyx_v_done = 1;
+ goto __pyx_L6;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":373
+ * done = 1
+ * else:
+ * count = count+1 # <<<<<<<<<<<<<<
+ * yaml_token_delete(&token)
+ * return count
+ */
+ __pyx_v_count = (__pyx_v_count + 1);
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":374
+ * else:
+ * count = count+1
+ * yaml_token_delete(&token) # <<<<<<<<<<<<<<
+ * return count
+ *
+ */
+ yaml_token_delete((&__pyx_v_token));
+ }
+
+ /* "_yaml.pyx":375
+ * count = count+1
+ * yaml_token_delete(&token)
+ * return count # <<<<<<<<<<<<<<
+ *
+ * cdef object _scan(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_count); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 375; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":360
+ * raise ValueError(u"no parser error")
+ *
+ * def raw_scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * cdef int done
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.raw_scan", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
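+
+/* raw_scan drains the libyaml scanner: yaml_parser_scan returns 0 on
+ * failure (converted into the exception built by _parser_error above),
+ * and each token produced is released with yaml_token_delete; only the
+ * running count is kept. The PyErr_Occurred check after the plain C
+ * call suggests yaml_parser_scan is declared `except *` in the
+ * accompanying .pxd; that declaration is not part of this hunk, so
+ * treat it as an assumption. Rough usage sketch (assuming the
+ * CParser(stream) constructor defined earlier in this file):
+ *
+ *     CParser(open("doc.yaml")).raw_scan()  # -> token count
+ */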
+
+/* "_yaml.pyx":377
+ * return count
+ *
+ * cdef object _scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_token_t __pyx_v_token;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_token_object = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_scan", 0);
+
+ /* "_yaml.pyx":379
+ * cdef object _scan(self):
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_parser_scan((&__pyx_v_self->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 379; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":380
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * token_object = self._token_to_object(&token)
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 380; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":381
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 381; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":382
+ * error = self._parser_error()
+ * raise error
+ * token_object = self._token_to_object(&token) # <<<<<<<<<<<<<<
+ * yaml_token_delete(&token)
+ * return token_object
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_token_to_object(__pyx_v_self, (&__pyx_v_token)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 382; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_token_object = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":383
+ * raise error
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token) # <<<<<<<<<<<<<<
+ * return token_object
+ *
+ */
+ yaml_token_delete((&__pyx_v_token));
+
+ /* "_yaml.pyx":384
+ * token_object = self._token_to_object(&token)
+ * yaml_token_delete(&token)
+ * return token_object # <<<<<<<<<<<<<<
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_token_object);
+ __pyx_r = __pyx_v_token_object;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":377
+ * return count
+ *
+ * cdef object _scan(self): # <<<<<<<<<<<<<<
+ * cdef yaml_token_t token
+ * if yaml_parser_scan(&self.parser, &token) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._scan", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_token_object);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
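+
+/* _scan yields one token per call: scan, raise on failure, convert the
+ * C token into its Python Token object, then free the C side with
+ * yaml_token_delete. Deleting is safe because _token_to_object (below)
+ * copies everything it needs out of the yaml_token_t, via
+ * PyUnicode_FromString and the boxed mark integers, before returning. */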
+
+/* "_yaml.pyx":386
+ * return token_object
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__token_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_token_t *__pyx_v_token) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_encoding = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_suffix = NULL;
+ int __pyx_v_plain;
+ PyObject *__pyx_v_style = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_token_to_object", 0);
+
+ /* "_yaml.pyx":388
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index, # <<<<<<<<<<<<<<
+ * token.start_mark.line,
+ * token.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_token->start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 388; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":389
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ * token.start_mark.line, # <<<<<<<<<<<<<<
+ * token.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_token->start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 389; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":390
+ * token.start_mark.index,
+ * token.start_mark.line,
+ * token.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_token->start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 390; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":387
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * token.start_mark.index,
+ * token.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 387; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 387; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":393
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * token.end_mark.index, # <<<<<<<<<<<<<<
+ * token.end_mark.line,
+ * token.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_token->end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 393; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":394
+ * end_mark = Mark(self.stream_name,
+ * token.end_mark.index,
+ * token.end_mark.line, # <<<<<<<<<<<<<<
+ * token.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_token->end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 394; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":395
+ * token.end_mark.index,
+ * token.end_mark.line,
+ * token.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if token.type == YAML_NO_TOKEN:
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_token->end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 395; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":392
+ * token.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * token.end_mark.index,
+ * token.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 392; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 392; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
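+
+ /* Both Mark objects are built from libyaml's start/end positions
+ * before the dispatch on token.type. The "_yaml.pyx":459 comment just
+ * below precedes the switch even though it belongs to a later branch;
+ * this looks like a quirk of Cython compiling the whole if/elif chain
+ * on token.type into one C switch, not a reordering of the logic. */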
+
+ /* "_yaml.pyx":459
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ * token.data.scalar.length, 'strict')
+ */
+ switch (__pyx_v_token->type) {
+
+ /* "_yaml.pyx":397
+ * token.end_mark.column,
+ * None, None)
+ * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<<
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ */
+ case YAML_NO_TOKEN:
+
+ /* "_yaml.pyx":398
+ * None, None)
+ * if token.type == YAML_NO_TOKEN:
+ * return None # <<<<<<<<<<<<<<
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_None);
+ __pyx_r = Py_None;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":399
+ * if token.type == YAML_NO_TOKEN:
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN: # <<<<<<<<<<<<<<
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ */
+ case YAML_STREAM_START_TOKEN:
+
+ /* "_yaml.pyx":400
+ * return None
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None # <<<<<<<<<<<<<<
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_encoding = Py_None;
+
+ /* "_yaml.pyx":401
+ * elif token.type == YAML_STREAM_START_TOKEN:
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ __pyx_t_5 = ((__pyx_v_token->data.stream_start.encoding == YAML_UTF8_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":402
+ * encoding = None
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_self->unicode_source == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":403
+ * if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8" # <<<<<<<<<<<<<<
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_8);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_8);
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":404
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_token->data.stream_start.encoding == YAML_UTF16LE_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":405
+ * encoding = u"utf-8"
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le" # <<<<<<<<<<<<<<
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_le);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_le);
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":406
+ * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ */
+ __pyx_t_5 = ((__pyx_v_token->data.stream_start.encoding == YAML_UTF16BE_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":407
+ * encoding = u"utf-16-le"
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be" # <<<<<<<<<<<<<<
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_be);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_be);
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":408
+ * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamStartToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_encoding);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
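+
+ /* STREAM_START carries the detected encoding: YAML_UTF16LE/BE map
+ * unconditionally to u"utf-16-le"/u"utf-16-be", while the UTF-8 case
+ * is additionally gated on self.unicode_source == 0, presumably so
+ * the codec is only reported when the input arrived as raw bytes;
+ * otherwise the token's encoding stays None. */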
+
+ /* "_yaml.pyx":409
+ * encoding = u"utf-16-be"
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN: # <<<<<<<<<<<<<<
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ */
+ case YAML_STREAM_END_TOKEN:
+
+ /* "_yaml.pyx":410
+ * return StreamStartToken(start_mark, end_mark, encoding)
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":411
+ * elif token.type == YAML_STREAM_END_TOKEN:
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<<
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major,
+ */
+ case YAML_VERSION_DIRECTIVE_TOKEN:
+
+ /* "_yaml.pyx":412
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML", # <<<<<<<<<<<<<<
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor),
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":413
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major, # <<<<<<<<<<<<<<
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_token->data.version_directive.major); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 413; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":414
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor), # <<<<<<<<<<<<<<
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_token->data.version_directive.minor); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":413
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML",
+ * (token.data.version_directive.major, # <<<<<<<<<<<<<<
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ */
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 413; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __pyx_t_1 = 0;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":412
+ * return StreamEndToken(start_mark, end_mark)
+ * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ * return DirectiveToken(u"YAML", # <<<<<<<<<<<<<<
+ * (token.data.version_directive.major,
+ * token.data.version_directive.minor),
+ */
+ __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_n_u_YAML);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_n_u_YAML);
+ __Pyx_GIVEREF(__pyx_n_u_YAML);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 3, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
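+
+ /* %YAML directive: major/minor are read straight from
+ * token.data.version_directive and boxed into a 2-tuple, yielding
+ * DirectiveToken(u"YAML", (major, minor), start_mark, end_mark) as in
+ * the .pyx source quoted above. */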
+
+ /* "_yaml.pyx":416
+ * token.data.version_directive.minor),
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ */
+ case YAML_TAG_DIRECTIVE_TOKEN:
+
+ /* "_yaml.pyx":417
+ * start_mark, end_mark)
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle) # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 417; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_handle = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":418
+ * elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) # <<<<<<<<<<<<<<
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark)
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.prefix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 418; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_prefix = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":419
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix), # <<<<<<<<<<<<<<
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_handle);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_handle);
+ __Pyx_GIVEREF(__pyx_v_handle);
+ __Pyx_INCREF(__pyx_v_prefix);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_prefix);
+ __Pyx_GIVEREF(__pyx_v_prefix);
+
+ /* "_yaml.pyx":420
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark)
+ */
+ __pyx_t_2 = PyTuple_New(4); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_n_u_TAG);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_n_u_TAG);
+ __Pyx_GIVEREF(__pyx_n_u_TAG);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 3, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":419
+ * handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ * prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ * return DirectiveToken(u"TAG", (handle, prefix), # <<<<<<<<<<<<<<
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
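+
+ /* %TAG directive: handle and prefix are NUL-terminated strings from
+ * libyaml, decoded with PyUnicode_FromString (which expects UTF-8;
+ * libyaml keeps its strings in UTF-8 internally, so this holds). */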
+
+ /* "_yaml.pyx":421
+ * return DirectiveToken(u"TAG", (handle, prefix),
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN: # <<<<<<<<<<<<<<
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ */
+ case YAML_DOCUMENT_START_TOKEN:
+
+ /* "_yaml.pyx":422
+ * start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentStartToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
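+
+ /* This case and every one down to YAML_VALUE_TOKEN are instances of
+ * a single generated pattern: resolve the token class via
+ * __Pyx_GetModuleGlobalName, pack (start_mark, end_mark) into an
+ * argument tuple, call it, and jump to the shared exit. The break
+ * after each goto __pyx_L0 is unreachable but harmless codegen. */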
+
+ /* "_yaml.pyx":423
+ * elif token.type == YAML_DOCUMENT_START_TOKEN:
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN: # <<<<<<<<<<<<<<
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ */
+ case YAML_DOCUMENT_END_TOKEN:
+
+ /* "_yaml.pyx":424
+ * return DocumentStartToken(start_mark, end_mark)
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentEndToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":425
+ * elif token.type == YAML_DOCUMENT_END_TOKEN:
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ */
+ case YAML_BLOCK_SEQUENCE_START_TOKEN:
+
+ /* "_yaml.pyx":426
+ * return DocumentEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_BlockSequenceStartToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":427
+ * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ */
+ case YAML_BLOCK_MAPPING_START_TOKEN:
+
+ /* "_yaml.pyx":428
+ * return BlockSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_BlockMappingStartToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":429
+ * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ */
+ case YAML_BLOCK_END_TOKEN:
+
+ /* "_yaml.pyx":430
+ * return BlockMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_BlockEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":431
+ * elif token.type == YAML_BLOCK_END_TOKEN:
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ */
+ case YAML_FLOW_SEQUENCE_START_TOKEN:
+
+ /* "_yaml.pyx":432
+ * return BlockEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_FlowSequenceStartToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":433
+ * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ */
+ case YAML_FLOW_SEQUENCE_END_TOKEN:
+
+ /* "_yaml.pyx":434
+ * return FlowSequenceStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_FlowSequenceEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":435
+ * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ */
+ case YAML_FLOW_MAPPING_START_TOKEN:
+
+ /* "_yaml.pyx":436
+ * return FlowSequenceEndToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_FlowMappingStartToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":437
+ * elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ */
+ case YAML_FLOW_MAPPING_END_TOKEN:
+
+ /* "_yaml.pyx":438
+ * return FlowMappingStartToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_FlowMappingEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":439
+ * elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN: # <<<<<<<<<<<<<<
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ */
+ case YAML_BLOCK_ENTRY_TOKEN:
+
+ /* "_yaml.pyx":440
+ * return FlowMappingEndToken(start_mark, end_mark)
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_BlockEntryToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":441
+ * elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN: # <<<<<<<<<<<<<<
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN:
+ */
+ case YAML_FLOW_ENTRY_TOKEN:
+
+ /* "_yaml.pyx":442
+ * return BlockEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_FlowEntryToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":443
+ * elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN: # <<<<<<<<<<<<<<
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN:
+ */
+ case YAML_KEY_TOKEN:
+
+ /* "_yaml.pyx":444
+ * return FlowEntryToken(start_mark, end_mark)
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_KeyToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":445
+ * elif token.type == YAML_KEY_TOKEN:
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN: # <<<<<<<<<<<<<<
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN:
+ */
+ case YAML_VALUE_TOKEN:
+
+ /* "_yaml.pyx":446
+ * return KeyToken(start_mark, end_mark)
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_ValueToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":447
+ * elif token.type == YAML_VALUE_TOKEN:
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark)
+ */
+ case YAML_ALIAS_TOKEN:
+
+ /* "_yaml.pyx":448
+ * return ValueToken(start_mark, end_mark)
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value) # <<<<<<<<<<<<<<
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.alias.value); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 448; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":449
+ * elif token.type == YAML_ALIAS_TOKEN:
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_AliasToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_2, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":450
+ * value = PyUnicode_FromString(token.data.alias.value)
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark)
+ */
+ case YAML_ANCHOR_TOKEN:
+
+ /* "_yaml.pyx":451
+ * return AliasToken(value, start_mark, end_mark)
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value) # <<<<<<<<<<<<<<
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN:
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_token->data.anchor.value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 451; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_value = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":452
+ * elif token.type == YAML_ANCHOR_TOKEN:
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_AnchorToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":453
+ * value = PyUnicode_FromString(token.data.anchor.value)
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ */
+ case YAML_TAG_TOKEN:
+
+ /* "_yaml.pyx":454
+ * return AnchorToken(value, start_mark, end_mark)
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle) # <<<<<<<<<<<<<<
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle:
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag.handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 454; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_handle = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":455
+ * elif token.type == YAML_TAG_TOKEN:
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix) # <<<<<<<<<<<<<<
+ * if not handle:
+ * handle = None
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag.suffix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_suffix = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":456
+ * handle = PyUnicode_FromString(token.data.tag.handle)
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle: # <<<<<<<<<<<<<<
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ */
+ __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_handle); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 456; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_6 = ((!__pyx_t_5) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":457
+ * suffix = PyUnicode_FromString(token.data.tag.suffix)
+ * if not handle:
+ * handle = None # <<<<<<<<<<<<<<
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN:
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_DECREF_SET(__pyx_v_handle, Py_None);
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":458
+ * if not handle:
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif token.type == YAML_SCALAR_TOKEN:
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_TagToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_handle);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_handle);
+ __Pyx_GIVEREF(__pyx_v_handle);
+ __Pyx_INCREF(__pyx_v_suffix);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_suffix);
+ __Pyx_GIVEREF(__pyx_v_suffix);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":459
+ * handle = None
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN: # <<<<<<<<<<<<<<
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ * token.data.scalar.length, 'strict')
+ */
+ case YAML_SCALAR_TOKEN:
+
+ /* "_yaml.pyx":460
+ * return TagToken((handle, suffix), start_mark, end_mark)
+ * elif token.type == YAML_SCALAR_TOKEN:
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value, # <<<<<<<<<<<<<<
+ * token.data.scalar.length, 'strict')
+ * plain = False
+ */
+ __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_token->data.scalar.value, __pyx_v_token->data.scalar.length, __pyx_k_strict); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 460; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":462
+ * value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ * token.data.scalar.length, 'strict')
+ * plain = False # <<<<<<<<<<<<<<
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ __pyx_v_plain = 0;
+
+ /* "_yaml.pyx":463
+ * token.data.scalar.length, 'strict')
+ * plain = False
+ * style = None # <<<<<<<<<<<<<<
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = Py_None;
+
+ /* "_yaml.pyx":464
+ * plain = False
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * plain = True
+ * style = u''
+ */
+ __pyx_t_6 = ((__pyx_v_token->data.scalar.style == YAML_PLAIN_SCALAR_STYLE) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":465
+ * style = None
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True # <<<<<<<<<<<<<<
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ __pyx_v_plain = 1;
+
+ /* "_yaml.pyx":466
+ * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * plain = True
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":467
+ * plain = True
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ __pyx_t_6 = ((__pyx_v_token->data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":468
+ * style = u''
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":469
+ * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ __pyx_t_6 = ((__pyx_v_token->data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":470
+ * style = u'\''
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":471
+ * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ __pyx_t_6 = ((__pyx_v_token->data.scalar.style == YAML_LITERAL_SCALAR_STYLE) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":472
+ * style = u'"'
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":473
+ * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * return ScalarToken(value, plain,
+ */
+ __pyx_t_6 = ((__pyx_v_token->data.scalar.style == YAML_FOLDED_SCALAR_STYLE) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":474
+ * style = u'|'
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * return ScalarToken(value, plain,
+ * start_mark, end_mark, style)
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":475
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarToken(value, plain, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, style)
+ * else:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":476
+ * style = u'>'
+ * return ScalarToken(value, plain,
+ * start_mark, end_mark, style) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":475
+ * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarToken(value, plain, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, style)
+ * else:
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+ default:
+
+ /* "_yaml.pyx":478
+ * start_mark, end_mark, style)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown token type")
+ * else:
+ */
+ __pyx_t_6 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":479
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown token type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown token type")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":481
+ * raise ValueError("unknown token type")
+ * else:
+ * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<<
+ *
+ * def get_token(self):
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":386
+ * return token_object
+ *
+ * cdef object _token_to_object(self, yaml_token_t *token): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * token.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CParser._token_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_encoding);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_suffix);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
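+
+/* A Python-level summary of the scalar branches above (style names quoted
+ * from _yaml.pyx; the dict form is an illustrative sketch, not generated
+ * code):
+ *
+ *     STYLE_MAP = {YAML_PLAIN_SCALAR_STYLE:         u'',
+ *                  YAML_SINGLE_QUOTED_SCALAR_STYLE: u'\'',
+ *                  YAML_DOUBLE_QUOTED_SCALAR_STYLE: u'"',
+ *                  YAML_LITERAL_SCALAR_STYLE:       u'|',
+ *                  YAML_FOLDED_SCALAR_STYLE:        u'>'}
+ *
+ * plain is True only for the plain style; any token type not handled by a
+ * case above falls through to ValueError("unknown token type").
+ */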
+
+/* "_yaml.pyx":483
+ * raise ValueError(u"unknown token type")
+ *
+ * def get_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is not None:
+ * value = self.current_token
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_9get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_9get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_token (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_8get_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
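+
+/* Each def method is split into a thin Python wrapper (__pyx_pw_*) that
+ * checks and unpacks arguments and an implementation (__pyx_pf_*) holding
+ * the body; the __Pyx_RefNanny* calls compile to no-ops unless Cython's
+ * refcount-debugging mode (CYTHON_REFNANNY) is enabled.
+ */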
+
+static PyObject *__pyx_pf_5_yaml_7CParser_8get_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_token", 0);
+
+ /* "_yaml.pyx":484
+ *
+ * def get_token(self):
+ * if self.current_token is not None: # <<<<<<<<<<<<<<
+ * value = self.current_token
+ * self.current_token = None
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token != Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":485
+ * def get_token(self):
+ * if self.current_token is not None:
+ * value = self.current_token # <<<<<<<<<<<<<<
+ * self.current_token = None
+ * else:
+ */
+ __pyx_t_3 = __pyx_v_self->current_token;
+ __Pyx_INCREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":486
+ * if self.current_token is not None:
+ * value = self.current_token
+ * self.current_token = None # <<<<<<<<<<<<<<
+ * else:
+ * value = self._scan()
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = Py_None;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":488
+ * self.current_token = None
+ * else:
+ * value = self._scan() # <<<<<<<<<<<<<<
+ * return value
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 488; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":489
+ * else:
+ * value = self._scan()
+ * return value # <<<<<<<<<<<<<<
+ *
+ * def peek_token(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_value);
+ __pyx_r = __pyx_v_value;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":483
+ * raise ValueError(u"unknown token type")
+ *
+ * def get_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is not None:
+ * value = self.current_token
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":491
+ * return value
+ *
+ * def peek_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_11peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_11peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("peek_token (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_10peek_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_10peek_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("peek_token", 0);
+
+ /* "_yaml.pyx":492
+ *
+ * def peek_token(self):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * return self.current_token
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":493
+ * def peek_token(self):
+ * if self.current_token is None:
+ * self.current_token = self._scan() # <<<<<<<<<<<<<<
+ * return self.current_token
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 493; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":494
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ * return self.current_token # <<<<<<<<<<<<<<
+ *
+ * def check_token(self, *choices):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->current_token);
+ __pyx_r = __pyx_v_self->current_token;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":491
+ * return value
+ *
+ * def peek_token(self): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.peek_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
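+
+/* get_token() and peek_token() share a one-token lookahead buffer,
+ * self.current_token. Reassembling the _yaml.pyx fragments quoted above
+ * gives this minimal sketch of the protocol:
+ *
+ *     def get_token(self):                  # consume the buffered token...
+ *         if self.current_token is not None:
+ *             value = self.current_token
+ *             self.current_token = None
+ *         else:                             # ...or scan a fresh one
+ *             value = self._scan()
+ *         return value
+ *
+ *     def peek_token(self):                 # fill the buffer, don't consume
+ *         if self.current_token is None:
+ *             self.current_token = self._scan()
+ *         return self.current_token
+ */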
+
+/* "_yaml.pyx":496
+ * return self.current_token
+ *
+ * def check_token(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_13check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_13check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_choices = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_token (wrapper)", 0);
+ if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_token", 0))) return NULL;
+ __Pyx_INCREF(__pyx_args);
+ __pyx_v_choices = __pyx_args;
+ __pyx_r = __pyx_pf_5_yaml_7CParser_12check_token(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_choices);
+
+ /* function exit code */
+ __Pyx_XDECREF(__pyx_v_choices);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_12check_token(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices) {
+ PyObject *__pyx_v_token_class = NULL;
+ PyObject *__pyx_v_choice = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ Py_ssize_t __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("check_token", 0);
+
+ /* "_yaml.pyx":497
+ *
+ * def check_token(self, *choices):
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * self.current_token = self._scan()
+ * if self.current_token is None:
+ */
+ __pyx_t_1 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":498
+ * def check_token(self, *choices):
+ * if self.current_token is None:
+ * self.current_token = self._scan() # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * return False
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_scan(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 498; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_token);
+ __Pyx_DECREF(__pyx_v_self->current_token);
+ __pyx_v_self->current_token = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":499
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ * if self.current_token is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ __pyx_t_2 = (__pyx_v_self->current_token == Py_None);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":500
+ * self.current_token = self._scan()
+ * if self.current_token is None:
+ * return False # <<<<<<<<<<<<<<
+ * if not choices:
+ * return True
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":501
+ * if self.current_token is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * token_class = self.current_token.__class__
+ */
+ __pyx_t_1 = (__pyx_v_choices != Py_None) && (PyTuple_GET_SIZE(__pyx_v_choices) != 0);
+ __pyx_t_2 = ((!__pyx_t_1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":502
+ * return False
+ * if not choices:
+ * return True # <<<<<<<<<<<<<<
+ * token_class = self.current_token.__class__
+ * for choice in choices:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":503
+ * if not choices:
+ * return True
+ * token_class = self.current_token.__class__ # <<<<<<<<<<<<<<
+ * for choice in choices:
+ * if token_class is choice:
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->current_token, __pyx_n_s_class); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 503; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_token_class = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":504
+ * return True
+ * token_class = self.current_token.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if token_class is choice:
+ * return True
+ */
+ __pyx_t_3 = __pyx_v_choices; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0;
+ for (;;) {
+ if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 504; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 504; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_choice, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":505
+ * token_class = self.current_token.__class__
+ * for choice in choices:
+ * if token_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = (__pyx_v_token_class == __pyx_v_choice);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":506
+ * for choice in choices:
+ * if token_class is choice:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":507
+ * if token_class is choice:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def raw_parse(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":496
+ * return self.current_token
+ *
+ * def check_token(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_token is None:
+ * self.current_token = self._scan()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.CParser.check_token", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_token_class);
+ __Pyx_XDECREF(__pyx_v_choice);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
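+
+/* check_token() tests the buffered token's class by identity
+ * (token_class is choice), mirroring the pure-Python scanner. A hedged
+ * usage sketch, assuming the standard PyYAML token classes:
+ *
+ *     if parser.check_token(ScalarToken, AliasToken):
+ *         token = parser.get_token()
+ *
+ * Called with no arguments it only reports whether any token remains.
+ */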
+
+/* "_yaml.pyx":509
+ * return False
+ *
+ * def raw_parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int done
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_15raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_15raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("raw_parse (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_14raw_parse(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_14raw_parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ int __pyx_v_done;
+ int __pyx_v_count;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("raw_parse", 0);
+
+ /* "_yaml.pyx":513
+ * cdef int done
+ * cdef int count
+ * count = 0 # <<<<<<<<<<<<<<
+ * done = 0
+ * while done == 0:
+ */
+ __pyx_v_count = 0;
+
+ /* "_yaml.pyx":514
+ * cdef int count
+ * count = 0
+ * done = 0 # <<<<<<<<<<<<<<
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+ __pyx_v_done = 0;
+
+ /* "_yaml.pyx":515
+ * count = 0
+ * done = 0
+ * while done == 0: # <<<<<<<<<<<<<<
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ */
+ while (1) {
+ __pyx_t_1 = ((__pyx_v_done == 0) != 0);
+ if (!__pyx_t_1) break;
+
+ /* "_yaml.pyx":516
+ * done = 0
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 516; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":517
+ * while done == 0:
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * if event.type == YAML_NO_EVENT:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 517; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":518
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * if event.type == YAML_NO_EVENT:
+ * done = 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 518; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":519
+ * error = self._parser_error()
+ * raise error
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * done = 1
+ * else:
+ */
+ __pyx_t_1 = ((__pyx_v_event.type == YAML_NO_EVENT) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":520
+ * raise error
+ * if event.type == YAML_NO_EVENT:
+ * done = 1 # <<<<<<<<<<<<<<
+ * else:
+ * count = count+1
+ */
+ __pyx_v_done = 1;
+ goto __pyx_L6;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":522
+ * done = 1
+ * else:
+ * count = count+1 # <<<<<<<<<<<<<<
+ * yaml_event_delete(&event)
+ * return count
+ */
+ __pyx_v_count = (__pyx_v_count + 1);
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":523
+ * else:
+ * count = count+1
+ * yaml_event_delete(&event) # <<<<<<<<<<<<<<
+ * return count
+ *
+ */
+ yaml_event_delete((&__pyx_v_event));
+ }
+
+ /* "_yaml.pyx":524
+ * count = count+1
+ * yaml_event_delete(&event)
+ * return count # <<<<<<<<<<<<<<
+ *
+ * cdef object _parse(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_count); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 524; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":509
+ * return False
+ *
+ * def raw_parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int done
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.raw_parse", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
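+
+/* raw_parse() is the event-counting form of the canonical libyaml loop.
+ * A minimal C sketch of that loop (assumes an initialized yaml_parser_t
+ * named parser and an error path; not part of the generated module):
+ *
+ *     yaml_event_t event;
+ *     int done = 0, count = 0;
+ *     while (!done) {
+ *         if (yaml_parser_parse(&parser, &event) == 0)
+ *             goto error;                  // parser error, raised above
+ *         done = (event.type == YAML_NO_EVENT);
+ *         if (!done)
+ *             count++;
+ *         yaml_event_delete(&event);       // every parsed event is released
+ *     }
+ */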
+
+/* "_yaml.pyx":526
+ * return count
+ *
+ * cdef object _parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_event_object = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_parse", 0);
+
+ /* "_yaml.pyx":528
+ * cdef object _parse(self):
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 528; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":529
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * event_object = self._event_to_object(&event)
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 529; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":530
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 530; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":531
+ * error = self._parser_error()
+ * raise error
+ * event_object = self._event_to_object(&event) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&event)
+ * return event_object
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_event_to_object(__pyx_v_self, (&__pyx_v_event)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 531; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_event_object = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":532
+ * raise error
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event) # <<<<<<<<<<<<<<
+ * return event_object
+ *
+ */
+ yaml_event_delete((&__pyx_v_event));
+
+ /* "_yaml.pyx":533
+ * event_object = self._event_to_object(&event)
+ * yaml_event_delete(&event)
+ * return event_object # <<<<<<<<<<<<<<
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_event_object);
+ __pyx_r = __pyx_v_event_object;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":526
+ * return count
+ *
+ * cdef object _parse(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if yaml_parser_parse(&self.parser, &event) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._parse", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_event_object);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
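+
+/* _parse() is the single-event variant of the loop above: one
+ * yaml_parser_parse() call, a raised parser error on failure, conversion
+ * through _event_to_object(), and a yaml_event_delete() before the Python
+ * event object is returned.
+ */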
+
+/* "_yaml.pyx":535
+ * return event_object
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event): # <<<<<<<<<<<<<<
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__event_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_event_t *__pyx_v_event) {
+ yaml_tag_directive_t *__pyx_v_tag_directive;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_encoding = NULL;
+ int __pyx_v_explicit;
+ PyObject *__pyx_v_version = NULL;
+ PyObject *__pyx_v_tags = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_anchor = NULL;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ PyObject *__pyx_v_style = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ yaml_tag_directive_t *__pyx_t_6;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_event_to_object", 0);
+
+ /* "_yaml.pyx":538
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ * event.start_mark.index, # <<<<<<<<<<<<<<
+ * event.start_mark.line,
+ * event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_event->start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 538; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":539
+ * start_mark = Mark(self.stream_name,
+ * event.start_mark.index,
+ * event.start_mark.line, # <<<<<<<<<<<<<<
+ * event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_event->start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 539; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":540
+ * event.start_mark.index,
+ * event.start_mark.line,
+ * event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_event->start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 540; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":537
+ * cdef object _event_to_object(self, yaml_event_t *event):
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * event.start_mark.index,
+ * event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 537; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 537; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":543
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * event.end_mark.index, # <<<<<<<<<<<<<<
+ * event.end_mark.line,
+ * event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_event->end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 543; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":544
+ * end_mark = Mark(self.stream_name,
+ * event.end_mark.index,
+ * event.end_mark.line, # <<<<<<<<<<<<<<
+ * event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_event->end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 544; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":545
+ * event.end_mark.index,
+ * event.end_mark.line,
+ * event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if event.type == YAML_NO_EVENT:
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_event->end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 545; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":542
+ * event.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * event.end_mark.index,
+ * event.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 542; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 542; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
+
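+ /* Cython compiles the if/elif chain on event.type into one C switch; the
+ * source comment directly below therefore quotes the last tested branch
+ * (_yaml.pyx:653) even though it appears before all of the cases. */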
+ /* "_yaml.pyx":653
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<<
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ */
+ switch (__pyx_v_event->type) {
+
+ /* "_yaml.pyx":547
+ * event.end_mark.column,
+ * None, None)
+ * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT:
+ */
+ case YAML_NO_EVENT:
+
+ /* "_yaml.pyx":548
+ * None, None)
+ * if event.type == YAML_NO_EVENT:
+ * return None # <<<<<<<<<<<<<<
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_None);
+ __pyx_r = Py_None;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":549
+ * if event.type == YAML_NO_EVENT:
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<<
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ */
+ case YAML_STREAM_START_EVENT:
+
+ /* "_yaml.pyx":550
+ * return None
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None # <<<<<<<<<<<<<<
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_encoding = Py_None;
+
+ /* "_yaml.pyx":551
+ * elif event.type == YAML_STREAM_START_EVENT:
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<<
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.stream_start.encoding == YAML_UTF8_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":552
+ * encoding = None
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0: # <<<<<<<<<<<<<<
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_self->unicode_source == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":553
+ * if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8" # <<<<<<<<<<<<<<
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_8);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_8);
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":554
+ * if self.unicode_source == 0:
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.stream_start.encoding == YAML_UTF16LE_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":555
+ * encoding = u"utf-8"
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le" # <<<<<<<<<<<<<<
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_le);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_le);
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":556
+ * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<<
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.stream_start.encoding == YAML_UTF16BE_ENCODING) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":557
+ * encoding = u"utf-16-le"
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be" # <<<<<<<<<<<<<<
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT:
+ */
+ __Pyx_INCREF(__pyx_kp_u_utf_16_be);
+ __Pyx_DECREF_SET(__pyx_v_encoding, __pyx_kp_u_utf_16_be);
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":558
+ * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_encoding);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":559
+ * encoding = u"utf-16-be"
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ */
+ case YAML_STREAM_END_EVENT:
+
+ /* "_yaml.pyx":560
+ * return StreamStartEvent(start_mark, end_mark, encoding)
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":561
+ * elif event.type == YAML_STREAM_END_EVENT:
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT: # <<<<<<<<<<<<<<
+ * explicit = False
+ * if event.data.document_start.implicit == 0:
+ */
+ case YAML_DOCUMENT_START_EVENT:
+
+ /* "_yaml.pyx":562
+ * return StreamEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False # <<<<<<<<<<<<<<
+ * if event.data.document_start.implicit == 0:
+ * explicit = True
+ */
+ __pyx_v_explicit = 0;
+
+ /* "_yaml.pyx":563
+ * elif event.type == YAML_DOCUMENT_START_EVENT:
+ * explicit = False
+ * if event.data.document_start.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * version = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.implicit == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":564
+ * explicit = False
+ * if event.data.document_start.implicit == 0:
+ * explicit = True # <<<<<<<<<<<<<<
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ */
+ __pyx_v_explicit = 1;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":565
+ * if event.data.document_start.implicit == 0:
+ * explicit = True
+ * version = None # <<<<<<<<<<<<<<
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major,
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_version = Py_None;
+
+ /* "_yaml.pyx":566
+ * explicit = True
+ * version = None
+ * if event.data.document_start.version_directive != NULL: # <<<<<<<<<<<<<<
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor)
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.version_directive != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":567
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major, # <<<<<<<<<<<<<<
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_event->data.document_start.version_directive->major); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 567; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":568
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor) # <<<<<<<<<<<<<<
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL:
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_event->data.document_start.version_directive->minor); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 568; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":567
+ * version = None
+ * if event.data.document_start.version_directive != NULL:
+ * version = (event.data.document_start.version_directive.major, # <<<<<<<<<<<<<<
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ */
+ __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 567; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __pyx_t_2 = 0;
+ __pyx_t_1 = 0;
+ __Pyx_DECREF_SET(__pyx_v_version, __pyx_t_4);
+ __pyx_t_4 = 0;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":569
+ * version = (event.data.document_start.version_directive.major,
+ * event.data.document_start.version_directive.minor)
+ * tags = None # <<<<<<<<<<<<<<
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {}
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tags = Py_None;
+
+ /* "_yaml.pyx":570
+ * event.data.document_start.version_directive.minor)
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL: # <<<<<<<<<<<<<<
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_start.tag_directives.start != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":571
+ * tags = None
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {} # <<<<<<<<<<<<<<
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ */
+ __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 571; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_tags, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":572
+ * if event.data.document_start.tag_directives.start != NULL:
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start # <<<<<<<<<<<<<<
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ */
+ __pyx_t_6 = __pyx_v_event->data.document_start.tag_directives.start;
+ __pyx_v_tag_directive = __pyx_t_6;
+
+ /* "_yaml.pyx":573
+ * tags = {}
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end: # <<<<<<<<<<<<<<
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ */
+ while (1) {
+ __pyx_t_5 = ((__pyx_v_tag_directive != __pyx_v_event->data.document_start.tag_directives.end) != 0);
+ if (!__pyx_t_5) break;
+
+ /* "_yaml.pyx":574
+ * tag_directive = event.data.document_start.tag_directives.start
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle) # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->handle); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 574; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":575
+ * while tag_directive != event.data.document_start.tag_directives.end:
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix) # <<<<<<<<<<<<<<
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1
+ */
+ __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->prefix); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 575; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":576
+ * handle = PyUnicode_FromString(tag_directive.handle)
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix # <<<<<<<<<<<<<<
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark,
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_tags, __pyx_v_handle, __pyx_v_prefix) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 576; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":577
+ * prefix = PyUnicode_FromString(tag_directive.prefix)
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1 # <<<<<<<<<<<<<<
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags)
+ */
+ __pyx_v_tag_directive = (__pyx_v_tag_directive + 1);
+ }
+ goto __pyx_L7;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":578
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark, # <<<<<<<<<<<<<<
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":579
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False
+ */
+ __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 579; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":578
+ * tags[handle] = prefix
+ * tag_directive = tag_directive+1
+ * return DocumentStartEvent(start_mark, end_mark, # <<<<<<<<<<<<<<
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ */
+ __pyx_t_2 = PyTuple_New(5); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_version);
+ PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_version);
+ __Pyx_GIVEREF(__pyx_v_version);
+ __Pyx_INCREF(__pyx_v_tags);
+ PyTuple_SET_ITEM(__pyx_t_2, 4, __pyx_v_tags);
+ __Pyx_GIVEREF(__pyx_v_tags);
+ __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_2, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":580
+ * return DocumentStartEvent(start_mark, end_mark,
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT: # <<<<<<<<<<<<<<
+ * explicit = False
+ * if event.data.document_end.implicit == 0:
+ */
+ case YAML_DOCUMENT_END_EVENT:
+
+ /* "_yaml.pyx":581
+ * explicit, version, tags)
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False # <<<<<<<<<<<<<<
+ * if event.data.document_end.implicit == 0:
+ * explicit = True
+ */
+ __pyx_v_explicit = 0;
+
+ /* "_yaml.pyx":582
+ * elif event.type == YAML_DOCUMENT_END_EVENT:
+ * explicit = False
+ * if event.data.document_end.implicit == 0: # <<<<<<<<<<<<<<
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.document_end.implicit == 0) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":583
+ * explicit = False
+ * if event.data.document_end.implicit == 0:
+ * explicit = True # <<<<<<<<<<<<<<
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT:
+ */
+ __pyx_v_explicit = 1;
+ goto __pyx_L10;
+ }
+ __pyx_L10:;
+
+ /* "_yaml.pyx":584
+ * if event.data.document_end.implicit == 0:
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":585
+ * explicit = True
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark)
+ */
+ case YAML_ALIAS_EVENT:
+
+ /* "_yaml.pyx":586
+ * return DocumentEndEvent(start_mark, end_mark, explicit)
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor) # <<<<<<<<<<<<<<
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_event->data.alias.anchor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 586; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_anchor = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":587
+ * elif event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":588
+ * anchor = PyUnicode_FromString(event.data.alias.anchor)
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.scalar.anchor != NULL:
+ */
+ case YAML_SCALAR_EVENT:
+
+ /* "_yaml.pyx":589
+ * return AliasEvent(anchor, start_mark, end_mark)
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":590
+ * elif event.type == YAML_SCALAR_EVENT:
+ * anchor = None
+ * if event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":591
+ * anchor = None
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.scalar.tag != NULL:
+ */
+ __pyx_t_1 = PyUnicode_FromString(__pyx_v_event->data.scalar.anchor); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 591; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_1);
+ __pyx_t_1 = 0;
+ goto __pyx_L11;
+ }
+ __pyx_L11:;
+
+ /* "_yaml.pyx":592
+ * if event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":593
+ * anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ * tag = None
+ * if event.data.scalar.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":594
+ * tag = None
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag) # <<<<<<<<<<<<<<
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ * event.data.scalar.length, 'strict')
+ */
+ __pyx_t_1 = PyUnicode_FromString(__pyx_v_event->data.scalar.tag); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 594; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_1);
+ __pyx_t_1 = 0;
+ goto __pyx_L12;
+ }
+ __pyx_L12:;
+
+ /* "_yaml.pyx":595
+ * if event.data.scalar.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.scalar.tag)
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value, # <<<<<<<<<<<<<<
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ */
+ __pyx_t_1 = PyUnicode_DecodeUTF8(__pyx_v_event->data.scalar.value, __pyx_v_event->data.scalar.length, __pyx_k_strict); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 595; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":597
+ * value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False # <<<<<<<<<<<<<<
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":598
+ * event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.plain_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":599
+ * plain_implicit = False
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True # <<<<<<<<<<<<<<
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1:
+ */
+ __pyx_v_plain_implicit = 1;
+ goto __pyx_L13;
+ }
+ __pyx_L13:;
+
+ /* "_yaml.pyx":600
+ * if event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ * quoted_implicit = False # <<<<<<<<<<<<<<
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":601
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.quoted_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":602
+ * quoted_implicit = False
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True # <<<<<<<<<<<<<<
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ __pyx_v_quoted_implicit = 1;
+ goto __pyx_L14;
+ }
+ __pyx_L14:;
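+
+  /* Annotation (not part of the Cython output): plain_implicit and
+   * quoted_implicit are plain C ints at this point; they are lifted to
+   * Python bools only when the (plain_implicit, quoted_implicit) tuple for
+   * ScalarEvent is built further down. */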
+
+ /* "_yaml.pyx":603
+ * if event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * style = None # <<<<<<<<<<<<<<
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = Py_None;
+
+ /* "_yaml.pyx":604
+ * quoted_implicit = True
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.style == YAML_PLAIN_SCALAR_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":605
+ * style = None
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+ goto __pyx_L15;
+ }
+
+ /* "_yaml.pyx":606
+ * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":607
+ * style = u''
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+ goto __pyx_L15;
+ }
+
+ /* "_yaml.pyx":608
+ * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":609
+ * style = u'\''
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+ goto __pyx_L15;
+ }
+
+ /* "_yaml.pyx":610
+ * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.style == YAML_LITERAL_SCALAR_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":611
+ * style = u'"'
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+ goto __pyx_L15;
+ }
+
+ /* "_yaml.pyx":612
+ * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * return ScalarEvent(anchor, tag,
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.scalar.style == YAML_FOLDED_SCALAR_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":613
+ * style = u'|'
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * return ScalarEvent(anchor, tag,
+ * (plain_implicit, quoted_implicit),
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+ goto __pyx_L15;
+ }
+ __pyx_L15:;
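+
+  /* Annotation (not part of the Cython output): the if/elif chain above maps
+   * libyaml's yaml_scalar_style_t values onto the one-character style
+   * strings PyYAML uses for ScalarEvent: u'' (plain), u'\'' (single-quoted),
+   * u'"' (double-quoted), u'|' (literal) and u'>' (folded). Any other style
+   * leaves `style` as None. */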
+
+ /* "_yaml.pyx":614
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarEvent(anchor, tag, # <<<<<<<<<<<<<<
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":615
+ * style = u'>'
+ * return ScalarEvent(anchor, tag,
+ * (plain_implicit, quoted_implicit), # <<<<<<<<<<<<<<
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ */
+ __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":614
+ * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * return ScalarEvent(anchor, tag, # <<<<<<<<<<<<<<
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style)
+ */
+ __pyx_t_2 = PyTuple_New(7); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_INCREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 4, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_2, 5, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_2, 6, __pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_2, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":617
+ * (plain_implicit, quoted_implicit),
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL:
+ */
+ case YAML_SEQUENCE_START_EVENT:
+
+ /* "_yaml.pyx":618
+ * value, start_mark, end_mark, style)
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":619
+ * elif event.type == YAML_SEQUENCE_START_EVENT:
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":620
+ * anchor = None
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.sequence_start.tag != NULL:
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 620; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_3);
+ __pyx_t_3 = 0;
+ goto __pyx_L16;
+ }
+ __pyx_L16:;
+
+ /* "_yaml.pyx":621
+ * if event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":622
+ * anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ * tag = None
+ * if event.data.sequence_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":623
+ * tag = None
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag) # <<<<<<<<<<<<<<
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1:
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.tag); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 623; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_3);
+ __pyx_t_3 = 0;
+ goto __pyx_L17;
+ }
+ __pyx_L17:;
+
+ /* "_yaml.pyx":624
+ * if event.data.sequence_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":625
+ * tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":626
+ * implicit = False
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L18;
+ }
+ __pyx_L18:;
+
+ /* "_yaml.pyx":627
+ * if event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":628
+ * implicit = True
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":629
+ * flow_style = None
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+ goto __pyx_L19;
+ }
+
+ /* "_yaml.pyx":630
+ * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit,
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":631
+ * flow_style = True
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+ goto __pyx_L19;
+ }
+ __pyx_L19:;
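+
+  /* Annotation (not part of the Cython output): flow_style is deliberately
+   * tri-state here: Py_True for flow style, Py_False for block style, and
+   * None when libyaml reported YAML_ANY_SEQUENCE_STYLE. The mapping-start
+   * branch below repeats the same pattern for mapping styles. */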
+
+ /* "_yaml.pyx":632
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":633
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 3, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 4, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":632
+ * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ * return SequenceStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":634
+ * return SequenceStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<<
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL:
+ */
+ case YAML_MAPPING_START_EVENT:
+
+ /* "_yaml.pyx":635
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":636
+ * elif event.type == YAML_MAPPING_START_EVENT:
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.anchor != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":637
+ * anchor = None
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) # <<<<<<<<<<<<<<
+ * tag = None
+ * if event.data.mapping_start.tag != NULL:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_event->data.mapping_start.anchor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 637; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_2);
+ __pyx_t_2 = 0;
+ goto __pyx_L20;
+ }
+ __pyx_L20:;
+
+ /* "_yaml.pyx":638
+ * if event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_tag = Py_None;
+
+ /* "_yaml.pyx":639
+ * anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ * tag = None
+ * if event.data.mapping_start.tag != NULL: # <<<<<<<<<<<<<<
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.tag != NULL) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":640
+ * tag = None
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag) # <<<<<<<<<<<<<<
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_event->data.mapping_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 640; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF_SET(__pyx_v_tag, __pyx_t_2);
+ __pyx_t_2 = 0;
+ goto __pyx_L21;
+ }
+ __pyx_L21:;
+
+ /* "_yaml.pyx":641
+ * if event.data.mapping_start.tag != NULL:
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":642
+ * tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * flow_style = None
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":643
+ * implicit = False
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L22;
+ }
+ __pyx_L22:;
+
+ /* "_yaml.pyx":644
+ * if event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":645
+ * implicit = True
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.style == YAML_FLOW_MAPPING_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":646
+ * flow_style = None
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+ goto __pyx_L23;
+ }
+
+ /* "_yaml.pyx":647
+ * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit,
+ */
+ __pyx_t_5 = ((__pyx_v_event->data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":648
+ * flow_style = True
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+ goto __pyx_L23;
+ }
+ __pyx_L23:;
+
+ /* "_yaml.pyx":649
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":650
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark)
+ */
+ __pyx_t_3 = PyTuple_New(6); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_anchor);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_anchor);
+ __Pyx_GIVEREF(__pyx_v_anchor);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 4, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_3, 5, __pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":649
+ * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ * return MappingStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<<
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":651
+ * return MappingStartEvent(anchor, tag, implicit,
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<<
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ */
+ case YAML_SEQUENCE_END_EVENT:
+
+ /* "_yaml.pyx":652
+ * start_mark, end_mark, flow_style)
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ * return MappingEndEvent(start_mark, end_mark)
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":653
+ * elif event.type == YAML_SEQUENCE_END_EVENT:
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<<
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ */
+ case YAML_MAPPING_END_EVENT:
+
+ /* "_yaml.pyx":654
+ * return SequenceEndEvent(start_mark, end_mark)
+ * elif event.type == YAML_MAPPING_END_EVENT:
+ * return MappingEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 1, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+ break;
+ default:
+
+ /* "_yaml.pyx":656
+ * return MappingEndEvent(start_mark, end_mark)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("unknown event type")
+ * else:
+ */
+ __pyx_t_5 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":657
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown event type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown event type")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":659
+ * raise ValueError("unknown event type")
+ * else:
+ * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<<
+ *
+ * def get_event(self):
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":535
+ * return event_object
+ *
+ * cdef object _event_to_object(self, yaml_event_t *event): # <<<<<<<<<<<<<<
+ * cdef yaml_tag_directive_t *tag_directive
+ * start_mark = Mark(self.stream_name,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CParser._event_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_encoding);
+ __Pyx_XDECREF(__pyx_v_version);
+ __Pyx_XDECREF(__pyx_v_tags);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_anchor);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
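+
+/* Annotation (not part of the Cython output): _event_to_object is the bridge
+ * between libyaml's C event structs and PyYAML's Python event classes: one
+ * switch arm per yaml_event_type_t, each building the constructor argument
+ * tuple for the matching event class and funnelling failures through
+ * __pyx_L1_error, which releases temporaries and records a traceback before
+ * the shared cleanup at __pyx_L0. Unknown event types raise ValueError. */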
+
+/* "_yaml.pyx":661
+ * raise ValueError(u"unknown event type")
+ *
+ * def get_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is not None:
+ * value = self.current_event
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_17get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_17get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_event (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_16get_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_16get_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_event", 0);
+
+ /* "_yaml.pyx":662
+ *
+ * def get_event(self):
+ * if self.current_event is not None: # <<<<<<<<<<<<<<
+ * value = self.current_event
+ * self.current_event = None
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event != Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":663
+ * def get_event(self):
+ * if self.current_event is not None:
+ * value = self.current_event # <<<<<<<<<<<<<<
+ * self.current_event = None
+ * else:
+ */
+ __pyx_t_3 = __pyx_v_self->current_event;
+ __Pyx_INCREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":664
+ * if self.current_event is not None:
+ * value = self.current_event
+ * self.current_event = None # <<<<<<<<<<<<<<
+ * else:
+ * value = self._parse()
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = Py_None;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":666
+ * self.current_event = None
+ * else:
+ * value = self._parse() # <<<<<<<<<<<<<<
+ * return value
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 666; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_value = __pyx_t_3;
+ __pyx_t_3 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":667
+ * else:
+ * value = self._parse()
+ * return value # <<<<<<<<<<<<<<
+ *
+ * def peek_event(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_value);
+ __pyx_r = __pyx_v_value;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":661
+ * raise ValueError(u"unknown event type")
+ *
+ * def get_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is not None:
+ * value = self.current_event
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
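+
+/* Annotation (not part of the Cython output): get_event consumes the
+ * parser's one-event lookahead. If current_event was filled by an earlier
+ * peek_event/check_event, it is returned and the slot reset to None;
+ * otherwise _parse() produces the next event directly. */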
+
+/* "_yaml.pyx":669
+ * return value
+ *
+ * def peek_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_19peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_19peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("peek_event (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_18peek_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_18peek_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("peek_event", 0);
+
+ /* "_yaml.pyx":670
+ *
+ * def peek_event(self):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * return self.current_event
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":671
+ * def peek_event(self):
+ * if self.current_event is None:
+ * self.current_event = self._parse() # <<<<<<<<<<<<<<
+ * return self.current_event
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 671; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":672
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ * return self.current_event # <<<<<<<<<<<<<<
+ *
+ * def check_event(self, *choices):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_self->current_event);
+ __pyx_r = __pyx_v_self->current_event;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":669
+ * return value
+ *
+ * def peek_event(self): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.peek_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
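+
+/* Annotation (not part of the Cython output): peek_event is the
+ * non-consuming counterpart of get_event; it fills current_event on demand
+ * and returns it, so a subsequent get_event yields the same object. */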
+
+/* "_yaml.pyx":674
+ * return self.current_event
+ *
+ * def check_event(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_21check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_21check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_choices = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_event (wrapper)", 0);
+ if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_event", 0))) return NULL;
+ __Pyx_INCREF(__pyx_args);
+ __pyx_v_choices = __pyx_args;
+ __pyx_r = __pyx_pf_5_yaml_7CParser_20check_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self), __pyx_v_choices);
+
+ /* function exit code */
+ __Pyx_XDECREF(__pyx_v_choices);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_20check_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_choices) {
+ PyObject *__pyx_v_event_class = NULL;
+ PyObject *__pyx_v_choice = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ Py_ssize_t __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("check_event", 0);
+
+ /* "_yaml.pyx":675
+ *
+ * def check_event(self, *choices):
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * self.current_event = self._parse()
+ * if self.current_event is None:
+ */
+ __pyx_t_1 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":676
+ * def check_event(self, *choices):
+ * if self.current_event is None:
+ * self.current_event = self._parse() # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * return False
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 676; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_GOTREF(__pyx_v_self->current_event);
+ __Pyx_DECREF(__pyx_v_self->current_event);
+ __pyx_v_self->current_event = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":677
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ * if self.current_event is None: # <<<<<<<<<<<<<<
+ * return False
+ * if not choices:
+ */
+ __pyx_t_2 = (__pyx_v_self->current_event == Py_None);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":678
+ * self.current_event = self._parse()
+ * if self.current_event is None:
+ * return False # <<<<<<<<<<<<<<
+ * if not choices:
+ * return True
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":679
+ * if self.current_event is None:
+ * return False
+ * if not choices: # <<<<<<<<<<<<<<
+ * return True
+ * event_class = self.current_event.__class__
+ */
+ __pyx_t_1 = (__pyx_v_choices != Py_None) && (PyTuple_GET_SIZE(__pyx_v_choices) != 0);
+ __pyx_t_2 = ((!__pyx_t_1) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":680
+ * return False
+ * if not choices:
+ * return True # <<<<<<<<<<<<<<
+ * event_class = self.current_event.__class__
+ * for choice in choices:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":681
+ * if not choices:
+ * return True
+ * event_class = self.current_event.__class__ # <<<<<<<<<<<<<<
+ * for choice in choices:
+ * if event_class is choice:
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->current_event, __pyx_n_s_class); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 681; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_event_class = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":682
+ * return True
+ * event_class = self.current_event.__class__
+ * for choice in choices: # <<<<<<<<<<<<<<
+ * if event_class is choice:
+ * return True
+ */
+ __pyx_t_3 = __pyx_v_choices; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0;
+ for (;;) {
+ if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 682; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 682; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ __Pyx_XDECREF_SET(__pyx_v_choice, __pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":683
+ * event_class = self.current_event.__class__
+ * for choice in choices:
+ * if event_class is choice: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_v_choice);
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":684
+ * for choice in choices:
+ * if event_class is choice:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":685
+ * if event_class is choice:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def check_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":674
+ * return self.current_event
+ *
+ * def check_event(self, *choices): # <<<<<<<<<<<<<<
+ * if self.current_event is None:
+ * self.current_event = self._parse()
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.CParser.check_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_event_class);
+ __Pyx_XDECREF(__pyx_v_choice);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
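+
+/* Annotation (not part of the Cython output): check_event reports whether
+ * the buffered event matches one of *choices by identity comparison of its
+ * __class__. With no arguments it reports whether any event is pending at
+ * all; it returns False when nothing could be buffered. */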
+
+/* "_yaml.pyx":687
+ * return False
+ *
+ * def check_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_23check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_23check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("check_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_22check_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_22check_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("check_node", 0);
+
+ /* "_yaml.pyx":688
+ *
+ * def check_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 688; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":689
+ * def check_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_STREAM_START_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":690
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":691
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 691; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":692
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return True
+ * return False
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":693
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True # <<<<<<<<<<<<<<
+ * return False
+ *
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_True);
+ __pyx_r = Py_True;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":694
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return True
+ * return False # <<<<<<<<<<<<<<
+ *
+ * def get_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(Py_False);
+ __pyx_r = Py_False;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":687
+ * return False
+ *
+ * def check_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_AddTraceback("_yaml.CParser.check_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
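+
+/* check_node() reports whether another document node is available: the first
+ * call consumes the STREAM-START event, and the method keeps returning True
+ * until the parser reaches STREAM-END. */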
+
+/* "_yaml.pyx":696
+ * return False
+ *
+ * def get_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_25get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_25get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_24get_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_24get_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_node", 0);
+
+ /* "_yaml.pyx":697
+ *
+ * def get_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return self._compose_document()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 697; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":698
+ * def get_node(self):
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * return self._compose_document()
+ *
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":699
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * return self._compose_document() # <<<<<<<<<<<<<<
+ *
+ * def get_single_node(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_document(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 699; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_r = __pyx_t_3;
+ __pyx_t_3 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":696
+ * return False
+ *
+ * def get_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser.get_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
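+
+/* get_node() composes and returns the next document in the stream; once
+ * STREAM-END has been reached it falls through to the implicit None return. */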
+
+/* "_yaml.pyx":701
+ * return self._compose_document()
+ *
+ * def get_single_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_7CParser_27get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_7CParser_27get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("get_single_node (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_7CParser_26get_single_node(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_7CParser_26get_single_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_document = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_mark = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("get_single_node", 0);
+
+ /* "_yaml.pyx":702
+ *
+ * def get_single_node(self):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 702; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":703
+ * def get_single_node(self):
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * document = None
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":704
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 704; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":705
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * document = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document()
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_document = Py_None;
+
+ /* "_yaml.pyx":706
+ * self._parse_next_event()
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * document = self._compose_document()
+ * self._parse_next_event()
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":707
+ * document = None
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document() # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_document(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 707; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_document, __pyx_t_3);
+ __pyx_t_3 = 0;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":708
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * document = self._compose_document()
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 708; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":709
+ * document = self._compose_document()
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type != YAML_STREAM_END_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":711
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 711; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":712
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 712; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":713
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 713; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":710
+ * self._parse_next_event()
+ * if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_6 = PyTuple_New(6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 710; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_6, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_6, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 710; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":715
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("expected a single document in the stream",
+ * document.start_mark, "but found another document", mark)
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":716
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, "but found another document", mark)
+ * else:
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":717
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("expected a single document in the stream",
+ * document.start_mark, "but found another document", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(u"expected a single document in the stream",
+ */
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_document, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 717; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":716
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, "but found another document", mark)
+ * else:
+ */
+ __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_kp_s_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_kp_s_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_kp_s_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ __Pyx_INCREF(__pyx_kp_s_but_found_another_document);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_kp_s_but_found_another_document);
+ __Pyx_GIVEREF(__pyx_kp_s_but_found_another_document);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_4, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = 0;
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_4, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":719
+ * document.start_mark, "but found another document", mark)
+ * else:
+ * raise ComposerError(u"expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, u"but found another document", mark)
+ * return document
+ */
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":720
+ * else:
+ * raise ComposerError(u"expected a single document in the stream",
+ * document.start_mark, u"but found another document", mark) # <<<<<<<<<<<<<<
+ * return document
+ *
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_document, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 720; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":719
+ * document.start_mark, "but found another document", mark)
+ * else:
+ * raise ComposerError(u"expected a single document in the stream", # <<<<<<<<<<<<<<
+ * document.start_mark, u"but found another document", mark)
+ * return document
+ */
+ __pyx_t_5 = PyTuple_New(4); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_INCREF(__pyx_kp_u_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_kp_u_expected_a_single_document_in_th);
+ __Pyx_GIVEREF(__pyx_kp_u_expected_a_single_document_in_th);
+ PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_kp_u_but_found_another_document);
+ PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_u_but_found_another_document);
+ __Pyx_GIVEREF(__pyx_kp_u_but_found_another_document);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_5, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_4 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":721
+ * raise ComposerError(u"expected a single document in the stream",
+ * document.start_mark, u"but found another document", mark)
+ * return document # <<<<<<<<<<<<<<
+ *
+ * cdef object _compose_document(self):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_document);
+ __pyx_r = __pyx_v_document;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":701
+ * return self._compose_document()
+ *
+ * def get_single_node(self): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("_yaml.CParser.get_single_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_document);
+ __Pyx_XDECREF((PyObject *)__pyx_v_mark);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
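+
+/* get_single_node() expects exactly one document: it skips STREAM-START,
+ * composes the document (if any), and raises ComposerError if a second
+ * document follows, building a Mark from libyaml's start_mark to point at
+ * the offending event. This is the entry point used by PyYAML's C-based
+ * Loader classes for single-document loads. */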
+
+/* "_yaml.pyx":723
+ * return document
+ *
+ * cdef object _compose_document(self): # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_document(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_compose_document", 0);
+
+ /* "_yaml.pyx":724
+ *
+ * cdef object _compose_document(self):
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * node = self._compose_node(None, None)
+ * self._parse_next_event()
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":725
+ * cdef object _compose_document(self):
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, Py_None, Py_None); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 725; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_node = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":726
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {}
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_2 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 726; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":727
+ * node = self._compose_node(None, None)
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * return node
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":728
+ * self._parse_next_event()
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 728; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":729
+ * yaml_event_delete(&self.parsed_event)
+ * self.anchors = {}
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef object _compose_node(self, object parent, object index):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":723
+ * return document
+ *
+ * cdef object _compose_document(self): # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * node = self._compose_node(None, None)
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_AddTraceback("_yaml.CParser._compose_document", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
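+
+/* _compose_document() releases the already-parsed DOCUMENT-START event,
+ * composes the root node, parses and releases the matching DOCUMENT-END
+ * event, and clears the anchor map so anchors do not leak between documents
+ * in the same stream. */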
+
+/* "_yaml.pyx":731
+ * return node
+ *
+ * cdef object _compose_node(self, object parent, object index): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) {
+ PyObject *__pyx_v_anchor = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_mark = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_compose_node", 0);
+
+ /* "_yaml.pyx":732
+ *
+ * cdef object _compose_node(self, object parent, object index):
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 732; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":733
+ * cdef object _compose_node(self, object parent, object index):
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors:
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_ALIAS_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":734
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) # <<<<<<<<<<<<<<
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.alias.anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 734; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_anchor = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":735
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = (__Pyx_PySequence_Contains(__pyx_v_anchor, __pyx_v_self->anchors, Py_NE)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 735; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":737
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 737; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":738
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 738; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":739
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 739; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":736
+ * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ * if anchor not in self.anchors:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_7 = PyTuple_New(6); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 736; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_7, 3, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 736; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_6);
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":741
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError(None, None, "found undefined alias", mark)
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":742
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError(None, None, "found undefined alias", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ */
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_7 = PyTuple_New(4); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(__pyx_kp_s_found_undefined_alias);
+ PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_kp_s_found_undefined_alias);
+ __Pyx_GIVEREF(__pyx_kp_s_found_undefined_alias);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_7, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":744
+ * raise ComposerError(None, None, "found undefined alias", mark)
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark) # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor]
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_7 = PyTuple_New(4); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(__pyx_kp_u_found_undefined_alias);
+ PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_kp_u_found_undefined_alias);
+ __Pyx_GIVEREF(__pyx_kp_u_found_undefined_alias);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_7, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":745
+ * else:
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return self.anchors[anchor]
+ * anchor = None
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":746
+ * raise ComposerError(None, None, u"found undefined alias", mark)
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor] # <<<<<<<<<<<<<<
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_6 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(__pyx_t_6 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 746; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_r = __pyx_t_6;
+ __pyx_t_6 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "_yaml.pyx":747
+ * yaml_event_delete(&self.parsed_event)
+ * return self.anchors[anchor]
+ * anchor = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_anchor = Py_None;
+
+ /* "_yaml.pyx":748
+ * return self.anchors[anchor]
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ */
+ __pyx_t_4 = ((__pyx_v_self->parsed_event.type == YAML_SCALAR_EVENT) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":749
+ * anchor = None
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.data.scalar.anchor != NULL) != 0);
+ __pyx_t_8 = __pyx_t_2;
+ } else {
+ __pyx_t_8 = __pyx_t_4;
+ }
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":750
+ * if self.parsed_event.type == YAML_SCALAR_EVENT \
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 750; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":751
+ * and self.parsed_event.data.scalar.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ */
+ __pyx_t_8 = ((__pyx_v_self->parsed_event.type == YAML_SEQUENCE_START_EVENT) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":752
+ * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ */
+ __pyx_t_4 = ((__pyx_v_self->parsed_event.data.sequence_start.anchor != NULL) != 0);
+ __pyx_t_2 = __pyx_t_4;
+ } else {
+ __pyx_t_2 = __pyx_t_8;
+ }
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":753
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 753; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+ goto __pyx_L6;
+ }
+
+ /* "_yaml.pyx":754
+ * and self.parsed_event.data.sequence_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ */
+ __pyx_t_2 = ((__pyx_v_self->parsed_event.type == YAML_MAPPING_START_EVENT) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":755
+ * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<<
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None:
+ */
+ __pyx_t_8 = ((__pyx_v_self->parsed_event.data.mapping_start.anchor != NULL) != 0);
+ __pyx_t_4 = __pyx_t_8;
+ } else {
+ __pyx_t_4 = __pyx_t_2;
+ }
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":756
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * if anchor in self.anchors:
+ */
+ __pyx_t_6 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 756; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_anchor, __pyx_t_6);
+ __pyx_t_6 = 0;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":757
+ * and self.parsed_event.data.mapping_start.anchor != NULL:
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name,
+ */
+ __pyx_t_4 = (__pyx_v_anchor != Py_None);
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":758
+ * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ * if anchor is not None:
+ * if anchor in self.anchors: # <<<<<<<<<<<<<<
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+ __pyx_t_2 = (__Pyx_PySequence_Contains(__pyx_v_anchor, __pyx_v_self->anchors, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 758; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":760
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 760; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+
+ /* "_yaml.pyx":761
+ * mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 761; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":762
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 762; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":759
+ * if anchor is not None:
+ * if anchor in self.anchors:
+ * mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_3 = PyTuple_New(6); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 759; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_3, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_6 = 0;
+ __pyx_t_7 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 759; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5);
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":764
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ComposerError("found duplicate anchor; first occurence",
+ * self.anchors[anchor].start_mark, "second occurence", mark)
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":765
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("found duplicate anchor; first occurence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, "second occurence", mark)
+ * else:
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+
+ /* "_yaml.pyx":766
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("found duplicate anchor; first occurence",
+ * self.anchors[anchor].start_mark, "second occurence", mark) # <<<<<<<<<<<<<<
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurence",
+ */
+ __pyx_t_3 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(__pyx_t_3 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 766; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 766; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":765
+ * None, None)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ComposerError("found duplicate anchor; first occurence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, "second occurence", mark)
+ * else:
+ */
+ __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_kp_s_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_kp_s_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_kp_s_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_7);
+ __Pyx_INCREF(__pyx_kp_s_second_occurence);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_kp_s_second_occurence);
+ __Pyx_GIVEREF(__pyx_kp_s_second_occurence);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_7 = 0;
+ __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_3, NULL); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_7, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":768
+ * self.anchors[anchor].start_mark, "second occurence", mark)
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, u"second occurence", mark)
+ * self.descend_resolver(parent, index)
+ */
+ __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_ComposerError); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+
+ /* "_yaml.pyx":769
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurence",
+ * self.anchors[anchor].start_mark, u"second occurence", mark) # <<<<<<<<<<<<<<
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ */
+ __pyx_t_3 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (unlikely(__pyx_t_3 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_start_mark); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":768
+ * self.anchors[anchor].start_mark, "second occurence", mark)
+ * else:
+ * raise ComposerError(u"found duplicate anchor; first occurence", # <<<<<<<<<<<<<<
+ * self.anchors[anchor].start_mark, u"second occurence", mark)
+ * self.descend_resolver(parent, index)
+ */
+ __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_kp_u_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_kp_u_found_duplicate_anchor_first_occ);
+ __Pyx_GIVEREF(__pyx_kp_u_found_duplicate_anchor_first_occ);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_INCREF(__pyx_kp_u_second_occurence);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_kp_u_second_occurence);
+ __Pyx_GIVEREF(__pyx_kp_u_second_occurence);
+ __Pyx_INCREF(((PyObject *)__pyx_v_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_mark));
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+ goto __pyx_L7;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":770
+ * raise ComposerError(u"found duplicate anchor; first occurence",
+ * self.anchors[anchor].start_mark, u"second occurence", mark)
+ * self.descend_resolver(parent, index) # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_descend_resolver); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_parent);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_parent);
+ __Pyx_GIVEREF(__pyx_v_parent);
+ __Pyx_INCREF(__pyx_v_index);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_index);
+ __Pyx_GIVEREF(__pyx_v_index);
+ __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_3, NULL); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+
+ /* "_yaml.pyx":775
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver()
+ */
+ switch (__pyx_v_self->parsed_event.type) {
+
+ /* "_yaml.pyx":771
+ * self.anchors[anchor].start_mark, u"second occurence", mark)
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ */
+ case YAML_SCALAR_EVENT:
+
+ /* "_yaml.pyx":772
+ * self.descend_resolver(parent, index)
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor)
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_scalar_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_v_node = __pyx_t_7;
+ __pyx_t_7 = 0;
+ break;
+
+ /* "_yaml.pyx":773
+ * if self.parsed_event.type == YAML_SCALAR_EVENT:
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ */
+ case YAML_SEQUENCE_START_EVENT:
+
+ /* "_yaml.pyx":774
+ * node = self._compose_scalar_node(anchor)
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor) # <<<<<<<<<<<<<<
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor)
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_sequence_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 774; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_v_node = __pyx_t_7;
+ __pyx_t_7 = 0;
+ break;
+
+ /* "_yaml.pyx":775
+ * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<<
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver()
+ */
+ case YAML_MAPPING_START_EVENT:
+
+ /* "_yaml.pyx":776
+ * node = self._compose_sequence_node(anchor)
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor) # <<<<<<<<<<<<<<
+ * self.ascend_resolver()
+ * return node
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_mapping_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 776; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_v_node = __pyx_t_7;
+ __pyx_t_7 = 0;
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":777
+ * elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver() # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_ascend_resolver); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 777; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 777; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":778
+ * node = self._compose_mapping_node(anchor)
+ * self.ascend_resolver()
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_scalar_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ if (unlikely(!__pyx_v_node)) { __Pyx_RaiseUnboundLocalError("node"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 778; __pyx_clineno = __LINE__; goto __pyx_L1_error;} }
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":731
+ * return node
+ *
+ * cdef object _compose_node(self, object parent, object index): # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * if self.parsed_event.type == YAML_ALIAS_EVENT:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_AddTraceback("_yaml.CParser._compose_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_anchor);
+ __Pyx_XDECREF((PyObject *)__pyx_v_mark);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
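+
+/* _compose_node() resolves alias events against the anchor map (raising
+ * ComposerError for an undefined alias or a duplicate anchor), extracts any
+ * anchor declared on the current event, and then dispatches on the event
+ * type to the scalar, sequence, or mapping composer. */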
+
+/* "_yaml.pyx":780
+ * return node
+ *
+ * cdef _compose_scalar_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_scalar_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_style = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_t_7;
+ int __pyx_t_8;
+ PyObject *__pyx_t_9 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_compose_scalar_node", 0);
+
+ /* "_yaml.pyx":782
+ * cdef _compose_scalar_node(self, object anchor):
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 782; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":783
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 783; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":784
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 784; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":781
+ *
+ * cdef _compose_scalar_node(self, object anchor):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":787
+ * None, None)
+ * end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 787; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":788
+ * end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 788; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+
+ /* "_yaml.pyx":789
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 789; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":786
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 786; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 786; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":791
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, # <<<<<<<<<<<<<<
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ */
+ __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_self->parsed_event.data.scalar.value, __pyx_v_self->parsed_event.data.scalar.length, __pyx_k_strict); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 791; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":793
+ * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":794
+ * self.parsed_event.data.scalar.length, 'strict')
+ * plain_implicit = False
+ * if self.parsed_event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<<
+ * plain_implicit = True
+ * quoted_implicit = False
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.scalar.plain_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":795
+ * plain_implicit = False
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True # <<<<<<<<<<<<<<
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ */
+ __pyx_v_plain_implicit = 1;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":796
+ * if self.parsed_event.data.scalar.plain_implicit == 1:
+ * plain_implicit = True
+ * quoted_implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":797
+ * plain_implicit = True
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<<
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.scalar.quoted_implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":798
+ * quoted_implicit = False
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ */
+ __pyx_v_quoted_implicit = 1;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":799
+ * if self.parsed_event.data.scalar.quoted_implicit == 1:
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.scalar.tag == NULL) != 0);
+ if (!__pyx_t_5) {
+
+ /* "_yaml.pyx":800
+ * quoted_implicit = True
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.scalar.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":801
+ * if self.parsed_event.data.scalar.tag == NULL \
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ * else:
+ */
+ __pyx_t_7 = (((__pyx_v_self->parsed_event.data.scalar.tag[1]) == '\x00') != 0);
+ __pyx_t_8 = __pyx_t_7;
+ } else {
+ __pyx_t_8 = __pyx_t_6;
+ }
+ __pyx_t_6 = __pyx_t_8;
+ } else {
+ __pyx_t_6 = __pyx_t_5;
+ }
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":802
+ * or (self.parsed_event.data.scalar.tag[0] == c'!'
+ * and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ */
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_9 = PyTuple_New(2); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_9, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __pyx_t_4 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_9);
+ __Pyx_GIVEREF(__pyx_t_9);
+ __pyx_t_1 = 0;
+ __pyx_t_9 = 0;
+ __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_tag = __pyx_t_9;
+ __pyx_t_9 = 0;
+ goto __pyx_L5;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":804
+ * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) # <<<<<<<<<<<<<<
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ */
+ __pyx_t_9 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.tag); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 804; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __pyx_v_tag = __pyx_t_9;
+ __pyx_t_9 = 0;
+ }
+ __pyx_L5:;
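+
+ /* The branch just closed encodes the implicit-tag rule: a NULL tag, or the
+  * bare non-specific tag "!", defers to self.resolve() to pick a tag from the
+  * scalar value and its (plain_implicit, quoted_implicit) flags; any other
+  * tag string is taken verbatim via PyUnicode_FromString.  The same
+  * NULL-or-"!" test recurs below for sequence and mapping starts. */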
+
+ /* "_yaml.pyx":805
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ * style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_style = Py_None;
+
+ /* "_yaml.pyx":814
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ */
+ switch (__pyx_v_self->parsed_event.data.scalar.style) {
+
+ /* "_yaml.pyx":806
+ * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ */
+ case YAML_PLAIN_SCALAR_STYLE:
+
+ /* "_yaml.pyx":807
+ * style = None
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u'' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ */
+ __Pyx_INCREF(__pyx_kp_u__6);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__6);
+ break;
+
+ /* "_yaml.pyx":808
+ * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ */
+ case YAML_SINGLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":809
+ * style = u''
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\'' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ */
+ __Pyx_INCREF(__pyx_kp_u__7);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__7);
+ break;
+
+ /* "_yaml.pyx":810
+ * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ */
+ case YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":811
+ * style = u'\''
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ */
+ __Pyx_INCREF(__pyx_kp_u__8);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__8);
+ break;
+
+ /* "_yaml.pyx":812
+ * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ */
+ case YAML_LITERAL_SCALAR_STYLE:
+
+ /* "_yaml.pyx":813
+ * style = u'"'
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|' # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ */
+ __Pyx_INCREF(__pyx_kp_u__9);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__9);
+ break;
+
+ /* "_yaml.pyx":814
+ * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<<
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ */
+ case YAML_FOLDED_SCALAR_STYLE:
+
+ /* "_yaml.pyx":815
+ * style = u'|'
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>' # <<<<<<<<<<<<<<
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None:
+ */
+ __Pyx_INCREF(__pyx_kp_u__10);
+ __Pyx_DECREF_SET(__pyx_v_style, __pyx_kp_u__10);
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":816
+ * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(((PyObject *)__pyx_v_end_mark));
+ PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_end_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark));
+ __Pyx_INCREF(__pyx_v_style);
+ PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_v_style);
+ __Pyx_GIVEREF(__pyx_v_style);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_node = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":817
+ * style = u'>'
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_6 = (__pyx_v_anchor != Py_None);
+ __pyx_t_5 = (__pyx_t_6 != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":818
+ * node = ScalarNode(tag, value, start_mark, end_mark, style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * return node
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 818; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":819
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":820
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_sequence_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":780
+ * return node
+ *
+ * cdef _compose_scalar_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CParser._compose_scalar_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF((PyObject *)__pyx_v_end_mark);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_style);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
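+
+ /* The style switch above is how Cython lowers the .pyx if/elif ladder over
+  * yaml_scalar_style_t.  A hand-written sketch of the same mapping, assuming
+  * only the libyaml enum names already used in this file (illustration, not
+  * part of the generated module):
+  *
+  *     static const char *scalar_style_char(yaml_scalar_style_t s) {
+  *         switch (s) {
+  *         case YAML_PLAIN_SCALAR_STYLE:         return "";
+  *         case YAML_SINGLE_QUOTED_SCALAR_STYLE: return "'";
+  *         case YAML_DOUBLE_QUOTED_SCALAR_STYLE: return "\"";
+  *         case YAML_LITERAL_SCALAR_STYLE:       return "|";
+  *         case YAML_FOLDED_SCALAR_STYLE:        return ">";
+  *         default:                              return NULL;
+  *         }
+  *     }
+  *
+  * The default case leaves the Python-level style as None. */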
+
+/* "_yaml.pyx":822
+ * return node
+ *
+ * cdef _compose_sequence_node(self, object anchor): # <<<<<<<<<<<<<<
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_sequence_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ int __pyx_v_index;
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_t_7;
+ int __pyx_t_8;
+ int __pyx_t_9;
+ int __pyx_t_10;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_compose_sequence_node", 0);
+
+ /* "_yaml.pyx":825
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 825; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":826
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":827
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * implicit = False
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":824
+ * cdef _compose_sequence_node(self, object anchor):
+ * cdef int index
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 824; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 824; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":829
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":830
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.sequence_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":831
+ * implicit = False
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":832
+ * if self.parsed_event.data.sequence_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.sequence_start.tag == NULL) != 0);
+ if (!__pyx_t_5) {
+
+ /* "_yaml.pyx":833
+ * implicit = True
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ * tag = self.resolve(SequenceNode, None, implicit)
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.sequence_start.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":834
+ * if self.parsed_event.data.sequence_start.tag == NULL \
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(SequenceNode, None, implicit)
+ * else:
+ */
+ __pyx_t_7 = (((__pyx_v_self->parsed_event.data.sequence_start.tag[1]) == '\x00') != 0);
+ __pyx_t_8 = __pyx_t_7;
+ } else {
+ __pyx_t_8 = __pyx_t_6;
+ }
+ __pyx_t_6 = __pyx_t_8;
+ } else {
+ __pyx_t_6 = __pyx_t_5;
+ }
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":835
+ * or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ * and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ * tag = self.resolve(SequenceNode, None, implicit) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L4;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":837
+ * tag = self.resolve(SequenceNode, None, implicit)
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":838
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":841
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ switch (__pyx_v_self->parsed_event.data.sequence_start.style) {
+
+ /* "_yaml.pyx":839
+ * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ */
+ case YAML_FLOW_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":840
+ * flow_style = None
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+ break;
+
+ /* "_yaml.pyx":841
+ * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ case YAML_BLOCK_SEQUENCE_STYLE:
+
+ /* "_yaml.pyx":842
+ * flow_style = True
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":843
+ * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ * flow_style = False
+ * value = [] # <<<<<<<<<<<<<<
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ */
+ __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 843; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":844
+ * flow_style = False
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(5); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_node = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":845
+ * value = []
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_6 = (__pyx_v_anchor != Py_None);
+ __pyx_t_5 = (__pyx_t_6 != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":846
+ * node = SequenceNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 846; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":847
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * index = 0
+ * self._parse_next_event()
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":848
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0 # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ */
+ __pyx_v_index = 0;
+
+ /* "_yaml.pyx":849
+ * yaml_event_delete(&self.parsed_event)
+ * index = 0
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index))
+ */
+ __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":850
+ * index = 0
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<<
+ * value.append(self._compose_node(node, index))
+ * index = index+1
+ */
+ while (1) {
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.type != YAML_SEQUENCE_END_EVENT) != 0);
+ if (!__pyx_t_5) break;
+
+ /* "_yaml.pyx":851
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index)) # <<<<<<<<<<<<<<
+ * index = index+1
+ * self._parse_next_event()
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_t_3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_value, __pyx_t_1); if (unlikely(__pyx_t_10 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":852
+ * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ * value.append(self._compose_node(node, index))
+ * index = index+1 # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ */
+ __pyx_v_index = (__pyx_v_index + 1);
+
+ /* "_yaml.pyx":853
+ * value.append(self._compose_node(node, index))
+ * index = index+1
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ */
+ __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 853; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":855
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 855; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":856
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 856; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":857
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 857; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":854
+ * index = index+1
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__Pyx_PyObject_SetAttrStr(__pyx_v_node, __pyx_n_s_end_mark, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":859
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":860
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef _compose_mapping_node(self, object anchor):
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":822
+ * return node
+ *
+ * cdef _compose_sequence_node(self, object anchor): # <<<<<<<<<<<<<<
+ * cdef int index
+ * start_mark = Mark(self.stream_name,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CParser._compose_sequence_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
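+
+ /* Condensed, the .pyx fragments quoted through this function implement the
+  * usual composer loop (sketch of the quoted source, not new behavior):
+  *
+  *     value = []
+  *     node = SequenceNode(tag, value, start_mark, None, flow_style)
+  *     self._parse_next_event()
+  *     while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+  *         value.append(self._compose_node(node, index)); index += 1
+  *         self._parse_next_event()
+  *     node.end_mark = Mark(self.stream_name, ...)
+  *
+  * The node is registered under its anchor before the children are composed,
+  * which is what lets an alias inside the sequence refer back to it. */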
+
+/* "_yaml.pyx":862
+ * return node
+ *
+ * cdef _compose_mapping_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+static PyObject *__pyx_f_5_yaml_7CParser__compose_mapping_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) {
+ struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark = NULL;
+ int __pyx_v_implicit;
+ PyObject *__pyx_v_tag = NULL;
+ PyObject *__pyx_v_flow_style = NULL;
+ PyObject *__pyx_v_value = NULL;
+ PyObject *__pyx_v_node = NULL;
+ PyObject *__pyx_v_item_key = NULL;
+ PyObject *__pyx_v_item_value = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_t_5;
+ int __pyx_t_6;
+ int __pyx_t_7;
+ int __pyx_t_8;
+ int __pyx_t_9;
+ int __pyx_t_10;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_compose_mapping_node", 0);
+
+ /* "_yaml.pyx":864
+ * cdef _compose_mapping_node(self, object anchor):
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column,
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 864; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":865
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 865; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":866
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * implicit = False
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 866; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":863
+ *
+ * cdef _compose_mapping_node(self, object anchor):
+ * start_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.start_mark.index,
+ * self.parsed_event.start_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 863; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_3 = 0;
+ __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 863; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":868
+ * self.parsed_event.start_mark.column,
+ * None, None)
+ * implicit = False # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":869
+ * None, None)
+ * implicit = False
+ * if self.parsed_event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<<
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.mapping_start.implicit == 1) != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":870
+ * implicit = False
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":871
+ * if self.parsed_event.data.mapping_start.implicit == 1:
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \ # <<<<<<<<<<<<<<
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ */
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.data.mapping_start.tag == NULL) != 0);
+ if (!__pyx_t_5) {
+
+ /* "_yaml.pyx":872
+ * implicit = True
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!' # <<<<<<<<<<<<<<
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ * tag = self.resolve(MappingNode, None, implicit)
+ */
+ __pyx_t_6 = (((__pyx_v_self->parsed_event.data.mapping_start.tag[0]) == '!') != 0);
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":873
+ * if self.parsed_event.data.mapping_start.tag == NULL \
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): # <<<<<<<<<<<<<<
+ * tag = self.resolve(MappingNode, None, implicit)
+ * else:
+ */
+ __pyx_t_7 = (((__pyx_v_self->parsed_event.data.mapping_start.tag[1]) == '\x00') != 0);
+ __pyx_t_8 = __pyx_t_7;
+ } else {
+ __pyx_t_8 = __pyx_t_6;
+ }
+ __pyx_t_6 = __pyx_t_8;
+ } else {
+ __pyx_t_6 = __pyx_t_5;
+ }
+ if (__pyx_t_6) {
+
+ /* "_yaml.pyx":874
+ * or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ * and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ * tag = self.resolve(MappingNode, None, implicit) # <<<<<<<<<<<<<<
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __pyx_t_4 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L4;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":876
+ * tag = self.resolve(MappingNode, None, implicit)
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) # <<<<<<<<<<<<<<
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 876; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_tag = __pyx_t_2;
+ __pyx_t_2 = 0;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":877
+ * else:
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ * flow_style = None # <<<<<<<<<<<<<<
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ */
+ __Pyx_INCREF(Py_None);
+ __pyx_v_flow_style = Py_None;
+
+ /* "_yaml.pyx":880
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ switch (__pyx_v_self->parsed_event.data.mapping_start.style) {
+
+ /* "_yaml.pyx":878
+ * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ */
+ case YAML_FLOW_MAPPING_STYLE:
+
+ /* "_yaml.pyx":879
+ * flow_style = None
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True # <<<<<<<<<<<<<<
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ */
+ __Pyx_INCREF(Py_True);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_True);
+ break;
+
+ /* "_yaml.pyx":880
+ * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<<
+ * flow_style = False
+ * value = []
+ */
+ case YAML_BLOCK_MAPPING_STYLE:
+
+ /* "_yaml.pyx":881
+ * flow_style = True
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False # <<<<<<<<<<<<<<
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ */
+ __Pyx_INCREF(Py_False);
+ __Pyx_DECREF_SET(__pyx_v_flow_style, Py_False);
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":882
+ * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ * flow_style = False
+ * value = [] # <<<<<<<<<<<<<<
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ */
+ __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 882; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_value = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":883
+ * flow_style = False
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<<
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ */
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingNode); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = PyTuple_New(5); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_v_tag);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_tag);
+ __Pyx_GIVEREF(__pyx_v_tag);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_INCREF(((PyObject *)__pyx_v_start_mark));
+ PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_start_mark));
+ __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark));
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_1, 3, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(__pyx_v_flow_style);
+ PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_flow_style);
+ __Pyx_GIVEREF(__pyx_v_flow_style);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_node = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":884
+ * value = []
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None: # <<<<<<<<<<<<<<
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_6 = (__pyx_v_anchor != Py_None);
+ __pyx_t_5 = (__pyx_t_6 != 0);
+ if (__pyx_t_5) {
+
+ /* "_yaml.pyx":885
+ * node = MappingNode(tag, value, start_mark, None, flow_style)
+ * if anchor is not None:
+ * self.anchors[anchor] = node # <<<<<<<<<<<<<<
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 885; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":886
+ * if anchor is not None:
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":887
+ * self.anchors[anchor] = node
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None)
+ */
+ __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 887; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":888
+ * yaml_event_delete(&self.parsed_event)
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<<
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key)
+ */
+ while (1) {
+ __pyx_t_5 = ((__pyx_v_self->parsed_event.type != YAML_MAPPING_END_EVENT) != 0);
+ if (!__pyx_t_5) break;
+
+ /* "_yaml.pyx":889
+ * self._parse_next_event()
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None) # <<<<<<<<<<<<<<
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value))
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, Py_None); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 889; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_item_key, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":890
+ * while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key) # <<<<<<<<<<<<<<
+ * value.append((item_key, item_value))
+ * self._parse_next_event()
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_v_item_key); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 890; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_item_value, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":891
+ * item_key = self._compose_node(node, None)
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value)) # <<<<<<<<<<<<<<
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ */
+ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 891; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_item_key);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item_key);
+ __Pyx_GIVEREF(__pyx_v_item_key);
+ __Pyx_INCREF(__pyx_v_item_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item_value);
+ __Pyx_GIVEREF(__pyx_v_item_value);
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_value, __pyx_t_3); if (unlikely(__pyx_t_10 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 891; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":892
+ * item_value = self._compose_node(node, item_key)
+ * value.append((item_key, item_value))
+ * self._parse_next_event() # <<<<<<<<<<<<<<
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ */
+ __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 892; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":894
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column,
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 894; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "_yaml.pyx":895
+ * node.end_mark = Mark(self.stream_name,
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 895; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+
+ /* "_yaml.pyx":896
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<<
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ */
+ __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 896; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+
+ /* "_yaml.pyx":893
+ * value.append((item_key, item_value))
+ * self._parse_next_event()
+ * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<<
+ * self.parsed_event.end_mark.index,
+ * self.parsed_event.end_mark.line,
+ */
+ __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_INCREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name);
+ __Pyx_GIVEREF(__pyx_v_self->stream_name);
+ PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_INCREF(Py_None);
+ PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __pyx_t_3 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_2 = 0;
+ __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__Pyx_PyObject_SetAttrStr(__pyx_v_node, __pyx_n_s_end_mark, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":898
+ * self.parsed_event.end_mark.column,
+ * None, None)
+ * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<<
+ * return node
+ *
+ */
+ yaml_event_delete((&__pyx_v_self->parsed_event));
+
+ /* "_yaml.pyx":899
+ * None, None)
+ * yaml_event_delete(&self.parsed_event)
+ * return node # <<<<<<<<<<<<<<
+ *
+ * cdef int _parse_next_event(self) except 0:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_v_node);
+ __pyx_r = __pyx_v_node;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":862
+ * return node
+ *
+ * cdef _compose_mapping_node(self, object anchor): # <<<<<<<<<<<<<<
+ * start_mark = Mark(self.stream_name,
+ * self.parsed_event.start_mark.index,
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CParser._compose_mapping_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_start_mark);
+ __Pyx_XDECREF(__pyx_v_tag);
+ __Pyx_XDECREF(__pyx_v_flow_style);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_XDECREF(__pyx_v_node);
+ __Pyx_XDECREF(__pyx_v_item_key);
+ __Pyx_XDECREF(__pyx_v_item_value);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
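+
+/* Standard Cython epilogue: the __pyx_L1_error label releases whichever
+ * temporaries (__pyx_t_1..__pyx_t_4) are still live, records a traceback
+ * frame for _yaml.CParser._compose_mapping_node, and returns 0; the
+ * __pyx_L0 exit path releases the function-local object variables and
+ * hands the owned result back to the caller.
+ */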
+
+/* "_yaml.pyx":901
+ * return node
+ *
+ * cdef int _parse_next_event(self) except 0: # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ */
+
+static int __pyx_f_5_yaml_7CParser__parse_next_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) {
+ PyObject *__pyx_v_error = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_parse_next_event", 0);
+
+ /* "_yaml.pyx":902
+ *
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<<
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error()
+ */
+ __pyx_t_1 = ((__pyx_v_self->parsed_event.type == YAML_NO_EVENT) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":903
+ * cdef int _parse_next_event(self) except 0:
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: # <<<<<<<<<<<<<<
+ * error = self._parser_error()
+ * raise error
+ */
+ __pyx_t_2 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_self->parsed_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 903; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_2 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":904
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error() # <<<<<<<<<<<<<<
+ * raise error
+ * return 1
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 904; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":905
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ * error = self._parser_error()
+ * raise error # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 905; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":906
+ * error = self._parser_error()
+ * raise error
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":901
+ * return node
+ *
+ * cdef int _parse_next_event(self) except 0: # <<<<<<<<<<<<<<
+ * if self.parsed_event.type == YAML_NO_EVENT:
+ * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CParser._parse_next_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
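+
+/* _parse_next_event is declared `except 0`, so the plain C return value
+ * doubles as the error channel: 1 on success, 0 after an exception has
+ * been set and a traceback recorded. A caller can therefore branch on
+ * the return value directly, along the lines of (sketch only):
+ *
+ *   if (((struct __pyx_vtabstruct_5_yaml_CParser *)
+ *        self->__pyx_vtab)->_parse_next_event(self) == 0) goto error;
+ */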
+
+/* "_yaml.pyx":908
+ * return 1
+ *
+ * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: # <<<<<<<<<<<<<<
+ * cdef CParser parser
+ * parser = <CParser>data
+ */
+
+static int __pyx_f_5_yaml_input_handler(void *__pyx_v_data, char *__pyx_v_buffer, int __pyx_v_size, int *__pyx_v_read) {
+ struct __pyx_obj_5_yaml_CParser *__pyx_v_parser = 0;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("input_handler", 0);
+
+ /* "_yaml.pyx":910
+ * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0:
+ * cdef CParser parser
+ * parser = <CParser>data # <<<<<<<<<<<<<<
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size)
+ */
+ __pyx_t_1 = ((PyObject *)__pyx_v_data);
+ __Pyx_INCREF(__pyx_t_1);
+ __pyx_v_parser = ((struct __pyx_obj_5_yaml_CParser *)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":911
+ * cdef CParser parser
+ * parser = <CParser>data
+ * if parser.stream_cache is None: # <<<<<<<<<<<<<<
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0:
+ */
+ __pyx_t_2 = (__pyx_v_parser->stream_cache == Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":912
+ * parser = <CParser>data
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_parser->stream, __pyx_n_s_read); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_size); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4);
+ __Pyx_GIVEREF(__pyx_t_4);
+ __pyx_t_4 = 0;
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_v_value = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":913
+ * if parser.stream_cache is None:
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0: # <<<<<<<<<<<<<<
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ */
+ __pyx_t_3 = ((PyUnicode_CheckExact(__pyx_v_value) != 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":914
+ * value = parser.stream.read(size)
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value) # <<<<<<<<<<<<<<
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0:
+ */
+ __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 914; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF_SET(__pyx_v_value, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":915
+ * if PyUnicode_CheckExact(value) != 0:
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1 # <<<<<<<<<<<<<<
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_parser->unicode_source = 1;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":916
+ * value = PyUnicode_AsUTF8String(value)
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected")
+ */
+ __pyx_t_3 = ((PyString_CheckExact(__pyx_v_value) == 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":917
+ * parser.unicode_source = 1
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("a string value is expected")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":918
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string value is expected")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":920
+ * raise TypeError("a string value is expected")
+ * else:
+ * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<<
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__16, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":921
+ * else:
+ * raise TypeError(u"a string value is expected")
+ * parser.stream_cache = value # <<<<<<<<<<<<<<
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ */
+ __Pyx_INCREF(__pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __Pyx_GOTREF(__pyx_v_parser->stream_cache);
+ __Pyx_DECREF(__pyx_v_parser->stream_cache);
+ __pyx_v_parser->stream_cache = __pyx_v_value;
+
+ /* "_yaml.pyx":922
+ * raise TypeError(u"a string value is expected")
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0 # <<<<<<<<<<<<<<
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ */
+ __pyx_v_parser->stream_cache_pos = 0;
+
+ /* "_yaml.pyx":923
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value) # <<<<<<<<<<<<<<
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ */
+ __pyx_v_parser->stream_cache_len = PyString_GET_SIZE(__pyx_v_value);
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":924
+ * parser.stream_cache_pos = 0
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size: # <<<<<<<<<<<<<<
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0:
+ */
+ __pyx_t_3 = (((__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos) < __pyx_v_size) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":925
+ * parser.stream_cache_len = PyString_GET_SIZE(value)
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos # <<<<<<<<<<<<<<
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ */
+ __pyx_v_size = (__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos);
+ goto __pyx_L7;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":926
+ * if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0: # <<<<<<<<<<<<<<
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size)
+ */
+ __pyx_t_3 = ((__pyx_v_size > 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":927
+ * size = parser.stream_cache_len - parser.stream_cache_pos
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) # <<<<<<<<<<<<<<
+ * + parser.stream_cache_pos, size)
+ * read[0] = size
+ */
+ __pyx_t_4 = __pyx_v_parser->stream_cache;
+ __Pyx_INCREF(__pyx_t_4);
+
+ /* "_yaml.pyx":928
+ * if size > 0:
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size) # <<<<<<<<<<<<<<
+ * read[0] = size
+ * parser.stream_cache_pos += size
+ */
+ memcpy(__pyx_v_buffer, (PyString_AS_STRING(__pyx_t_4) + __pyx_v_parser->stream_cache_pos), __pyx_v_size);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ goto __pyx_L8;
+ }
+ __pyx_L8:;
+
+ /* "_yaml.pyx":929
+ * memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ * + parser.stream_cache_pos, size)
+ * read[0] = size # <<<<<<<<<<<<<<
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ */
+ (__pyx_v_read[0]) = __pyx_v_size;
+
+ /* "_yaml.pyx":930
+ * + parser.stream_cache_pos, size)
+ * read[0] = size
+ * parser.stream_cache_pos += size # <<<<<<<<<<<<<<
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None
+ */
+ __pyx_v_parser->stream_cache_pos = (__pyx_v_parser->stream_cache_pos + __pyx_v_size);
+
+ /* "_yaml.pyx":931
+ * read[0] = size
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len: # <<<<<<<<<<<<<<
+ * parser.stream_cache = None
+ * return 1
+ */
+ __pyx_t_3 = ((__pyx_v_parser->stream_cache_pos == __pyx_v_parser->stream_cache_len) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":932
+ * parser.stream_cache_pos += size
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __Pyx_INCREF(Py_None);
+ __Pyx_GIVEREF(Py_None);
+ __Pyx_GOTREF(__pyx_v_parser->stream_cache);
+ __Pyx_DECREF(__pyx_v_parser->stream_cache);
+ __pyx_v_parser->stream_cache = Py_None;
+ goto __pyx_L9;
+ }
+ __pyx_L9:;
+
+ /* "_yaml.pyx":933
+ * if parser.stream_cache_pos == parser.stream_cache_len:
+ * parser.stream_cache = None
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef class CEmitter:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":908
+ * return 1
+ *
+ * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: # <<<<<<<<<<<<<<
+ * cdef CParser parser
+ * parser = <CParser>data
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("_yaml.input_handler", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_parser);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
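+
+/* input_handler is the read callback registered with libyaml: each call
+ * must copy at most `size` bytes into `buffer` and report the count via
+ * read[0]. With an empty cache it refills from parser.stream.read(size),
+ * UTF-8-encoding unicode input (and flagging unicode_source) and
+ * rejecting non-string results with TypeError; it then memcpy's the next
+ * slice out of stream_cache, advances stream_cache_pos, and resets the
+ * cache to None once fully consumed. Returning 1 signals success under
+ * the same `except 0` convention as above.
+ */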
+
+/* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+/* Python wrapper */
+static int __pyx_pw_5_yaml_8CEmitter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pw_5_yaml_8CEmitter_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ PyObject *__pyx_v_stream = 0;
+ PyObject *__pyx_v_canonical = 0;
+ PyObject *__pyx_v_indent = 0;
+ PyObject *__pyx_v_width = 0;
+ PyObject *__pyx_v_allow_unicode = 0;
+ PyObject *__pyx_v_line_break = 0;
+ PyObject *__pyx_v_encoding = 0;
+ PyObject *__pyx_v_explicit_start = 0;
+ PyObject *__pyx_v_explicit_end = 0;
+ PyObject *__pyx_v_version = 0;
+ PyObject *__pyx_v_tags = 0;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_stream,&__pyx_n_s_canonical,&__pyx_n_s_indent,&__pyx_n_s_width,&__pyx_n_s_allow_unicode,&__pyx_n_s_line_break,&__pyx_n_s_encoding,&__pyx_n_s_explicit_start,&__pyx_n_s_explicit_end,&__pyx_n_s_version,&__pyx_n_s_tags,0};
+ PyObject* values[11] = {0,0,0,0,0,0,0,0,0,0,0};
+ values[1] = ((PyObject *)Py_None);
+ values[2] = ((PyObject *)Py_None);
+ values[3] = ((PyObject *)Py_None);
+
+ /* "_yaml.pyx":954
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None,
+ * allow_unicode=None, line_break=None, encoding=None, # <<<<<<<<<<<<<<
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ */
+ values[4] = ((PyObject *)Py_None);
+ values[5] = ((PyObject *)Py_None);
+ values[6] = ((PyObject *)Py_None);
+
+ /* "_yaml.pyx":955
+ * def __init__(self, stream, canonical=None, indent=None, width=None,
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None): # <<<<<<<<<<<<<<
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError
+ */
+ values[7] = ((PyObject *)Py_None);
+ values[8] = ((PyObject *)Py_None);
+ values[9] = ((PyObject *)Py_None);
+ values[10] = ((PyObject *)Py_None);
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_stream)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ case 1:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_canonical);
+ if (value) { values[1] = value; kw_args--; }
+ }
+ case 2:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_indent);
+ if (value) { values[2] = value; kw_args--; }
+ }
+ case 3:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_width);
+ if (value) { values[3] = value; kw_args--; }
+ }
+ case 4:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_allow_unicode);
+ if (value) { values[4] = value; kw_args--; }
+ }
+ case 5:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_line_break);
+ if (value) { values[5] = value; kw_args--; }
+ }
+ case 6:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_encoding);
+ if (value) { values[6] = value; kw_args--; }
+ }
+ case 7:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_explicit_start);
+ if (value) { values[7] = value; kw_args--; }
+ }
+ case 8:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_explicit_end);
+ if (value) { values[8] = value; kw_args--; }
+ }
+ case 9:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_version);
+ if (value) { values[9] = value; kw_args--; }
+ }
+ case 10:
+ if (kw_args > 0) {
+ PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_tags);
+ if (value) { values[10] = value; kw_args--; }
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 953; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ }
+ } else {
+ switch (PyTuple_GET_SIZE(__pyx_args)) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ }
+ __pyx_v_stream = values[0];
+ __pyx_v_canonical = values[1];
+ __pyx_v_indent = values[2];
+ __pyx_v_width = values[3];
+ __pyx_v_allow_unicode = values[4];
+ __pyx_v_line_break = values[5];
+ __pyx_v_encoding = values[6];
+ __pyx_v_explicit_start = values[7];
+ __pyx_v_explicit_end = values[8];
+ __pyx_v_version = values[9];
+ __pyx_v_tags = values[10];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("__init__", 0, 1, 11, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 953; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("_yaml.CEmitter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return -1;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter___init__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), __pyx_v_stream, __pyx_v_canonical, __pyx_v_indent, __pyx_v_width, __pyx_v_allow_unicode, __pyx_v_line_break, __pyx_v_encoding, __pyx_v_explicit_start, __pyx_v_explicit_end, __pyx_v_version, __pyx_v_tags);
+
+ /* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
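+
+/* The missing `break`s in both argument switches above are intentional:
+ * each case falls through so that a call with N positional arguments
+ * fills values[0..N-1] in a single pass, after which remaining slots are
+ * looked up by keyword. __Pyx_RaiseArgtupleInvalid reports the accepted
+ * arity (1 required argument, 11 at most) when the tuple shape is wrong.
+ */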
+
+static int __pyx_pf_5_yaml_8CEmitter___init__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_stream, PyObject *__pyx_v_canonical, PyObject *__pyx_v_indent, PyObject *__pyx_v_width, PyObject *__pyx_v_allow_unicode, PyObject *__pyx_v_line_break, PyObject *__pyx_v_encoding, PyObject *__pyx_v_explicit_start, PyObject *__pyx_v_explicit_end, PyObject *__pyx_v_version, PyObject *__pyx_v_tags) {
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("__init__", 0);
+
+ /* "_yaml.pyx":956
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * self.stream = stream
+ */
+ __pyx_t_1 = ((yaml_emitter_initialize((&__pyx_v_self->emitter)) == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":957
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * self.dump_unicode = 0
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 957; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":958
+ * if yaml_emitter_initialize(&self.emitter) == 0:
+ * raise MemoryError
+ * self.stream = stream # <<<<<<<<<<<<<<
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_INCREF(__pyx_v_stream);
+ __Pyx_GIVEREF(__pyx_v_stream);
+ __Pyx_GOTREF(__pyx_v_self->stream);
+ __Pyx_DECREF(__pyx_v_self->stream);
+ __pyx_v_self->stream = __pyx_v_stream;
+
+ /* "_yaml.pyx":959
+ * raise MemoryError
+ * self.stream = stream
+ * self.dump_unicode = 0 # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None):
+ */
+ __pyx_v_self->dump_unicode = 0;
+
+ /* "_yaml.pyx":960
+ * self.stream = stream
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * if getattr3(stream, 'encoding', None):
+ * self.dump_unicode = 1
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":961
+ * self.dump_unicode = 0
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * else:
+ */
+ __pyx_t_2 = __Pyx_GetAttr3(__pyx_v_stream, __pyx_n_s_encoding, Py_None); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 961; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 961; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":962
+ * if PY_MAJOR_VERSION < 3:
+ * if getattr3(stream, 'encoding', None):
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * else:
+ * if hasattr(stream, u'encoding'):
+ */
+ __pyx_v_self->dump_unicode = 1;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+ goto __pyx_L4;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":964
+ * self.dump_unicode = 1
+ * else:
+ * if hasattr(stream, u'encoding'): # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding
+ */
+ __pyx_t_1 = PyObject_HasAttr(__pyx_v_stream, __pyx_n_u_encoding); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 964; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = (__pyx_t_1 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":965
+ * else:
+ * if hasattr(stream, u'encoding'):
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ */
+ __pyx_v_self->dump_unicode = 1;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":966
+ * if hasattr(stream, u'encoding'):
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding # <<<<<<<<<<<<<<
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical:
+ */
+ __Pyx_INCREF(__pyx_v_encoding);
+ __Pyx_GIVEREF(__pyx_v_encoding);
+ __Pyx_GOTREF(__pyx_v_self->use_encoding);
+ __Pyx_DECREF(__pyx_v_self->use_encoding);
+ __pyx_v_self->use_encoding = __pyx_v_encoding;
+
+ /* "_yaml.pyx":967
+ * self.dump_unicode = 1
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self) # <<<<<<<<<<<<<<
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ */
+ yaml_emitter_set_output((&__pyx_v_self->emitter), __pyx_f_5_yaml_output_handler, ((void *)__pyx_v_self));
+
+ /* "_yaml.pyx":968
+ * self.use_encoding = encoding
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None:
+ */
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_canonical); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 968; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":969
+ * yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1) # <<<<<<<<<<<<<<
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ */
+ yaml_emitter_set_canonical((&__pyx_v_self->emitter), 1);
+ goto __pyx_L7;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":970
+ * if canonical:
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None:
+ */
+ __pyx_t_3 = (__pyx_v_indent != Py_None);
+ __pyx_t_1 = (__pyx_t_3 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":971
+ * yaml_emitter_set_canonical(&self.emitter, 1)
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent) # <<<<<<<<<<<<<<
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width)
+ */
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v_indent); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 971; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ yaml_emitter_set_indent((&__pyx_v_self->emitter), __pyx_t_4);
+ goto __pyx_L8;
+ }
+ __pyx_L8:;
+
+ /* "_yaml.pyx":972
+ * if indent is not None:
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode:
+ */
+ __pyx_t_1 = (__pyx_v_width != Py_None);
+ __pyx_t_3 = (__pyx_t_1 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":973
+ * yaml_emitter_set_indent(&self.emitter, indent)
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width) # <<<<<<<<<<<<<<
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ */
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v_width); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 973; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ yaml_emitter_set_width((&__pyx_v_self->emitter), __pyx_t_4);
+ goto __pyx_L9;
+ }
+ __pyx_L9:;
+
+ /* "_yaml.pyx":974
+ * if width is not None:
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode: # <<<<<<<<<<<<<<
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ */
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_allow_unicode); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 974; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":975
+ * yaml_emitter_set_width(&self.emitter, width)
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1) # <<<<<<<<<<<<<<
+ * if line_break is not None:
+ * if line_break == '\r':
+ */
+ yaml_emitter_set_unicode((&__pyx_v_self->emitter), 1);
+ goto __pyx_L10;
+ }
+ __pyx_L10:;
+
+ /* "_yaml.pyx":976
+ * if allow_unicode:
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None: # <<<<<<<<<<<<<<
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ */
+ __pyx_t_3 = (__pyx_v_line_break != Py_None);
+ __pyx_t_1 = (__pyx_t_3 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":977
+ * yaml_emitter_set_unicode(&self.emitter, 1)
+ * if line_break is not None:
+ * if line_break == '\r': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n':
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__17, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 977; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":978
+ * if line_break is not None:
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) # <<<<<<<<<<<<<<
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_CR_BREAK);
+ goto __pyx_L12;
+ }
+
+ /* "_yaml.pyx":979
+ * if line_break == '\r':
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n':
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__18, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 979; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":980
+ * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) # <<<<<<<<<<<<<<
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_LN_BREAK);
+ goto __pyx_L12;
+ }
+
+ /* "_yaml.pyx":981
+ * elif line_break == '\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n': # <<<<<<<<<<<<<<
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_line_break, __pyx_kp_s__19, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 981; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":982
+ * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) # <<<<<<<<<<<<<<
+ * self.document_start_implicit = 1
+ * if explicit_start:
+ */
+ yaml_emitter_set_break((&__pyx_v_self->emitter), YAML_CRLN_BREAK);
+ goto __pyx_L12;
+ }
+ __pyx_L12:;
+ goto __pyx_L11;
+ }
+ __pyx_L11:;
+
+ /* "_yaml.pyx":983
+ * elif line_break == '\r\n':
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1 # <<<<<<<<<<<<<<
+ * if explicit_start:
+ * self.document_start_implicit = 0
+ */
+ __pyx_v_self->document_start_implicit = 1;
+
+ /* "_yaml.pyx":984
+ * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ * self.document_start_implicit = 1
+ * if explicit_start: # <<<<<<<<<<<<<<
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_start); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 984; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":985
+ * self.document_start_implicit = 1
+ * if explicit_start:
+ * self.document_start_implicit = 0 # <<<<<<<<<<<<<<
+ * self.document_end_implicit = 1
+ * if explicit_end:
+ */
+ __pyx_v_self->document_start_implicit = 0;
+ goto __pyx_L13;
+ }
+ __pyx_L13:;
+
+ /* "_yaml.pyx":986
+ * if explicit_start:
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1 # <<<<<<<<<<<<<<
+ * if explicit_end:
+ * self.document_end_implicit = 0
+ */
+ __pyx_v_self->document_end_implicit = 1;
+
+ /* "_yaml.pyx":987
+ * self.document_start_implicit = 0
+ * self.document_end_implicit = 1
+ * if explicit_end: # <<<<<<<<<<<<<<
+ * self.document_end_implicit = 0
+ * self.use_version = version
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_end); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 987; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":988
+ * self.document_end_implicit = 1
+ * if explicit_end:
+ * self.document_end_implicit = 0 # <<<<<<<<<<<<<<
+ * self.use_version = version
+ * self.use_tags = tags
+ */
+ __pyx_v_self->document_end_implicit = 0;
+ goto __pyx_L14;
+ }
+ __pyx_L14:;
+
+ /* "_yaml.pyx":989
+ * if explicit_end:
+ * self.document_end_implicit = 0
+ * self.use_version = version # <<<<<<<<<<<<<<
+ * self.use_tags = tags
+ * self.serialized_nodes = {}
+ */
+ __Pyx_INCREF(__pyx_v_version);
+ __Pyx_GIVEREF(__pyx_v_version);
+ __Pyx_GOTREF(__pyx_v_self->use_version);
+ __Pyx_DECREF(__pyx_v_self->use_version);
+ __pyx_v_self->use_version = __pyx_v_version;
+
+ /* "_yaml.pyx":990
+ * self.document_end_implicit = 0
+ * self.use_version = version
+ * self.use_tags = tags # <<<<<<<<<<<<<<
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ */
+ __Pyx_INCREF(__pyx_v_tags);
+ __Pyx_GIVEREF(__pyx_v_tags);
+ __Pyx_GOTREF(__pyx_v_self->use_tags);
+ __Pyx_DECREF(__pyx_v_self->use_tags);
+ __pyx_v_self->use_tags = __pyx_v_tags;
+
+ /* "_yaml.pyx":991
+ * self.use_version = version
+ * self.use_tags = tags
+ * self.serialized_nodes = {} # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ */
+ __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 991; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->serialized_nodes);
+ __Pyx_DECREF(__pyx_v_self->serialized_nodes);
+ __pyx_v_self->serialized_nodes = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":992
+ * self.use_tags = tags
+ * self.serialized_nodes = {}
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * self.last_alias_id = 0
+ * self.closed = -1
+ */
+ __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 992; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":993
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ * self.last_alias_id = 0 # <<<<<<<<<<<<<<
+ * self.closed = -1
+ *
+ */
+ __pyx_v_self->last_alias_id = 0;
+
+ /* "_yaml.pyx":994
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ * self.closed = -1 # <<<<<<<<<<<<<<
+ *
+ * def __dealloc__(self):
+ */
+ __pyx_v_self->closed = -1;
+
+ /* "_yaml.pyx":953
+ * cdef object use_encoding
+ *
+ * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<<
+ * allow_unicode=None, line_break=None, encoding=None,
+ * explicit_start=None, explicit_end=None, version=None, tags=None):
+ */
+
+ /* function exit code */
+ __pyx_r = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_AddTraceback("_yaml.CEmitter.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = -1;
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
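+
+/* The constructor options map directly onto libyaml emitter switches:
+ * canonical -> yaml_emitter_set_canonical, indent and width (coerced to
+ * C int via __Pyx_PyInt_As_int) -> yaml_emitter_set_indent/_set_width,
+ * allow_unicode -> yaml_emitter_set_unicode, and the '\r', '\n' and
+ * '\r\n' line_break values -> YAML_CR_BREAK, YAML_LN_BREAK and
+ * YAML_CRLN_BREAK. explicit_start/explicit_end invert the
+ * document_{start,end}_implicit flags, presumably consulted when
+ * document events are emitted.
+ */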
+
+/* "_yaml.pyx":996
+ * self.closed = -1
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_emitter_delete(&self.emitter)
+ *
+ */
+
+/* Python wrapper */
+static void __pyx_pw_5_yaml_8CEmitter_3__dealloc__(PyObject *__pyx_v_self); /*proto*/
+static void __pyx_pw_5_yaml_8CEmitter_3__dealloc__(PyObject *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
+ __pyx_pf_5_yaml_8CEmitter_2__dealloc__(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
+
+static void __pyx_pf_5_yaml_8CEmitter_2__dealloc__(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__dealloc__", 0);
+
+ /* "_yaml.pyx":997
+ *
+ * def __dealloc__(self):
+ * yaml_emitter_delete(&self.emitter) # <<<<<<<<<<<<<<
+ *
+ * def dispose(self):
+ */
+ yaml_emitter_delete((&__pyx_v_self->emitter));
+
+ /* "_yaml.pyx":996
+ * self.closed = -1
+ *
+ * def __dealloc__(self): # <<<<<<<<<<<<<<
+ * yaml_emitter_delete(&self.emitter)
+ *
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+}
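+
+/* __dealloc__ unconditionally tears down the embedded libyaml emitter
+ * struct; it assumes yaml_emitter_initialize ran in __init__ (or that
+ * the object memory was zeroed at allocation, which appears to be the
+ * usual behaviour for cdef classes).
+ */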
+
+/* "_yaml.pyx":999
+ * yaml_emitter_delete(&self.emitter)
+ *
+ * def dispose(self): # <<<<<<<<<<<<<<
+ * pass
+ *
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_5dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_4dispose(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_4dispose(CYTHON_UNUSED struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("dispose", 0);
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
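+
+/* dispose() compiles the `pass` body and simply returns None; it
+ * presumably exists only so CEmitter keeps the same interface as the
+ * pure-Python emitter, whose dispose() releases internal state.
+ */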
+
+/* "_yaml.pyx":1002
+ * pass
+ *
+ * cdef object _emitter_error(self): # <<<<<<<<<<<<<<
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+static PyObject *__pyx_f_5_yaml_8CEmitter__emitter_error(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ PyObject *__pyx_v_problem = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_emitter_error", 0);
+
+ /* "_yaml.pyx":1005
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.emitter.problem
+ */
+ switch (__pyx_v_self->emitter.error) {
+
+ /* "_yaml.pyx":1003
+ *
+ * cdef object _emitter_error(self):
+ * if self.emitter.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<<
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ */
+ case YAML_MEMORY_ERROR:
+
+ /* "_yaml.pyx":1004
+ * cdef object _emitter_error(self):
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError # <<<<<<<<<<<<<<
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __Pyx_INCREF(__pyx_builtin_MemoryError);
+ __pyx_r = __pyx_builtin_MemoryError;
+ goto __pyx_L0;
+ break;
+
+ /* "_yaml.pyx":1005
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.emitter.problem
+ */
+ case YAML_EMITTER_ERROR:
+
+ /* "_yaml.pyx":1006
+ * return MemoryError
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * problem = self.emitter.problem
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1007
+ * elif self.emitter.error == YAML_EMITTER_ERROR:
+ * if PY_MAJOR_VERSION < 3:
+ * problem = self.emitter.problem # <<<<<<<<<<<<<<
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ */
+ __pyx_t_2 = __Pyx_PyBytes_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1007; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_problem = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1009
+ * problem = self.emitter.problem
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem) # <<<<<<<<<<<<<<
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1009; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_problem = __pyx_t_2;
+ __pyx_t_2 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1010
+ * else:
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ * return EmitterError(problem) # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error")
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_EmitterError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_problem);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_problem);
+ __Pyx_GIVEREF(__pyx_v_problem);
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_r = __pyx_t_4;
+ __pyx_t_4 = 0;
+ goto __pyx_L0;
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1011
+ * problem = PyUnicode_FromString(self.emitter.problem)
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("no emitter error")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1012
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no emitter error")
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1014
+ * raise ValueError("no emitter error")
+ * else:
+ * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<<
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ */
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1002
+ * pass
+ *
+ * cdef object _emitter_error(self): # <<<<<<<<<<<<<<
+ * if self.emitter.error == YAML_MEMORY_ERROR:
+ * return MemoryError
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.CEmitter._emitter_error", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_problem);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
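+
+/* Cython compiled the if/elif chain on self.emitter.error into a C
+ * switch. YAML_MEMORY_ERROR returns the MemoryError type itself for the
+ * caller to raise; YAML_EMITTER_ERROR wraps the C `problem` string
+ * (kept as bytes on Python 2, decoded with PyUnicode_FromString on
+ * Python 3) in an EmitterError instance. Falling out of the switch means
+ * no error was pending, which is treated as a logic error and raises
+ * ValueError("no emitter error").
+ */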
+
+/* "_yaml.pyx":1016
+ * raise ValueError(u"no emitter error")
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_encoding_t encoding
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+static int __pyx_f_5_yaml_8CEmitter__object_to_event(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object, yaml_event_t *__pyx_v_event) {
+ yaml_encoding_t __pyx_v_encoding;
+ yaml_version_directive_t __pyx_v_version_directive_value;
+ yaml_version_directive_t *__pyx_v_version_directive;
+ yaml_tag_directive_t __pyx_v_tag_directives_value[128];
+ yaml_tag_directive_t *__pyx_v_tag_directives_start;
+ yaml_tag_directive_t *__pyx_v_tag_directives_end;
+ int __pyx_v_implicit;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ char *__pyx_v_anchor;
+ char *__pyx_v_tag;
+ char *__pyx_v_value;
+ int __pyx_v_length;
+ yaml_scalar_style_t __pyx_v_scalar_style;
+ yaml_sequence_style_t __pyx_v_sequence_style;
+ yaml_mapping_style_t __pyx_v_mapping_style;
+ PyObject *__pyx_v_event_class = NULL;
+ PyObject *__pyx_v_cache = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_anchor_object = NULL;
+ PyObject *__pyx_v_tag_object = NULL;
+ PyObject *__pyx_v_value_object = NULL;
+ PyObject *__pyx_v_style_object = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ int __pyx_t_6;
+ Py_ssize_t __pyx_t_7;
+ PyObject *(*__pyx_t_8)(PyObject *);
+ PyObject *__pyx_t_9 = NULL;
+ int __pyx_t_10;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_object_to_event", 0);
+
+ /* "_yaml.pyx":1033
+ * cdef yaml_sequence_style_t sequence_style
+ * cdef yaml_mapping_style_t mapping_style
+ * event_class = event_object.__class__ # <<<<<<<<<<<<<<
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1033; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_event_class = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1034
+ * cdef yaml_mapping_style_t mapping_style
+ * event_class = event_object.__class__
+ * if event_class is StreamStartEvent: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1034; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1035
+ * event_class = event_object.__class__
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+
+ /* "_yaml.pyx":1036
+ * if event_class is StreamStartEvent:
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_kp_u_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (!__pyx_t_3) {
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_t_1, __pyx_kp_s_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_4 = __pyx_t_2;
+ } else {
+ __pyx_t_4 = __pyx_t_3;
+ }
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1037
+ * encoding = YAML_UTF8_ENCODING
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<<
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16LE_ENCODING;
+ goto __pyx_L4;
+ }
+
+ /* "_yaml.pyx":1038
+ * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = (__Pyx_PyUnicode_Equals(__pyx_t_1, __pyx_kp_u_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (!__pyx_t_4) {
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_t_1, __pyx_kp_s_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = __pyx_t_3;
+ } else {
+ __pyx_t_2 = __pyx_t_4;
+ }
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1039
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<<
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1
+ */
+ __pyx_v_encoding = YAML_UTF16BE_ENCODING;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":1040
+ * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1040; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_t_1 == Py_None);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1041
+ * encoding = YAML_UTF16BE_ENCODING
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_self->dump_unicode = 1;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":1042
+ * if event_object.encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ */
+ __pyx_t_4 = ((__pyx_v_self->dump_unicode == 1) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1043
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent:
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
+
+ /* "_yaml.pyx":1044
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding) # <<<<<<<<<<<<<<
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event)
+ */
+ yaml_stream_start_event_initialize(__pyx_v_event, __pyx_v_encoding);
+ goto __pyx_L3;
+ }
+
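+ /* Encoding selection for StreamStartEvent: the default is
+ * YAML_UTF8_ENCODING, and each comparison against 'utf-16-le' and
+ * 'utf-16-be' is performed twice, once with a unicode literal and once
+ * with a str literal, so the same test works on Python 2 and Python 3.
+ * An encoding of None flags dump_unicode, and dump_unicode == 1 forces
+ * UTF-8, presumably so the emitted stream can be handed back as text
+ * rather than bytes.
+ */
+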
+ /* "_yaml.pyx":1045
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent:
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1045; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1046
+ * yaml_stream_start_event_initialize(event, encoding)
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL
+ */
+ yaml_stream_end_event_initialize(__pyx_v_event);
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1047
+ * elif event_class is StreamEndEvent:
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent: # <<<<<<<<<<<<<<
+ * version_directive = NULL
+ * if event_object.version:
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1047; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_1);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1048
+ * yaml_stream_end_event_initialize(event)
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL # <<<<<<<<<<<<<<
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0]
+ */
+ __pyx_v_version_directive = NULL;
+
+ /* "_yaml.pyx":1049
+ * elif event_class is DocumentStartEvent:
+ * version_directive = NULL
+ * if event_object.version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1]
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1049; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1049; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1050
+ * version_directive = NULL
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0] # <<<<<<<<<<<<<<
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_5 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_6 = __Pyx_PyInt_As_int(__pyx_t_5); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_v_version_directive_value.major = __pyx_t_6;
+
+ /* "_yaml.pyx":1051
+ * if event_object.version:
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1] # <<<<<<<<<<<<<<
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_version); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_5, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_1 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_6 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_version_directive_value.minor = __pyx_t_6;
+
+ /* "_yaml.pyx":1052
+ * version_directive_value.major = event_object.version[0]
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value # <<<<<<<<<<<<<<
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ */
+ __pyx_v_version_directive = (&__pyx_v_version_directive_value);
+ goto __pyx_L7;
+ }
+ __pyx_L7:;
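+
+ /* If event_object.version was set, version_directive now points at the
+ * stack-allocated version_directive_value filled from version[0] and
+ * version[1]; otherwise it stays NULL. */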
+
+ /* "_yaml.pyx":1053
+ * version_directive_value.minor = event_object.version[1]
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL # <<<<<<<<<<<<<<
+ * tag_directives_end = NULL
+ * if event_object.tags:
+ */
+ __pyx_v_tag_directives_start = NULL;
+
+ /* "_yaml.pyx":1054
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL # <<<<<<<<<<<<<<
+ * if event_object.tags:
+ * if len(event_object.tags) > 128:
+ */
+ __pyx_v_tag_directives_end = NULL;
+
+ /* "_yaml.pyx":1055
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if event_object.tags: # <<<<<<<<<<<<<<
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1055; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1055; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1056
+ * tag_directives_end = NULL
+ * if event_object.tags:
+ * if len(event_object.tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1056; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_7 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1056; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_4 = ((__pyx_t_7 > 128) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1057
+ * if event_object.tags:
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1058
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__22, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1060
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
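+
+ /* The len(tags) > 128 guard suggests tag_directives_value is a fixed
+ * buffer of 128 entries; larger tag maps are rejected with ValueError
+ * rather than overflowing it. */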
+
+ /* "_yaml.pyx":1061
+ * else:
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_value
+ * cache = []
+ */
+ __pyx_v_tag_directives_start = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1062
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<<
+ * cache = []
+ * for handle in event_object.tags:
+ */
+ __pyx_v_tag_directives_end = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1063
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ * cache = [] # <<<<<<<<<<<<<<
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle]
+ */
+ __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1063; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_cache = ((PyObject*)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1064
+ * tag_directives_end = tag_directives_value
+ * cache = []
+ * for handle in event_object.tags: # <<<<<<<<<<<<<<
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyList_CheckExact(__pyx_t_1) || PyTuple_CheckExact(__pyx_t_1)) {
+ __pyx_t_5 = __pyx_t_1; __Pyx_INCREF(__pyx_t_5); __pyx_t_7 = 0;
+ __pyx_t_8 = NULL;
+ } else {
+ __pyx_t_7 = -1; __pyx_t_5 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_8 = Py_TYPE(__pyx_t_5)->tp_iternext;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ for (;;) {
+ if (!__pyx_t_8 && PyList_CheckExact(__pyx_t_5)) {
+ if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_5)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_1 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_5, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_8 && PyTuple_CheckExact(__pyx_t_5)) {
+ if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_5)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_5, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_1 = __pyx_t_8(__pyx_t_5);
+ if (unlikely(!__pyx_t_1)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_1);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1065
+ * cache = []
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle] # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1065; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_9 = PyObject_GetItem(__pyx_t_1, __pyx_v_handle); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1065; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_9);
+ __pyx_t_9 = 0;
+
+ /* "_yaml.pyx":1066
+ * for handle in event_object.tags:
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_handle) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1067
+ * prefix = event_object.tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<<
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ */
+ __pyx_t_9 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1067; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_DECREF_SET(__pyx_v_handle, __pyx_t_9);
+ __pyx_t_9 = 0;
+
+ /* "_yaml.pyx":1068
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_10 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1068; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L13;
+ }
+ __pyx_L13:;
+
+ /* "_yaml.pyx":1069
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_handle) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1070
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1071
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_Raise(__pyx_t_9, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1073
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_Raise(__pyx_t_9, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
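+
+ /* The paired raise statements keep the message a native str on both
+ * interpreters: a byte string under Python 2, a unicode string under
+ * Python 3. The same pattern repeats for every type check below. */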
+
+ /* "_yaml.pyx":1074
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ */
+ __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle);
+
+ /* "_yaml.pyx":1075
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_prefix) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1076
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<<
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ */
+ __pyx_t_9 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1076; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_DECREF_SET(__pyx_v_prefix, __pyx_t_9);
+ __pyx_t_9 = 0;
+
+ /* "_yaml.pyx":1077
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_10 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_10 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1077; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L16;
+ }
+ __pyx_L16:;
+
+ /* "_yaml.pyx":1078
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_prefix) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1079
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1080
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_Raise(__pyx_t_9, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1082
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_Raise(__pyx_t_9, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1083
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1
+ */
+ __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix);
+
+ /* "_yaml.pyx":1084
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<<
+ * implicit = 1
+ * if event_object.explicit:
+ */
+ __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1);
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ goto __pyx_L8;
+ }
+ __pyx_L8:;
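+
+ /* Every handle/prefix byte string is appended to `cache`, presumably to
+ * keep the buffers behind the PyString_AS_STRING() pointers stored in
+ * the tag_directives entries alive until the event has been emitted. */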
+
+ /* "_yaml.pyx":1085
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * if event_object.explicit:
+ * implicit = 0
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1086
+ * tag_directives_end = tag_directives_end+1
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive,
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_explicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1086; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1086; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1087
+ * implicit = 1
+ * if event_object.explicit:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ */
+ __pyx_v_implicit = 0;
+ goto __pyx_L19;
+ }
+ __pyx_L19:;
+
+ /* "_yaml.pyx":1089
+ * implicit = 0
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent:
+ */
+ __pyx_t_4 = ((yaml_document_start_event_initialize(__pyx_v_event, __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, __pyx_v_implicit) == 0) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1090
+ * if yaml_document_start_event_initialize(event, version_directive,
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1090; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
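+
+ /* yaml_document_start_event_initialize() returns 0 on allocation
+ * failure, which is surfaced to Python as MemoryError. */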
+
+ /* "_yaml.pyx":1091
+ * tag_directives_start, tag_directives_end, implicit) == 0:
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * if event_object.explicit:
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1091; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1092
+ * raise MemoryError
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * if event_object.explicit:
+ * implicit = 0
+ */
+ __pyx_v_implicit = 1;
+
+ /* "_yaml.pyx":1093
+ * elif event_class is DocumentEndEvent:
+ * implicit = 1
+ * if event_object.explicit: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_explicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1093; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1093; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1094
+ * implicit = 1
+ * if event_object.explicit:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent:
+ */
+ __pyx_v_implicit = 0;
+ goto __pyx_L21;
+ }
+ __pyx_L21:;
+
+ /* "_yaml.pyx":1095
+ * if event_object.explicit:
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit) # <<<<<<<<<<<<<<
+ * elif event_class is AliasEvent:
+ * anchor = NULL
+ */
+ yaml_document_end_event_initialize(__pyx_v_event, __pyx_v_implicit);
+ goto __pyx_L3;
+ }
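+
+ /* DocumentEndEvent mirrors DocumentStartEvent: `implicit` is simply the
+ * inverse of event_object.explicit. */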
+
+ /* "_yaml.pyx":1096
+ * implicit = 0
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1096; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1097
+ * yaml_document_end_event_initialize(event, implicit)
+ * elif event_class is AliasEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1098
+ * elif event_class is AliasEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1098; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_anchor_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1099
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1100
+ * anchor_object = event_object.anchor
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1100; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L22;
+ }
+ __pyx_L22:;
+
+ /* "_yaml.pyx":1101
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1102
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1103
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__28, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1105
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1106
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+
+ /* "_yaml.pyx":1107
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is ScalarEvent:
+ */
+ __pyx_t_4 = ((yaml_alias_event_initialize(__pyx_v_event, __pyx_v_anchor) == 0) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1108
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is ScalarEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1108; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
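+
+ /* AliasEvent: the anchor must end up a byte string (unicode input is
+ * UTF-8 encoded first), and a 0 return from
+ * yaml_alias_event_initialize() is again raised as MemoryError. */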
+
+ /* "_yaml.pyx":1109
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ * raise MemoryError
+ * elif event_class is ScalarEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1109; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1110
+ * raise MemoryError
+ * elif event_class is ScalarEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1111
+ * elif event_class is ScalarEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1111; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_anchor_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1112
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_2 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1113
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1114
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1114; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L27;
+ }
+ __pyx_L27:;
+
+ /* "_yaml.pyx":1115
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1116
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1117
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1119
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1120
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+ goto __pyx_L26;
+ }
+ __pyx_L26:;
+
+ /* "_yaml.pyx":1121
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1122
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1122; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_tag_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1123
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_4 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_4 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1124
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1125
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1125; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L31;
+ }
+ __pyx_L31:;
+
+ /* "_yaml.pyx":1126
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1127
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1128
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__32, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1130
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__33, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1131
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object):
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L30;
+ }
+ __pyx_L30:;
+
+ /* "_yaml.pyx":1132
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_value); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1132; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_value_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1133
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_value_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1134
+ * value_object = event_object.value
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_value_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_value_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L34;
+ }
+ __pyx_L34:;
+
+ /* "_yaml.pyx":1135
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_value_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1136
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1137
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__34, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1139
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__35, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1140
+ * else:
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<<
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0
+ */
+ __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1141
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<<
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ */
+ __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1142
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0 # <<<<<<<<<<<<<<
+ * quoted_implicit = 0
+ * if event_object.implicit is not None:
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":1143
+ * length = PyString_GET_SIZE(value_object)
+ * plain_implicit = 0
+ * quoted_implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0]
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":1144
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ * if event_object.implicit is not None: # <<<<<<<<<<<<<<
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1]
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1144; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = (__pyx_t_5 != Py_None);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_4 = (__pyx_t_2 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1145
+ * quoted_implicit = 0
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0] # <<<<<<<<<<<<<<
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_9 = __Pyx_GetItemInt(__pyx_t_5, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_9);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_6 = __Pyx_PyInt_As_int(__pyx_t_9); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __pyx_v_plain_implicit = __pyx_t_6;
+
+ /* "_yaml.pyx":1146
+ * if event_object.implicit is not None:
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1] # <<<<<<<<<<<<<<
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ */
+ __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_9, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_5 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __pyx_t_6 = __Pyx_PyInt_As_int(__pyx_t_5); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_v_quoted_implicit = __pyx_t_6;
+ goto __pyx_L37;
+ }
+ __pyx_L37:;
+
+ /* "_yaml.pyx":1147
+ * plain_implicit = event_object.implicit[0]
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style # <<<<<<<<<<<<<<
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_style); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1147; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_style_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1148
+ * quoted_implicit = event_object.implicit[1]
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1149
+ * style_object = event_object.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ __pyx_t_4 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__7, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1149; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_4) {
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__7, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1149; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = __pyx_t_2;
+ } else {
+ __pyx_t_3 = __pyx_t_4;
+ }
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1150
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE;
+ goto __pyx_L38;
+ }
+
+ /* "_yaml.pyx":1151
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__8, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1151; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_3) {
+ __pyx_t_4 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__8, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1151; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = __pyx_t_4;
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ }
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1152
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE;
+ goto __pyx_L38;
+ }
+
+ /* "_yaml.pyx":1153
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__9, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1153; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_2) {
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1153; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_4 = __pyx_t_3;
+ } else {
+ __pyx_t_4 = __pyx_t_2;
+ }
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1154
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE;
+ goto __pyx_L38;
+ }
+
+ /* "_yaml.pyx":1155
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ */
+ __pyx_t_4 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__10, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1155; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_4) {
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__10, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1155; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = __pyx_t_2;
+ } else {
+ __pyx_t_3 = __pyx_t_4;
+ }
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1156
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ */
+ __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE;
+ goto __pyx_L38;
+ }
+ __pyx_L38:;
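+
+ /* The one-character style codes map onto libyaml scalar styles:
+ * "'" single-quoted, '"' double-quoted, "|" literal, ">" folded;
+ * any other value leaves YAML_PLAIN_SCALAR_STYLE in place. */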
+
+ /* "_yaml.pyx":1158
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent:
+ */
+ __pyx_t_3 = ((yaml_scalar_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1159
+ * if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1159; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
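+
+ /* End of the ScalarEvent branch: anchor, tag, value/length, the
+ * (plain_implicit, quoted_implicit) pair and the resolved style all
+ * feed yaml_scalar_event_initialize(). */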
+
+ /* "_yaml.pyx":1160
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1160; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_4 = (__pyx_t_3 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1161
+ * raise MemoryError
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1162
+ * elif event_class is SequenceStartEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1162; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_anchor_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1163
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_4 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_3 = (__pyx_t_4 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1164
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1165
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L41;
+ }
+ __pyx_L41:;
+
+ /* "_yaml.pyx":1166
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1167
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1168
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__36, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1170
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__37, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1171
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+ goto __pyx_L40;
+ }
+ __pyx_L40:;
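+
+ /* SequenceStartEvent repeats the anchor validation used for ScalarEvent
+ * above: unicode anchors are UTF-8 encoded and anything that is not a
+ * byte string raises TypeError. */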
+
+ /* "_yaml.pyx":1172
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1173
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1173; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_tag_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1174
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_4 = (__pyx_t_3 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1175
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1176
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1176; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L45;
+ }
+ __pyx_L45:;
+
+ /* "_yaml.pyx":1177
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1178
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1179
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__38, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1181
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__39, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1182
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if event_object.implicit:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L44;
+ }
+ __pyx_L44:;
+
+ /* "_yaml.pyx":1183
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit:
+ * implicit = 1
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1184
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1184; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1184; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1185
+ * implicit = 0
+ * if event_object.implicit:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L48;
+ }
+ __pyx_L48:;
+
+ /* "_yaml.pyx":1186
+ * if event_object.implicit:
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ */
+ __pyx_v_sequence_style = YAML_BLOCK_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1187
+ * implicit = 1
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1187; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1187; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1188
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if event_object.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ */
+ __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE;
+ goto __pyx_L49;
+ }
+ __pyx_L49:;
+
+ /* "_yaml.pyx":1190
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is MappingStartEvent:
+ */
+ __pyx_t_4 = ((yaml_sequence_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1191
+ * if yaml_sequence_start_event_initialize(event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1191; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1192
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * elif event_class is MappingStartEvent: # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1192; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_3 = (__pyx_t_4 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1193
+ * raise MemoryError
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1194
+ * elif event_class is MappingStartEvent:
+ * anchor = NULL
+ * anchor_object = event_object.anchor # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1194; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_anchor_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1195
+ * anchor = NULL
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_3 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_4 = (__pyx_t_3 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1196
+ * anchor_object = event_object.anchor
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_4 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1197
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1197; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L52;
+ }
+ __pyx_L52:;
+
+ /* "_yaml.pyx":1198
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_4 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1199
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_4 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1200
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__40, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1202
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__41, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1203
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * tag = NULL
+ * tag_object = event_object.tag
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+ goto __pyx_L51;
+ }
+ __pyx_L51:;
+
+ /* "_yaml.pyx":1204
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL # <<<<<<<<<<<<<<
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1205
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ * tag_object = event_object.tag # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1205; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_tag_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1206
+ * tag = NULL
+ * tag_object = event_object.tag
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_4 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_3 = (__pyx_t_4 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1207
+ * tag_object = event_object.tag
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1208
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1208; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L56;
+ }
+ __pyx_L56:;
+
+ /* "_yaml.pyx":1209
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1210
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1211
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__42, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1213
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__43, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1214
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * implicit = 0
+ * if event_object.implicit:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L55;
+ }
+ __pyx_L55:;
+
+ /* "_yaml.pyx":1215
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * if event_object.implicit:
+ * implicit = 1
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1216
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ * if event_object.implicit: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_implicit); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1216; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1216; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1217
+ * implicit = 0
+ * if event_object.implicit:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L59;
+ }
+ __pyx_L59:;
+
+ /* "_yaml.pyx":1218
+ * if event_object.implicit:
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ */
+ __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1219
+ * implicit = 1
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_event_object, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1219; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1219; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1220
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if event_object.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ */
+ __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE;
+ goto __pyx_L60;
+ }
+ __pyx_L60:;
+
+ /* "_yaml.pyx":1222
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent:
+ */
+ __pyx_t_3 = ((yaml_mapping_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1223
+ * if yaml_mapping_start_event_initialize(event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event)
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1223; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1224
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent: # <<<<<<<<<<<<<<
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent:
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1224; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_4 = (__pyx_t_3 != 0);
+ if (__pyx_t_4) {
+
+ /* "_yaml.pyx":1225
+ * raise MemoryError
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * elif event_class is MappingEndEvent:
+ * yaml_mapping_end_event_initialize(event)
+ */
+ yaml_sequence_end_event_initialize(__pyx_v_event);
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1226
+ * elif event_class is SequenceEndEvent:
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent: # <<<<<<<<<<<<<<
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1226; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = (__pyx_v_event_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_3 = (__pyx_t_4 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1227
+ * yaml_sequence_end_event_initialize(event)
+ * elif event_class is MappingEndEvent:
+ * yaml_mapping_end_event_initialize(event) # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ yaml_mapping_end_event_initialize(__pyx_v_event);
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1229
+ * yaml_mapping_end_event_initialize(event)
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("invalid event %s" % event_object)
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1230
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("invalid event %s" % event_object) # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object)
+ */
+ __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_invalid_event_s, __pyx_v_event_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_9 = PyTuple_New(1); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_9, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1232
+ * raise TypeError("invalid event %s" % event_object)
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_5 = PyUnicode_Format(__pyx_kp_u_invalid_event_s, __pyx_v_event_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_9 = PyTuple_New(1); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_9, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1233
+ * else:
+ * raise TypeError(u"invalid event %s" % event_object)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * def emit(self, event_object):
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1016
+ * raise ValueError(u"no emitter error")
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_encoding_t encoding
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_AddTraceback("_yaml.CEmitter._object_to_event", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_event_class);
+ __Pyx_XDECREF(__pyx_v_cache);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_anchor_object);
+ __Pyx_XDECREF(__pyx_v_tag_object);
+ __Pyx_XDECREF(__pyx_v_value_object);
+ __Pyx_XDECREF(__pyx_v_style_object);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
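
[reviewer note] The hunk above is Cython-generated C for CEmitter._object_to_event; the interleaved /* "_yaml.pyx":NNNN */ comments quote the original Cython source. For readers following the logic rather than the codegen, here is a hedged Python-3 sketch of the anchor/tag handling it implements for SequenceStartEvent and MappingStartEvent. The helper name _coerce_anchor_and_tag is hypothetical, introduced only for illustration; the real code also branches on PY_MAJOR_VERSION to pick a bytes or unicode error message on Python 2.

    def _coerce_anchor_and_tag(event_object):
        """Illustrative sketch only; mirrors the quoted _yaml.pyx logic."""
        anchor = None
        anchor_object = event_object.anchor
        if anchor_object is not None:
            if isinstance(anchor_object, str):                 # PyUnicode_CheckExact
                anchor_object = anchor_object.encode('utf-8')  # PyUnicode_AsUTF8String
            if not isinstance(anchor_object, bytes):           # PyString_CheckExact
                raise TypeError("anchor must be a string")
            anchor = anchor_object                             # PyString_AS_STRING
        tag = None
        tag_object = event_object.tag
        if tag_object is not None:
            if isinstance(tag_object, str):
                tag_object = tag_object.encode('utf-8')
            if not isinstance(tag_object, bytes):
                raise TypeError("tag must be a string")
            tag = tag_object
        return anchor, tag

Every failure path in the C funnels through the same {__pyx_filename = ...; goto __pyx_L1_error;} idiom, which records the source position that __Pyx_AddTraceback uses at the function exit to synthesize a Python traceback frame for the C-level code.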
+
+/* "_yaml.pyx":1235
+ * return 1
+ *
+ * def emit(self, event_object): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_7emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_7emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("emit (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_6emit(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), ((PyObject *)__pyx_v_event_object));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_6emit(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("emit", 0);
+
+ /* "_yaml.pyx":1237
+ * def emit(self, event_object):
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_object_to_event(__pyx_v_self, __pyx_v_event_object, (&__pyx_v_event)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1237; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1238
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_1 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1238; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = ((__pyx_t_1 == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1239
+ * self._object_to_event(event_object, &event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ *
+ */
+ __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1239; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_v_error = __pyx_t_3;
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1240
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ *
+ * def open(self):
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1240; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1235
+ * return 1
+ *
+ * def emit(self, event_object): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * self._object_to_event(event_object, &event)
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CEmitter.emit", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
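
[reviewer note] For orientation, a hedged sketch of emit()'s control flow as quoted in the _yaml.pyx comments above. _object_to_event, yaml_emitter_emit, and _emitter_error are the C-level entry points (vtable calls and a libyaml function in the generated code); they are written here as ordinary calls purely for illustration, and the yaml_event_t out-parameter is glossed as a return value.

    def emit(self, event_object):
        event = self._object_to_event(event_object)      # may raise TypeError
        if yaml_emitter_emit(self.emitter, event) == 0:  # 0 means libyaml failure
            raise self._emitter_error()                  # build the EmitterError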
+
+/* "_yaml.pyx":1242
+ * raise error
+ *
+ * def open(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_9open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_9open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("open (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_8open(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_8open(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ yaml_encoding_t __pyx_v_encoding;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("open", 0);
+
+ /* "_yaml.pyx":1261
+ * raise error
+ * self.closed = 0
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ switch (__pyx_v_self->closed) {
+
+ /* "_yaml.pyx":1245
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ */
+ case -1:
+
+ /* "_yaml.pyx":1246
+ * cdef yaml_encoding_t encoding
+ * if self.closed == -1:
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ */
+ __pyx_t_1 = (__Pyx_PyUnicode_Equals(__pyx_v_self->use_encoding, __pyx_kp_u_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1246; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_1) {
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_self->use_encoding, __pyx_kp_s_utf_16_le, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1246; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = __pyx_t_2;
+ } else {
+ __pyx_t_3 = __pyx_t_1;
+ }
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1247
+ * if self.closed == -1:
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<<
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16LE_ENCODING;
+ goto __pyx_L3;
+ }
+
+ /* "_yaml.pyx":1248
+ * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF16BE_ENCODING
+ * else:
+ */
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_self->use_encoding, __pyx_kp_u_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1248; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_3) {
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_self->use_encoding, __pyx_kp_s_utf_16_be, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1248; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = __pyx_t_1;
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ }
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1249
+ * encoding = YAML_UTF16LE_ENCODING
+ * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<<
+ * else:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_encoding = YAML_UTF16BE_ENCODING;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1251
+ * encoding = YAML_UTF16BE_ENCODING
+ * else:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1252
+ * else:
+ * encoding = YAML_UTF8_ENCODING
+ * if self.use_encoding is None: # <<<<<<<<<<<<<<
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ */
+ __pyx_t_2 = (__pyx_v_self->use_encoding == Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1253
+ * encoding = YAML_UTF8_ENCODING
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1 # <<<<<<<<<<<<<<
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ */
+ __pyx_v_self->dump_unicode = 1;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":1254
+ * if self.use_encoding is None:
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1: # <<<<<<<<<<<<<<
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ */
+ __pyx_t_3 = ((__pyx_v_self->dump_unicode == 1) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1255
+ * self.dump_unicode = 1
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<<
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_v_encoding = YAML_UTF8_ENCODING;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":1256
+ * if self.dump_unicode == 1:
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ yaml_stream_start_event_initialize((&__pyx_v_event), __pyx_v_encoding);
+
+ /* "_yaml.pyx":1257
+ * encoding = YAML_UTF8_ENCODING
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1257; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1258
+ * yaml_stream_start_event_initialize(&event, encoding)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.closed = 0
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1258; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_error = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1259
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.closed = 0
+ * elif self.closed == 1:
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1259; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1260
+ * error = self._emitter_error()
+ * raise error
+ * self.closed = 0 # <<<<<<<<<<<<<<
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_v_self->closed = 0;
+ break;
+
+ /* "_yaml.pyx":1261
+ * raise error
+ * self.closed = 0
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ case 1:
+
+ /* "_yaml.pyx":1262
+ * self.closed = 0
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1263
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__44, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1265
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_tuple__45, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ default:
+
+ /* "_yaml.pyx":1267
+ * raise SerializerError(u"serializer is closed")
+ * else:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is already opened")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1268
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is already opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is already opened")
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_tuple__46, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1270
+ * raise SerializerError("serializer is already opened")
+ * else:
+ * raise SerializerError(u"serializer is already opened") # <<<<<<<<<<<<<<
+ *
+ * def close(self):
+ */
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_tuple__47, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+
+ /* "_yaml.pyx":1242
+ * raise error
+ *
+ * def open(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_encoding_t encoding
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_AddTraceback("_yaml.CEmitter.open", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
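
[reviewer note] open() is a three-way state machine on self.closed (-1 = never opened, 0 = open, 1 = closed), which Cython compiles to the switch above. A hedged Python sketch reconstructed from the quoted .pyx lines; the YAML_* constants below are placeholders for the libyaml enum values, SerializerError is PyYAML's, and the actual STREAM-START emission is elided to a comment.

    YAML_UTF8_ENCODING, YAML_UTF16LE_ENCODING, YAML_UTF16BE_ENCODING = range(3)

    def open(self):
        if self.closed == -1:
            if self.use_encoding == u'utf-16-le':
                encoding = YAML_UTF16LE_ENCODING
            elif self.use_encoding == u'utf-16-be':
                encoding = YAML_UTF16BE_ENCODING
            else:
                encoding = YAML_UTF8_ENCODING
            if self.use_encoding is None:
                self.dump_unicode = 1   # no explicit encoding: hand back unicode
            if self.dump_unicode == 1:
                encoding = YAML_UTF8_ENCODING
            # ... emit STREAM-START with `encoding`, raising _emitter_error() on failure
            self.closed = 0
        elif self.closed == 1:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

Note the quoted source compares use_encoding against both u'...' and '...' literals so the same check works for unicode and byte strings on Python 2; the sketch collapses that to one comparison.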
+
+/* "_yaml.pyx":1272
+ * raise SerializerError(u"serializer is already opened")
+ *
+ * def close(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_11close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_11close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("close (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_10close(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_10close(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) {
+ yaml_event_t __pyx_v_event;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_t_4;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("close", 0);
+
+ /* "_yaml.pyx":1279
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 0: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ switch (__pyx_v_self->closed) {
+
+ /* "_yaml.pyx":1274
+ * def close(self):
+ * cdef yaml_event_t event
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ case -1:
+
+ /* "_yaml.pyx":1275
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1276
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__48, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1278
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ */
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__49, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+
+ /* "_yaml.pyx":1279
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 0: # <<<<<<<<<<<<<<
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ case 0:
+
+ /* "_yaml.pyx":1280
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ yaml_stream_end_event_initialize((&__pyx_v_event));
+
+ /* "_yaml.pyx":1281
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1281; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1282
+ * yaml_stream_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.closed = 1
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1283
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.closed = 1
+ *
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1283; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1284
+ * error = self._emitter_error()
+ * raise error
+ * self.closed = 1 # <<<<<<<<<<<<<<
+ *
+ * def serialize(self, node):
+ */
+ __pyx_v_self->closed = 1;
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1272
+ * raise SerializerError(u"serializer is already opened")
+ *
+ * def close(self): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * if self.closed == -1:
+ */
+
+ /* function exit code */
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CEmitter.close", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
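
[reviewer note] close() mirrors open(); a matching sketch under the same assumptions (SerializerError is PyYAML's, the STREAM-END emission is elided). The C's `default: break;` means a second close() is a silent no-op.

    def close(self):
        if self.closed == -1:
            raise SerializerError("serializer is not opened")
        elif self.closed == 0:
            # ... emit STREAM-END; raise _emitter_error() if yaml_emitter_emit returns 0
            self.closed = 1
        # self.closed == 1 falls through: closing twice is allowed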
+
+/* "_yaml.pyx":1286
+ * self.closed = 1
+ *
+ * def serialize(self, node): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_5_yaml_8CEmitter_13serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node); /*proto*/
+static PyObject *__pyx_pw_5_yaml_8CEmitter_13serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node) {
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("serialize (wrapper)", 0);
+ __pyx_r = __pyx_pf_5_yaml_8CEmitter_12serialize(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), ((PyObject *)__pyx_v_node));
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_5_yaml_8CEmitter_12serialize(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node) {
+ yaml_event_t __pyx_v_event;
+ yaml_version_directive_t __pyx_v_version_directive_value;
+ yaml_version_directive_t *__pyx_v_version_directive;
+ yaml_tag_directive_t __pyx_v_tag_directives_value[128];
+ yaml_tag_directive_t *__pyx_v_tag_directives_start;
+ yaml_tag_directive_t *__pyx_v_tag_directives_end;
+ PyObject *__pyx_v_cache = NULL;
+ PyObject *__pyx_v_handle = NULL;
+ PyObject *__pyx_v_prefix = NULL;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_t_4;
+ Py_ssize_t __pyx_t_5;
+ PyObject *(*__pyx_t_6)(PyObject *);
+ int __pyx_t_7;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("serialize", 0);
+
+ /* "_yaml.pyx":1298
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ switch (__pyx_v_self->closed) {
+
+ /* "_yaml.pyx":1293
+ * cdef yaml_tag_directive_t *tag_directives_start
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened")
+ */
+ case -1:
+
+ /* "_yaml.pyx":1294
+ * cdef yaml_tag_directive_t *tag_directives_end
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is not opened")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1295
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__50, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1297
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__51, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+
+ /* "_yaml.pyx":1298
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed")
+ */
+ case 1:
+
+ /* "_yaml.pyx":1299
+ * raise SerializerError(u"serializer is not opened")
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise SerializerError("serializer is closed")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1300
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple__52, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1302
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * cache = []
+ * version_directive = NULL
+ */
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__53, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ default: break;
+ }
+
+ /* "_yaml.pyx":1303
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ * cache = [] # <<<<<<<<<<<<<<
+ * version_directive = NULL
+ * if self.use_version:
+ */
+ __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1303; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_cache = ((PyObject*)__pyx_t_2);
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1304
+ * raise SerializerError(u"serializer is closed")
+ * cache = []
+ * version_directive = NULL # <<<<<<<<<<<<<<
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0]
+ */
+ __pyx_v_version_directive = NULL;
+
+ /* "_yaml.pyx":1305
+ * cache = []
+ * version_directive = NULL
+ * if self.use_version: # <<<<<<<<<<<<<<
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1]
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->use_version); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1305; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1306
+ * version_directive = NULL
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0] # <<<<<<<<<<<<<<
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value
+ */
+ __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_self->use_version, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1306; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1306; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_v_version_directive_value.major = __pyx_t_4;
+
+ /* "_yaml.pyx":1307
+ * if self.use_version:
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1] # <<<<<<<<<<<<<<
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ */
+ __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_self->use_version, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1307; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1307; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_v_version_directive_value.minor = __pyx_t_4;
+
+ /* "_yaml.pyx":1308
+ * version_directive_value.major = self.use_version[0]
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value # <<<<<<<<<<<<<<
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ */
+ __pyx_v_version_directive = (&__pyx_v_version_directive_value);
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+
+ /* "_yaml.pyx":1309
+ * version_directive_value.minor = self.use_version[1]
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL # <<<<<<<<<<<<<<
+ * tag_directives_end = NULL
+ * if self.use_tags:
+ */
+ __pyx_v_tag_directives_start = NULL;
+
+ /* "_yaml.pyx":1310
+ * version_directive = &version_directive_value
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL # <<<<<<<<<<<<<<
+ * if self.use_tags:
+ * if len(self.use_tags) > 128:
+ */
+ __pyx_v_tag_directives_end = NULL;
+
+ /* "_yaml.pyx":1311
+ * tag_directives_start = NULL
+ * tag_directives_end = NULL
+ * if self.use_tags: # <<<<<<<<<<<<<<
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->use_tags); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1311; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1312
+ * tag_directives_end = NULL
+ * if self.use_tags:
+ * if len(self.use_tags) > 128: # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags")
+ */
+ __pyx_t_2 = __pyx_v_self->use_tags;
+ __Pyx_INCREF(__pyx_t_2);
+ __pyx_t_5 = PyObject_Length(__pyx_t_2); if (unlikely(__pyx_t_5 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1312; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __pyx_t_1 = ((__pyx_t_5 > 128) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1313
+ * if self.use_tags:
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise ValueError("too many tags")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1314
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__54, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1316
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__55, NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_Raise(__pyx_t_2, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1317
+ * else:
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags:
+ */
+ __pyx_v_tag_directives_start = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1318
+ * raise ValueError(u"too many tags")
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<<
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle]
+ */
+ __pyx_v_tag_directives_end = __pyx_v_tag_directives_value;
+
+ /* "_yaml.pyx":1319
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags: # <<<<<<<<<<<<<<
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ */
+ if (PyList_CheckExact(__pyx_v_self->use_tags) || PyTuple_CheckExact(__pyx_v_self->use_tags)) {
+ __pyx_t_2 = __pyx_v_self->use_tags; __Pyx_INCREF(__pyx_t_2); __pyx_t_5 = 0;
+ __pyx_t_6 = NULL;
+ } else {
+ __pyx_t_5 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_self->use_tags); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_6 = Py_TYPE(__pyx_t_2)->tp_iternext;
+ }
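+ /* Generic for-loop protocol: exact lists/tuples are walked by index
+  * with PyList_GET_ITEM/PyTuple_GET_ITEM on CPython, while any other
+  * iterable falls back to tp_iternext, with StopIteration treated as
+  * normal loop exit. */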
+ for (;;) {
+ if (!__pyx_t_6 && PyList_CheckExact(__pyx_t_2)) {
+ if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_2)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_2, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_6 && PyTuple_CheckExact(__pyx_t_2)) {
+ if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_2)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_2, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_3 = __pyx_t_6(__pyx_t_2);
+ if (unlikely(!__pyx_t_3)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_3);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_handle, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1320
+ * tag_directives_end = tag_directives_value
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle] # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ */
+ __pyx_t_3 = PyObject_GetItem(__pyx_v_self->use_tags, __pyx_v_handle); if (unlikely(__pyx_t_3 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1320; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_XDECREF_SET(__pyx_v_prefix, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1321
+ * for handle in self.use_tags:
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<<
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ */
+ __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_handle) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1322
+ * prefix = self.use_tags[handle]
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<<
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ */
+ __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1322; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_handle, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1323
+ * if PyUnicode_CheckExact(handle):
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_7 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1323; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L11;
+ }
+ __pyx_L11:;
+
+ /* "_yaml.pyx":1324
+ * handle = PyUnicode_AsUTF8String(handle)
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string")
+ */
+ __pyx_t_1 = ((!(PyString_CheckExact(__pyx_v_handle) != 0)) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1325
+ * cache.append(handle)
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag handle must be a string")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1326
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__56, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1328
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__57, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1329
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ */
+ __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle);
+
+ /* "_yaml.pyx":1330
+ * raise TypeError(u"tag handle must be a string")
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ */
+ __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_prefix) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1331
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<<
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ */
+ __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1331; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF_SET(__pyx_v_prefix, __pyx_t_3);
+ __pyx_t_3 = 0;
+
+ /* "_yaml.pyx":1332
+ * if PyUnicode_CheckExact(prefix):
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_7 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1332; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ goto __pyx_L14;
+ }
+ __pyx_L14:;
+
+ /* "_yaml.pyx":1333
+ * prefix = PyUnicode_AsUTF8String(prefix)
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string")
+ */
+ __pyx_t_1 = ((!(PyString_CheckExact(__pyx_v_prefix) != 0)) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1334
+ * cache.append(prefix)
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ */
+ __pyx_t_1 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1335
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__58, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1337
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__59, NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1338
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<<
+ * tag_directives_end = tag_directives_end+1
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ */
+ __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix);
+
+ /* "_yaml.pyx":1339
+ * raise TypeError(u"tag prefix must be a string")
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<<
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ * tag_directives_start, tag_directives_end,
+ */
+ __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1);
+ }
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ goto __pyx_L6;
+ }
+ __pyx_L6:;
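+ /* Each (handle, prefix) pair has now been validated as a byte string
+  * (unicode values were re-encoded to UTF-8 and the copies appended to
+  * `cache` so their buffers outlive the borrowed char* pointers) and
+  * written through the advancing tag_directives_end cursor, which ends
+  * up pointing one past the last entry. */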
+
+ /* "_yaml.pyx":1342
+ * if yaml_document_start_event_initialize(&event, version_directive,
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_1 = ((yaml_document_start_event_initialize((&__pyx_v_event), __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, __pyx_v_self->document_start_implicit) == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1343
+ * tag_directives_start, tag_directives_end,
+ * self.document_start_implicit) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1343; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1344
+ * self.document_start_implicit) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1344; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1345
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self._anchor_node(node)
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1345; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1346
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1346; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1347
+ * error = self._emitter_error()
+ * raise error
+ * self._anchor_node(node) # <<<<<<<<<<<<<<
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_node); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1347; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1348
+ * raise error
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None) # <<<<<<<<<<<<<<
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_node, Py_None, Py_None); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1348; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1349
+ * self._anchor_node(node)
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ yaml_document_end_event_initialize((&__pyx_v_event), __pyx_v_self->document_end_implicit);
+
+ /* "_yaml.pyx":1350
+ * self._serialize_node(node, None, None)
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1350; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_1 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1351
+ * yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.serialized_nodes = {}
+ */
+ __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1351; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_v_error = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1352
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1352; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1353
+ * error = self._emitter_error()
+ * raise error
+ * self.serialized_nodes = {} # <<<<<<<<<<<<<<
+ * self.anchors = {}
+ * self.last_alias_id = 0
+ */
+ __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1353; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->serialized_nodes);
+ __Pyx_DECREF(__pyx_v_self->serialized_nodes);
+ __pyx_v_self->serialized_nodes = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1354
+ * raise error
+ * self.serialized_nodes = {}
+ * self.anchors = {} # <<<<<<<<<<<<<<
+ * self.last_alias_id = 0
+ *
+ */
+ __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1354; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_2);
+ __Pyx_GOTREF(__pyx_v_self->anchors);
+ __Pyx_DECREF(__pyx_v_self->anchors);
+ __pyx_v_self->anchors = __pyx_t_2;
+ __pyx_t_2 = 0;
+
+ /* "_yaml.pyx":1355
+ * self.serialized_nodes = {}
+ * self.anchors = {}
+ * self.last_alias_id = 0 # <<<<<<<<<<<<<<
+ *
+ * cdef int _anchor_node(self, object node) except 0:
+ */
+ __pyx_v_self->last_alias_id = 0;
+
+ /* "_yaml.pyx":1286
+ * self.closed = 1
+ *
+ * def serialize(self, node): # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef yaml_version_directive_t version_directive_value
+ */
+
+ /* function exit code */
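+ /* Success falls through with a new reference to None; the shared
+  * __pyx_L1_error path drops live temporaries, records a traceback
+  * frame for _yaml.CEmitter.serialize, and returns NULL to signal the
+  * pending exception. */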
+ __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("_yaml.CEmitter.serialize", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_cache);
+ __Pyx_XDECREF(__pyx_v_handle);
+ __Pyx_XDECREF(__pyx_v_prefix);
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1357
+ * self.last_alias_id = 0
+ *
+ * cdef int _anchor_node(self, object node) except 0: # <<<<<<<<<<<<<<
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ */
+
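+/* First serialization pass: walks the node graph rooted at `node` and
+ * fills self.anchors. A node reached a second time gets an anchor name
+ * of the form "id%03d"; a first visit records None and recurses into
+ * sequence items and mapping key/value pairs. Declared `except 0`, so
+ * a 0 return signals a pending Python exception. */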
+static int __pyx_f_5_yaml_8CEmitter__anchor_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node) {
+ PyObject *__pyx_v_node_class = NULL;
+ PyObject *__pyx_v_item = NULL;
+ PyObject *__pyx_v_key = NULL;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ Py_ssize_t __pyx_t_5;
+ PyObject *(*__pyx_t_6)(PyObject *);
+ int __pyx_t_7;
+ PyObject *__pyx_t_8 = NULL;
+ PyObject *__pyx_t_9 = NULL;
+ PyObject *__pyx_t_10 = NULL;
+ PyObject *(*__pyx_t_11)(PyObject *);
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_anchor_node", 0);
+
+ /* "_yaml.pyx":1358
+ *
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors: # <<<<<<<<<<<<<<
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1
+ */
+ __pyx_t_1 = (__Pyx_PySequence_Contains(__pyx_v_node, __pyx_v_self->anchors, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1358; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1359
+ * cdef int _anchor_node(self, object node) except 0:
+ * if node in self.anchors:
+ * if self.anchors[node] is None: # <<<<<<<<<<<<<<
+ * self.last_alias_id = self.last_alias_id+1
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ */
+ __pyx_t_3 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (unlikely(__pyx_t_3 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1359; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = (__pyx_t_3 == Py_None);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1360
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1 # <<<<<<<<<<<<<<
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ * else:
+ */
+ __pyx_v_self->last_alias_id = (__pyx_v_self->last_alias_id + 1);
+
+ /* "_yaml.pyx":1361
+ * if self.anchors[node] is None:
+ * self.last_alias_id = self.last_alias_id+1
+ * self.anchors[node] = u"id%03d" % self.last_alias_id # <<<<<<<<<<<<<<
+ * else:
+ * self.anchors[node] = None
+ */
+ __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->last_alias_id); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_4 = PyUnicode_Format(__pyx_kp_u_id_03d, __pyx_t_3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, __pyx_t_4) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1363
+ * self.anchors[node] = u"id%03d" % self.last_alias_id
+ * else:
+ * self.anchors[node] = None # <<<<<<<<<<<<<<
+ * node_class = node.__class__
+ * if node_class is SequenceNode:
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, Py_None) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1363; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1364
+ * else:
+ * self.anchors[node] = None
+ * node_class = node.__class__ # <<<<<<<<<<<<<<
+ * if node_class is SequenceNode:
+ * for item in node.value:
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_class); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1364; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_v_node_class = __pyx_t_4;
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1365
+ * self.anchors[node] = None
+ * node_class = node.__class__
+ * if node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * for item in node.value:
+ * self._anchor_node(item)
+ */
+ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1365; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_1 = (__pyx_v_node_class == __pyx_t_4);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_2 = (__pyx_t_1 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1366
+ * node_class = node.__class__
+ * if node_class is SequenceNode:
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ */
+ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ if (PyList_CheckExact(__pyx_t_4) || PyTuple_CheckExact(__pyx_t_4)) {
+ __pyx_t_3 = __pyx_t_4; __Pyx_INCREF(__pyx_t_3); __pyx_t_5 = 0;
+ __pyx_t_6 = NULL;
+ } else {
+ __pyx_t_5 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext;
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ for (;;) {
+ if (!__pyx_t_6 && PyList_CheckExact(__pyx_t_3)) {
+ if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_4 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_6 && PyTuple_CheckExact(__pyx_t_3)) {
+ if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_4 = __pyx_t_6(__pyx_t_3);
+ if (unlikely(!__pyx_t_4)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_4);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_4);
+ __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1367
+ * if node_class is SequenceNode:
+ * for item in node.value:
+ * self._anchor_node(item) # <<<<<<<<<<<<<<
+ * elif node_class is MappingNode:
+ * for key, value in node.value:
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_item); if (unlikely(__pyx_t_7 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1367; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ goto __pyx_L5;
+ }
+
+ /* "_yaml.pyx":1368
+ * for item in node.value:
+ * self._anchor_node(item)
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * for key, value in node.value:
+ * self._anchor_node(key)
+ */
+ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingNode); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1368; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_3);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_1 = (__pyx_t_2 != 0);
+ if (__pyx_t_1) {
+
+ /* "_yaml.pyx":1369
+ * self._anchor_node(item)
+ * elif node_class is MappingNode:
+ * for key, value in node.value: # <<<<<<<<<<<<<<
+ * self._anchor_node(key)
+ * self._anchor_node(value)
+ */
+ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ if (PyList_CheckExact(__pyx_t_3) || PyTuple_CheckExact(__pyx_t_3)) {
+ __pyx_t_4 = __pyx_t_3; __Pyx_INCREF(__pyx_t_4); __pyx_t_5 = 0;
+ __pyx_t_6 = NULL;
+ } else {
+ __pyx_t_5 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_6 = Py_TYPE(__pyx_t_4)->tp_iternext;
+ }
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ for (;;) {
+ if (!__pyx_t_6 && PyList_CheckExact(__pyx_t_4)) {
+ if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_3 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_6 && PyTuple_CheckExact(__pyx_t_4)) {
+ if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_4)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_3 = __pyx_t_6(__pyx_t_4);
+ if (unlikely(!__pyx_t_3)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_3);
+ }
+ if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) {
+ PyObject* sequence = __pyx_t_3;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ Py_ssize_t size = Py_SIZE(sequence);
+ #else
+ Py_ssize_t size = PySequence_Size(sequence);
+ #endif
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ #if CYTHON_COMPILING_IN_CPYTHON
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_9 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_8 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_9 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_8);
+ __Pyx_INCREF(__pyx_t_9);
+ #else
+ __pyx_t_8 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_8);
+ __pyx_t_9 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_9);
+ #endif
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_10 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_10)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_10);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext;
+ index = 0; __pyx_t_8 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_8)) goto __pyx_L10_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_8);
+ index = 1; __pyx_t_9 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_9)) goto __pyx_L10_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_9);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_10), 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_11 = NULL;
+ __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
+ goto __pyx_L11_unpacking_done;
+ __pyx_L10_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
+ __pyx_t_11 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_L11_unpacking_done:;
+ }
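+ /* The 2-tuple unpack above uses direct item access for exact
+  * tuples/lists and otherwise drains an iterator, raising the usual
+  * "need more than N values" / "too many values to unpack" errors
+  * when the pair has the wrong length. */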
+ __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_8);
+ __pyx_t_8 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_9);
+ __pyx_t_9 = 0;
+
+ /* "_yaml.pyx":1370
+ * elif node_class is MappingNode:
+ * for key, value in node.value:
+ * self._anchor_node(key) # <<<<<<<<<<<<<<
+ * self._anchor_node(value)
+ * return 1
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_key); if (unlikely(__pyx_t_7 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1370; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1371
+ * for key, value in node.value:
+ * self._anchor_node(key)
+ * self._anchor_node(value) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_value); if (unlikely(__pyx_t_7 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1371; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ goto __pyx_L5;
+ }
+ __pyx_L5:;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1372
+ * self._anchor_node(key)
+ * self._anchor_node(value)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1357
+ * self.last_alias_id = 0
+ *
+ * cdef int _anchor_node(self, object node) except 0: # <<<<<<<<<<<<<<
+ * if node in self.anchors:
+ * if self.anchors[node] is None:
+ */
+
+ /* function exit code */
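+ /* Per the `except 0` declaration: 1 means success, 0 means a Python
+  * exception is pending; the error path below releases temporaries and
+  * adds the traceback frame before returning 0. */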
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_8);
+ __Pyx_XDECREF(__pyx_t_9);
+ __Pyx_XDECREF(__pyx_t_10);
+ __Pyx_AddTraceback("_yaml.CEmitter._anchor_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_node_class);
+ __Pyx_XDECREF(__pyx_v_item);
+ __Pyx_XDECREF(__pyx_v_key);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1374
+ * return 1
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int implicit
+ */
+
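+/* Second serialization pass. For a node already present in
+ * self.serialized_nodes an alias event referencing its anchor is
+ * emitted; otherwise the node is marked serialized, the resolver is
+ * descended, and (judging from the locals declared below) a
+ * scalar/sequence/mapping event is built from the node's tag, value
+ * and style, with unicode objects converted to UTF-8 byte strings
+ * before their buffers are handed to libyaml. */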
+static int __pyx_f_5_yaml_8CEmitter__serialize_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) {
+ yaml_event_t __pyx_v_event;
+ int __pyx_v_implicit;
+ int __pyx_v_plain_implicit;
+ int __pyx_v_quoted_implicit;
+ char *__pyx_v_anchor;
+ char *__pyx_v_tag;
+ char *__pyx_v_value;
+ int __pyx_v_length;
+ int __pyx_v_item_index;
+ yaml_scalar_style_t __pyx_v_scalar_style;
+ yaml_sequence_style_t __pyx_v_sequence_style;
+ yaml_mapping_style_t __pyx_v_mapping_style;
+ PyObject *__pyx_v_anchor_object = NULL;
+ PyObject *__pyx_v_error = NULL;
+ PyObject *__pyx_v_node_class = NULL;
+ PyObject *__pyx_v_tag_object = NULL;
+ PyObject *__pyx_v_value_object = NULL;
+ PyObject *__pyx_v_style_object = NULL;
+ PyObject *__pyx_v_item = NULL;
+ PyObject *__pyx_v_item_key = NULL;
+ PyObject *__pyx_v_item_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ int __pyx_t_3;
+ int __pyx_t_4;
+ PyObject *__pyx_t_5 = NULL;
+ PyObject *__pyx_t_6 = NULL;
+ PyObject *__pyx_t_7 = NULL;
+ int __pyx_t_8;
+ Py_ssize_t __pyx_t_9;
+ PyObject *(*__pyx_t_10)(PyObject *);
+ PyObject *__pyx_t_11 = NULL;
+ PyObject *(*__pyx_t_12)(PyObject *);
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("_serialize_node", 0);
+
+ /* "_yaml.pyx":1387
+ * cdef yaml_sequence_style_t sequence_style
+ * cdef yaml_mapping_style_t mapping_style
+ * anchor_object = self.anchors[node] # <<<<<<<<<<<<<<
+ * anchor = NULL
+ * if anchor_object is not None:
+ */
+ __pyx_t_1 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (unlikely(__pyx_t_1 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1387; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_anchor_object = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1388
+ * cdef yaml_mapping_style_t mapping_style
+ * anchor_object = self.anchors[node]
+ * anchor = NULL # <<<<<<<<<<<<<<
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ */
+ __pyx_v_anchor = NULL;
+
+ /* "_yaml.pyx":1389
+ * anchor_object = self.anchors[node]
+ * anchor = NULL
+ * if anchor_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ */
+ __pyx_t_2 = (__pyx_v_anchor_object != Py_None);
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1390
+ * anchor = NULL
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ */
+ __pyx_t_3 = (PyUnicode_CheckExact(__pyx_v_anchor_object) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1391
+ * if anchor_object is not None:
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1391; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_anchor_object, __pyx_t_1);
+ __pyx_t_1 = 0;
+ goto __pyx_L4;
+ }
+ __pyx_L4:;
+
+ /* "_yaml.pyx":1392
+ * if PyUnicode_CheckExact(anchor_object):
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string")
+ */
+ __pyx_t_3 = ((!(PyString_CheckExact(__pyx_v_anchor_object) != 0)) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1393
+ * anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("anchor must be a string")
+ * else:
+ */
+ __pyx_t_3 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1394
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__60, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1396
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__61, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1397
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<<
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ */
+ __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object);
+ goto __pyx_L3;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1398
+ * raise TypeError(u"anchor must be a string")
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes: # <<<<<<<<<<<<<<
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ */
+ __pyx_t_3 = (__Pyx_PySequence_Contains(__pyx_v_node, __pyx_v_self->serialized_nodes, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1398; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1399
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_2 = ((yaml_alias_event_initialize((&__pyx_v_event), __pyx_v_anchor) == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1400
+ * if node in self.serialized_nodes:
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1400; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1401
+ * if yaml_alias_event_initialize(&event, anchor) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1401; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1402
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * else:
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1402; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1403
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * else:
+ * node_class = node.__class__
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1403; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L7;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1405
+ * raise error
+ * else:
+ * node_class = node.__class__ # <<<<<<<<<<<<<<
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index)
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_class); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1405; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_node_class = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1406
+ * else:
+ * node_class = node.__class__
+ * self.serialized_nodes[node] = True # <<<<<<<<<<<<<<
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode:
+ */
+ if (unlikely(PyObject_SetItem(__pyx_v_self->serialized_nodes, __pyx_v_node, Py_True) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1406; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1407
+ * node_class = node.__class__
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index) # <<<<<<<<<<<<<<
+ * if node_class is ScalarNode:
+ * plain_implicit = 0
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_descend_resolver); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_INCREF(__pyx_v_parent);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_parent);
+ __Pyx_GIVEREF(__pyx_v_parent);
+ __Pyx_INCREF(__pyx_v_index);
+ PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_v_index);
+ __Pyx_GIVEREF(__pyx_v_index);
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_5, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":1408
+ * self.serialized_nodes[node] = True
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode: # <<<<<<<<<<<<<<
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ */
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1408; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_2 = (__pyx_v_node_class == __pyx_t_6);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_3 = (__pyx_t_2 != 0);
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1409
+ * self.descend_resolver(parent, index)
+ * if node_class is ScalarNode:
+ * plain_implicit = 0 # <<<<<<<<<<<<<<
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ */
+ __pyx_v_plain_implicit = 0;
+
+ /* "_yaml.pyx":1410
+ * if node_class is ScalarNode:
+ * plain_implicit = 0
+ * quoted_implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ */
+ __pyx_v_quoted_implicit = 0;
+
+ /* "_yaml.pyx":1411
+ * plain_implicit = 0
+ * quoted_implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ */
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1411; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_v_tag_object = __pyx_t_6;
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":1412
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<<
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ */
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ __Pyx_INCREF(__pyx_tuple__62);
+ PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_tuple__62);
+ __Pyx_GIVEREF(__pyx_tuple__62);
+ __pyx_t_5 = 0;
+ __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_t_7 = PyObject_RichCompare(__pyx_t_1, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1413
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1 # <<<<<<<<<<<<<<
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1
+ */
+ __pyx_v_plain_implicit = 1;
+ goto __pyx_L11;
+ }
+ __pyx_L11:;
+
+ /* "_yaml.pyx":1414
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<<
+ * quoted_implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ __Pyx_INCREF(__pyx_tuple__63);
+ PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_tuple__63);
+ __Pyx_GIVEREF(__pyx_tuple__63);
+ __pyx_t_1 = 0;
+ __pyx_t_6 = 0;
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_5, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_5 = PyObject_RichCompare(__pyx_t_6, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1415
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_quoted_implicit = 1;
+ goto __pyx_L12;
+ }
+ __pyx_L12:;
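+ /* The resolver is consulted twice: once as if the scalar were plain,
+  * (True, False), and once as if it were quoted, (False, True). When
+  * the resolved tag equals node.tag the emitter may leave the tag
+  * implicit in that context, which is what plain_implicit and
+  * quoted_implicit record. */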
+
+ /* "_yaml.pyx":1416
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ * quoted_implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1417
+ * quoted_implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_3 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_2 = (__pyx_t_3 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1418
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1419
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1419; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L14;
+ }
+ __pyx_L14:;
+
+ /* "_yaml.pyx":1420
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1421
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1422
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__64, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1424
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__65, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1425
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object):
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L13;
+ }
+ __pyx_L13:;
+
+ /* "_yaml.pyx":1426
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1426; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_value_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1427
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ */
+ __pyx_t_2 = (PyUnicode_CheckExact(__pyx_v_value_object) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1428
+ * value_object = node.value
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_5 = PyUnicode_AsUTF8String(__pyx_v_value_object); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1428; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF_SET(__pyx_v_value_object, __pyx_t_5);
+ __pyx_t_5 = 0;
+ goto __pyx_L17;
+ }
+ __pyx_L17:;
+
+ /* "_yaml.pyx":1429
+ * if PyUnicode_CheckExact(value_object):
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string")
+ */
+ __pyx_t_2 = ((!(PyString_CheckExact(__pyx_v_value_object) != 0)) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1430
+ * value_object = PyUnicode_AsUTF8String(value_object)
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("value must be a string")
+ * else:
+ */
+ __pyx_t_2 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1431
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__66, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1433
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__67, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_Raise(__pyx_t_5, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1434
+ * else:
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<<
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style
+ */
+ __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1435
+ * raise TypeError(u"value must be a string")
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<<
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ */
+ __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object);
+
+ /* "_yaml.pyx":1436
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style # <<<<<<<<<<<<<<
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_style); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1436; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_style_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1437
+ * length = PyString_GET_SIZE(value_object)
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE;
+
+ /* "_yaml.pyx":1438
+ * style_object = node.style
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ */
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__7, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1438; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_2) {
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__7, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1438; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = __pyx_t_3;
+ } else {
+ __pyx_t_8 = __pyx_t_2;
+ }
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1439
+ * scalar_style = YAML_PLAIN_SCALAR_STYLE
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE;
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1440
+ * if style_object == "'" or style_object == u"'":
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ */
+ __pyx_t_8 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__8, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1440; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_8) {
+ __pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__8, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1440; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_3 = __pyx_t_2;
+ } else {
+ __pyx_t_3 = __pyx_t_8;
+ }
+ if (__pyx_t_3) {
+
+ /* "_yaml.pyx":1441
+ * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE;
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1442
+ * elif style_object == "\"" or style_object == u"\"":
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ */
+ __pyx_t_3 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__9, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1442; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_3) {
+ __pyx_t_8 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__9, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1442; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_2 = __pyx_t_8;
+ } else {
+ __pyx_t_2 = __pyx_t_3;
+ }
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1443
+ * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ */
+ __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE;
+ goto __pyx_L20;
+ }
+
+ /* "_yaml.pyx":1444
+ * elif style_object == "|" or style_object == u"|":
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<<
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ */
+ __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_style_object, __pyx_kp_s__10, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1444; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!__pyx_t_2) {
+ __pyx_t_3 = (__Pyx_PyUnicode_Equals(__pyx_v_style_object, __pyx_kp_u__10, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1444; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = __pyx_t_3;
+ } else {
+ __pyx_t_8 = __pyx_t_2;
+ }
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1445
+ * scalar_style = YAML_LITERAL_SCALAR_STYLE
+ * elif style_object == ">" or style_object == u">":
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<<
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ */
+ __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE;
+ goto __pyx_L20;
+ }
+ __pyx_L20:;
+
+ /* "_yaml.pyx":1447
+ * scalar_style = YAML_FOLDED_SCALAR_STYLE
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_8 = ((yaml_scalar_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1448
+ * if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1448; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1449
+ * plain_implicit, quoted_implicit, scalar_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1449; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1450
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * elif node_class is SequenceNode:
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1450; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_error = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1451
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * elif node_class is SequenceNode:
+ * implicit = 0
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1451; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L10;
+ }
+
+ /* "_yaml.pyx":1452
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is SequenceNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1452; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_8 = (__pyx_v_node_class == __pyx_t_5);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_2 = (__pyx_t_8 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1453
+ * raise error
+ * elif node_class is SequenceNode:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1454
+ * elif node_class is SequenceNode:
+ * implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1454; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_tag_object = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1455
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6);
+ __Pyx_GIVEREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_7);
+ __Pyx_GIVEREF(__pyx_t_7);
+ __Pyx_INCREF(Py_True);
+ PyTuple_SET_ITEM(__pyx_t_1, 2, Py_True);
+ __Pyx_GIVEREF(Py_True);
+ __pyx_t_6 = 0;
+ __pyx_t_7 = 0;
+ __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = PyObject_RichCompare(__pyx_t_7, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1456
+ * tag_object = node.tag
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L23;
+ }
+ __pyx_L23:;
+
+ /* "_yaml.pyx":1457
+ * if self.resolve(SequenceNode, node.value, True) == tag_object:
+ * implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1458
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_2 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_8 = (__pyx_t_2 != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1459
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_8 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1460
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1460; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_1);
+ __pyx_t_1 = 0;
+ goto __pyx_L25;
+ }
+ __pyx_L25:;
+
+ /* "_yaml.pyx":1461
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_8 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1462
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_8 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1463
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__68, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1465
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__69, NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_Raise(__pyx_t_1, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1466
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L24;
+ }
+ __pyx_L24:;
+
+ /* "_yaml.pyx":1467
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ */
+ __pyx_v_sequence_style = YAML_BLOCK_SEQUENCE_STYLE;
+
+ /* "_yaml.pyx":1468
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1468; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_8 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1468; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1469
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ * if node.flow_style:
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<<
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ */
+ __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE;
+ goto __pyx_L28;
+ }
+ __pyx_L28:;
+
+ /* "_yaml.pyx":1471
+ * sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_8 = ((yaml_sequence_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1472
+ * if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ * implicit, sequence_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1472; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1473
+ * implicit, sequence_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1473; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1474
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * item_index = 0
+ */
+ __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1474; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_error = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1475
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * item_index = 0
+ * for item in node.value:
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1475; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1476
+ * error = self._emitter_error()
+ * raise error
+ * item_index = 0 # <<<<<<<<<<<<<<
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index)
+ */
+ __pyx_v_item_index = 0;
+
+ /* "_yaml.pyx":1477
+ * raise error
+ * item_index = 0
+ * for item in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyList_CheckExact(__pyx_t_1) || PyTuple_CheckExact(__pyx_t_1)) {
+ __pyx_t_7 = __pyx_t_1; __Pyx_INCREF(__pyx_t_7); __pyx_t_9 = 0;
+ __pyx_t_10 = NULL;
+ } else {
+ __pyx_t_9 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_10 = Py_TYPE(__pyx_t_7)->tp_iternext;
+ }
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ for (;;) {
+ if (!__pyx_t_10 && PyList_CheckExact(__pyx_t_7)) {
+ if (__pyx_t_9 >= PyList_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_1 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_9); __Pyx_INCREF(__pyx_t_1); __pyx_t_9++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_7, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_10 && PyTuple_CheckExact(__pyx_t_7)) {
+ if (__pyx_t_9 >= PyTuple_GET_SIZE(__pyx_t_7)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_9); __Pyx_INCREF(__pyx_t_1); __pyx_t_9++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_1 = PySequence_ITEM(__pyx_t_7, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_1 = __pyx_t_10(__pyx_t_7);
+ if (unlikely(!__pyx_t_1)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_1);
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1478
+ * item_index = 0
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index) # <<<<<<<<<<<<<<
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event)
+ */
+ __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_item_index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1478; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item, __pyx_v_node, __pyx_t_1); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1478; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1479
+ * for item in node.value:
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1 # <<<<<<<<<<<<<<
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_v_item_index = (__pyx_v_item_index + 1);
+ }
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+
+ /* "_yaml.pyx":1480
+ * self._serialize_node(item, node, item_index)
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ yaml_sequence_end_event_initialize((&__pyx_v_event));
+
+ /* "_yaml.pyx":1481
+ * item_index = item_index+1
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1481; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1482
+ * yaml_sequence_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * elif node_class is MappingNode:
+ */
+ __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1482; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_v_error = __pyx_t_7;
+ __pyx_t_7 = 0;
+
+ /* "_yaml.pyx":1483
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * elif node_class is MappingNode:
+ * implicit = 0
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1483; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L10;
+ }
+
+ /* "_yaml.pyx":1484
+ * error = self._emitter_error()
+ * raise error
+ * elif node_class is MappingNode: # <<<<<<<<<<<<<<
+ * implicit = 0
+ * tag_object = node.tag
+ */
+ __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingNode); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1484; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_8 = (__pyx_v_node_class == __pyx_t_7);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __pyx_t_2 = (__pyx_t_8 != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1485
+ * raise error
+ * elif node_class is MappingNode:
+ * implicit = 0 # <<<<<<<<<<<<<<
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ */
+ __pyx_v_implicit = 0;
+
+ /* "_yaml.pyx":1486
+ * elif node_class is MappingNode:
+ * implicit = 0
+ * tag_object = node.tag # <<<<<<<<<<<<<<
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_tag); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1486; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_v_tag_object = __pyx_t_7;
+ __pyx_t_7 = 0;
+
+ /* "_yaml.pyx":1487
+ * implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object: # <<<<<<<<<<<<<<
+ * implicit = 1
+ * tag = NULL
+ */
+ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_resolve); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_MappingNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1);
+ __Pyx_GIVEREF(__pyx_t_1);
+ PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_5);
+ __Pyx_INCREF(Py_True);
+ PyTuple_SET_ITEM(__pyx_t_6, 2, Py_True);
+ __Pyx_GIVEREF(Py_True);
+ __pyx_t_1 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_6 = PyObject_RichCompare(__pyx_t_5, __pyx_v_tag_object, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1488
+ * tag_object = node.tag
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1 # <<<<<<<<<<<<<<
+ * tag = NULL
+ * if tag_object is not None:
+ */
+ __pyx_v_implicit = 1;
+ goto __pyx_L34;
+ }
+ __pyx_L34:;
+
+ /* "_yaml.pyx":1489
+ * if self.resolve(MappingNode, node.value, True) == tag_object:
+ * implicit = 1
+ * tag = NULL # <<<<<<<<<<<<<<
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ */
+ __pyx_v_tag = NULL;
+
+ /* "_yaml.pyx":1490
+ * implicit = 1
+ * tag = NULL
+ * if tag_object is not None: # <<<<<<<<<<<<<<
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ */
+ __pyx_t_2 = (__pyx_v_tag_object != Py_None);
+ __pyx_t_8 = (__pyx_t_2 != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1491
+ * tag = NULL
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ */
+ __pyx_t_8 = (PyUnicode_CheckExact(__pyx_v_tag_object) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1492
+ * if tag_object is not None:
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<<
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_t_6 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1492; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF_SET(__pyx_v_tag_object, __pyx_t_6);
+ __pyx_t_6 = 0;
+ goto __pyx_L36;
+ }
+ __pyx_L36:;
+
+ /* "_yaml.pyx":1493
+ * if PyUnicode_CheckExact(tag_object):
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<<
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string")
+ */
+ __pyx_t_8 = ((!(PyString_CheckExact(__pyx_v_tag_object) != 0)) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1494
+ * tag_object = PyUnicode_AsUTF8String(tag_object)
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<<
+ * raise TypeError("tag must be a string")
+ * else:
+ */
+ __pyx_t_8 = ((PY_MAJOR_VERSION < 3) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1495
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__70, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1497
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__71, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_Raise(__pyx_t_6, 0, 0, 0);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+
+ /* "_yaml.pyx":1498
+ * else:
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<<
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style:
+ */
+ __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object);
+ goto __pyx_L35;
+ }
+ __pyx_L35:;
+
+ /* "_yaml.pyx":1499
+ * raise TypeError(u"tag must be a string")
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ */
+ __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE;
+
+ /* "_yaml.pyx":1500
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style: # <<<<<<<<<<<<<<
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ */
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_flow_style); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1500; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_8 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1500; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1501
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ * if node.flow_style:
+ * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<<
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ */
+ __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE;
+ goto __pyx_L39;
+ }
+ __pyx_L39:;
+
+ /* "_yaml.pyx":1503
+ * mapping_style = YAML_FLOW_MAPPING_STYLE
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0: # <<<<<<<<<<<<<<
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_8 = ((yaml_mapping_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1504
+ * if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ * implicit, mapping_style) == 0:
+ * raise MemoryError # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1504; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1505
+ * implicit, mapping_style) == 0:
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1505; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1506
+ * raise MemoryError
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * for item_key, item_value in node.value:
+ */
+ __pyx_t_6 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1506; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __pyx_v_error = __pyx_t_6;
+ __pyx_t_6 = 0;
+
+ /* "_yaml.pyx":1507
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None)
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1507; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+
+ /* "_yaml.pyx":1508
+ * error = self._emitter_error()
+ * raise error
+ * for item_key, item_value in node.value: # <<<<<<<<<<<<<<
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key)
+ */
+ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_node, __pyx_n_s_value); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ if (PyList_CheckExact(__pyx_t_6) || PyTuple_CheckExact(__pyx_t_6)) {
+ __pyx_t_5 = __pyx_t_6; __Pyx_INCREF(__pyx_t_5); __pyx_t_9 = 0;
+ __pyx_t_10 = NULL;
+ } else {
+ __pyx_t_9 = -1; __pyx_t_5 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_10 = Py_TYPE(__pyx_t_5)->tp_iternext;
+ }
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ for (;;) {
+ if (!__pyx_t_10 && PyList_CheckExact(__pyx_t_5)) {
+ if (__pyx_t_9 >= PyList_GET_SIZE(__pyx_t_5)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_6 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_9); __Pyx_INCREF(__pyx_t_6); __pyx_t_9++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_6 = PySequence_ITEM(__pyx_t_5, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else if (!__pyx_t_10 && PyTuple_CheckExact(__pyx_t_5)) {
+ if (__pyx_t_9 >= PyTuple_GET_SIZE(__pyx_t_5)) break;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_9); __Pyx_INCREF(__pyx_t_6); __pyx_t_9++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #else
+ __pyx_t_6 = PySequence_ITEM(__pyx_t_5, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ } else {
+ __pyx_t_6 = __pyx_t_10(__pyx_t_5);
+ if (unlikely(!__pyx_t_6)) {
+ PyObject* exc_type = PyErr_Occurred();
+ if (exc_type) {
+ if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
+ else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ break;
+ }
+ __Pyx_GOTREF(__pyx_t_6);
+ }
+ if ((likely(PyTuple_CheckExact(__pyx_t_6))) || (PyList_CheckExact(__pyx_t_6))) {
+ PyObject* sequence = __pyx_t_6;
+ #if CYTHON_COMPILING_IN_CPYTHON
+ Py_ssize_t size = Py_SIZE(sequence);
+ #else
+ Py_ssize_t size = PySequence_Size(sequence);
+ #endif
+ if (unlikely(size != 2)) {
+ if (size > 2) __Pyx_RaiseTooManyValuesError(2);
+ else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ #if CYTHON_COMPILING_IN_CPYTHON
+ if (likely(PyTuple_CheckExact(sequence))) {
+ __pyx_t_7 = PyTuple_GET_ITEM(sequence, 0);
+ __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1);
+ } else {
+ __pyx_t_7 = PyList_GET_ITEM(sequence, 0);
+ __pyx_t_1 = PyList_GET_ITEM(sequence, 1);
+ }
+ __Pyx_INCREF(__pyx_t_7);
+ __Pyx_INCREF(__pyx_t_1);
+ #else
+ __pyx_t_7 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_7);
+ __pyx_t_1 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ #endif
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ } else {
+ Py_ssize_t index = -1;
+ __pyx_t_11 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_11)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_11);
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ __pyx_t_12 = Py_TYPE(__pyx_t_11)->tp_iternext;
+ index = 0; __pyx_t_7 = __pyx_t_12(__pyx_t_11); if (unlikely(!__pyx_t_7)) goto __pyx_L44_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_7);
+ index = 1; __pyx_t_1 = __pyx_t_12(__pyx_t_11); if (unlikely(!__pyx_t_1)) goto __pyx_L44_unpacking_failed;
+ __Pyx_GOTREF(__pyx_t_1);
+ if (__Pyx_IternextUnpackEndCheck(__pyx_t_12(__pyx_t_11), 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_12 = NULL;
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ goto __pyx_L45_unpacking_done;
+ __pyx_L44_unpacking_failed:;
+ __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
+ __pyx_t_12 = NULL;
+ if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_L45_unpacking_done:;
+ }
+ __Pyx_XDECREF_SET(__pyx_v_item_key, __pyx_t_7);
+ __pyx_t_7 = 0;
+ __Pyx_XDECREF_SET(__pyx_v_item_value, __pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1509
+ * raise error
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None) # <<<<<<<<<<<<<<
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event)
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_key, __pyx_v_node, Py_None); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1509; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":1510
+ * for item_key, item_value in node.value:
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key) # <<<<<<<<<<<<<<
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ */
+ __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_value, __pyx_v_node, __pyx_v_item_key); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1510; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1511
+ * self._serialize_node(item_key, node, None)
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event) # <<<<<<<<<<<<<<
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ */
+ yaml_mapping_end_event_initialize((&__pyx_v_event));
+
+ /* "_yaml.pyx":1512
+ * self._serialize_node(item_value, node, item_key)
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<<
+ * error = self._emitter_error()
+ * raise error
+ */
+ __pyx_t_4 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1512; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_t_8 = ((__pyx_t_4 == 0) != 0);
+ if (__pyx_t_8) {
+
+ /* "_yaml.pyx":1513
+ * yaml_mapping_end_event_initialize(&event)
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error() # <<<<<<<<<<<<<<
+ * raise error
+ * self.ascend_resolver()
+ */
+ __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1513; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_v_error = __pyx_t_5;
+ __pyx_t_5 = 0;
+
+ /* "_yaml.pyx":1514
+ * if yaml_emitter_emit(&self.emitter, &event) == 0:
+ * error = self._emitter_error()
+ * raise error # <<<<<<<<<<<<<<
+ * self.ascend_resolver()
+ * return 1
+ */
+ __Pyx_Raise(__pyx_v_error, 0, 0, 0);
+ {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1514; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ goto __pyx_L10;
+ }
+ __pyx_L10:;
+
+ /* "_yaml.pyx":1515
+ * error = self._emitter_error()
+ * raise error
+ * self.ascend_resolver() # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_ascend_resolver); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1515; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1515; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_6);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+ }
+ __pyx_L7:;
+
+ /* "_yaml.pyx":1516
+ * raise error
+ * self.ascend_resolver()
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ * cdef int output_handler(void *data, char *buffer, int size) except 0:
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1374
+ * return 1
+ *
+ * cdef int _serialize_node(self, object node, object parent, object index) except 0: # <<<<<<<<<<<<<<
+ * cdef yaml_event_t event
+ * cdef int implicit
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_XDECREF(__pyx_t_6);
+ __Pyx_XDECREF(__pyx_t_7);
+ __Pyx_XDECREF(__pyx_t_11);
+ __Pyx_AddTraceback("_yaml.CEmitter._serialize_node", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_anchor_object);
+ __Pyx_XDECREF(__pyx_v_error);
+ __Pyx_XDECREF(__pyx_v_node_class);
+ __Pyx_XDECREF(__pyx_v_tag_object);
+ __Pyx_XDECREF(__pyx_v_value_object);
+ __Pyx_XDECREF(__pyx_v_style_object);
+ __Pyx_XDECREF(__pyx_v_item);
+ __Pyx_XDECREF(__pyx_v_item_key);
+ __Pyx_XDECREF(__pyx_v_item_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+/* "_yaml.pyx":1518
+ * return 1
+ *
+ * cdef int output_handler(void *data, char *buffer, int size) except 0: # <<<<<<<<<<<<<<
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ */
+
+static int __pyx_f_5_yaml_output_handler(void *__pyx_v_data, char *__pyx_v_buffer, int __pyx_v_size) {
+ struct __pyx_obj_5_yaml_CEmitter *__pyx_v_emitter = 0;
+ PyObject *__pyx_v_value = NULL;
+ int __pyx_r;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("output_handler", 0);
+
+ /* "_yaml.pyx":1520
+ * cdef int output_handler(void *data, char *buffer, int size) except 0:
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data # <<<<<<<<<<<<<<
+ * if emitter.dump_unicode == 0:
+ * value = PyString_FromStringAndSize(buffer, size)
+ */
+ __pyx_t_1 = ((PyObject *)__pyx_v_data);
+ __Pyx_INCREF(__pyx_t_1);
+ __pyx_v_emitter = ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_t_1);
+ __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":1521
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ * if emitter.dump_unicode == 0: # <<<<<<<<<<<<<<
+ * value = PyString_FromStringAndSize(buffer, size)
+ * else:
+ */
+ __pyx_t_2 = ((__pyx_v_emitter->dump_unicode == 0) != 0);
+ if (__pyx_t_2) {
+
+ /* "_yaml.pyx":1522
+ * emitter = <CEmitter>data
+ * if emitter.dump_unicode == 0:
+ * value = PyString_FromStringAndSize(buffer, size) # <<<<<<<<<<<<<<
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ */
+ __pyx_t_1 = PyString_FromStringAndSize(__pyx_v_buffer, __pyx_v_size); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1522; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L3;
+ }
+ /*else*/ {
+
+ /* "_yaml.pyx":1524
+ * value = PyString_FromStringAndSize(buffer, size)
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') # <<<<<<<<<<<<<<
+ * emitter.stream.write(value)
+ * return 1
+ */
+ __pyx_t_1 = PyUnicode_DecodeUTF8(__pyx_v_buffer, __pyx_v_size, __pyx_k_strict); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1524; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_value = __pyx_t_1;
+ __pyx_t_1 = 0;
+ }
+ __pyx_L3:;
+
+ /* "_yaml.pyx":1525
+ * else:
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ * emitter.stream.write(value) # <<<<<<<<<<<<<<
+ * return 1
+ *
+ */
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_emitter->stream, __pyx_n_s_write); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_INCREF(__pyx_v_value);
+ PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_value);
+ __Pyx_GIVEREF(__pyx_v_value);
+ __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+
+ /* "_yaml.pyx":1526
+ * value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ * emitter.stream.write(value)
+ * return 1 # <<<<<<<<<<<<<<
+ *
+ */
+ __pyx_r = 1;
+ goto __pyx_L0;
+
+ /* "_yaml.pyx":1518
+ * return 1
+ *
+ * cdef int output_handler(void *data, char *buffer, int size) except 0: # <<<<<<<<<<<<<<
+ * cdef CEmitter emitter
+ * emitter = <CEmitter>data
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_AddTraceback("_yaml.output_handler", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = 0;
+ __pyx_L0:;
+ __Pyx_XDECREF((PyObject *)__pyx_v_emitter);
+ __Pyx_XDECREF(__pyx_v_value);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_tp_new_5_yaml_Mark(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_Mark *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_Mark *)o);
+ p->name = Py_None; Py_INCREF(Py_None);
+ p->buffer = Py_None; Py_INCREF(Py_None);
+ p->pointer = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
+static void __pyx_tp_dealloc_5_yaml_Mark(PyObject *o) {
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ #if PY_VERSION_HEX >= 0x030400a1
+ if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ Py_CLEAR(p->name);
+ Py_CLEAR(p->buffer);
+ Py_CLEAR(p->pointer);
+ (*Py_TYPE(o)->tp_free)(o);
+}
+
+static int __pyx_tp_traverse_5_yaml_Mark(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ if (p->name) {
+ e = (*v)(p->name, a); if (e) return e;
+ }
+ if (p->buffer) {
+ e = (*v)(p->buffer, a); if (e) return e;
+ }
+ if (p->pointer) {
+ e = (*v)(p->pointer, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_Mark(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o;
+ tmp = ((PyObject*)p->name);
+ p->name = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->buffer);
+ p->buffer = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->pointer);
+ p->pointer = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_name(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_4name_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_index(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_5index_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_line(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_4line_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_column(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_6column_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_buffer(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_6buffer_1__get__(o);
+}
+
+static PyObject *__pyx_getprop_5_yaml_4Mark_pointer(PyObject *o, CYTHON_UNUSED void *x) {
+ return __pyx_pw_5_yaml_4Mark_7pointer_1__get__(o);
+}
+
+static PyMethodDef __pyx_methods_5_yaml_Mark[] = {
+ {__Pyx_NAMESTR("get_snippet"), (PyCFunction)__pyx_pw_5_yaml_4Mark_3get_snippet, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {0, 0, 0, 0}
+};
+
+static struct PyGetSetDef __pyx_getsets_5_yaml_Mark[] = {
+ {(char *)"name", __pyx_getprop_5_yaml_4Mark_name, 0, 0, 0},
+ {(char *)"index", __pyx_getprop_5_yaml_4Mark_index, 0, 0, 0},
+ {(char *)"line", __pyx_getprop_5_yaml_4Mark_line, 0, 0, 0},
+ {(char *)"column", __pyx_getprop_5_yaml_4Mark_column, 0, 0, 0},
+ {(char *)"buffer", __pyx_getprop_5_yaml_4Mark_buffer, 0, 0, 0},
+ {(char *)"pointer", __pyx_getprop_5_yaml_4Mark_pointer, 0, 0, 0},
+ {0, 0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_Mark = {
+ PyVarObject_HEAD_INIT(0, 0)
+ __Pyx_NAMESTR("_yaml.Mark"), /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_Mark), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_Mark, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #else
+ 0, /*reserved*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ __pyx_pw_5_yaml_4Mark_5__str__, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_Mark, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_Mark, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_Mark, /*tp_methods*/
+ 0, /*tp_members*/
+ __pyx_getsets_5_yaml_Mark, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_4Mark_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_Mark, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ #if PY_VERSION_HEX >= 0x02060000
+ 0, /*tp_version_tag*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+};
+static struct __pyx_vtabstruct_5_yaml_CParser __pyx_vtable_5_yaml_CParser;
+
+static PyObject *__pyx_tp_new_5_yaml_CParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_CParser *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_CParser *)o);
+ p->__pyx_vtab = __pyx_vtabptr_5_yaml_CParser;
+ p->stream = Py_None; Py_INCREF(Py_None);
+ p->stream_name = Py_None; Py_INCREF(Py_None);
+ p->current_token = Py_None; Py_INCREF(Py_None);
+ p->current_event = Py_None; Py_INCREF(Py_None);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ p->stream_cache = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
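+/* CParser destruction: the refcount is temporarily bumped around the user-level
+   __dealloc__ call so the half-destroyed object cannot be collected again from inside
+   it, and any pending exception is saved with PyErr_Fetch and restored afterwards. */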
+static void __pyx_tp_dealloc_5_yaml_CParser(PyObject *o) {
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ #if PY_VERSION_HEX >= 0x030400a1
+ if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ {
+ PyObject *etype, *eval, *etb;
+ PyErr_Fetch(&etype, &eval, &etb);
+ ++Py_REFCNT(o);
+ __pyx_pw_5_yaml_7CParser_3__dealloc__(o);
+ --Py_REFCNT(o);
+ PyErr_Restore(etype, eval, etb);
+ }
+ Py_CLEAR(p->stream);
+ Py_CLEAR(p->stream_name);
+ Py_CLEAR(p->current_token);
+ Py_CLEAR(p->current_event);
+ Py_CLEAR(p->anchors);
+ Py_CLEAR(p->stream_cache);
+ (*Py_TYPE(o)->tp_free)(o);
+}
+
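+/* Cycle-GC support: tp_traverse visits each object field for the collector, while
+   tp_clear breaks cycles by swapping each field to Py_None before dropping the old
+   reference, so no field is ever left dangling mid-collection. */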
+static int __pyx_tp_traverse_5_yaml_CParser(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ if (p->stream) {
+ e = (*v)(p->stream, a); if (e) return e;
+ }
+ if (p->stream_name) {
+ e = (*v)(p->stream_name, a); if (e) return e;
+ }
+ if (p->current_token) {
+ e = (*v)(p->current_token, a); if (e) return e;
+ }
+ if (p->current_event) {
+ e = (*v)(p->current_event, a); if (e) return e;
+ }
+ if (p->anchors) {
+ e = (*v)(p->anchors, a); if (e) return e;
+ }
+ if (p->stream_cache) {
+ e = (*v)(p->stream_cache, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_CParser(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o;
+ tmp = ((PyObject*)p->stream);
+ p->stream = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->stream_name);
+ p->stream_name = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->current_token);
+ p->current_token = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->current_event);
+ p->current_event = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->anchors);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->stream_cache);
+ p->stream_cache = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
+
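+/* Python-visible CParser methods. All are METH_NOARGS except check_token and
+   check_event, which accept positional and keyword arguments. */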
+static PyMethodDef __pyx_methods_5_yaml_CParser[] = {
+ {__Pyx_NAMESTR("dispose"), (PyCFunction)__pyx_pw_5_yaml_7CParser_5dispose, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("raw_scan"), (PyCFunction)__pyx_pw_5_yaml_7CParser_7raw_scan, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("get_token"), (PyCFunction)__pyx_pw_5_yaml_7CParser_9get_token, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("peek_token"), (PyCFunction)__pyx_pw_5_yaml_7CParser_11peek_token, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("check_token"), (PyCFunction)__pyx_pw_5_yaml_7CParser_13check_token, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("raw_parse"), (PyCFunction)__pyx_pw_5_yaml_7CParser_15raw_parse, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("get_event"), (PyCFunction)__pyx_pw_5_yaml_7CParser_17get_event, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("peek_event"), (PyCFunction)__pyx_pw_5_yaml_7CParser_19peek_event, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("check_event"), (PyCFunction)__pyx_pw_5_yaml_7CParser_21check_event, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("check_node"), (PyCFunction)__pyx_pw_5_yaml_7CParser_23check_node, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("get_node"), (PyCFunction)__pyx_pw_5_yaml_7CParser_25get_node, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("get_single_node"), (PyCFunction)__pyx_pw_5_yaml_7CParser_27get_single_node, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_CParser = {
+ PyVarObject_HEAD_INIT(0, 0)
+ __Pyx_NAMESTR("_yaml.CParser"), /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_CParser), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_CParser, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #else
+ 0, /*reserved*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_CParser, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_CParser, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_CParser, /*tp_methods*/
+ 0, /*tp_members*/
+ 0, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_7CParser_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_CParser, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ #if PY_VERSION_HEX >= 0x02060000
+ 0, /*tp_version_tag*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+};
+static struct __pyx_vtabstruct_5_yaml_CEmitter __pyx_vtable_5_yaml_CEmitter;
+
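+/* CEmitter mirrors the CParser boilerplate: the same None-initialization, guarded
+   __dealloc__, and traverse/clear pattern, applied to its six object fields (stream,
+   use_version, use_tags, serialized_nodes, anchors, use_encoding). */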
+static PyObject *__pyx_tp_new_5_yaml_CEmitter(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
+ struct __pyx_obj_5_yaml_CEmitter *p;
+ PyObject *o;
+ if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
+ o = (*t->tp_alloc)(t, 0);
+ } else {
+ o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
+ }
+ if (unlikely(!o)) return 0;
+ p = ((struct __pyx_obj_5_yaml_CEmitter *)o);
+ p->__pyx_vtab = __pyx_vtabptr_5_yaml_CEmitter;
+ p->stream = Py_None; Py_INCREF(Py_None);
+ p->use_version = Py_None; Py_INCREF(Py_None);
+ p->use_tags = Py_None; Py_INCREF(Py_None);
+ p->serialized_nodes = Py_None; Py_INCREF(Py_None);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ p->use_encoding = Py_None; Py_INCREF(Py_None);
+ return o;
+}
+
+static void __pyx_tp_dealloc_5_yaml_CEmitter(PyObject *o) {
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ #if PY_VERSION_HEX >= 0x030400a1
+ if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
+ if (PyObject_CallFinalizerFromDealloc(o)) return;
+ }
+ #endif
+ PyObject_GC_UnTrack(o);
+ {
+ PyObject *etype, *eval, *etb;
+ PyErr_Fetch(&etype, &eval, &etb);
+ ++Py_REFCNT(o);
+ __pyx_pw_5_yaml_8CEmitter_3__dealloc__(o);
+ --Py_REFCNT(o);
+ PyErr_Restore(etype, eval, etb);
+ }
+ Py_CLEAR(p->stream);
+ Py_CLEAR(p->use_version);
+ Py_CLEAR(p->use_tags);
+ Py_CLEAR(p->serialized_nodes);
+ Py_CLEAR(p->anchors);
+ Py_CLEAR(p->use_encoding);
+ (*Py_TYPE(o)->tp_free)(o);
+}
+
+static int __pyx_tp_traverse_5_yaml_CEmitter(PyObject *o, visitproc v, void *a) {
+ int e;
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ if (p->stream) {
+ e = (*v)(p->stream, a); if (e) return e;
+ }
+ if (p->use_version) {
+ e = (*v)(p->use_version, a); if (e) return e;
+ }
+ if (p->use_tags) {
+ e = (*v)(p->use_tags, a); if (e) return e;
+ }
+ if (p->serialized_nodes) {
+ e = (*v)(p->serialized_nodes, a); if (e) return e;
+ }
+ if (p->anchors) {
+ e = (*v)(p->anchors, a); if (e) return e;
+ }
+ if (p->use_encoding) {
+ e = (*v)(p->use_encoding, a); if (e) return e;
+ }
+ return 0;
+}
+
+static int __pyx_tp_clear_5_yaml_CEmitter(PyObject *o) {
+ PyObject* tmp;
+ struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o;
+ tmp = ((PyObject*)p->stream);
+ p->stream = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_version);
+ p->use_version = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_tags);
+ p->use_tags = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->serialized_nodes);
+ p->serialized_nodes = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->anchors);
+ p->anchors = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ tmp = ((PyObject*)p->use_encoding);
+ p->use_encoding = Py_None; Py_INCREF(Py_None);
+ Py_XDECREF(tmp);
+ return 0;
+}
+
+static PyMethodDef __pyx_methods_5_yaml_CEmitter[] = {
+ {__Pyx_NAMESTR("dispose"), (PyCFunction)__pyx_pw_5_yaml_8CEmitter_5dispose, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("emit"), (PyCFunction)__pyx_pw_5_yaml_8CEmitter_7emit, METH_O, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("open"), (PyCFunction)__pyx_pw_5_yaml_8CEmitter_9open, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("close"), (PyCFunction)__pyx_pw_5_yaml_8CEmitter_11close, METH_NOARGS, __Pyx_DOCSTR(0)},
+ {__Pyx_NAMESTR("serialize"), (PyCFunction)__pyx_pw_5_yaml_8CEmitter_13serialize, METH_O, __Pyx_DOCSTR(0)},
+ {0, 0, 0, 0}
+};
+
+static PyTypeObject __pyx_type_5_yaml_CEmitter = {
+ PyVarObject_HEAD_INIT(0, 0)
+ __Pyx_NAMESTR("_yaml.CEmitter"), /*tp_name*/
+ sizeof(struct __pyx_obj_5_yaml_CEmitter), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ __pyx_tp_dealloc_5_yaml_CEmitter, /*tp_dealloc*/
+ 0, /*tp_print*/
+ 0, /*tp_getattr*/
+ 0, /*tp_setattr*/
+ #if PY_MAJOR_VERSION < 3
+ 0, /*tp_compare*/
+ #else
+ 0, /*reserved*/
+ #endif
+ 0, /*tp_repr*/
+ 0, /*tp_as_number*/
+ 0, /*tp_as_sequence*/
+ 0, /*tp_as_mapping*/
+ 0, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ 0, /*tp_getattro*/
+ 0, /*tp_setattro*/
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
+ 0, /*tp_doc*/
+ __pyx_tp_traverse_5_yaml_CEmitter, /*tp_traverse*/
+ __pyx_tp_clear_5_yaml_CEmitter, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ __pyx_methods_5_yaml_CEmitter, /*tp_methods*/
+ 0, /*tp_members*/
+ 0, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ __pyx_pw_5_yaml_8CEmitter_1__init__, /*tp_init*/
+ 0, /*tp_alloc*/
+ __pyx_tp_new_5_yaml_CEmitter, /*tp_new*/
+ 0, /*tp_free*/
+ 0, /*tp_is_gc*/
+ 0, /*tp_bases*/
+ 0, /*tp_mro*/
+ 0, /*tp_cache*/
+ 0, /*tp_subclasses*/
+ 0, /*tp_weaklist*/
+ 0, /*tp_del*/
+ #if PY_VERSION_HEX >= 0x02060000
+ 0, /*tp_version_tag*/
+ #endif
+ #if PY_VERSION_HEX >= 0x030400a1
+ 0, /*tp_finalize*/
+ #endif
+};
+
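+/* The module-level method table is empty; the module's def functions (e.g.
+   get_version_string) are presumably created from the code objects built in
+   __Pyx_InitCachedConstants below when the module body runs. */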
+static PyMethodDef __pyx_methods[] = {
+ {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef __pyx_moduledef = {
+ #if PY_VERSION_HEX < 0x03020000
+ { PyObject_HEAD_INIT(NULL) NULL, 0, NULL },
+ #else
+ PyModuleDef_HEAD_INIT,
+ #endif
+ __Pyx_NAMESTR("_yaml"),
+ 0, /* m_doc */
+ -1, /* m_size */
+ __pyx_methods /* m_methods */,
+ NULL, /* m_reload */
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL /* m_free */
+};
+#endif
+
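+/* Interned-string table: one entry per identifier or message string used by the module,
+   materialized once at import time. The trailing flag columns select the encoding and
+   kind (byte vs. unicode string, plain string vs. interned identifier). */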
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+ {&__pyx_n_s_AliasEvent, __pyx_k_AliasEvent, sizeof(__pyx_k_AliasEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_AliasToken, __pyx_k_AliasToken, sizeof(__pyx_k_AliasToken), 0, 0, 1, 1},
+ {&__pyx_n_s_AnchorToken, __pyx_k_AnchorToken, sizeof(__pyx_k_AnchorToken), 0, 0, 1, 1},
+ {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockEndToken, __pyx_k_BlockEndToken, sizeof(__pyx_k_BlockEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockEntryToken, __pyx_k_BlockEntryToken, sizeof(__pyx_k_BlockEntryToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockMappingStartToken, __pyx_k_BlockMappingStartToken, sizeof(__pyx_k_BlockMappingStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_BlockSequenceStartToken, __pyx_k_BlockSequenceStartToken, sizeof(__pyx_k_BlockSequenceStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_ComposerError, __pyx_k_ComposerError, sizeof(__pyx_k_ComposerError), 0, 0, 1, 1},
+ {&__pyx_n_s_ConstructorError, __pyx_k_ConstructorError, sizeof(__pyx_k_ConstructorError), 0, 0, 1, 1},
+ {&__pyx_n_s_DirectiveToken, __pyx_k_DirectiveToken, sizeof(__pyx_k_DirectiveToken), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentEndEvent, __pyx_k_DocumentEndEvent, sizeof(__pyx_k_DocumentEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentEndToken, __pyx_k_DocumentEndToken, sizeof(__pyx_k_DocumentEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentStartEvent, __pyx_k_DocumentStartEvent, sizeof(__pyx_k_DocumentStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_DocumentStartToken, __pyx_k_DocumentStartToken, sizeof(__pyx_k_DocumentStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_EmitterError, __pyx_k_EmitterError, sizeof(__pyx_k_EmitterError), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowEntryToken, __pyx_k_FlowEntryToken, sizeof(__pyx_k_FlowEntryToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowMappingEndToken, __pyx_k_FlowMappingEndToken, sizeof(__pyx_k_FlowMappingEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowMappingStartToken, __pyx_k_FlowMappingStartToken, sizeof(__pyx_k_FlowMappingStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowSequenceEndToken, __pyx_k_FlowSequenceEndToken, sizeof(__pyx_k_FlowSequenceEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_FlowSequenceStartToken, __pyx_k_FlowSequenceStartToken, sizeof(__pyx_k_FlowSequenceStartToken), 0, 0, 1, 1},
+ {&__pyx_n_s_KeyToken, __pyx_k_KeyToken, sizeof(__pyx_k_KeyToken), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingEndEvent, __pyx_k_MappingEndEvent, sizeof(__pyx_k_MappingEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingNode, __pyx_k_MappingNode, sizeof(__pyx_k_MappingNode), 0, 0, 1, 1},
+ {&__pyx_n_s_MappingStartEvent, __pyx_k_MappingStartEvent, sizeof(__pyx_k_MappingStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1},
+ {&__pyx_n_s_ParserError, __pyx_k_ParserError, sizeof(__pyx_k_ParserError), 0, 0, 1, 1},
+ {&__pyx_n_s_ReaderError, __pyx_k_ReaderError, sizeof(__pyx_k_ReaderError), 0, 0, 1, 1},
+ {&__pyx_n_s_RepresenterError, __pyx_k_RepresenterError, sizeof(__pyx_k_RepresenterError), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarEvent, __pyx_k_ScalarEvent, sizeof(__pyx_k_ScalarEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarNode, __pyx_k_ScalarNode, sizeof(__pyx_k_ScalarNode), 0, 0, 1, 1},
+ {&__pyx_n_s_ScalarToken, __pyx_k_ScalarToken, sizeof(__pyx_k_ScalarToken), 0, 0, 1, 1},
+ {&__pyx_n_s_ScannerError, __pyx_k_ScannerError, sizeof(__pyx_k_ScannerError), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceEndEvent, __pyx_k_SequenceEndEvent, sizeof(__pyx_k_SequenceEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceNode, __pyx_k_SequenceNode, sizeof(__pyx_k_SequenceNode), 0, 0, 1, 1},
+ {&__pyx_n_s_SequenceStartEvent, __pyx_k_SequenceStartEvent, sizeof(__pyx_k_SequenceStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_SerializerError, __pyx_k_SerializerError, sizeof(__pyx_k_SerializerError), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamEndEvent, __pyx_k_StreamEndEvent, sizeof(__pyx_k_StreamEndEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamEndToken, __pyx_k_StreamEndToken, sizeof(__pyx_k_StreamEndToken), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamStartEvent, __pyx_k_StreamStartEvent, sizeof(__pyx_k_StreamStartEvent), 0, 0, 1, 1},
+ {&__pyx_n_s_StreamStartToken, __pyx_k_StreamStartToken, sizeof(__pyx_k_StreamStartToken), 0, 0, 1, 1},
+ {&__pyx_n_u_TAG, __pyx_k_TAG, sizeof(__pyx_k_TAG), 0, 1, 0, 1},
+ {&__pyx_n_s_TagToken, __pyx_k_TagToken, sizeof(__pyx_k_TagToken), 0, 0, 1, 1},
+ {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},
+ {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1},
+ {&__pyx_n_s_ValueToken, __pyx_k_ValueToken, sizeof(__pyx_k_ValueToken), 0, 0, 1, 1},
+ {&__pyx_n_u_YAML, __pyx_k_YAML, sizeof(__pyx_k_YAML), 0, 1, 0, 1},
+ {&__pyx_n_s_YAMLError, __pyx_k_YAMLError, sizeof(__pyx_k_YAMLError), 0, 0, 1, 1},
+ {&__pyx_kp_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 0},
+ {&__pyx_kp_u__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 1, 0, 0},
+ {&__pyx_kp_s__17, __pyx_k__17, sizeof(__pyx_k__17), 0, 0, 1, 0},
+ {&__pyx_kp_s__18, __pyx_k__18, sizeof(__pyx_k__18), 0, 0, 1, 0},
+ {&__pyx_kp_s__19, __pyx_k__19, sizeof(__pyx_k__19), 0, 0, 1, 0},
+ {&__pyx_kp_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 0},
+ {&__pyx_kp_u__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 1, 0, 0},
+ {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0},
+ {&__pyx_kp_s__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 0, 1, 0},
+ {&__pyx_kp_u__7, __pyx_k__7, sizeof(__pyx_k__7), 0, 1, 0, 0},
+ {&__pyx_kp_s__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 0, 1, 0},
+ {&__pyx_kp_u__8, __pyx_k__8, sizeof(__pyx_k__8), 0, 1, 0, 0},
+ {&__pyx_kp_s__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 0, 1, 0},
+ {&__pyx_kp_u__9, __pyx_k__9, sizeof(__pyx_k__9), 0, 1, 0, 0},
+ {&__pyx_kp_s_a_string_or_stream_input_is_requ, __pyx_k_a_string_or_stream_input_is_requ, sizeof(__pyx_k_a_string_or_stream_input_is_requ), 0, 0, 1, 0},
+ {&__pyx_kp_u_a_string_or_stream_input_is_requ, __pyx_k_a_string_or_stream_input_is_requ, sizeof(__pyx_k_a_string_or_stream_input_is_requ), 0, 1, 0, 0},
+ {&__pyx_kp_s_a_string_value_is_expected, __pyx_k_a_string_value_is_expected, sizeof(__pyx_k_a_string_value_is_expected), 0, 0, 1, 0},
+ {&__pyx_kp_u_a_string_value_is_expected, __pyx_k_a_string_value_is_expected, sizeof(__pyx_k_a_string_value_is_expected), 0, 1, 0, 0},
+ {&__pyx_n_s_allow_unicode, __pyx_k_allow_unicode, sizeof(__pyx_k_allow_unicode), 0, 0, 1, 1},
+ {&__pyx_n_s_anchor, __pyx_k_anchor, sizeof(__pyx_k_anchor), 0, 0, 1, 1},
+ {&__pyx_kp_s_anchor_must_be_a_string, __pyx_k_anchor_must_be_a_string, sizeof(__pyx_k_anchor_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_anchor_must_be_a_string, __pyx_k_anchor_must_be_a_string, sizeof(__pyx_k_anchor_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_ascend_resolver, __pyx_k_ascend_resolver, sizeof(__pyx_k_ascend_resolver), 0, 0, 1, 1},
+ {&__pyx_n_s_buffer, __pyx_k_buffer, sizeof(__pyx_k_buffer), 0, 0, 1, 1},
+ {&__pyx_kp_s_but_found_another_document, __pyx_k_but_found_another_document, sizeof(__pyx_k_but_found_another_document), 0, 0, 1, 0},
+ {&__pyx_kp_u_but_found_another_document, __pyx_k_but_found_another_document, sizeof(__pyx_k_but_found_another_document), 0, 1, 0, 0},
+ {&__pyx_kp_s_byte_string, __pyx_k_byte_string, sizeof(__pyx_k_byte_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_byte_string, __pyx_k_byte_string, sizeof(__pyx_k_byte_string), 0, 1, 0, 0},
+ {&__pyx_n_s_canonical, __pyx_k_canonical, sizeof(__pyx_k_canonical), 0, 0, 1, 1},
+ {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1},
+ {&__pyx_n_s_column, __pyx_k_column, sizeof(__pyx_k_column), 0, 0, 1, 1},
+ {&__pyx_n_s_composer, __pyx_k_composer, sizeof(__pyx_k_composer), 0, 0, 1, 1},
+ {&__pyx_n_s_constructor, __pyx_k_constructor, sizeof(__pyx_k_constructor), 0, 0, 1, 1},
+ {&__pyx_n_s_descend_resolver, __pyx_k_descend_resolver, sizeof(__pyx_k_descend_resolver), 0, 0, 1, 1},
+ {&__pyx_n_s_emitter, __pyx_k_emitter, sizeof(__pyx_k_emitter), 0, 0, 1, 1},
+ {&__pyx_n_s_encoding, __pyx_k_encoding, sizeof(__pyx_k_encoding), 0, 0, 1, 1},
+ {&__pyx_n_u_encoding, __pyx_k_encoding, sizeof(__pyx_k_encoding), 0, 1, 0, 1},
+ {&__pyx_n_s_end_mark, __pyx_k_end_mark, sizeof(__pyx_k_end_mark), 0, 0, 1, 1},
+ {&__pyx_n_s_error, __pyx_k_error, sizeof(__pyx_k_error), 0, 0, 1, 1},
+ {&__pyx_n_s_events, __pyx_k_events, sizeof(__pyx_k_events), 0, 0, 1, 1},
+ {&__pyx_kp_s_expected_a_single_document_in_th, __pyx_k_expected_a_single_document_in_th, sizeof(__pyx_k_expected_a_single_document_in_th), 0, 0, 1, 0},
+ {&__pyx_kp_u_expected_a_single_document_in_th, __pyx_k_expected_a_single_document_in_th, sizeof(__pyx_k_expected_a_single_document_in_th), 0, 1, 0, 0},
+ {&__pyx_n_s_explicit, __pyx_k_explicit, sizeof(__pyx_k_explicit), 0, 0, 1, 1},
+ {&__pyx_n_s_explicit_end, __pyx_k_explicit_end, sizeof(__pyx_k_explicit_end), 0, 0, 1, 1},
+ {&__pyx_n_s_explicit_start, __pyx_k_explicit_start, sizeof(__pyx_k_explicit_start), 0, 0, 1, 1},
+ {&__pyx_kp_s_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 0, 1, 0},
+ {&__pyx_kp_u_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 1, 0, 0},
+ {&__pyx_n_s_flow_style, __pyx_k_flow_style, sizeof(__pyx_k_flow_style), 0, 0, 1, 1},
+ {&__pyx_kp_s_found_duplicate_anchor_first_occ, __pyx_k_found_duplicate_anchor_first_occ, sizeof(__pyx_k_found_duplicate_anchor_first_occ), 0, 0, 1, 0},
+ {&__pyx_kp_u_found_duplicate_anchor_first_occ, __pyx_k_found_duplicate_anchor_first_occ, sizeof(__pyx_k_found_duplicate_anchor_first_occ), 0, 1, 0, 0},
+ {&__pyx_kp_s_found_undefined_alias, __pyx_k_found_undefined_alias, sizeof(__pyx_k_found_undefined_alias), 0, 0, 1, 0},
+ {&__pyx_kp_u_found_undefined_alias, __pyx_k_found_undefined_alias, sizeof(__pyx_k_found_undefined_alias), 0, 1, 0, 0},
+ {&__pyx_n_s_get_version, __pyx_k_get_version, sizeof(__pyx_k_get_version), 0, 0, 1, 1},
+ {&__pyx_n_s_get_version_string, __pyx_k_get_version_string, sizeof(__pyx_k_get_version_string), 0, 0, 1, 1},
+ {&__pyx_kp_u_id_03d, __pyx_k_id_03d, sizeof(__pyx_k_id_03d), 0, 1, 0, 0},
+ {&__pyx_n_s_implicit, __pyx_k_implicit, sizeof(__pyx_k_implicit), 0, 0, 1, 1},
+ {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},
+ {&__pyx_kp_s_in_s_line_d_column_d, __pyx_k_in_s_line_d_column_d, sizeof(__pyx_k_in_s_line_d_column_d), 0, 0, 1, 0},
+ {&__pyx_n_s_indent, __pyx_k_indent, sizeof(__pyx_k_indent), 0, 0, 1, 1},
+ {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1},
+ {&__pyx_kp_s_invalid_event_s, __pyx_k_invalid_event_s, sizeof(__pyx_k_invalid_event_s), 0, 0, 1, 0},
+ {&__pyx_kp_u_invalid_event_s, __pyx_k_invalid_event_s, sizeof(__pyx_k_invalid_event_s), 0, 1, 0, 0},
+ {&__pyx_n_s_line, __pyx_k_line, sizeof(__pyx_k_line), 0, 0, 1, 1},
+ {&__pyx_n_s_line_break, __pyx_k_line_break, sizeof(__pyx_k_line_break), 0, 0, 1, 1},
+ {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
+ {&__pyx_n_s_major, __pyx_k_major, sizeof(__pyx_k_major), 0, 0, 1, 1},
+ {&__pyx_n_s_minor, __pyx_k_minor, sizeof(__pyx_k_minor), 0, 0, 1, 1},
+ {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
+ {&__pyx_kp_s_no_emitter_error, __pyx_k_no_emitter_error, sizeof(__pyx_k_no_emitter_error), 0, 0, 1, 0},
+ {&__pyx_kp_u_no_emitter_error, __pyx_k_no_emitter_error, sizeof(__pyx_k_no_emitter_error), 0, 1, 0, 0},
+ {&__pyx_kp_s_no_parser_error, __pyx_k_no_parser_error, sizeof(__pyx_k_no_parser_error), 0, 0, 1, 0},
+ {&__pyx_kp_u_no_parser_error, __pyx_k_no_parser_error, sizeof(__pyx_k_no_parser_error), 0, 1, 0, 0},
+ {&__pyx_n_s_nodes, __pyx_k_nodes, sizeof(__pyx_k_nodes), 0, 0, 1, 1},
+ {&__pyx_n_s_parser, __pyx_k_parser, sizeof(__pyx_k_parser), 0, 0, 1, 1},
+ {&__pyx_n_s_patch, __pyx_k_patch, sizeof(__pyx_k_patch), 0, 0, 1, 1},
+ {&__pyx_n_s_pointer, __pyx_k_pointer, sizeof(__pyx_k_pointer), 0, 0, 1, 1},
+ {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1},
+ {&__pyx_n_s_read, __pyx_k_read, sizeof(__pyx_k_read), 0, 0, 1, 1},
+ {&__pyx_n_s_reader, __pyx_k_reader, sizeof(__pyx_k_reader), 0, 0, 1, 1},
+ {&__pyx_n_s_representer, __pyx_k_representer, sizeof(__pyx_k_representer), 0, 0, 1, 1},
+ {&__pyx_n_s_resolve, __pyx_k_resolve, sizeof(__pyx_k_resolve), 0, 0, 1, 1},
+ {&__pyx_kp_s_root_src_pyyaml_ext__yaml_pyx, __pyx_k_root_src_pyyaml_ext__yaml_pyx, sizeof(__pyx_k_root_src_pyyaml_ext__yaml_pyx), 0, 0, 1, 0},
+ {&__pyx_n_s_scanner, __pyx_k_scanner, sizeof(__pyx_k_scanner), 0, 0, 1, 1},
+ {&__pyx_kp_s_second_occurence, __pyx_k_second_occurence, sizeof(__pyx_k_second_occurence), 0, 0, 1, 0},
+ {&__pyx_kp_u_second_occurence, __pyx_k_second_occurence, sizeof(__pyx_k_second_occurence), 0, 1, 0, 0},
+ {&__pyx_n_s_serializer, __pyx_k_serializer, sizeof(__pyx_k_serializer), 0, 0, 1, 1},
+ {&__pyx_kp_s_serializer_is_already_opened, __pyx_k_serializer_is_already_opened, sizeof(__pyx_k_serializer_is_already_opened), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_already_opened, __pyx_k_serializer_is_already_opened, sizeof(__pyx_k_serializer_is_already_opened), 0, 1, 0, 0},
+ {&__pyx_kp_s_serializer_is_closed, __pyx_k_serializer_is_closed, sizeof(__pyx_k_serializer_is_closed), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_closed, __pyx_k_serializer_is_closed, sizeof(__pyx_k_serializer_is_closed), 0, 1, 0, 0},
+ {&__pyx_kp_s_serializer_is_not_opened, __pyx_k_serializer_is_not_opened, sizeof(__pyx_k_serializer_is_not_opened), 0, 0, 1, 0},
+ {&__pyx_kp_u_serializer_is_not_opened, __pyx_k_serializer_is_not_opened, sizeof(__pyx_k_serializer_is_not_opened), 0, 1, 0, 0},
+ {&__pyx_n_s_start_mark, __pyx_k_start_mark, sizeof(__pyx_k_start_mark), 0, 0, 1, 1},
+ {&__pyx_n_s_stream, __pyx_k_stream, sizeof(__pyx_k_stream), 0, 0, 1, 1},
+ {&__pyx_n_s_style, __pyx_k_style, sizeof(__pyx_k_style), 0, 0, 1, 1},
+ {&__pyx_n_s_tag, __pyx_k_tag, sizeof(__pyx_k_tag), 0, 0, 1, 1},
+ {&__pyx_kp_s_tag_handle_must_be_a_string, __pyx_k_tag_handle_must_be_a_string, sizeof(__pyx_k_tag_handle_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_handle_must_be_a_string, __pyx_k_tag_handle_must_be_a_string, sizeof(__pyx_k_tag_handle_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_tag_must_be_a_string, __pyx_k_tag_must_be_a_string, sizeof(__pyx_k_tag_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_must_be_a_string, __pyx_k_tag_must_be_a_string, sizeof(__pyx_k_tag_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_tag_prefix_must_be_a_string, __pyx_k_tag_prefix_must_be_a_string, sizeof(__pyx_k_tag_prefix_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_tag_prefix_must_be_a_string, __pyx_k_tag_prefix_must_be_a_string, sizeof(__pyx_k_tag_prefix_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_tags, __pyx_k_tags, sizeof(__pyx_k_tags), 0, 0, 1, 1},
+ {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
+ {&__pyx_n_s_tokens, __pyx_k_tokens, sizeof(__pyx_k_tokens), 0, 0, 1, 1},
+ {&__pyx_kp_s_too_many_tags, __pyx_k_too_many_tags, sizeof(__pyx_k_too_many_tags), 0, 0, 1, 0},
+ {&__pyx_kp_u_too_many_tags, __pyx_k_too_many_tags, sizeof(__pyx_k_too_many_tags), 0, 1, 0, 0},
+ {&__pyx_kp_s_unicode_string, __pyx_k_unicode_string, sizeof(__pyx_k_unicode_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_unicode_string, __pyx_k_unicode_string, sizeof(__pyx_k_unicode_string), 0, 1, 0, 0},
+ {&__pyx_kp_s_unknown_event_type, __pyx_k_unknown_event_type, sizeof(__pyx_k_unknown_event_type), 0, 0, 1, 0},
+ {&__pyx_kp_u_unknown_event_type, __pyx_k_unknown_event_type, sizeof(__pyx_k_unknown_event_type), 0, 1, 0, 0},
+ {&__pyx_kp_s_unknown_token_type, __pyx_k_unknown_token_type, sizeof(__pyx_k_unknown_token_type), 0, 0, 1, 0},
+ {&__pyx_kp_u_unknown_token_type, __pyx_k_unknown_token_type, sizeof(__pyx_k_unknown_token_type), 0, 1, 0, 0},
+ {&__pyx_kp_s_utf_16_be, __pyx_k_utf_16_be, sizeof(__pyx_k_utf_16_be), 0, 0, 1, 0},
+ {&__pyx_kp_u_utf_16_be, __pyx_k_utf_16_be, sizeof(__pyx_k_utf_16_be), 0, 1, 0, 0},
+ {&__pyx_kp_s_utf_16_le, __pyx_k_utf_16_le, sizeof(__pyx_k_utf_16_le), 0, 0, 1, 0},
+ {&__pyx_kp_u_utf_16_le, __pyx_k_utf_16_le, sizeof(__pyx_k_utf_16_le), 0, 1, 0, 0},
+ {&__pyx_kp_u_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 1, 0, 0},
+ {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1},
+ {&__pyx_kp_s_value_must_be_a_string, __pyx_k_value_must_be_a_string, sizeof(__pyx_k_value_must_be_a_string), 0, 0, 1, 0},
+ {&__pyx_kp_u_value_must_be_a_string, __pyx_k_value_must_be_a_string, sizeof(__pyx_k_value_must_be_a_string), 0, 1, 0, 0},
+ {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1},
+ {&__pyx_n_s_width, __pyx_k_width, sizeof(__pyx_k_width), 0, 0, 1, 1},
+ {&__pyx_n_s_write, __pyx_k_write, sizeof(__pyx_k_write), 0, 0, 1, 1},
+ {&__pyx_n_s_yaml, __pyx_k_yaml, sizeof(__pyx_k_yaml), 0, 0, 1, 1},
+ {&__pyx_n_s_yaml_2, __pyx_k_yaml_2, sizeof(__pyx_k_yaml_2), 0, 0, 1, 1},
+ {0, 0, 0, 0, 0, 0, 0}
+};
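+
+/* Cached builtins: resolve the builtin exception types once so later raise sites reuse
+   the cached objects; the recorded __pyx_lineno values point back into _yaml.pyx for
+   error reporting. */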
+static int __Pyx_InitCachedBuiltins(void) {
+ __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
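+/* Cached constants: pre-pack the one-element argument tuples for every raise site in
+   _yaml.pyx (TypeError/ValueError/SerializerError messages, in both byte- and
+   unicode-string forms for Python 2 and 3) so raising does not allocate the tuple at
+   runtime. */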
+static int __Pyx_InitCachedConstants(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
+
+ /* "_yaml.pyx":301
+ * if PyString_CheckExact(stream) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string or stream input is required")
+ */
+ __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_a_string_or_stream_input_is_requ); if (unlikely(!__pyx_tuple_)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple_);
+ __Pyx_GIVEREF(__pyx_tuple_);
+
+ /* "_yaml.pyx":303
+ * raise TypeError("a string or stream input is required")
+ * else:
+ * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<<
+ * self.stream = stream
+ * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ */
+ __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_a_string_or_stream_input_is_requ); if (unlikely(!__pyx_tuple__2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__2);
+ __Pyx_GIVEREF(__pyx_tuple__2);
+
+ /* "_yaml.pyx":356
+ * return ParserError(context, context_mark, problem, problem_mark)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no parser error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no parser error")
+ */
+ __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_no_parser_error); if (unlikely(!__pyx_tuple__4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__4);
+ __Pyx_GIVEREF(__pyx_tuple__4);
+
+ /* "_yaml.pyx":358
+ * raise ValueError("no parser error")
+ * else:
+ * raise ValueError(u"no parser error") # <<<<<<<<<<<<<<
+ *
+ * def raw_scan(self):
+ */
+ __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_no_parser_error); if (unlikely(!__pyx_tuple__5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__5);
+ __Pyx_GIVEREF(__pyx_tuple__5);
+
+ /* "_yaml.pyx":479
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown token type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown token type")
+ */
+ __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_unknown_token_type); if (unlikely(!__pyx_tuple__11)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__11);
+ __Pyx_GIVEREF(__pyx_tuple__11);
+
+ /* "_yaml.pyx":481
+ * raise ValueError("unknown token type")
+ * else:
+ * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<<
+ *
+ * def get_token(self):
+ */
+ __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_u_unknown_token_type); if (unlikely(!__pyx_tuple__12)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__12);
+ __Pyx_GIVEREF(__pyx_tuple__12);
+
+ /* "_yaml.pyx":657
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("unknown event type") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"unknown event type")
+ */
+ __pyx_tuple__13 = PyTuple_Pack(1, __pyx_kp_s_unknown_event_type); if (unlikely(!__pyx_tuple__13)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__13);
+ __Pyx_GIVEREF(__pyx_tuple__13);
+
+ /* "_yaml.pyx":659
+ * raise ValueError("unknown event type")
+ * else:
+ * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<<
+ *
+ * def get_event(self):
+ */
+ __pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_u_unknown_event_type); if (unlikely(!__pyx_tuple__14)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__14);
+ __Pyx_GIVEREF(__pyx_tuple__14);
+
+ /* "_yaml.pyx":918
+ * if PyString_CheckExact(value) == 0:
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("a string value is expected") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"a string value is expected")
+ */
+ __pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_s_a_string_value_is_expected); if (unlikely(!__pyx_tuple__15)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__15);
+ __Pyx_GIVEREF(__pyx_tuple__15);
+
+ /* "_yaml.pyx":920
+ * raise TypeError("a string value is expected")
+ * else:
+ * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<<
+ * parser.stream_cache = value
+ * parser.stream_cache_pos = 0
+ */
+ __pyx_tuple__16 = PyTuple_Pack(1, __pyx_kp_u_a_string_value_is_expected); if (unlikely(!__pyx_tuple__16)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__16);
+ __Pyx_GIVEREF(__pyx_tuple__16);
+
+ /* "_yaml.pyx":1012
+ * return EmitterError(problem)
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("no emitter error") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"no emitter error")
+ */
+ __pyx_tuple__20 = PyTuple_Pack(1, __pyx_kp_s_no_emitter_error); if (unlikely(!__pyx_tuple__20)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__20);
+ __Pyx_GIVEREF(__pyx_tuple__20);
+
+ /* "_yaml.pyx":1014
+ * raise ValueError("no emitter error")
+ * else:
+ * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<<
+ *
+ * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ */
+ __pyx_tuple__21 = PyTuple_Pack(1, __pyx_kp_u_no_emitter_error); if (unlikely(!__pyx_tuple__21)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__21);
+ __Pyx_GIVEREF(__pyx_tuple__21);
+
+ /* "_yaml.pyx":1058
+ * if len(event_object.tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_tuple__22 = PyTuple_Pack(1, __pyx_kp_s_too_many_tags); if (unlikely(!__pyx_tuple__22)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__22);
+ __Pyx_GIVEREF(__pyx_tuple__22);
+
+ /* "_yaml.pyx":1060
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ __pyx_tuple__23 = PyTuple_Pack(1, __pyx_kp_u_too_many_tags); if (unlikely(!__pyx_tuple__23)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__23);
+ __Pyx_GIVEREF(__pyx_tuple__23);
+
+ /* "_yaml.pyx":1071
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_tuple__24 = PyTuple_Pack(1, __pyx_kp_s_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__24)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__24);
+ __Pyx_GIVEREF(__pyx_tuple__24);
+
+ /* "_yaml.pyx":1073
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ __pyx_tuple__25 = PyTuple_Pack(1, __pyx_kp_u_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__25)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__25);
+ __Pyx_GIVEREF(__pyx_tuple__25);
+
+ /* "_yaml.pyx":1080
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_tuple__26 = PyTuple_Pack(1, __pyx_kp_s_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__26);
+ __Pyx_GIVEREF(__pyx_tuple__26);
+
+ /* "_yaml.pyx":1082
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ __pyx_tuple__27 = PyTuple_Pack(1, __pyx_kp_u_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__27)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__27);
+ __Pyx_GIVEREF(__pyx_tuple__27);
+
+ /* "_yaml.pyx":1103
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__28 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__28)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__28);
+ __Pyx_GIVEREF(__pyx_tuple__28);
+
+ /* "_yaml.pyx":1105
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if yaml_alias_event_initialize(event, anchor) == 0:
+ */
+ __pyx_tuple__29 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__29)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__29);
+ __Pyx_GIVEREF(__pyx_tuple__29);
+
+ /* "_yaml.pyx":1117
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__30 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__30)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__30);
+ __Pyx_GIVEREF(__pyx_tuple__30);
+
+ /* "_yaml.pyx":1119
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_tuple__31 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__31)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__31);
+ __Pyx_GIVEREF(__pyx_tuple__31);
+
+ /* "_yaml.pyx":1128
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__32 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__32)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__32);
+ __Pyx_GIVEREF(__pyx_tuple__32);
+
+ /* "_yaml.pyx":1130
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = event_object.value
+ */
+ __pyx_tuple__33 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__33)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__33);
+ __Pyx_GIVEREF(__pyx_tuple__33);
+
+ /* "_yaml.pyx":1137
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_tuple__34 = PyTuple_Pack(1, __pyx_kp_s_value_must_be_a_string); if (unlikely(!__pyx_tuple__34)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__34);
+ __Pyx_GIVEREF(__pyx_tuple__34);
+
+ /* "_yaml.pyx":1139
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ __pyx_tuple__35 = PyTuple_Pack(1, __pyx_kp_u_value_must_be_a_string); if (unlikely(!__pyx_tuple__35)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__35);
+ __Pyx_GIVEREF(__pyx_tuple__35);
+
+ /* "_yaml.pyx":1168
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__36 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__36)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__36);
+ __Pyx_GIVEREF(__pyx_tuple__36);
+
+ /* "_yaml.pyx":1170
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_tuple__37 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__37)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__37);
+ __Pyx_GIVEREF(__pyx_tuple__37);
+
+ /* "_yaml.pyx":1179
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__38 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__38)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__38);
+ __Pyx_GIVEREF(__pyx_tuple__38);
+
+ /* "_yaml.pyx":1181
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ __pyx_tuple__39 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__39)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__39);
+ __Pyx_GIVEREF(__pyx_tuple__39);
+
+ /* "_yaml.pyx":1200
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__40 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__40)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__40);
+ __Pyx_GIVEREF(__pyx_tuple__40);
+
+ /* "_yaml.pyx":1202
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * tag = NULL
+ */
+ __pyx_tuple__41 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__41)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__41);
+ __Pyx_GIVEREF(__pyx_tuple__41);
+
+ /* "_yaml.pyx":1211
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__42 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__42)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__42);
+ __Pyx_GIVEREF(__pyx_tuple__42);
+
+ /* "_yaml.pyx":1213
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * implicit = 0
+ */
+ __pyx_tuple__43 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__43)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__43);
+ __Pyx_GIVEREF(__pyx_tuple__43);
+
+ /* "_yaml.pyx":1263
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __pyx_tuple__44 = PyTuple_Pack(1, __pyx_kp_s_serializer_is_closed); if (unlikely(!__pyx_tuple__44)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__44);
+ __Pyx_GIVEREF(__pyx_tuple__44);
+
+ /* "_yaml.pyx":1265
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_tuple__45 = PyTuple_Pack(1, __pyx_kp_u_serializer_is_closed); if (unlikely(!__pyx_tuple__45)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__45);
+ __Pyx_GIVEREF(__pyx_tuple__45);
+
+ /* "_yaml.pyx":1268
+ * else:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is already opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is already opened")
+ */
+ __pyx_tuple__46 = PyTuple_Pack(1, __pyx_kp_s_serializer_is_already_opened); if (unlikely(!__pyx_tuple__46)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__46);
+ __Pyx_GIVEREF(__pyx_tuple__46);
+
+ /* "_yaml.pyx":1270
+ * raise SerializerError("serializer is already opened")
+ * else:
+ * raise SerializerError(u"serializer is already opened") # <<<<<<<<<<<<<<
+ *
+ * def close(self):
+ */
+ __pyx_tuple__47 = PyTuple_Pack(1, __pyx_kp_u_serializer_is_already_opened); if (unlikely(!__pyx_tuple__47)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__47);
+ __Pyx_GIVEREF(__pyx_tuple__47);
+
+ /* "_yaml.pyx":1276
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __pyx_tuple__48 = PyTuple_Pack(1, __pyx_kp_s_serializer_is_not_opened); if (unlikely(!__pyx_tuple__48)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__48);
+ __Pyx_GIVEREF(__pyx_tuple__48);
+
+ /* "_yaml.pyx":1278
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 0:
+ * yaml_stream_end_event_initialize(&event)
+ */
+ __pyx_tuple__49 = PyTuple_Pack(1, __pyx_kp_u_serializer_is_not_opened); if (unlikely(!__pyx_tuple__49)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__49);
+ __Pyx_GIVEREF(__pyx_tuple__49);
+
+ /* "_yaml.pyx":1295
+ * if self.closed == -1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is not opened")
+ */
+ __pyx_tuple__50 = PyTuple_Pack(1, __pyx_kp_s_serializer_is_not_opened); if (unlikely(!__pyx_tuple__50)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__50);
+ __Pyx_GIVEREF(__pyx_tuple__50);
+
+ /* "_yaml.pyx":1297
+ * raise SerializerError("serializer is not opened")
+ * else:
+ * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<<
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ */
+ __pyx_tuple__51 = PyTuple_Pack(1, __pyx_kp_u_serializer_is_not_opened); if (unlikely(!__pyx_tuple__51)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__51);
+ __Pyx_GIVEREF(__pyx_tuple__51);
+
+ /* "_yaml.pyx":1300
+ * elif self.closed == 1:
+ * if PY_MAJOR_VERSION < 3:
+ * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<<
+ * else:
+ * raise SerializerError(u"serializer is closed")
+ */
+ __pyx_tuple__52 = PyTuple_Pack(1, __pyx_kp_s_serializer_is_closed); if (unlikely(!__pyx_tuple__52)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__52);
+ __Pyx_GIVEREF(__pyx_tuple__52);
+
+ /* "_yaml.pyx":1302
+ * raise SerializerError("serializer is closed")
+ * else:
+ * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<<
+ * cache = []
+ * version_directive = NULL
+ */
+ __pyx_tuple__53 = PyTuple_Pack(1, __pyx_kp_u_serializer_is_closed); if (unlikely(!__pyx_tuple__53)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__53);
+ __Pyx_GIVEREF(__pyx_tuple__53);
+
+ /* "_yaml.pyx":1314
+ * if len(self.use_tags) > 128:
+ * if PY_MAJOR_VERSION < 3:
+ * raise ValueError("too many tags") # <<<<<<<<<<<<<<
+ * else:
+ * raise ValueError(u"too many tags")
+ */
+ __pyx_tuple__54 = PyTuple_Pack(1, __pyx_kp_s_too_many_tags); if (unlikely(!__pyx_tuple__54)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__54);
+ __Pyx_GIVEREF(__pyx_tuple__54);
+
+ /* "_yaml.pyx":1316
+ * raise ValueError("too many tags")
+ * else:
+ * raise ValueError(u"too many tags") # <<<<<<<<<<<<<<
+ * tag_directives_start = tag_directives_value
+ * tag_directives_end = tag_directives_value
+ */
+ __pyx_tuple__55 = PyTuple_Pack(1, __pyx_kp_u_too_many_tags); if (unlikely(!__pyx_tuple__55)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__55);
+ __Pyx_GIVEREF(__pyx_tuple__55);
+
+ /* "_yaml.pyx":1326
+ * if not PyString_CheckExact(handle):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag handle must be a string")
+ */
+ __pyx_tuple__56 = PyTuple_Pack(1, __pyx_kp_s_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__56)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__56);
+ __Pyx_GIVEREF(__pyx_tuple__56);
+
+ /* "_yaml.pyx":1328
+ * raise TypeError("tag handle must be a string")
+ * else:
+ * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.handle = PyString_AS_STRING(handle)
+ * if PyUnicode_CheckExact(prefix):
+ */
+ __pyx_tuple__57 = PyTuple_Pack(1, __pyx_kp_u_tag_handle_must_be_a_string); if (unlikely(!__pyx_tuple__57)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__57);
+ __Pyx_GIVEREF(__pyx_tuple__57);
+
+ /* "_yaml.pyx":1335
+ * if not PyString_CheckExact(prefix):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag prefix must be a string")
+ */
+ __pyx_tuple__58 = PyTuple_Pack(1, __pyx_kp_s_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__58)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__58);
+ __Pyx_GIVEREF(__pyx_tuple__58);
+
+ /* "_yaml.pyx":1337
+ * raise TypeError("tag prefix must be a string")
+ * else:
+ * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<<
+ * tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ * tag_directives_end = tag_directives_end+1
+ */
+ __pyx_tuple__59 = PyTuple_Pack(1, __pyx_kp_u_tag_prefix_must_be_a_string); if (unlikely(!__pyx_tuple__59)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__59);
+ __Pyx_GIVEREF(__pyx_tuple__59);
+
+ /* "_yaml.pyx":1394
+ * if not PyString_CheckExact(anchor_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"anchor must be a string")
+ */
+ __pyx_tuple__60 = PyTuple_Pack(1, __pyx_kp_s_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__60)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__60);
+ __Pyx_GIVEREF(__pyx_tuple__60);
+
+ /* "_yaml.pyx":1396
+ * raise TypeError("anchor must be a string")
+ * else:
+ * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<<
+ * anchor = PyString_AS_STRING(anchor_object)
+ * if node in self.serialized_nodes:
+ */
+ __pyx_tuple__61 = PyTuple_Pack(1, __pyx_kp_u_anchor_must_be_a_string); if (unlikely(!__pyx_tuple__61)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__61);
+ __Pyx_GIVEREF(__pyx_tuple__61);
+
+ /* "_yaml.pyx":1412
+ * quoted_implicit = 0
+ * tag_object = node.tag
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<<
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ */
+ __pyx_tuple__62 = PyTuple_Pack(2, Py_True, Py_False); if (unlikely(!__pyx_tuple__62)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__62);
+ __Pyx_GIVEREF(__pyx_tuple__62);
+
+ /* "_yaml.pyx":1414
+ * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ * plain_implicit = 1
+ * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<<
+ * quoted_implicit = 1
+ * tag = NULL
+ */
+ __pyx_tuple__63 = PyTuple_Pack(2, Py_False, Py_True); if (unlikely(!__pyx_tuple__63)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__63);
+ __Pyx_GIVEREF(__pyx_tuple__63);
+
+ /* "_yaml.pyx":1422
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__64 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__64)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__64);
+ __Pyx_GIVEREF(__pyx_tuple__64);
+
+ /* "_yaml.pyx":1424
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * value_object = node.value
+ */
+ __pyx_tuple__65 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__65)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__65);
+ __Pyx_GIVEREF(__pyx_tuple__65);
+
+ /* "_yaml.pyx":1431
+ * if not PyString_CheckExact(value_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("value must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"value must be a string")
+ */
+ __pyx_tuple__66 = PyTuple_Pack(1, __pyx_kp_s_value_must_be_a_string); if (unlikely(!__pyx_tuple__66)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__66);
+ __Pyx_GIVEREF(__pyx_tuple__66);
+
+ /* "_yaml.pyx":1433
+ * raise TypeError("value must be a string")
+ * else:
+ * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<<
+ * value = PyString_AS_STRING(value_object)
+ * length = PyString_GET_SIZE(value_object)
+ */
+ __pyx_tuple__67 = PyTuple_Pack(1, __pyx_kp_u_value_must_be_a_string); if (unlikely(!__pyx_tuple__67)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__67);
+ __Pyx_GIVEREF(__pyx_tuple__67);
+
+ /* "_yaml.pyx":1463
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__68 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__68)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__68);
+ __Pyx_GIVEREF(__pyx_tuple__68);
+
+ /* "_yaml.pyx":1465
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ */
+ __pyx_tuple__69 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__69)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__69);
+ __Pyx_GIVEREF(__pyx_tuple__69);
+
+ /* "_yaml.pyx":1495
+ * if not PyString_CheckExact(tag_object):
+ * if PY_MAJOR_VERSION < 3:
+ * raise TypeError("tag must be a string") # <<<<<<<<<<<<<<
+ * else:
+ * raise TypeError(u"tag must be a string")
+ */
+ __pyx_tuple__70 = PyTuple_Pack(1, __pyx_kp_s_tag_must_be_a_string); if (unlikely(!__pyx_tuple__70)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__70);
+ __Pyx_GIVEREF(__pyx_tuple__70);
+
+ /* "_yaml.pyx":1497
+ * raise TypeError("tag must be a string")
+ * else:
+ * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<<
+ * tag = PyString_AS_STRING(tag_object)
+ * mapping_style = YAML_BLOCK_MAPPING_STYLE
+ */
+ __pyx_tuple__71 = PyTuple_Pack(1, __pyx_kp_u_tag_must_be_a_string); if (unlikely(!__pyx_tuple__71)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__71);
+ __Pyx_GIVEREF(__pyx_tuple__71);
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+ __pyx_tuple__72 = PyTuple_Pack(1, __pyx_n_s_value); if (unlikely(!__pyx_tuple__72)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__72);
+ __Pyx_GIVEREF(__pyx_tuple__72);
+ __pyx_codeobj__73 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__72, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_root_src_pyyaml_ext__yaml_pyx, __pyx_n_s_get_version_string, 4, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__73)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+ __pyx_tuple__74 = PyTuple_Pack(3, __pyx_n_s_major, __pyx_n_s_minor, __pyx_n_s_patch); if (unlikely(!__pyx_tuple__74)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_tuple__74);
+ __Pyx_GIVEREF(__pyx_tuple__74);
+ __pyx_codeobj__75 = (PyObject*)__Pyx_PyCode_New(0, 0, 3, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__74, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_root_src_pyyaml_ext__yaml_pyx, __pyx_n_s_get_version, 12, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__75)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_RefNannyFinishContext();
+ return 0;
+ __pyx_L1_error:;
+ __Pyx_RefNannyFinishContext();
+ return -1;
+}
+
+static int __Pyx_InitGlobals(void) {
+ if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+#if PY_MAJOR_VERSION < 3
+PyMODINIT_FUNC init_yaml(void); /*proto*/
+PyMODINIT_FUNC init_yaml(void)
+#else
+PyMODINIT_FUNC PyInit__yaml(void); /*proto*/
+PyMODINIT_FUNC PyInit__yaml(void)
+#endif
+{
+ PyObject *__pyx_t_1 = NULL;
+ PyObject *__pyx_t_2 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannyDeclarations
+ #if CYTHON_REFNANNY
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+ if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+ }
+ #endif
+ __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__yaml(void)", 0);
+ if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #ifdef __Pyx_CyFunction_USED
+ if (__Pyx_CyFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ #ifdef __Pyx_FusedFunction_USED
+ if (__pyx_FusedFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ #ifdef __Pyx_Generator_USED
+ if (__pyx_Generator_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ /*--- Library function declarations ---*/
+ /*--- Threads initialization code ---*/
+ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+ #ifdef WITH_THREAD /* Python build with threading support? */
+ PyEval_InitThreads();
+ #endif
+ #endif
+ /*--- Module creation code ---*/
+ #if PY_MAJOR_VERSION < 3
+ __pyx_m = Py_InitModule4(__Pyx_NAMESTR("_yaml"), __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
+ #else
+ __pyx_m = PyModule_Create(&__pyx_moduledef);
+ #endif
+ if (unlikely(!__pyx_m)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ Py_INCREF(__pyx_d);
+ __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); if (unlikely(!__pyx_b)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #if CYTHON_COMPILING_IN_PYPY
+ Py_INCREF(__pyx_b);
+ #endif
+ if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ /*--- Initialize various global constants etc. ---*/
+ if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
+ if (__Pyx_init_sys_getdefaultencoding_params() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ #endif
+ if (__pyx_module_is_main__yaml) {
+ if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+ }
+ #if PY_MAJOR_VERSION >= 3
+ {
+ PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (!PyDict_GetItemString(modules, "_yaml")) {
+ if (unlikely(PyDict_SetItemString(modules, "_yaml", __pyx_m) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ }
+ }
+ #endif
+ /*--- Builtin init code ---*/
+ if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ /*--- Constants init code ---*/
+ if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ /*--- Global init code ---*/
+ /*--- Variable export code ---*/
+ /*--- Function export code ---*/
+ /*--- Type init code ---*/
+ if (PyType_Ready(&__pyx_type_5_yaml_Mark) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_type_5_yaml_Mark.tp_print = 0;
+ if (__Pyx_SetAttrString(__pyx_m, "Mark", (PyObject *)&__pyx_type_5_yaml_Mark) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_ptype_5_yaml_Mark = &__pyx_type_5_yaml_Mark;
+ __pyx_vtabptr_5_yaml_CParser = &__pyx_vtable_5_yaml_CParser;
+ __pyx_vtable_5_yaml_CParser._parser_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parser_error;
+ __pyx_vtable_5_yaml_CParser._scan = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__scan;
+ __pyx_vtable_5_yaml_CParser._token_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *))__pyx_f_5_yaml_7CParser__token_to_object;
+ __pyx_vtable_5_yaml_CParser._parse = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse;
+ __pyx_vtable_5_yaml_CParser._event_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *))__pyx_f_5_yaml_7CParser__event_to_object;
+ __pyx_vtable_5_yaml_CParser._compose_document = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__compose_document;
+ __pyx_vtable_5_yaml_CParser._compose_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *))__pyx_f_5_yaml_7CParser__compose_node;
+ __pyx_vtable_5_yaml_CParser._compose_scalar_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_scalar_node;
+ __pyx_vtable_5_yaml_CParser._compose_sequence_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_sequence_node;
+ __pyx_vtable_5_yaml_CParser._compose_mapping_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_mapping_node;
+ __pyx_vtable_5_yaml_CParser._parse_next_event = (int (*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse_next_event;
+ if (PyType_Ready(&__pyx_type_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_type_5_yaml_CParser.tp_print = 0;
+ if (__Pyx_SetVtable(__pyx_type_5_yaml_CParser.tp_dict, __pyx_vtabptr_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__Pyx_SetAttrString(__pyx_m, "CParser", (PyObject *)&__pyx_type_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_ptype_5_yaml_CParser = &__pyx_type_5_yaml_CParser;
+ __pyx_vtabptr_5_yaml_CEmitter = &__pyx_vtable_5_yaml_CEmitter;
+ __pyx_vtable_5_yaml_CEmitter._emitter_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CEmitter *))__pyx_f_5_yaml_8CEmitter__emitter_error;
+ __pyx_vtable_5_yaml_CEmitter._object_to_event = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *))__pyx_f_5_yaml_8CEmitter__object_to_event;
+ __pyx_vtable_5_yaml_CEmitter._anchor_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *))__pyx_f_5_yaml_8CEmitter__anchor_node;
+ __pyx_vtable_5_yaml_CEmitter._serialize_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *))__pyx_f_5_yaml_8CEmitter__serialize_node;
+ if (PyType_Ready(&__pyx_type_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_type_5_yaml_CEmitter.tp_print = 0;
+ if (__Pyx_SetVtable(__pyx_type_5_yaml_CEmitter.tp_dict, __pyx_vtabptr_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ if (__Pyx_SetAttrString(__pyx_m, "CEmitter", (PyObject *)&__pyx_type_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __pyx_ptype_5_yaml_CEmitter = &__pyx_type_5_yaml_CEmitter;
+ /*--- Type import code ---*/
+ /*--- Variable import code ---*/
+ /*--- Function import code ---*/
+ /*--- Execution code ---*/
+
+ /* "_yaml.pyx":2
+ *
+ * import yaml # <<<<<<<<<<<<<<
+ *
+ * def get_version_string():
+ */
+ __pyx_t_1 = __Pyx_Import(__pyx_n_s_yaml, 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_yaml, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":4
+ * import yaml
+ *
+ * def get_version_string(): # <<<<<<<<<<<<<<
+ * cdef char *value
+ * value = yaml_get_version_string()
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_1get_version_string, NULL, __pyx_n_s_yaml_2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_version_string, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":12
+ * return PyUnicode_FromString(value)
+ *
+ * def get_version(): # <<<<<<<<<<<<<<
+ * cdef int major, minor, patch
+ * yaml_get_version(&major, &minor, &patch)
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_3get_version, NULL, __pyx_n_s_yaml_2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_version, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":18
+ *
+ * #Mark = yaml.error.Mark
+ * YAMLError = yaml.error.YAMLError # <<<<<<<<<<<<<<
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_error); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_YAMLError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_YAMLError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":19
+ * #Mark = yaml.error.Mark
+ * YAMLError = yaml.error.YAMLError
+ * ReaderError = yaml.reader.ReaderError # <<<<<<<<<<<<<<
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_reader); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ReaderError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ReaderError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":20
+ * YAMLError = yaml.error.YAMLError
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError # <<<<<<<<<<<<<<
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_scanner); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScannerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScannerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":21
+ * ReaderError = yaml.reader.ReaderError
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError # <<<<<<<<<<<<<<
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_parser); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ParserError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ParserError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":22
+ * ScannerError = yaml.scanner.ScannerError
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError # <<<<<<<<<<<<<<
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_composer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ComposerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ComposerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":23
+ * ParserError = yaml.parser.ParserError
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError # <<<<<<<<<<<<<<
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_constructor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ConstructorError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ConstructorError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":24
+ * ComposerError = yaml.composer.ComposerError
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError # <<<<<<<<<<<<<<
+ * SerializerError = yaml.serializer.SerializerError
+ * RepresenterError = yaml.representer.RepresenterError
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_emitter); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_EmitterError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_EmitterError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":25
+ * ConstructorError = yaml.constructor.ConstructorError
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError # <<<<<<<<<<<<<<
+ * RepresenterError = yaml.representer.RepresenterError
+ *
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_serializer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SerializerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SerializerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":26
+ * EmitterError = yaml.emitter.EmitterError
+ * SerializerError = yaml.serializer.SerializerError
+ * RepresenterError = yaml.representer.RepresenterError # <<<<<<<<<<<<<<
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_representer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_RepresenterError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_RepresenterError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":28
+ * RepresenterError = yaml.representer.RepresenterError
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken # <<<<<<<<<<<<<<
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":29
+ *
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ * StreamEndToken = yaml.tokens.StreamEndToken # <<<<<<<<<<<<<<
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":30
+ * StreamStartToken = yaml.tokens.StreamStartToken
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken # <<<<<<<<<<<<<<
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DirectiveToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DirectiveToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":31
+ * StreamEndToken = yaml.tokens.StreamEndToken
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken # <<<<<<<<<<<<<<
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":32
+ * DirectiveToken = yaml.tokens.DirectiveToken
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken # <<<<<<<<<<<<<<
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":33
+ * DocumentStartToken = yaml.tokens.DocumentStartToken
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken # <<<<<<<<<<<<<<
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockSequenceStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockSequenceStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":34
+ * DocumentEndToken = yaml.tokens.DocumentEndToken
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken # <<<<<<<<<<<<<<
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockMappingStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockMappingStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":35
+ * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken # <<<<<<<<<<<<<<
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":36
+ * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken # <<<<<<<<<<<<<<
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowSequenceStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowSequenceStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":37
+ * BlockEndToken = yaml.tokens.BlockEndToken
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken # <<<<<<<<<<<<<<
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowMappingStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowMappingStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":38
+ * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken # <<<<<<<<<<<<<<
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowSequenceEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowSequenceEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":39
+ * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken # <<<<<<<<<<<<<<
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowMappingEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowMappingEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":40
+ * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken # <<<<<<<<<<<<<<
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_KeyToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_KeyToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":41
+ * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken # <<<<<<<<<<<<<<
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ValueToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ValueToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":42
+ * KeyToken = yaml.tokens.KeyToken
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken # <<<<<<<<<<<<<<
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_BlockEntryToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_BlockEntryToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":43
+ * ValueToken = yaml.tokens.ValueToken
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken # <<<<<<<<<<<<<<
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_FlowEntryToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_FlowEntryToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":44
+ * BlockEntryToken = yaml.tokens.BlockEntryToken
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken # <<<<<<<<<<<<<<
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AliasToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AliasToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":45
+ * FlowEntryToken = yaml.tokens.FlowEntryToken
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken # <<<<<<<<<<<<<<
+ * TagToken = yaml.tokens.TagToken
+ * ScalarToken = yaml.tokens.ScalarToken
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AnchorToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AnchorToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":46
+ * AliasToken = yaml.tokens.AliasToken
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken # <<<<<<<<<<<<<<
+ * ScalarToken = yaml.tokens.ScalarToken
+ *
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_TagToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_TagToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":47
+ * AnchorToken = yaml.tokens.AnchorToken
+ * TagToken = yaml.tokens.TagToken
+ * ScalarToken = yaml.tokens.ScalarToken # <<<<<<<<<<<<<<
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":49
+ * ScalarToken = yaml.tokens.ScalarToken
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent # <<<<<<<<<<<<<<
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":50
+ *
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ * StreamEndEvent = yaml.events.StreamEndEvent # <<<<<<<<<<<<<<
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_StreamEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":51
+ * StreamStartEvent = yaml.events.StreamStartEvent
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent # <<<<<<<<<<<<<<
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":52
+ * StreamEndEvent = yaml.events.StreamEndEvent
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent # <<<<<<<<<<<<<<
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_DocumentEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_DocumentEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":53
+ * DocumentStartEvent = yaml.events.DocumentStartEvent
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent # <<<<<<<<<<<<<<
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_AliasEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_AliasEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":54
+ * DocumentEndEvent = yaml.events.DocumentEndEvent
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent # <<<<<<<<<<<<<<
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":55
+ * AliasEvent = yaml.events.AliasEvent
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent # <<<<<<<<<<<<<<
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":56
+ * ScalarEvent = yaml.events.ScalarEvent
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent # <<<<<<<<<<<<<<
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":57
+ * SequenceStartEvent = yaml.events.SequenceStartEvent
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent # <<<<<<<<<<<<<<
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ *
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":58
+ * SequenceEndEvent = yaml.events.SequenceEndEvent
+ * MappingStartEvent = yaml.events.MappingStartEvent
+ * MappingEndEvent = yaml.events.MappingEndEvent # <<<<<<<<<<<<<<
+ *
+ * ScalarNode = yaml.nodes.ScalarNode
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":60
+ * MappingEndEvent = yaml.events.MappingEndEvent
+ *
+ * ScalarNode = yaml.nodes.ScalarNode # <<<<<<<<<<<<<<
+ * SequenceNode = yaml.nodes.SequenceNode
+ * MappingNode = yaml.nodes.MappingNode
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_ScalarNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_ScalarNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":61
+ *
+ * ScalarNode = yaml.nodes.ScalarNode
+ * SequenceNode = yaml.nodes.SequenceNode # <<<<<<<<<<<<<<
+ * MappingNode = yaml.nodes.MappingNode
+ *
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_SequenceNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_SequenceNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":62
+ * ScalarNode = yaml.nodes.ScalarNode
+ * SequenceNode = yaml.nodes.SequenceNode
+ * MappingNode = yaml.nodes.MappingNode # <<<<<<<<<<<<<<
+ *
+ * cdef class Mark:
+ */
+ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_MappingNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_MappingNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "_yaml.pyx":2
+ *
+ * import yaml # <<<<<<<<<<<<<<
+ *
+ * def get_version_string():
+ */
+ __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_2);
+ if (__pyx_m) {
+ __Pyx_AddTraceback("init _yaml", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ Py_DECREF(__pyx_m); __pyx_m = 0;
+ } else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_ImportError, "init _yaml");
+ }
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ #if PY_MAJOR_VERSION < 3
+ return;
+ #else
+ return __pyx_m;
+ #endif
+}
+
+/* Runtime support code */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+ PyObject *m = NULL, *p = NULL;
+ void *r = NULL;
+ m = PyImport_ImportModule((char *)modname);
+ if (!m) goto end;
+ p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
+ if (!p) goto end;
+ r = PyLong_AsVoidPtr(p);
+end:
+ Py_XDECREF(p);
+ Py_XDECREF(m);
+ return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif /* CYTHON_REFNANNY */
+
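+/* Resolve a name in the builtins module, raising NameError if absent. */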
+static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
+ PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
+ if (unlikely(!result)) {
+ PyErr_Format(PyExc_NameError,
+#if PY_MAJOR_VERSION >= 3
+ "name '%U' is not defined", name);
+#else
+ "name '%.200s' is not defined", PyString_AS_STRING(name));
+#endif
+ }
+ return result;
+}
+
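+/* Raise a TypeError describing how many positional arguments were
+   expected ("at least"/"at most"/"exactly") versus how many were given. */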
+static void __Pyx_RaiseArgtupleInvalid(
+ const char* func_name,
+ int exact,
+ Py_ssize_t num_min,
+ Py_ssize_t num_max,
+ Py_ssize_t num_found)
+{
+ Py_ssize_t num_expected;
+ const char *more_or_less;
+ if (num_found < num_min) {
+ num_expected = num_min;
+ more_or_less = "at least";
+ } else {
+ num_expected = num_max;
+ more_or_less = "at most";
+ }
+ if (exact) {
+ more_or_less = "exactly";
+ }
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
+ func_name, more_or_less, num_expected,
+ (num_expected == 1) ? "" : "s", num_found);
+}
+
+static void __Pyx_RaiseDoubleKeywordsError(
+ const char* func_name,
+ PyObject* kw_name)
+{
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION >= 3
+ "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+ #else
+ "%s() got multiple values for keyword argument '%s'", func_name,
+ PyString_AsString(kw_name));
+ #endif
+}
+
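+/* Match the keywords in `kwds` against the expected argument names in
+   `argnames`, storing hits in `values`; unknown keywords go into `kwds2`
+   (the **kwargs dict) when present, otherwise a TypeError is raised.
+   Duplicate and non-string keywords are likewise rejected. */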
+static int __Pyx_ParseOptionalKeywords(
+ PyObject *kwds,
+ PyObject **argnames[],
+ PyObject *kwds2,
+ PyObject *values[],
+ Py_ssize_t num_pos_args,
+ const char* function_name)
+{
+ PyObject *key = 0, *value = 0;
+ Py_ssize_t pos = 0;
+ PyObject*** name;
+ PyObject*** first_kw_arg = argnames + num_pos_args;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ name = first_kw_arg;
+ while (*name && (**name != key)) name++;
+ if (*name) {
+ values[name-argnames] = value;
+ continue;
+ }
+ name = first_kw_arg;
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
+ while (*name) {
+ if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**name, key)) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ if ((**argname == key) || (
+ (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**argname, key))) {
+ goto arg_passed_twice;
+ }
+ argname++;
+ }
+ }
+ } else
+ #endif
+ if (likely(PyUnicode_Check(key))) {
+ while (*name) {
+ int cmp = (**name == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**name, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ int cmp = (**argname == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**argname, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) goto arg_passed_twice;
+ argname++;
+ }
+ }
+ } else
+ goto invalid_keyword_type;
+ if (kwds2) {
+ if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+ } else {
+ goto invalid_keyword;
+ }
+ }
+ return 0;
+arg_passed_twice:
+ __Pyx_RaiseDoubleKeywordsError(function_name, key);
+ goto bad;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ goto bad;
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+bad:
+ return -1;
+}
+
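+/* Save and restore the thread-state exception (what sys.exc_info() sees),
+   accessing the PyThreadState fields directly on CPython. */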
+static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyThreadState *tstate = PyThreadState_GET();
+ *type = tstate->exc_type;
+ *value = tstate->exc_value;
+ *tb = tstate->exc_traceback;
+ Py_XINCREF(*type);
+ Py_XINCREF(*value);
+ Py_XINCREF(*tb);
+#else
+ PyErr_GetExcInfo(type, value, tb);
+#endif
+}
+static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ PyThreadState *tstate = PyThreadState_GET();
+ tmp_type = tstate->exc_type;
+ tmp_value = tstate->exc_value;
+ tmp_tb = tstate->exc_traceback;
+ tstate->exc_type = type;
+ tstate->exc_value = value;
+ tstate->exc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+#else
+ PyErr_SetExcInfo(type, value, tb);
+#endif
+}
+
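+/* Fetch and normalize the pending exception for an `except` clause and
+   install it as the current sys.exc_info() while the handler runs. */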
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
+ PyObject *local_type, *local_value, *local_tb;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ PyThreadState *tstate = PyThreadState_GET();
+ local_type = tstate->curexc_type;
+ local_value = tstate->curexc_value;
+ local_tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+#else
+ PyErr_Fetch(&local_type, &local_value, &local_tb);
+#endif
+ PyErr_NormalizeException(&local_type, &local_value, &local_tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(tstate->curexc_type))
+#else
+ if (unlikely(PyErr_Occurred()))
+#endif
+ goto bad;
+ #if PY_MAJOR_VERSION >= 3
+ if (local_tb) {
+ if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
+ goto bad;
+ }
+ #endif
+ Py_XINCREF(local_tb);
+ Py_XINCREF(local_type);
+ Py_XINCREF(local_value);
+ *type = local_type;
+ *value = local_value;
+ *tb = local_tb;
+#if CYTHON_COMPILING_IN_CPYTHON
+ tmp_type = tstate->exc_type;
+ tmp_value = tstate->exc_value;
+ tmp_tb = tstate->exc_traceback;
+ tstate->exc_type = local_type;
+ tstate->exc_value = local_value;
+ tstate->exc_traceback = local_tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+#else
+ PyErr_SetExcInfo(local_type, local_value, local_tb);
+#endif
+ return 0;
+bad:
+ *type = 0;
+ *value = 0;
+ *tb = 0;
+ Py_XDECREF(local_type);
+ Py_XDECREF(local_value);
+ Py_XDECREF(local_tb);
+ return -1;
+}
+
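+/* Call `func` through its tp_call slot directly, with the same recursion
+   bookkeeping PyObject_Call performs, avoiding one layer of dispatch. */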
+#if CYTHON_COMPILING_IN_CPYTHON
+static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
+ PyObject *result;
+ ternaryfunc call = func->ob_type->tp_call;
+ if (unlikely(!call))
+ return PyObject_Call(func, arg, kw);
+#if PY_VERSION_HEX >= 0x02060000
+ if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
+ return NULL;
+#endif
+ result = (*call)(func, arg, kw);
+#if PY_VERSION_HEX >= 0x02060000
+ Py_LeaveRecursiveCall();
+#endif
+ if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
+ PyErr_SetString(
+ PyExc_SystemError,
+ "NULL result without error in PyObject_Call");
+ }
+ return result;
+}
+#endif
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ PyThreadState *tstate = PyThreadState_GET();
+ tmp_type = tstate->curexc_type;
+ tmp_value = tstate->curexc_value;
+ tmp_tb = tstate->curexc_traceback;
+ tstate->curexc_type = type;
+ tstate->curexc_value = value;
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+#else
+ PyErr_Restore(type, value, tb);
+#endif
+}
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyThreadState *tstate = PyThreadState_GET();
+ *type = tstate->curexc_type;
+ *value = tstate->curexc_value;
+ *tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+#else
+ PyErr_Fetch(type, value, tb);
+#endif
+}
+
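+/* Implement the `raise` statement: normalize type/value/traceback on
+   Python 2; instantiate exception classes and support `raise ... from
+   cause` chaining on Python 3. */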
+#if PY_MAJOR_VERSION < 3
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb,
+ CYTHON_UNUSED PyObject *cause) {
+ Py_XINCREF(type);
+ if (!value || value == Py_None)
+ value = NULL;
+ else
+ Py_INCREF(value);
+ if (!tb || tb == Py_None)
+ tb = NULL;
+ else {
+ Py_INCREF(tb);
+ if (!PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto raise_error;
+ }
+ }
+ #if PY_VERSION_HEX < 0x02050000
+ if (PyClass_Check(type)) {
+ #else
+ if (PyType_Check(type)) {
+ #endif
+#if CYTHON_COMPILING_IN_PYPY
+ if (!value) {
+ Py_INCREF(Py_None);
+ value = Py_None;
+ }
+#endif
+ PyErr_NormalizeException(&type, &value, &tb);
+ } else {
+ if (value) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto raise_error;
+ }
+ value = type;
+ #if PY_VERSION_HEX < 0x02050000
+ if (PyInstance_Check(type)) {
+ type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+ Py_INCREF(type);
+ } else {
+ type = 0;
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception must be an old-style class or instance");
+ goto raise_error;
+ }
+ #else
+ type = (PyObject*) Py_TYPE(type);
+ Py_INCREF(type);
+ if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception class must be a subclass of BaseException");
+ goto raise_error;
+ }
+ #endif
+ }
+ __Pyx_ErrRestore(type, value, tb);
+ return;
+raise_error:
+ Py_XDECREF(value);
+ Py_XDECREF(type);
+ Py_XDECREF(tb);
+ return;
+}
+#else /* Python 3+ */
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+ PyObject* owned_instance = NULL;
+ if (tb == Py_None) {
+ tb = 0;
+ } else if (tb && !PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto bad;
+ }
+ if (value == Py_None)
+ value = 0;
+ if (PyExceptionInstance_Check(type)) {
+ if (value) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto bad;
+ }
+ value = type;
+ type = (PyObject*) Py_TYPE(value);
+ } else if (PyExceptionClass_Check(type)) {
+ PyObject *instance_class = NULL;
+ if (value && PyExceptionInstance_Check(value)) {
+ instance_class = (PyObject*) Py_TYPE(value);
+ if (instance_class != type) {
+ if (PyObject_IsSubclass(instance_class, type)) {
+ type = instance_class;
+ } else {
+ instance_class = NULL;
+ }
+ }
+ }
+ if (!instance_class) {
+ PyObject *args;
+ if (!value)
+ args = PyTuple_New(0);
+ else if (PyTuple_Check(value)) {
+ Py_INCREF(value);
+ args = value;
+ } else
+ args = PyTuple_Pack(1, value);
+ if (!args)
+ goto bad;
+ owned_instance = PyObject_Call(type, args, NULL);
+ Py_DECREF(args);
+ if (!owned_instance)
+ goto bad;
+ value = owned_instance;
+ if (!PyExceptionInstance_Check(value)) {
+ PyErr_Format(PyExc_TypeError,
+ "calling %R should have returned an instance of "
+ "BaseException, not %R",
+ type, Py_TYPE(value));
+ goto bad;
+ }
+ }
+ } else {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception class must be a subclass of BaseException");
+ goto bad;
+ }
+#if PY_VERSION_HEX >= 0x03030000
+ if (cause) {
+#else
+ if (cause && cause != Py_None) {
+#endif
+ PyObject *fixed_cause;
+ if (cause == Py_None) {
+ fixed_cause = NULL;
+ } else if (PyExceptionClass_Check(cause)) {
+ fixed_cause = PyObject_CallObject(cause, NULL);
+ if (fixed_cause == NULL)
+ goto bad;
+ } else if (PyExceptionInstance_Check(cause)) {
+ fixed_cause = cause;
+ Py_INCREF(fixed_cause);
+ } else {
+ PyErr_SetString(PyExc_TypeError,
+ "exception causes must derive from "
+ "BaseException");
+ goto bad;
+ }
+ PyException_SetCause(value, fixed_cause);
+ }
+ PyErr_SetObject(type, value);
+ if (tb) {
+ PyThreadState *tstate = PyThreadState_GET();
+ PyObject* tmp_tb = tstate->curexc_traceback;
+ if (tb != tmp_tb) {
+ Py_INCREF(tb);
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_tb);
+ }
+ }
+bad:
+ Py_XDECREF(owned_instance);
+ return;
+}
+#endif
+
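+/* Look up a module-level global in the module dict, falling back to the
+   builtins on a miss. */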
+static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) {
+ PyObject *result;
+#if CYTHON_COMPILING_IN_CPYTHON
+ result = PyDict_GetItem(__pyx_d, name);
+ if (result) {
+ Py_INCREF(result);
+ } else {
+#else
+ result = PyObject_GetItem(__pyx_d, name);
+ if (!result) {
+ PyErr_Clear();
+#endif
+ result = __Pyx_GetBuiltinName(name);
+ }
+ return result;
+}
+
+static CYTHON_INLINE int __Pyx_CheckKeywordStrings(
+ PyObject *kwdict,
+ const char* function_name,
+ int kw_allowed)
+{
+ PyObject* key = 0;
+ Py_ssize_t pos = 0;
+#if CYTHON_COMPILING_IN_PYPY
+ if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0))
+ goto invalid_keyword;
+ return 1;
+#else
+ while (PyDict_Next(kwdict, &pos, &key, 0)) {
+ #if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key)))
+ #endif
+ if (unlikely(!PyUnicode_Check(key)))
+ goto invalid_keyword_type;
+ }
+ if ((!kw_allowed) && unlikely(key))
+ goto invalid_keyword;
+ return 1;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ return 0;
+#endif
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+ return 0;
+}
+
+static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) {
+ PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname);
+}
+
+static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
+#if CYTHON_COMPILING_IN_CPYTHON
+#if PY_MAJOR_VERSION >= 3
+ if (likely(PyUnicode_Check(n)))
+#else
+ if (likely(PyString_Check(n)))
+#endif
+ return __Pyx_PyObject_GetAttrStr(o, n);
+#endif
+ return PyObject_GetAttr(o, n);
+}
+
+static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
+ PyObject *r = __Pyx_GetAttr(o, n);
+ if (unlikely(!r)) {
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError))
+ goto bad;
+ PyErr_Clear();
+ r = d;
+ Py_INCREF(d);
+ }
+ return r;
+bad:
+ return NULL;
+}
+
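+/* Optimized equality for bytes (and, below, unicode) objects: identity,
+   length and first-character checks short-circuit before memcmp, with a
+   rich-comparison fallback for mixed types. */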
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+ if (s1 == s2) {
+ return (equals == Py_EQ);
+ } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {
+ const char *ps1, *ps2;
+ Py_ssize_t length = PyBytes_GET_SIZE(s1);
+ if (length != PyBytes_GET_SIZE(s2))
+ return (equals == Py_NE);
+ ps1 = PyBytes_AS_STRING(s1);
+ ps2 = PyBytes_AS_STRING(s2);
+ if (ps1[0] != ps2[0]) {
+ return (equals == Py_NE);
+ } else if (length == 1) {
+ return (equals == Py_EQ);
+ } else {
+ int result = memcmp(ps1, ps2, (size_t)length);
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
+ return (equals == Py_NE);
+ } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {
+ return (equals == Py_NE);
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+#endif
+}
+
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+#if PY_MAJOR_VERSION < 3
+ PyObject* owned_ref = NULL;
+#endif
+ int s1_is_unicode, s2_is_unicode;
+ if (s1 == s2) {
+ goto return_eq;
+ }
+ s1_is_unicode = PyUnicode_CheckExact(s1);
+ s2_is_unicode = PyUnicode_CheckExact(s2);
+#if PY_MAJOR_VERSION < 3
+ if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) {
+ owned_ref = PyUnicode_FromObject(s2);
+ if (unlikely(!owned_ref))
+ return -1;
+ s2 = owned_ref;
+ s2_is_unicode = 1;
+ } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) {
+ owned_ref = PyUnicode_FromObject(s1);
+ if (unlikely(!owned_ref))
+ return -1;
+ s1 = owned_ref;
+ s1_is_unicode = 1;
+ } else if (((!s2_is_unicode) & (!s1_is_unicode))) {
+ return __Pyx_PyBytes_Equals(s1, s2, equals);
+ }
+#endif
+ if (s1_is_unicode & s2_is_unicode) {
+ Py_ssize_t length;
+ int kind;
+ void *data1, *data2;
+ #if CYTHON_PEP393_ENABLED
+ if (unlikely(PyUnicode_READY(s1) < 0) || unlikely(PyUnicode_READY(s2) < 0))
+ return -1;
+ #endif
+ length = __Pyx_PyUnicode_GET_LENGTH(s1);
+ if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {
+ goto return_ne;
+ }
+ kind = __Pyx_PyUnicode_KIND(s1);
+ if (kind != __Pyx_PyUnicode_KIND(s2)) {
+ goto return_ne;
+ }
+ data1 = __Pyx_PyUnicode_DATA(s1);
+ data2 = __Pyx_PyUnicode_DATA(s2);
+ if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) {
+ goto return_ne;
+ } else if (length == 1) {
+ goto return_eq;
+ } else {
+ int result = memcmp(data1, data2, length * kind);
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & s2_is_unicode) {
+ goto return_ne;
+ } else if ((s2 == Py_None) & s1_is_unicode) {
+ goto return_ne;
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+return_eq:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ);
+return_ne:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_NE);
+#endif
+}
+
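+/* Integer indexing helpers: fast inline paths for exact lists and tuples
+   (with optional wraparound and bounds checks), then the sq_item slot,
+   and finally the generic PyObject_GetItem protocol. */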
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
+ PyObject *r;
+ if (!j) return NULL;
+ r = PyObject_GetItem(o, j);
+ Py_DECREF(j);
+ return r;
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (wraparound & unlikely(i < 0)) i += PyList_GET_SIZE(o);
+ if ((!boundscheck) || likely((0 <= i) & (i < PyList_GET_SIZE(o)))) {
+ PyObject *r = PyList_GET_ITEM(o, i);
+ Py_INCREF(r);
+ return r;
+ }
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+#else
+ return PySequence_GetItem(o, i);
+#endif
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
+ int wraparound, int boundscheck) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (wraparound & unlikely(i < 0)) i += PyTuple_GET_SIZE(o);
+ if ((!boundscheck) || likely((0 <= i) & (i < PyTuple_GET_SIZE(o)))) {
+ PyObject *r = PyTuple_GET_ITEM(o, i);
+ Py_INCREF(r);
+ return r;
+ }
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+#else
+ return PySequence_GetItem(o, i);
+#endif
+}
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
+ int is_list, int wraparound, int boundscheck) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (is_list || PyList_CheckExact(o)) {
+ Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);
+ if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) {
+ PyObject *r = PyList_GET_ITEM(o, n);
+ Py_INCREF(r);
+ return r;
+ }
+ }
+ else if (PyTuple_CheckExact(o)) {
+ Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o);
+ if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) {
+ PyObject *r = PyTuple_GET_ITEM(o, n);
+ Py_INCREF(r);
+ return r;
+ }
+ } else {
+ PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
+ if (likely(m && m->sq_item)) {
+ if (wraparound && unlikely(i < 0) && likely(m->sq_length)) {
+ Py_ssize_t l = m->sq_length(o);
+ if (likely(l >= 0)) {
+ i += l;
+ } else {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError))
+ PyErr_Clear();
+ else
+ return NULL;
+ }
+ }
+ return m->sq_item(o, i);
+ }
+ }
+#else
+ if (is_list || PySequence_Check(o)) {
+ return PySequence_GetItem(o, i);
+ }
+#endif
+ return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+}
+
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
+ PyErr_Format(PyExc_ValueError,
+ "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected);
+}
+
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
+ PyErr_Format(PyExc_ValueError,
+ "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack",
+ index, (index == 1) ? "" : "s");
+}
+
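+/* Swallow a StopIteration left pending when an iterator is exhausted;
+   return -1 if some other exception is set. */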
+static CYTHON_INLINE int __Pyx_IterFinish(void) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyThreadState *tstate = PyThreadState_GET();
+ PyObject* exc_type = tstate->curexc_type;
+ if (unlikely(exc_type)) {
+ if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) {
+ PyObject *exc_value, *exc_tb;
+ exc_value = tstate->curexc_value;
+ exc_tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+ Py_DECREF(exc_type);
+ Py_XDECREF(exc_value);
+ Py_XDECREF(exc_tb);
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+ return 0;
+#else
+ if (unlikely(PyErr_Occurred())) {
+ if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) {
+ PyErr_Clear();
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+ return 0;
+#endif
+}
+
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
+ if (unlikely(retval)) {
+ Py_DECREF(retval);
+ __Pyx_RaiseTooManyValuesError(expected);
+ return -1;
+ } else {
+ return __Pyx_IterFinish();
+ }
+ return 0;
+}
+
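+/* Stash the extension type's C vtable pointer in its type dict, wrapped
+   in a PyCapsule (PyCObject on pre-2.7 Pythons). */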
+static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
+#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0)
+ PyObject *ob = PyCapsule_New(vtable, 0, 0);
+#else
+ PyObject *ob = PyCObject_FromVoidPtr(vtable, 0);
+#endif
+ if (!ob)
+ goto bad;
+ if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0)
+ goto bad;
+ Py_DECREF(ob);
+ return 0;
+bad:
+ Py_XDECREF(ob);
+ return -1;
+}
+
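+/* Convert a C int to a Python integer, picking the narrowest suitable
+   constructor and falling back to _PyLong_FromByteArray. */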
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {
+ const int neg_one = (int) -1, const_zero = 0;
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(int) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(int) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+ } else if (sizeof(int) <= sizeof(unsigned long long)) {
+ return PyLong_FromUnsignedLongLong((unsigned long long) value);
+ }
+ } else {
+ if (sizeof(int) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(int) <= sizeof(long long)) {
+ return PyLong_FromLongLong((long long) value);
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(int),
+ little, !is_unsigned);
+ }
+}
+
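+/* Implement the import statement; on Python 3 a failed implicit relative
+   import inside a package is retried as an absolute import. */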
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
+ PyObject *empty_list = 0;
+ PyObject *module = 0;
+ PyObject *global_dict = 0;
+ PyObject *empty_dict = 0;
+ PyObject *list;
+ #if PY_VERSION_HEX < 0x03030000
+ PyObject *py_import;
+ py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);
+ if (!py_import)
+ goto bad;
+ #endif
+ if (from_list)
+ list = from_list;
+ else {
+ empty_list = PyList_New(0);
+ if (!empty_list)
+ goto bad;
+ list = empty_list;
+ }
+ global_dict = PyModule_GetDict(__pyx_m);
+ if (!global_dict)
+ goto bad;
+ empty_dict = PyDict_New();
+ if (!empty_dict)
+ goto bad;
+ #if PY_VERSION_HEX >= 0x02050000
+ {
+ #if PY_MAJOR_VERSION >= 3
+ if (level == -1) {
+ if (strchr(__Pyx_MODULE_NAME, '.')) {
+ #if PY_VERSION_HEX < 0x03030000
+ PyObject *py_level = PyInt_FromLong(1);
+ if (!py_level)
+ goto bad;
+ module = PyObject_CallFunctionObjArgs(py_import,
+ name, global_dict, empty_dict, list, py_level, NULL);
+ Py_DECREF(py_level);
+ #else
+ module = PyImport_ImportModuleLevelObject(
+ name, global_dict, empty_dict, list, 1);
+ #endif
+ if (!module) {
+ if (!PyErr_ExceptionMatches(PyExc_ImportError))
+ goto bad;
+ PyErr_Clear();
+ }
+ }
+ level = 0; /* try absolute import on failure */
+ }
+ #endif
+ if (!module) {
+ #if PY_VERSION_HEX < 0x03030000
+ PyObject *py_level = PyInt_FromLong(level);
+ if (!py_level)
+ goto bad;
+ module = PyObject_CallFunctionObjArgs(py_import,
+ name, global_dict, empty_dict, list, py_level, NULL);
+ Py_DECREF(py_level);
+ #else
+ module = PyImport_ImportModuleLevelObject(
+ name, global_dict, empty_dict, list, level);
+ #endif
+ }
+ }
+ #else
+ if (level>0) {
+ PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4.");
+ goto bad;
+ }
+ module = PyObject_CallFunctionObjArgs(py_import,
+ name, global_dict, empty_dict, list, NULL);
+ #endif
+bad:
+ #if PY_VERSION_HEX < 0x03030000
+ Py_XDECREF(py_import);
+ #endif
+ Py_XDECREF(empty_list);
+ Py_XDECREF(empty_dict);
+ return module;
+}
+
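+/* Range-checked conversion from a wider Python-level conversion function
+   down to the requested C integer type, raising OverflowError on loss. */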
+#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func) \
+ { \
+ func_type value = func(x); \
+ if (sizeof(target_type) < sizeof(func_type)) { \
+ if (unlikely(value != (func_type) (target_type) value)) { \
+ func_type zero = 0; \
+ PyErr_SetString(PyExc_OverflowError, \
+ (is_unsigned && unlikely(value < zero)) ? \
+ "can't convert negative value to " #target_type : \
+ "value too large to convert to " #target_type); \
+ return (target_type) -1; \
+ } \
+ } \
+ return (target_type) value; \
+ }
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #endif
+#endif
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
+ const int neg_one = (int) -1, const_zero = 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(int) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG)
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+ }
+ return (int) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ if (sizeof(digit) <= sizeof(int)) {
+ switch (Py_SIZE(x)) {
+ case 0: return 0;
+ case 1: return (int) ((PyLongObject*)x)->ob_digit[0];
+ }
+ }
+ #endif
+#endif
+ if (unlikely(Py_SIZE(x) < 0)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+ }
+ if (sizeof(int) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, PyLong_AsUnsignedLong)
+ } else if (sizeof(int) <= sizeof(unsigned long long)) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long long, PyLong_AsUnsignedLongLong)
+ }
+ } else {
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ if (sizeof(digit) <= sizeof(int)) {
+ switch (Py_SIZE(x)) {
+ case 0: return 0;
+ case 1: return +(int) ((PyLongObject*)x)->ob_digit[0];
+ case -1: return -(int) ((PyLongObject*)x)->ob_digit[0];
+ }
+ }
+ #endif
+#endif
+ if (sizeof(int) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyLong_AsLong)
+ } else if (sizeof(int) <= sizeof(long long)) {
+ __PYX_VERIFY_RETURN_INT(int, long long, PyLong_AsLongLong)
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ int val;
+ PyObject *v = __Pyx_PyNumber_Int(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (int) -1;
+ }
+ } else {
+ int val;
+ PyObject *tmp = __Pyx_PyNumber_Int(x);
+ if (!tmp) return (int) -1;
+ val = __Pyx_PyInt_As_int(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
+ const long neg_one = (long) -1, const_zero = 0;
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(long) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long long)) {
+ return PyLong_FromUnsignedLongLong((unsigned long long) value);
+ }
+ } else {
+ if (sizeof(long) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(long long)) {
+ return PyLong_FromLongLong((long long) value);
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(long),
+ little, !is_unsigned);
+ }
+}
+
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #endif
+#endif
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
+ const long neg_one = (long) -1, const_zero = 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(long) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG)
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+ }
+ return (long) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ if (sizeof(digit) <= sizeof(long)) {
+ switch (Py_SIZE(x)) {
+ case 0: return 0;
+ case 1: return (long) ((PyLongObject*)x)->ob_digit[0];
+ }
+ }
+ #endif
+#endif
+ if (unlikely(Py_SIZE(x) < 0)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+ }
+ if (sizeof(long) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, PyLong_AsUnsignedLong)
+ } else if (sizeof(long) <= sizeof(unsigned long long)) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long long, PyLong_AsUnsignedLongLong)
+ }
+ } else {
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ if (sizeof(digit) <= sizeof(long)) {
+ switch (Py_SIZE(x)) {
+ case 0: return 0;
+ case 1: return +(long) ((PyLongObject*)x)->ob_digit[0];
+ case -1: return -(long) ((PyLongObject*)x)->ob_digit[0];
+ }
+ }
+ #endif
+#endif
+ if (sizeof(long) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyLong_AsLong)
+ } else if (sizeof(long) <= sizeof(long long)) {
+ __PYX_VERIFY_RETURN_INT(long, long long, PyLong_AsLongLong)
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ long val;
+ PyObject *v = __Pyx_PyNumber_Int(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (long) -1;
+ }
+ } else {
+ long val;
+ PyObject *tmp = __Pyx_PyNumber_Int(x);
+ if (!tmp) return (long) -1;
+ val = __Pyx_PyInt_As_long(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+}
+
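+/* Warn when the major.minor Python version the module was compiled
+   against differs from the version running it. */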
+static int __Pyx_check_binary_version(void) {
+ char ctversion[4], rtversion[4];
+ PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+ PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+ if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+ char message[200];
+ PyOS_snprintf(message, sizeof(message),
+ "compiletime version %s of module '%.100s' "
+ "does not match runtime version %s",
+ ctversion, __Pyx_MODULE_NAME, rtversion);
+ #if PY_VERSION_HEX < 0x02050000
+ return PyErr_Warn(NULL, message);
+ #else
+ return PyErr_WarnEx(NULL, message, 1);
+ #endif
+ }
+ return 0;
+}
+
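+/* Cache of synthesized PyCodeObjects keyed by source line, kept sorted
+   for binary search; used when generating tracebacks below. */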
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
+ int start = 0, mid = 0, end = count - 1;
+ if (end >= 0 && code_line > entries[end].code_line) {
+ return count;
+ }
+ while (start < end) {
+ mid = (start + end) / 2;
+ if (code_line < entries[mid].code_line) {
+ end = mid;
+ } else if (code_line > entries[mid].code_line) {
+ start = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ if (code_line <= entries[mid].code_line) {
+ return mid;
+ } else {
+ return mid + 1;
+ }
+}
+static PyCodeObject *__pyx_find_code_object(int code_line) {
+ PyCodeObject* code_object;
+ int pos;
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
+ return NULL;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
+ return NULL;
+ }
+ code_object = __pyx_code_cache.entries[pos].code_object;
+ Py_INCREF(code_object);
+ return code_object;
+}
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
+ int pos, i;
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+ if (unlikely(!code_line)) {
+ return;
+ }
+ if (unlikely(!entries)) {
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (likely(entries)) {
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = 64;
+ __pyx_code_cache.count = 1;
+ entries[0].code_line = code_line;
+ entries[0].code_object = code_object;
+ Py_INCREF(code_object);
+ }
+ return;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
+ PyCodeObject* tmp = entries[pos].code_object;
+ entries[pos].code_object = code_object;
+ Py_DECREF(tmp);
+ return;
+ }
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+ int new_max = __pyx_code_cache.max_count + 64;
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+ __pyx_code_cache.entries, new_max*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (unlikely(!entries)) {
+ return;
+ }
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = new_max;
+ }
+ for (i=__pyx_code_cache.count; i>pos; i--) {
+ entries[i] = entries[i-1];
+ }
+ entries[pos].code_line = code_line;
+ entries[pos].code_object = code_object;
+ __pyx_code_cache.count++;
+ Py_INCREF(code_object);
+}
+
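+/* Build a minimal code object and frame so that tracebacks raised from
+   the generated C map back to the original _yaml.pyx source lines. */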
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+ const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ #if PY_MAJOR_VERSION < 3
+ py_srcfile = PyString_FromString(filename);
+ #else
+ py_srcfile = PyUnicode_FromString(filename);
+ #endif
+ if (!py_srcfile) goto bad;
+ if (c_line) {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #else
+ py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #endif
+ }
+ else {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromString(funcname);
+ #else
+ py_funcname = PyUnicode_FromString(funcname);
+ #endif
+ }
+ if (!py_funcname) goto bad;
+ py_code = __Pyx_PyCode_New(
+ 0, /*int argcount,*/
+ 0, /*int kwonlyargcount,*/
+ 0, /*int nlocals,*/
+ 0, /*int stacksize,*/
+ 0, /*int flags,*/
+ __pyx_empty_bytes, /*PyObject *code,*/
+ __pyx_empty_tuple, /*PyObject *consts,*/
+ __pyx_empty_tuple, /*PyObject *names,*/
+ __pyx_empty_tuple, /*PyObject *varnames,*/
+ __pyx_empty_tuple, /*PyObject *freevars,*/
+ __pyx_empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ py_line, /*int firstlineno,*/
+ __pyx_empty_bytes /*PyObject *lnotab*/
+ );
+ Py_DECREF(py_srcfile);
+ Py_DECREF(py_funcname);
+ return py_code;
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ return NULL;
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_globals = 0;
+ PyFrameObject *py_frame = 0;
+ py_code = __pyx_find_code_object(c_line ? c_line : py_line);
+ if (!py_code) {
+ py_code = __Pyx_CreateCodeObjectForTraceback(
+ funcname, c_line, py_line, filename);
+ if (!py_code) goto bad;
+ __pyx_insert_code_object(c_line ? c_line : py_line, py_code);
+ }
+ py_globals = PyModule_GetDict(__pyx_m);
+ if (!py_globals) goto bad;
+ py_frame = PyFrame_New(
+ PyThreadState_GET(), /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ py_globals, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ py_frame->f_lineno = py_line;
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+
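+/* Create (and where requested intern) all string constants from the
+   module's string table at import time. */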
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+ while (t->p) {
+ #if PY_MAJOR_VERSION < 3
+ if (t->is_unicode) {
+ *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+ } else if (t->intern) {
+ *t->p = PyString_InternFromString(t->s);
+ } else {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ }
+ #else /* Python 3+ has unicode identifiers */
+ if (t->is_unicode | t->is_str) {
+ if (t->intern) {
+ *t->p = PyUnicode_InternFromString(t->s);
+ } else if (t->encoding) {
+ *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+ } else {
+ *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+ }
+ } else {
+ *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+ }
+ #endif
+ if (!*t->p)
+ return -1;
+ ++t;
+ }
+ return 0;
+}
+
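+/* String and number coercion helpers: extract a char* honouring the
+   module's default string encoding, truth testing, and int coercions. */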
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(char* c_str) {
+ return __Pyx_PyUnicode_FromStringAndSize(c_str, strlen(c_str));
+}
+static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject* o) {
+ Py_ssize_t ignore;
+ return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+#if PY_VERSION_HEX < 0x03030000
+ char* defenc_c;
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+ }
+ }
+#endif /*__PYX_DEFAULT_STRING_ENCODING_IS_ASCII*/
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+#else /* PY_VERSION_HEX < 0x03030000 */
+ if (PyUnicode_READY(o) == -1) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ if (PyUnicode_IS_ASCII(o)) {
+ *length = PyUnicode_GET_DATA_SIZE(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+#else /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */
+ return PyUnicode_AsUTF8AndSize(o, length);
+#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */
+#endif /* PY_VERSION_HEX < 0x03030000 */
+ } else
+#endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT */
+#if !CYTHON_COMPILING_IN_PYPY
+#if PY_VERSION_HEX >= 0x02060000
+ if (PyByteArray_Check(o)) {
+ *length = PyByteArray_GET_SIZE(o);
+ return PyByteArray_AS_STRING(o);
+ } else
+#endif
+#endif
+ {
+ char* result;
+ int r = PyBytes_AsStringAndSize(o, &result, length);
+ if (unlikely(r < 0)) {
+ return NULL;
+ } else {
+ return result;
+ }
+ }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+ int is_true = x == Py_True;
+ if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+ else return PyObject_IsTrue(x);
+}
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) {
+ PyNumberMethods *m;
+ const char *name = NULL;
+ PyObject *res = NULL;
+#if PY_MAJOR_VERSION < 3
+ if (PyInt_Check(x) || PyLong_Check(x))
+#else
+ if (PyLong_Check(x))
+#endif
+ return Py_INCREF(x), x;
+ m = Py_TYPE(x)->tp_as_number;
+#if PY_MAJOR_VERSION < 3
+ if (m && m->nb_int) {
+ name = "int";
+ res = PyNumber_Int(x);
+ }
+ else if (m && m->nb_long) {
+ name = "long";
+ res = PyNumber_Long(x);
+ }
+#else
+ if (m && m->nb_int) {
+ name = "int";
+ res = PyNumber_Long(x);
+ }
+#endif
+ if (res) {
+#if PY_MAJOR_VERSION < 3
+ if (!PyInt_Check(res) && !PyLong_Check(res)) {
+#else
+ if (!PyLong_Check(res)) {
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ name, name, Py_TYPE(res)->tp_name);
+ Py_DECREF(res);
+ return NULL;
+ }
+ }
+ else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError,
+ "an integer is required");
+ }
+ return res;
+}
+#if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #endif
+#endif
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+ Py_ssize_t ival;
+ PyObject *x;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(b)))
+ return PyInt_AS_LONG(b);
+#endif
+ if (likely(PyLong_CheckExact(b))) {
+ #if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
+ #if CYTHON_USE_PYLONG_INTERNALS
+ switch (Py_SIZE(b)) {
+ case -1: return -(sdigit)((PyLongObject*)b)->ob_digit[0];
+ case 0: return 0;
+ case 1: return ((PyLongObject*)b)->ob_digit[0];
+ }
+ #endif
+ #endif
+ #if PY_VERSION_HEX < 0x02060000
+ return PyInt_AsSsize_t(b);
+ #else
+ return PyLong_AsSsize_t(b);
+ #endif
+ }
+ x = PyNumber_Index(b);
+ if (!x) return -1;
+ ival = PyInt_AsSsize_t(x);
+ Py_DECREF(x);
+ return ival;
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+#if PY_VERSION_HEX < 0x02050000
+ if (ival <= LONG_MAX)
+ return PyInt_FromLong((long)ival);
+ else {
+ unsigned char *bytes = (unsigned char *) &ival;
+ int one = 1; int little = (int)*(unsigned char*)&one;
+ return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0);
+ }
+#else
+ return PyInt_FromSize_t(ival);
+#endif
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/python/pyyaml/ext/_yaml.h b/python/pyyaml/ext/_yaml.h
new file mode 100644
index 000000000..21fd6a991
--- /dev/null
+++ b/python/pyyaml/ext/_yaml.h
@@ -0,0 +1,23 @@
+
+#include <yaml.h>
+
+#if PY_MAJOR_VERSION < 3
+
+#define PyUnicode_FromString(s) PyUnicode_DecodeUTF8((s), strlen(s), "strict")
+
+#else
+
+#define PyString_CheckExact PyBytes_CheckExact
+#define PyString_AS_STRING PyBytes_AS_STRING
+#define PyString_GET_SIZE PyBytes_GET_SIZE
+#define PyString_FromStringAndSize PyBytes_FromStringAndSize
+
+#endif
+
+#ifdef _MSC_VER /* MS Visual C++ 6.0 */
+#if _MSC_VER == 1200
+
+#define PyLong_FromUnsignedLongLong(z) PyInt_FromLong(z)
+
+#endif
+#endif
diff --git a/python/pyyaml/ext/_yaml.pxd b/python/pyyaml/ext/_yaml.pxd
new file mode 100644
index 000000000..f47f459cc
--- /dev/null
+++ b/python/pyyaml/ext/_yaml.pxd
@@ -0,0 +1,251 @@
+
+cdef extern from "_yaml.h":
+
+ void malloc(int l)
+ void memcpy(char *d, char *s, int l)
+ int strlen(char *s)
+ int PyString_CheckExact(object o)
+ int PyUnicode_CheckExact(object o)
+ char *PyString_AS_STRING(object o)
+ int PyString_GET_SIZE(object o)
+ object PyString_FromStringAndSize(char *v, int l)
+ object PyUnicode_FromString(char *u)
+ object PyUnicode_DecodeUTF8(char *u, int s, char *e)
+ object PyUnicode_AsUTF8String(object o)
+ int PY_MAJOR_VERSION
+
+ ctypedef enum:
+ SIZEOF_VOID_P
+ ctypedef enum yaml_encoding_t:
+ YAML_ANY_ENCODING
+ YAML_UTF8_ENCODING
+ YAML_UTF16LE_ENCODING
+ YAML_UTF16BE_ENCODING
+ ctypedef enum yaml_break_t:
+ YAML_ANY_BREAK
+ YAML_CR_BREAK
+ YAML_LN_BREAK
+ YAML_CRLN_BREAK
+ ctypedef enum yaml_error_type_t:
+ YAML_NO_ERROR
+ YAML_MEMORY_ERROR
+ YAML_READER_ERROR
+ YAML_SCANNER_ERROR
+ YAML_PARSER_ERROR
+ YAML_WRITER_ERROR
+ YAML_EMITTER_ERROR
+ ctypedef enum yaml_scalar_style_t:
+ YAML_ANY_SCALAR_STYLE
+ YAML_PLAIN_SCALAR_STYLE
+ YAML_SINGLE_QUOTED_SCALAR_STYLE
+ YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ YAML_LITERAL_SCALAR_STYLE
+ YAML_FOLDED_SCALAR_STYLE
+ ctypedef enum yaml_sequence_style_t:
+ YAML_ANY_SEQUENCE_STYLE
+ YAML_BLOCK_SEQUENCE_STYLE
+ YAML_FLOW_SEQUENCE_STYLE
+ ctypedef enum yaml_mapping_style_t:
+ YAML_ANY_MAPPING_STYLE
+ YAML_BLOCK_MAPPING_STYLE
+ YAML_FLOW_MAPPING_STYLE
+ ctypedef enum yaml_token_type_t:
+ YAML_NO_TOKEN
+ YAML_STREAM_START_TOKEN
+ YAML_STREAM_END_TOKEN
+ YAML_VERSION_DIRECTIVE_TOKEN
+ YAML_TAG_DIRECTIVE_TOKEN
+ YAML_DOCUMENT_START_TOKEN
+ YAML_DOCUMENT_END_TOKEN
+ YAML_BLOCK_SEQUENCE_START_TOKEN
+ YAML_BLOCK_MAPPING_START_TOKEN
+ YAML_BLOCK_END_TOKEN
+ YAML_FLOW_SEQUENCE_START_TOKEN
+ YAML_FLOW_SEQUENCE_END_TOKEN
+ YAML_FLOW_MAPPING_START_TOKEN
+ YAML_FLOW_MAPPING_END_TOKEN
+ YAML_BLOCK_ENTRY_TOKEN
+ YAML_FLOW_ENTRY_TOKEN
+ YAML_KEY_TOKEN
+ YAML_VALUE_TOKEN
+ YAML_ALIAS_TOKEN
+ YAML_ANCHOR_TOKEN
+ YAML_TAG_TOKEN
+ YAML_SCALAR_TOKEN
+ ctypedef enum yaml_event_type_t:
+ YAML_NO_EVENT
+ YAML_STREAM_START_EVENT
+ YAML_STREAM_END_EVENT
+ YAML_DOCUMENT_START_EVENT
+ YAML_DOCUMENT_END_EVENT
+ YAML_ALIAS_EVENT
+ YAML_SCALAR_EVENT
+ YAML_SEQUENCE_START_EVENT
+ YAML_SEQUENCE_END_EVENT
+ YAML_MAPPING_START_EVENT
+ YAML_MAPPING_END_EVENT
+
+ ctypedef int yaml_read_handler_t(void *data, char *buffer,
+ int size, int *size_read) except 0
+
+ ctypedef int yaml_write_handler_t(void *data, char *buffer,
+ int size) except 0
+
+ ctypedef struct yaml_mark_t:
+ int index
+ int line
+ int column
+ ctypedef struct yaml_version_directive_t:
+ int major
+ int minor
+ ctypedef struct yaml_tag_directive_t:
+ char *handle
+ char *prefix
+
+ ctypedef struct _yaml_token_stream_start_data_t:
+ yaml_encoding_t encoding
+ ctypedef struct _yaml_token_alias_data_t:
+ char *value
+ ctypedef struct _yaml_token_anchor_data_t:
+ char *value
+ ctypedef struct _yaml_token_tag_data_t:
+ char *handle
+ char *suffix
+ ctypedef struct _yaml_token_scalar_data_t:
+ char *value
+ int length
+ yaml_scalar_style_t style
+ ctypedef struct _yaml_token_version_directive_data_t:
+ int major
+ int minor
+ ctypedef struct _yaml_token_tag_directive_data_t:
+ char *handle
+ char *prefix
+ ctypedef union _yaml_token_data_t:
+ _yaml_token_stream_start_data_t stream_start
+ _yaml_token_alias_data_t alias
+ _yaml_token_anchor_data_t anchor
+ _yaml_token_tag_data_t tag
+ _yaml_token_scalar_data_t scalar
+ _yaml_token_version_directive_data_t version_directive
+ _yaml_token_tag_directive_data_t tag_directive
+ ctypedef struct yaml_token_t:
+ yaml_token_type_t type
+ _yaml_token_data_t data
+ yaml_mark_t start_mark
+ yaml_mark_t end_mark
+
+ ctypedef struct _yaml_event_stream_start_data_t:
+ yaml_encoding_t encoding
+ ctypedef struct _yaml_event_document_start_data_tag_directives_t:
+ yaml_tag_directive_t *start
+ yaml_tag_directive_t *end
+ ctypedef struct _yaml_event_document_start_data_t:
+ yaml_version_directive_t *version_directive
+ _yaml_event_document_start_data_tag_directives_t tag_directives
+ int implicit
+ ctypedef struct _yaml_event_document_end_data_t:
+ int implicit
+ ctypedef struct _yaml_event_alias_data_t:
+ char *anchor
+ ctypedef struct _yaml_event_scalar_data_t:
+ char *anchor
+ char *tag
+ char *value
+ int length
+ int plain_implicit
+ int quoted_implicit
+ yaml_scalar_style_t style
+ ctypedef struct _yaml_event_sequence_start_data_t:
+ char *anchor
+ char *tag
+ int implicit
+ yaml_sequence_style_t style
+ ctypedef struct _yaml_event_mapping_start_data_t:
+ char *anchor
+ char *tag
+ int implicit
+ yaml_mapping_style_t style
+ ctypedef union _yaml_event_data_t:
+ _yaml_event_stream_start_data_t stream_start
+ _yaml_event_document_start_data_t document_start
+ _yaml_event_document_end_data_t document_end
+ _yaml_event_alias_data_t alias
+ _yaml_event_scalar_data_t scalar
+ _yaml_event_sequence_start_data_t sequence_start
+ _yaml_event_mapping_start_data_t mapping_start
+ ctypedef struct yaml_event_t:
+ yaml_event_type_t type
+ _yaml_event_data_t data
+ yaml_mark_t start_mark
+ yaml_mark_t end_mark
+
+ ctypedef struct yaml_parser_t:
+ yaml_error_type_t error
+ char *problem
+ int problem_offset
+ int problem_value
+ yaml_mark_t problem_mark
+ char *context
+ yaml_mark_t context_mark
+
+ ctypedef struct yaml_emitter_t:
+ yaml_error_type_t error
+ char *problem
+
+ char *yaml_get_version_string()
+ void yaml_get_version(int *major, int *minor, int *patch)
+
+ void yaml_token_delete(yaml_token_t *token)
+
+ int yaml_stream_start_event_initialize(yaml_event_t *event,
+ yaml_encoding_t encoding)
+ int yaml_stream_end_event_initialize(yaml_event_t *event)
+ int yaml_document_start_event_initialize(yaml_event_t *event,
+ yaml_version_directive_t *version_directive,
+ yaml_tag_directive_t *tag_directives_start,
+ yaml_tag_directive_t *tag_directives_end,
+ int implicit)
+ int yaml_document_end_event_initialize(yaml_event_t *event,
+ int implicit)
+ int yaml_alias_event_initialize(yaml_event_t *event, char *anchor)
+ int yaml_scalar_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, char *value, int length,
+ int plain_implicit, int quoted_implicit,
+ yaml_scalar_style_t style)
+ int yaml_sequence_start_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, int implicit, yaml_sequence_style_t style)
+ int yaml_sequence_end_event_initialize(yaml_event_t *event)
+ int yaml_mapping_start_event_initialize(yaml_event_t *event,
+ char *anchor, char *tag, int implicit, yaml_mapping_style_t style)
+ int yaml_mapping_end_event_initialize(yaml_event_t *event)
+ void yaml_event_delete(yaml_event_t *event)
+
+ int yaml_parser_initialize(yaml_parser_t *parser)
+ void yaml_parser_delete(yaml_parser_t *parser)
+ void yaml_parser_set_input_string(yaml_parser_t *parser,
+ char *input, int size)
+ void yaml_parser_set_input(yaml_parser_t *parser,
+ yaml_read_handler_t *handler, void *data)
+ void yaml_parser_set_encoding(yaml_parser_t *parser,
+ yaml_encoding_t encoding)
+ int yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token) except *
+ int yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event) except *
+
+ int yaml_emitter_initialize(yaml_emitter_t *emitter)
+ void yaml_emitter_delete(yaml_emitter_t *emitter)
+ void yaml_emitter_set_output_string(yaml_emitter_t *emitter,
+ char *output, int size, int *size_written)
+ void yaml_emitter_set_output(yaml_emitter_t *emitter,
+ yaml_write_handler_t *handler, void *data)
+ void yaml_emitter_set_encoding(yaml_emitter_t *emitter,
+ yaml_encoding_t encoding)
+ void yaml_emitter_set_canonical(yaml_emitter_t *emitter, int canonical)
+ void yaml_emitter_set_indent(yaml_emitter_t *emitter, int indent)
+ void yaml_emitter_set_width(yaml_emitter_t *emitter, int width)
+ void yaml_emitter_set_unicode(yaml_emitter_t *emitter, int unicode)
+ void yaml_emitter_set_break(yaml_emitter_t *emitter,
+ yaml_break_t line_break)
+ int yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event) except *
+ int yaml_emitter_flush(yaml_emitter_t *emitter)
+
diff --git a/python/pyyaml/ext/_yaml.pyx b/python/pyyaml/ext/_yaml.pyx
new file mode 100644
index 000000000..5158fb4d9
--- /dev/null
+++ b/python/pyyaml/ext/_yaml.pyx
@@ -0,0 +1,1527 @@
+
+import yaml
+
+def get_version_string():
+ cdef char *value
+ value = yaml_get_version_string()
+ if PY_MAJOR_VERSION < 3:
+ return value
+ else:
+ return PyUnicode_FromString(value)
+
+def get_version():
+ cdef int major, minor, patch
+ yaml_get_version(&major, &minor, &patch)
+ return (major, minor, patch)
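+
+# Added note: get_version() reports the linked libyaml version as a
+# tuple, e.g. (0, 1, 4), while get_version_string() returns it as text.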
+
+#Mark = yaml.error.Mark
+YAMLError = yaml.error.YAMLError
+ReaderError = yaml.reader.ReaderError
+ScannerError = yaml.scanner.ScannerError
+ParserError = yaml.parser.ParserError
+ComposerError = yaml.composer.ComposerError
+ConstructorError = yaml.constructor.ConstructorError
+EmitterError = yaml.emitter.EmitterError
+SerializerError = yaml.serializer.SerializerError
+RepresenterError = yaml.representer.RepresenterError
+
+StreamStartToken = yaml.tokens.StreamStartToken
+StreamEndToken = yaml.tokens.StreamEndToken
+DirectiveToken = yaml.tokens.DirectiveToken
+DocumentStartToken = yaml.tokens.DocumentStartToken
+DocumentEndToken = yaml.tokens.DocumentEndToken
+BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
+BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
+BlockEndToken = yaml.tokens.BlockEndToken
+FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
+FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
+FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
+FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
+KeyToken = yaml.tokens.KeyToken
+ValueToken = yaml.tokens.ValueToken
+BlockEntryToken = yaml.tokens.BlockEntryToken
+FlowEntryToken = yaml.tokens.FlowEntryToken
+AliasToken = yaml.tokens.AliasToken
+AnchorToken = yaml.tokens.AnchorToken
+TagToken = yaml.tokens.TagToken
+ScalarToken = yaml.tokens.ScalarToken
+
+StreamStartEvent = yaml.events.StreamStartEvent
+StreamEndEvent = yaml.events.StreamEndEvent
+DocumentStartEvent = yaml.events.DocumentStartEvent
+DocumentEndEvent = yaml.events.DocumentEndEvent
+AliasEvent = yaml.events.AliasEvent
+ScalarEvent = yaml.events.ScalarEvent
+SequenceStartEvent = yaml.events.SequenceStartEvent
+SequenceEndEvent = yaml.events.SequenceEndEvent
+MappingStartEvent = yaml.events.MappingStartEvent
+MappingEndEvent = yaml.events.MappingEndEvent
+
+ScalarNode = yaml.nodes.ScalarNode
+SequenceNode = yaml.nodes.SequenceNode
+MappingNode = yaml.nodes.MappingNode
+
+cdef class Mark:
+ cdef readonly object name
+ cdef readonly int index
+ cdef readonly int line
+ cdef readonly int column
+ cdef readonly buffer
+ cdef readonly pointer
+
+ def __init__(self, object name, int index, int line, int column,
+ object buffer, object pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self):
+ return None
+
+ def __str__(self):
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ return where
+
+#class YAMLError(Exception):
+# pass
+#
+#class MarkedYAMLError(YAMLError):
+#
+# def __init__(self, context=None, context_mark=None,
+# problem=None, problem_mark=None, note=None):
+# self.context = context
+# self.context_mark = context_mark
+# self.problem = problem
+# self.problem_mark = problem_mark
+# self.note = note
+#
+# def __str__(self):
+# lines = []
+# if self.context is not None:
+# lines.append(self.context)
+# if self.context_mark is not None \
+# and (self.problem is None or self.problem_mark is None
+# or self.context_mark.name != self.problem_mark.name
+# or self.context_mark.line != self.problem_mark.line
+# or self.context_mark.column != self.problem_mark.column):
+# lines.append(str(self.context_mark))
+# if self.problem is not None:
+# lines.append(self.problem)
+# if self.problem_mark is not None:
+# lines.append(str(self.problem_mark))
+# if self.note is not None:
+# lines.append(self.note)
+# return '\n'.join(lines)
+#
+#class ReaderError(YAMLError):
+#
+# def __init__(self, name, position, character, encoding, reason):
+# self.name = name
+# self.character = character
+# self.position = position
+# self.encoding = encoding
+# self.reason = reason
+#
+# def __str__(self):
+# if isinstance(self.character, str):
+# return "'%s' codec can't decode byte #x%02x: %s\n" \
+# " in \"%s\", position %d" \
+# % (self.encoding, ord(self.character), self.reason,
+# self.name, self.position)
+# else:
+# return "unacceptable character #x%04x: %s\n" \
+# " in \"%s\", position %d" \
+# % (ord(self.character), self.reason,
+# self.name, self.position)
+#
+#class ScannerError(MarkedYAMLError):
+# pass
+#
+#class ParserError(MarkedYAMLError):
+# pass
+#
+#class EmitterError(YAMLError):
+# pass
+#
+#cdef class Token:
+# cdef readonly Mark start_mark
+# cdef readonly Mark end_mark
+# def __init__(self, Mark start_mark, Mark end_mark):
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class StreamStartToken(Token):
+# cdef readonly object encoding
+# def __init__(self, Mark start_mark, Mark end_mark, encoding):
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+# self.encoding = encoding
+#
+#cdef class StreamEndToken(Token):
+# pass
+#
+#cdef class DirectiveToken(Token):
+# cdef readonly object name
+# cdef readonly object value
+# def __init__(self, name, value, Mark start_mark, Mark end_mark):
+# self.name = name
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class DocumentStartToken(Token):
+# pass
+#
+#cdef class DocumentEndToken(Token):
+# pass
+#
+#cdef class BlockSequenceStartToken(Token):
+# pass
+#
+#cdef class BlockMappingStartToken(Token):
+# pass
+#
+#cdef class BlockEndToken(Token):
+# pass
+#
+#cdef class FlowSequenceStartToken(Token):
+# pass
+#
+#cdef class FlowMappingStartToken(Token):
+# pass
+#
+#cdef class FlowSequenceEndToken(Token):
+# pass
+#
+#cdef class FlowMappingEndToken(Token):
+# pass
+#
+#cdef class KeyToken(Token):
+# pass
+#
+#cdef class ValueToken(Token):
+# pass
+#
+#cdef class BlockEntryToken(Token):
+# pass
+#
+#cdef class FlowEntryToken(Token):
+# pass
+#
+#cdef class AliasToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class AnchorToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class TagToken(Token):
+# cdef readonly object value
+# def __init__(self, value, Mark start_mark, Mark end_mark):
+# self.value = value
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+#
+#cdef class ScalarToken(Token):
+# cdef readonly object value
+# cdef readonly object plain
+# cdef readonly object style
+# def __init__(self, value, plain, Mark start_mark, Mark end_mark, style=None):
+# self.value = value
+# self.plain = plain
+# self.start_mark = start_mark
+# self.end_mark = end_mark
+# self.style = style
+
+cdef class CParser:
+
+ cdef yaml_parser_t parser
+ cdef yaml_event_t parsed_event
+
+ cdef object stream
+ cdef object stream_name
+ cdef object current_token
+ cdef object current_event
+ cdef object anchors
+ cdef object stream_cache
+ cdef int stream_cache_len
+ cdef int stream_cache_pos
+ cdef int unicode_source
+
+ def __init__(self, stream):
+ cdef is_readable
+ if yaml_parser_initialize(&self.parser) == 0:
+ raise MemoryError
+ self.parsed_event.type = YAML_NO_EVENT
+ is_readable = 1
+ try:
+ stream.read
+ except AttributeError:
+ is_readable = 0
+ self.unicode_source = 0
+ if is_readable:
+ self.stream = stream
+ try:
+ self.stream_name = stream.name
+ except AttributeError:
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<file>'
+ else:
+ self.stream_name = u'<file>'
+ self.stream_cache = None
+ self.stream_cache_len = 0
+ self.stream_cache_pos = 0
+ yaml_parser_set_input(&self.parser, input_handler, <void *>self)
+ else:
+ if PyUnicode_CheckExact(stream) != 0:
+ stream = PyUnicode_AsUTF8String(stream)
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<unicode string>'
+ else:
+ self.stream_name = u'<unicode string>'
+ self.unicode_source = 1
+ else:
+ if PY_MAJOR_VERSION < 3:
+ self.stream_name = '<byte string>'
+ else:
+ self.stream_name = u'<byte string>'
+ if PyString_CheckExact(stream) == 0:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("a string or stream input is required")
+ else:
+ raise TypeError(u"a string or stream input is required")
+ self.stream = stream
+ yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
+ self.current_token = None
+ self.current_event = None
+ self.anchors = {}
+
+ def __dealloc__(self):
+ yaml_parser_delete(&self.parser)
+ yaml_event_delete(&self.parsed_event)
+
+ def dispose(self):
+ pass
+
+ cdef object _parser_error(self):
+ if self.parser.error == YAML_MEMORY_ERROR:
+ return MemoryError
+ elif self.parser.error == YAML_READER_ERROR:
+ if PY_MAJOR_VERSION < 3:
+ return ReaderError(self.stream_name, self.parser.problem_offset,
+ self.parser.problem_value, '?', self.parser.problem)
+ else:
+ return ReaderError(self.stream_name, self.parser.problem_offset,
+ self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem))
+ elif self.parser.error == YAML_SCANNER_ERROR \
+ or self.parser.error == YAML_PARSER_ERROR:
+ context_mark = None
+ problem_mark = None
+ if self.parser.context != NULL:
+ context_mark = Mark(self.stream_name,
+ self.parser.context_mark.index,
+ self.parser.context_mark.line,
+ self.parser.context_mark.column, None, None)
+ if self.parser.problem != NULL:
+ problem_mark = Mark(self.stream_name,
+ self.parser.problem_mark.index,
+ self.parser.problem_mark.line,
+ self.parser.problem_mark.column, None, None)
+ context = None
+ if self.parser.context != NULL:
+ if PY_MAJOR_VERSION < 3:
+ context = self.parser.context
+ else:
+ context = PyUnicode_FromString(self.parser.context)
+ if PY_MAJOR_VERSION < 3:
+ problem = self.parser.problem
+ else:
+ problem = PyUnicode_FromString(self.parser.problem)
+ if self.parser.error == YAML_SCANNER_ERROR:
+ return ScannerError(context, context_mark, problem, problem_mark)
+ else:
+ return ParserError(context, context_mark, problem, problem_mark)
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("no parser error")
+ else:
+ raise ValueError(u"no parser error")
+
+ def raw_scan(self):
+ cdef yaml_token_t token
+ cdef int done
+ cdef int count
+ count = 0
+ done = 0
+ while done == 0:
+ if yaml_parser_scan(&self.parser, &token) == 0:
+ error = self._parser_error()
+ raise error
+ if token.type == YAML_NO_TOKEN:
+ done = 1
+ else:
+ count = count+1
+ yaml_token_delete(&token)
+ return count
+
+ cdef object _scan(self):
+ cdef yaml_token_t token
+ if yaml_parser_scan(&self.parser, &token) == 0:
+ error = self._parser_error()
+ raise error
+ token_object = self._token_to_object(&token)
+ yaml_token_delete(&token)
+ return token_object
+
+ cdef object _token_to_object(self, yaml_token_t *token):
+ start_mark = Mark(self.stream_name,
+ token.start_mark.index,
+ token.start_mark.line,
+ token.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ token.end_mark.index,
+ token.end_mark.line,
+ token.end_mark.column,
+ None, None)
+ if token.type == YAML_NO_TOKEN:
+ return None
+ elif token.type == YAML_STREAM_START_TOKEN:
+ encoding = None
+ if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ if self.unicode_source == 0:
+ encoding = u"utf-8"
+ elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ encoding = u"utf-16-le"
+ elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ encoding = u"utf-16-be"
+ return StreamStartToken(start_mark, end_mark, encoding)
+ elif token.type == YAML_STREAM_END_TOKEN:
+ return StreamEndToken(start_mark, end_mark)
+ elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
+ return DirectiveToken(u"YAML",
+ (token.data.version_directive.major,
+ token.data.version_directive.minor),
+ start_mark, end_mark)
+ elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
+ handle = PyUnicode_FromString(token.data.tag_directive.handle)
+ prefix = PyUnicode_FromString(token.data.tag_directive.prefix)
+ return DirectiveToken(u"TAG", (handle, prefix),
+ start_mark, end_mark)
+ elif token.type == YAML_DOCUMENT_START_TOKEN:
+ return DocumentStartToken(start_mark, end_mark)
+ elif token.type == YAML_DOCUMENT_END_TOKEN:
+ return DocumentEndToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
+ return BlockSequenceStartToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
+ return BlockMappingStartToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_END_TOKEN:
+ return BlockEndToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
+ return FlowSequenceStartToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
+ return FlowSequenceEndToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
+ return FlowMappingStartToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
+ return FlowMappingEndToken(start_mark, end_mark)
+ elif token.type == YAML_BLOCK_ENTRY_TOKEN:
+ return BlockEntryToken(start_mark, end_mark)
+ elif token.type == YAML_FLOW_ENTRY_TOKEN:
+ return FlowEntryToken(start_mark, end_mark)
+ elif token.type == YAML_KEY_TOKEN:
+ return KeyToken(start_mark, end_mark)
+ elif token.type == YAML_VALUE_TOKEN:
+ return ValueToken(start_mark, end_mark)
+ elif token.type == YAML_ALIAS_TOKEN:
+ value = PyUnicode_FromString(token.data.alias.value)
+ return AliasToken(value, start_mark, end_mark)
+ elif token.type == YAML_ANCHOR_TOKEN:
+ value = PyUnicode_FromString(token.data.anchor.value)
+ return AnchorToken(value, start_mark, end_mark)
+ elif token.type == YAML_TAG_TOKEN:
+ handle = PyUnicode_FromString(token.data.tag.handle)
+ suffix = PyUnicode_FromString(token.data.tag.suffix)
+ if not handle:
+ handle = None
+ return TagToken((handle, suffix), start_mark, end_mark)
+ elif token.type == YAML_SCALAR_TOKEN:
+ value = PyUnicode_DecodeUTF8(token.data.scalar.value,
+ token.data.scalar.length, 'strict')
+ plain = False
+ style = None
+ if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ plain = True
+ style = u''
+ elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ return ScalarToken(value, plain,
+ start_mark, end_mark, style)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("unknown token type")
+ else:
+ raise ValueError(u"unknown token type")
+
+ def get_token(self):
+ if self.current_token is not None:
+ value = self.current_token
+ self.current_token = None
+ else:
+ value = self._scan()
+ return value
+
+ def peek_token(self):
+ if self.current_token is None:
+ self.current_token = self._scan()
+ return self.current_token
+
+ def check_token(self, *choices):
+ if self.current_token is None:
+ self.current_token = self._scan()
+ if self.current_token is None:
+ return False
+ if not choices:
+ return True
+ token_class = self.current_token.__class__
+ for choice in choices:
+ if token_class is choice:
+ return True
+ return False
+
+ def raw_parse(self):
+ cdef yaml_event_t event
+ cdef int done
+ cdef int count
+ count = 0
+ done = 0
+ while done == 0:
+ if yaml_parser_parse(&self.parser, &event) == 0:
+ error = self._parser_error()
+ raise error
+ if event.type == YAML_NO_EVENT:
+ done = 1
+ else:
+ count = count+1
+ yaml_event_delete(&event)
+ return count
+
+ cdef object _parse(self):
+ cdef yaml_event_t event
+ if yaml_parser_parse(&self.parser, &event) == 0:
+ error = self._parser_error()
+ raise error
+ event_object = self._event_to_object(&event)
+ yaml_event_delete(&event)
+ return event_object
+
+ cdef object _event_to_object(self, yaml_event_t *event):
+ cdef yaml_tag_directive_t *tag_directive
+ start_mark = Mark(self.stream_name,
+ event.start_mark.index,
+ event.start_mark.line,
+ event.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ event.end_mark.index,
+ event.end_mark.line,
+ event.end_mark.column,
+ None, None)
+ if event.type == YAML_NO_EVENT:
+ return None
+ elif event.type == YAML_STREAM_START_EVENT:
+ encoding = None
+ if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
+ if self.unicode_source == 0:
+ encoding = u"utf-8"
+ elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
+ encoding = u"utf-16-le"
+ elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
+ encoding = u"utf-16-be"
+ return StreamStartEvent(start_mark, end_mark, encoding)
+ elif event.type == YAML_STREAM_END_EVENT:
+ return StreamEndEvent(start_mark, end_mark)
+ elif event.type == YAML_DOCUMENT_START_EVENT:
+ explicit = False
+ if event.data.document_start.implicit == 0:
+ explicit = True
+ version = None
+ if event.data.document_start.version_directive != NULL:
+ version = (event.data.document_start.version_directive.major,
+ event.data.document_start.version_directive.minor)
+ tags = None
+ if event.data.document_start.tag_directives.start != NULL:
+ tags = {}
+ tag_directive = event.data.document_start.tag_directives.start
+ while tag_directive != event.data.document_start.tag_directives.end:
+ handle = PyUnicode_FromString(tag_directive.handle)
+ prefix = PyUnicode_FromString(tag_directive.prefix)
+ tags[handle] = prefix
+ tag_directive = tag_directive+1
+ return DocumentStartEvent(start_mark, end_mark,
+ explicit, version, tags)
+ elif event.type == YAML_DOCUMENT_END_EVENT:
+ explicit = False
+ if event.data.document_end.implicit == 0:
+ explicit = True
+ return DocumentEndEvent(start_mark, end_mark, explicit)
+ elif event.type == YAML_ALIAS_EVENT:
+ anchor = PyUnicode_FromString(event.data.alias.anchor)
+ return AliasEvent(anchor, start_mark, end_mark)
+ elif event.type == YAML_SCALAR_EVENT:
+ anchor = None
+ if event.data.scalar.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.scalar.anchor)
+ tag = None
+ if event.data.scalar.tag != NULL:
+ tag = PyUnicode_FromString(event.data.scalar.tag)
+ value = PyUnicode_DecodeUTF8(event.data.scalar.value,
+ event.data.scalar.length, 'strict')
+ plain_implicit = False
+ if event.data.scalar.plain_implicit == 1:
+ plain_implicit = True
+ quoted_implicit = False
+ if event.data.scalar.quoted_implicit == 1:
+ quoted_implicit = True
+ style = None
+ if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ style = u''
+ elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ return ScalarEvent(anchor, tag,
+ (plain_implicit, quoted_implicit),
+ value, start_mark, end_mark, style)
+ elif event.type == YAML_SEQUENCE_START_EVENT:
+ anchor = None
+ if event.data.sequence_start.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.sequence_start.anchor)
+ tag = None
+ if event.data.sequence_start.tag != NULL:
+ tag = PyUnicode_FromString(event.data.sequence_start.tag)
+ implicit = False
+ if event.data.sequence_start.implicit == 1:
+ implicit = True
+ flow_style = None
+ if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ flow_style = True
+ elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ flow_style = False
+ return SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style)
+ elif event.type == YAML_MAPPING_START_EVENT:
+ anchor = None
+ if event.data.mapping_start.anchor != NULL:
+ anchor = PyUnicode_FromString(event.data.mapping_start.anchor)
+ tag = None
+ if event.data.mapping_start.tag != NULL:
+ tag = PyUnicode_FromString(event.data.mapping_start.tag)
+ implicit = False
+ if event.data.mapping_start.implicit == 1:
+ implicit = True
+ flow_style = None
+ if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ flow_style = True
+ elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ flow_style = False
+ return MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style)
+ elif event.type == YAML_SEQUENCE_END_EVENT:
+ return SequenceEndEvent(start_mark, end_mark)
+ elif event.type == YAML_MAPPING_END_EVENT:
+ return MappingEndEvent(start_mark, end_mark)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("unknown event type")
+ else:
+ raise ValueError(u"unknown event type")
+
+ def get_event(self):
+ if self.current_event is not None:
+ value = self.current_event
+ self.current_event = None
+ else:
+ value = self._parse()
+ return value
+
+ def peek_event(self):
+ if self.current_event is None:
+ self.current_event = self._parse()
+ return self.current_event
+
+ def check_event(self, *choices):
+ if self.current_event is None:
+ self.current_event = self._parse()
+ if self.current_event is None:
+ return False
+ if not choices:
+ return True
+ event_class = self.current_event.__class__
+ for choice in choices:
+ if event_class is choice:
+ return True
+ return False
+
+ def check_node(self):
+ self._parse_next_event()
+ if self.parsed_event.type == YAML_STREAM_START_EVENT:
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ return True
+ return False
+
+ def get_node(self):
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ return self._compose_document()
+
+ def get_single_node(self):
+ self._parse_next_event()
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ document = None
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ document = self._compose_document()
+ self._parse_next_event()
+ if self.parsed_event.type != YAML_STREAM_END_EVENT:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document", mark)
+ else:
+ raise ComposerError(u"expected a single document in the stream",
+ document.start_mark, u"but found another document", mark)
+ return document
+
+ cdef object _compose_document(self):
+ yaml_event_delete(&self.parsed_event)
+ node = self._compose_node(None, None)
+ self._parse_next_event()
+ yaml_event_delete(&self.parsed_event)
+ self.anchors = {}
+ return node
+
+ cdef object _compose_node(self, object parent, object index):
+ self._parse_next_event()
+ if self.parsed_event.type == YAML_ALIAS_EVENT:
+ anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor)
+ if anchor not in self.anchors:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError(None, None, "found undefined alias", mark)
+ else:
+ raise ComposerError(None, None, u"found undefined alias", mark)
+ yaml_event_delete(&self.parsed_event)
+ return self.anchors[anchor]
+ anchor = None
+ if self.parsed_event.type == YAML_SCALAR_EVENT \
+ and self.parsed_event.data.scalar.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor)
+ elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
+ and self.parsed_event.data.sequence_start.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor)
+ elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
+ and self.parsed_event.data.mapping_start.anchor != NULL:
+ anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor)
+ if anchor is not None:
+ if anchor in self.anchors:
+ mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ if PY_MAJOR_VERSION < 3:
+ raise ComposerError("found duplicate anchor; first occurrence",
+ self.anchors[anchor].start_mark, "second occurrence", mark)
+ else:
+ raise ComposerError(u"found duplicate anchor; first occurrence",
+ self.anchors[anchor].start_mark, u"second occurrence", mark)
+ self.descend_resolver(parent, index)
+ if self.parsed_event.type == YAML_SCALAR_EVENT:
+ node = self._compose_scalar_node(anchor)
+ elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
+ node = self._compose_sequence_node(anchor)
+ elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
+ node = self._compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ cdef _compose_scalar_node(self, object anchor):
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
+ self.parsed_event.data.scalar.length, 'strict')
+ plain_implicit = False
+ if self.parsed_event.data.scalar.plain_implicit == 1:
+ plain_implicit = True
+ quoted_implicit = False
+ if self.parsed_event.data.scalar.quoted_implicit == 1:
+ quoted_implicit = True
+ if self.parsed_event.data.scalar.tag == NULL \
+ or (self.parsed_event.data.scalar.tag[0] == c'!'
+ and self.parsed_event.data.scalar.tag[1] == c'\0'):
+ tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag)
+ style = None
+ if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
+ style = u''
+ elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
+ style = u'\''
+ elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
+ style = u'"'
+ elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
+ style = u'|'
+ elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
+ style = u'>'
+ node = ScalarNode(tag, value, start_mark, end_mark, style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef _compose_sequence_node(self, object anchor):
+ cdef int index
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ implicit = False
+ if self.parsed_event.data.sequence_start.implicit == 1:
+ implicit = True
+ if self.parsed_event.data.sequence_start.tag == NULL \
+ or (self.parsed_event.data.sequence_start.tag[0] == c'!'
+ and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
+ tag = self.resolve(SequenceNode, None, implicit)
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag)
+ flow_style = None
+ if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
+ flow_style = True
+ elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
+ flow_style = False
+ value = []
+ node = SequenceNode(tag, value, start_mark, None, flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ index = 0
+ self._parse_next_event()
+ while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
+ value.append(self._compose_node(node, index))
+ index = index+1
+ self._parse_next_event()
+ node.end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef _compose_mapping_node(self, object anchor):
+ start_mark = Mark(self.stream_name,
+ self.parsed_event.start_mark.index,
+ self.parsed_event.start_mark.line,
+ self.parsed_event.start_mark.column,
+ None, None)
+ implicit = False
+ if self.parsed_event.data.mapping_start.implicit == 1:
+ implicit = True
+ if self.parsed_event.data.mapping_start.tag == NULL \
+ or (self.parsed_event.data.mapping_start.tag[0] == c'!'
+ and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
+ tag = self.resolve(MappingNode, None, implicit)
+ else:
+ tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag)
+ flow_style = None
+ if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
+ flow_style = True
+ elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
+ flow_style = False
+ value = []
+ node = MappingNode(tag, value, start_mark, None, flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ yaml_event_delete(&self.parsed_event)
+ self._parse_next_event()
+ while self.parsed_event.type != YAML_MAPPING_END_EVENT:
+ item_key = self._compose_node(node, None)
+ item_value = self._compose_node(node, item_key)
+ value.append((item_key, item_value))
+ self._parse_next_event()
+ node.end_mark = Mark(self.stream_name,
+ self.parsed_event.end_mark.index,
+ self.parsed_event.end_mark.line,
+ self.parsed_event.end_mark.column,
+ None, None)
+ yaml_event_delete(&self.parsed_event)
+ return node
+
+ cdef int _parse_next_event(self) except 0:
+ if self.parsed_event.type == YAML_NO_EVENT:
+ if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
+ error = self._parser_error()
+ raise error
+ return 1
+
+cdef int input_handler(void *data, char *buffer, int size, int *read) except 0:
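+ # libyaml read callback: copy up to `size` bytes from the Python stream
+ # into `buffer`, report the amount via `read`, and return 1; returning 0
+ # would signal a read error to the parser.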
+ cdef CParser parser
+ parser = <CParser>data
+ if parser.stream_cache is None:
+ value = parser.stream.read(size)
+ if PyUnicode_CheckExact(value) != 0:
+ value = PyUnicode_AsUTF8String(value)
+ parser.unicode_source = 1
+ if PyString_CheckExact(value) == 0:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("a string value is expected")
+ else:
+ raise TypeError(u"a string value is expected")
+ parser.stream_cache = value
+ parser.stream_cache_pos = 0
+ parser.stream_cache_len = PyString_GET_SIZE(value)
+ if (parser.stream_cache_len - parser.stream_cache_pos) < size:
+ size = parser.stream_cache_len - parser.stream_cache_pos
+ if size > 0:
+ memcpy(buffer, PyString_AS_STRING(parser.stream_cache)
+ + parser.stream_cache_pos, size)
+ read[0] = size
+ parser.stream_cache_pos += size
+ if parser.stream_cache_pos == parser.stream_cache_len:
+ parser.stream_cache = None
+ return 1
+
+cdef class CEmitter:
+
+ cdef yaml_emitter_t emitter
+
+ cdef object stream
+
+ cdef int document_start_implicit
+ cdef int document_end_implicit
+ cdef object use_version
+ cdef object use_tags
+
+ cdef object serialized_nodes
+ cdef object anchors
+ cdef int last_alias_id
+ cdef int closed
+ cdef int dump_unicode
+ cdef object use_encoding
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ if yaml_emitter_initialize(&self.emitter) == 0:
+ raise MemoryError
+ self.stream = stream
+ self.dump_unicode = 0
+ if PY_MAJOR_VERSION < 3:
+ if getattr3(stream, 'encoding', None):
+ self.dump_unicode = 1
+ else:
+ if hasattr(stream, u'encoding'):
+ self.dump_unicode = 1
+ self.use_encoding = encoding
+ yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
+ if canonical:
+ yaml_emitter_set_canonical(&self.emitter, 1)
+ if indent is not None:
+ yaml_emitter_set_indent(&self.emitter, indent)
+ if width is not None:
+ yaml_emitter_set_width(&self.emitter, width)
+ if allow_unicode:
+ yaml_emitter_set_unicode(&self.emitter, 1)
+ if line_break is not None:
+ if line_break == '\r':
+ yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
+ elif line_break == '\n':
+ yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
+ elif line_break == '\r\n':
+ yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
+ self.document_start_implicit = 1
+ if explicit_start:
+ self.document_start_implicit = 0
+ self.document_end_implicit = 1
+ if explicit_end:
+ self.document_end_implicit = 0
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_alias_id = 0
+ self.closed = -1
+
+ def __dealloc__(self):
+ yaml_emitter_delete(&self.emitter)
+
+ def dispose(self):
+ pass
+
+ cdef object _emitter_error(self):
+ if self.emitter.error == YAML_MEMORY_ERROR:
+ return MemoryError
+ elif self.emitter.error == YAML_EMITTER_ERROR:
+ if PY_MAJOR_VERSION < 3:
+ problem = self.emitter.problem
+ else:
+ problem = PyUnicode_FromString(self.emitter.problem)
+ return EmitterError(problem)
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("no emitter error")
+ else:
+ raise ValueError(u"no emitter error")
+
+ cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
+ cdef yaml_encoding_t encoding
+ cdef yaml_version_directive_t version_directive_value
+ cdef yaml_version_directive_t *version_directive
+ cdef yaml_tag_directive_t tag_directives_value[128]
+ cdef yaml_tag_directive_t *tag_directives_start
+ cdef yaml_tag_directive_t *tag_directives_end
+ cdef int implicit
+ cdef int plain_implicit
+ cdef int quoted_implicit
+ cdef char *anchor
+ cdef char *tag
+ cdef char *value
+ cdef int length
+ cdef yaml_scalar_style_t scalar_style
+ cdef yaml_sequence_style_t sequence_style
+ cdef yaml_mapping_style_t mapping_style
+ event_class = event_object.__class__
+ if event_class is StreamStartEvent:
+ encoding = YAML_UTF8_ENCODING
+ if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le':
+ encoding = YAML_UTF16LE_ENCODING
+ elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be':
+ encoding = YAML_UTF16BE_ENCODING
+ if event_object.encoding is None:
+ self.dump_unicode = 1
+ if self.dump_unicode == 1:
+ encoding = YAML_UTF8_ENCODING
+ yaml_stream_start_event_initialize(event, encoding)
+ elif event_class is StreamEndEvent:
+ yaml_stream_end_event_initialize(event)
+ elif event_class is DocumentStartEvent:
+ version_directive = NULL
+ if event_object.version:
+ version_directive_value.major = event_object.version[0]
+ version_directive_value.minor = event_object.version[1]
+ version_directive = &version_directive_value
+ tag_directives_start = NULL
+ tag_directives_end = NULL
+ if event_object.tags:
+ if len(event_object.tags) > 128:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("too many tags")
+ else:
+ raise ValueError(u"too many tags")
+ tag_directives_start = tag_directives_value
+ tag_directives_end = tag_directives_value
+ cache = []
+ for handle in event_object.tags:
+ prefix = event_object.tags[handle]
+ if PyUnicode_CheckExact(handle):
+ handle = PyUnicode_AsUTF8String(handle)
+ cache.append(handle)
+ if not PyString_CheckExact(handle):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag handle must be a string")
+ else:
+ raise TypeError(u"tag handle must be a string")
+ tag_directives_end.handle = PyString_AS_STRING(handle)
+ if PyUnicode_CheckExact(prefix):
+ prefix = PyUnicode_AsUTF8String(prefix)
+ cache.append(prefix)
+ if not PyString_CheckExact(prefix):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag prefix must be a string")
+ else:
+ raise TypeError(u"tag prefix must be a string")
+ tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ tag_directives_end = tag_directives_end+1
+ implicit = 1
+ if event_object.explicit:
+ implicit = 0
+ if yaml_document_start_event_initialize(event, version_directive,
+ tag_directives_start, tag_directives_end, implicit) == 0:
+ raise MemoryError
+ elif event_class is DocumentEndEvent:
+ implicit = 1
+ if event_object.explicit:
+ implicit = 0
+ yaml_document_end_event_initialize(event, implicit)
+ elif event_class is AliasEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ if yaml_alias_event_initialize(event, anchor) == 0:
+ raise MemoryError
+ elif event_class is ScalarEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ value_object = event_object.value
+ if PyUnicode_CheckExact(value_object):
+ value_object = PyUnicode_AsUTF8String(value_object)
+ if not PyString_CheckExact(value_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("value must be a string")
+ else:
+ raise TypeError(u"value must be a string")
+ value = PyString_AS_STRING(value_object)
+ length = PyString_GET_SIZE(value_object)
+ plain_implicit = 0
+ quoted_implicit = 0
+ if event_object.implicit is not None:
+ plain_implicit = event_object.implicit[0]
+ quoted_implicit = event_object.implicit[1]
+ style_object = event_object.style
+ scalar_style = YAML_PLAIN_SCALAR_STYLE
+ if style_object == "'" or style_object == u"'":
+ scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ elif style_object == "\"" or style_object == u"\"":
+ scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ elif style_object == "|" or style_object == u"|":
+ scalar_style = YAML_LITERAL_SCALAR_STYLE
+ elif style_object == ">" or style_object == u">":
+ scalar_style = YAML_FOLDED_SCALAR_STYLE
+ if yaml_scalar_event_initialize(event, anchor, tag, value, length,
+ plain_implicit, quoted_implicit, scalar_style) == 0:
+ raise MemoryError
+ elif event_class is SequenceStartEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ implicit = 0
+ if event_object.implicit:
+ implicit = 1
+ sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ if event_object.flow_style:
+ sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ if yaml_sequence_start_event_initialize(event, anchor, tag,
+ implicit, sequence_style) == 0:
+ raise MemoryError
+ elif event_class is MappingStartEvent:
+ anchor = NULL
+ anchor_object = event_object.anchor
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ tag = NULL
+ tag_object = event_object.tag
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ implicit = 0
+ if event_object.implicit:
+ implicit = 1
+ mapping_style = YAML_BLOCK_MAPPING_STYLE
+ if event_object.flow_style:
+ mapping_style = YAML_FLOW_MAPPING_STYLE
+ if yaml_mapping_start_event_initialize(event, anchor, tag,
+ implicit, mapping_style) == 0:
+ raise MemoryError
+ elif event_class is SequenceEndEvent:
+ yaml_sequence_end_event_initialize(event)
+ elif event_class is MappingEndEvent:
+ yaml_mapping_end_event_initialize(event)
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("invalid event %s" % event_object)
+ else:
+ raise TypeError(u"invalid event %s" % event_object)
+ return 1
+
+ def emit(self, event_object):
+ cdef yaml_event_t event
+ self._object_to_event(event_object, &event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+
+ def open(self):
+ cdef yaml_event_t event
+ cdef yaml_encoding_t encoding
+ if self.closed == -1:
+ if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le':
+ encoding = YAML_UTF16LE_ENCODING
+ elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be':
+ encoding = YAML_UTF16BE_ENCODING
+ else:
+ encoding = YAML_UTF8_ENCODING
+ if self.use_encoding is None:
+ self.dump_unicode = 1
+ if self.dump_unicode == 1:
+ encoding = YAML_UTF8_ENCODING
+ yaml_stream_start_event_initialize(&event, encoding)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.closed = 0
+ elif self.closed == 1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError(u"serializer is closed")
+ else:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is already opened")
+ else:
+ raise SerializerError(u"serializer is already opened")
+
+ def close(self):
+ cdef yaml_event_t event
+ if self.closed == -1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is not opened")
+ else:
+ raise SerializerError(u"serializer is not opened")
+ elif self.closed == 0:
+ yaml_stream_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.closed = 1
+
+ def serialize(self, node):
+ cdef yaml_event_t event
+ cdef yaml_version_directive_t version_directive_value
+ cdef yaml_version_directive_t *version_directive
+ cdef yaml_tag_directive_t tag_directives_value[128]
+ cdef yaml_tag_directive_t *tag_directives_start
+ cdef yaml_tag_directive_t *tag_directives_end
+ if self.closed == -1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is not opened")
+ else:
+ raise SerializerError(u"serializer is not opened")
+ elif self.closed == 1:
+ if PY_MAJOR_VERSION < 3:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError(u"serializer is closed")
+ cache = []
+ version_directive = NULL
+ if self.use_version:
+ version_directive_value.major = self.use_version[0]
+ version_directive_value.minor = self.use_version[1]
+ version_directive = &version_directive_value
+ tag_directives_start = NULL
+ tag_directives_end = NULL
+ if self.use_tags:
+ if len(self.use_tags) > 128:
+ if PY_MAJOR_VERSION < 3:
+ raise ValueError("too many tags")
+ else:
+ raise ValueError(u"too many tags")
+ tag_directives_start = tag_directives_value
+ tag_directives_end = tag_directives_value
+ for handle in self.use_tags:
+ prefix = self.use_tags[handle]
+ if PyUnicode_CheckExact(handle):
+ handle = PyUnicode_AsUTF8String(handle)
+ cache.append(handle)
+ if not PyString_CheckExact(handle):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag handle must be a string")
+ else:
+ raise TypeError(u"tag handle must be a string")
+ tag_directives_end.handle = PyString_AS_STRING(handle)
+ if PyUnicode_CheckExact(prefix):
+ prefix = PyUnicode_AsUTF8String(prefix)
+ cache.append(prefix)
+ if not PyString_CheckExact(prefix):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag prefix must be a string")
+ else:
+ raise TypeError(u"tag prefix must be a string")
+ tag_directives_end.prefix = PyString_AS_STRING(prefix)
+ tag_directives_end = tag_directives_end+1
+ if yaml_document_start_event_initialize(&event, version_directive,
+ tag_directives_start, tag_directives_end,
+ self.document_start_implicit) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self._anchor_node(node)
+ self._serialize_node(node, None, None)
+ yaml_document_end_event_initialize(&event, self.document_end_implicit)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_alias_id = 0
+
+ cdef int _anchor_node(self, object node) except 0:
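+ # First pass over the node graph: a node seen once is mapped to None;
+ # on a second visit it receives a generated name ("id001", "id002", ...)
+ # so that only nodes referenced more than once get anchors.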
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.last_alias_id = self.last_alias_id+1
+ self.anchors[node] = u"id%03d" % self.last_alias_id
+ else:
+ self.anchors[node] = None
+ node_class = node.__class__
+ if node_class is SequenceNode:
+ for item in node.value:
+ self._anchor_node(item)
+ elif node_class is MappingNode:
+ for key, value in node.value:
+ self._anchor_node(key)
+ self._anchor_node(value)
+ return 1
+
+ cdef int _serialize_node(self, object node, object parent, object index) except 0:
+ cdef yaml_event_t event
+ cdef int implicit
+ cdef int plain_implicit
+ cdef int quoted_implicit
+ cdef char *anchor
+ cdef char *tag
+ cdef char *value
+ cdef int length
+ cdef int item_index
+ cdef yaml_scalar_style_t scalar_style
+ cdef yaml_sequence_style_t sequence_style
+ cdef yaml_mapping_style_t mapping_style
+ anchor_object = self.anchors[node]
+ anchor = NULL
+ if anchor_object is not None:
+ if PyUnicode_CheckExact(anchor_object):
+ anchor_object = PyUnicode_AsUTF8String(anchor_object)
+ if not PyString_CheckExact(anchor_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("anchor must be a string")
+ else:
+ raise TypeError(u"anchor must be a string")
+ anchor = PyString_AS_STRING(anchor_object)
+ if node in self.serialized_nodes:
+ if yaml_alias_event_initialize(&event, anchor) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ else:
+ node_class = node.__class__
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if node_class is ScalarNode:
+ plain_implicit = 0
+ quoted_implicit = 0
+ tag_object = node.tag
+ if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
+ plain_implicit = 1
+ if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
+ quoted_implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ value_object = node.value
+ if PyUnicode_CheckExact(value_object):
+ value_object = PyUnicode_AsUTF8String(value_object)
+ if not PyString_CheckExact(value_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("value must be a string")
+ else:
+ raise TypeError(u"value must be a string")
+ value = PyString_AS_STRING(value_object)
+ length = PyString_GET_SIZE(value_object)
+ style_object = node.style
+ scalar_style = YAML_PLAIN_SCALAR_STYLE
+ if style_object == "'" or style_object == u"'":
+ scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
+ elif style_object == "\"" or style_object == u"\"":
+ scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
+ elif style_object == "|" or style_object == u"|":
+ scalar_style = YAML_LITERAL_SCALAR_STYLE
+ elif style_object == ">" or style_object == u">":
+ scalar_style = YAML_FOLDED_SCALAR_STYLE
+ if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
+ plain_implicit, quoted_implicit, scalar_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ elif node_class is SequenceNode:
+ implicit = 0
+ tag_object = node.tag
+ if self.resolve(SequenceNode, node.value, True) == tag_object:
+ implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ sequence_style = YAML_BLOCK_SEQUENCE_STYLE
+ if node.flow_style:
+ sequence_style = YAML_FLOW_SEQUENCE_STYLE
+ if yaml_sequence_start_event_initialize(&event, anchor, tag,
+ implicit, sequence_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ item_index = 0
+ for item in node.value:
+ self._serialize_node(item, node, item_index)
+ item_index = item_index+1
+ yaml_sequence_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ elif node_class is MappingNode:
+ implicit = 0
+ tag_object = node.tag
+ if self.resolve(MappingNode, node.value, True) == tag_object:
+ implicit = 1
+ tag = NULL
+ if tag_object is not None:
+ if PyUnicode_CheckExact(tag_object):
+ tag_object = PyUnicode_AsUTF8String(tag_object)
+ if not PyString_CheckExact(tag_object):
+ if PY_MAJOR_VERSION < 3:
+ raise TypeError("tag must be a string")
+ else:
+ raise TypeError(u"tag must be a string")
+ tag = PyString_AS_STRING(tag_object)
+ mapping_style = YAML_BLOCK_MAPPING_STYLE
+ if node.flow_style:
+ mapping_style = YAML_FLOW_MAPPING_STYLE
+ if yaml_mapping_start_event_initialize(&event, anchor, tag,
+ implicit, mapping_style) == 0:
+ raise MemoryError
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ for item_key, item_value in node.value:
+ self._serialize_node(item_key, node, None)
+ self._serialize_node(item_value, node, item_key)
+ yaml_mapping_end_event_initialize(&event)
+ if yaml_emitter_emit(&self.emitter, &event) == 0:
+ error = self._emitter_error()
+ raise error
+ self.ascend_resolver()
+ return 1
+
+cdef int output_handler(void *data, char *buffer, int size) except 0:
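+ # libyaml write callback: hand the emitted bytes to the Python stream,
+ # decoding to unicode first when dumping to a unicode-aware stream;
+ # return 1 to signal success.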
+ cdef CEmitter emitter
+ emitter = <CEmitter>data
+ if emitter.dump_unicode == 0:
+ value = PyString_FromStringAndSize(buffer, size)
+ else:
+ value = PyUnicode_DecodeUTF8(buffer, size, 'strict')
+ emitter.stream.write(value)
+ return 1
+
diff --git a/python/pyyaml/lib/yaml/__init__.py b/python/pyyaml/lib/yaml/__init__.py
new file mode 100644
index 000000000..76e19e13f
--- /dev/null
+++ b/python/pyyaml/lib/yaml/__init__.py
@@ -0,0 +1,315 @@
+
+from error import *
+
+from tokens import *
+from events import *
+from nodes import *
+
+from loader import *
+from dumper import *
+
+__version__ = '3.11'
+
+try:
+ from cyaml import *
+ __with_libyaml__ = True
+except ImportError:
+ __with_libyaml__ = False
+
+def scan(stream, Loader=Loader):
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_token():
+ yield loader.get_token()
+ finally:
+ loader.dispose()
+
+def parse(stream, Loader=Loader):
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_event():
+ yield loader.get_event()
+ finally:
+ loader.dispose()
+
+def compose(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_node()
+ finally:
+ loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_node():
+ yield loader.get_node()
+ finally:
+ loader.dispose()
+
+def load(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_data()
+ finally:
+ loader.dispose()
+
+def load_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_data():
+ yield loader.get_data()
+ finally:
+ loader.dispose()
+
+def safe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ Resolve only basic YAML tags.
+ """
+ return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ Resolve only basic YAML tags.
+ """
+ return load_all(stream, SafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ from StringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ try:
+ for event in events:
+ dumper.emit(event)
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding='utf-8', explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ from StringIO import StringIO
+ else:
+ from cStringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for node in nodes:
+ dumper.serialize(node)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding='utf-8', explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ from StringIO import StringIO
+ else:
+ from cStringIO import StringIO
+ stream = StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, default_style=default_style,
+ default_flow_style=default_flow_style,
+ canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for data in documents:
+ dumper.represent(data)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+ Loader=Loader, Dumper=Dumper):
+ """
+ Add an implicit scalar detector.
+ If an implicit scalar value matches the given regexp,
+ the corresponding tag is assigned to the scalar.
+ first is a sequence of possible initial characters or None.
+ """
+ Loader.add_implicit_resolver(tag, regexp, first)
+ Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
+ """
+ Add a path based resolver for the given tag.
+ A path is a list of keys that forms a path
+ to a node in the representation tree.
+ Keys can be string values, integers, or None.
+ """
+ Loader.add_path_resolver(tag, path, kind)
+ Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=Loader):
+ """
+ Add a constructor for the given tag.
+ Constructor is a function that accepts a Loader instance
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
+ """
+ Add a multi-constructor for the given tag prefix.
+ Multi-constructor is called for a node if its tag starts with tag_prefix.
+ Multi-constructor accepts a Loader instance, a tag suffix,
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Representer is a function accepting a Dumper instance
+ and an instance of the given data type
+ and producing the corresponding representation node.
+ """
+ Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+ """
+    Add a multi-representer for the given type.
+ Multi-representer is a function accepting a Dumper instance
+ and an instance of the given data type or subtype
+ and producing the corresponding representation node.
+ """
+ Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+ """
+ The metaclass for YAMLObject.
+ """
+ def __init__(cls, name, bases, kwds):
+ super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+ cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+ cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(object):
+ """
+ An object that can dump itself to a YAML stream
+ and load itself from a YAML stream.
+ """
+
+ __metaclass__ = YAMLObjectMetaclass
+ __slots__ = () # no direct instantiation, so allow immutable subclasses
+
+ yaml_loader = Loader
+ yaml_dumper = Dumper
+
+ yaml_tag = None
+ yaml_flow_style = None
+
+ def from_yaml(cls, loader, node):
+ """
+ Convert a representation node to a Python object.
+ """
+ return loader.construct_yaml_object(node, cls)
+ from_yaml = classmethod(from_yaml)
+
+ def to_yaml(cls, dumper, data):
+ """
+ Convert a Python object to a representation node.
+ """
+ return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+ flow_style=cls.yaml_flow_style)
+ to_yaml = classmethod(to_yaml)
+
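
Usage note: load/dump plus the add_* hooks above are the whole public API, and YAMLObject ties a class's tag to both loader and dumper through its metaclass. A small sketch in the Python 2 dialect this tree targets, using a hypothetical Monster class:

    import yaml

    class Monster(yaml.YAMLObject):
        yaml_tag = u'!Monster'
        def __init__(self, name, hp):
            self.name = name
            self.hp = hp

    # The metaclass registered !Monster on Loader and Dumper, so no extra
    # calls are needed. Note that loading bypasses __init__: the
    # construct_yaml_object path builds the instance with cls.__new__ and
    # fills __dict__ from the mapping.
    m = yaml.load("!Monster {name: Dragon, hp: 40}")
    print(m.hp)          # 40
    print(yaml.dump(m))  # something like: !Monster {hp: 40, name: Dragon}
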
diff --git a/python/pyyaml/lib/yaml/composer.py b/python/pyyaml/lib/yaml/composer.py
new file mode 100644
index 000000000..06e5ac782
--- /dev/null
+++ b/python/pyyaml/lib/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from error import MarkedYAMLError
+from events import *
+from nodes import *
+
+class ComposerError(MarkedYAMLError):
+ pass
+
+class Composer(object):
+
+ def __init__(self):
+ self.anchors = {}
+
+ def check_node(self):
+ # Drop the STREAM-START event.
+ if self.check_event(StreamStartEvent):
+ self.get_event()
+
+        # Are there more documents available?
+ return not self.check_event(StreamEndEvent)
+
+ def get_node(self):
+ # Get the root node of the next document.
+ if not self.check_event(StreamEndEvent):
+ return self.compose_document()
+
+ def get_single_node(self):
+ # Drop the STREAM-START event.
+ self.get_event()
+
+ # Compose a document if the stream is not empty.
+ document = None
+ if not self.check_event(StreamEndEvent):
+ document = self.compose_document()
+
+ # Ensure that the stream contains no more documents.
+ if not self.check_event(StreamEndEvent):
+ event = self.get_event()
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document",
+ event.start_mark)
+
+ # Drop the STREAM-END event.
+ self.get_event()
+
+ return document
+
+ def compose_document(self):
+ # Drop the DOCUMENT-START event.
+ self.get_event()
+
+ # Compose the root node.
+ node = self.compose_node(None, None)
+
+ # Drop the DOCUMENT-END event.
+ self.get_event()
+
+ self.anchors = {}
+ return node
+
+ def compose_node(self, parent, index):
+ if self.check_event(AliasEvent):
+ event = self.get_event()
+ anchor = event.anchor
+ if anchor not in self.anchors:
+ raise ComposerError(None, None, "found undefined alias %r"
+ % anchor.encode('utf-8'), event.start_mark)
+ return self.anchors[anchor]
+ event = self.peek_event()
+ anchor = event.anchor
+ if anchor is not None:
+ if anchor in self.anchors:
+ raise ComposerError("found duplicate anchor %r; first occurence"
+ % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
+ "second occurence", event.start_mark)
+ self.descend_resolver(parent, index)
+ if self.check_event(ScalarEvent):
+ node = self.compose_scalar_node(anchor)
+ elif self.check_event(SequenceStartEvent):
+ node = self.compose_sequence_node(anchor)
+ elif self.check_event(MappingStartEvent):
+ node = self.compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ def compose_scalar_node(self, anchor):
+ event = self.get_event()
+ tag = event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(ScalarNode, event.value, event.implicit)
+ node = ScalarNode(tag, event.value,
+ event.start_mark, event.end_mark, style=event.style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ return node
+
+ def compose_sequence_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(SequenceNode, None, start_event.implicit)
+ node = SequenceNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ index = 0
+ while not self.check_event(SequenceEndEvent):
+ node.value.append(self.compose_node(node, index))
+ index += 1
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
+ def compose_mapping_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == u'!':
+ tag = self.resolve(MappingNode, None, start_event.implicit)
+ node = MappingNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ while not self.check_event(MappingEndEvent):
+ #key_event = self.peek_event()
+ item_key = self.compose_node(node, None)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
+ item_value = self.compose_node(node, item_key)
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
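
Usage note: compose_node is where anchors pay off. An AliasEvent simply returns the node already stored in self.anchors, so aliased subtrees are shared objects in the representation tree, not copies. A quick sketch:

    import yaml

    node = yaml.compose("base: &b {x: 1}\ncopy: *b\n")
    (_, base_node), (_, copy_node) = node.value
    print(base_node is copy_node)  # True: the alias resolves to the anchored node
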
diff --git a/python/pyyaml/lib/yaml/constructor.py b/python/pyyaml/lib/yaml/constructor.py
new file mode 100644
index 000000000..635faac3e
--- /dev/null
+++ b/python/pyyaml/lib/yaml/constructor.py
@@ -0,0 +1,675 @@
+
+__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
+ 'ConstructorError']
+
+from error import *
+from nodes import *
+
+import datetime
+
+import binascii, re, sys, types
+
+class ConstructorError(MarkedYAMLError):
+ pass
+
+class BaseConstructor(object):
+
+ yaml_constructors = {}
+ yaml_multi_constructors = {}
+
+ def __init__(self):
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.state_generators = []
+ self.deep_construct = False
+
+ def check_data(self):
+        # Are there more documents available?
+ return self.check_node()
+
+ def get_data(self):
+ # Construct and return the next document.
+ if self.check_node():
+ return self.construct_document(self.get_node())
+
+ def get_single_data(self):
+ # Ensure that the stream contains a single document and construct it.
+ node = self.get_single_node()
+ if node is not None:
+ return self.construct_document(node)
+ return None
+
+ def construct_document(self, node):
+ data = self.construct_object(node)
+ while self.state_generators:
+ state_generators = self.state_generators
+ self.state_generators = []
+ for generator in state_generators:
+ for dummy in generator:
+ pass
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.deep_construct = False
+ return data
+
+ def construct_object(self, node, deep=False):
+ if node in self.constructed_objects:
+ return self.constructed_objects[node]
+ if deep:
+ old_deep = self.deep_construct
+ self.deep_construct = True
+ if node in self.recursive_objects:
+ raise ConstructorError(None, None,
+ "found unconstructable recursive node", node.start_mark)
+ self.recursive_objects[node] = None
+ constructor = None
+ tag_suffix = None
+ if node.tag in self.yaml_constructors:
+ constructor = self.yaml_constructors[node.tag]
+ else:
+ for tag_prefix in self.yaml_multi_constructors:
+ if node.tag.startswith(tag_prefix):
+ tag_suffix = node.tag[len(tag_prefix):]
+ constructor = self.yaml_multi_constructors[tag_prefix]
+ break
+ else:
+ if None in self.yaml_multi_constructors:
+ tag_suffix = node.tag
+ constructor = self.yaml_multi_constructors[None]
+ elif None in self.yaml_constructors:
+ constructor = self.yaml_constructors[None]
+ elif isinstance(node, ScalarNode):
+ constructor = self.__class__.construct_scalar
+ elif isinstance(node, SequenceNode):
+ constructor = self.__class__.construct_sequence
+ elif isinstance(node, MappingNode):
+ constructor = self.__class__.construct_mapping
+ if tag_suffix is None:
+ data = constructor(self, node)
+ else:
+ data = constructor(self, tag_suffix, node)
+ if isinstance(data, types.GeneratorType):
+ generator = data
+ data = generator.next()
+ if self.deep_construct:
+ for dummy in generator:
+ pass
+ else:
+ self.state_generators.append(generator)
+ self.constructed_objects[node] = data
+ del self.recursive_objects[node]
+ if deep:
+ self.deep_construct = old_deep
+ return data
+
+ def construct_scalar(self, node):
+ if not isinstance(node, ScalarNode):
+ raise ConstructorError(None, None,
+ "expected a scalar node, but found %s" % node.id,
+ node.start_mark)
+ return node.value
+
+ def construct_sequence(self, node, deep=False):
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError(None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ return [self.construct_object(child, deep=deep)
+ for child in node.value]
+
+ def construct_mapping(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ mapping = {}
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ try:
+ hash(key)
+ except TypeError, exc:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ mapping[key] = value
+ return mapping
+
+ def construct_pairs(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ pairs = []
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ value = self.construct_object(value_node, deep=deep)
+ pairs.append((key, value))
+ return pairs
+
+ def add_constructor(cls, tag, constructor):
+        if 'yaml_constructors' not in cls.__dict__:
+ cls.yaml_constructors = cls.yaml_constructors.copy()
+ cls.yaml_constructors[tag] = constructor
+ add_constructor = classmethod(add_constructor)
+
+ def add_multi_constructor(cls, tag_prefix, multi_constructor):
+        if 'yaml_multi_constructors' not in cls.__dict__:
+ cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
+ cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+ add_multi_constructor = classmethod(add_multi_constructor)
+
+class SafeConstructor(BaseConstructor):
+
+ def construct_scalar(self, node):
+ if isinstance(node, MappingNode):
+ for key_node, value_node in node.value:
+ if key_node.tag == u'tag:yaml.org,2002:value':
+ return self.construct_scalar(value_node)
+ return BaseConstructor.construct_scalar(self, node)
+
+ def flatten_mapping(self, node):
+ merge = []
+ index = 0
+ while index < len(node.value):
+ key_node, value_node = node.value[index]
+ if key_node.tag == u'tag:yaml.org,2002:merge':
+ del node.value[index]
+ if isinstance(value_node, MappingNode):
+ self.flatten_mapping(value_node)
+ merge.extend(value_node.value)
+ elif isinstance(value_node, SequenceNode):
+ submerge = []
+ for subnode in value_node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing a mapping",
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
+ self.flatten_mapping(subnode)
+ submerge.append(subnode.value)
+ submerge.reverse()
+ for value in submerge:
+ merge.extend(value)
+ else:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
+ elif key_node.tag == u'tag:yaml.org,2002:value':
+ key_node.tag = u'tag:yaml.org,2002:str'
+ index += 1
+ else:
+ index += 1
+ if merge:
+ node.value = merge + node.value
+
+ def construct_mapping(self, node, deep=False):
+ if isinstance(node, MappingNode):
+ self.flatten_mapping(node)
+ return BaseConstructor.construct_mapping(self, node, deep=deep)
+
+ def construct_yaml_null(self, node):
+ self.construct_scalar(node)
+ return None
+
+ bool_values = {
+ u'yes': True,
+ u'no': False,
+ u'true': True,
+ u'false': False,
+ u'on': True,
+ u'off': False,
+ }
+
+ def construct_yaml_bool(self, node):
+ value = self.construct_scalar(node)
+ return self.bool_values[value.lower()]
+
+ def construct_yaml_int(self, node):
+ value = str(self.construct_scalar(node))
+ value = value.replace('_', '')
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '0':
+ return 0
+ elif value.startswith('0b'):
+ return sign*int(value[2:], 2)
+ elif value.startswith('0x'):
+ return sign*int(value[2:], 16)
+ elif value[0] == '0':
+ return sign*int(value, 8)
+ elif ':' in value:
+ digits = [int(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*int(value)
+
+ inf_value = 1e300
+ while inf_value != inf_value*inf_value:
+ inf_value *= inf_value
+ nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+
+ def construct_yaml_float(self, node):
+ value = str(self.construct_scalar(node))
+ value = value.replace('_', '').lower()
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '.inf':
+ return sign*self.inf_value
+ elif value == '.nan':
+ return self.nan_value
+ elif ':' in value:
+ digits = [float(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0.0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*float(value)
+
+ def construct_yaml_binary(self, node):
+ value = self.construct_scalar(node)
+ try:
+ return str(value).decode('base64')
+ except (binascii.Error, UnicodeEncodeError), exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ timestamp_regexp = re.compile(
+ ur'''^(?P<year>[0-9][0-9][0-9][0-9])
+ -(?P<month>[0-9][0-9]?)
+ -(?P<day>[0-9][0-9]?)
+ (?:(?:[Tt]|[ \t]+)
+ (?P<hour>[0-9][0-9]?)
+ :(?P<minute>[0-9][0-9])
+ :(?P<second>[0-9][0-9])
+ (?:\.(?P<fraction>[0-9]*))?
+ (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+ (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+ def construct_yaml_timestamp(self, node):
+ value = self.construct_scalar(node)
+ match = self.timestamp_regexp.match(node.value)
+ values = match.groupdict()
+ year = int(values['year'])
+ month = int(values['month'])
+ day = int(values['day'])
+ if not values['hour']:
+ return datetime.date(year, month, day)
+ hour = int(values['hour'])
+ minute = int(values['minute'])
+ second = int(values['second'])
+ fraction = 0
+ if values['fraction']:
+ fraction = values['fraction'][:6]
+ while len(fraction) < 6:
+ fraction += '0'
+ fraction = int(fraction)
+ delta = None
+ if values['tz_sign']:
+ tz_hour = int(values['tz_hour'])
+ tz_minute = int(values['tz_minute'] or 0)
+ delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+ if values['tz_sign'] == '-':
+ delta = -delta
+ data = datetime.datetime(year, month, day, hour, minute, second, fraction)
+ if delta:
+ data -= delta
+ return data
+
+ def construct_yaml_omap(self, node):
+ # Note: we do not check for duplicate keys, because it's too
+ # CPU-expensive.
+ omap = []
+ yield omap
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ omap.append((key, value))
+
+ def construct_yaml_pairs(self, node):
+ # Note: the same code as `construct_yaml_omap`.
+ pairs = []
+ yield pairs
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ pairs.append((key, value))
+
+ def construct_yaml_set(self, node):
+ data = set()
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_str(self, node):
+ value = self.construct_scalar(node)
+ try:
+ return value.encode('ascii')
+ except UnicodeEncodeError:
+ return value
+
+ def construct_yaml_seq(self, node):
+ data = []
+ yield data
+ data.extend(self.construct_sequence(node))
+
+ def construct_yaml_map(self, node):
+ data = {}
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_object(self, node, cls):
+ data = cls.__new__(cls)
+ yield data
+ if hasattr(data, '__setstate__'):
+ state = self.construct_mapping(node, deep=True)
+ data.__setstate__(state)
+ else:
+ state = self.construct_mapping(node)
+ data.__dict__.update(state)
+
+ def construct_undefined(self, node):
+ raise ConstructorError(None, None,
+ "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
+ node.start_mark)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:null',
+ SafeConstructor.construct_yaml_null)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:bool',
+ SafeConstructor.construct_yaml_bool)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:int',
+ SafeConstructor.construct_yaml_int)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:float',
+ SafeConstructor.construct_yaml_float)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:binary',
+ SafeConstructor.construct_yaml_binary)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:timestamp',
+ SafeConstructor.construct_yaml_timestamp)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:omap',
+ SafeConstructor.construct_yaml_omap)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:pairs',
+ SafeConstructor.construct_yaml_pairs)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:set',
+ SafeConstructor.construct_yaml_set)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:str',
+ SafeConstructor.construct_yaml_str)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:seq',
+ SafeConstructor.construct_yaml_seq)
+
+SafeConstructor.add_constructor(
+ u'tag:yaml.org,2002:map',
+ SafeConstructor.construct_yaml_map)
+
+SafeConstructor.add_constructor(None,
+ SafeConstructor.construct_undefined)
+
+class Constructor(SafeConstructor):
+
+ def construct_python_str(self, node):
+ return self.construct_scalar(node).encode('utf-8')
+
+ def construct_python_unicode(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_long(self, node):
+ return long(self.construct_yaml_int(node))
+
+ def construct_python_complex(self, node):
+ return complex(self.construct_scalar(node))
+
+ def construct_python_tuple(self, node):
+ return tuple(self.construct_sequence(node))
+
+ def find_python_module(self, name, mark):
+ if not name:
+ raise ConstructorError("while constructing a Python module", mark,
+ "expected non-empty name appended to the tag", mark)
+ try:
+ __import__(name)
+ except ImportError, exc:
+ raise ConstructorError("while constructing a Python module", mark,
+ "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
+ return sys.modules[name]
+
+ def find_python_name(self, name, mark):
+ if not name:
+ raise ConstructorError("while constructing a Python object", mark,
+ "expected non-empty name appended to the tag", mark)
+ if u'.' in name:
+ module_name, object_name = name.rsplit('.', 1)
+ else:
+ module_name = '__builtin__'
+ object_name = name
+ try:
+ __import__(module_name)
+ except ImportError, exc:
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
+ module = sys.modules[module_name]
+ if not hasattr(module, object_name):
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find %r in the module %r" % (object_name.encode('utf-8'),
+ module.__name__), mark)
+ return getattr(module, object_name)
+
+ def construct_python_name(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python name", node.start_mark,
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
+ return self.find_python_name(suffix, node.start_mark)
+
+ def construct_python_module(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python module", node.start_mark,
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
+ return self.find_python_module(suffix, node.start_mark)
+
+ class classobj: pass
+
+ def make_python_instance(self, suffix, node,
+ args=None, kwds=None, newobj=False):
+ if not args:
+ args = []
+ if not kwds:
+ kwds = {}
+ cls = self.find_python_name(suffix, node.start_mark)
+ if newobj and isinstance(cls, type(self.classobj)) \
+ and not args and not kwds:
+ instance = self.classobj()
+ instance.__class__ = cls
+ return instance
+ elif newobj and isinstance(cls, type):
+ return cls.__new__(cls, *args, **kwds)
+ else:
+ return cls(*args, **kwds)
+
+ def set_python_instance_state(self, instance, state):
+ if hasattr(instance, '__setstate__'):
+ instance.__setstate__(state)
+ else:
+ slotstate = {}
+ if isinstance(state, tuple) and len(state) == 2:
+ state, slotstate = state
+ if hasattr(instance, '__dict__'):
+ instance.__dict__.update(state)
+ elif state:
+ slotstate.update(state)
+ for key, value in slotstate.items():
+                setattr(instance, key, value)
+
+ def construct_python_object(self, suffix, node):
+ # Format:
+ # !!python/object:module.name { ... state ... }
+ instance = self.make_python_instance(suffix, node, newobj=True)
+ yield instance
+ deep = hasattr(instance, '__setstate__')
+ state = self.construct_mapping(node, deep=deep)
+ self.set_python_instance_state(instance, state)
+
+ def construct_python_object_apply(self, suffix, node, newobj=False):
+ # Format:
+ # !!python/object/apply # (or !!python/object/new)
+ # args: [ ... arguments ... ]
+ # kwds: { ... keywords ... }
+ # state: ... state ...
+ # listitems: [ ... listitems ... ]
+ # dictitems: { ... dictitems ... }
+ # or short format:
+ # !!python/object/apply [ ... arguments ... ]
+ # The difference between !!python/object/apply and !!python/object/new
+ # is how an object is created, check make_python_instance for details.
+ if isinstance(node, SequenceNode):
+ args = self.construct_sequence(node, deep=True)
+ kwds = {}
+ state = {}
+ listitems = []
+ dictitems = {}
+ else:
+ value = self.construct_mapping(node, deep=True)
+ args = value.get('args', [])
+ kwds = value.get('kwds', {})
+ state = value.get('state', {})
+ listitems = value.get('listitems', [])
+ dictitems = value.get('dictitems', {})
+ instance = self.make_python_instance(suffix, node, args, kwds, newobj)
+ if state:
+ self.set_python_instance_state(instance, state)
+ if listitems:
+ instance.extend(listitems)
+ if dictitems:
+ for key in dictitems:
+ instance[key] = dictitems[key]
+ return instance
+
+ def construct_python_object_new(self, suffix, node):
+ return self.construct_python_object_apply(suffix, node, newobj=True)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/none',
+ Constructor.construct_yaml_null)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/bool',
+ Constructor.construct_yaml_bool)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/str',
+ Constructor.construct_python_str)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/unicode',
+ Constructor.construct_python_unicode)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/int',
+ Constructor.construct_yaml_int)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/long',
+ Constructor.construct_python_long)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/float',
+ Constructor.construct_yaml_float)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/complex',
+ Constructor.construct_python_complex)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/list',
+ Constructor.construct_yaml_seq)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/tuple',
+ Constructor.construct_python_tuple)
+
+Constructor.add_constructor(
+ u'tag:yaml.org,2002:python/dict',
+ Constructor.construct_yaml_map)
+
+Constructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/name:',
+ Constructor.construct_python_name)
+
+Constructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/module:',
+ Constructor.construct_python_module)
+
+Constructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object:',
+ Constructor.construct_python_object)
+
+Constructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object/apply:',
+ Constructor.construct_python_object_apply)
+
+Constructor.add_multi_constructor(
+ u'tag:yaml.org,2002:python/object/new:',
+ Constructor.construct_python_object_new)
+
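
Usage note: everything below the class bodies is just registration, with SafeConstructor getting the standard tags and Constructor layering the python/* tags on top. Application tags use the same hook; a sketch with a made-up !point tag:

    import yaml

    def construct_point(loader, node):
        # "!point [3, 4]" arrives as a SequenceNode.
        x, y = loader.construct_sequence(node)
        return (x, y)

    yaml.add_constructor(u'!point', construct_point)
    print(yaml.load("corner: !point [3, 4]"))  # {'corner': (3, 4)}
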
diff --git a/python/pyyaml/lib/yaml/cyaml.py b/python/pyyaml/lib/yaml/cyaml.py
new file mode 100644
index 000000000..68dcd7519
--- /dev/null
+++ b/python/pyyaml/lib/yaml/cyaml.py
@@ -0,0 +1,85 @@
+
+__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper']
+
+from _yaml import CParser, CEmitter
+
+from constructor import *
+
+from serializer import *
+from representer import *
+
+from resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
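
Usage note: each C class is the same mixin sandwich as its pure-Python counterpart, with CParser standing in for the whole reader-to-parser stack and CEmitter for the serializer-to-emitter stack. Callers usually select between them with the import-time fallback pattern:

    import yaml

    try:
        from yaml import CSafeLoader as SafeLoader  # libyaml available
    except ImportError:
        from yaml import SafeLoader                 # pure-Python fallback

    print(yaml.load("n: 42", Loader=SafeLoader))    # {'n': 42}
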
diff --git a/python/pyyaml/lib/yaml/dumper.py b/python/pyyaml/lib/yaml/dumper.py
new file mode 100644
index 000000000..f811d2c91
--- /dev/null
+++ b/python/pyyaml/lib/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from emitter import *
+from serializer import *
+from representer import *
+from resolver import *
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
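
Usage note: the three dumpers differ only in which representer and resolver they mix over Emitter and Serializer. To make safe_dump handle an application type, register a representer on SafeDumper; a sketch with a hypothetical Point class and the same made-up !point tag as in the constructor example:

    import yaml

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    def represent_point(dumper, p):
        return dumper.represent_sequence(u'!point', [p.x, p.y])

    yaml.add_representer(Point, represent_point, Dumper=yaml.SafeDumper)
    print(yaml.safe_dump(Point(3, 4)))  # emits something like "!point [3, 4]"

Loading the result back requires the matching construct_point registration shown after constructor.py above.
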
diff --git a/python/pyyaml/lib/yaml/emitter.py b/python/pyyaml/lib/yaml/emitter.py
new file mode 100644
index 000000000..e5bcdcccb
--- /dev/null
+++ b/python/pyyaml/lib/yaml/emitter.py
@@ -0,0 +1,1140 @@
+
+# Emitter expects events obeying the following grammar:
+# stream ::= STREAM-START document* STREAM-END
+# document ::= DOCUMENT-START node DOCUMENT-END
+# node ::= SCALAR | sequence | mapping
+# sequence ::= SEQUENCE-START node* SEQUENCE-END
+# mapping ::= MAPPING-START (node node)* MAPPING-END
+
+__all__ = ['Emitter', 'EmitterError']
+
+from error import YAMLError
+from events import *
+
+class EmitterError(YAMLError):
+ pass
+
+class ScalarAnalysis(object):
+ def __init__(self, scalar, empty, multiline,
+ allow_flow_plain, allow_block_plain,
+ allow_single_quoted, allow_double_quoted,
+ allow_block):
+ self.scalar = scalar
+ self.empty = empty
+ self.multiline = multiline
+ self.allow_flow_plain = allow_flow_plain
+ self.allow_block_plain = allow_block_plain
+ self.allow_single_quoted = allow_single_quoted
+ self.allow_double_quoted = allow_double_quoted
+ self.allow_block = allow_block
+
+class Emitter(object):
+
+ DEFAULT_TAG_PREFIXES = {
+ u'!' : u'!',
+ u'tag:yaml.org,2002:' : u'!!',
+ }
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+
+ # The stream should have the methods `write` and possibly `flush`.
+ self.stream = stream
+
+        # Encoding can be overridden by STREAM-START.
+ self.encoding = None
+
+ # Emitter is a state machine with a stack of states to handle nested
+ # structures.
+ self.states = []
+ self.state = self.expect_stream_start
+
+ # Current event and the event queue.
+ self.events = []
+ self.event = None
+
+ # The current indentation level and the stack of previous indents.
+ self.indents = []
+ self.indent = None
+
+ # Flow level.
+ self.flow_level = 0
+
+ # Contexts.
+ self.root_context = False
+ self.sequence_context = False
+ self.mapping_context = False
+ self.simple_key_context = False
+
+ # Characteristics of the last emitted character:
+ # - current position.
+ # - is it a whitespace?
+ # - is it an indention character
+ # (indentation space, '-', '?', or ':')?
+ self.line = 0
+ self.column = 0
+ self.whitespace = True
+ self.indention = True
+
+ # Whether the document requires an explicit document indicator
+ self.open_ended = False
+
+ # Formatting details.
+ self.canonical = canonical
+ self.allow_unicode = allow_unicode
+ self.best_indent = 2
+ if indent and 1 < indent < 10:
+ self.best_indent = indent
+ self.best_width = 80
+ if width and width > self.best_indent*2:
+ self.best_width = width
+ self.best_line_break = u'\n'
+ if line_break in [u'\r', u'\n', u'\r\n']:
+ self.best_line_break = line_break
+
+ # Tag prefixes.
+ self.tag_prefixes = None
+
+ # Prepared anchor and tag.
+ self.prepared_anchor = None
+ self.prepared_tag = None
+
+ # Scalar analysis and style.
+ self.analysis = None
+ self.style = None
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def emit(self, event):
+ self.events.append(event)
+ while not self.need_more_events():
+ self.event = self.events.pop(0)
+ self.state()
+ self.event = None
+
+    # In some cases, we wait for the next few events before emitting.
+
+ def need_more_events(self):
+ if not self.events:
+ return True
+ event = self.events[0]
+ if isinstance(event, DocumentStartEvent):
+ return self.need_events(1)
+ elif isinstance(event, SequenceStartEvent):
+ return self.need_events(2)
+ elif isinstance(event, MappingStartEvent):
+ return self.need_events(3)
+ else:
+ return False
+
+ def need_events(self, count):
+ level = 0
+ for event in self.events[1:]:
+ if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
+ level += 1
+ elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
+ level -= 1
+ elif isinstance(event, StreamEndEvent):
+ level = -1
+ if level < 0:
+ return False
+ return (len(self.events) < count+1)
+
+ def increase_indent(self, flow=False, indentless=False):
+ self.indents.append(self.indent)
+ if self.indent is None:
+ if flow:
+ self.indent = self.best_indent
+ else:
+ self.indent = 0
+ elif not indentless:
+ self.indent += self.best_indent
+
+ # States.
+
+ # Stream handlers.
+
+ def expect_stream_start(self):
+ if isinstance(self.event, StreamStartEvent):
+ if self.event.encoding and not getattr(self.stream, 'encoding', None):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
+ else:
+ raise EmitterError("expected StreamStartEvent, but got %s"
+ % self.event)
+
+ def expect_nothing(self):
+ raise EmitterError("expected nothing, but got %s" % self.event)
+
+ # Document handlers.
+
+ def expect_first_document_start(self):
+ return self.expect_document_start(first=True)
+
+ def expect_document_start(self, first=False):
+ if isinstance(self.event, DocumentStartEvent):
+ if (self.event.version or self.event.tags) and self.open_ended:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ if self.event.version:
+ version_text = self.prepare_version(self.event.version)
+ self.write_version_directive(version_text)
+ self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
+ if self.event.tags:
+ handles = self.event.tags.keys()
+ handles.sort()
+ for handle in handles:
+ prefix = self.event.tags[handle]
+ self.tag_prefixes[prefix] = handle
+ handle_text = self.prepare_tag_handle(handle)
+ prefix_text = self.prepare_tag_prefix(prefix)
+ self.write_tag_directive(handle_text, prefix_text)
+ implicit = (first and not self.event.explicit and not self.canonical
+ and not self.event.version and not self.event.tags
+ and not self.check_empty_document())
+ if not implicit:
+ self.write_indent()
+ self.write_indicator(u'---', True)
+ if self.canonical:
+ self.write_indent()
+ self.state = self.expect_document_root
+ elif isinstance(self.event, StreamEndEvent):
+ if self.open_ended:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ self.write_stream_end()
+ self.state = self.expect_nothing
+ else:
+ raise EmitterError("expected DocumentStartEvent, but got %s"
+ % self.event)
+
+ def expect_document_end(self):
+ if isinstance(self.event, DocumentEndEvent):
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator(u'...', True)
+ self.write_indent()
+ self.flush_stream()
+ self.state = self.expect_document_start
+ else:
+ raise EmitterError("expected DocumentEndEvent, but got %s"
+ % self.event)
+
+ def expect_document_root(self):
+ self.states.append(self.expect_document_end)
+ self.expect_node(root=True)
+
+ # Node handlers.
+
+ def expect_node(self, root=False, sequence=False, mapping=False,
+ simple_key=False):
+ self.root_context = root
+ self.sequence_context = sequence
+ self.mapping_context = mapping
+ self.simple_key_context = simple_key
+ if isinstance(self.event, AliasEvent):
+ self.expect_alias()
+ elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
+ self.process_anchor(u'&')
+ self.process_tag()
+ if isinstance(self.event, ScalarEvent):
+ self.expect_scalar()
+ elif isinstance(self.event, SequenceStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_sequence():
+ self.expect_flow_sequence()
+ else:
+ self.expect_block_sequence()
+ elif isinstance(self.event, MappingStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_mapping():
+ self.expect_flow_mapping()
+ else:
+ self.expect_block_mapping()
+ else:
+ raise EmitterError("expected NodeEvent, but got %s" % self.event)
+
+ def expect_alias(self):
+ if self.event.anchor is None:
+ raise EmitterError("anchor is not specified for alias")
+ self.process_anchor(u'*')
+ self.state = self.states.pop()
+
+ def expect_scalar(self):
+ self.increase_indent(flow=True)
+ self.process_scalar()
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+
+ # Flow sequence handlers.
+
+ def expect_flow_sequence(self):
+ self.write_indicator(u'[', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_sequence_item
+
+ def expect_first_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(u']', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ def expect_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(u',', False)
+ self.write_indent()
+ self.write_indicator(u']', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(u',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Flow mapping handlers.
+
+ def expect_flow_mapping(self):
+ self.write_indicator(u'{', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_mapping_key
+
+ def expect_first_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(u'}', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(u',', False)
+ self.write_indent()
+ self.write_indicator(u'}', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(u',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_simple_value(self):
+ self.write_indicator(u':', False)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_value(self):
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.write_indicator(u':', True)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Block sequence handlers.
+
+ def expect_block_sequence(self):
+ indentless = (self.mapping_context and not self.indention)
+ self.increase_indent(flow=False, indentless=indentless)
+ self.state = self.expect_first_block_sequence_item
+
+ def expect_first_block_sequence_item(self):
+ return self.expect_block_sequence_item(first=True)
+
+ def expect_block_sequence_item(self, first=False):
+ if not first and isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ self.write_indicator(u'-', True, indention=True)
+ self.states.append(self.expect_block_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Block mapping handlers.
+
+ def expect_block_mapping(self):
+ self.increase_indent(flow=False)
+ self.state = self.expect_first_block_mapping_key
+
+ def expect_first_block_mapping_key(self):
+ return self.expect_block_mapping_key(first=True)
+
+ def expect_block_mapping_key(self, first=False):
+ if not first and isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ if self.check_simple_key():
+ self.states.append(self.expect_block_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator(u'?', True, indention=True)
+ self.states.append(self.expect_block_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_simple_value(self):
+ self.write_indicator(u':', False)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_value(self):
+ self.write_indent()
+ self.write_indicator(u':', True, indention=True)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Checkers.
+
+ def check_empty_sequence(self):
+ return (isinstance(self.event, SequenceStartEvent) and self.events
+ and isinstance(self.events[0], SequenceEndEvent))
+
+ def check_empty_mapping(self):
+ return (isinstance(self.event, MappingStartEvent) and self.events
+ and isinstance(self.events[0], MappingEndEvent))
+
+ def check_empty_document(self):
+ if not isinstance(self.event, DocumentStartEvent) or not self.events:
+ return False
+ event = self.events[0]
+ return (isinstance(event, ScalarEvent) and event.anchor is None
+ and event.tag is None and event.implicit and event.value == u'')
+
+ def check_simple_key(self):
+ length = 0
+ if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ length += len(self.prepared_anchor)
+ if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
+ and self.event.tag is not None:
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(self.event.tag)
+ length += len(self.prepared_tag)
+ if isinstance(self.event, ScalarEvent):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ length += len(self.analysis.scalar)
+ return (length < 128 and (isinstance(self.event, AliasEvent)
+ or (isinstance(self.event, ScalarEvent)
+ and not self.analysis.empty and not self.analysis.multiline)
+ or self.check_empty_sequence() or self.check_empty_mapping()))
+
+ # Anchor, Tag, and Scalar processors.
+
+ def process_anchor(self, indicator):
+ if self.event.anchor is None:
+ self.prepared_anchor = None
+ return
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ if self.prepared_anchor:
+ self.write_indicator(indicator+self.prepared_anchor, True)
+ self.prepared_anchor = None
+
+ def process_tag(self):
+ tag = self.event.tag
+ if isinstance(self.event, ScalarEvent):
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ if ((not self.canonical or tag is None) and
+ ((self.style == '' and self.event.implicit[0])
+ or (self.style != '' and self.event.implicit[1]))):
+ self.prepared_tag = None
+ return
+ if self.event.implicit[0] and tag is None:
+ tag = u'!'
+ self.prepared_tag = None
+ else:
+ if (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
+ if tag is None:
+ raise EmitterError("tag is not specified")
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(tag)
+ if self.prepared_tag:
+ self.write_indicator(self.prepared_tag, True)
+ self.prepared_tag = None
+
+ def choose_scalar_style(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.event.style == '"' or self.canonical:
+ return '"'
+ if not self.event.style and self.event.implicit[0]:
+ if (not (self.simple_key_context and
+ (self.analysis.empty or self.analysis.multiline))
+ and (self.flow_level and self.analysis.allow_flow_plain
+ or (not self.flow_level and self.analysis.allow_block_plain))):
+ return ''
+ if self.event.style and self.event.style in '|>':
+ if (not self.flow_level and not self.simple_key_context
+ and self.analysis.allow_block):
+ return self.event.style
+ if not self.event.style or self.event.style == '\'':
+ if (self.analysis.allow_single_quoted and
+ not (self.simple_key_context and self.analysis.multiline)):
+ return '\''
+ return '"'
+
+ def process_scalar(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ split = (not self.simple_key_context)
+ #if self.analysis.multiline and split \
+ # and (not self.style or self.style in '\'\"'):
+ # self.write_indent()
+ if self.style == '"':
+ self.write_double_quoted(self.analysis.scalar, split)
+ elif self.style == '\'':
+ self.write_single_quoted(self.analysis.scalar, split)
+ elif self.style == '>':
+ self.write_folded(self.analysis.scalar)
+ elif self.style == '|':
+ self.write_literal(self.analysis.scalar)
+ else:
+ self.write_plain(self.analysis.scalar, split)
+ self.analysis = None
+ self.style = None
+
+ # Analyzers.
+
+ def prepare_version(self, version):
+ major, minor = version
+ if major != 1:
+ raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+ return u'%d.%d' % (major, minor)
+
+ def prepare_tag_handle(self, handle):
+ if not handle:
+ raise EmitterError("tag handle must not be empty")
+ if handle[0] != u'!' or handle[-1] != u'!':
+ raise EmitterError("tag handle must start and end with '!': %r"
+ % (handle.encode('utf-8')))
+ for ch in handle[1:-1]:
+ if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_'):
+ raise EmitterError("invalid character %r in the tag handle: %r"
+ % (ch.encode('utf-8'), handle.encode('utf-8')))
+ return handle
+
+ def prepare_tag_prefix(self, prefix):
+ if not prefix:
+ raise EmitterError("tag prefix must not be empty")
+ chunks = []
+ start = end = 0
+ if prefix[0] == u'!':
+ end = 1
+ while end < len(prefix):
+ ch = prefix[end]
+ if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?!:@&=+$,_.~*\'()[]':
+ end += 1
+ else:
+ if start < end:
+ chunks.append(prefix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append(u'%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(prefix[start:end])
+ return u''.join(chunks)
+
+ def prepare_tag(self, tag):
+ if not tag:
+ raise EmitterError("tag must not be empty")
+ if tag == u'!':
+ return tag
+ handle = None
+ suffix = tag
+ prefixes = self.tag_prefixes.keys()
+ prefixes.sort()
+ for prefix in prefixes:
+ if tag.startswith(prefix) \
+ and (prefix == u'!' or len(prefix) < len(tag)):
+ handle = self.tag_prefixes[prefix]
+ suffix = tag[len(prefix):]
+ chunks = []
+ start = end = 0
+ while end < len(suffix):
+ ch = suffix[end]
+ if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == u'!' and handle != u'!'):
+ end += 1
+ else:
+ if start < end:
+ chunks.append(suffix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append(u'%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(suffix[start:end])
+ suffix_text = u''.join(chunks)
+ if handle:
+ return u'%s%s' % (handle, suffix_text)
+ else:
+ return u'!<%s>' % suffix_text
+
+ def prepare_anchor(self, anchor):
+ if not anchor:
+ raise EmitterError("anchor must not be empty")
+ for ch in anchor:
+ if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_'):
+ raise EmitterError("invalid character %r in the anchor: %r"
+ % (ch.encode('utf-8'), anchor.encode('utf-8')))
+ return anchor
+
+ def analyze_scalar(self, scalar):
+
+ # Empty scalar is a special case.
+ if not scalar:
+ return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
+ allow_flow_plain=False, allow_block_plain=True,
+ allow_single_quoted=True, allow_double_quoted=True,
+ allow_block=False)
+
+ # Indicators and special characters.
+ block_indicators = False
+ flow_indicators = False
+ line_breaks = False
+ special_characters = False
+ unicode_characters = False  # noted for completeness; not read later
+
+ # Important whitespace combinations.
+ leading_space = False
+ leading_break = False
+ trailing_space = False
+ trailing_break = False
+ break_space = False
+ space_break = False
+
+ # Check document indicators.
+ if scalar.startswith(u'---') or scalar.startswith(u'...'):
+ block_indicators = True
+ flow_indicators = True
+
+ # First character, or preceded by whitespace.
+ preceded_by_whitespace = True
+
+ # Last character, or followed by whitespace.
+ followed_by_whitespace = (len(scalar) == 1 or
+ scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
+
+ # The previous character is a space.
+ previous_space = False
+
+ # The previous character is a break.
+ previous_break = False
+
+ index = 0
+ while index < len(scalar):
+ ch = scalar[index]
+
+ # Check for indicators.
+ if index == 0:
+ # Leading indicators are special characters.
+ if ch in u'#,[]{}&*!|>\'\"%@`':
+ flow_indicators = True
+ block_indicators = True
+ if ch in u'?:':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == u'-' and followed_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+ else:
+ # Some indicators cannot appear within a scalar, either.
+ if ch in u',?[]{}':
+ flow_indicators = True
+ if ch == u':':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == u'#' and preceded_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+
+ # Check for line breaks, special, and unicode characters.
+ if ch in u'\n\x85\u2028\u2029':
+ line_breaks = True
+ if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
+ if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
+ or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF':
+ unicode_characters = True
+ if not self.allow_unicode:
+ special_characters = True
+ else:
+ special_characters = True
+
+ # Detect important whitespace combinations.
+ if ch == u' ':
+ if index == 0:
+ leading_space = True
+ if index == len(scalar)-1:
+ trailing_space = True
+ if previous_break:
+ break_space = True
+ previous_space = True
+ previous_break = False
+ elif ch in u'\n\x85\u2028\u2029':
+ if index == 0:
+ leading_break = True
+ if index == len(scalar)-1:
+ trailing_break = True
+ if previous_space:
+ space_break = True
+ previous_space = False
+ previous_break = True
+ else:
+ previous_space = False
+ previous_break = False
+
+ # Prepare for the next character.
+ index += 1
+ preceded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
+ followed_by_whitespace = (index+1 >= len(scalar) or
+ scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
+
+ # Let's decide what styles are allowed.
+ allow_flow_plain = True
+ allow_block_plain = True
+ allow_single_quoted = True
+ allow_double_quoted = True
+ allow_block = True
+
+ # Leading and trailing whitespaces are bad for plain scalars.
+ if (leading_space or leading_break
+ or trailing_space or trailing_break):
+ allow_flow_plain = allow_block_plain = False
+
+ # We do not permit trailing spaces for block scalars.
+ if trailing_space:
+ allow_block = False
+
+ # Spaces at the beginning of a new line are only acceptable for block
+ # scalars.
+ if break_space:
+ allow_flow_plain = allow_block_plain = allow_single_quoted = False
+
+ # Spaces followed by breaks, as well as special characters, are only
+ # allowed for double-quoted scalars.
+ if space_break or special_characters:
+ allow_flow_plain = allow_block_plain = \
+ allow_single_quoted = allow_block = False
+
+ # Although the plain scalar writer supports breaks, we never emit
+ # multiline plain scalars.
+ if line_breaks:
+ allow_flow_plain = allow_block_plain = False
+
+ # Flow indicators are forbidden for flow plain scalars.
+ if flow_indicators:
+ allow_flow_plain = False
+
+ # Block indicators are forbidden for block plain scalars.
+ if block_indicators:
+ allow_block_plain = False
+
+ return ScalarAnalysis(scalar=scalar,
+ empty=False, multiline=line_breaks,
+ allow_flow_plain=allow_flow_plain,
+ allow_block_plain=allow_block_plain,
+ allow_single_quoted=allow_single_quoted,
+ allow_double_quoted=allow_double_quoted,
+ allow_block=allow_block)
+
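+ # Added illustration (not part of upstream PyYAML): given an Emitter
+ # instance `emitter`, the analysis behaves roughly like this:
+ #
+ #     analysis = emitter.analyze_scalar(u'hello world')
+ #     assert not analysis.multiline and analysis.allow_flow_plain
+ #     analysis = emitter.analyze_scalar(u'one\ntwo')
+ #     assert analysis.multiline and not analysis.allow_block_plain
+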
+ # Writers.
+
+ def flush_stream(self):
+ if hasattr(self.stream, 'flush'):
+ self.stream.flush()
+
+ def write_stream_start(self):
+ # Write BOM if needed.
+ if self.encoding and self.encoding.startswith('utf-16'):
+ self.stream.write(u'\uFEFF'.encode(self.encoding))
+
+ def write_stream_end(self):
+ self.flush_stream()
+
+ def write_indicator(self, indicator, need_whitespace,
+ whitespace=False, indention=False):
+ if self.whitespace or not need_whitespace:
+ data = indicator
+ else:
+ data = u' '+indicator
+ self.whitespace = whitespace
+ self.indention = self.indention and indention
+ self.column += len(data)
+ self.open_ended = False
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_indent(self):
+ indent = self.indent or 0
+ if not self.indention or self.column > indent \
+ or (self.column == indent and not self.whitespace):
+ self.write_line_break()
+ if self.column < indent:
+ self.whitespace = True
+ data = u' '*(indent-self.column)
+ self.column = indent
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_line_break(self, data=None):
+ if data is None:
+ data = self.best_line_break
+ self.whitespace = True
+ self.indention = True
+ self.line += 1
+ self.column = 0
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_version_directive(self, version_text):
+ data = u'%%YAML %s' % version_text
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ def write_tag_directive(self, handle_text, prefix_text):
+ data = u'%%TAG %s %s' % (handle_text, prefix_text)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ # Scalar streams.
+
+ def write_single_quoted(self, text, split=True):
+ self.write_indicator(u'\'', True)
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch is None or ch != u' ':
+ if start+1 == end and self.column > self.best_width and split \
+ and start != 0 and end != len(text):
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ if text[start] == u'\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch == u'\'':
+ data = u'\'\''
+ self.column += 2
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end + 1
+ if ch is not None:
+ spaces = (ch == u' ')
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
+ self.write_indicator(u'\'', False)
+
+ ESCAPE_REPLACEMENTS = {
+ u'\0': u'0',
+ u'\x07': u'a',
+ u'\x08': u'b',
+ u'\x09': u't',
+ u'\x0A': u'n',
+ u'\x0B': u'v',
+ u'\x0C': u'f',
+ u'\x0D': u'r',
+ u'\x1B': u'e',
+ u'\"': u'\"',
+ u'\\': u'\\',
+ u'\x85': u'N',
+ u'\xA0': u'_',
+ u'\u2028': u'L',
+ u'\u2029': u'P',
+ }
+
+ def write_double_quoted(self, text, split=True):
+ self.write_indicator(u'"', True)
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
+ or not (u'\x20' <= ch <= u'\x7E'
+ or (self.allow_unicode
+ and (u'\xA0' <= ch <= u'\uD7FF'
+ or u'\uE000' <= ch <= u'\uFFFD'))):
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ if ch in self.ESCAPE_REPLACEMENTS:
+ data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
+ elif ch <= u'\xFF':
+ data = u'\\x%02X' % ord(ch)
+ elif ch <= u'\uFFFF':
+ data = u'\\u%04X' % ord(ch)
+ else:
+ data = u'\\U%08X' % ord(ch)
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end+1
+ if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \
+ and self.column+(end-start) > self.best_width and split:
+ data = text[start:end]+u'\\'
+ if start < end:
+ start = end
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ if text[start] == u' ':
+ data = u'\\'
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ end += 1
+ self.write_indicator(u'"', False)
+
+ def determine_block_hints(self, text):
+ hints = u''
+ if text:
+ if text[0] in u' \n\x85\u2028\u2029':
+ hints += unicode(self.best_indent)
+ if text[-1] not in u'\n\x85\u2028\u2029':
+ hints += u'-'
+ elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
+ hints += u'+'
+ return hints
+
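+ # Added sketch (not part of upstream PyYAML): the hints string prefixes
+ # the block indicator; e.g., assuming self.best_indent == 2:
+ #
+ #     determine_block_hints(u'text\n')   -> u''   (single trailing break)
+ #     determine_block_hints(u'text')     -> u'-'  (strip: no trailing break)
+ #     determine_block_hints(u'text\n\n') -> u'+'  (keep extra trailing breaks)
+ #     determine_block_hints(u' text\n')  -> u'2'  (explicit indentation hint)
+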
+ def write_folded(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator(u'>'+hints, True)
+ if hints[-1:] == u'+':
+ self.open_ended = True
+ self.write_line_break()
+ leading_space = True
+ spaces = False
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ if not leading_space and ch is not None and ch != u' ' \
+ and text[start] == u'\n':
+ self.write_line_break()
+ leading_space = (ch == u' ')
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ elif spaces:
+ if ch != u' ':
+ if start+1 == end and self.column > self.best_width:
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ spaces = (ch == u' ')
+ end += 1
+
+ def write_literal(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator(u'|'+hints, True)
+ if hints[-1:] == u'+':
+ self.open_ended = True
+ self.write_line_break()
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in u'\n\x85\u2028\u2029':
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in u'\n\x85\u2028\u2029':
+ data = text[start:end]
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
+
+ def write_plain(self, text, split=True):
+ if self.root_context:
+ self.open_ended = True
+ if not text:
+ return
+ if not self.whitespace:
+ data = u' '
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.whitespace = False
+ self.indention = False
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch != u' ':
+ if start+1 == end and self.column > self.best_width and split:
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch not in u'\n\x85\u2028\u2029':
+ if text[start] == u'\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == u'\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ start = end
+ else:
+ if ch is None or ch in u' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ spaces = (ch == u' ')
+ breaks = (ch in u'\n\x85\u2028\u2029')
+ end += 1
+
diff --git a/python/pyyaml/lib/yaml/error.py b/python/pyyaml/lib/yaml/error.py
new file mode 100644
index 000000000..577686db5
--- /dev/null
+++ b/python/pyyaml/lib/yaml/error.py
@@ -0,0 +1,75 @@
+
+__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
+
+class Mark(object):
+
+ def __init__(self, name, index, line, column, buffer, pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self, indent=4, max_length=75):
+ if self.buffer is None:
+ return None
+ head = ''
+ start = self.pointer
+ while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029':
+ start -= 1
+ if self.pointer-start > max_length/2-1:
+ head = ' ... '
+ start += 5
+ break
+ tail = ''
+ end = self.pointer
+ while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
+ end += 1
+ if end-self.pointer > max_length/2-1:
+ tail = ' ... '
+ end -= 5
+ break
+ snippet = self.buffer[start:end].encode('utf-8')
+ return ' '*indent + head + snippet + tail + '\n' \
+ + ' '*(indent+self.pointer-start+len(head)) + '^'
+
+ def __str__(self):
+ snippet = self.get_snippet()
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ if snippet is not None:
+ where += ":\n"+snippet
+ return where
+
+class YAMLError(Exception):
+ pass
+
+class MarkedYAMLError(YAMLError):
+
+ def __init__(self, context=None, context_mark=None,
+ problem=None, problem_mark=None, note=None):
+ self.context = context
+ self.context_mark = context_mark
+ self.problem = problem
+ self.problem_mark = problem_mark
+ self.note = note
+
+ def __str__(self):
+ lines = []
+ if self.context is not None:
+ lines.append(self.context)
+ if self.context_mark is not None \
+ and (self.problem is None or self.problem_mark is None
+ or self.context_mark.name != self.problem_mark.name
+ or self.context_mark.line != self.problem_mark.line
+ or self.context_mark.column != self.problem_mark.column):
+ lines.append(str(self.context_mark))
+ if self.problem is not None:
+ lines.append(self.problem)
+ if self.problem_mark is not None:
+ lines.append(str(self.problem_mark))
+ if self.note is not None:
+ lines.append(self.note)
+ return '\n'.join(lines)
+
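+# Added sketch (not part of upstream PyYAML): str() of a MarkedYAMLError
+# joins the context, the marks, the problem, and the note, yielding roughly:
+#
+#     while parsing a flow sequence
+#       in "<string>", line 1, column 6:
+#         key: [unclosed
+#              ^
+#     expected ',' or ']', but got '<stream end>'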
diff --git a/python/pyyaml/lib/yaml/events.py b/python/pyyaml/lib/yaml/events.py
new file mode 100644
index 000000000..f79ad389c
--- /dev/null
+++ b/python/pyyaml/lib/yaml/events.py
@@ -0,0 +1,86 @@
+
+# Abstract classes.
+
+class Event(object):
+ def __init__(self, start_mark=None, end_mark=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
+ if hasattr(self, key)]
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+class NodeEvent(Event):
+ def __init__(self, anchor, start_mark=None, end_mark=None):
+ self.anchor = anchor
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class CollectionStartEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
+ flow_style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class CollectionEndEvent(Event):
+ pass
+
+# Implementations.
+
+class StreamStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None, encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndEvent(Event):
+ pass
+
+class DocumentStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None, version=None, tags=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+ self.version = version
+ self.tags = tags
+
+class DocumentEndEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+
+class AliasEvent(NodeEvent):
+ pass
+
+class ScalarEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, value,
+ start_mark=None, end_mark=None, style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class SequenceStartEvent(CollectionStartEvent):
+ pass
+
+class SequenceEndEvent(CollectionEndEvent):
+ pass
+
+class MappingStartEvent(CollectionStartEvent):
+ pass
+
+class MappingEndEvent(CollectionEndEvent):
+ pass
+
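+# Added illustration (not part of upstream PyYAML): parsing u'- a' yields
+# this event stream, which the emitter consumes in the same order to write
+# the document back out:
+#
+#     StreamStartEvent, DocumentStartEvent,
+#     SequenceStartEvent, ScalarEvent(value=u'a'), SequenceEndEvent,
+#     DocumentEndEvent, StreamEndEvent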
diff --git a/python/pyyaml/lib/yaml/loader.py b/python/pyyaml/lib/yaml/loader.py
new file mode 100644
index 000000000..293ff467b
--- /dev/null
+++ b/python/pyyaml/lib/yaml/loader.py
@@ -0,0 +1,40 @@
+
+__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
+
+from reader import *
+from scanner import *
+from parser import *
+from composer import *
+from constructor import *
+from resolver import *
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
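+# Added note (not part of upstream PyYAML): each loader is a plain mixin
+# stack; the stages cooperate through the methods they expose to each other
+# (check_token/get_token, check_event/get_event, and so on). A hedged usage
+# sketch:
+#
+#     loader = SafeLoader(open('document.yaml'))
+#     try:
+#         data = loader.get_single_data()
+#     finally:
+#         loader.dispose()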
diff --git a/python/pyyaml/lib/yaml/nodes.py b/python/pyyaml/lib/yaml/nodes.py
new file mode 100644
index 000000000..c4f070c41
--- /dev/null
+++ b/python/pyyaml/lib/yaml/nodes.py
@@ -0,0 +1,49 @@
+
+class Node(object):
+ def __init__(self, tag, value, start_mark, end_mark):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ value = self.value
+ #if isinstance(value, list):
+ # if len(value) == 0:
+ # value = '<empty>'
+ # elif len(value) == 1:
+ # value = '<1 item>'
+ # else:
+ # value = '<%d items>' % len(value)
+ #else:
+ # if len(value) > 75:
+ # value = repr(value[:70]+u' ... ')
+ # else:
+ # value = repr(value)
+ value = repr(value)
+ return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
+
+class ScalarNode(Node):
+ id = 'scalar'
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class CollectionNode(Node):
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, flow_style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class SequenceNode(CollectionNode):
+ id = 'sequence'
+
+class MappingNode(CollectionNode):
+ id = 'mapping'
+
diff --git a/python/pyyaml/lib/yaml/parser.py b/python/pyyaml/lib/yaml/parser.py
new file mode 100644
index 000000000..f9e3057f3
--- /dev/null
+++ b/python/pyyaml/lib/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+# ALIAS
+# | properties (block_content | indentless_block_sequence)?
+# | block_content
+# | indentless_block_sequence
+# block_node ::= ALIAS
+# | properties block_content?
+# | block_content
+# flow_node ::= ALIAS
+# | properties flow_content?
+# | flow_content
+# properties ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content ::= block_collection | flow_collection | SCALAR
+# flow_content ::= flow_collection | SCALAR
+# block_collection ::= block_sequence | block_mapping
+# flow_collection ::= flow_sequence | flow_mapping
+# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+# block_mapping ::= BLOCK-MAPPING_START
+# ((KEY block_node_or_indentless_sequence?)?
+# (VALUE block_node_or_indentless_sequence?)?)*
+# BLOCK-END
+# flow_sequence ::= FLOW-SEQUENCE-START
+# (flow_sequence_entry FLOW-ENTRY)*
+# flow_sequence_entry?
+# FLOW-SEQUENCE-END
+# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping ::= FLOW-MAPPING-START
+# (flow_mapping_entry FLOW-ENTRY)*
+# flow_mapping_entry?
+# FLOW-MAPPING-END
+# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
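+# Added illustration (not part of the original comment): for the document
+#
+#     key: [item, item]
+#
+# the derivation is stream -> implicit_document -> block_node ->
+# block_mapping; the mapping value is a flow_node that expands to
+# flow_sequence, and the parser emits matching *StartEvent/*EndEvent pairs
+# as it descends.
+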
+__all__ = ['Parser', 'ParserError']
+
+from error import MarkedYAMLError
+from tokens import *
+from events import *
+from scanner import *
+
+class ParserError(MarkedYAMLError):
+ pass
+
+class Parser(object):
+ # Since writing a recursive descent parser is a straightforward task, we
+ # do not provide many comments here.
+
+ DEFAULT_TAGS = {
+ u'!': u'!',
+ u'!!': u'tag:yaml.org,2002:',
+ }
+
+ def __init__(self):
+ self.current_event = None
+ self.yaml_version = None
+ self.tag_handles = {}
+ self.states = []
+ self.marks = []
+ self.state = self.parse_stream_start
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def check_event(self, *choices):
+ # Check the type of the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ if self.current_event is not None:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.current_event, choice):
+ return True
+ return False
+
+ def peek_event(self):
+ # Get the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ return self.current_event
+
+ def get_event(self):
+ # Get the next event and proceed further.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ value = self.current_event
+ self.current_event = None
+ return value
+
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # implicit_document ::= block_node DOCUMENT-END*
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+
+ def parse_stream_start(self):
+
+ # Parse the stream start.
+ token = self.get_token()
+ event = StreamStartEvent(token.start_mark, token.end_mark,
+ encoding=token.encoding)
+
+ # Prepare the next state.
+ self.state = self.parse_implicit_document_start
+
+ return event
+
+ def parse_implicit_document_start(self):
+
+ # Parse an implicit document.
+ if not self.check_token(DirectiveToken, DocumentStartToken,
+ StreamEndToken):
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
+
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
+
+ return event
+
+ else:
+ return self.parse_document_start()
+
+ def parse_document_start(self):
+
+ # Parse any extra document end indicators.
+ while self.check_token(DocumentEndToken):
+ self.get_token()
+
+ # Parse an explicit document.
+ if not self.check_token(StreamEndToken):
+ token = self.peek_token()
+ start_mark = token.start_mark
+ version, tags = self.process_directives()
+ if not self.check_token(DocumentStartToken):
+ raise ParserError(None, None,
+ "expected '<document start>', but found %r"
+ % self.peek_token().id,
+ self.peek_token().start_mark)
+ token = self.get_token()
+ end_mark = token.end_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=True, version=version, tags=tags)
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_document_content
+ else:
+ # Parse the end of the stream.
+ token = self.get_token()
+ event = StreamEndEvent(token.start_mark, token.end_mark)
+ assert not self.states
+ assert not self.marks
+ self.state = None
+ return event
+
+ def parse_document_end(self):
+
+ # Parse the document end.
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ explicit = False
+ if self.check_token(DocumentEndToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ explicit = True
+ event = DocumentEndEvent(start_mark, end_mark,
+ explicit=explicit)
+
+ # Prepare the next state.
+ self.state = self.parse_document_start
+
+ return event
+
+ def parse_document_content(self):
+ if self.check_token(DirectiveToken,
+ DocumentStartToken, DocumentEndToken, StreamEndToken):
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
+ else:
+ return self.parse_block_node()
+
+ def process_directives(self):
+ self.yaml_version = None
+ self.tag_handles = {}
+ while self.check_token(DirectiveToken):
+ token = self.get_token()
+ if token.name == u'YAML':
+ if self.yaml_version is not None:
+ raise ParserError(None, None,
+ "found duplicate YAML directive", token.start_mark)
+ major, minor = token.value
+ if major != 1:
+ raise ParserError(None, None,
+ "found incompatible YAML document (version 1.* is required)",
+ token.start_mark)
+ self.yaml_version = token.value
+ elif token.name == u'TAG':
+ handle, prefix = token.value
+ if handle in self.tag_handles:
+ raise ParserError(None, None,
+ "duplicate tag handle %r" % handle.encode('utf-8'),
+ token.start_mark)
+ self.tag_handles[handle] = prefix
+ if self.tag_handles:
+ value = self.yaml_version, self.tag_handles.copy()
+ else:
+ value = self.yaml_version, None
+ for key in self.DEFAULT_TAGS:
+ if key not in self.tag_handles:
+ self.tag_handles[key] = self.DEFAULT_TAGS[key]
+ return value
+
+ # block_node_or_indentless_sequence ::= ALIAS
+ # | properties (block_content | indentless_block_sequence)?
+ # | block_content
+ # | indentless_block_sequence
+ # block_node ::= ALIAS
+ # | properties block_content?
+ # | block_content
+ # flow_node ::= ALIAS
+ # | properties flow_content?
+ # | flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # flow_collection ::= flow_sequence | flow_mapping
+
+ def parse_block_node(self):
+ return self.parse_node(block=True)
+
+ def parse_flow_node(self):
+ return self.parse_node()
+
+ def parse_block_node_or_indentless_sequence(self):
+ return self.parse_node(block=True, indentless_sequence=True)
+
+ def parse_node(self, block=False, indentless_sequence=False):
+ if self.check_token(AliasToken):
+ token = self.get_token()
+ event = AliasEvent(token.value, token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ else:
+ anchor = None
+ tag = None
+ start_mark = end_mark = tag_mark = None
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ start_mark = token.start_mark
+ end_mark = token.end_mark
+ anchor = token.value
+ if self.check_token(TagToken):
+ token = self.get_token()
+ tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ elif self.check_token(TagToken):
+ token = self.get_token()
+ start_mark = tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ anchor = token.value
+ if tag is not None:
+ handle, suffix = tag
+ if handle is not None:
+ if handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle.encode('utf-8'),
+ tag_mark)
+ tag = self.tag_handles[handle]+suffix
+ else:
+ tag = suffix
+ #if tag == u'!':
+ # raise ParserError("while parsing a node", start_mark,
+ # "found non-specific tag '!'", tag_mark,
+ # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+ if start_mark is None:
+ start_mark = end_mark = self.peek_token().start_mark
+ event = None
+ implicit = (tag is None or tag == u'!')
+ if indentless_sequence and self.check_token(BlockEntryToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark)
+ self.state = self.parse_indentless_sequence_entry
+ else:
+ if self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == u'!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
+ else:
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), u'',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ if block:
+ node = 'block'
+ else:
+ node = 'flow'
+ token = self.peek_token()
+ raise ParserError("while parsing a %s node" % node, start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark)
+ return event
+
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+ def parse_block_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_sequence_entry()
+
+ def parse_block_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken, BlockEndToken):
+ self.states.append(self.parse_block_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_block_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block collection", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+ def parse_indentless_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken,
+ KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_indentless_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_indentless_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ token = self.peek_token()
+ event = SequenceEndEvent(token.start_mark, token.start_mark)
+ self.state = self.states.pop()
+ return event
+
+ # block_mapping ::= BLOCK-MAPPING_START
+ # ((KEY block_node_or_indentless_sequence?)?
+ # (VALUE block_node_or_indentless_sequence?)?)*
+ # BLOCK-END
+
+ def parse_block_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_mapping_key()
+
+ def parse_block_mapping_key(self):
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_value)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block mapping", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_block_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_key)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_block_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ # flow_sequence ::= FLOW-SEQUENCE-START
+ # (flow_sequence_entry FLOW-ENTRY)*
+ # flow_sequence_entry?
+ # FLOW-SEQUENCE-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ #
+ # Note that while the production rules for flow_sequence_entry and
+ # flow_mapping_entry are identical, their interpretations differ.
+ # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+ # generates an inline mapping (set syntax).
+
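+ # Added example (not part of upstream PyYAML): in u'[ a: b ]' the
+ # KEY/VALUE pair inside the flow sequence produces a single-pair inline
+ # mapping, i.e. the same data as [{'a': 'b'}].
+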
+ def parse_flow_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_sequence_entry(first=True)
+
+ def parse_flow_sequence_entry(self, first=False):
+ if not self.check_token(FlowSequenceEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow sequence", self.marks[-1],
+ "expected ',' or ']', but got %r" % token.id, token.start_mark)
+
+ if self.check_token(KeyToken):
+ token = self.peek_token()
+ event = MappingStartEvent(None, None, True,
+ token.start_mark, token.end_mark,
+ flow_style=True)
+ self.state = self.parse_flow_sequence_entry_mapping_key
+ return event
+ elif not self.check_token(FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_sequence_entry_mapping_key(self):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+
+ def parse_flow_sequence_entry_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_end)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_sequence_entry_mapping_end(self):
+ self.state = self.parse_flow_sequence_entry
+ token = self.peek_token()
+ return MappingEndEvent(token.start_mark, token.start_mark)
+
+ # flow_mapping ::= FLOW-MAPPING-START
+ # (flow_mapping_entry FLOW-ENTRY)*
+ # flow_mapping_entry?
+ # FLOW-MAPPING-END
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ def parse_flow_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_mapping_key(first=True)
+
+ def parse_flow_mapping_key(self, first=False):
+ if not self.check_token(FlowMappingEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow mapping", self.marks[-1],
+ "expected ',' or '}', but got %r" % token.id, token.start_mark)
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ elif not self.check_token(FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_empty_value)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_key)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_mapping_empty_value(self):
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(self.peek_token().start_mark)
+
+ def process_empty_scalar(self, mark):
+ return ScalarEvent(None, None, (True, False), u'', mark, mark)
+
diff --git a/python/pyyaml/lib/yaml/reader.py b/python/pyyaml/lib/yaml/reader.py
new file mode 100644
index 000000000..3249e6b9f
--- /dev/null
+++ b/python/pyyaml/lib/yaml/reader.py
@@ -0,0 +1,190 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code here.
+#
+# We define two classes here.
+#
+# Mark(source, line, column)
+# It's just a record and its only use is producing nice error messages.
+# The parser does not use it for any other purpose.
+#
+# Reader(stream)
+# Reader determines the encoding of `stream` and converts it to unicode.
+# Reader provides the following methods and attributes:
+# reader.peek(index=0) - return the character `index` positions ahead
+# reader.prefix(length=1) - return the next `length` characters
+# reader.forward(length=1) - advance the current position by `length` characters
+# reader.index - the number of the current character
+# reader.line, reader.column - the line and the column of the current character
+
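+# Added usage sketch (not part of upstream PyYAML):
+#
+#     reader = Reader(u'hello')
+#     reader.peek()      # -> u'h' (the position does not move)
+#     reader.prefix(5)   # -> u'hello'
+#     reader.forward(5)  # now reader.peek() -> u'\0' (appended terminator)
+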
+__all__ = ['Reader', 'ReaderError']
+
+from error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+ def __init__(self, name, position, character, encoding, reason):
+ self.name = name
+ self.character = character
+ self.position = position
+ self.encoding = encoding
+ self.reason = reason
+
+ def __str__(self):
+ if isinstance(self.character, str):
+ return "'%s' codec can't decode byte #x%02x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.encoding, ord(self.character), self.reason,
+ self.name, self.position)
+ else:
+ return "unacceptable character #x%04x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.character, self.reason,
+ self.name, self.position)
+
+class Reader(object):
+ # Reader:
+ # - determines the data encoding and converts it to unicode,
+ # - checks if characters are in allowed range,
+ # - adds '\0' to the end.
+
+ # Reader accepts
+ # - a `str` object,
+ # - a `unicode` object,
+ # - a file-like object with its `read` method returning `str`,
+ # - a file-like object with its `read` method returning `unicode`.
+
+ # Yeah, it's ugly and slow.
+
+ def __init__(self, stream):
+ self.name = None
+ self.stream = None
+ self.stream_pointer = 0
+ self.eof = True
+ self.buffer = u''
+ self.pointer = 0
+ self.raw_buffer = None
+ self.raw_decode = None
+ self.encoding = None
+ self.index = 0
+ self.line = 0
+ self.column = 0
+ if isinstance(stream, unicode):
+ self.name = "<unicode string>"
+ self.check_printable(stream)
+ self.buffer = stream+u'\0'
+ elif isinstance(stream, str):
+ self.name = "<string>"
+ self.raw_buffer = stream
+ self.determine_encoding()
+ else:
+ self.stream = stream
+ self.name = getattr(stream, 'name', "<file>")
+ self.eof = False
+ self.raw_buffer = ''
+ self.determine_encoding()
+
+ def peek(self, index=0):
+ try:
+ return self.buffer[self.pointer+index]
+ except IndexError:
+ self.update(index+1)
+ return self.buffer[self.pointer+index]
+
+ def prefix(self, length=1):
+ if self.pointer+length >= len(self.buffer):
+ self.update(length)
+ return self.buffer[self.pointer:self.pointer+length]
+
+ def forward(self, length=1):
+ if self.pointer+length+1 >= len(self.buffer):
+ self.update(length+1)
+ while length:
+ ch = self.buffer[self.pointer]
+ self.pointer += 1
+ self.index += 1
+ if ch in u'\n\x85\u2028\u2029' \
+ or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
+ self.line += 1
+ self.column = 0
+ elif ch != u'\uFEFF':
+ self.column += 1
+ length -= 1
+
+ def get_mark(self):
+ if self.stream is None:
+ return Mark(self.name, self.index, self.line, self.column,
+ self.buffer, self.pointer)
+ else:
+ return Mark(self.name, self.index, self.line, self.column,
+ None, None)
+
+ def determine_encoding(self):
+ while not self.eof and len(self.raw_buffer) < 2:
+ self.update_raw()
+ if not isinstance(self.raw_buffer, unicode):
+ if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+ self.raw_decode = codecs.utf_16_le_decode
+ self.encoding = 'utf-16-le'
+ elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+ self.raw_decode = codecs.utf_16_be_decode
+ self.encoding = 'utf-16-be'
+ else:
+ self.raw_decode = codecs.utf_8_decode
+ self.encoding = 'utf-8'
+ self.update(1)
+
+ NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
+ def check_printable(self, data):
+ match = self.NON_PRINTABLE.search(data)
+ if match:
+ character = match.group()
+ position = self.index+(len(self.buffer)-self.pointer)+match.start()
+ raise ReaderError(self.name, position, ord(character),
+ 'unicode', "special characters are not allowed")
+
+ def update(self, length):
+ if self.raw_buffer is None:
+ return
+ self.buffer = self.buffer[self.pointer:]
+ self.pointer = 0
+ while len(self.buffer) < length:
+ if not self.eof:
+ self.update_raw()
+ if self.raw_decode is not None:
+ try:
+ data, converted = self.raw_decode(self.raw_buffer,
+ 'strict', self.eof)
+ except UnicodeDecodeError, exc:
+ character = exc.object[exc.start]
+ if self.stream is not None:
+ position = self.stream_pointer-len(self.raw_buffer)+exc.start
+ else:
+ position = exc.start
+ raise ReaderError(self.name, position, character,
+ exc.encoding, exc.reason)
+ else:
+ data = self.raw_buffer
+ converted = len(data)
+ self.check_printable(data)
+ self.buffer += data
+ self.raw_buffer = self.raw_buffer[converted:]
+ if self.eof:
+ self.buffer += u'\0'
+ self.raw_buffer = None
+ break
+
+ def update_raw(self, size=1024):
+ data = self.stream.read(size)
+ if data:
+ self.raw_buffer += data
+ self.stream_pointer += len(data)
+ else:
+ self.eof = True
+
+#try:
+# import psyco
+# psyco.bind(Reader)
+#except ImportError:
+# pass
+
diff --git a/python/pyyaml/lib/yaml/representer.py b/python/pyyaml/lib/yaml/representer.py
new file mode 100644
index 000000000..5f4fc70db
--- /dev/null
+++ b/python/pyyaml/lib/yaml/representer.py
@@ -0,0 +1,484 @@
+
+__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
+ 'RepresenterError']
+
+from error import *
+from nodes import *
+
+import datetime
+
+import sys, copy_reg, types
+
+class RepresenterError(YAMLError):
+ pass
+
+class BaseRepresenter(object):
+
+ yaml_representers = {}
+ yaml_multi_representers = {}
+
+ def __init__(self, default_style=None, default_flow_style=None):
+ self.default_style = default_style
+ self.default_flow_style = default_flow_style
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent(self, data):
+ node = self.represent_data(data)
+ self.serialize(node)
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def get_classobj_bases(self, cls):
+ bases = [cls]
+ for base in cls.__bases__:
+ bases.extend(self.get_classobj_bases(base))
+ return bases
+
+ def represent_data(self, data):
+ if self.ignore_aliases(data):
+ self.alias_key = None
+ else:
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
+ return node
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
+ data_types = type(data).__mro__
+ if type(data) is types.InstanceType:
+ data_types = self.get_classobj_bases(data.__class__)+list(data_types)
+ if data_types[0] in self.yaml_representers:
+ node = self.yaml_representers[data_types[0]](self, data)
+ else:
+ for data_type in data_types:
+ if data_type in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[data_type](self, data)
+ break
+ else:
+ if None in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[None](self, data)
+ elif None in self.yaml_representers:
+ node = self.yaml_representers[None](self, data)
+ else:
+ node = ScalarNode(None, unicode(data))
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
+ return node
+
+ def add_representer(cls, data_type, representer):
+ if not 'yaml_representers' in cls.__dict__:
+ cls.yaml_representers = cls.yaml_representers.copy()
+ cls.yaml_representers[data_type] = representer
+ add_representer = classmethod(add_representer)
+
+ def add_multi_representer(cls, data_type, representer):
+ if not 'yaml_multi_representers' in cls.__dict__:
+ cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
+ cls.yaml_multi_representers[data_type] = representer
+ add_multi_representer = classmethod(add_multi_representer)
+
+ def represent_scalar(self, tag, value, style=None):
+ if style is None:
+ style = self.default_style
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
+
+ def represent_sequence(self, tag, sequence, flow_style=None):
+ value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ for item in sequence:
+ node_item = self.represent_data(item)
+ if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ best_style = False
+ value.append(node_item)
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ mapping = mapping.items()
+ mapping.sort()
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def ignore_aliases(self, data):
+ return False
+
+class SafeRepresenter(BaseRepresenter):
+
+ def ignore_aliases(self, data):
+ if data in [None, ()]:
+ return True
+ if isinstance(data, (str, unicode, bool, int, float)):
+ return True
+
+ def represent_none(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:null',
+ u'null')
+
+ def represent_str(self, data):
+ tag = None
+ style = None
+ try:
+ data = unicode(data, 'ascii')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ try:
+ data = unicode(data, 'utf-8')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ data = data.encode('base64')
+ tag = u'tag:yaml.org,2002:binary'
+ style = '|'
+ return self.represent_scalar(tag, data, style=style)
+
+ def represent_unicode(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:str', data)
+
+ def represent_bool(self, data):
+ if data:
+ value = u'true'
+ else:
+ value = u'false'
+ return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
+
+ def represent_int(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
+
+ def represent_long(self, data):
+ return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
+
+ inf_value = 1e300
+ while repr(inf_value) != repr(inf_value*inf_value):
+ inf_value *= inf_value
+
+ def represent_float(self, data):
+ # `data != data` detects NaN; the second clause catches platforms on
+ # which NaN compares equal to every number.
+ if data != data or (data == 0.0 and data == 1.0):
+ value = u'.nan'
+ elif data == self.inf_value:
+ value = u'.inf'
+ elif data == -self.inf_value:
+ value = u'-.inf'
+ else:
+ value = unicode(repr(data)).lower()
+ # Note that in some cases `repr(data)` represents a float number
+ # without the decimal part. For instance:
+ # >>> repr(1e17)
+ # '1e17'
+ # Unfortunately, this is not a valid float representation according
+ # to the definition of the `!!float` tag. We fix this by adding
+ # '.0' before the 'e' symbol.
+ if u'.' not in value and u'e' in value:
+ value = value.replace(u'e', u'.0e', 1)
+ return self.represent_scalar(u'tag:yaml.org,2002:float', value)
+
+ def represent_list(self, data):
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
+ return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+
+ def represent_dict(self, data):
+ return self.represent_mapping(u'tag:yaml.org,2002:map', data)
+
+ def represent_set(self, data):
+ value = {}
+ for key in data:
+ value[key] = None
+ return self.represent_mapping(u'tag:yaml.org,2002:set', value)
+
+ def represent_date(self, data):
+ value = unicode(data.isoformat())
+ return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+
+ def represent_datetime(self, data):
+ value = unicode(data.isoformat(' '))
+ return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
+
+ def represent_yaml_object(self, tag, data, cls, flow_style=None):
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ return self.represent_mapping(tag, state, flow_style=flow_style)
+
+ def represent_undefined(self, data):
+ raise RepresenterError("cannot represent an object: %s" % data)
+
+SafeRepresenter.add_representer(type(None),
+ SafeRepresenter.represent_none)
+
+SafeRepresenter.add_representer(str,
+ SafeRepresenter.represent_str)
+
+SafeRepresenter.add_representer(unicode,
+ SafeRepresenter.represent_unicode)
+
+SafeRepresenter.add_representer(bool,
+ SafeRepresenter.represent_bool)
+
+SafeRepresenter.add_representer(int,
+ SafeRepresenter.represent_int)
+
+SafeRepresenter.add_representer(long,
+ SafeRepresenter.represent_long)
+
+SafeRepresenter.add_representer(float,
+ SafeRepresenter.represent_float)
+
+SafeRepresenter.add_representer(list,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(tuple,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(dict,
+ SafeRepresenter.represent_dict)
+
+SafeRepresenter.add_representer(set,
+ SafeRepresenter.represent_set)
+
+SafeRepresenter.add_representer(datetime.date,
+ SafeRepresenter.represent_date)
+
+SafeRepresenter.add_representer(datetime.datetime,
+ SafeRepresenter.represent_datetime)
+
+SafeRepresenter.add_representer(None,
+ SafeRepresenter.represent_undefined)
+
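+# Illustrative note (not part of the upstream module): the registrations
+# above are the dispatch table behind yaml.safe_dump. For instance, with
+# the historical default flow style:
+#     >>> yaml.safe_dump({'answer': 42})
+#     '{answer: 42}\n'
+# A type with no registered representer falls through to the None entry,
+# i.e. represent_undefined, and raises RepresenterError.
+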
+class Representer(SafeRepresenter):
+
+ def represent_str(self, data):
+ tag = None
+ style = None
+ try:
+ data = unicode(data, 'ascii')
+ tag = u'tag:yaml.org,2002:str'
+ except UnicodeDecodeError:
+ try:
+ data = unicode(data, 'utf-8')
+ tag = u'tag:yaml.org,2002:python/str'
+ except UnicodeDecodeError:
+ data = data.encode('base64')
+ tag = u'tag:yaml.org,2002:binary'
+ style = '|'
+ return self.represent_scalar(tag, data, style=style)
+
+ def represent_unicode(self, data):
+ tag = None
+ try:
+ data.encode('ascii')
+ tag = u'tag:yaml.org,2002:python/unicode'
+ except UnicodeEncodeError:
+ tag = u'tag:yaml.org,2002:str'
+ return self.represent_scalar(tag, data)
+
+ def represent_long(self, data):
+ tag = u'tag:yaml.org,2002:int'
+ if int(data) is not data:
+ tag = u'tag:yaml.org,2002:python/long'
+ return self.represent_scalar(tag, unicode(data))
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = u'%r' % data.real
+ elif data.real == 0.0:
+ data = u'%rj' % data.imag
+ elif data.imag > 0:
+ data = u'%r+%rj' % (data.real, data.imag)
+ else:
+ data = u'%r%rj' % (data.real, data.imag)
+ return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
+
+ def represent_tuple(self, data):
+ return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
+
+ def represent_name(self, data):
+ name = u'%s.%s' % (data.__module__, data.__name__)
+ return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
+
+ def represent_module(self, data):
+ return self.represent_scalar(
+ u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
+
+ def represent_instance(self, data):
+ # For instances of classic classes, we use __getinitargs__ and
+ # __getstate__ to serialize the data.
+
+ # If data.__getinitargs__ exists, the object must be reconstructed by
+    # calling cls(*args), where args is a tuple returned by
+ # __getinitargs__. Otherwise, the cls.__init__ method should never be
+ # called and the class instance is created by instantiating a trivial
+ # class and assigning to the instance's __class__ variable.
+
+ # If data.__getstate__ exists, it returns the state of the object.
+ # Otherwise, the state of the object is data.__dict__.
+
+ # We produce either a !!python/object or !!python/object/new node.
+ # If data.__getinitargs__ does not exist and state is a dictionary, we
+    # produce a !!python/object node. Otherwise we produce a
+ # !!python/object/new node.
+
+ cls = data.__class__
+ class_name = u'%s.%s' % (cls.__module__, cls.__name__)
+ args = None
+ state = None
+ if hasattr(data, '__getinitargs__'):
+ args = list(data.__getinitargs__())
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__
+ if args is None and isinstance(state, dict):
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object:'+class_name, state)
+ if isinstance(state, dict) and not state:
+ return self.represent_sequence(
+ u'tag:yaml.org,2002:python/object/new:'+class_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ value['state'] = state
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object/new:'+class_name, value)
+
+ def represent_object(self, data):
+ # We use __reduce__ API to save the data. data.__reduce__ returns
+ # a tuple of length 2-5:
+ # (function, args, state, listitems, dictitems)
+
+        # For reconstructing, we call function(*args), then set its state,
+ # listitems, and dictitems if they are not None.
+
+ # A special case is when function.__name__ == '__newobj__'. In this
+ # case we create the object with args[0].__new__(*args).
+
+ # Another special case is when __reduce__ returns a string - we don't
+ # support it.
+
+ # We produce a !!python/object, !!python/object/new or
+ # !!python/object/apply node.
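+        # Sketch: an object whose __reduce_ex__ returns (cls, args) with no
+        # state ends up as a sequence node, e.g.
+        #     !!python/object/apply:some.module.Cls [arg1, arg2]
+        # (names are illustrative; the exact node depends on the reduce tuple).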
+
+ cls = type(data)
+ if cls in copy_reg.dispatch_table:
+ reduce = copy_reg.dispatch_table[cls](data)
+ elif hasattr(data, '__reduce_ex__'):
+ reduce = data.__reduce_ex__(2)
+ elif hasattr(data, '__reduce__'):
+ reduce = data.__reduce__()
+ else:
+ raise RepresenterError("cannot represent object: %r" % data)
+ reduce = (list(reduce)+[None]*5)[:5]
+ function, args, state, listitems, dictitems = reduce
+ args = list(args)
+ if state is None:
+ state = {}
+ if listitems is not None:
+ listitems = list(listitems)
+ if dictitems is not None:
+ dictitems = dict(dictitems)
+ if function.__name__ == '__newobj__':
+ function = args[0]
+ args = args[1:]
+ tag = u'tag:yaml.org,2002:python/object/new:'
+ newobj = True
+ else:
+ tag = u'tag:yaml.org,2002:python/object/apply:'
+ newobj = False
+ function_name = u'%s.%s' % (function.__module__, function.__name__)
+ if not args and not listitems and not dictitems \
+ and isinstance(state, dict) and newobj:
+ return self.represent_mapping(
+ u'tag:yaml.org,2002:python/object:'+function_name, state)
+ if not listitems and not dictitems \
+ and isinstance(state, dict) and not state:
+ return self.represent_sequence(tag+function_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ if state or not isinstance(state, dict):
+ value['state'] = state
+ if listitems:
+ value['listitems'] = listitems
+ if dictitems:
+ value['dictitems'] = dictitems
+ return self.represent_mapping(tag+function_name, value)
+
+Representer.add_representer(str,
+ Representer.represent_str)
+
+Representer.add_representer(unicode,
+ Representer.represent_unicode)
+
+Representer.add_representer(long,
+ Representer.represent_long)
+
+Representer.add_representer(complex,
+ Representer.represent_complex)
+
+Representer.add_representer(tuple,
+ Representer.represent_tuple)
+
+Representer.add_representer(type,
+ Representer.represent_name)
+
+Representer.add_representer(types.ClassType,
+ Representer.represent_name)
+
+Representer.add_representer(types.FunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.BuiltinFunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.ModuleType,
+ Representer.represent_module)
+
+Representer.add_multi_representer(types.InstanceType,
+ Representer.represent_instance)
+
+Representer.add_multi_representer(object,
+ Representer.represent_object)
+
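+# Unlike SafeRepresenter, this class emits Python-specific tags; e.g.
+# (illustrative) yaml.dump((1, 2)) produces '!!python/tuple [1, 2]\n'.
+# Its constructor-side counterpart will execute arbitrary code via
+# __reduce__, which is why plain yaml.load is unsafe on untrusted input.
+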
diff --git a/python/pyyaml/lib/yaml/resolver.py b/python/pyyaml/lib/yaml/resolver.py
new file mode 100644
index 000000000..6b5ab8759
--- /dev/null
+++ b/python/pyyaml/lib/yaml/resolver.py
@@ -0,0 +1,224 @@
+
+__all__ = ['BaseResolver', 'Resolver']
+
+from error import *
+from nodes import *
+
+import re
+
+class ResolverError(YAMLError):
+ pass
+
+class BaseResolver(object):
+
+ DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
+
+ yaml_implicit_resolvers = {}
+ yaml_path_resolvers = {}
+
+ def __init__(self):
+ self.resolver_exact_paths = []
+ self.resolver_prefix_paths = []
+
+ def add_implicit_resolver(cls, tag, regexp, first):
+ if not 'yaml_implicit_resolvers' in cls.__dict__:
+ cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+ if first is None:
+ first = [None]
+ for ch in first:
+ cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+ add_implicit_resolver = classmethod(add_implicit_resolver)
+
+ def add_path_resolver(cls, tag, path, kind=None):
+ # Note: `add_path_resolver` is experimental. The API could be changed.
+        # `path` is a pattern that is matched against the path from the
+        # root to the node that is being considered. `path` elements are
+ # tuples `(node_check, index_check)`. `node_check` is a node class:
+ # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
+ # matches any kind of a node. `index_check` could be `None`, a boolean
+ # value, a string value, or a number. `None` and `False` match against
+ # any _value_ of sequence and mapping nodes. `True` matches against
+ # any _key_ of a mapping node. A string `index_check` matches against
+        # a mapping value that corresponds to a scalar key whose content is
+ # equal to the `index_check` value. An integer `index_check` matches
+ # against a sequence value with the index equal to `index_check`.
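+        # Illustrative use (the subclass name is hypothetical):
+        #     MyResolver.add_path_resolver(u'tag:yaml.org,2002:str',
+        #         [(dict, u'version')], str)
+        # forces the value of a top-level 'version' key to resolve as a string.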
+ if not 'yaml_path_resolvers' in cls.__dict__:
+ cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ new_path = []
+ for element in path:
+ if isinstance(element, (list, tuple)):
+ if len(element) == 2:
+ node_check, index_check = element
+ elif len(element) == 1:
+ node_check = element[0]
+ index_check = True
+ else:
+ raise ResolverError("Invalid path element: %s" % element)
+ else:
+ node_check = None
+ index_check = element
+ if node_check is str:
+ node_check = ScalarNode
+ elif node_check is list:
+ node_check = SequenceNode
+ elif node_check is dict:
+ node_check = MappingNode
+ elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
+ and not isinstance(node_check, basestring) \
+ and node_check is not None:
+ raise ResolverError("Invalid node checker: %s" % node_check)
+ if not isinstance(index_check, (basestring, int)) \
+ and index_check is not None:
+ raise ResolverError("Invalid index checker: %s" % index_check)
+ new_path.append((node_check, index_check))
+ if kind is str:
+ kind = ScalarNode
+ elif kind is list:
+ kind = SequenceNode
+ elif kind is dict:
+ kind = MappingNode
+ elif kind not in [ScalarNode, SequenceNode, MappingNode] \
+ and kind is not None:
+ raise ResolverError("Invalid node kind: %s" % kind)
+ cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+ add_path_resolver = classmethod(add_path_resolver)
+
+ def descend_resolver(self, current_node, current_index):
+ if not self.yaml_path_resolvers:
+ return
+ exact_paths = {}
+ prefix_paths = []
+ if current_node:
+ depth = len(self.resolver_prefix_paths)
+ for path, kind in self.resolver_prefix_paths[-1]:
+ if self.check_resolver_prefix(depth, path, kind,
+ current_node, current_index):
+ if len(path) > depth:
+ prefix_paths.append((path, kind))
+ else:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ for path, kind in self.yaml_path_resolvers:
+ if not path:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ prefix_paths.append((path, kind))
+ self.resolver_exact_paths.append(exact_paths)
+ self.resolver_prefix_paths.append(prefix_paths)
+
+ def ascend_resolver(self):
+ if not self.yaml_path_resolvers:
+ return
+ self.resolver_exact_paths.pop()
+ self.resolver_prefix_paths.pop()
+
+ def check_resolver_prefix(self, depth, path, kind,
+ current_node, current_index):
+ node_check, index_check = path[depth-1]
+ if isinstance(node_check, basestring):
+ if current_node.tag != node_check:
+ return
+ elif node_check is not None:
+ if not isinstance(current_node, node_check):
+ return
+ if index_check is True and current_index is not None:
+ return
+ if (index_check is False or index_check is None) \
+ and current_index is None:
+ return
+ if isinstance(index_check, basestring):
+ if not (isinstance(current_index, ScalarNode)
+ and index_check == current_index.value):
+ return
+ elif isinstance(index_check, int) and not isinstance(index_check, bool):
+ if index_check != current_index:
+ return
+ return True
+
+ def resolve(self, kind, value, implicit):
+ if kind is ScalarNode and implicit[0]:
+ if value == u'':
+ resolvers = self.yaml_implicit_resolvers.get(u'', [])
+ else:
+ resolvers = self.yaml_implicit_resolvers.get(value[0], [])
+ resolvers += self.yaml_implicit_resolvers.get(None, [])
+ for tag, regexp in resolvers:
+ if regexp.match(value):
+ return tag
+ implicit = implicit[1]
+ if self.yaml_path_resolvers:
+ exact_paths = self.resolver_exact_paths[-1]
+ if kind in exact_paths:
+ return exact_paths[kind]
+ if None in exact_paths:
+ return exact_paths[None]
+ if kind is ScalarNode:
+ return self.DEFAULT_SCALAR_TAG
+ elif kind is SequenceNode:
+ return self.DEFAULT_SEQUENCE_TAG
+ elif kind is MappingNode:
+ return self.DEFAULT_MAPPING_TAG
+
+class Resolver(BaseResolver):
+ pass
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:bool',
+ re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
+ |true|True|TRUE|false|False|FALSE
+ |on|On|ON|off|Off|OFF)$''', re.X),
+ list(u'yYnNtTfFoO'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:float',
+ re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
+ |\.[0-9_]+(?:[eE][-+][0-9]+)?
+ |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
+ |[-+]?\.(?:inf|Inf|INF)
+ |\.(?:nan|NaN|NAN))$''', re.X),
+ list(u'-+0123456789.'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:int',
+ re.compile(ur'''^(?:[-+]?0b[0-1_]+
+ |[-+]?0[0-7_]+
+ |[-+]?(?:0|[1-9][0-9_]*)
+ |[-+]?0x[0-9a-fA-F_]+
+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
+ list(u'-+0123456789'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:merge',
+ re.compile(ur'^(?:<<)$'),
+ [u'<'])
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:null',
+ re.compile(ur'''^(?: ~
+ |null|Null|NULL
+ | )$''', re.X),
+ [u'~', u'n', u'N', u''])
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:timestamp',
+ re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
+ (?:[Tt]|[ \t]+)[0-9][0-9]?
+ :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
+ (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
+ list(u'0123456789'))
+
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:value',
+ re.compile(ur'^(?:=)$'),
+ [u'='])
+
+# The following resolver is only for documentation purposes. It cannot work
+# because plain scalars cannot start with '!', '&', or '*'.
+Resolver.add_implicit_resolver(
+ u'tag:yaml.org,2002:yaml',
+ re.compile(ur'^(?:!|&|\*)$'),
+ list(u'!&*'))
+
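+# A short illustrative session with the registrations above (ScalarNode
+# is imported from nodes at the top of this module):
+#     >>> r = Resolver()
+#     >>> r.resolve(ScalarNode, u'123', (True, False))
+#     u'tag:yaml.org,2002:int'
+#     >>> r.resolve(ScalarNode, u'123', (False, False))   # quoted/explicit
+#     u'tag:yaml.org,2002:str'
+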
diff --git a/python/pyyaml/lib/yaml/scanner.py b/python/pyyaml/lib/yaml/scanner.py
new file mode 100644
index 000000000..5228fad65
--- /dev/null
+++ b/python/pyyaml/lib/yaml/scanner.py
@@ -0,0 +1,1457 @@
+
+# Scanner produces tokens of the following types:
+# STREAM-START
+# STREAM-END
+# DIRECTIVE(name, value)
+# DOCUMENT-START
+# DOCUMENT-END
+# BLOCK-SEQUENCE-START
+# BLOCK-MAPPING-START
+# BLOCK-END
+# FLOW-SEQUENCE-START
+# FLOW-MAPPING-START
+# FLOW-SEQUENCE-END
+# FLOW-MAPPING-END
+# BLOCK-ENTRY
+# FLOW-ENTRY
+# KEY
+# VALUE
+# ALIAS(value)
+# ANCHOR(value)
+# TAG(value)
+# SCALAR(value, plain, style)
+#
+# Read comments in the Scanner code for more details.
+#
+
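+# Illustrative token stream (token classes live in tokens.py): scanning
+# the document 'a: 1' yields, in order, STREAM-START,
+# BLOCK-MAPPING-START, KEY, SCALAR(u'a'), VALUE, SCALAR(u'1'),
+# BLOCK-END and STREAM-END.
+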
+__all__ = ['Scanner', 'ScannerError']
+
+from error import MarkedYAMLError
+from tokens import *
+
+class ScannerError(MarkedYAMLError):
+ pass
+
+class SimpleKey(object):
+ # See below simple keys treatment.
+
+ def __init__(self, token_number, required, index, line, column, mark):
+ self.token_number = token_number
+ self.required = required
+ self.index = index
+ self.line = line
+ self.column = column
+ self.mark = mark
+
+class Scanner(object):
+
+ def __init__(self):
+ """Initialize the scanner."""
+ # It is assumed that Scanner and Reader will have a common descendant.
+        # Reader does the dirty work of checking for BOM and converting the
+ # input data to Unicode. It also adds NUL to the end.
+ #
+        # Reader supports the following methods:
+ # self.peek(i=0) # peek the next i-th character
+ # self.prefix(l=1) # peek the next l characters
+ # self.forward(l=1) # read the next l characters and move the pointer.
+
+        # Have we reached the end of the stream?
+ self.done = False
+
+ # The number of unclosed '{' and '['. `flow_level == 0` means block
+ # context.
+ self.flow_level = 0
+
+ # List of processed tokens that are not yet emitted.
+ self.tokens = []
+
+ # Add the STREAM-START token.
+ self.fetch_stream_start()
+
+ # Number of tokens that were emitted through the `get_token` method.
+ self.tokens_taken = 0
+
+ # The current indentation level.
+ self.indent = -1
+
+ # Past indentation levels.
+ self.indents = []
+
+ # Variables related to simple keys treatment.
+
+ # A simple key is a key that is not denoted by the '?' indicator.
+ # Example of simple keys:
+ # ---
+ # block simple key: value
+ # ? not a simple key:
+ # : { flow simple key: value }
+ # We emit the KEY token before all keys, so when we find a potential
+ # simple key, we try to locate the corresponding ':' indicator.
+ # Simple keys should be limited to a single line and 1024 characters.
+
+ # Can a simple key start at the current position? A simple key may
+ # start:
+ # - at the beginning of the line, not counting indentation spaces
+ # (in block context),
+ # - after '{', '[', ',' (in the flow context),
+ # - after '?', ':', '-' (in the block context).
+ # In the block context, this flag also signifies if a block collection
+ # may start at the current position.
+ self.allow_simple_key = True
+
+ # Keep track of possible simple keys. This is a dictionary. The key
+        # is `flow_level`; there can be no more than one possible simple key
+ # for each level. The value is a SimpleKey record:
+ # (token_number, required, index, line, column, mark)
+ # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+ # '[', or '{' tokens.
+ self.possible_simple_keys = {}
+
+ # Public methods.
+
+ def check_token(self, *choices):
+ # Check if the next token is one of the given types.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+        # Return the next token, but do not remove it from the queue.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ return self.tokens[0]
+
+ def get_token(self):
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ # Private methods.
+
+ def need_more_tokens(self):
+ if self.done:
+ return False
+ if not self.tokens:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+
+ def fetch_more_tokens(self):
+
+ # Eat whitespaces and comments until we reach the next token.
+ self.scan_to_next_token()
+
+ # Remove obsolete possible simple keys.
+ self.stale_possible_simple_keys()
+
+ # Compare the current indentation and column. It may add some tokens
+ # and decrease the current indentation level.
+ self.unwind_indent(self.column)
+
+ # Peek the next character.
+ ch = self.peek()
+
+ # Is it the end of stream?
+ if ch == u'\0':
+ return self.fetch_stream_end()
+
+ # Is it a directive?
+ if ch == u'%' and self.check_directive():
+ return self.fetch_directive()
+
+ # Is it the document start?
+ if ch == u'-' and self.check_document_start():
+ return self.fetch_document_start()
+
+ # Is it the document end?
+ if ch == u'.' and self.check_document_end():
+ return self.fetch_document_end()
+
+ # TODO: support for BOM within a stream.
+ #if ch == u'\uFEFF':
+ # return self.fetch_bom() <-- issue BOMToken
+
+ # Note: the order of the following checks is NOT significant.
+
+ # Is it the flow sequence start indicator?
+ if ch == u'[':
+ return self.fetch_flow_sequence_start()
+
+ # Is it the flow mapping start indicator?
+ if ch == u'{':
+ return self.fetch_flow_mapping_start()
+
+ # Is it the flow sequence end indicator?
+ if ch == u']':
+ return self.fetch_flow_sequence_end()
+
+ # Is it the flow mapping end indicator?
+ if ch == u'}':
+ return self.fetch_flow_mapping_end()
+
+ # Is it the flow entry indicator?
+ if ch == u',':
+ return self.fetch_flow_entry()
+
+ # Is it the block entry indicator?
+ if ch == u'-' and self.check_block_entry():
+ return self.fetch_block_entry()
+
+ # Is it the key indicator?
+ if ch == u'?' and self.check_key():
+ return self.fetch_key()
+
+ # Is it the value indicator?
+ if ch == u':' and self.check_value():
+ return self.fetch_value()
+
+ # Is it an alias?
+ if ch == u'*':
+ return self.fetch_alias()
+
+ # Is it an anchor?
+ if ch == u'&':
+ return self.fetch_anchor()
+
+ # Is it a tag?
+ if ch == u'!':
+ return self.fetch_tag()
+
+ # Is it a literal scalar?
+ if ch == u'|' and not self.flow_level:
+ return self.fetch_literal()
+
+ # Is it a folded scalar?
+ if ch == u'>' and not self.flow_level:
+ return self.fetch_folded()
+
+ # Is it a single quoted scalar?
+ if ch == u'\'':
+ return self.fetch_single()
+
+ # Is it a double quoted scalar?
+ if ch == u'\"':
+ return self.fetch_double()
+
+ # It must be a plain scalar then.
+ if self.check_plain():
+ return self.fetch_plain()
+
+ # No? It's an error. Let's produce a nice error message.
+ raise ScannerError("while scanning for the next token", None,
+ "found character %r that cannot start any token"
+ % ch.encode('utf-8'), self.get_mark())
+
+ # Simple keys treatment.
+
+ def next_possible_simple_key(self):
+ # Return the number of the nearest possible simple key. Actually we
+ # don't need to loop through the whole dictionary. We may replace it
+ # with the following code:
+ # if not self.possible_simple_keys:
+ # return None
+ # return self.possible_simple_keys[
+ # min(self.possible_simple_keys.keys())].token_number
+ min_token_number = None
+ for level in self.possible_simple_keys:
+ key = self.possible_simple_keys[level]
+ if min_token_number is None or key.token_number < min_token_number:
+ min_token_number = key.token_number
+ return min_token_number
+
+ def stale_possible_simple_keys(self):
+ # Remove entries that are no longer possible simple keys. According to
+ # the YAML specification, simple keys
+ # - should be limited to a single line,
+ # - should be no longer than 1024 characters.
+ # Disabling this procedure will allow simple keys of any length and
+ # height (may cause problems if indentation is broken though).
+ for level in self.possible_simple_keys.keys():
+ key = self.possible_simple_keys[level]
+ if key.line != self.line \
+ or self.index-key.index > 1024:
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not found expected ':'", self.get_mark())
+ del self.possible_simple_keys[level]
+
+ def save_possible_simple_key(self):
+ # The next token may start a simple key. We check if it's possible
+ # and save its position. This function is called for
+ # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+ # Check if a simple key is required at the current position.
+ required = not self.flow_level and self.indent == self.column
+
+ # A simple key is required only if it is the first token in the current
+ # line. Therefore it is always allowed.
+ assert self.allow_simple_key or not required
+
+        # The next token might be a simple key. Let's save its number and
+ # position.
+ if self.allow_simple_key:
+ self.remove_possible_simple_key()
+ token_number = self.tokens_taken+len(self.tokens)
+ key = SimpleKey(token_number, required,
+ self.index, self.line, self.column, self.get_mark())
+ self.possible_simple_keys[self.flow_level] = key
+
+ def remove_possible_simple_key(self):
+ # Remove the saved possible key position at the current flow level.
+ if self.flow_level in self.possible_simple_keys:
+ key = self.possible_simple_keys[self.flow_level]
+
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not found expected ':'", self.get_mark())
+
+ del self.possible_simple_keys[self.flow_level]
+
+ # Indentation functions.
+
+ def unwind_indent(self, column):
+
+ ## In flow context, tokens should respect indentation.
+ ## Actually the condition should be `self.indent >= column` according to
+ ## the spec. But this condition will prohibit intuitively correct
+ ## constructions such as
+ ## key : {
+ ## }
+ #if self.flow_level and self.indent > column:
+ # raise ScannerError(None, None,
+ # "invalid intendation or unclosed '[' or '{'",
+ # self.get_mark())
+
+ # In the flow context, indentation is ignored. We make the scanner less
+        # restrictive than the specification requires.
+ if self.flow_level:
+ return
+
+ # In block context, we may need to issue the BLOCK-END tokens.
+ while self.indent > column:
+ mark = self.get_mark()
+ self.indent = self.indents.pop()
+ self.tokens.append(BlockEndToken(mark, mark))
+
+ def add_indent(self, column):
+ # Check if we need to increase indentation.
+ if self.indent < column:
+ self.indents.append(self.indent)
+ self.indent = column
+ return True
+ return False
+
+ # Fetchers.
+
+ def fetch_stream_start(self):
+ # We always add STREAM-START as the first token and STREAM-END as the
+ # last token.
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-START.
+ self.tokens.append(StreamStartToken(mark, mark,
+ encoding=self.encoding))
+
+
+ def fetch_stream_end(self):
+
+        # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+ self.possible_simple_keys = {}
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-END.
+ self.tokens.append(StreamEndToken(mark, mark))
+
+        # The stream is finished.
+ self.done = True
+
+ def fetch_directive(self):
+
+        # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive())
+
+ def fetch_document_start(self):
+ self.fetch_document_indicator(DocumentStartToken)
+
+ def fetch_document_end(self):
+ self.fetch_document_indicator(DocumentEndToken)
+
+ def fetch_document_indicator(self, TokenClass):
+
+        # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys. Note that there could not be a block collection
+ # after '---'.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Add DOCUMENT-START or DOCUMENT-END.
+ start_mark = self.get_mark()
+ self.forward(3)
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_start(self):
+ self.fetch_flow_collection_start(FlowSequenceStartToken)
+
+ def fetch_flow_mapping_start(self):
+ self.fetch_flow_collection_start(FlowMappingStartToken)
+
+ def fetch_flow_collection_start(self, TokenClass):
+
+ # '[' and '{' may start a simple key.
+ self.save_possible_simple_key()
+
+ # Increase the flow level.
+ self.flow_level += 1
+
+ # Simple keys are allowed after '[' and '{'.
+ self.allow_simple_key = True
+
+ # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_end(self):
+ self.fetch_flow_collection_end(FlowSequenceEndToken)
+
+ def fetch_flow_mapping_end(self):
+ self.fetch_flow_collection_end(FlowMappingEndToken)
+
+ def fetch_flow_collection_end(self, TokenClass):
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Decrease the flow level.
+ self.flow_level -= 1
+
+ # No simple keys after ']' or '}'.
+ self.allow_simple_key = False
+
+ # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_entry(self):
+
+ # Simple keys are allowed after ','.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add FLOW-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(FlowEntryToken(start_mark, end_mark))
+
+ def fetch_block_entry(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a new entry?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "sequence entries are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-SEQUENCE-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockSequenceStartToken(mark, mark))
+
+ # It's an error for the block entry to occur in the flow context,
+ # but we let the parser detect this.
+ else:
+ pass
+
+ # Simple keys are allowed after '-'.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add BLOCK-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(BlockEntryToken(start_mark, end_mark))
+
+ def fetch_key(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+            # Are we allowed to start a key (not necessarily a simple one)?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping keys are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-MAPPING-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after '?' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add KEY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(KeyToken(start_mark, end_mark))
+
+ def fetch_value(self):
+
+ # Do we determine a simple key?
+ if self.flow_level in self.possible_simple_keys:
+
+ # Add KEY.
+ key = self.possible_simple_keys[self.flow_level]
+ del self.possible_simple_keys[self.flow_level]
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ KeyToken(key.mark, key.mark))
+
+ # If this key starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START.
+ if not self.flow_level:
+ if self.add_indent(key.column):
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ BlockMappingStartToken(key.mark, key.mark))
+
+ # There cannot be two simple keys one after another.
+ self.allow_simple_key = False
+
+ # It must be a part of a complex key.
+ else:
+
+ # Block context needs additional checks.
+            # (Do we really need them? They will be caught by the parser
+ # anyway.)
+ if not self.flow_level:
+
+ # We are allowed to start a complex value if and only if
+ # we can start a simple key.
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping values are not allowed here",
+ self.get_mark())
+
+ # If this value starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START. It will be detected as an error later by
+ # the parser.
+ if not self.flow_level:
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after ':' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add VALUE.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(ValueToken(start_mark, end_mark))
+
+ def fetch_alias(self):
+
+ # ALIAS could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ALIAS.
+ self.allow_simple_key = False
+
+ # Scan and add ALIAS.
+ self.tokens.append(self.scan_anchor(AliasToken))
+
+ def fetch_anchor(self):
+
+ # ANCHOR could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ANCHOR.
+ self.allow_simple_key = False
+
+ # Scan and add ANCHOR.
+ self.tokens.append(self.scan_anchor(AnchorToken))
+
+ def fetch_tag(self):
+
+ # TAG could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after TAG.
+ self.allow_simple_key = False
+
+ # Scan and add TAG.
+ self.tokens.append(self.scan_tag())
+
+ def fetch_literal(self):
+ self.fetch_block_scalar(style='|')
+
+ def fetch_folded(self):
+ self.fetch_block_scalar(style='>')
+
+ def fetch_block_scalar(self, style):
+
+ # A simple key may follow a block scalar.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_block_scalar(style))
+
+ def fetch_single(self):
+ self.fetch_flow_scalar(style='\'')
+
+ def fetch_double(self):
+ self.fetch_flow_scalar(style='"')
+
+ def fetch_flow_scalar(self, style):
+
+ # A flow scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after flow scalars.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_flow_scalar(style))
+
+ def fetch_plain(self):
+
+ # A plain scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after plain scalars. But note that `scan_plain` will
+ # change this flag if the scan is finished at the beginning of the
+ # line.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR. May change `allow_simple_key`.
+ self.tokens.append(self.scan_plain())
+
+ # Checkers.
+
+ def check_directive(self):
+
+ # DIRECTIVE: ^ '%' ...
+ # The '%' indicator is already checked.
+ if self.column == 0:
+ return True
+
+ def check_document_start(self):
+
+ # DOCUMENT-START: ^ '---' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == u'---' \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_document_end(self):
+
+ # DOCUMENT-END: ^ '...' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == u'...' \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_block_entry(self):
+
+ # BLOCK-ENTRY: '-' (' '|'\n')
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_key(self):
+
+ # KEY(flow context): '?'
+ if self.flow_level:
+ return True
+
+ # KEY(block context): '?' (' '|'\n')
+ else:
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_value(self):
+
+ # VALUE(flow context): ':'
+ if self.flow_level:
+ return True
+
+ # VALUE(block context): ':' (' '|'\n')
+ else:
+ return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+
+ def check_plain(self):
+
+ # A plain scalar may start with any non-space character except:
+ # '-', '?', ':', ',', '[', ']', '{', '}',
+ # '#', '&', '*', '!', '|', '>', '\'', '\"',
+ # '%', '@', '`'.
+ #
+ # It may also start with
+ # '-', '?', ':'
+ # if it is followed by a non-space character.
+ #
+ # Note that we limit the last rule to the block context (except the
+ # '-' character) because we want the flow context to be space
+ # independent.
+ ch = self.peek()
+ return ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
+ or (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029'
+ and (ch == u'-' or (not self.flow_level and ch in u'?:')))
+
+ # Scanners.
+
+ def scan_to_next_token(self):
+ # We ignore spaces, line breaks and comments.
+ # If we find a line break in the block context, we set the flag
+ # `allow_simple_key` on.
+ # The byte order mark is stripped if it's the first character in the
+ # stream. We do not yet support BOM inside the stream as the
+ # specification requires. Any such mark will be considered as a part
+ # of the document.
+ #
+ # TODO: We need to make tab handling rules more sane. A good rule is
+ # Tabs cannot precede tokens
+ # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+ # KEY(block), VALUE(block), BLOCK-ENTRY
+ # So the checking code is
+ # if <TAB>:
+ # self.allow_simple_keys = False
+ # We also need to add the check for `allow_simple_keys == True` to
+ # `unwind_indent` before issuing BLOCK-END.
+ # Scanners for block, flow, and plain scalars need to be modified.
+
+ if self.index == 0 and self.peek() == u'\uFEFF':
+ self.forward()
+ found = False
+ while not found:
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ if self.scan_line_break():
+ if not self.flow_level:
+ self.allow_simple_key = True
+ else:
+ found = True
+
+ def scan_directive(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ self.forward()
+ name = self.scan_directive_name(start_mark)
+ value = None
+ if name == u'YAML':
+ value = self.scan_yaml_directive_value(start_mark)
+ end_mark = self.get_mark()
+ elif name == u'TAG':
+ value = self.scan_tag_directive_value(start_mark)
+ end_mark = self.get_mark()
+ else:
+ end_mark = self.get_mark()
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ self.scan_directive_ignored_line(start_mark)
+ return DirectiveToken(name, value, start_mark, end_mark)
+
+ def scan_directive_name(self, start_mark):
+ # See the specification for details.
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ return value
+
+ def scan_yaml_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ major = self.scan_yaml_directive_number(start_mark)
+ if self.peek() != '.':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or '.', but found %r"
+ % self.peek().encode('utf-8'),
+ self.get_mark())
+ self.forward()
+ minor = self.scan_yaml_directive_number(start_mark)
+ if self.peek() not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or ' ', but found %r"
+ % self.peek().encode('utf-8'),
+ self.get_mark())
+ return (major, minor)
+
+ def scan_yaml_directive_number(self, start_mark):
+ # See the specification for details.
+ ch = self.peek()
+ if not (u'0' <= ch <= u'9'):
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit, but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length = 0
+ while u'0' <= self.peek(length) <= u'9':
+ length += 1
+ value = int(self.prefix(length))
+ self.forward(length)
+ return value
+
+ def scan_tag_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ handle = self.scan_tag_directive_handle(start_mark)
+ while self.peek() == u' ':
+ self.forward()
+ prefix = self.scan_tag_directive_prefix(start_mark)
+ return (handle, prefix)
+
+ def scan_tag_directive_handle(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_handle('directive', start_mark)
+ ch = self.peek()
+ if ch != u' ':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return value
+
+ def scan_tag_directive_prefix(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_uri('directive', start_mark)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return value
+
+ def scan_directive_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ self.scan_line_break()
+
+ def scan_anchor(self, TokenClass):
+ # The specification does not restrict characters for anchors and
+ # aliases. This may lead to problems, for instance, the document:
+ # [ *alias, value ]
+        # can be interpreted in two ways, as
+ # [ "value" ]
+ # and
+ # [ *alias , "value" ]
+ # Therefore we restrict aliases to numbers and ASCII letters.
+ start_mark = self.get_mark()
+ indicator = self.peek()
+ if indicator == u'*':
+ name = 'alias'
+ else:
+ name = 'anchor'
+ self.forward()
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ end_mark = self.get_mark()
+ return TokenClass(value, start_mark, end_mark)
+
+ def scan_tag(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ ch = self.peek(1)
+ if ch == u'<':
+ handle = None
+ self.forward(2)
+ suffix = self.scan_tag_uri('tag', start_mark)
+ if self.peek() != u'>':
+ raise ScannerError("while parsing a tag", start_mark,
+ "expected '>', but found %r" % self.peek().encode('utf-8'),
+ self.get_mark())
+ self.forward()
+ elif ch in u'\0 \t\r\n\x85\u2028\u2029':
+ handle = None
+ suffix = u'!'
+ self.forward()
+ else:
+ length = 1
+ use_handle = False
+ while ch not in u'\0 \r\n\x85\u2028\u2029':
+ if ch == u'!':
+ use_handle = True
+ break
+ length += 1
+ ch = self.peek(length)
+ handle = u'!'
+ if use_handle:
+ handle = self.scan_tag_handle('tag', start_mark)
+ else:
+ handle = u'!'
+ self.forward()
+ suffix = self.scan_tag_uri('tag', start_mark)
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a tag", start_mark,
+ "expected ' ', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ value = (handle, suffix)
+ end_mark = self.get_mark()
+ return TagToken(value, start_mark, end_mark)
+
+ def scan_block_scalar(self, style):
+ # See the specification for details.
+
+ if style == '>':
+ folded = True
+ else:
+ folded = False
+
+ chunks = []
+ start_mark = self.get_mark()
+
+ # Scan the header.
+ self.forward()
+ chomping, increment = self.scan_block_scalar_indicators(start_mark)
+ self.scan_block_scalar_ignored_line(start_mark)
+
+ # Determine the indentation level and go to the first non-empty line.
+ min_indent = self.indent+1
+ if min_indent < 1:
+ min_indent = 1
+ if increment is None:
+ breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+ indent = max(min_indent, max_indent)
+ else:
+ indent = min_indent+increment-1
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ line_break = u''
+
+ # Scan the inner part of the block scalar.
+ while self.column == indent and self.peek() != u'\0':
+ chunks.extend(breaks)
+ leading_non_space = self.peek() not in u' \t'
+ length = 0
+ while self.peek(length) not in u'\0\r\n\x85\u2028\u2029':
+ length += 1
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ line_break = self.scan_line_break()
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ if self.column == indent and self.peek() != u'\0':
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
+
+ if folded and line_break == u'\n' \
+ and leading_non_space and self.peek() not in u' \t':
+ if not breaks:
+ chunks.append(u' ')
+ else:
+ chunks.append(line_break)
+
+ # This is Clark Evans's interpretation (also in the spec
+ # examples):
+ #
+ #if folded and line_break == u'\n':
+ # if not breaks:
+ # if self.peek() not in ' \t':
+ # chunks.append(u' ')
+ # else:
+ # chunks.append(line_break)
+ #else:
+ # chunks.append(line_break)
+ else:
+ break
+
+ # Chomp the tail.
+ if chomping is not False:
+ chunks.append(line_break)
+ if chomping is True:
+ chunks.extend(breaks)
+
+ # We are done.
+ return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ def scan_block_scalar_indicators(self, start_mark):
+ # See the specification for details.
+ chomping = None
+ increment = None
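+        # The two symmetric branches below accept the indicators in either
+        # order: chomping then indentation increment, or increment then
+        # chomping.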
+ ch = self.peek()
+ if ch in u'+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch in u'0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ elif ch in u'0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ ch = self.peek()
+ if ch in u'+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected chomping or indentation indicators, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ return chomping, increment
+
+ def scan_block_scalar_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == u' ':
+ self.forward()
+ if self.peek() == u'#':
+ while self.peek() not in u'\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in u'\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch.encode('utf-8'), self.get_mark())
+ self.scan_line_break()
+
+ def scan_block_scalar_indentation(self):
+ # See the specification for details.
+ chunks = []
+ max_indent = 0
+ end_mark = self.get_mark()
+ while self.peek() in u' \r\n\x85\u2028\u2029':
+ if self.peek() != u' ':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ else:
+ self.forward()
+ if self.column > max_indent:
+ max_indent = self.column
+ return chunks, max_indent, end_mark
+
+ def scan_block_scalar_breaks(self, indent):
+ # See the specification for details.
+ chunks = []
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == u' ':
+ self.forward()
+ while self.peek() in u'\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == u' ':
+ self.forward()
+ return chunks, end_mark
+
+ def scan_flow_scalar(self, style):
+ # See the specification for details.
+        # Note that we loosen indentation rules for quoted scalars. Quoted
+        # scalars don't need to adhere to indentation because " and ' clearly
+        # mark their beginning and end. Therefore we are less
+        # restrictive than the specification requires. We only need to check
+ # that document separators are not included in scalars.
+ if style == '"':
+ double = True
+ else:
+ double = False
+ chunks = []
+ start_mark = self.get_mark()
+ quote = self.peek()
+ self.forward()
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ while self.peek() != quote:
+ chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ self.forward()
+ end_mark = self.get_mark()
+ return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ ESCAPE_REPLACEMENTS = {
+ u'0': u'\0',
+ u'a': u'\x07',
+ u'b': u'\x08',
+ u't': u'\x09',
+ u'\t': u'\x09',
+ u'n': u'\x0A',
+ u'v': u'\x0B',
+ u'f': u'\x0C',
+ u'r': u'\x0D',
+ u'e': u'\x1B',
+ u' ': u'\x20',
+ u'\"': u'\"',
+ u'\\': u'\\',
+ u'N': u'\x85',
+ u'_': u'\xA0',
+ u'L': u'\u2028',
+ u'P': u'\u2029',
+ }
+
+ ESCAPE_CODES = {
+ u'x': 2,
+ u'u': 4,
+ u'U': 8,
+ }
+
+ def scan_flow_scalar_non_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ length = 0
+ while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029':
+ length += 1
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ ch = self.peek()
+ if not double and ch == u'\'' and self.peek(1) == u'\'':
+ chunks.append(u'\'')
+ self.forward(2)
+ elif (double and ch == u'\'') or (not double and ch in u'\"\\'):
+ chunks.append(ch)
+ self.forward()
+ elif double and ch == u'\\':
+ self.forward()
+ ch = self.peek()
+ if ch in self.ESCAPE_REPLACEMENTS:
+ chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+ self.forward()
+ elif ch in self.ESCAPE_CODES:
+ length = self.ESCAPE_CODES[ch]
+ self.forward()
+ for k in range(length):
+ if self.peek(k) not in u'0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "expected escape sequence of %d hexdecimal numbers, but found %r" %
+ (length, self.peek(k).encode('utf-8')), self.get_mark())
+ code = int(self.prefix(length), 16)
+ chunks.append(unichr(code))
+ self.forward(length)
+ elif ch in u'\r\n\x85\u2028\u2029':
+ self.scan_line_break()
+ chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+ else:
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark())
+ else:
+ return chunks
+
+ def scan_flow_scalar_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ length = 0
+ while self.peek(length) in u' \t':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch == u'\0':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected end of stream", self.get_mark())
+ elif ch in u'\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ breaks = self.scan_flow_scalar_breaks(double, start_mark)
+ if line_break != u'\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(u' ')
+ chunks.extend(breaks)
+ else:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_flow_scalar_breaks(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ # Instead of checking indentation, we check for document
+ # separators.
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected document separator", self.get_mark())
+ while self.peek() in u' \t':
+ self.forward()
+ if self.peek() in u'\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ else:
+ return chunks
+
+ def scan_plain(self):
+ # See the specification for details.
+ # We add an additional restriction for the flow context:
+ # plain scalars in the flow context cannot contain ',', ':' and '?'.
+ # We also keep track of the `allow_simple_key` flag here.
+        # Indentation rules are loosened for the flow context.
+ chunks = []
+ start_mark = self.get_mark()
+ end_mark = start_mark
+ indent = self.indent+1
+ # We allow zero indentation for scalars, but then we need to check for
+ # document separators at the beginning of the line.
+ #if indent == 0:
+ # indent = 1
+ spaces = []
+ while True:
+ length = 0
+ if self.peek() == u'#':
+ break
+ while True:
+ ch = self.peek(length)
+ if ch in u'\0 \t\r\n\x85\u2028\u2029' \
+ or (not self.flow_level and ch == u':' and
+ self.peek(length+1) in u'\0 \t\r\n\x85\u2028\u2029') \
+ or (self.flow_level and ch in u',:?[]{}'):
+ break
+ length += 1
+ # It's not clear what we should do with ':' in the flow context.
+ if (self.flow_level and ch == u':'
+ and self.peek(length+1) not in u'\0 \t\r\n\x85\u2028\u2029,[]{}'):
+ self.forward(length)
+ raise ScannerError("while scanning a plain scalar", start_mark,
+ "found unexpected ':'", self.get_mark(),
+ "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
+ if length == 0:
+ break
+ self.allow_simple_key = False
+ chunks.extend(spaces)
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ end_mark = self.get_mark()
+ spaces = self.scan_plain_spaces(indent, start_mark)
+ if not spaces or self.peek() == u'#' \
+ or (not self.flow_level and self.column < indent):
+ break
+ return ScalarToken(u''.join(chunks), True, start_mark, end_mark)
+
+ def scan_plain_spaces(self, indent, start_mark):
+ # See the specification for details.
+ # The specification is really confusing about tabs in plain scalars.
+ # We just forbid them completely. Do not use tabs in YAML!
+ chunks = []
+ length = 0
+ while self.peek(length) in u' ':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch in u'\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ self.allow_simple_key = True
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return
+ breaks = []
+ while self.peek() in u' \r\n\x85\u2028\u2029':
+ if self.peek() == ' ':
+ self.forward()
+ else:
+ breaks.append(self.scan_line_break())
+ prefix = self.prefix(3)
+ if (prefix == u'---' or prefix == u'...') \
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ return
+ if line_break != u'\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(u' ')
+ chunks.extend(breaks)
+ elif whitespaces:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_tag_handle(self, name, start_mark):
+ # See the specification for details.
+        # For some strange reason, the specification does not allow '_' in
+ # tag handles. I have allowed it anyway.
+ ch = self.peek()
+ if ch != u'!':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length = 1
+ ch = self.peek(length)
+ if ch != u' ':
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-_':
+ length += 1
+ ch = self.peek(length)
+ if ch != u'!':
+ self.forward(length)
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ length += 1
+ value = self.prefix(length)
+ self.forward(length)
+ return value
+
+ def scan_tag_uri(self, name, start_mark):
+ # See the specification for details.
+        # Note: we do not check whether the URI is well-formed.
+ chunks = []
+ length = 0
+ ch = self.peek(length)
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
+ or ch in u'-;/?:@&=+$,_.!~*\'()[]%':
+ if ch == u'%':
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ chunks.append(self.scan_uri_escapes(name, start_mark))
+ else:
+ length += 1
+ ch = self.peek(length)
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ if not chunks:
+ raise ScannerError("while parsing a %s" % name, start_mark,
+ "expected URI, but found %r" % ch.encode('utf-8'),
+ self.get_mark())
+ return u''.join(chunks)
+
+ def scan_uri_escapes(self, name, start_mark):
+ # See the specification for details.
+ bytes = []
+ mark = self.get_mark()
+ while self.peek() == u'%':
+ self.forward()
+ for k in range(2):
+ if self.peek(k) not in u'0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected URI escape sequence of 2 hexdecimal numbers, but found %r" %
+ (self.peek(k).encode('utf-8')), self.get_mark())
+ bytes.append(chr(int(self.prefix(2), 16)))
+ self.forward(2)
+ try:
+ value = unicode(''.join(bytes), 'utf-8')
+ except UnicodeDecodeError, exc:
+ raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+ return value
+
+ def scan_line_break(self):
+ # Transforms:
+ # '\r\n' : '\n'
+ # '\r' : '\n'
+ # '\n' : '\n'
+ # '\x85' : '\n'
+ # '\u2028' : '\u2028'
+        #   '\u2029'    :   '\u2029'
+ # default : ''
+ ch = self.peek()
+ if ch in u'\r\n\x85':
+ if self.prefix(2) == u'\r\n':
+ self.forward(2)
+ else:
+ self.forward()
+ return u'\n'
+ elif ch in u'\u2028\u2029':
+ self.forward()
+ return ch
+ return u''
+
+#try:
+# import psyco
+# psyco.bind(Scanner)
+#except ImportError:
+# pass
+
diff --git a/python/pyyaml/lib/yaml/serializer.py b/python/pyyaml/lib/yaml/serializer.py
new file mode 100644
index 000000000..0bf1e96dc
--- /dev/null
+++ b/python/pyyaml/lib/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from error import YAMLError
+from events import *
+from nodes import *
+
+class SerializerError(YAMLError):
+ pass
+
+class Serializer(object):
+
+ ANCHOR_TEMPLATE = u'id%03d'
+
+ def __init__(self, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ self.use_encoding = encoding
+ self.use_explicit_start = explicit_start
+ self.use_explicit_end = explicit_end
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+ self.closed = None
+
+ def open(self):
+ if self.closed is None:
+ self.emit(StreamStartEvent(encoding=self.use_encoding))
+ self.closed = False
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError("serializer is already opened")
+
+ def close(self):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif not self.closed:
+ self.emit(StreamEndEvent())
+ self.closed = True
+
+ #def __del__(self):
+ # self.close()
+
+ def serialize(self, node):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+ version=self.use_version, tags=self.use_tags))
+ self.anchor_node(node)
+ self.serialize_node(node, None, None)
+ self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+
+ def anchor_node(self, node):
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.anchors[node] = self.generate_anchor(node)
+ else:
+ self.anchors[node] = None
+ if isinstance(node, SequenceNode):
+ for item in node.value:
+ self.anchor_node(item)
+ elif isinstance(node, MappingNode):
+ for key, value in node.value:
+ self.anchor_node(key)
+ self.anchor_node(value)
+
+ def generate_anchor(self, node):
+ self.last_anchor_id += 1
+ return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+ def serialize_node(self, node, parent, index):
+ alias = self.anchors[node]
+ if node in self.serialized_nodes:
+ self.emit(AliasEvent(alias))
+ else:
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if isinstance(node, ScalarNode):
+ detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+ default_tag = self.resolve(ScalarNode, node.value, (False, True))
+ implicit = (node.tag == detected_tag), (node.tag == default_tag)
+ self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+ style=node.style))
+ elif isinstance(node, SequenceNode):
+ implicit = (node.tag
+ == self.resolve(SequenceNode, node.value, True))
+ self.emit(SequenceStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ index = 0
+ for item in node.value:
+ self.serialize_node(item, node, index)
+ index += 1
+ self.emit(SequenceEndEvent())
+ elif isinstance(node, MappingNode):
+ implicit = (node.tag
+ == self.resolve(MappingNode, node.value, True))
+ self.emit(MappingStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ for key, value in node.value:
+ self.serialize_node(key, node, None)
+ self.serialize_node(value, node, key)
+ self.emit(MappingEndEvent())
+ self.ascend_resolver()
+
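+# A minimal usage sketch (not part of the module): the Serializer mixin
+# requires open() before the first serialize() and close() after the last
+# one; yaml.serialize_all() wraps exactly this open/serialize/close dance.
+if __name__ == '__main__':
+ import yaml
+ node = yaml.compose("- 1\n- 2\n") # build a representation tree
+ text = yaml.serialize(node) # open/serialize/close under the hood
+ assert yaml.load(text) == [1, 2]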
diff --git a/python/pyyaml/lib/yaml/tokens.py b/python/pyyaml/lib/yaml/tokens.py
new file mode 100644
index 000000000..4d0b48a39
--- /dev/null
+++ b/python/pyyaml/lib/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+ def __init__(self, start_mark, end_mark):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in self.__dict__
+ if not key.endswith('_mark')]
+ attributes.sort()
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+# id = '<byte order mark>'
+
+class DirectiveToken(Token):
+ id = '<directive>'
+ def __init__(self, name, value, start_mark, end_mark):
+ self.name = name
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+ id = '<document start>'
+
+class DocumentEndToken(Token):
+ id = '<document end>'
+
+class StreamStartToken(Token):
+ id = '<stream start>'
+ def __init__(self, start_mark=None, end_mark=None,
+ encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndToken(Token):
+ id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+ id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+ id = '<block mapping start>'
+
+class BlockEndToken(Token):
+ id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+ id = '['
+
+class FlowMappingStartToken(Token):
+ id = '{'
+
+class FlowSequenceEndToken(Token):
+ id = ']'
+
+class FlowMappingEndToken(Token):
+ id = '}'
+
+class KeyToken(Token):
+ id = '?'
+
+class ValueToken(Token):
+ id = ':'
+
+class BlockEntryToken(Token):
+ id = '-'
+
+class FlowEntryToken(Token):
+ id = ','
+
+class AliasToken(Token):
+ id = '<alias>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class AnchorToken(Token):
+ id = '<anchor>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class TagToken(Token):
+ id = '<tag>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class ScalarToken(Token):
+ id = '<scalar>'
+ def __init__(self, value, plain, start_mark, end_mark, style=None):
+ self.value = value
+ self.plain = plain
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
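+# A small sketch (not part of the module) showing how these token classes
+# surface through the public API:
+if __name__ == '__main__':
+ import yaml
+ tokens = list(yaml.scan("key: value\n"))
+ # Expected: StreamStartToken, BlockMappingStartToken, KeyToken,
+ # ScalarToken('key'), ValueToken, ScalarToken('value'),
+ # BlockEndToken, StreamEndToken.
+ assert any(isinstance(t, yaml.ScalarToken) for t in tokens)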
diff --git a/python/pyyaml/lib3/yaml/__init__.py b/python/pyyaml/lib3/yaml/__init__.py
new file mode 100644
index 000000000..a5e20f94d
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/__init__.py
@@ -0,0 +1,312 @@
+
+from .error import *
+
+from .tokens import *
+from .events import *
+from .nodes import *
+
+from .loader import *
+from .dumper import *
+
+__version__ = '3.11'
+try:
+ from .cyaml import *
+ __with_libyaml__ = True
+except ImportError:
+ __with_libyaml__ = False
+
+import io
+
+def scan(stream, Loader=Loader):
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_token():
+ yield loader.get_token()
+ finally:
+ loader.dispose()
+
+def parse(stream, Loader=Loader):
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_event():
+ yield loader.get_event()
+ finally:
+ loader.dispose()
+
+def compose(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_node()
+ finally:
+ loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_node():
+ yield loader.get_node()
+ finally:
+ loader.dispose()
+
+def load(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_data()
+ finally:
+ loader.dispose()
+
+def load_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_data():
+ yield loader.get_data()
+ finally:
+ loader.dispose()
+
+def safe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ Resolve only basic YAML tags.
+ """
+ return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ Resolve only basic YAML tags.
+ """
+ return load_all(stream, SafeLoader)
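+
+# For example, safe_load("a: 1\nb: [2, 3]") returns {'a': 1, 'b': [2, 3]};
+# unlike load() with the full Loader, safe_load() will not resolve
+# !!python/* tags, so prefer it for untrusted input.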
+
+def emit(events, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ stream = io.StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ try:
+ for event in events:
+ dumper.emit(event)
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
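+
+# For example, emit(parse("[1, 2]")) round-trips a document through the
+# event layer and, because stream is None here, returns the emitted text
+# '[1, 2]\n'.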
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for node in nodes:
+ dumper.serialize(node)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, default_style=default_style,
+ default_flow_style=default_flow_style,
+ canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for data in documents:
+ dumper.represent(data)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=SafeDumper, **kwds)
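+
+# For instance, with the defaults in this version safe_dump({'a': 1})
+# returns '{a: 1}\n' (flow style is chosen for simple flat collections),
+# while safe_dump_all([{'a': 1}, {'b': 2}], explicit_start=True) yields
+# '--- {a: 1}\n--- {b: 2}\n'.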
+
+def add_implicit_resolver(tag, regexp, first=None,
+ Loader=Loader, Dumper=Dumper):
+ """
+ Add an implicit scalar detector.
+ If an implicit scalar value matches the given regexp,
+ the corresponding tag is assigned to the scalar.
+ first is a sequence of possible initial characters or None.
+ """
+ Loader.add_implicit_resolver(tag, regexp, first)
+ Dumper.add_implicit_resolver(tag, regexp, first)
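+
+# A hedged sketch of registering an implicit resolver (the '!semver' tag
+# is illustrative only):
+#
+# import re
+# add_implicit_resolver('!semver', re.compile(r'^\d+\.\d+\.\d+$'),
+# list('0123456789'))
+#
+# Afterwards the bare scalar 1.0.2 is implicitly tagged '!semver' during
+# resolution (a matching constructor must still be registered to load it).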
+
+def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
+ """
+ Add a path based resolver for the given tag.
+ A path is a list of keys that forms a path
+ to a node in the representation tree.
+ Keys can be string values, integers, or None.
+ """
+ Loader.add_path_resolver(tag, path, kind)
+ Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=Loader):
+ """
+ Add a constructor for the given tag.
+ Constructor is a function that accepts a Loader instance
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
+ """
+ Add a multi-constructor for the given tag prefix.
+ Multi-constructor is called for a node if its tag starts with tag_prefix.
+ Multi-constructor accepts a Loader instance, a tag suffix,
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Representer is a function accepting a Dumper instance
+ and an instance of the given data type
+ and producing the corresponding representation node.
+ """
+ Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+ """
+ Add a multi-representer for the given type.
+ Multi-representer is a function accepting a Dumper instance
+ and an instance of the given data type or subtype
+ and producing the corresponding representation node.
+ """
+ Dumper.add_multi_representer(data_type, multi_representer)
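+
+# A minimal sketch tying add_representer/add_constructor together for a
+# custom tag (the '!point' tag and Point class are hypothetical):
+#
+# class Point:
+# def __init__(self, x, y): self.x, self.y = x, y
+# add_representer(Point, lambda dumper, p:
+# dumper.represent_sequence('!point', [p.x, p.y]))
+# add_constructor('!point', lambda loader, node:
+# Point(*loader.construct_sequence(node)))
+# assert load(dump(Point(1, 2))).x == 1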
+
+class YAMLObjectMetaclass(type):
+ """
+ The metaclass for YAMLObject.
+ """
+ def __init__(cls, name, bases, kwds):
+ super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+ cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+ cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(metaclass=YAMLObjectMetaclass):
+ """
+ An object that can dump itself to a YAML stream
+ and load itself from a YAML stream.
+ """
+
+ __slots__ = () # no direct instantiation, so allow immutable subclasses
+
+ yaml_loader = Loader
+ yaml_dumper = Dumper
+
+ yaml_tag = None
+ yaml_flow_style = None
+
+ @classmethod
+ def from_yaml(cls, loader, node):
+ """
+ Convert a representation node to a Python object.
+ """
+ return loader.construct_yaml_object(node, cls)
+
+ @classmethod
+ def to_yaml(cls, dumper, data):
+ """
+ Convert a Python object to a representation node.
+ """
+ return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+ flow_style=cls.yaml_flow_style)
+
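+# A minimal sketch of the YAMLObject convenience layer (the Monster class
+# is illustrative): merely defining the subclass registers its constructor
+# and representer through the metaclass above.
+if __name__ == '__main__':
+ class Monster(YAMLObject):
+ yaml_tag = '!Monster'
+ def __init__(self, name, hp):
+ self.name = name
+ self.hp = hp
+ m = load("!Monster {name: Cave Spider, hp: 16}")
+ assert (m.name, m.hp) == ('Cave Spider', 16)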
diff --git a/python/pyyaml/lib3/yaml/composer.py b/python/pyyaml/lib3/yaml/composer.py
new file mode 100644
index 000000000..d5c6a7acd
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from .error import MarkedYAMLError
+from .events import *
+from .nodes import *
+
+class ComposerError(MarkedYAMLError):
+ pass
+
+class Composer:
+
+ def __init__(self):
+ self.anchors = {}
+
+ def check_node(self):
+ # Drop the STREAM-START event.
+ if self.check_event(StreamStartEvent):
+ self.get_event()
+
+ # Are there more documents available?
+ return not self.check_event(StreamEndEvent)
+
+ def get_node(self):
+ # Get the root node of the next document.
+ if not self.check_event(StreamEndEvent):
+ return self.compose_document()
+
+ def get_single_node(self):
+ # Drop the STREAM-START event.
+ self.get_event()
+
+ # Compose a document if the stream is not empty.
+ document = None
+ if not self.check_event(StreamEndEvent):
+ document = self.compose_document()
+
+ # Ensure that the stream contains no more documents.
+ if not self.check_event(StreamEndEvent):
+ event = self.get_event()
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document",
+ event.start_mark)
+
+ # Drop the STREAM-END event.
+ self.get_event()
+
+ return document
+
+ def compose_document(self):
+ # Drop the DOCUMENT-START event.
+ self.get_event()
+
+ # Compose the root node.
+ node = self.compose_node(None, None)
+
+ # Drop the DOCUMENT-END event.
+ self.get_event()
+
+ self.anchors = {}
+ return node
+
+ def compose_node(self, parent, index):
+ if self.check_event(AliasEvent):
+ event = self.get_event()
+ anchor = event.anchor
+ if anchor not in self.anchors:
+ raise ComposerError(None, None, "found undefined alias %r"
+ % anchor, event.start_mark)
+ return self.anchors[anchor]
+ event = self.peek_event()
+ anchor = event.anchor
+ if anchor is not None:
+ if anchor in self.anchors:
+ raise ComposerError("found duplicate anchor %r; first occurence"
+ % anchor, self.anchors[anchor].start_mark,
+ "second occurence", event.start_mark)
+ self.descend_resolver(parent, index)
+ if self.check_event(ScalarEvent):
+ node = self.compose_scalar_node(anchor)
+ elif self.check_event(SequenceStartEvent):
+ node = self.compose_sequence_node(anchor)
+ elif self.check_event(MappingStartEvent):
+ node = self.compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ def compose_scalar_node(self, anchor):
+ event = self.get_event()
+ tag = event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(ScalarNode, event.value, event.implicit)
+ node = ScalarNode(tag, event.value,
+ event.start_mark, event.end_mark, style=event.style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ return node
+
+ def compose_sequence_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(SequenceNode, None, start_event.implicit)
+ node = SequenceNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ index = 0
+ while not self.check_event(SequenceEndEvent):
+ node.value.append(self.compose_node(node, index))
+ index += 1
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
+ def compose_mapping_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(MappingNode, None, start_event.implicit)
+ node = MappingNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ while not self.check_event(MappingEndEvent):
+ #key_event = self.peek_event()
+ item_key = self.compose_node(node, None)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
+ item_value = self.compose_node(node, item_key)
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
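+# A small sketch (not part of the module) of the anchor bookkeeping above:
+if __name__ == '__main__':
+ import yaml
+ a, b = yaml.load("- &x {k: 1}\n- *x\n")
+ assert a is b # the alias resolves to the very same object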
diff --git a/python/pyyaml/lib3/yaml/constructor.py b/python/pyyaml/lib3/yaml/constructor.py
new file mode 100644
index 000000000..981543aeb
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/constructor.py
@@ -0,0 +1,686 @@
+
+__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
+ 'ConstructorError']
+
+from .error import *
+from .nodes import *
+
+import collections, datetime, base64, binascii, re, sys, types
+
+class ConstructorError(MarkedYAMLError):
+ pass
+
+class BaseConstructor:
+
+ yaml_constructors = {}
+ yaml_multi_constructors = {}
+
+ def __init__(self):
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.state_generators = []
+ self.deep_construct = False
+
+ def check_data(self):
+ # Are there more documents available?
+ return self.check_node()
+
+ def get_data(self):
+ # Construct and return the next document.
+ if self.check_node():
+ return self.construct_document(self.get_node())
+
+ def get_single_data(self):
+ # Ensure that the stream contains a single document and construct it.
+ node = self.get_single_node()
+ if node is not None:
+ return self.construct_document(node)
+ return None
+
+ def construct_document(self, node):
+ data = self.construct_object(node)
+ while self.state_generators:
+ state_generators = self.state_generators
+ self.state_generators = []
+ for generator in state_generators:
+ for dummy in generator:
+ pass
+ self.constructed_objects = {}
+ self.recursive_objects = {}
+ self.deep_construct = False
+ return data
+
+ def construct_object(self, node, deep=False):
+ if node in self.constructed_objects:
+ return self.constructed_objects[node]
+ if deep:
+ old_deep = self.deep_construct
+ self.deep_construct = True
+ if node in self.recursive_objects:
+ raise ConstructorError(None, None,
+ "found unconstructable recursive node", node.start_mark)
+ self.recursive_objects[node] = None
+ constructor = None
+ tag_suffix = None
+ if node.tag in self.yaml_constructors:
+ constructor = self.yaml_constructors[node.tag]
+ else:
+ for tag_prefix in self.yaml_multi_constructors:
+ if node.tag.startswith(tag_prefix):
+ tag_suffix = node.tag[len(tag_prefix):]
+ constructor = self.yaml_multi_constructors[tag_prefix]
+ break
+ else:
+ if None in self.yaml_multi_constructors:
+ tag_suffix = node.tag
+ constructor = self.yaml_multi_constructors[None]
+ elif None in self.yaml_constructors:
+ constructor = self.yaml_constructors[None]
+ elif isinstance(node, ScalarNode):
+ constructor = self.__class__.construct_scalar
+ elif isinstance(node, SequenceNode):
+ constructor = self.__class__.construct_sequence
+ elif isinstance(node, MappingNode):
+ constructor = self.__class__.construct_mapping
+ if tag_suffix is None:
+ data = constructor(self, node)
+ else:
+ data = constructor(self, tag_suffix, node)
+ if isinstance(data, types.GeneratorType):
+ generator = data
+ data = next(generator)
+ if self.deep_construct:
+ for dummy in generator:
+ pass
+ else:
+ self.state_generators.append(generator)
+ self.constructed_objects[node] = data
+ del self.recursive_objects[node]
+ if deep:
+ self.deep_construct = old_deep
+ return data
+
+ def construct_scalar(self, node):
+ if not isinstance(node, ScalarNode):
+ raise ConstructorError(None, None,
+ "expected a scalar node, but found %s" % node.id,
+ node.start_mark)
+ return node.value
+
+ def construct_sequence(self, node, deep=False):
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError(None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ return [self.construct_object(child, deep=deep)
+ for child in node.value]
+
+ def construct_mapping(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ mapping = {}
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ if not isinstance(key, collections.Hashable):
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unhashable key", key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ mapping[key] = value
+ return mapping
+
+ def construct_pairs(self, node, deep=False):
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+ pairs = []
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ value = self.construct_object(value_node, deep=deep)
+ pairs.append((key, value))
+ return pairs
+
+ @classmethod
+ def add_constructor(cls, tag, constructor):
+ if 'yaml_constructors' not in cls.__dict__:
+ cls.yaml_constructors = cls.yaml_constructors.copy()
+ cls.yaml_constructors[tag] = constructor
+
+ @classmethod
+ def add_multi_constructor(cls, tag_prefix, multi_constructor):
+ if 'yaml_multi_constructors' not in cls.__dict__:
+ cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
+ cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+
+class SafeConstructor(BaseConstructor):
+
+ def construct_scalar(self, node):
+ if isinstance(node, MappingNode):
+ for key_node, value_node in node.value:
+ if key_node.tag == 'tag:yaml.org,2002:value':
+ return self.construct_scalar(value_node)
+ return super().construct_scalar(node)
+
+ def flatten_mapping(self, node):
+ merge = []
+ index = 0
+ while index < len(node.value):
+ key_node, value_node = node.value[index]
+ if key_node.tag == 'tag:yaml.org,2002:merge':
+ del node.value[index]
+ if isinstance(value_node, MappingNode):
+ self.flatten_mapping(value_node)
+ merge.extend(value_node.value)
+ elif isinstance(value_node, SequenceNode):
+ submerge = []
+ for subnode in value_node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing a mapping",
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
+ self.flatten_mapping(subnode)
+ submerge.append(subnode.value)
+ submerge.reverse()
+ for value in submerge:
+ merge.extend(value)
+ else:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
+ elif key_node.tag == 'tag:yaml.org,2002:value':
+ key_node.tag = 'tag:yaml.org,2002:str'
+ index += 1
+ else:
+ index += 1
+ if merge:
+ node.value = merge + node.value
+
+ def construct_mapping(self, node, deep=False):
+ if isinstance(node, MappingNode):
+ self.flatten_mapping(node)
+ return super().construct_mapping(node, deep=deep)
+
+ def construct_yaml_null(self, node):
+ self.construct_scalar(node)
+ return None
+
+ bool_values = {
+ 'yes': True,
+ 'no': False,
+ 'true': True,
+ 'false': False,
+ 'on': True,
+ 'off': False,
+ }
+
+ def construct_yaml_bool(self, node):
+ value = self.construct_scalar(node)
+ return self.bool_values[value.lower()]
+
+ def construct_yaml_int(self, node):
+ value = self.construct_scalar(node)
+ value = value.replace('_', '')
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '0':
+ return 0
+ elif value.startswith('0b'):
+ return sign*int(value[2:], 2)
+ elif value.startswith('0x'):
+ return sign*int(value[2:], 16)
+ elif value[0] == '0':
+ return sign*int(value, 8)
+ elif ':' in value:
+ digits = [int(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*int(value)
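+ # For example, '0x1A' -> 26, the octal form '010' -> 8, and the
+ # sexagesimal form '1:30:00' -> 1*3600 + 30*60 + 0 = 5400.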
+
+ inf_value = 1e300
+ while inf_value != inf_value*inf_value:
+ inf_value *= inf_value
+ nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+
+ def construct_yaml_float(self, node):
+ value = self.construct_scalar(node)
+ value = value.replace('_', '').lower()
+ sign = +1
+ if value[0] == '-':
+ sign = -1
+ if value[0] in '+-':
+ value = value[1:]
+ if value == '.inf':
+ return sign*self.inf_value
+ elif value == '.nan':
+ return self.nan_value
+ elif ':' in value:
+ digits = [float(part) for part in value.split(':')]
+ digits.reverse()
+ base = 1
+ value = 0.0
+ for digit in digits:
+ value += digit*base
+ base *= 60
+ return sign*value
+ else:
+ return sign*float(value)
+
+ def construct_yaml_binary(self, node):
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(None, None,
+ "failed to convert base64 data into ascii: %s" % exc,
+ node.start_mark)
+ try:
+ if hasattr(base64, 'decodebytes'):
+ return base64.decodebytes(value)
+ else:
+ return base64.decodestring(value)
+ except binascii.Error as exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ timestamp_regexp = re.compile(
+ r'''^(?P<year>[0-9][0-9][0-9][0-9])
+ -(?P<month>[0-9][0-9]?)
+ -(?P<day>[0-9][0-9]?)
+ (?:(?:[Tt]|[ \t]+)
+ (?P<hour>[0-9][0-9]?)
+ :(?P<minute>[0-9][0-9])
+ :(?P<second>[0-9][0-9])
+ (?:\.(?P<fraction>[0-9]*))?
+ (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+ (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+ def construct_yaml_timestamp(self, node):
+ value = self.construct_scalar(node)
+ match = self.timestamp_regexp.match(value)
+ values = match.groupdict()
+ year = int(values['year'])
+ month = int(values['month'])
+ day = int(values['day'])
+ if not values['hour']:
+ return datetime.date(year, month, day)
+ hour = int(values['hour'])
+ minute = int(values['minute'])
+ second = int(values['second'])
+ fraction = 0
+ if values['fraction']:
+ fraction = values['fraction'][:6]
+ while len(fraction) < 6:
+ fraction += '0'
+ fraction = int(fraction)
+ delta = None
+ if values['tz_sign']:
+ tz_hour = int(values['tz_hour'])
+ tz_minute = int(values['tz_minute'] or 0)
+ delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+ if values['tz_sign'] == '-':
+ delta = -delta
+ data = datetime.datetime(year, month, day, hour, minute, second, fraction)
+ if delta:
+ data -= delta
+ return data
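+ # For example, '2001-12-14 21:59:43.10 -5' yields the timezone-normalized
+ # naive datetime(2001, 12, 15, 2, 59, 43, 100000).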
+
+ def construct_yaml_omap(self, node):
+ # Note: we do not check for duplicate keys, because it's too
+ # CPU-expensive.
+ omap = []
+ yield omap
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing an ordered map", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ omap.append((key, value))
+
+ def construct_yaml_pairs(self, node):
+ # Note: the same code as `construct_yaml_omap`.
+ pairs = []
+ yield pairs
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a sequence, but found %s" % node.id, node.start_mark)
+ for subnode in node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
+ if len(subnode.value) != 1:
+ raise ConstructorError("while constructing pairs", node.start_mark,
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
+ key_node, value_node = subnode.value[0]
+ key = self.construct_object(key_node)
+ value = self.construct_object(value_node)
+ pairs.append((key, value))
+
+ def construct_yaml_set(self, node):
+ data = set()
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_str(self, node):
+ return self.construct_scalar(node)
+
+ def construct_yaml_seq(self, node):
+ data = []
+ yield data
+ data.extend(self.construct_sequence(node))
+
+ def construct_yaml_map(self, node):
+ data = {}
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ def construct_yaml_object(self, node, cls):
+ data = cls.__new__(cls)
+ yield data
+ if hasattr(data, '__setstate__'):
+ state = self.construct_mapping(node, deep=True)
+ data.__setstate__(state)
+ else:
+ state = self.construct_mapping(node)
+ data.__dict__.update(state)
+
+ def construct_undefined(self, node):
+ raise ConstructorError(None, None,
+ "could not determine a constructor for the tag %r" % node.tag,
+ node.start_mark)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:null',
+ SafeConstructor.construct_yaml_null)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:bool',
+ SafeConstructor.construct_yaml_bool)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:int',
+ SafeConstructor.construct_yaml_int)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:float',
+ SafeConstructor.construct_yaml_float)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:binary',
+ SafeConstructor.construct_yaml_binary)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:timestamp',
+ SafeConstructor.construct_yaml_timestamp)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:omap',
+ SafeConstructor.construct_yaml_omap)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:pairs',
+ SafeConstructor.construct_yaml_pairs)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:set',
+ SafeConstructor.construct_yaml_set)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:str',
+ SafeConstructor.construct_yaml_str)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:seq',
+ SafeConstructor.construct_yaml_seq)
+
+SafeConstructor.add_constructor(
+ 'tag:yaml.org,2002:map',
+ SafeConstructor.construct_yaml_map)
+
+SafeConstructor.add_constructor(None,
+ SafeConstructor.construct_undefined)
+
+class Constructor(SafeConstructor):
+
+ def construct_python_str(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_unicode(self, node):
+ return self.construct_scalar(node)
+
+ def construct_python_bytes(self, node):
+ try:
+ value = self.construct_scalar(node).encode('ascii')
+ except UnicodeEncodeError as exc:
+ raise ConstructorError(None, None,
+ "failed to convert base64 data into ascii: %s" % exc,
+ node.start_mark)
+ try:
+ if hasattr(base64, 'decodebytes'):
+ return base64.decodebytes(value)
+ else:
+ return base64.decodestring(value)
+ except binascii.Error as exc:
+ raise ConstructorError(None, None,
+ "failed to decode base64 data: %s" % exc, node.start_mark)
+
+ def construct_python_long(self, node):
+ return self.construct_yaml_int(node)
+
+ def construct_python_complex(self, node):
+ return complex(self.construct_scalar(node))
+
+ def construct_python_tuple(self, node):
+ return tuple(self.construct_sequence(node))
+
+ def find_python_module(self, name, mark):
+ if not name:
+ raise ConstructorError("while constructing a Python module", mark,
+ "expected non-empty name appended to the tag", mark)
+ try:
+ __import__(name)
+ except ImportError as exc:
+ raise ConstructorError("while constructing a Python module", mark,
+ "cannot find module %r (%s)" % (name, exc), mark)
+ return sys.modules[name]
+
+ def find_python_name(self, name, mark):
+ if not name:
+ raise ConstructorError("while constructing a Python object", mark,
+ "expected non-empty name appended to the tag", mark)
+ if '.' in name:
+ module_name, object_name = name.rsplit('.', 1)
+ else:
+ module_name = 'builtins'
+ object_name = name
+ try:
+ __import__(module_name)
+ except ImportError as exc:
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find module %r (%s)" % (module_name, exc), mark)
+ module = sys.modules[module_name]
+ if not hasattr(module, object_name):
+ raise ConstructorError("while constructing a Python object", mark,
+ "cannot find %r in the module %r"
+ % (object_name, module.__name__), mark)
+ return getattr(module, object_name)
+
+ def construct_python_name(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python name", node.start_mark,
+ "expected the empty value, but found %r" % value, node.start_mark)
+ return self.find_python_name(suffix, node.start_mark)
+
+ def construct_python_module(self, suffix, node):
+ value = self.construct_scalar(node)
+ if value:
+ raise ConstructorError("while constructing a Python module", node.start_mark,
+ "expected the empty value, but found %r" % value, node.start_mark)
+ return self.find_python_module(suffix, node.start_mark)
+
+ def make_python_instance(self, suffix, node,
+ args=None, kwds=None, newobj=False):
+ if not args:
+ args = []
+ if not kwds:
+ kwds = {}
+ cls = self.find_python_name(suffix, node.start_mark)
+ if newobj and isinstance(cls, type):
+ return cls.__new__(cls, *args, **kwds)
+ else:
+ return cls(*args, **kwds)
+
+ def set_python_instance_state(self, instance, state):
+ if hasattr(instance, '__setstate__'):
+ instance.__setstate__(state)
+ else:
+ slotstate = {}
+ if isinstance(state, tuple) and len(state) == 2:
+ state, slotstate = state
+ if hasattr(instance, '__dict__'):
+ instance.__dict__.update(state)
+ elif state:
+ slotstate.update(state)
+ for key, value in slotstate.items():
+ setattr(instance, key, value)
+
+ def construct_python_object(self, suffix, node):
+ # Format:
+ # !!python/object:module.name { ... state ... }
+ instance = self.make_python_instance(suffix, node, newobj=True)
+ yield instance
+ deep = hasattr(instance, '__setstate__')
+ state = self.construct_mapping(node, deep=deep)
+ self.set_python_instance_state(instance, state)
+
+ def construct_python_object_apply(self, suffix, node, newobj=False):
+ # Format:
+ # !!python/object/apply # (or !!python/object/new)
+ # args: [ ... arguments ... ]
+ # kwds: { ... keywords ... }
+ # state: ... state ...
+ # listitems: [ ... listitems ... ]
+ # dictitems: { ... dictitems ... }
+ # or short format:
+ # !!python/object/apply [ ... arguments ... ]
+ # The difference between !!python/object/apply and !!python/object/new
+ # is how the object is created; see make_python_instance for details.
+ if isinstance(node, SequenceNode):
+ args = self.construct_sequence(node, deep=True)
+ kwds = {}
+ state = {}
+ listitems = []
+ dictitems = {}
+ else:
+ value = self.construct_mapping(node, deep=True)
+ args = value.get('args', [])
+ kwds = value.get('kwds', {})
+ state = value.get('state', {})
+ listitems = value.get('listitems', [])
+ dictitems = value.get('dictitems', {})
+ instance = self.make_python_instance(suffix, node, args, kwds, newobj)
+ if state:
+ self.set_python_instance_state(instance, state)
+ if listitems:
+ instance.extend(listitems)
+ if dictitems:
+ for key in dictitems:
+ instance[key] = dictitems[key]
+ return instance
+
+ def construct_python_object_new(self, suffix, node):
+ return self.construct_python_object_apply(suffix, node, newobj=True)
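+ # For example, the document "!!python/object/apply:complex [1, 2]"
+ # constructs complex(1, 2) when loaded with the full (unsafe) Loader.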
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/none',
+ Constructor.construct_yaml_null)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/bool',
+ Constructor.construct_yaml_bool)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/str',
+ Constructor.construct_python_str)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/unicode',
+ Constructor.construct_python_unicode)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/bytes',
+ Constructor.construct_python_bytes)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/int',
+ Constructor.construct_yaml_int)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/long',
+ Constructor.construct_python_long)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/float',
+ Constructor.construct_yaml_float)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/complex',
+ Constructor.construct_python_complex)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/list',
+ Constructor.construct_yaml_seq)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/tuple',
+ Constructor.construct_python_tuple)
+
+Constructor.add_constructor(
+ 'tag:yaml.org,2002:python/dict',
+ Constructor.construct_yaml_map)
+
+Constructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/name:',
+ Constructor.construct_python_name)
+
+Constructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/module:',
+ Constructor.construct_python_module)
+
+Constructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object:',
+ Constructor.construct_python_object)
+
+Constructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object/apply:',
+ Constructor.construct_python_object_apply)
+
+Constructor.add_multi_constructor(
+ 'tag:yaml.org,2002:python/object/new:',
+ Constructor.construct_python_object_new)
+
diff --git a/python/pyyaml/lib3/yaml/cyaml.py b/python/pyyaml/lib3/yaml/cyaml.py
new file mode 100644
index 000000000..d5cb87e99
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/cyaml.py
@@ -0,0 +1,85 @@
+
+__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper']
+
+from _yaml import CParser, CEmitter
+
+from .constructor import *
+
+from .serializer import *
+from .representer import *
+
+from .resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
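+# A common usage sketch (not part of the module): prefer the C-accelerated
+# classes when libyaml is available, falling back to the pure-Python ones.
+if __name__ == '__main__':
+ import yaml
+ try:
+ BestLoader = yaml.CLoader
+ except AttributeError:
+ BestLoader = yaml.Loader
+ assert yaml.load("[1, 2]", Loader=BestLoader) == [1, 2]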
diff --git a/python/pyyaml/lib3/yaml/dumper.py b/python/pyyaml/lib3/yaml/dumper.py
new file mode 100644
index 000000000..0b6912877
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from .emitter import *
+from .serializer import *
+from .representer import *
+from .resolver import *
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ Emitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ Serializer.__init__(self, encoding=encoding,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
diff --git a/python/pyyaml/lib3/yaml/emitter.py b/python/pyyaml/lib3/yaml/emitter.py
new file mode 100644
index 000000000..34cb145a5
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/emitter.py
@@ -0,0 +1,1137 @@
+
+# Emitter expects events obeying the following grammar:
+# stream ::= STREAM-START document* STREAM-END
+# document ::= DOCUMENT-START node DOCUMENT-END
+# node ::= SCALAR | sequence | mapping
+# sequence ::= SEQUENCE-START node* SEQUENCE-END
+# mapping ::= MAPPING-START (node node)* MAPPING-END
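+#
+# For example, dumping [1, 2] drives the emitter with the event sequence:
+# STREAM-START DOCUMENT-START SEQUENCE-START SCALAR SCALAR
+# SEQUENCE-END DOCUMENT-END STREAM-END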
+
+__all__ = ['Emitter', 'EmitterError']
+
+from .error import YAMLError
+from .events import *
+
+class EmitterError(YAMLError):
+ pass
+
+class ScalarAnalysis:
+ def __init__(self, scalar, empty, multiline,
+ allow_flow_plain, allow_block_plain,
+ allow_single_quoted, allow_double_quoted,
+ allow_block):
+ self.scalar = scalar
+ self.empty = empty
+ self.multiline = multiline
+ self.allow_flow_plain = allow_flow_plain
+ self.allow_block_plain = allow_block_plain
+ self.allow_single_quoted = allow_single_quoted
+ self.allow_double_quoted = allow_double_quoted
+ self.allow_block = allow_block
+
+class Emitter:
+
+ DEFAULT_TAG_PREFIXES = {
+ '!' : '!',
+ 'tag:yaml.org,2002:' : '!!',
+ }
+
+ def __init__(self, stream, canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+
+ # The stream should have the methods `write` and possibly `flush`.
+ self.stream = stream
+
+ # Encoding can be overridden by STREAM-START.
+ self.encoding = None
+
+ # Emitter is a state machine with a stack of states to handle nested
+ # structures.
+ self.states = []
+ self.state = self.expect_stream_start
+
+ # Current event and the event queue.
+ self.events = []
+ self.event = None
+
+ # The current indentation level and the stack of previous indents.
+ self.indents = []
+ self.indent = None
+
+ # Flow level.
+ self.flow_level = 0
+
+ # Contexts.
+ self.root_context = False
+ self.sequence_context = False
+ self.mapping_context = False
+ self.simple_key_context = False
+
+ # Characteristics of the last emitted character:
+ # - current position.
+ # - is it whitespace?
+ # - is it an indentation character
+ # (indentation space, '-', '?', or ':')?
+ self.line = 0
+ self.column = 0
+ self.whitespace = True
+ self.indention = True
+
+ # Whether the document requires an explicit document indicator
+ self.open_ended = False
+
+ # Formatting details.
+ self.canonical = canonical
+ self.allow_unicode = allow_unicode
+ self.best_indent = 2
+ if indent and 1 < indent < 10:
+ self.best_indent = indent
+ self.best_width = 80
+ if width and width > self.best_indent*2:
+ self.best_width = width
+ self.best_line_break = '\n'
+ if line_break in ['\r', '\n', '\r\n']:
+ self.best_line_break = line_break
+
+ # Tag prefixes.
+ self.tag_prefixes = None
+
+ # Prepared anchor and tag.
+ self.prepared_anchor = None
+ self.prepared_tag = None
+
+ # Scalar analysis and style.
+ self.analysis = None
+ self.style = None
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def emit(self, event):
+ self.events.append(event)
+ while not self.need_more_events():
+ self.event = self.events.pop(0)
+ self.state()
+ self.event = None
+
+ # In some cases, we wait for the next few events before emitting.
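+ # For example, after a MappingStartEvent up to three further events are
+ # buffered so that check_empty_mapping() and check_simple_key() can look
+ # ahead before any indicator is written.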
+
+ def need_more_events(self):
+ if not self.events:
+ return True
+ event = self.events[0]
+ if isinstance(event, DocumentStartEvent):
+ return self.need_events(1)
+ elif isinstance(event, SequenceStartEvent):
+ return self.need_events(2)
+ elif isinstance(event, MappingStartEvent):
+ return self.need_events(3)
+ else:
+ return False
+
+ def need_events(self, count):
+ level = 0
+ for event in self.events[1:]:
+ if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
+ level += 1
+ elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
+ level -= 1
+ elif isinstance(event, StreamEndEvent):
+ level = -1
+ if level < 0:
+ return False
+ return (len(self.events) < count+1)
+
+ def increase_indent(self, flow=False, indentless=False):
+ self.indents.append(self.indent)
+ if self.indent is None:
+ if flow:
+ self.indent = self.best_indent
+ else:
+ self.indent = 0
+ elif not indentless:
+ self.indent += self.best_indent
+
+ # States.
+
+ # Stream handlers.
+
+ def expect_stream_start(self):
+ if isinstance(self.event, StreamStartEvent):
+ if self.event.encoding and not hasattr(self.stream, 'encoding'):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
+ else:
+ raise EmitterError("expected StreamStartEvent, but got %s"
+ % self.event)
+
+ def expect_nothing(self):
+ raise EmitterError("expected nothing, but got %s" % self.event)
+
+ # Document handlers.
+
+ def expect_first_document_start(self):
+ return self.expect_document_start(first=True)
+
+ def expect_document_start(self, first=False):
+ if isinstance(self.event, DocumentStartEvent):
+ if (self.event.version or self.event.tags) and self.open_ended:
+ self.write_indicator('...', True)
+ self.write_indent()
+ if self.event.version:
+ version_text = self.prepare_version(self.event.version)
+ self.write_version_directive(version_text)
+ self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
+ if self.event.tags:
+ handles = sorted(self.event.tags.keys())
+ for handle in handles:
+ prefix = self.event.tags[handle]
+ self.tag_prefixes[prefix] = handle
+ handle_text = self.prepare_tag_handle(handle)
+ prefix_text = self.prepare_tag_prefix(prefix)
+ self.write_tag_directive(handle_text, prefix_text)
+ implicit = (first and not self.event.explicit and not self.canonical
+ and not self.event.version and not self.event.tags
+ and not self.check_empty_document())
+ if not implicit:
+ self.write_indent()
+ self.write_indicator('---', True)
+ if self.canonical:
+ self.write_indent()
+ self.state = self.expect_document_root
+ elif isinstance(self.event, StreamEndEvent):
+ if self.open_ended:
+ self.write_indicator('...', True)
+ self.write_indent()
+ self.write_stream_end()
+ self.state = self.expect_nothing
+ else:
+ raise EmitterError("expected DocumentStartEvent, but got %s"
+ % self.event)
+
+ def expect_document_end(self):
+ if isinstance(self.event, DocumentEndEvent):
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator('...', True)
+ self.write_indent()
+ self.flush_stream()
+ self.state = self.expect_document_start
+ else:
+ raise EmitterError("expected DocumentEndEvent, but got %s"
+ % self.event)
+
+ def expect_document_root(self):
+ self.states.append(self.expect_document_end)
+ self.expect_node(root=True)
+
+ # Node handlers.
+
+ def expect_node(self, root=False, sequence=False, mapping=False,
+ simple_key=False):
+ self.root_context = root
+ self.sequence_context = sequence
+ self.mapping_context = mapping
+ self.simple_key_context = simple_key
+ if isinstance(self.event, AliasEvent):
+ self.expect_alias()
+ elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
+ self.process_anchor('&')
+ self.process_tag()
+ if isinstance(self.event, ScalarEvent):
+ self.expect_scalar()
+ elif isinstance(self.event, SequenceStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_sequence():
+ self.expect_flow_sequence()
+ else:
+ self.expect_block_sequence()
+ elif isinstance(self.event, MappingStartEvent):
+ if self.flow_level or self.canonical or self.event.flow_style \
+ or self.check_empty_mapping():
+ self.expect_flow_mapping()
+ else:
+ self.expect_block_mapping()
+ else:
+ raise EmitterError("expected NodeEvent, but got %s" % self.event)
+
+ def expect_alias(self):
+ if self.event.anchor is None:
+ raise EmitterError("anchor is not specified for alias")
+ self.process_anchor('*')
+ self.state = self.states.pop()
+
+ def expect_scalar(self):
+ self.increase_indent(flow=True)
+ self.process_scalar()
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+
+ # Flow sequence handlers.
+
+ def expect_flow_sequence(self):
+ self.write_indicator('[', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_sequence_item
+
+ def expect_first_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator(']', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ def expect_flow_sequence_item(self):
+ if isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(',', False)
+ self.write_indent()
+ self.write_indicator(']', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.states.append(self.expect_flow_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Flow mapping handlers.
+
+ def expect_flow_mapping(self):
+ self.write_indicator('{', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_mapping_key
+
+ def expect_first_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ self.write_indicator('}', False)
+ self.state = self.states.pop()
+ else:
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_key(self):
+ if isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.flow_level -= 1
+ if self.canonical:
+ self.write_indicator(',', False)
+ self.write_indent()
+ self.write_indicator('}', False)
+ self.state = self.states.pop()
+ else:
+ self.write_indicator(',', False)
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ if not self.canonical and self.check_simple_key():
+ self.states.append(self.expect_flow_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True)
+ self.states.append(self.expect_flow_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_simple_value(self):
+ self.write_indicator(':', False)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_flow_mapping_value(self):
+ if self.canonical or self.column > self.best_width:
+ self.write_indent()
+ self.write_indicator(':', True)
+ self.states.append(self.expect_flow_mapping_key)
+ self.expect_node(mapping=True)
+
+ # Block sequence handlers.
+
+ def expect_block_sequence(self):
+ indentless = (self.mapping_context and not self.indention)
+ self.increase_indent(flow=False, indentless=indentless)
+ self.state = self.expect_first_block_sequence_item
+
+ def expect_first_block_sequence_item(self):
+ return self.expect_block_sequence_item(first=True)
+
+ def expect_block_sequence_item(self, first=False):
+ if not first and isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ self.write_indicator('-', True, indention=True)
+ self.states.append(self.expect_block_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Block mapping handlers.
+
+ def expect_block_mapping(self):
+ self.increase_indent(flow=False)
+ self.state = self.expect_first_block_mapping_key
+
+ def expect_first_block_mapping_key(self):
+ return self.expect_block_mapping_key(first=True)
+
+ def expect_block_mapping_key(self, first=False):
+ if not first and isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ if self.check_simple_key():
+ self.states.append(self.expect_block_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True, indention=True)
+ self.states.append(self.expect_block_mapping_value)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_simple_value(self):
+ self.write_indicator(':', False)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
+
+ def expect_block_mapping_value(self):
+ self.write_indent()
+ self.write_indicator(':', True, indention=True)
+ self.states.append(self.expect_block_mapping_key)
+ self.expect_node(mapping=True)
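+
+ # Illustrative walk-through (not part of the original source): emitting
+ # the mapping {'a': 1} in block style visits expect_block_mapping ->
+ # expect_first_block_mapping_key -> expect_block_mapping_simple_value ->
+ # expect_block_mapping_key, which then receives MappingEndEvent and pops
+ # back to the parent state, producing the text "a: 1".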
+
+ # Checkers.
+
+ def check_empty_sequence(self):
+ return (isinstance(self.event, SequenceStartEvent) and self.events
+ and isinstance(self.events[0], SequenceEndEvent))
+
+ def check_empty_mapping(self):
+ return (isinstance(self.event, MappingStartEvent) and self.events
+ and isinstance(self.events[0], MappingEndEvent))
+
+ def check_empty_document(self):
+ if not isinstance(self.event, DocumentStartEvent) or not self.events:
+ return False
+ event = self.events[0]
+ return (isinstance(event, ScalarEvent) and event.anchor is None
+ and event.tag is None and event.implicit and event.value == '')
+
+ def check_simple_key(self):
+ length = 0
+ if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ length += len(self.prepared_anchor)
+ if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
+ and self.event.tag is not None:
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(self.event.tag)
+ length += len(self.prepared_tag)
+ if isinstance(self.event, ScalarEvent):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ length += len(self.analysis.scalar)
+ return (length < 128 and (isinstance(self.event, AliasEvent)
+ or (isinstance(self.event, ScalarEvent)
+ and not self.analysis.empty and not self.analysis.multiline)
+ or self.check_empty_sequence() or self.check_empty_mapping()))
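+
+ # Illustration (not in the original source): a short one-line scalar such
+ # as "name" qualifies as a simple key and is emitted as "name: value",
+ # while an empty scalar, a multiline scalar, or a key whose anchor, tag
+ # and content together exceed 128 characters falls back to the explicit
+ # "? key" / ": value" form.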
+
+ # Anchor, Tag, and Scalar processors.
+
+ def process_anchor(self, indicator):
+ if self.event.anchor is None:
+ self.prepared_anchor = None
+ return
+ if self.prepared_anchor is None:
+ self.prepared_anchor = self.prepare_anchor(self.event.anchor)
+ if self.prepared_anchor:
+ self.write_indicator(indicator+self.prepared_anchor, True)
+ self.prepared_anchor = None
+
+ def process_tag(self):
+ tag = self.event.tag
+ if isinstance(self.event, ScalarEvent):
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ if ((not self.canonical or tag is None) and
+ ((self.style == '' and self.event.implicit[0])
+ or (self.style != '' and self.event.implicit[1]))):
+ self.prepared_tag = None
+ return
+ if self.event.implicit[0] and tag is None:
+ tag = '!'
+ self.prepared_tag = None
+ else:
+ if (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
+ if tag is None:
+ raise EmitterError("tag is not specified")
+ if self.prepared_tag is None:
+ self.prepared_tag = self.prepare_tag(tag)
+ if self.prepared_tag:
+ self.write_indicator(self.prepared_tag, True)
+ self.prepared_tag = None
+
+ def choose_scalar_style(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.event.style == '"' or self.canonical:
+ return '"'
+ if not self.event.style and self.event.implicit[0]:
+ if (not (self.simple_key_context and
+ (self.analysis.empty or self.analysis.multiline))
+ and (self.flow_level and self.analysis.allow_flow_plain
+ or (not self.flow_level and self.analysis.allow_block_plain))):
+ return ''
+ if self.event.style and self.event.style in '|>':
+ if (not self.flow_level and not self.simple_key_context
+ and self.analysis.allow_block):
+ return self.event.style
+ if not self.event.style or self.event.style == '\'':
+ if (self.analysis.allow_single_quoted and
+ not (self.simple_key_context and self.analysis.multiline)):
+ return '\''
+ return '"'
+
+ def process_scalar(self):
+ if self.analysis is None:
+ self.analysis = self.analyze_scalar(self.event.value)
+ if self.style is None:
+ self.style = self.choose_scalar_style()
+ split = (not self.simple_key_context)
+ #if self.analysis.multiline and split \
+ # and (not self.style or self.style in '\'\"'):
+ # self.write_indent()
+ if self.style == '"':
+ self.write_double_quoted(self.analysis.scalar, split)
+ elif self.style == '\'':
+ self.write_single_quoted(self.analysis.scalar, split)
+ elif self.style == '>':
+ self.write_folded(self.analysis.scalar)
+ elif self.style == '|':
+ self.write_literal(self.analysis.scalar)
+ else:
+ self.write_plain(self.analysis.scalar, split)
+ self.analysis = None
+ self.style = None
+
+ # Analyzers.
+
+ def prepare_version(self, version):
+ major, minor = version
+ if major != 1:
+ raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+ return '%d.%d' % (major, minor)
+
+ def prepare_tag_handle(self, handle):
+ if not handle:
+ raise EmitterError("tag handle must not be empty")
+ if handle[0] != '!' or handle[-1] != '!':
+ raise EmitterError("tag handle must start and end with '!': %r" % handle)
+ for ch in handle[1:-1]:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the tag handle: %r"
+ % (ch, handle))
+ return handle
+
+ def prepare_tag_prefix(self, prefix):
+ if not prefix:
+ raise EmitterError("tag prefix must not be empty")
+ chunks = []
+ start = end = 0
+ if prefix[0] == '!':
+ end = 1
+ while end < len(prefix):
+ ch = prefix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?!:@&=+$,_.~*\'()[]':
+ end += 1
+ else:
+ if start < end:
+ chunks.append(prefix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ # Iterating a bytes object in Python 3 yields ints, so the byte
+ # value is used directly; ord() here would raise TypeError.
+ for ch in data:
+ chunks.append('%%%02X' % ch)
+ if start < end:
+ chunks.append(prefix[start:end])
+ return ''.join(chunks)
+
+ def prepare_tag(self, tag):
+ if not tag:
+ raise EmitterError("tag must not be empty")
+ if tag == '!':
+ return tag
+ handle = None
+ suffix = tag
+ prefixes = sorted(self.tag_prefixes.keys())
+ for prefix in prefixes:
+ if tag.startswith(prefix) \
+ and (prefix == '!' or len(prefix) < len(tag)):
+ handle = self.tag_prefixes[prefix]
+ suffix = tag[len(prefix):]
+ chunks = []
+ start = end = 0
+ while end < len(suffix):
+ ch = suffix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == '!' and handle != '!'):
+ end += 1
+ else:
+ if start < end:
+ chunks.append(suffix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ # As above: iterating bytes yields ints in Python 3, so no ord().
+ for ch in data:
+ chunks.append('%%%02X' % ch)
+ if start < end:
+ chunks.append(suffix[start:end])
+ suffix_text = ''.join(chunks)
+ if handle:
+ return '%s%s' % (handle, suffix_text)
+ else:
+ return '!<%s>' % suffix_text
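+
+ # Example (illustrative): with the default prefixes {'!': '!',
+ # 'tag:yaml.org,2002:': '!!'}, prepare_tag('tag:yaml.org,2002:str')
+ # returns '!!str', while a tag matching no prefix, say 'x-private:foo',
+ # is emitted verbatim as '!<x-private:foo>'.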
+
+ def prepare_anchor(self, anchor):
+ if not anchor:
+ raise EmitterError("anchor must not be empty")
+ for ch in anchor:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the anchor: %r"
+ % (ch, anchor))
+ return anchor
+
+ def analyze_scalar(self, scalar):
+
+ # Empty scalar is a special case.
+ if not scalar:
+ return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
+ allow_flow_plain=False, allow_block_plain=True,
+ allow_single_quoted=True, allow_double_quoted=True,
+ allow_block=False)
+
+ # Indicators and special characters.
+ block_indicators = False
+ flow_indicators = False
+ line_breaks = False
+ special_characters = False
+
+ # Important whitespace combinations.
+ leading_space = False
+ leading_break = False
+ trailing_space = False
+ trailing_break = False
+ break_space = False
+ space_break = False
+
+ # Check document indicators.
+ if scalar.startswith('---') or scalar.startswith('...'):
+ block_indicators = True
+ flow_indicators = True
+
+ # First character or preceded by a whitespace.
+ preceded_by_whitespace = True
+
+ # Last character or followed by a whitespace.
+ followed_by_whitespace = (len(scalar) == 1 or
+ scalar[1] in '\0 \t\r\n\x85\u2028\u2029')
+
+ # The previous character is a space.
+ previous_space = False
+
+ # The previous character is a break.
+ previous_break = False
+
+ index = 0
+ while index < len(scalar):
+ ch = scalar[index]
+
+ # Check for indicators.
+ if index == 0:
+ # Leading indicators are special characters.
+ if ch in '#,[]{}&*!|>\'\"%@`':
+ flow_indicators = True
+ block_indicators = True
+ if ch in '?:':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == '-' and followed_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+ else:
+ # Some characters act as indicators even in the middle of a scalar.
+ if ch in ',?[]{}':
+ flow_indicators = True
+ if ch == ':':
+ flow_indicators = True
+ if followed_by_whitespace:
+ block_indicators = True
+ if ch == '#' and preceded_by_whitespace:
+ flow_indicators = True
+ block_indicators = True
+
+ # Check for line breaks, special, and unicode characters.
+ if ch in '\n\x85\u2028\u2029':
+ line_breaks = True
+ if not (ch == '\n' or '\x20' <= ch <= '\x7E'):
+ if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF'
+ or '\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF':
+ unicode_characters = True
+ if not self.allow_unicode:
+ special_characters = True
+ else:
+ special_characters = True
+
+ # Detect important whitespace combinations.
+ if ch == ' ':
+ if index == 0:
+ leading_space = True
+ if index == len(scalar)-1:
+ trailing_space = True
+ if previous_break:
+ break_space = True
+ previous_space = True
+ previous_break = False
+ elif ch in '\n\x85\u2028\u2029':
+ if index == 0:
+ leading_break = True
+ if index == len(scalar)-1:
+ trailing_break = True
+ if previous_space:
+ space_break = True
+ previous_space = False
+ previous_break = True
+ else:
+ previous_space = False
+ previous_break = False
+
+ # Prepare for the next character.
+ index += 1
+ preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029')
+ followed_by_whitespace = (index+1 >= len(scalar) or
+ scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029')
+
+ # Let's decide what styles are allowed.
+ allow_flow_plain = True
+ allow_block_plain = True
+ allow_single_quoted = True
+ allow_double_quoted = True
+ allow_block = True
+
+ # Leading and trailing whitespaces are bad for plain scalars.
+ if (leading_space or leading_break
+ or trailing_space or trailing_break):
+ allow_flow_plain = allow_block_plain = False
+
+ # We do not permit trailing spaces for block scalars.
+ if trailing_space:
+ allow_block = False
+
+ # Spaces at the beginning of a new line are only acceptable for block
+ # scalars.
+ if break_space:
+ allow_flow_plain = allow_block_plain = allow_single_quoted = False
+
+ # Spaces followed by breaks, as well as special characters, are only
+ # allowed for double-quoted scalars.
+ if space_break or special_characters:
+ allow_flow_plain = allow_block_plain = \
+ allow_single_quoted = allow_block = False
+
+ # Although the plain scalar writer supports breaks, we never emit
+ # multiline plain scalars.
+ if line_breaks:
+ allow_flow_plain = allow_block_plain = False
+
+ # Flow indicators are forbidden for flow plain scalars.
+ if flow_indicators:
+ allow_flow_plain = False
+
+ # Block indicators are forbidden for block plain scalars.
+ if block_indicators:
+ allow_block_plain = False
+
+ return ScalarAnalysis(scalar=scalar,
+ empty=False, multiline=line_breaks,
+ allow_flow_plain=allow_flow_plain,
+ allow_block_plain=allow_block_plain,
+ allow_single_quoted=allow_single_quoted,
+ allow_double_quoted=allow_double_quoted,
+ allow_block=allow_block)
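+
+ # A few sample results (illustrative): analyze_scalar('hello') allows
+ # every style; analyze_scalar(' hello') forbids both plain forms because
+ # of the leading space; analyze_scalar('a\nb') is multiline, so the
+ # emitter will never write it as a plain scalar.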
+
+ # Writers.
+
+ def flush_stream(self):
+ if hasattr(self.stream, 'flush'):
+ self.stream.flush()
+
+ def write_stream_start(self):
+ # Write BOM if needed.
+ if self.encoding and self.encoding.startswith('utf-16'):
+ self.stream.write('\uFEFF'.encode(self.encoding))
+
+ def write_stream_end(self):
+ self.flush_stream()
+
+ def write_indicator(self, indicator, need_whitespace,
+ whitespace=False, indention=False):
+ if self.whitespace or not need_whitespace:
+ data = indicator
+ else:
+ data = ' '+indicator
+ self.whitespace = whitespace
+ self.indention = self.indention and indention
+ self.column += len(data)
+ self.open_ended = False
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_indent(self):
+ indent = self.indent or 0
+ if not self.indention or self.column > indent \
+ or (self.column == indent and not self.whitespace):
+ self.write_line_break()
+ if self.column < indent:
+ self.whitespace = True
+ data = ' '*(indent-self.column)
+ self.column = indent
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_line_break(self, data=None):
+ if data is None:
+ data = self.best_line_break
+ self.whitespace = True
+ self.indention = True
+ self.line += 1
+ self.column = 0
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_version_directive(self, version_text):
+ data = '%%YAML %s' % version_text
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ def write_tag_directive(self, handle_text, prefix_text):
+ data = '%%TAG %s %s' % (handle_text, prefix_text)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ # Scalar streams.
+
+ def write_single_quoted(self, text, split=True):
+ self.write_indicator('\'', True)
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch is None or ch != ' ':
+ if start+1 == end and self.column > self.best_width and split \
+ and start != 0 and end != len(text):
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ if text[start] == '\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'':
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch == '\'':
+ data = '\'\''
+ self.column += 2
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end + 1
+ if ch is not None:
+ spaces = (ch == ' ')
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
+ self.write_indicator('\'', False)
+
+ ESCAPE_REPLACEMENTS = {
+ '\0': '0',
+ '\x07': 'a',
+ '\x08': 'b',
+ '\x09': 't',
+ '\x0A': 'n',
+ '\x0B': 'v',
+ '\x0C': 'f',
+ '\x0D': 'r',
+ '\x1B': 'e',
+ '\"': '\"',
+ '\\': '\\',
+ '\x85': 'N',
+ '\xA0': '_',
+ '\u2028': 'L',
+ '\u2029': 'P',
+ }
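+
+ # Illustration (not in the original source): in double-quoted style a tab
+ # becomes '\t' via this table, while characters without a short escape
+ # fall through to the '\xXX', '\uXXXX' or '\UXXXXXXXX' forms produced by
+ # write_double_quoted() below.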
+
+ def write_double_quoted(self, text, split=True):
+ self.write_indicator('"', True)
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \
+ or not ('\x20' <= ch <= '\x7E'
+ or (self.allow_unicode
+ and ('\xA0' <= ch <= '\uD7FF'
+ or '\uE000' <= ch <= '\uFFFD'))):
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ if ch in self.ESCAPE_REPLACEMENTS:
+ data = '\\'+self.ESCAPE_REPLACEMENTS[ch]
+ elif ch <= '\xFF':
+ data = '\\x%02X' % ord(ch)
+ elif ch <= '\uFFFF':
+ data = '\\u%04X' % ord(ch)
+ else:
+ data = '\\U%08X' % ord(ch)
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end+1
+ if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \
+ and self.column+(end-start) > self.best_width and split:
+ data = text[start:end]+'\\'
+ if start < end:
+ start = end
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ if text[start] == ' ':
+ data = '\\'
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ end += 1
+ self.write_indicator('"', False)
+
+ def determine_block_hints(self, text):
+ hints = ''
+ if text:
+ if text[0] in ' \n\x85\u2028\u2029':
+ hints += str(self.best_indent)
+ if text[-1] not in '\n\x85\u2028\u2029':
+ hints += '-'
+ elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
+ hints += '+'
+ return hints
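+
+ # Examples (illustrative): text ending without a newline gets the '-'
+ # chomping hint; text ending in two or more line breaks gets '+'; text
+ # starting with a space or break also gets an explicit indentation hint,
+ # e.g. '|2-' for the default best_indent of 2.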
+
+ def write_folded(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator('>'+hints, True)
+ if hints[-1:] == '+':
+ self.open_ended = True
+ self.write_line_break()
+ leading_space = True
+ spaces = False
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ if not leading_space and ch is not None and ch != ' ' \
+ and text[start] == '\n':
+ self.write_line_break()
+ leading_space = (ch == ' ')
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ elif spaces:
+ if ch != ' ':
+ if start+1 == end and self.column > self.best_width:
+ self.write_indent()
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in '\n\x85\u2028\u2029')
+ spaces = (ch == ' ')
+ end += 1
+
+ def write_literal(self, text):
+ hints = self.determine_block_hints(text)
+ self.write_indicator('|'+hints, True)
+ if hints[-1:] == '+':
+ self.open_ended = True
+ self.write_line_break()
+ breaks = True
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if breaks:
+ if ch is None or ch not in '\n\x85\u2028\u2029':
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ if ch is not None:
+ self.write_indent()
+ start = end
+ else:
+ if ch is None or ch in '\n\x85\u2028\u2029':
+ data = text[start:end]
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ if ch is None:
+ self.write_line_break()
+ start = end
+ if ch is not None:
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
+
+ def write_plain(self, text, split=True):
+ if self.root_context:
+ self.open_ended = True
+ if not text:
+ return
+ if not self.whitespace:
+ data = ' '
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.whitespace = False
+ self.indention = False
+ spaces = False
+ breaks = False
+ start = end = 0
+ while end <= len(text):
+ ch = None
+ if end < len(text):
+ ch = text[end]
+ if spaces:
+ if ch != ' ':
+ if start+1 == end and self.column > self.best_width and split:
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ else:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ elif breaks:
+ if ch not in '\n\x85\u2028\u2029':
+ if text[start] == '\n':
+ self.write_line_break()
+ for br in text[start:end]:
+ if br == '\n':
+ self.write_line_break()
+ else:
+ self.write_line_break(br)
+ self.write_indent()
+ self.whitespace = False
+ self.indention = False
+ start = end
+ else:
+ if ch is None or ch in ' \n\x85\u2028\u2029':
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
+ if ch is not None:
+ spaces = (ch == ' ')
+ breaks = (ch in '\n\x85\u2028\u2029')
+ end += 1
+
diff --git a/python/pyyaml/lib3/yaml/error.py b/python/pyyaml/lib3/yaml/error.py
new file mode 100644
index 000000000..b796b4dc5
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/error.py
@@ -0,0 +1,75 @@
+
+__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
+
+class Mark:
+
+ def __init__(self, name, index, line, column, buffer, pointer):
+ self.name = name
+ self.index = index
+ self.line = line
+ self.column = column
+ self.buffer = buffer
+ self.pointer = pointer
+
+ def get_snippet(self, indent=4, max_length=75):
+ if self.buffer is None:
+ return None
+ head = ''
+ start = self.pointer
+ while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029':
+ start -= 1
+ if self.pointer-start > max_length/2-1:
+ head = ' ... '
+ start += 5
+ break
+ tail = ''
+ end = self.pointer
+ while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
+ end += 1
+ if end-self.pointer > max_length/2-1:
+ tail = ' ... '
+ end -= 5
+ break
+ snippet = self.buffer[start:end]
+ return ' '*indent + head + snippet + tail + '\n' \
+ + ' '*(indent+self.pointer-start+len(head)) + '^'
+
+ def __str__(self):
+ snippet = self.get_snippet()
+ where = " in \"%s\", line %d, column %d" \
+ % (self.name, self.line+1, self.column+1)
+ if snippet is not None:
+ where += ":\n"+snippet
+ return where
+
+class YAMLError(Exception):
+ pass
+
+class MarkedYAMLError(YAMLError):
+
+ def __init__(self, context=None, context_mark=None,
+ problem=None, problem_mark=None, note=None):
+ self.context = context
+ self.context_mark = context_mark
+ self.problem = problem
+ self.problem_mark = problem_mark
+ self.note = note
+
+ def __str__(self):
+ lines = []
+ if self.context is not None:
+ lines.append(self.context)
+ if self.context_mark is not None \
+ and (self.problem is None or self.problem_mark is None
+ or self.context_mark.name != self.problem_mark.name
+ or self.context_mark.line != self.problem_mark.line
+ or self.context_mark.column != self.problem_mark.column):
+ lines.append(str(self.context_mark))
+ if self.problem is not None:
+ lines.append(self.problem)
+ if self.problem_mark is not None:
+ lines.append(str(self.problem_mark))
+ if self.note is not None:
+ lines.append(self.note)
+ return '\n'.join(lines)
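+
+ # Illustrative rendering (not part of the original file): a parser error
+ # typically prints as
+ #
+ #   while parsing a block mapping
+ #     in "<unicode string>", line 1, column 1
+ #   expected <block end>, but found ','
+ #     in "<unicode string>", line 3, column 5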
+
diff --git a/python/pyyaml/lib3/yaml/events.py b/python/pyyaml/lib3/yaml/events.py
new file mode 100644
index 000000000..f79ad389c
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/events.py
@@ -0,0 +1,86 @@
+
+# Abstract classes.
+
+class Event(object):
+ def __init__(self, start_mark=None, end_mark=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
+ if hasattr(self, key)]
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+class NodeEvent(Event):
+ def __init__(self, anchor, start_mark=None, end_mark=None):
+ self.anchor = anchor
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class CollectionStartEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
+ flow_style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class CollectionEndEvent(Event):
+ pass
+
+# Implementations.
+
+class StreamStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None, encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndEvent(Event):
+ pass
+
+class DocumentStartEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None, version=None, tags=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+ self.version = version
+ self.tags = tags
+
+class DocumentEndEvent(Event):
+ def __init__(self, start_mark=None, end_mark=None,
+ explicit=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.explicit = explicit
+
+class AliasEvent(NodeEvent):
+ pass
+
+class ScalarEvent(NodeEvent):
+ def __init__(self, anchor, tag, implicit, value,
+ start_mark=None, end_mark=None, style=None):
+ self.anchor = anchor
+ self.tag = tag
+ self.implicit = implicit
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class SequenceStartEvent(CollectionStartEvent):
+ pass
+
+class SequenceEndEvent(CollectionEndEvent):
+ pass
+
+class MappingStartEvent(CollectionStartEvent):
+ pass
+
+class MappingEndEvent(CollectionEndEvent):
+ pass
+
diff --git a/python/pyyaml/lib3/yaml/loader.py b/python/pyyaml/lib3/yaml/loader.py
new file mode 100644
index 000000000..08c8f01b3
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/loader.py
@@ -0,0 +1,40 @@
+
+__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
+
+from .reader import *
+from .scanner import *
+from .parser import *
+from .composer import *
+from .constructor import *
+from .resolver import *
+
+class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
+
+ def __init__(self, stream):
+ Reader.__init__(self, stream)
+ Scanner.__init__(self)
+ Parser.__init__(self)
+ Composer.__init__(self)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
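+
+# Usage note (illustrative): these classes are normally passed to the
+# top-level API rather than instantiated directly, e.g.
+#
+#   yaml.load(stream, Loader=SafeLoader)
+#
+# Each base class contributes one stage of the pipeline:
+# bytes/str -> tokens -> events -> nodes -> native Python objects.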
+
diff --git a/python/pyyaml/lib3/yaml/nodes.py b/python/pyyaml/lib3/yaml/nodes.py
new file mode 100644
index 000000000..c4f070c41
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/nodes.py
@@ -0,0 +1,49 @@
+
+class Node(object):
+ def __init__(self, tag, value, start_mark, end_mark):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ value = self.value
+ #if isinstance(value, list):
+ # if len(value) == 0:
+ # value = '<empty>'
+ # elif len(value) == 1:
+ # value = '<1 item>'
+ # else:
+ # value = '<%d items>' % len(value)
+ #else:
+ # if len(value) > 75:
+ # value = repr(value[:70]+u' ... ')
+ # else:
+ # value = repr(value)
+ value = repr(value)
+ return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
+
+class ScalarNode(Node):
+ id = 'scalar'
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
+class CollectionNode(Node):
+ def __init__(self, tag, value,
+ start_mark=None, end_mark=None, flow_style=None):
+ self.tag = tag
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.flow_style = flow_style
+
+class SequenceNode(CollectionNode):
+ id = 'sequence'
+
+class MappingNode(CollectionNode):
+ id = 'mapping'
+
diff --git a/python/pyyaml/lib3/yaml/parser.py b/python/pyyaml/lib3/yaml/parser.py
new file mode 100644
index 000000000..13a5995d2
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+# ALIAS
+# | properties (block_content | indentless_block_sequence)?
+# | block_content
+# | indentless_block_sequence
+# block_node ::= ALIAS
+# | properties block_content?
+# | block_content
+# flow_node ::= ALIAS
+# | properties flow_content?
+# | flow_content
+# properties ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content ::= block_collection | flow_collection | SCALAR
+# flow_content ::= flow_collection | SCALAR
+# block_collection ::= block_sequence | block_mapping
+# flow_collection ::= flow_sequence | flow_mapping
+# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+# block_mapping ::= BLOCK-MAPPING_START
+# ((KEY block_node_or_indentless_sequence?)?
+# (VALUE block_node_or_indentless_sequence?)?)*
+# BLOCK-END
+# flow_sequence ::= FLOW-SEQUENCE-START
+# (flow_sequence_entry FLOW-ENTRY)*
+# flow_sequence_entry?
+# FLOW-SEQUENCE-END
+# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping ::= FLOW-MAPPING-START
+# (flow_mapping_entry FLOW-ENTRY)*
+# flow_mapping_entry?
+# FLOW-MAPPING-END
+# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
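+#
+# As a quick illustration (not part of the original grammar notes), parsing
+# the document "- a\n- b" produces the event sequence
+#
+#   STREAM-START DOCUMENT-START SEQUENCE-START SCALAR(a) SCALAR(b)
+#   SEQUENCE-END DOCUMENT-END STREAM-END
+#
+# following the derivation stream -> implicit_document -> block_node
+# -> block_collection -> block_sequence.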
+
+__all__ = ['Parser', 'ParserError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+from .events import *
+from .scanner import *
+
+class ParserError(MarkedYAMLError):
+ pass
+
+class Parser:
+ # Since writing a recursive descent parser is a straightforward task, we
+ # do not give many comments here.
+
+ DEFAULT_TAGS = {
+ '!': '!',
+ '!!': 'tag:yaml.org,2002:',
+ }
+
+ def __init__(self):
+ self.current_event = None
+ self.yaml_version = None
+ self.tag_handles = {}
+ self.states = []
+ self.marks = []
+ self.state = self.parse_stream_start
+
+ def dispose(self):
+ # Reset the state attributes (to clear self-references)
+ self.states = []
+ self.state = None
+
+ def check_event(self, *choices):
+ # Check the type of the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ if self.current_event is not None:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.current_event, choice):
+ return True
+ return False
+
+ def peek_event(self):
+ # Get the next event.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ return self.current_event
+
+ def get_event(self):
+ # Get the next event and proceed further.
+ if self.current_event is None:
+ if self.state:
+ self.current_event = self.state()
+ value = self.current_event
+ self.current_event = None
+ return value
+
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # implicit_document ::= block_node DOCUMENT-END*
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+
+ def parse_stream_start(self):
+
+ # Parse the stream start.
+ token = self.get_token()
+ event = StreamStartEvent(token.start_mark, token.end_mark,
+ encoding=token.encoding)
+
+ # Prepare the next state.
+ self.state = self.parse_implicit_document_start
+
+ return event
+
+ def parse_implicit_document_start(self):
+
+ # Parse an implicit document.
+ if not self.check_token(DirectiveToken, DocumentStartToken,
+ StreamEndToken):
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
+
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
+
+ return event
+
+ else:
+ return self.parse_document_start()
+
+ def parse_document_start(self):
+
+ # Parse any extra document end indicators.
+ while self.check_token(DocumentEndToken):
+ self.get_token()
+
+ # Parse an explicit document.
+ if not self.check_token(StreamEndToken):
+ token = self.peek_token()
+ start_mark = token.start_mark
+ version, tags = self.process_directives()
+ if not self.check_token(DocumentStartToken):
+ raise ParserError(None, None,
+ "expected '<document start>', but found %r"
+ % self.peek_token().id,
+ self.peek_token().start_mark)
+ token = self.get_token()
+ end_mark = token.end_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=True, version=version, tags=tags)
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_document_content
+ else:
+ # Parse the end of the stream.
+ token = self.get_token()
+ event = StreamEndEvent(token.start_mark, token.end_mark)
+ assert not self.states
+ assert not self.marks
+ self.state = None
+ return event
+
+ def parse_document_end(self):
+
+ # Parse the document end.
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ explicit = False
+ if self.check_token(DocumentEndToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ explicit = True
+ event = DocumentEndEvent(start_mark, end_mark,
+ explicit=explicit)
+
+ # Prepare the next state.
+ self.state = self.parse_document_start
+
+ return event
+
+ def parse_document_content(self):
+ if self.check_token(DirectiveToken,
+ DocumentStartToken, DocumentEndToken, StreamEndToken):
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
+ else:
+ return self.parse_block_node()
+
+ def process_directives(self):
+ self.yaml_version = None
+ self.tag_handles = {}
+ while self.check_token(DirectiveToken):
+ token = self.get_token()
+ if token.name == 'YAML':
+ if self.yaml_version is not None:
+ raise ParserError(None, None,
+ "found duplicate YAML directive", token.start_mark)
+ major, minor = token.value
+ if major != 1:
+ raise ParserError(None, None,
+ "found incompatible YAML document (version 1.* is required)",
+ token.start_mark)
+ self.yaml_version = token.value
+ elif token.name == 'TAG':
+ handle, prefix = token.value
+ if handle in self.tag_handles:
+ raise ParserError(None, None,
+ "duplicate tag handle %r" % handle,
+ token.start_mark)
+ self.tag_handles[handle] = prefix
+ if self.tag_handles:
+ value = self.yaml_version, self.tag_handles.copy()
+ else:
+ value = self.yaml_version, None
+ for key in self.DEFAULT_TAGS:
+ if key not in self.tag_handles:
+ self.tag_handles[key] = self.DEFAULT_TAGS[key]
+ return value
+
+ # block_node_or_indentless_sequence ::= ALIAS
+ # | properties (block_content | indentless_block_sequence)?
+ # | block_content
+ # | indentless_block_sequence
+ # block_node ::= ALIAS
+ # | properties block_content?
+ # | block_content
+ # flow_node ::= ALIAS
+ # | properties flow_content?
+ # | flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # flow_collection ::= flow_sequence | flow_mapping
+
+ def parse_block_node(self):
+ return self.parse_node(block=True)
+
+ def parse_flow_node(self):
+ return self.parse_node()
+
+ def parse_block_node_or_indentless_sequence(self):
+ return self.parse_node(block=True, indentless_sequence=True)
+
+ def parse_node(self, block=False, indentless_sequence=False):
+ if self.check_token(AliasToken):
+ token = self.get_token()
+ event = AliasEvent(token.value, token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ else:
+ anchor = None
+ tag = None
+ start_mark = end_mark = tag_mark = None
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ start_mark = token.start_mark
+ end_mark = token.end_mark
+ anchor = token.value
+ if self.check_token(TagToken):
+ token = self.get_token()
+ tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ elif self.check_token(TagToken):
+ token = self.get_token()
+ start_mark = tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ if self.check_token(AnchorToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ anchor = token.value
+ if tag is not None:
+ handle, suffix = tag
+ if handle is not None:
+ if handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle,
+ tag_mark)
+ tag = self.tag_handles[handle]+suffix
+ else:
+ tag = suffix
+ #if tag == '!':
+ # raise ParserError("while parsing a node", start_mark,
+ # "found non-specific tag '!'", tag_mark,
+ # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+ if start_mark is None:
+ start_mark = end_mark = self.peek_token().start_mark
+ event = None
+ implicit = (tag is None or tag == '!')
+ if indentless_sequence and self.check_token(BlockEntryToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark)
+ self.state = self.parse_indentless_sequence_entry
+ else:
+ if self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == '!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
+ else:
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), '',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ if block:
+ node = 'block'
+ else:
+ node = 'flow'
+ token = self.peek_token()
+ raise ParserError("while parsing a %s node" % node, start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark)
+ return event
+
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+ def parse_block_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_sequence_entry()
+
+ def parse_block_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken, BlockEndToken):
+ self.states.append(self.parse_block_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_block_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block collection", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+ def parse_indentless_sequence_entry(self):
+ if self.check_token(BlockEntryToken):
+ token = self.get_token()
+ if not self.check_token(BlockEntryToken,
+ KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_indentless_sequence_entry)
+ return self.parse_block_node()
+ else:
+ self.state = self.parse_indentless_sequence_entry
+ return self.process_empty_scalar(token.end_mark)
+ token = self.peek_token()
+ event = SequenceEndEvent(token.start_mark, token.start_mark)
+ self.state = self.states.pop()
+ return event
+
+ # block_mapping ::= BLOCK-MAPPING_START
+ # ((KEY block_node_or_indentless_sequence?)?
+ # (VALUE block_node_or_indentless_sequence?)?)*
+ # BLOCK-END
+
+ def parse_block_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_block_mapping_key()
+
+ def parse_block_mapping_key(self):
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_value)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ if not self.check_token(BlockEndToken):
+ token = self.peek_token()
+ raise ParserError("while parsing a block mapping", self.marks[-1],
+ "expected <block end>, but found %r" % token.id, token.start_mark)
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_block_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+ self.states.append(self.parse_block_mapping_key)
+ return self.parse_block_node_or_indentless_sequence()
+ else:
+ self.state = self.parse_block_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_block_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ # flow_sequence ::= FLOW-SEQUENCE-START
+ # (flow_sequence_entry FLOW-ENTRY)*
+ # flow_sequence_entry?
+ # FLOW-SEQUENCE-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ #
+ # Note that while the production rules for flow_sequence_entry and
+ # flow_mapping_entry are identical, their interpretations differ.
+ # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+ # generates an inline mapping (set syntax).
+
+ def parse_flow_sequence_first_entry(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_sequence_entry(first=True)
+
+ def parse_flow_sequence_entry(self, first=False):
+ if not self.check_token(FlowSequenceEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow sequence", self.marks[-1],
+ "expected ',' or ']', but got %r" % token.id, token.start_mark)
+
+ if self.check_token(KeyToken):
+ token = self.peek_token()
+ event = MappingStartEvent(None, None, True,
+ token.start_mark, token.end_mark,
+ flow_style=True)
+ self.state = self.parse_flow_sequence_entry_mapping_key
+ return event
+ elif not self.check_token(FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = SequenceEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_sequence_entry_mapping_key(self):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+
+ def parse_flow_sequence_entry_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
+ self.states.append(self.parse_flow_sequence_entry_mapping_end)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_sequence_entry_mapping_end
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_sequence_entry_mapping_end(self):
+ self.state = self.parse_flow_sequence_entry
+ token = self.peek_token()
+ return MappingEndEvent(token.start_mark, token.start_mark)
+
+ # flow_mapping ::= FLOW-MAPPING-START
+ # (flow_mapping_entry FLOW-ENTRY)*
+ # flow_mapping_entry?
+ # FLOW-MAPPING-END
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ def parse_flow_mapping_first_key(self):
+ token = self.get_token()
+ self.marks.append(token.start_mark)
+ return self.parse_flow_mapping_key(first=True)
+
+ def parse_flow_mapping_key(self, first=False):
+ if not self.check_token(FlowMappingEndToken):
+ if not first:
+ if self.check_token(FlowEntryToken):
+ self.get_token()
+ else:
+ token = self.peek_token()
+ raise ParserError("while parsing a flow mapping", self.marks[-1],
+ "expected ',' or '}', but got %r" % token.id, token.start_mark)
+ if self.check_token(KeyToken):
+ token = self.get_token()
+ if not self.check_token(ValueToken,
+ FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_value)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_value
+ return self.process_empty_scalar(token.end_mark)
+ elif not self.check_token(FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_empty_value)
+ return self.parse_flow_node()
+ token = self.get_token()
+ event = MappingEndEvent(token.start_mark, token.end_mark)
+ self.state = self.states.pop()
+ self.marks.pop()
+ return event
+
+ def parse_flow_mapping_value(self):
+ if self.check_token(ValueToken):
+ token = self.get_token()
+ if not self.check_token(FlowEntryToken, FlowMappingEndToken):
+ self.states.append(self.parse_flow_mapping_key)
+ return self.parse_flow_node()
+ else:
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(token.end_mark)
+ else:
+ self.state = self.parse_flow_mapping_key
+ token = self.peek_token()
+ return self.process_empty_scalar(token.start_mark)
+
+ def parse_flow_mapping_empty_value(self):
+ self.state = self.parse_flow_mapping_key
+ return self.process_empty_scalar(self.peek_token().start_mark)
+
+ def process_empty_scalar(self, mark):
+ return ScalarEvent(None, None, (True, False), '', mark, mark)
+
diff --git a/python/pyyaml/lib3/yaml/reader.py b/python/pyyaml/lib3/yaml/reader.py
new file mode 100644
index 000000000..f70e920f4
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/reader.py
@@ -0,0 +1,192 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code here.
+#
+# We define two classes here.
+#
+# Mark(name, index, line, column, buffer, pointer)
+# It's just a record, and its only use is producing nice error messages.
+# The parser does not use it for any other purpose.
+#
+# Reader(stream)
+# Reader determines the encoding of the stream and converts it to unicode.
+# Reader provides the following methods and attributes:
+# reader.peek(index=0) - return the character `index` positions ahead
+# reader.prefix(length=1) - return the next `length` characters
+# reader.forward(length=1) - move the current position forward by `length` characters
+# reader.index - the index of the current character
+# reader.line, reader.column - the line and the column of the current character
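+#
+# Example (illustrative):
+#   reader = Reader(b'- a\n- b\n')   # encoding is detected as utf-8
+#   reader.peek()      -> '-'
+#   reader.forward(2)
+#   reader.peek()      -> 'a'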
+
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+ def __init__(self, name, position, character, encoding, reason):
+ self.name = name
+ self.character = character
+ self.position = position
+ self.encoding = encoding
+ self.reason = reason
+
+ def __str__(self):
+ if isinstance(self.character, bytes):
+ return "'%s' codec can't decode byte #x%02x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.encoding, ord(self.character), self.reason,
+ self.name, self.position)
+ else:
+ return "unacceptable character #x%04x: %s\n" \
+ " in \"%s\", position %d" \
+ % (self.character, self.reason,
+ self.name, self.position)
+
+class Reader(object):
+ # Reader:
+ # - determines the data encoding and converts it to a unicode string,
+ # - checks if characters are in allowed range,
+ # - adds '\0' to the end.
+
+ # Reader accepts
+ # - a `bytes` object,
+ # - a `str` object,
+ # - a file-like object with its `read` method returning `bytes`,
+ # - a file-like object with its `read` method returning `str`.
+
+ # Yeah, it's ugly and slow.
+
+ def __init__(self, stream):
+ self.name = None
+ self.stream = None
+ self.stream_pointer = 0
+ self.eof = True
+ self.buffer = ''
+ self.pointer = 0
+ self.raw_buffer = None
+ self.raw_decode = None
+ self.encoding = None
+ self.index = 0
+ self.line = 0
+ self.column = 0
+ if isinstance(stream, str):
+ self.name = "<unicode string>"
+ self.check_printable(stream)
+ self.buffer = stream+'\0'
+ elif isinstance(stream, bytes):
+ self.name = "<byte string>"
+ self.raw_buffer = stream
+ self.determine_encoding()
+ else:
+ self.stream = stream
+ self.name = getattr(stream, 'name', "<file>")
+ self.eof = False
+ self.raw_buffer = None
+ self.determine_encoding()
+
+ def peek(self, index=0):
+ try:
+ return self.buffer[self.pointer+index]
+ except IndexError:
+ self.update(index+1)
+ return self.buffer[self.pointer+index]
+
+ def prefix(self, length=1):
+ if self.pointer+length >= len(self.buffer):
+ self.update(length)
+ return self.buffer[self.pointer:self.pointer+length]
+
+ def forward(self, length=1):
+ if self.pointer+length+1 >= len(self.buffer):
+ self.update(length+1)
+ while length:
+ ch = self.buffer[self.pointer]
+ self.pointer += 1
+ self.index += 1
+ if ch in '\n\x85\u2028\u2029' \
+ or (ch == '\r' and self.buffer[self.pointer] != '\n'):
+ self.line += 1
+ self.column = 0
+ elif ch != '\uFEFF':
+ self.column += 1
+ length -= 1
+
+ def get_mark(self):
+ if self.stream is None:
+ return Mark(self.name, self.index, self.line, self.column,
+ self.buffer, self.pointer)
+ else:
+ return Mark(self.name, self.index, self.line, self.column,
+ None, None)
+
+ def determine_encoding(self):
+ while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
+ self.update_raw()
+ if isinstance(self.raw_buffer, bytes):
+ if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+ self.raw_decode = codecs.utf_16_le_decode
+ self.encoding = 'utf-16-le'
+ elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+ self.raw_decode = codecs.utf_16_be_decode
+ self.encoding = 'utf-16-be'
+ else:
+ self.raw_decode = codecs.utf_8_decode
+ self.encoding = 'utf-8'
+ self.update(1)
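+
+ # For example (illustrative): input starting with b'\xff\xfe' is decoded
+ # as utf-16-le and b'\xfe\xff' as utf-16-be; anything else, including a
+ # UTF-8 BOM, falls through to the utf-8 decoder.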
+
+ NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
+ def check_printable(self, data):
+ match = self.NON_PRINTABLE.search(data)
+ if match:
+ character = match.group()
+ position = self.index+(len(self.buffer)-self.pointer)+match.start()
+ raise ReaderError(self.name, position, ord(character),
+ 'unicode', "special characters are not allowed")
+
+ def update(self, length):
+ if self.raw_buffer is None:
+ return
+ self.buffer = self.buffer[self.pointer:]
+ self.pointer = 0
+ while len(self.buffer) < length:
+ if not self.eof:
+ self.update_raw()
+ if self.raw_decode is not None:
+ try:
+ data, converted = self.raw_decode(self.raw_buffer,
+ 'strict', self.eof)
+ except UnicodeDecodeError as exc:
+ character = self.raw_buffer[exc.start]
+ if self.stream is not None:
+ position = self.stream_pointer-len(self.raw_buffer)+exc.start
+ else:
+ position = exc.start
+ raise ReaderError(self.name, position, character,
+ exc.encoding, exc.reason)
+ else:
+ data = self.raw_buffer
+ converted = len(data)
+ self.check_printable(data)
+ self.buffer += data
+ self.raw_buffer = self.raw_buffer[converted:]
+ if self.eof:
+ self.buffer += '\0'
+ self.raw_buffer = None
+ break
+
+ def update_raw(self, size=4096):
+ data = self.stream.read(size)
+ if self.raw_buffer is None:
+ self.raw_buffer = data
+ else:
+ self.raw_buffer += data
+ self.stream_pointer += len(data)
+ if not data:
+ self.eof = True
+
+#try:
+# import psyco
+# psyco.bind(Reader)
+#except ImportError:
+# pass
+
diff --git a/python/pyyaml/lib3/yaml/representer.py b/python/pyyaml/lib3/yaml/representer.py
new file mode 100644
index 000000000..67cd6fd25
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/representer.py
@@ -0,0 +1,374 @@
+
+__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
+ 'RepresenterError']
+
+from .error import *
+from .nodes import *
+
+import datetime, sys, copyreg, types, base64
+
+class RepresenterError(YAMLError):
+ pass
+
+class BaseRepresenter:
+
+ yaml_representers = {}
+ yaml_multi_representers = {}
+
+ def __init__(self, default_style=None, default_flow_style=None):
+ self.default_style = default_style
+ self.default_flow_style = default_flow_style
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent(self, data):
+ node = self.represent_data(data)
+ self.serialize(node)
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent_data(self, data):
+ if self.ignore_aliases(data):
+ self.alias_key = None
+ else:
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
+ return node
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
+ data_types = type(data).__mro__
+ if data_types[0] in self.yaml_representers:
+ node = self.yaml_representers[data_types[0]](self, data)
+ else:
+ for data_type in data_types:
+ if data_type in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[data_type](self, data)
+ break
+ else:
+ if None in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[None](self, data)
+ elif None in self.yaml_representers:
+ node = self.yaml_representers[None](self, data)
+ else:
+ node = ScalarNode(None, str(data))
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
+ return node
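+
+ # Dispatch summary (illustrative): an exact type match in
+ # yaml_representers wins; otherwise the first base class along
+ # type(data).__mro__ found in yaml_multi_representers is used, and a
+ # None key in either table acts as the catch-all fallback.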
+
+ @classmethod
+ def add_representer(cls, data_type, representer):
+ if not 'yaml_representers' in cls.__dict__:
+ cls.yaml_representers = cls.yaml_representers.copy()
+ cls.yaml_representers[data_type] = representer
+
+ @classmethod
+ def add_multi_representer(cls, data_type, representer):
+ if not 'yaml_multi_representers' in cls.__dict__:
+ cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
+ cls.yaml_multi_representers[data_type] = representer
+
+ def represent_scalar(self, tag, value, style=None):
+ if style is None:
+ style = self.default_style
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
+
+ def represent_sequence(self, tag, sequence, flow_style=None):
+ value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ for item in sequence:
+ node_item = self.represent_data(item)
+ if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ best_style = False
+ value.append(node_item)
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ mapping = list(mapping.items())
+ try:
+ mapping = sorted(mapping)
+ except TypeError:
+ pass
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def ignore_aliases(self, data):
+ return False
+
+class SafeRepresenter(BaseRepresenter):
+
+ def ignore_aliases(self, data):
+ if data in [None, ()]:
+ return True
+ if isinstance(data, (str, bytes, bool, int, float)):
+ return True
+
+ def represent_none(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:null', 'null')
+
+ def represent_str(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:str', data)
+
+ def represent_binary(self, data):
+ if hasattr(base64, 'encodebytes'):
+ data = base64.encodebytes(data).decode('ascii')
+ else:
+ data = base64.encodestring(data).decode('ascii')
+ return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
+
+ def represent_bool(self, data):
+ if data:
+ value = 'true'
+ else:
+ value = 'false'
+ return self.represent_scalar('tag:yaml.org,2002:bool', value)
+
+ def represent_int(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:int', str(data))
+
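+ # Squaring 1e300 repeatedly overflows to the platform's float infinity;
+ # the loop stops once inf*inf == inf, i.e. once repr() is stable.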
+ inf_value = 1e300
+ while repr(inf_value) != repr(inf_value*inf_value):
+ inf_value *= inf_value
+
+ def represent_float(self, data):
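+ # NaN is the only value for which data != data; the second clause guards
+ # exotic platforms where NaN compares equal to everything (no ordinary
+ # float equals both 0.0 and 1.0).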
+ if data != data or (data == 0.0 and data == 1.0):
+ value = '.nan'
+ elif data == self.inf_value:
+ value = '.inf'
+ elif data == -self.inf_value:
+ value = '-.inf'
+ else:
+ value = repr(data).lower()
+ # Note that in some cases `repr(data)` represents a float number
+ # without a fractional part. For instance:
+ # >>> repr(1e17)
+ # '1e17'
+ # Unfortunately, this is not a valid float representation according
+ # to the definition of the `!!float` tag. We fix this by adding
+ # '.0' before the 'e' symbol.
+ if '.' not in value and 'e' in value:
+ value = value.replace('e', '.0e', 1)
+ return self.represent_scalar('tag:yaml.org,2002:float', value)
+
+ def represent_list(self, data):
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
+ return self.represent_sequence('tag:yaml.org,2002:seq', data)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+
+ def represent_dict(self, data):
+ return self.represent_mapping('tag:yaml.org,2002:map', data)
+
+ def represent_set(self, data):
+ value = {}
+ for key in data:
+ value[key] = None
+ return self.represent_mapping('tag:yaml.org,2002:set', value)
+
+ def represent_date(self, data):
+ value = data.isoformat()
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_datetime(self, data):
+ value = data.isoformat(' ')
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_yaml_object(self, tag, data, cls, flow_style=None):
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ return self.represent_mapping(tag, state, flow_style=flow_style)
+
+ def represent_undefined(self, data):
+ raise RepresenterError("cannot represent an object: %s" % data)
+
+SafeRepresenter.add_representer(type(None),
+ SafeRepresenter.represent_none)
+
+SafeRepresenter.add_representer(str,
+ SafeRepresenter.represent_str)
+
+SafeRepresenter.add_representer(bytes,
+ SafeRepresenter.represent_binary)
+
+SafeRepresenter.add_representer(bool,
+ SafeRepresenter.represent_bool)
+
+SafeRepresenter.add_representer(int,
+ SafeRepresenter.represent_int)
+
+SafeRepresenter.add_representer(float,
+ SafeRepresenter.represent_float)
+
+SafeRepresenter.add_representer(list,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(tuple,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(dict,
+ SafeRepresenter.represent_dict)
+
+SafeRepresenter.add_representer(set,
+ SafeRepresenter.represent_set)
+
+SafeRepresenter.add_representer(datetime.date,
+ SafeRepresenter.represent_date)
+
+SafeRepresenter.add_representer(datetime.datetime,
+ SafeRepresenter.represent_datetime)
+
+SafeRepresenter.add_representer(None,
+ SafeRepresenter.represent_undefined)
+
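+# Illustrative round-trip (editor's sketch, not part of the upstream source):
+# with the registrations above and the full package available,
+#     yaml.safe_dump({'ok': True, 'when': datetime.date(2000, 1, 1)})
+# emits "ok: true\nwhen: 2000-01-01\n".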
+class Representer(SafeRepresenter):
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = '%r' % data.real
+ elif data.real == 0.0:
+ data = '%rj' % data.imag
+ elif data.imag > 0:
+ data = '%r+%rj' % (data.real, data.imag)
+ else:
+ data = '%r%rj' % (data.real, data.imag)
+ return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
+
+ def represent_tuple(self, data):
+ return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
+
+ def represent_name(self, data):
+ name = '%s.%s' % (data.__module__, data.__name__)
+ return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '')
+
+ def represent_module(self, data):
+ return self.represent_scalar(
+ 'tag:yaml.org,2002:python/module:'+data.__name__, '')
+
+ def represent_object(self, data):
+ # We use the __reduce__ API to save the data. data.__reduce__ returns
+ # a tuple of length 2-5:
+ # (function, args, state, listitems, dictitems)
+
+ # For reconstructing, we call function(*args), then set its state,
+ # listitems, and dictitems if they are not None.
+
+ # A special case is when function.__name__ == '__newobj__'. In this
+ # case we create the object with args[0].__new__(*args).
+
+ # Another special case is when __reduce__ returns a string - we don't
+ # support it.
+
+ # We produce a !!python/object, !!python/object/new or
+ # !!python/object/apply node.
+
+ cls = type(data)
+ if cls in copyreg.dispatch_table:
+ reduce = copyreg.dispatch_table[cls](data)
+ elif hasattr(data, '__reduce_ex__'):
+ reduce = data.__reduce_ex__(2)
+ elif hasattr(data, '__reduce__'):
+ reduce = data.__reduce__()
+ else:
+ raise RepresenterError("cannot represent object: %r" % data)
+ reduce = (list(reduce)+[None]*5)[:5]
+ function, args, state, listitems, dictitems = reduce
+ args = list(args)
+ if state is None:
+ state = {}
+ if listitems is not None:
+ listitems = list(listitems)
+ if dictitems is not None:
+ dictitems = dict(dictitems)
+ if function.__name__ == '__newobj__':
+ function = args[0]
+ args = args[1:]
+ tag = 'tag:yaml.org,2002:python/object/new:'
+ newobj = True
+ else:
+ tag = 'tag:yaml.org,2002:python/object/apply:'
+ newobj = False
+ function_name = '%s.%s' % (function.__module__, function.__name__)
+ if not args and not listitems and not dictitems \
+ and isinstance(state, dict) and newobj:
+ return self.represent_mapping(
+ 'tag:yaml.org,2002:python/object:'+function_name, state)
+ if not listitems and not dictitems \
+ and isinstance(state, dict) and not state:
+ return self.represent_sequence(tag+function_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ if state or not isinstance(state, dict):
+ value['state'] = state
+ if listitems:
+ value['listitems'] = listitems
+ if dictitems:
+ value['dictitems'] = dictitems
+ return self.represent_mapping(tag+function_name, value)
+
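+# Illustrative example (editor's sketch, not part of the upstream source):
+# for
+#     class C:
+#         def __init__(self): self.x = 1
+# a protocol-2 __reduce_ex__ returns a __newobj__ tuple with empty remaining
+# args and dict state, so yaml.dump(C()) takes the first branch above and
+# emits "!!python/object:__main__.C\nx: 1\n".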
+Representer.add_representer(complex,
+ Representer.represent_complex)
+
+Representer.add_representer(tuple,
+ Representer.represent_tuple)
+
+Representer.add_representer(type,
+ Representer.represent_name)
+
+Representer.add_representer(types.FunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.BuiltinFunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.ModuleType,
+ Representer.represent_module)
+
+Representer.add_multi_representer(object,
+ Representer.represent_object)
+
diff --git a/python/pyyaml/lib3/yaml/resolver.py b/python/pyyaml/lib3/yaml/resolver.py
new file mode 100644
index 000000000..0eece2582
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/resolver.py
@@ -0,0 +1,224 @@
+
+__all__ = ['BaseResolver', 'Resolver']
+
+from .error import *
+from .nodes import *
+
+import re
+
+class ResolverError(YAMLError):
+ pass
+
+class BaseResolver:
+
+ DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
+
+ yaml_implicit_resolvers = {}
+ yaml_path_resolvers = {}
+
+ def __init__(self):
+ self.resolver_exact_paths = []
+ self.resolver_prefix_paths = []
+
+ @classmethod
+ def add_implicit_resolver(cls, tag, regexp, first):
+ if 'yaml_implicit_resolvers' not in cls.__dict__:
+ cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+ if first is None:
+ first = [None]
+ for ch in first:
+ cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+
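+ # Illustrative usage (editor's sketch, not part of the upstream source):
+ # resolve bare yes/no scalars to a hypothetical application tag, keyed on
+ # their first character:
+ #     Resolver.add_implicit_resolver(
+ #         '!truthy', re.compile(r'^(?:yes|no)$'), list('yn'))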
+ @classmethod
+ def add_path_resolver(cls, tag, path, kind=None):
+ # Note: `add_path_resolver` is experimental. The API could be changed.
+ # `path` is a pattern that is matched against the path from the
+ # root to the node that is being considered. `path` elements are
+ # tuples `(node_check, index_check)`. `node_check` is a node class:
+ # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
+ # matches any kind of a node. `index_check` could be `None`, a boolean
+ # value, a string value, or a number. `None` and `False` match against
+ # any _value_ of sequence and mapping nodes. `True` matches against
+ # any _key_ of a mapping node. A string `index_check` matches against
+ # a mapping value that corresponds to a scalar key whose content is
+ # equal to the `index_check` value. An integer `index_check` matches
+ # against a sequence value with the index equal to `index_check`.
+ if 'yaml_path_resolvers' not in cls.__dict__:
+ cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ new_path = []
+ for element in path:
+ if isinstance(element, (list, tuple)):
+ if len(element) == 2:
+ node_check, index_check = element
+ elif len(element) == 1:
+ node_check = element[0]
+ index_check = True
+ else:
+ raise ResolverError("Invalid path element: %s" % element)
+ else:
+ node_check = None
+ index_check = element
+ if node_check is str:
+ node_check = ScalarNode
+ elif node_check is list:
+ node_check = SequenceNode
+ elif node_check is dict:
+ node_check = MappingNode
+ elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
+ and not isinstance(node_check, str) \
+ and node_check is not None:
+ raise ResolverError("Invalid node checker: %s" % node_check)
+ if not isinstance(index_check, (str, int)) \
+ and index_check is not None:
+ raise ResolverError("Invalid index checker: %s" % index_check)
+ new_path.append((node_check, index_check))
+ if kind is str:
+ kind = ScalarNode
+ elif kind is list:
+ kind = SequenceNode
+ elif kind is dict:
+ kind = MappingNode
+ elif kind not in [ScalarNode, SequenceNode, MappingNode] \
+ and kind is not None:
+ raise ResolverError("Invalid node kind: %s" % kind)
+ cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+
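+ # Illustrative usage (editor's sketch, not part of the upstream source):
+ # force the value of the "version" key in the root mapping to resolve as
+ # a string:
+ #     Resolver.add_path_resolver('tag:yaml.org,2002:str',
+ #                                [(dict, 'version')], str)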
+ def descend_resolver(self, current_node, current_index):
+ if not self.yaml_path_resolvers:
+ return
+ exact_paths = {}
+ prefix_paths = []
+ if current_node:
+ depth = len(self.resolver_prefix_paths)
+ for path, kind in self.resolver_prefix_paths[-1]:
+ if self.check_resolver_prefix(depth, path, kind,
+ current_node, current_index):
+ if len(path) > depth:
+ prefix_paths.append((path, kind))
+ else:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ for path, kind in self.yaml_path_resolvers:
+ if not path:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ prefix_paths.append((path, kind))
+ self.resolver_exact_paths.append(exact_paths)
+ self.resolver_prefix_paths.append(prefix_paths)
+
+ def ascend_resolver(self):
+ if not self.yaml_path_resolvers:
+ return
+ self.resolver_exact_paths.pop()
+ self.resolver_prefix_paths.pop()
+
+ def check_resolver_prefix(self, depth, path, kind,
+ current_node, current_index):
+ node_check, index_check = path[depth-1]
+ if isinstance(node_check, str):
+ if current_node.tag != node_check:
+ return
+ elif node_check is not None:
+ if not isinstance(current_node, node_check):
+ return
+ if index_check is True and current_index is not None:
+ return
+ if (index_check is False or index_check is None) \
+ and current_index is None:
+ return
+ if isinstance(index_check, str):
+ if not (isinstance(current_index, ScalarNode)
+ and index_check == current_index.value):
+ return
+ elif isinstance(index_check, int) and not isinstance(index_check, bool):
+ if index_check != current_index:
+ return
+ return True
+
+ def resolve(self, kind, value, implicit):
+ if kind is ScalarNode and implicit[0]:
+ if value == '':
+ resolvers = self.yaml_implicit_resolvers.get('', [])
+ else:
+ resolvers = self.yaml_implicit_resolvers.get(value[0], [])
+ # Use list concatenation rather than += so the shared registry list
+ # is not mutated in place.
+ resolvers = resolvers + self.yaml_implicit_resolvers.get(None, [])
+ for tag, regexp in resolvers:
+ if regexp.match(value):
+ return tag
+ implicit = implicit[1]
+ if self.yaml_path_resolvers:
+ exact_paths = self.resolver_exact_paths[-1]
+ if kind in exact_paths:
+ return exact_paths[kind]
+ if None in exact_paths:
+ return exact_paths[None]
+ if kind is ScalarNode:
+ return self.DEFAULT_SCALAR_TAG
+ elif kind is SequenceNode:
+ return self.DEFAULT_SEQUENCE_TAG
+ elif kind is MappingNode:
+ return self.DEFAULT_MAPPING_TAG
+
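+# Illustrative resolution (editor's note, not part of the upstream source):
+# with the implicit resolvers registered below, the plain scalars "123" and
+# "12:34" resolve to tag:yaml.org,2002:int, "1.5e+3" to
+# tag:yaml.org,2002:float, and "abc" falls back to DEFAULT_SCALAR_TAG.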
+class Resolver(BaseResolver):
+ pass
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:bool',
+ re.compile(r'''^(?:yes|Yes|YES|no|No|NO
+ |true|True|TRUE|false|False|FALSE
+ |on|On|ON|off|Off|OFF)$''', re.X),
+ list('yYnNtTfFoO'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:float',
+ re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
+ |\.[0-9_]+(?:[eE][-+][0-9]+)?
+ |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
+ |[-+]?\.(?:inf|Inf|INF)
+ |\.(?:nan|NaN|NAN))$''', re.X),
+ list('-+0123456789.'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:int',
+ re.compile(r'''^(?:[-+]?0b[0-1_]+
+ |[-+]?0[0-7_]+
+ |[-+]?(?:0|[1-9][0-9_]*)
+ |[-+]?0x[0-9a-fA-F_]+
+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
+ list('-+0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:merge',
+ re.compile(r'^(?:<<)$'),
+ ['<'])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:null',
+ re.compile(r'''^(?: ~
+ |null|Null|NULL
+ | )$''', re.X),
+ ['~', 'n', 'N', ''])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:timestamp',
+ re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
+ (?:[Tt]|[ \t]+)[0-9][0-9]?
+ :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
+ (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
+ list('0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:value',
+ re.compile(r'^(?:=)$'),
+ ['='])
+
+# The following resolver is only for documentation purposes. It cannot work
+# because plain scalars cannot start with '!', '&', or '*'.
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:yaml',
+ re.compile(r'^(?:!|&|\*)$'),
+ list('!&*'))
+
diff --git a/python/pyyaml/lib3/yaml/scanner.py b/python/pyyaml/lib3/yaml/scanner.py
new file mode 100644
index 000000000..494d975ba
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/scanner.py
@@ -0,0 +1,1448 @@
+
+# Scanner produces tokens of the following types:
+# STREAM-START
+# STREAM-END
+# DIRECTIVE(name, value)
+# DOCUMENT-START
+# DOCUMENT-END
+# BLOCK-SEQUENCE-START
+# BLOCK-MAPPING-START
+# BLOCK-END
+# FLOW-SEQUENCE-START
+# FLOW-MAPPING-START
+# FLOW-SEQUENCE-END
+# FLOW-MAPPING-END
+# BLOCK-ENTRY
+# FLOW-ENTRY
+# KEY
+# VALUE
+# ALIAS(value)
+# ANCHOR(value)
+# TAG(value)
+# SCALAR(value, plain, style)
+#
+# Read comments in the Scanner code for more details.
+#
+
+__all__ = ['Scanner', 'ScannerError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+
+class ScannerError(MarkedYAMLError):
+ pass
+
+class SimpleKey:
+ # See the simple keys treatment below.
+
+ def __init__(self, token_number, required, index, line, column, mark):
+ self.token_number = token_number
+ self.required = required
+ self.index = index
+ self.line = line
+ self.column = column
+ self.mark = mark
+
+class Scanner:
+
+ def __init__(self):
+ """Initialize the scanner."""
+ # It is assumed that Scanner and Reader will have a common descendant.
+ # Reader does the dirty work of checking for BOM and converting the
+ # input data to Unicode. It also adds NUL to the end.
+ #
+ # Reader supports the following methods
+ # self.peek(i=0) # peek the next i-th character
+ # self.prefix(l=1) # peek the next l characters
+ # self.forward(l=1) # read the next l characters and move the pointer.
+
+ # Have we reached the end of the stream?
+ self.done = False
+
+ # The number of unclosed '{' and '['. `flow_level == 0` means block
+ # context.
+ self.flow_level = 0
+
+ # List of processed tokens that are not yet emitted.
+ self.tokens = []
+
+ # Add the STREAM-START token.
+ self.fetch_stream_start()
+
+ # Number of tokens that were emitted through the `get_token` method.
+ self.tokens_taken = 0
+
+ # The current indentation level.
+ self.indent = -1
+
+ # Past indentation levels.
+ self.indents = []
+
+ # Variables related to simple keys treatment.
+
+ # A simple key is a key that is not denoted by the '?' indicator.
+ # Example of simple keys:
+ # ---
+ # block simple key: value
+ # ? not a simple key:
+ # : { flow simple key: value }
+ # We emit the KEY token before all keys, so when we find a potential
+ # simple key, we try to locate the corresponding ':' indicator.
+ # Simple keys should be limited to a single line and 1024 characters.
+
+ # Can a simple key start at the current position? A simple key may
+ # start:
+ # - at the beginning of the line, not counting indentation spaces
+ # (in block context),
+ # - after '{', '[', ',' (in the flow context),
+ # - after '?', ':', '-' (in the block context).
+ # In the block context, this flag also signifies if a block collection
+ # may start at the current position.
+ self.allow_simple_key = True
+
+ # Keep track of possible simple keys. This is a dictionary. The key
+ # is `flow_level`; there can be no more than one possible simple key
+ # for each level. The value is a SimpleKey record:
+ # (token_number, required, index, line, column, mark)
+ # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+ # '[', or '{' tokens.
+ self.possible_simple_keys = {}
+
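+ # Illustrative token stream (editor's note, not part of the upstream
+ # source): scanning "a: 1" yields STREAM-START, BLOCK-MAPPING-START,
+ # KEY, SCALAR('a'), VALUE, SCALAR('1'), BLOCK-END, STREAM-END.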
+ # Public methods.
+
+ def check_token(self, *choices):
+ # Check if the next token is one of the given types.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+ # Return the next token, but do not delete it from the queue.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ return self.tokens[0]
+
+ def get_token(self):
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ # Private methods.
+
+ def need_more_tokens(self):
+ if self.done:
+ return False
+ if not self.tokens:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+
+ def fetch_more_tokens(self):
+
+ # Eat whitespaces and comments until we reach the next token.
+ self.scan_to_next_token()
+
+ # Remove obsolete possible simple keys.
+ self.stale_possible_simple_keys()
+
+ # Compare the current indentation and column. It may add some tokens
+ # and decrease the current indentation level.
+ self.unwind_indent(self.column)
+
+ # Peek the next character.
+ ch = self.peek()
+
+ # Is it the end of stream?
+ if ch == '\0':
+ return self.fetch_stream_end()
+
+ # Is it a directive?
+ if ch == '%' and self.check_directive():
+ return self.fetch_directive()
+
+ # Is it the document start?
+ if ch == '-' and self.check_document_start():
+ return self.fetch_document_start()
+
+ # Is it the document end?
+ if ch == '.' and self.check_document_end():
+ return self.fetch_document_end()
+
+ # TODO: support for BOM within a stream.
+ #if ch == '\uFEFF':
+ # return self.fetch_bom() <-- issue BOMToken
+
+ # Note: the order of the following checks is NOT significant.
+
+ # Is it the flow sequence start indicator?
+ if ch == '[':
+ return self.fetch_flow_sequence_start()
+
+ # Is it the flow mapping start indicator?
+ if ch == '{':
+ return self.fetch_flow_mapping_start()
+
+ # Is it the flow sequence end indicator?
+ if ch == ']':
+ return self.fetch_flow_sequence_end()
+
+ # Is it the flow mapping end indicator?
+ if ch == '}':
+ return self.fetch_flow_mapping_end()
+
+ # Is it the flow entry indicator?
+ if ch == ',':
+ return self.fetch_flow_entry()
+
+ # Is it the block entry indicator?
+ if ch == '-' and self.check_block_entry():
+ return self.fetch_block_entry()
+
+ # Is it the key indicator?
+ if ch == '?' and self.check_key():
+ return self.fetch_key()
+
+ # Is it the value indicator?
+ if ch == ':' and self.check_value():
+ return self.fetch_value()
+
+ # Is it an alias?
+ if ch == '*':
+ return self.fetch_alias()
+
+ # Is it an anchor?
+ if ch == '&':
+ return self.fetch_anchor()
+
+ # Is it a tag?
+ if ch == '!':
+ return self.fetch_tag()
+
+ # Is it a literal scalar?
+ if ch == '|' and not self.flow_level:
+ return self.fetch_literal()
+
+ # Is it a folded scalar?
+ if ch == '>' and not self.flow_level:
+ return self.fetch_folded()
+
+ # Is it a single quoted scalar?
+ if ch == '\'':
+ return self.fetch_single()
+
+ # Is it a double quoted scalar?
+ if ch == '\"':
+ return self.fetch_double()
+
+ # It must be a plain scalar then.
+ if self.check_plain():
+ return self.fetch_plain()
+
+ # No? It's an error. Let's produce a nice error message.
+ raise ScannerError("while scanning for the next token", None,
+ "found character %r that cannot start any token" % ch,
+ self.get_mark())
+
+ # Simple keys treatment.
+
+ def next_possible_simple_key(self):
+ # Return the number of the nearest possible simple key. Actually we
+ # don't need to loop through the whole dictionary. We may replace it
+ # with the following code:
+ # if not self.possible_simple_keys:
+ # return None
+ # return self.possible_simple_keys[
+ # min(self.possible_simple_keys.keys())].token_number
+ min_token_number = None
+ for level in self.possible_simple_keys:
+ key = self.possible_simple_keys[level]
+ if min_token_number is None or key.token_number < min_token_number:
+ min_token_number = key.token_number
+ return min_token_number
+
+ def stale_possible_simple_keys(self):
+ # Remove entries that are no longer possible simple keys. According to
+ # the YAML specification, simple keys
+ # - should be limited to a single line,
+ # - should be no longer than 1024 characters.
+ # Disabling this procedure will allow simple keys of any length and
+ # height (may cause problems if indentation is broken though).
+ for level in list(self.possible_simple_keys):
+ key = self.possible_simple_keys[level]
+ if key.line != self.line \
+ or self.index-key.index > 1024:
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not found expected ':'", self.get_mark())
+ del self.possible_simple_keys[level]
+
+ def save_possible_simple_key(self):
+ # The next token may start a simple key. We check if it's possible
+ # and save its position. This function is called for
+ # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+ # Check if a simple key is required at the current position.
+ required = not self.flow_level and self.indent == self.column
+
+ # A simple key is required only if it is the first token in the current
+ # line. Therefore it is always allowed.
+ assert self.allow_simple_key or not required
+
+ # The next token might be a simple key. Let's save its number and
+ # position.
+ if self.allow_simple_key:
+ self.remove_possible_simple_key()
+ token_number = self.tokens_taken+len(self.tokens)
+ key = SimpleKey(token_number, required,
+ self.index, self.line, self.column, self.get_mark())
+ self.possible_simple_keys[self.flow_level] = key
+
+ def remove_possible_simple_key(self):
+ # Remove the saved possible key position at the current flow level.
+ if self.flow_level in self.possible_simple_keys:
+ key = self.possible_simple_keys[self.flow_level]
+
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not found expected ':'", self.get_mark())
+
+ del self.possible_simple_keys[self.flow_level]
+
+ # Indentation functions.
+
+ def unwind_indent(self, column):
+
+ ## In flow context, tokens should respect indentation.
+ ## Actually the condition should be `self.indent >= column` according to
+ ## the spec. But this condition will prohibit intuitively correct
+ ## constructions such as
+ ## key : {
+ ## }
+ #if self.flow_level and self.indent > column:
+ # raise ScannerError(None, None,
+ # "invalid intendation or unclosed '[' or '{'",
+ # self.get_mark())
+
+ # In the flow context, indentation is ignored. We make the scanner less
+ # restrictive than the specification requires.
+ if self.flow_level:
+ return
+
+ # In block context, we may need to issue the BLOCK-END tokens.
+ while self.indent > column:
+ mark = self.get_mark()
+ self.indent = self.indents.pop()
+ self.tokens.append(BlockEndToken(mark, mark))
+
+ def add_indent(self, column):
+ # Check if we need to increase indentation.
+ if self.indent < column:
+ self.indents.append(self.indent)
+ self.indent = column
+ return True
+ return False
+
+ # Fetchers.
+
+ def fetch_stream_start(self):
+ # We always add STREAM-START as the first token and STREAM-END as the
+ # last token.
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-START.
+ self.tokens.append(StreamStartToken(mark, mark,
+ encoding=self.encoding))
+
+
+ def fetch_stream_end(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+ self.possible_simple_keys = {}
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-END.
+ self.tokens.append(StreamEndToken(mark, mark))
+
+ # The stream is finished.
+ self.done = True
+
+ def fetch_directive(self):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive())
+
+ def fetch_document_start(self):
+ self.fetch_document_indicator(DocumentStartToken)
+
+ def fetch_document_end(self):
+ self.fetch_document_indicator(DocumentEndToken)
+
+ def fetch_document_indicator(self, TokenClass):
+
+ # Set the current indentation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys. Note that there could not be a block collection
+ # after '---'.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Add DOCUMENT-START or DOCUMENT-END.
+ start_mark = self.get_mark()
+ self.forward(3)
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_start(self):
+ self.fetch_flow_collection_start(FlowSequenceStartToken)
+
+ def fetch_flow_mapping_start(self):
+ self.fetch_flow_collection_start(FlowMappingStartToken)
+
+ def fetch_flow_collection_start(self, TokenClass):
+
+ # '[' and '{' may start a simple key.
+ self.save_possible_simple_key()
+
+ # Increase the flow level.
+ self.flow_level += 1
+
+ # Simple keys are allowed after '[' and '{'.
+ self.allow_simple_key = True
+
+ # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_sequence_end(self):
+ self.fetch_flow_collection_end(FlowSequenceEndToken)
+
+ def fetch_flow_mapping_end(self):
+ self.fetch_flow_collection_end(FlowMappingEndToken)
+
+ def fetch_flow_collection_end(self, TokenClass):
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Decrease the flow level.
+ self.flow_level -= 1
+
+ # No simple keys after ']' or '}'.
+ self.allow_simple_key = False
+
+ # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+ def fetch_flow_entry(self):
+
+ # Simple keys are allowed after ','.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add FLOW-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(FlowEntryToken(start_mark, end_mark))
+
+ def fetch_block_entry(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a new entry?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "sequence entries are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-SEQUENCE-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockSequenceStartToken(mark, mark))
+
+ # It's an error for the block entry to occur in the flow context,
+ # but we let the parser detect this.
+ else:
+ pass
+
+ # Simple keys are allowed after '-'.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add BLOCK-ENTRY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(BlockEntryToken(start_mark, end_mark))
+
+ def fetch_key(self):
+
+ # Block context needs additional checks.
+ if not self.flow_level:
+
+ # Are we allowed to start a key (not necessarily a simple one)?
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping keys are not allowed here",
+ self.get_mark())
+
+ # We may need to add BLOCK-MAPPING-START.
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after '?' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add KEY.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(KeyToken(start_mark, end_mark))
+
+ def fetch_value(self):
+
+ # Do we determine a simple key?
+ if self.flow_level in self.possible_simple_keys:
+
+ # Add KEY.
+ key = self.possible_simple_keys[self.flow_level]
+ del self.possible_simple_keys[self.flow_level]
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ KeyToken(key.mark, key.mark))
+
+ # If this key starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START.
+ if not self.flow_level:
+ if self.add_indent(key.column):
+ self.tokens.insert(key.token_number-self.tokens_taken,
+ BlockMappingStartToken(key.mark, key.mark))
+
+ # There cannot be two simple keys one after another.
+ self.allow_simple_key = False
+
+ # It must be a part of a complex key.
+ else:
+
+ # Block context needs additional checks.
+ # (Do we really need them? They will be caught by the parser
+ # anyway.)
+ if not self.flow_level:
+
+ # We are allowed to start a complex value if and only if
+ # we can start a simple key.
+ if not self.allow_simple_key:
+ raise ScannerError(None, None,
+ "mapping values are not allowed here",
+ self.get_mark())
+
+ # If this value starts a new block mapping, we need to add
+ # BLOCK-MAPPING-START. It will be detected as an error later by
+ # the parser.
+ if not self.flow_level:
+ if self.add_indent(self.column):
+ mark = self.get_mark()
+ self.tokens.append(BlockMappingStartToken(mark, mark))
+
+ # Simple keys are allowed after ':' in the block context.
+ self.allow_simple_key = not self.flow_level
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Add VALUE.
+ start_mark = self.get_mark()
+ self.forward()
+ end_mark = self.get_mark()
+ self.tokens.append(ValueToken(start_mark, end_mark))
+
+ def fetch_alias(self):
+
+ # ALIAS could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ALIAS.
+ self.allow_simple_key = False
+
+ # Scan and add ALIAS.
+ self.tokens.append(self.scan_anchor(AliasToken))
+
+ def fetch_anchor(self):
+
+ # ANCHOR could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after ANCHOR.
+ self.allow_simple_key = False
+
+ # Scan and add ANCHOR.
+ self.tokens.append(self.scan_anchor(AnchorToken))
+
+ def fetch_tag(self):
+
+ # TAG could start a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after TAG.
+ self.allow_simple_key = False
+
+ # Scan and add TAG.
+ self.tokens.append(self.scan_tag())
+
+ def fetch_literal(self):
+ self.fetch_block_scalar(style='|')
+
+ def fetch_folded(self):
+ self.fetch_block_scalar(style='>')
+
+ def fetch_block_scalar(self, style):
+
+ # A simple key may follow a block scalar.
+ self.allow_simple_key = True
+
+ # Reset possible simple key on the current level.
+ self.remove_possible_simple_key()
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_block_scalar(style))
+
+ def fetch_single(self):
+ self.fetch_flow_scalar(style='\'')
+
+ def fetch_double(self):
+ self.fetch_flow_scalar(style='"')
+
+ def fetch_flow_scalar(self, style):
+
+ # A flow scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after flow scalars.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR.
+ self.tokens.append(self.scan_flow_scalar(style))
+
+ def fetch_plain(self):
+
+ # A plain scalar could be a simple key.
+ self.save_possible_simple_key()
+
+ # No simple keys after plain scalars. But note that `scan_plain` will
+ # change this flag if the scan is finished at the beginning of the
+ # line.
+ self.allow_simple_key = False
+
+ # Scan and add SCALAR. May change `allow_simple_key`.
+ self.tokens.append(self.scan_plain())
+
+ # Checkers.
+
+ def check_directive(self):
+
+ # DIRECTIVE: ^ '%' ...
+ # The '%' indicator is already checked.
+ if self.column == 0:
+ return True
+
+ def check_document_start(self):
+
+ # DOCUMENT-START: ^ '---' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == '---' \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_document_end(self):
+
+ # DOCUMENT-END: ^ '...' (' '|'\n')
+ if self.column == 0:
+ if self.prefix(3) == '...' \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return True
+
+ def check_block_entry(self):
+
+ # BLOCK-ENTRY: '-' (' '|'\n')
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_key(self):
+
+ # KEY(flow context): '?'
+ if self.flow_level:
+ return True
+
+ # KEY(block context): '?' (' '|'\n')
+ else:
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_value(self):
+
+ # VALUE(flow context): ':'
+ if self.flow_level:
+ return True
+
+ # VALUE(block context): ':' (' '|'\n')
+ else:
+ return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+ def check_plain(self):
+
+ # A plain scalar may start with any non-space character except:
+ # '-', '?', ':', ',', '[', ']', '{', '}',
+ # '#', '&', '*', '!', '|', '>', '\'', '\"',
+ # '%', '@', '`'.
+ #
+ # It may also start with
+ # '-', '?', ':'
+ # if it is followed by a non-space character.
+ #
+ # Note that we limit the last rule to the block context (except the
+ # '-' character) because we want the flow context to be space
+ # independent.
+ ch = self.peek()
+ return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
+ or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
+ and (ch == '-' or (not self.flow_level and ch in '?:')))
+
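+ # Illustrative checks (editor's note, not part of the upstream source):
+ # in block context "-foo" and "?key" start plain scalars (the indicator
+ # is followed by a non-space), while "- foo" is a block entry and
+ # "? key" a complex key.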
+ # Scanners.
+
+ def scan_to_next_token(self):
+ # We ignore spaces, line breaks and comments.
+ # If we find a line break in the block context, we set the flag
+ # `allow_simple_key` on.
+ # The byte order mark is stripped if it's the first character in the
+ # stream. We do not yet support BOM inside the stream as the
+ # specification requires. Any such mark will be considered as a part
+ # of the document.
+ #
+ # TODO: We need to make tab handling rules more sane. A good rule is
+ # Tabs cannot precede tokens
+ # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+ # KEY(block), VALUE(block), BLOCK-ENTRY
+ # So the checking code is
+ # if <TAB>:
+ # self.allow_simple_keys = False
+ # We also need to add the check for `allow_simple_keys == True` to
+ # `unwind_indent` before issuing BLOCK-END.
+ # Scanners for block, flow, and plain scalars need to be modified.
+
+ if self.index == 0 and self.peek() == '\uFEFF':
+ self.forward()
+ found = False
+ while not found:
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ if self.scan_line_break():
+ if not self.flow_level:
+ self.allow_simple_key = True
+ else:
+ found = True
+
+ def scan_directive(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ self.forward()
+ name = self.scan_directive_name(start_mark)
+ value = None
+ if name == 'YAML':
+ value = self.scan_yaml_directive_value(start_mark)
+ end_mark = self.get_mark()
+ elif name == 'TAG':
+ value = self.scan_tag_directive_value(start_mark)
+ end_mark = self.get_mark()
+ else:
+ end_mark = self.get_mark()
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ self.scan_directive_ignored_line(start_mark)
+ return DirectiveToken(name, value, start_mark, end_mark)
+
+ def scan_directive_name(self, start_mark):
+ # See the specification for details.
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ return value
+
+ def scan_yaml_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ major = self.scan_yaml_directive_number(start_mark)
+ if self.peek() != '.':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or '.', but found %r" % self.peek(),
+ self.get_mark())
+ self.forward()
+ minor = self.scan_yaml_directive_number(start_mark)
+ if self.peek() not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit or ' ', but found %r" % self.peek(),
+ self.get_mark())
+ return (major, minor)
+
+ def scan_yaml_directive_number(self, start_mark):
+ # See the specification for details.
+ ch = self.peek()
+ if not ('0' <= ch <= '9'):
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a digit, but found %r" % ch, self.get_mark())
+ length = 0
+ while '0' <= self.peek(length) <= '9':
+ length += 1
+ value = int(self.prefix(length))
+ self.forward(length)
+ return value
+
+ def scan_tag_directive_value(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ handle = self.scan_tag_directive_handle(start_mark)
+ while self.peek() == ' ':
+ self.forward()
+ prefix = self.scan_tag_directive_prefix(start_mark)
+ return (handle, prefix)
+
+ def scan_tag_directive_handle(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_handle('directive', start_mark)
+ ch = self.peek()
+ if ch != ' ':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ return value
+
+ def scan_tag_directive_prefix(self, start_mark):
+ # See the specification for details.
+ value = self.scan_tag_uri('directive', start_mark)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ return value
+
+ def scan_directive_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a directive", start_mark,
+ "expected a comment or a line break, but found %r"
+ % ch, self.get_mark())
+ self.scan_line_break()
+
+ def scan_anchor(self, TokenClass):
+ # The specification does not restrict characters for anchors and
+ # aliases. This may lead to problems, for instance, the document:
+ # [ *alias, value ]
+ # can be interpreted in two ways, as
+ # [ "value" ]
+ # and
+ # [ *alias , "value" ]
+ # Therefore we restrict aliases to numbers and ASCII letters.
+ start_mark = self.get_mark()
+ indicator = self.peek()
+ if indicator == '*':
+ name = 'alias'
+ else:
+ name = 'anchor'
+ self.forward()
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if not length:
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ value = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+ raise ScannerError("while scanning an %s" % name, start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch, self.get_mark())
+ end_mark = self.get_mark()
+ return TokenClass(value, start_mark, end_mark)
+
+ def scan_tag(self):
+ # See the specification for details.
+ start_mark = self.get_mark()
+ ch = self.peek(1)
+ if ch == '<':
+ handle = None
+ self.forward(2)
+ suffix = self.scan_tag_uri('tag', start_mark)
+ if self.peek() != '>':
+ raise ScannerError("while scanning a tag", start_mark,
+ "expected '>', but found %r" % self.peek(),
+ self.get_mark())
+ self.forward()
+ elif ch in '\0 \t\r\n\x85\u2028\u2029':
+ handle = None
+ suffix = '!'
+ self.forward()
+ else:
+ length = 1
+ use_handle = False
+ while ch not in '\0 \r\n\x85\u2028\u2029':
+ if ch == '!':
+ use_handle = True
+ break
+ length += 1
+ ch = self.peek(length)
+ if use_handle:
+ handle = self.scan_tag_handle('tag', start_mark)
+ else:
+ handle = '!'
+ self.forward()
+ suffix = self.scan_tag_uri('tag', start_mark)
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a tag", start_mark,
+ "expected ' ', but found %r" % ch, self.get_mark())
+ value = (handle, suffix)
+ end_mark = self.get_mark()
+ return TagToken(value, start_mark, end_mark)
+
+ def scan_block_scalar(self, style):
+ # See the specification for details.
+
+ if style == '>':
+ folded = True
+ else:
+ folded = False
+
+ chunks = []
+ start_mark = self.get_mark()
+
+ # Scan the header.
+ self.forward()
+ chomping, increment = self.scan_block_scalar_indicators(start_mark)
+ self.scan_block_scalar_ignored_line(start_mark)
+
+ # Determine the indentation level and go to the first non-empty line.
+ min_indent = self.indent+1
+ if min_indent < 1:
+ min_indent = 1
+ if increment is None:
+ breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+ indent = max(min_indent, max_indent)
+ else:
+ indent = min_indent+increment-1
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ line_break = ''
+
+ # Scan the inner part of the block scalar.
+ while self.column == indent and self.peek() != '\0':
+ chunks.extend(breaks)
+ leading_non_space = self.peek() not in ' \t'
+ length = 0
+ while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
+ length += 1
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ line_break = self.scan_line_break()
+ breaks, end_mark = self.scan_block_scalar_breaks(indent)
+ if self.column == indent and self.peek() != '\0':
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
+
+ if folded and line_break == '\n' \
+ and leading_non_space and self.peek() not in ' \t':
+ if not breaks:
+ chunks.append(' ')
+ else:
+ chunks.append(line_break)
+
+ # This is Clark Evans's interpretation (also in the spec
+ # examples):
+ #
+ #if folded and line_break == '\n':
+ # if not breaks:
+ # if self.peek() not in ' \t':
+ # chunks.append(' ')
+ # else:
+ # chunks.append(line_break)
+ #else:
+ # chunks.append(line_break)
+ else:
+ break
+
+ # Chomp the tail.
+ if chomping is not False:
+ chunks.append(line_break)
+ if chomping is True:
+ chunks.extend(breaks)
+
+ # We are done.
+ return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ def scan_block_scalar_indicators(self, start_mark):
+ # See the specification for details.
+ chomping = None
+ increment = None
+ ch = self.peek()
+ if ch in '+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch in '0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ elif ch in '0123456789':
+ increment = int(ch)
+ if increment == 0:
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected indentation indicator in the range 1-9, but found 0",
+ self.get_mark())
+ self.forward()
+ ch = self.peek()
+ if ch in '+-':
+ if ch == '+':
+ chomping = True
+ else:
+ chomping = False
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0 \r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected chomping or indentation indicators, but found %r"
+ % ch, self.get_mark())
+ return chomping, increment
+
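+ # Illustrative headers (editor's note, not part of the upstream source):
+ # "|+2" gives chomping=True and increment=2; a bare "|" leaves both None,
+ # so the indentation level is auto-detected from the first non-empty line.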
+ def scan_block_scalar_ignored_line(self, start_mark):
+ # See the specification for details.
+ while self.peek() == ' ':
+ self.forward()
+ if self.peek() == '#':
+ while self.peek() not in '\0\r\n\x85\u2028\u2029':
+ self.forward()
+ ch = self.peek()
+ if ch not in '\0\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a block scalar", start_mark,
+ "expected a comment or a line break, but found %r" % ch,
+ self.get_mark())
+ self.scan_line_break()
+
+ def scan_block_scalar_indentation(self):
+ # See the specification for details.
+ chunks = []
+ max_indent = 0
+ end_mark = self.get_mark()
+ while self.peek() in ' \r\n\x85\u2028\u2029':
+ if self.peek() != ' ':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ else:
+ self.forward()
+ if self.column > max_indent:
+ max_indent = self.column
+ return chunks, max_indent, end_mark
+
+ def scan_block_scalar_breaks(self, indent):
+ # See the specification for details.
+ chunks = []
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == ' ':
+ self.forward()
+ while self.peek() in '\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ end_mark = self.get_mark()
+ while self.column < indent and self.peek() == ' ':
+ self.forward()
+ return chunks, end_mark
+
+ def scan_flow_scalar(self, style):
+ # See the specification for details.
+ # Note that we loosen indentation rules for quoted scalars. Quoted
+ # scalars don't need to adhere to indentation because " and ' clearly
+ # mark their beginning and end. Therefore we are less
+ # restrictive than the specification requires. We only need to check
+ # that document separators are not included in scalars.
+ if style == '"':
+ double = True
+ else:
+ double = False
+ chunks = []
+ start_mark = self.get_mark()
+ quote = self.peek()
+ self.forward()
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ while self.peek() != quote:
+ chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+ chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+ self.forward()
+ end_mark = self.get_mark()
+ return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+ style)
+
+ ESCAPE_REPLACEMENTS = {
+ '0': '\0',
+ 'a': '\x07',
+ 'b': '\x08',
+ 't': '\x09',
+ '\t': '\x09',
+ 'n': '\x0A',
+ 'v': '\x0B',
+ 'f': '\x0C',
+ 'r': '\x0D',
+ 'e': '\x1B',
+ ' ': '\x20',
+ '\"': '\"',
+ '\\': '\\',
+ 'N': '\x85',
+ '_': '\xA0',
+ 'L': '\u2028',
+ 'P': '\u2029',
+ }
+
+ ESCAPE_CODES = {
+ 'x': 2,
+ 'u': 4,
+ 'U': 8,
+ }
+
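+ # Illustrative escapes (editor's note, not part of the upstream source):
+ # in double-quoted scalars, "\x41" decodes to 'A' and "\u263A" to U+263A;
+ # single-quoted scalars support no escapes other than '' for a literal
+ # quote.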
+ def scan_flow_scalar_non_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ length = 0
+ while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
+ length += 1
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ ch = self.peek()
+ if not double and ch == '\'' and self.peek(1) == '\'':
+ chunks.append('\'')
+ self.forward(2)
+ elif (double and ch == '\'') or (not double and ch in '\"\\'):
+ chunks.append(ch)
+ self.forward()
+ elif double and ch == '\\':
+ self.forward()
+ ch = self.peek()
+ if ch in self.ESCAPE_REPLACEMENTS:
+ chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+ self.forward()
+ elif ch in self.ESCAPE_CODES:
+ length = self.ESCAPE_CODES[ch]
+ self.forward()
+ for k in range(length):
+ if self.peek(k) not in '0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "expected escape sequence of %d hexdecimal numbers, but found %r" %
+ (length, self.peek(k)), self.get_mark())
+ code = int(self.prefix(length), 16)
+ chunks.append(chr(code))
+ self.forward(length)
+ elif ch in '\r\n\x85\u2028\u2029':
+ self.scan_line_break()
+ chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+ else:
+ raise ScannerError("while scanning a double-quoted scalar", start_mark,
+ "found unknown escape character %r" % ch, self.get_mark())
+ else:
+ return chunks
+
+ def scan_flow_scalar_spaces(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ length = 0
+ while self.peek(length) in ' \t':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch == '\0':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected end of stream", self.get_mark())
+ elif ch in '\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ breaks = self.scan_flow_scalar_breaks(double, start_mark)
+ if line_break != '\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(' ')
+ chunks.extend(breaks)
+ else:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_flow_scalar_breaks(self, double, start_mark):
+ # See the specification for details.
+ chunks = []
+ while True:
+ # Instead of checking indentation, we check for document
+ # separators.
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ raise ScannerError("while scanning a quoted scalar", start_mark,
+ "found unexpected document separator", self.get_mark())
+ while self.peek() in ' \t':
+ self.forward()
+ if self.peek() in '\r\n\x85\u2028\u2029':
+ chunks.append(self.scan_line_break())
+ else:
+ return chunks
+
+ def scan_plain(self):
+ # See the specification for details.
+ # We add an additional restriction for the flow context:
+ # plain scalars in the flow context cannot contain ',', ':' and '?'.
+ # We also keep track of the `allow_simple_key` flag here.
+ # Indentation rules are loosened for the flow context.
+ chunks = []
+ start_mark = self.get_mark()
+ end_mark = start_mark
+ indent = self.indent+1
+ # We allow zero indentation for scalars, but then we need to check for
+ # document separators at the beginning of the line.
+ #if indent == 0:
+ # indent = 1
+ spaces = []
+ while True:
+ length = 0
+ if self.peek() == '#':
+ break
+ while True:
+ ch = self.peek(length)
+ if ch in '\0 \t\r\n\x85\u2028\u2029' \
+ or (not self.flow_level and ch == ':' and
+ self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \
+ or (self.flow_level and ch in ',:?[]{}'):
+ break
+ length += 1
+ # It's not clear what we should do with ':' in the flow context.
+ if (self.flow_level and ch == ':'
+ and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'):
+ self.forward(length)
+ raise ScannerError("while scanning a plain scalar", start_mark,
+ "found unexpected ':'", self.get_mark(),
+ "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
+ if length == 0:
+ break
+ self.allow_simple_key = False
+ chunks.extend(spaces)
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ end_mark = self.get_mark()
+ spaces = self.scan_plain_spaces(indent, start_mark)
+ if not spaces or self.peek() == '#' \
+ or (not self.flow_level and self.column < indent):
+ break
+ return ScalarToken(''.join(chunks), True, start_mark, end_mark)
+
+ def scan_plain_spaces(self, indent, start_mark):
+ # See the specification for details.
+ # The specification is really confusing about tabs in plain scalars.
+ # We just forbid them completely. Do not use tabs in YAML!
+ chunks = []
+ length = 0
+ while self.peek(length) in ' ':
+ length += 1
+ whitespaces = self.prefix(length)
+ self.forward(length)
+ ch = self.peek()
+ if ch in '\r\n\x85\u2028\u2029':
+ line_break = self.scan_line_break()
+ self.allow_simple_key = True
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return
+ breaks = []
+ while self.peek() in ' \r\n\x85\u2028\u2029':
+ if self.peek() == ' ':
+ self.forward()
+ else:
+ breaks.append(self.scan_line_break())
+ prefix = self.prefix(3)
+ if (prefix == '---' or prefix == '...') \
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ return
+ if line_break != '\n':
+ chunks.append(line_break)
+ elif not breaks:
+ chunks.append(' ')
+ chunks.extend(breaks)
+ elif whitespaces:
+ chunks.append(whitespaces)
+ return chunks
+
+ def scan_tag_handle(self, name, start_mark):
+ # See the specification for details.
+ # For some strange reason, the specification does not allow '_' in
+ # tag handles. I have allowed it anyway.
+ ch = self.peek()
+ if ch != '!':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch, self.get_mark())
+ length = 1
+ ch = self.peek(length)
+ if ch != ' ':
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_':
+ length += 1
+ ch = self.peek(length)
+ if ch != '!':
+ self.forward(length)
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected '!', but found %r" % ch, self.get_mark())
+ length += 1
+ value = self.prefix(length)
+ self.forward(length)
+ return value
+
+ def scan_tag_uri(self, name, start_mark):
+ # See the specification for details.
+ # Note: we do not check if URI is well-formed.
+ chunks = []
+ length = 0
+ ch = self.peek(length)
+ while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+ if ch == '%':
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ chunks.append(self.scan_uri_escapes(name, start_mark))
+ else:
+ length += 1
+ ch = self.peek(length)
+ if length:
+ chunks.append(self.prefix(length))
+ self.forward(length)
+ length = 0
+ if not chunks:
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected URI, but found %r" % ch, self.get_mark())
+ return ''.join(chunks)
+
+ def scan_uri_escapes(self, name, start_mark):
+ # See the specification for details.
+ codes = []
+ mark = self.get_mark()
+ while self.peek() == '%':
+ self.forward()
+ for k in range(2):
+ if self.peek(k) not in '0123456789ABCDEFabcdef':
+ raise ScannerError("while scanning a %s" % name, start_mark,
+ "expected URI escape sequence of 2 hexdecimal numbers, but found %r"
+ % self.peek(k), self.get_mark())
+ codes.append(int(self.prefix(2), 16))
+ self.forward(2)
+ try:
+ value = bytes(codes).decode('utf-8')
+ except UnicodeDecodeError as exc:
+ raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+ return value
+
+ def scan_line_break(self):
+ # Transforms:
+ # '\r\n' : '\n'
+ # '\r' : '\n'
+ # '\n' : '\n'
+ # '\x85' : '\n'
+ # '\u2028' : '\u2028'
+ # '\u2029' : '\u2029'
+ # default : ''
+ ch = self.peek()
+ if ch in '\r\n\x85':
+ if self.prefix(2) == '\r\n':
+ self.forward(2)
+ else:
+ self.forward()
+ return '\n'
+ elif ch in '\u2028\u2029':
+ self.forward()
+ return ch
+ return ''
+
+#try:
+# import psyco
+# psyco.bind(Scanner)
+#except ImportError:
+# pass
+
diff --git a/python/pyyaml/lib3/yaml/serializer.py b/python/pyyaml/lib3/yaml/serializer.py
new file mode 100644
index 000000000..fe911e67a
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+ pass
+
+class Serializer:
+
+ ANCHOR_TEMPLATE = 'id%03d'
+
+ def __init__(self, encoding=None,
+ explicit_start=None, explicit_end=None, version=None, tags=None):
+ self.use_encoding = encoding
+ self.use_explicit_start = explicit_start
+ self.use_explicit_end = explicit_end
+ self.use_version = version
+ self.use_tags = tags
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+ self.closed = None
+
+ def open(self):
+ if self.closed is None:
+ self.emit(StreamStartEvent(encoding=self.use_encoding))
+ self.closed = False
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ else:
+ raise SerializerError("serializer is already opened")
+
+ def close(self):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif not self.closed:
+ self.emit(StreamEndEvent())
+ self.closed = True
+
+ #def __del__(self):
+ # self.close()
+
+ def serialize(self, node):
+ if self.closed is None:
+ raise SerializerError("serializer is not opened")
+ elif self.closed:
+ raise SerializerError("serializer is closed")
+ self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+ version=self.use_version, tags=self.use_tags))
+ self.anchor_node(node)
+ self.serialize_node(node, None, None)
+ self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+ self.serialized_nodes = {}
+ self.anchors = {}
+ self.last_anchor_id = 0
+
+ def anchor_node(self, node):
+ if node in self.anchors:
+ if self.anchors[node] is None:
+ self.anchors[node] = self.generate_anchor(node)
+ else:
+ self.anchors[node] = None
+ if isinstance(node, SequenceNode):
+ for item in node.value:
+ self.anchor_node(item)
+ elif isinstance(node, MappingNode):
+ for key, value in node.value:
+ self.anchor_node(key)
+ self.anchor_node(value)
+
+ def generate_anchor(self, node):
+ self.last_anchor_id += 1
+ return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+ def serialize_node(self, node, parent, index):
+ alias = self.anchors[node]
+ if node in self.serialized_nodes:
+ self.emit(AliasEvent(alias))
+ else:
+ self.serialized_nodes[node] = True
+ self.descend_resolver(parent, index)
+ if isinstance(node, ScalarNode):
+ detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+ default_tag = self.resolve(ScalarNode, node.value, (False, True))
+ implicit = (node.tag == detected_tag), (node.tag == default_tag)
+ self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+ style=node.style))
+ elif isinstance(node, SequenceNode):
+ implicit = (node.tag
+ == self.resolve(SequenceNode, node.value, True))
+ self.emit(SequenceStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ index = 0
+ for item in node.value:
+ self.serialize_node(item, node, index)
+ index += 1
+ self.emit(SequenceEndEvent())
+ elif isinstance(node, MappingNode):
+ implicit = (node.tag
+ == self.resolve(MappingNode, node.value, True))
+ self.emit(MappingStartEvent(alias, node.tag, implicit,
+ flow_style=node.flow_style))
+ for key, value in node.value:
+ self.serialize_node(key, node, None)
+ self.serialize_node(value, node, key)
+ self.emit(MappingEndEvent())
+ self.ascend_resolver()
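+
+# Serializer is designed as a mixin: self.emit() comes from the Emitter
+# and self.descend_resolver()/self.resolve() from the Resolver that the
+# yaml Dumper classes combine with it. A minimal lifecycle sketch (the
+# node variables are placeholders):
+#
+#     serializer.open()               # emits StreamStartEvent, once
+#     serializer.serialize(node)      # one document per call
+#     serializer.serialize(other_node)
+#     serializer.close()              # emits StreamEndEvent; cannot reopen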
+
diff --git a/python/pyyaml/lib3/yaml/tokens.py b/python/pyyaml/lib3/yaml/tokens.py
new file mode 100644
index 000000000..4d0b48a39
--- /dev/null
+++ b/python/pyyaml/lib3/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+ def __init__(self, start_mark, end_mark):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in self.__dict__
+ if not key.endswith('_mark')]
+ attributes.sort()
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+# id = '<byte order mark>'
+
+class DirectiveToken(Token):
+ id = '<directive>'
+ def __init__(self, name, value, start_mark, end_mark):
+ self.name = name
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+ id = '<document start>'
+
+class DocumentEndToken(Token):
+ id = '<document end>'
+
+class StreamStartToken(Token):
+ id = '<stream start>'
+ def __init__(self, start_mark=None, end_mark=None,
+ encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndToken(Token):
+ id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+ id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+ id = '<block mapping start>'
+
+class BlockEndToken(Token):
+ id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+ id = '['
+
+class FlowMappingStartToken(Token):
+ id = '{'
+
+class FlowSequenceEndToken(Token):
+ id = ']'
+
+class FlowMappingEndToken(Token):
+ id = '}'
+
+class KeyToken(Token):
+ id = '?'
+
+class ValueToken(Token):
+ id = ':'
+
+class BlockEntryToken(Token):
+ id = '-'
+
+class FlowEntryToken(Token):
+ id = ','
+
+class AliasToken(Token):
+ id = '<alias>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class AnchorToken(Token):
+ id = '<anchor>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class TagToken(Token):
+ id = '<tag>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class ScalarToken(Token):
+ id = '<scalar>'
+ def __init__(self, value, plain, start_mark, end_mark, style=None):
+ self.value = value
+ self.plain = plain
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
diff --git a/python/pyyaml/setup.cfg b/python/pyyaml/setup.cfg
new file mode 100644
index 000000000..d0239e45e
--- /dev/null
+++ b/python/pyyaml/setup.cfg
@@ -0,0 +1,29 @@
+
+# The INCLUDE and LIB directories to build the '_yaml' extension.
+# You may also set them using the options '-I' and '-L'.
+[build_ext]
+
+# List of directories to search for 'yaml.h' (separated by ':').
+#include_dirs=/usr/local/include:../../include
+
+# List of directories to search for 'libyaml.a' (separated by ':').
+#library_dirs=/usr/local/lib:../../lib
+
+# An alternative compiler to build the extension.
+#compiler=mingw32
+
+# Additional preprocessor definitions might be required.
+#define=YAML_DECLARE_STATIC
+
+# The following options are used to build PyYAML Windows installer
+# for Python 2.5 on my PC:
+#include_dirs=../../../libyaml/tags/0.1.4/include
+#library_dirs=../../../libyaml/tags/0.1.4/win32/vs2003/output/release/lib
+#define=YAML_DECLARE_STATIC
+
+# The following options are used to build PyYAML Windows installer
+# for Python 2.6, 2.7, 3.0, 3.1 and 3.2 on my PC:
+#include_dirs=../../../libyaml/tags/0.1.4/include
+#library_dirs=../../../libyaml/tags/0.1.4/win32/vs2008/output/release/lib
+#define=YAML_DECLARE_STATIC
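+
+# For reference, the equivalent command-line form of the two directory
+# options above (directories shown are illustrative, not defaults):
+#   python setup.py build_ext -I /usr/local/include -L /usr/local/lib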
+
diff --git a/python/pyyaml/setup.py b/python/pyyaml/setup.py
new file mode 100644
index 000000000..727c3e06e
--- /dev/null
+++ b/python/pyyaml/setup.py
@@ -0,0 +1,345 @@
+
+NAME = 'PyYAML'
+VERSION = '3.11'
+DESCRIPTION = "YAML parser and emitter for Python"
+LONG_DESCRIPTION = """\
+YAML is a data serialization format designed for human readability
+and interaction with scripting languages. PyYAML is a YAML parser
+and emitter for Python.
+
+PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
+support, capable extension API, and sensible error messages. PyYAML
+supports standard YAML tags and provides Python-specific tags that
+allow representing an arbitrary Python object.
+
+PyYAML is applicable for a broad range of tasks from complex
+configuration files to object serialization and persistence."""
+AUTHOR = "Kirill Simonov"
+AUTHOR_EMAIL = 'xi@resolvent.net'
+LICENSE = "MIT"
+PLATFORMS = "Any"
+URL = "http://pyyaml.org/wiki/PyYAML"
+DOWNLOAD_URL = "http://pyyaml.org/download/pyyaml/%s-%s.tar.gz" % (NAME, VERSION)
+CLASSIFIERS = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.5",
+ "Programming Language :: Python :: 2.6",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.0",
+ "Programming Language :: Python :: 3.1",
+ "Programming Language :: Python :: 3.2",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Text Processing :: Markup",
+]
+
+
+LIBYAML_CHECK = """
+#include <yaml.h>
+
+int main(void) {
+ yaml_parser_t parser;
+ yaml_emitter_t emitter;
+
+ yaml_parser_initialize(&parser);
+ yaml_parser_delete(&parser);
+
+ yaml_emitter_initialize(&emitter);
+ yaml_emitter_delete(&emitter);
+
+ return 0;
+}
+"""
+
+
+import sys, os.path
+
+from distutils import log
+from distutils.core import setup, Command
+from distutils.core import Distribution as _Distribution
+from distutils.core import Extension as _Extension
+from distutils.dir_util import mkpath
+from distutils.command.build_ext import build_ext as _build_ext
+from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
+from distutils.errors import CompileError, LinkError, DistutilsPlatformError
+
+if 'setuptools.extension' in sys.modules:
+ _Extension = sys.modules['setuptools.extension']._Extension
+ sys.modules['distutils.core'].Extension = _Extension
+ sys.modules['distutils.extension'].Extension = _Extension
+ sys.modules['distutils.command.build_ext'].Extension = _Extension
+
+with_pyrex = None
+if sys.version_info[0] < 3:
+ try:
+ from Cython.Distutils.extension import Extension as _Extension
+ from Cython.Distutils import build_ext as _build_ext
+ with_pyrex = 'cython'
+ except ImportError:
+ try:
+ # Pyrex cannot build _yaml.c at the moment,
+ # but it may get fixed eventually.
+ from Pyrex.Distutils import Extension as _Extension
+ from Pyrex.Distutils import build_ext as _build_ext
+ with_pyrex = 'pyrex'
+ except ImportError:
+ pass
+
+
+class Distribution(_Distribution):
+
+ def __init__(self, attrs=None):
+ _Distribution.__init__(self, attrs)
+ if not self.ext_modules:
+ return
+ for idx in range(len(self.ext_modules)-1, -1, -1):
+ ext = self.ext_modules[idx]
+ if not isinstance(ext, Extension):
+ continue
+ setattr(self, ext.attr_name, None)
+ self.global_options = [
+ (ext.option_name, None,
+ "include %s (default if %s is available)"
+ % (ext.feature_description, ext.feature_name)),
+ (ext.neg_option_name, None,
+ "exclude %s" % ext.feature_description),
+ ] + self.global_options
+ self.negative_opt = self.negative_opt.copy()
+ self.negative_opt[ext.neg_option_name] = ext.option_name
+
+ def has_ext_modules(self):
+ if not self.ext_modules:
+ return False
+ for ext in self.ext_modules:
+ with_ext = self.ext_status(ext)
+ if with_ext is None or with_ext:
+ return True
+ return False
+
+ def ext_status(self, ext):
+ if 'Java' in sys.version or 'IronPython' in sys.version or 'PyPy' in sys.version:
+ return False
+ if isinstance(ext, Extension):
+ with_ext = getattr(self, ext.attr_name)
+ return with_ext
+ else:
+ return True
+
+
+class Extension(_Extension):
+
+ def __init__(self, name, sources, feature_name, feature_description,
+ feature_check, **kwds):
+ if not with_pyrex:
+ for filename in sources[:]:
+ base, ext = os.path.splitext(filename)
+ if ext == '.pyx':
+ sources.remove(filename)
+ sources.append('%s.c' % base)
+ _Extension.__init__(self, name, sources, **kwds)
+ self.feature_name = feature_name
+ self.feature_description = feature_description
+ self.feature_check = feature_check
+ self.attr_name = 'with_' + feature_name.replace('-', '_')
+ self.option_name = 'with-' + feature_name
+ self.neg_option_name = 'without-' + feature_name
+
+
+class build_ext(_build_ext):
+
+ def run(self):
+ optional = True
+ disabled = True
+ for ext in self.extensions:
+ with_ext = self.distribution.ext_status(ext)
+ if with_ext is None:
+ disabled = False
+ elif with_ext:
+ optional = False
+ disabled = False
+ break
+ if disabled:
+ return
+ try:
+ _build_ext.run(self)
+ except DistutilsPlatformError:
+ exc = sys.exc_info()[1]
+ if optional:
+ log.warn(str(exc))
+ log.warn("skipping build_ext")
+ else:
+ raise
+
+ def get_source_files(self):
+ self.check_extensions_list(self.extensions)
+ filenames = []
+ for ext in self.extensions:
+ if with_pyrex == 'pyrex':
+ self.pyrex_sources(ext.sources, ext)
+ elif with_pyrex == 'cython':
+ self.cython_sources(ext.sources, ext)
+ for filename in ext.sources:
+ filenames.append(filename)
+ base = os.path.splitext(filename)[0]
+ for ext in ['c', 'h', 'pyx', 'pxd']:
+ filename = '%s.%s' % (base, ext)
+ if filename not in filenames and os.path.isfile(filename):
+ filenames.append(filename)
+ return filenames
+
+ def get_outputs(self):
+ self.check_extensions_list(self.extensions)
+ outputs = []
+ for ext in self.extensions:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = os.path.join(self.build_lib,
+ self.get_ext_filename(fullname))
+ if os.path.isfile(filename):
+ outputs.append(filename)
+ return outputs
+
+ def build_extensions(self):
+ self.check_extensions_list(self.extensions)
+ for ext in self.extensions:
+ with_ext = self.distribution.ext_status(ext)
+ if with_ext is None:
+ with_ext = self.check_extension_availability(ext)
+ if not with_ext:
+ continue
+ if with_pyrex == 'pyrex':
+ ext.sources = self.pyrex_sources(ext.sources, ext)
+ elif with_pyrex == 'cython':
+ ext.sources = self.cython_sources(ext.sources, ext)
+ self.build_extension(ext)
+
+ def check_extension_availability(self, ext):
+ cache = os.path.join(self.build_temp, 'check_%s.out' % ext.feature_name)
+ if not self.force and os.path.isfile(cache):
+ data = open(cache).read().strip()
+ if data == '1':
+ return True
+ elif data == '0':
+ return False
+ mkpath(self.build_temp)
+ src = os.path.join(self.build_temp, 'check_%s.c' % ext.feature_name)
+ open(src, 'w').write(ext.feature_check)
+ log.info("checking if %s is compilable" % ext.feature_name)
+ try:
+ [obj] = self.compiler.compile([src],
+ macros=ext.define_macros+[(undef,) for undef in ext.undef_macros],
+ include_dirs=ext.include_dirs,
+ extra_postargs=(ext.extra_compile_args or []),
+ depends=ext.depends)
+ except CompileError:
+ log.warn("")
+ log.warn("%s is not found or a compiler error: forcing --%s"
+ % (ext.feature_name, ext.neg_option_name))
+ log.warn("(if %s is installed correctly, you may need to"
+ % ext.feature_name)
+ log.warn(" specify the option --include-dirs or uncomment and")
+ log.warn(" modify the parameter include_dirs in setup.cfg)")
+ open(cache, 'w').write('0\n')
+ return False
+ prog = 'check_%s' % ext.feature_name
+ log.info("checking if %s is linkable" % ext.feature_name)
+ try:
+ self.compiler.link_executable([obj], prog,
+ output_dir=self.build_temp,
+ libraries=ext.libraries,
+ library_dirs=ext.library_dirs,
+ runtime_library_dirs=ext.runtime_library_dirs,
+ extra_postargs=(ext.extra_link_args or []))
+ except LinkError:
+ log.warn("")
+ log.warn("%s is not found or a linker error: forcing --%s"
+ % (ext.feature_name, ext.neg_option_name))
+ log.warn("(if %s is installed correctly, you may need to"
+ % ext.feature_name)
+ log.warn(" specify the option --library-dirs or uncomment and")
+ log.warn(" modify the parameter library_dirs in setup.cfg)")
+ open(cache, 'w').write('0\n')
+ return False
+ open(cache, 'w').write('1\n')
+ return True
+
+
+class bdist_rpm(_bdist_rpm):
+
+ def _make_spec_file(self):
+ argv0 = sys.argv[0]
+ features = []
+ for ext in self.distribution.ext_modules:
+ if not isinstance(ext, Extension):
+ continue
+ with_ext = getattr(self.distribution, ext.attr_name)
+ if with_ext is None:
+ continue
+ if with_ext:
+ features.append('--'+ext.option_name)
+ else:
+ features.append('--'+ext.neg_option_name)
+ sys.argv[0] = ' '.join([argv0]+features)
+ spec_file = _bdist_rpm._make_spec_file(self)
+ sys.argv[0] = argv0
+ return spec_file
+
+
+class test(Command):
+
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ build_cmd = self.get_finalized_command('build')
+ build_cmd.run()
+ sys.path.insert(0, build_cmd.build_lib)
+ if sys.version_info[0] < 3:
+ sys.path.insert(0, 'tests/lib')
+ else:
+ sys.path.insert(0, 'tests/lib3')
+ import test_all
+ test_all.main([])
+
+
+if __name__ == '__main__':
+
+ setup(
+ name=NAME,
+ version=VERSION,
+ description=DESCRIPTION,
+ long_description=LONG_DESCRIPTION,
+ author=AUTHOR,
+ author_email=AUTHOR_EMAIL,
+ license=LICENSE,
+ platforms=PLATFORMS,
+ url=URL,
+ download_url=DOWNLOAD_URL,
+ classifiers=CLASSIFIERS,
+
+ package_dir={'': {2: 'lib', 3: 'lib3'}[sys.version_info[0]]},
+ packages=['yaml'],
+ ext_modules=[
+ Extension('_yaml', ['ext/_yaml.pyx'],
+ 'libyaml', "LibYAML bindings", LIBYAML_CHECK,
+ libraries=['yaml']),
+ ],
+
+ distclass=Distribution,
+
+ cmdclass={
+ 'build_ext': build_ext,
+ 'bdist_rpm': bdist_rpm,
+ 'test': test,
+ },
+ )
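+
+# Note: the Distribution subclass above turns the 'libyaml' feature into
+# global command-line switches, so builds can force or skip the C
+# extension (a usage sketch, using the option names defined in this file):
+#
+#   python setup.py --with-libyaml install
+#   python setup.py --without-libyaml install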
+
diff --git a/python/redo/PKG-INFO b/python/redo/PKG-INFO
new file mode 100644
index 000000000..1f2f84d85
--- /dev/null
+++ b/python/redo/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: redo
+Version: 1.6
+Summary: Utilities to retry Python callables.
+Home-page: https://github.com/bhearsum/redo
+Author: Ben Hearsum
+Author-email: ben@hearsum.ca
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/python/redo/README b/python/redo/README
new file mode 100644
index 000000000..d2247eb55
--- /dev/null
+++ b/python/redo/README
@@ -0,0 +1,4 @@
+Redo - Utilities to retry Python callables
+******************************************
+
+Redo provides various means to add seamless retriability to any Python callable: a plain function (redo.retry), a decorator (redo.retriable), and a context manager (redo.retrying), so you can integrate it in the way that best fits your project. As a bonus, a standalone command-line interface is also included ("retry"). For details and sample invocations, have a look at the docstrings in redo/__init__.py or at the sketch below.
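+
+A minimal sketch of the three interfaces (flaky_fetch is a placeholder
+for your own callable; the other names come from redo/__init__.py):
+
+    from redo import retry, retriable, retrying
+
+    # plain function: call flaky_fetch until it succeeds or we give up
+    retry(flaky_fetch, attempts=3, sleeptime=1, args=("http://blah",))
+
+    # decorator: give a function retry powers at definition time
+    @retriable(attempts=3, sleeptime=1)
+    def fetch_with_retries():
+        return flaky_fetch("http://blah")
+
+    # context manager: wrap an existing callable on the fly
+    with retrying(flaky_fetch, attempts=3, sleeptime=1) as robust_fetch:
+        robust_fetch("http://blah")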
diff --git a/python/redo/redo/__init__.py b/python/redo/redo/__init__.py
new file mode 100644
index 000000000..a124eeaaa
--- /dev/null
+++ b/python/redo/redo/__init__.py
@@ -0,0 +1,240 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import time
+from functools import wraps
+from contextlib import contextmanager
+import logging
+import random
+log = logging.getLogger(__name__)
+
+
+def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1):
+ """
+ A generator function that sleeps between retries and handles exponential
+ backoff and jitter. The action you are retrying is meant to run after
+ retrier yields.
+
+ At each iteration, we sleep for sleeptime + random.randint(-jitter, jitter).
+ Afterwards sleeptime is multiplied by sleepscale for the next iteration.
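+
+ For example, with the defaults (attempts=5, sleeptime=10, sleepscale=1.5)
+ and jitter disabled, the successive sleeps are 10, 15, 22.5 and 33.75
+ seconds; no sleep follows the final attempt.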
+
+ Args:
+ attempts (int): maximum number of times to try; defaults to 5
+ sleeptime (float): how many seconds to sleep between tries; defaults to
+ 10s (ten seconds)
+ max_sleeptime (float): the longest we'll sleep, in seconds; defaults to
+ 300s (five minutes)
+ sleepscale (float): how much to multiply the sleep time by each
+ iteration; defaults to 1.5
+ jitter (int): random jitter to introduce into the sleep time each
+ iteration; the amount is chosen at random between [-jitter, +jitter].
+ Defaults to 1.
+
+ Yields:
+ the current sleep time in seconds, at most `attempts` times
+
+ Example:
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 3:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ >>> n
+ 3
+
+ >>> n = 0
+ >>> for _ in retrier(sleeptime=0, jitter=0):
+ ... if n == 6:
+ ... # We did the thing!
+ ... break
+ ... n += 1
+ ... else:
+ ... print("max tries hit")
+ max tries hit
+ """
+ jitter = jitter or 0 # py35 barfs on the next line if jitter is None
+ if jitter > sleeptime:
+ # To prevent negative sleep times
+ raise Exception('jitter ({}) must be less than sleep time ({})'.format(jitter, sleeptime))
+
+ sleeptime_real = sleeptime
+ for _ in range(attempts):
+ log.debug("attempt %i/%i", _ + 1, attempts)
+
+ yield sleeptime_real
+
+ if jitter:
+ sleeptime_real = sleeptime + random.randint(-jitter, jitter)
+ # our jitter should scale along with the sleeptime
+ jitter = int(jitter * sleepscale)
+ else:
+ sleeptime_real = sleeptime
+
+ sleeptime *= sleepscale
+
+ if sleeptime_real > max_sleeptime:
+ sleeptime_real = max_sleeptime
+
+ # Don't need to sleep the last time
+ if _ < attempts - 1:
+ log.debug("sleeping for %.2fs (attempt %i/%i)", sleeptime_real, _ + 1, attempts)
+ time.sleep(sleeptime_real)
+
+
+def retry(action, attempts=5, sleeptime=60, max_sleeptime=5 * 60,
+ sleepscale=1.5, jitter=1, retry_exceptions=(Exception,),
+ cleanup=None, args=(), kwargs={}):
+ """
+ Calls an action function until it succeeds, or we give up.
+
+ Args:
+ action (callable): the function to retry
+ attempts (int): maximum number of times to try; defaults to 5
+ sleeptime (float): how many seconds to sleep between tries; defaults to
+ 60s (one minute)
+ max_sleeptime (float): the longest we'll sleep, in seconds; defaults to
+ 300s (five minutes)
+ sleepscale (float): how much to multiply the sleep time by each
+ iteration; defaults to 1.5
+ jitter (int): random jitter to introduce into the sleep time each
+ iteration; the amount is chosen at random between [-jitter, +jitter].
+ Defaults to 1.
+ retry_exceptions (tuple): tuple of exceptions to be caught. If other
+ exceptions are raised by action(), then these
+ are immediately re-raised to the caller.
+ cleanup (callable): optional; called if one of `retry_exceptions` is
+ caught. No arguments are passed to the cleanup
+ function; if your cleanup requires arguments,
+ consider using functools.partial or a lambda
+ function.
+ args (tuple): positional arguments to call `action` with
+ kwargs (dict): keyword arguments to call `action` with
+
+ Returns:
+ Whatever action(*args, **kwargs) returns
+
+ Raises:
+ Whatever action(*args, **kwargs) raises. `retry_exceptions` are caught
+ up until the last attempt, in which case they are re-raised.
+
+ Example:
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count is too small!")
+ ... return "success!"
+ >>> retry(foo, sleeptime=0, jitter=0)
+ 1
+ 2
+ 3
+ 'success!'
+ """
+ assert callable(action)
+ assert not cleanup or callable(cleanup)
+
+ action_name = getattr(action, '__name__', action)
+ if args or kwargs:
+ log_attempt_format = ("retry: calling %s with args: %s,"
+ " kwargs: %s, attempt #%%d"
+ % (action_name, args, kwargs))
+ else:
+ log_attempt_format = ("retry: calling %s, attempt #%%d"
+ % action_name)
+
+ if max_sleeptime < sleeptime:
+ log.debug("max_sleeptime %d less than sleeptime %d" % (
+ max_sleeptime, sleeptime))
+
+ n = 1
+ for _ in retrier(attempts=attempts, sleeptime=sleeptime,
+ max_sleeptime=max_sleeptime, sleepscale=sleepscale,
+ jitter=jitter):
+ try:
+ logfn = log.info if n != 1 else log.debug
+ logfn(log_attempt_format, n)
+ return action(*args, **kwargs)
+ except retry_exceptions:
+ log.debug("retry: Caught exception: ", exc_info=True)
+ if cleanup:
+ cleanup()
+ if n == attempts:
+ log.info("retry: Giving up on %s" % action_name)
+ raise
+ continue
+ finally:
+ n += 1
+
+
+def retriable(*retry_args, **retry_kwargs):
+ """
+ A decorator factory for retry(). Wrap your function in @retriable(...) to
+ give it retry powers!
+
+ Arguments:
+ Same as for `retry`, with the exception of `action`, `args`, and `kwargs`,
+ which are left to the normal function definition.
+
+ Returns:
+ A function decorator
+
+ Example:
+ >>> count = 0
+ >>> @retriable(sleeptime=0, jitter=0)
+ ... def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> foo()
+ 1
+ 2
+ 3
+ 'success!'
+ """
+ def _retriable_factory(func):
+ @wraps(func)
+ def _retriable_wrapper(*args, **kwargs):
+ return retry(func, args=args, kwargs=kwargs, *retry_args,
+ **retry_kwargs)
+ return _retriable_wrapper
+ return _retriable_factory
+
+
+@contextmanager
+def retrying(func, *retry_args, **retry_kwargs):
+ """
+ A context manager for wrapping functions with retry functionality.
+
+ Arguments:
+ func (callable): the function to wrap
+ other arguments as per `retry`
+
+ Returns:
+ A context manager that returns retriable(func) on __enter__
+
+ Example:
+ >>> count = 0
+ >>> def foo():
+ ... global count
+ ... count += 1
+ ... print(count)
+ ... if count < 3:
+ ... raise ValueError("count too small")
+ ... return "success!"
+ >>> with retrying(foo, sleeptime=0, jitter=0) as f:
+ ... f()
+ 1
+ 2
+ 3
+ 'success!'
+ """
+ yield retriable(*retry_args, **retry_kwargs)(func)
diff --git a/python/redo/redo/cmd.py b/python/redo/redo/cmd.py
new file mode 100644
index 000000000..afd98e744
--- /dev/null
+++ b/python/redo/redo/cmd.py
@@ -0,0 +1,53 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import logging
+from subprocess import check_call, CalledProcessError
+import sys
+
+from redo import retrying
+
+log = logging.getLogger(__name__)
+
+
+def main():
+ from argparse import ArgumentParser
+
+ parser = ArgumentParser()
+ parser.add_argument(
+ "-a", "--attempts", type=int, default=5,
+ help="How many times to retry.")
+ parser.add_argument(
+ "-s", "--sleeptime", type=int, default=60,
+ help="How long to sleep between attempts. Sleeptime doubles after each attempt.")
+ parser.add_argument(
+ "-m", "--max-sleeptime", type=int, default=5*60,
+ help="Maximum length of time to sleep between attempts (limits backoff length).")
+ parser.add_argument("-v", "--verbose", action="store_true", default=False)
+ parser.add_argument("cmd", nargs="+", help="Command to run. Eg: wget http://blah")
+
+ args = parser.parse_args()
+
+ if args.verbose:
+ logging.basicConfig(level=logging.INFO)
+ logging.getLogger("retry").setLevel(logging.INFO)
+ else:
+ logging.basicConfig(level=logging.ERROR)
+ logging.getLogger("retry").setLevel(logging.ERROR)
+
+ try:
+ with retrying(check_call, attempts=args.attempts, sleeptime=args.sleeptime,
+ max_sleeptime=args.max_sleeptime,
+ retry_exceptions=(CalledProcessError,)) as r_check_call:
+ r_check_call(args.cmd)
+ except KeyboardInterrupt:
+ sys.exit(-1)
+ except Exception as e:
+ log.error("Unable to run command after %d attempts" % args.attempts, exc_info=True)
+ rc = getattr(e, "returncode", -2)
+ sys.exit(rc)
+
+if __name__ == "__main__":
+ main()
diff --git a/python/redo/setup.cfg b/python/redo/setup.cfg
new file mode 100644
index 000000000..6c71b612d
--- /dev/null
+++ b/python/redo/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/redo/setup.py b/python/redo/setup.py
new file mode 100644
index 000000000..a1e57d7f0
--- /dev/null
+++ b/python/redo/setup.py
@@ -0,0 +1,18 @@
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+setup(
+ name="redo",
+ version="1.6",
+ description="Utilities to retry Python callables.",
+ author="Ben Hearsum",
+ author_email="ben@hearsum.ca",
+ packages=["redo"],
+ entry_points={
+ "console_scripts": ["retry = redo.cmd:main"],
+ },
+ url="https://github.com/bhearsum/redo",
+)
diff --git a/python/requests/HISTORY.rst b/python/requests/HISTORY.rst
new file mode 100644
index 000000000..f8c1a5454
--- /dev/null
+++ b/python/requests/HISTORY.rst
@@ -0,0 +1,1130 @@
+.. :changelog:
+
+Release History
+---------------
+
+2.9.1 (2015-12-21)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+- Fixed errors when calculating cookie expiration dates in certain locales.
+
+**Miscellaneous**
+
+- Updated bundled urllib3 to 1.13.1.
+
+2.9.0 (2015-12-15)
+++++++++++++++++++
+
+**Minor Improvements** (Backwards compatible)
+
+- The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+- Warnings are now emitted when sending files opened in text mode.
+- Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+**Bugfixes**
+
+- For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+- When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+- We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+- We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+- Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+- Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+- Minor performance improvements when removing specific cookies by name.
+
+**Miscellaneous**
+
+- Updated urllib3 to 1.13.
+
+2.8.1 (2015-10-13)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
+ bundle.
+- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
+ ``ConnectionError``
+- When using the PreparedRequest flow, requests will now correctly respect the
+ ``json`` parameter. Broken in 2.8.0.
+- When using the PreparedRequest flow, requests will now correctly handle a
+ Unicode-string method name on Python 2. Broken in 2.8.0.
+
+2.8.0 (2015-10-05)
+++++++++++++++++++
+
+**Minor Improvements** (Backwards Compatible)
+
+- Requests now supports per-host proxies. This allows the ``proxies``
+ dictionary to have entries of the form
+ ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used
+ in preference to the previously-supported scheme-specific ones, but the
+ previous syntax will continue to work.
+- ``Response.raise_for_status`` now prints the URL that failed as part of the
+ exception message.
+- ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
+ defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause
+ exceptions to be thrown.
+- Change to bundled projects import logic to make it easier to unbundle
+ requests downstream.
+- Changed the default User-Agent string to avoid leaking data on Linux: now
+ contains only the requests version.
+
+**Bugfixes**
+
+- The ``json`` parameter to ``post()`` and friends will now only be used if
+ neither ``data`` nor ``files`` are present, consistent with the
+ documentation.
+- We now ignore empty fields in the ``NO_PROXY`` environment variable.
+- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
+ ``stream=True`` with ``contextlib.closing``.
+- Prevented bugs where we would attempt to return the same connection back to
+ the connection pool twice when sending a Chunked body.
+- Miscellaneous minor internal changes.
+- Digest Auth support is now thread safe.
+
+**Updates**
+
+- Updated urllib3 to 1.12.
+
+2.7.0 (2015-05-03)
+++++++++++++++++++
+
+This is the first release that follows our new release process. For more, see
+`our documentation
+<http://docs.python-requests.org/en/latest/community/release-process/>`_.
+
+**Bugfixes**
+
+- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
+ encoding and response framing.
+
+2.6.2 (2015-04-23)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Fix regression where compressed data that was sent as chunked data was not
+ properly decompressed. (#2561)
+
+2.6.1 (2015-04-22)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Remove VendorAlias import machinery introduced in v2.5.2.
+
+- Simplify the PreparedRequest.prepare API: We no longer require the user to
+ pass an empty list to the hooks keyword argument. (c.f. #2552)
+
+- Resolve redirects now receives and forwards all of the original arguments to
+ the adapter. (#2503)
+
+- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
+ cannot be encoded in ASCII. (#2540)
+
+- Populate the parsed path of the URI field when performing Digest
+ Authentication. (#2426)
+
+- Copy a PreparedRequest's CookieJar more reliably when it is not an instance
+ of RequestsCookieJar. (#2527)
+
+2.6.0 (2015-03-14)
+++++++++++++++++++
+
+**Bugfixes**
+
+- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
+ without a host value set would use the hostname for the redirected URL
+ exposing requests users to session fixation attacks and potentially cookie
+ stealing. This was disclosed privately by Matthew Daley of
+ `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
+ v2.1.0 to v2.5.3 (inclusive on both ends).
+
+- Fix error when requests is an ``install_requires`` dependency and ``python
+ setup.py test`` is run. (#2462)
+
+- Fix error when urllib3 is unbundled and requests continues to use the
+ vendored import location.
+
+- Include fixes to ``urllib3``'s header handling.
+
+- Requests' handling of unvendored dependencies is now more restrictive.
+
+**Features and Improvements**
+
+- Support bytearrays when passed as parameters in the ``files`` argument.
+ (#2468)
+
+- Avoid data duplication when creating a request with ``str``, ``bytes``, or
+ ``bytearray`` input to the ``files`` argument.
+
+2.5.3 (2015-02-24)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Revert changes to our vendored certificate bundle. For more context see
+ (#2455, #2456, and http://bugs.python.org/issue23476)
+
+2.5.2 (2015-02-23)
+++++++++++++++++++
+
+**Features and Improvements**
+
+- Add sha256 fingerprint support. (`shazow/urllib3#540`_)
+
+- Improve the performance of headers. (`shazow/urllib3#544`_)
+
+**Bugfixes**
+
+- Copy pip's import machinery. When downstream redistributors remove
+ requests.packages.urllib3 the import machinery will continue to let those
+ same symbols work. Example usage in requests' documentation and 3rd-party
+ libraries relying on the vendored copies of urllib3 will work without having
+ to fallback to the system urllib3.
+
+- Attempt to quote parts of the URL on redirect if unquoting and then quoting
+ fails. (#2356)
+
+- Fix filename type check for multipart form-data uploads. (#2411)
+
+- Properly handle the case where a server issuing digest authentication
+ challenges provides both auth and auth-int qop-values. (#2408)
+
+- Fix a socket leak. (`shazow/urllib3#549`_)
+
+- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
+
+- Disable the built-in hostname verification. (`shazow/urllib3#526`_)
+
+- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
+
+**Security**
+
+- Pulled in an updated ``cacert.pem``.
+
+- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
+
+.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
+.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
+.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
+.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
+.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
+.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
+.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
+
+2.5.1 (2014-12-23)
+++++++++++++++++++
+
+**Behavioural Changes**
+
+- Only catch HTTPErrors in raise_for_status (#2382)
+
+**Bugfixes**
+
+- Handle LocationParseError from urllib3 (#2344)
+- Handle file-like object filenames that are not strings (#2379)
+- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
+
+2.5.0 (2014-12-01)
+++++++++++++++++++
+
+**Improvements**
+
+- Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
+- The ``iter_lines`` method on a response now accepts a delimiter with which
+ to split the content (#2295)
+
+**Behavioural Changes**
+
+- Add deprecation warnings to functions in requests.utils that will be removed
+ in 3.0 (#2309)
+- Sessions used by the functional API are always closed (#2326)
+- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
+
+**Bugfixes**
+
+- Only parse the URL once (#2353)
+- Allow Content-Length header to always be overridden (#2332)
+- Properly handle files in HTTPDigestAuth (#2333)
+- Cap redirect_cache size to prevent memory abuse (#2299)
+- Fix HTTPDigestAuth handling of redirects after authenticating successfully
+ (#2253)
+- Fix crash with custom method parameter to Session.request (#2317)
+- Fix how Link headers are parsed using the regular expression library (#2271)
+
+**Documentation**
+
+- Add more references for interlinking (#2348)
+- Update CSS for theme (#2290)
+- Update width of buttons and sidebar (#2289)
+- Replace references of Gittip with Gratipay (#2282)
+- Add link to changelog in sidebar (#2273)
+
+2.4.3 (2014-10-06)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Unicode URL improvements for Python 2.
+- Re-order JSON param for backwards compat.
+- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
+
+
+2.4.2 (2014-10-05)
+++++++++++++++++++
+
+**Improvements**
+
+- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
+- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
+
+**Bugfixes**
+
+- Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
+- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
+
+**Documentation**
+
+- Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
+- Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
+- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
+
+
+
+2.4.1 (2014-09-09)
+++++++++++++++++++
+
+- Now has a "security" package extras set, ``$ pip install requests[security]``
+- Requests will now use Certifi if it is available.
+- Capture and re-raise urllib3 ProtocolError
+- Bugfix for responses that attempt to redirect to themselves forever (wtf?).
+
+
+2.4.0 (2014-08-29)
+++++++++++++++++++
+
+**Behavioral Changes**
+
+- ``Connection: keep-alive`` header is now sent automatically.
+
+**Improvements**
+
+- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts.
+- Allow copying of PreparedRequests without headers/cookies.
+- Updated bundled urllib3 version.
+- Refactored settings loading from environment -- new `Session.merge_environment_settings`.
+- Handle socket errors in iter_content.
+
+
+2.3.0 (2014-05-16)
+++++++++++++++++++
+
+**API Changes**
+
+- New ``Response`` property ``is_redirect``, which is true when the
+ library could have processed this response as a redirection (whether
+ or not it actually did).
+- The ``timeout`` parameter now affects requests with both ``stream=True`` and
+ ``stream=False`` equally.
+- The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
+ Proxy schemes now default to ``http://``.
+- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
+ dictionary when referenced as a string or viewed in the interpreter.
+
+**Bugfixes**
+
+- No longer expose Authorization or Proxy-Authorization headers on redirect.
+ Fix CVE-2014-1829 and CVE-2014-1830 respectively.
+- Authorization is re-evaluated each redirect.
+- On redirect, pass url as native strings.
+- Fall-back to autodetected encoding for JSON when Unicode detection fails.
+- Headers set to ``None`` on the ``Session`` are now correctly not sent.
+- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
+ response.
+- Stop advertising ``compress`` as a supported Content-Encoding.
+- The ``Response.history`` parameter is now always a list.
+- Many, many ``urllib3`` bugfixes.
+
+2.2.1 (2014-01-23)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
+- Assorted urllib3 fixes.
+
+2.2.0 (2014-01-09)
+++++++++++++++++++
+
+**API Changes**
+
+- New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
+ ``DecodeError`` exceptions.
+
+**Bugfixes**
+
+- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
+- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
+- Use the correct pool size for pools of connections to proxies.
+- Fix iteration of ``CookieJar`` objects.
+- Ensure that cookies are persisted over redirect.
+- Switch back to using chardet, since it has merged with charade.
+
+2.1.0 (2013-12-05)
+++++++++++++++++++
+
+- Updated CA Bundle, of course.
+- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
+- Clean up connections when we hit problems during chunked upload, rather than leaking them.
+- Return connections to the pool when a chunked upload is successful, rather than leaking it.
+- Match the HTTPbis recommendation for HTTP 301 redirects.
+- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
+- Values of headers set by Requests are now always the native string type.
+- Fix previously broken SNI support.
+- Fix accessing HTTP proxies using proxy authentication.
+- Unencode HTTP Basic usernames and passwords extracted from URLs.
+- Support for IP address ranges for no_proxy environment variable
+- Parse headers correctly when users override the default ``Host:`` header.
+- Avoid munging the URL in case of case-sensitive servers.
+- Looser URL handling for non-HTTP/HTTPS urls.
+- Accept unicode methods in Python 2.6 and 2.7.
+- More resilient cookie handling.
+- Make ``Response`` objects pickleable.
+- Actually added MD5-sess to Digest Auth instead of pretending to, like last time.
+- Updated internal urllib3.
+- Fixed @Lukasa's lack of taste.
+
+2.0.1 (2013-10-24)
+++++++++++++++++++
+
+- Updated included CA Bundle with new mistrusts and automated process for the future
+- Added MD5-sess to Digest Auth
+- Accept per-file headers in multipart file POST messages.
+- Fixed: Don't send the full URL on CONNECT messages.
+- Fixed: Correctly lowercase a redirect scheme.
+- Fixed: Cookies not persisted when set via functional API.
+- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
+- Updated internal urllib3 and chardet.
+
+2.0.0 (2013-09-24)
+++++++++++++++++++
+
+**API Changes:**
+
+- Keys in the Headers dictionary are now native strings on all Python versions,
+ i.e. bytestrings on Python 2, unicode on Python 3.
+- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
+ will be raised if they don't.
+- Timeouts now apply to read time if ``Stream=False``.
+- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
+- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
+- Added new method to ``Session`` objects: ``Session.update_request()``. This
+ method updates a ``Request`` object with the data (e.g. cookies) stored on
+ the ``Session``.
+- Added new method to ``Session`` objects: ``Session.prepare_request()``. This
+ method updates and prepares a ``Request`` object, and returns the
+ corresponding ``PreparedRequest`` object.
+- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
+ This should not be called directly, but improves the subclass interface.
+- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
+ will now raise a Requests ``ChunkedEncodingError`` instead.
+- Invalid percent-escape sequences now cause a Requests ``InvalidURL``
+ exception to be raised.
+- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
+ ``"already_reported"``.
+- HTTP 226 reason added (``"im_used"``).
+
+**Bugfixes:**
+
+- Vastly improved proxy support, including the CONNECT verb. Special thanks to
+ the many contributors who worked towards this improvement.
+- Cookies are now properly managed when 401 authentication responses are
+ received.
+- Chunked encoding fixes.
+- Support for mixed case schemes.
+- Better handling of streaming downloads.
+- Retrieve environment proxies from more locations.
+- Minor cookies fixes.
+- Improved redirect behaviour.
+- Improved streaming behaviour, particularly for compressed data.
+- Miscellaneous small Python 3 text encoding bugs.
+- ``.netrc`` no longer overrides explicit auth.
+- Cookies set by hooks are now correctly persisted on Sessions.
+- Fix problem with cookies that specify port numbers in their host field.
+- ``BytesIO`` can be used to perform streaming uploads.
+- More generous parsing of the ``no_proxy`` environment variable.
+- Non-string objects can be passed in data values alongside files.
+
+1.2.3 (2013-05-25)
+++++++++++++++++++
+
+- Simple packaging fix
+
+
+1.2.2 (2013-05-23)
+++++++++++++++++++
+
+- Simple packaging fix
+
+
+1.2.1 (2013-05-20)
+++++++++++++++++++
+
+- 301 and 302 redirects now change the verb to GET for all verbs, not just
+ POST, improving browser compatibility.
+- Python 3.3.2 compatibility
+- Always percent-encode location headers
+- Fix connection adapter matching to be most-specific first
+- new argument to the default connection adapter for passing a block argument
+- prevent a KeyError when there's no link headers
+
+1.2.0 (2013-03-31)
+++++++++++++++++++
+
+- Fixed cookies on sessions and on requests
+- Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+- certifi support was removed
+- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+- Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+- Fix DigestAuth handling too many 401s
+- Update vendored urllib3 to include SSL bug fixes
+- Allow keyword arguments to be passed to ``json.loads()`` via the
+ ``Response.json()`` method
+- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+- Fix ``RequestsCookieJar``
+- Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+- Update charade to version 1.0.3
+
+The change in how hooks are dispatched will likely cause a great deal of
+issues.
+
+1.1.0 (2013-01-10)
+++++++++++++++++++
+
+- CHUNKED REQUESTS
+- Support for iterable response bodies
+- Assume servers persist redirect params
+- Allow explicit content types to be specified for file data
+- Make merge_kwargs case-insensitive when looking up keys
+
+1.0.3 (2012-12-18)
+++++++++++++++++++
+
+- Fix file upload encoding bug
+- Fix cookie behavior
+
+1.0.2 (2012-12-17)
+++++++++++++++++++
+
+- Proxy fix for HTTPAdapter.
+
+1.0.1 (2012-12-17)
+++++++++++++++++++
+
+- Cert verification exception bug.
+- Proxy fix for HTTPAdapter.
+
+1.0.0 (2012-12-17)
+++++++++++++++++++
+
+- Massive Refactor and Simplification
+- Switch to Apache 2.0 license
+- Swappable Connection Adapters
+- Mountable Connection Adapters
+- Mutable ProcessedRequest chain
+- /s/prefetch/stream
+- Removal of all configuration
+- Standard library logging
+- Make Response.json() callable, not property.
+- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
+- Removal of all hooks except 'response'
+- Removal of all authentication helpers (OAuth, Kerberos)
+
+This is not a backwards compatible change.
+
+0.14.2 (2012-10-27)
++++++++++++++++++++
+
+- Improved mime-compatible JSON handling
+- Proxy fixes
+- Path hack fixes
+- Case-Insensitive Content-Encoding headers
+- Support for CJK parameters in form posts
+
+
+0.14.1 (2012-10-01)
++++++++++++++++++++
+
+- Python 3.3 Compatibility
+- Simplify default accept-encoding
+- Bugfixes
+
+
+0.14.0 (2012-09-02)
+++++++++++++++++++++
+
+- No more iter_content errors if already downloaded.
+
+0.13.9 (2012-08-25)
++++++++++++++++++++
+
+- Fix for OAuth + POSTs
+- Remove exception eating from dispatch_hook
+- General bugfixes
+
+0.13.8 (2012-08-21)
++++++++++++++++++++
+
+- Incredible Link header support :)
+
+0.13.7 (2012-08-19)
++++++++++++++++++++
+
+- Support for (key, value) lists everywhere.
+- Digest Authentication improvements.
+- Ensure proxy exclusions work properly.
+- Clearer UnicodeError exceptions.
+- Automatic casting of URLs to strings (fURL and such)
+- Bugfixes.
+
+0.13.6 (2012-08-06)
++++++++++++++++++++
+
+- Long awaited fix for hanging connections!
+
+0.13.5 (2012-07-27)
++++++++++++++++++++
+
+- Packaging fix
+
+0.13.4 (2012-07-27)
++++++++++++++++++++
+
+- GSSAPI/Kerberos authentication!
+- App Engine 2.7 Fixes!
+- Fix leaking connections (from urllib3 update)
+- OAuthlib path hack fix
+- OAuthlib URL parameters fix.
+
+0.13.3 (2012-07-12)
++++++++++++++++++++
+
+- Use simplejson if available.
+- Do not hide SSLErrors behind Timeouts.
+- Fixed param handling with urls containing fragments.
+- Significantly improved information in User Agent.
+- client certificates are ignored when verify=False
+
+0.13.2 (2012-06-28)
++++++++++++++++++++
+
+- Zero dependencies (once again)!
+- New: Response.reason
+- Sign querystring parameters in OAuth 1.0
+- Client certificates no longer ignored when verify=False
+- Add openSUSE certificate support
+
+0.13.1 (2012-06-07)
++++++++++++++++++++
+
+- Allow passing a file or file-like object as data.
+- Allow hooks to return responses that indicate errors.
+- Fix Response.text and Response.json for body-less responses.
+
+0.13.0 (2012-05-29)
++++++++++++++++++++
+
+- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+- Allow disabling of cookie persistence.
+- New implementation of safe_mode
+- cookies.get now supports default argument
+- Session cookies not saved when Session.request is called with return_response=False
+- Env: no_proxy support.
+- RequestsCookieJar improvements.
+- Various bug fixes.
+
+0.12.1 (2012-05-08)
++++++++++++++++++++
+
+- New ``Response.json`` property.
+- Ability to add string file uploads.
+- Fix out-of-range issue with iter_lines.
+- Fix iter_content default size.
+- Fix POST redirects containing files.
+
+0.12.0 (2012-05-02)
++++++++++++++++++++
+
+- EXPERIMENTAL OAUTH SUPPORT!
+- Proper CookieJar-backed cookies interface with awesome dict-like interface.
+- Speed fix for non-iterated content chunks.
+- Move ``pre_request`` to a more usable place.
+- New ``pre_send`` hook.
+- Lazily encode data, params, files.
+- Load system Certificate Bundle if ``certify`` isn't available.
+- Cleanups, fixes.
+
+0.11.2 (2012-04-22)
++++++++++++++++++++
+
+- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+- Infinite digest auth redirect fix.
+- Multi-part file upload improvements.
+- Fix decoding of invalid %encodings in URLs.
+- If there is no content in a response don't throw an error the second time that content is attempted to be read.
+- Upload data on redirects.
+
+0.11.1 (2012-03-30)
++++++++++++++++++++
+
+* POST redirects now break RFC to do what browsers do: Follow up with a GET.
+* New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+0.11.0 (2012-03-14)
++++++++++++++++++++
+
+* Private SSL Certificate support
+* Remove select.poll from Gevent monkeypatching
+* Remove redundant generator for chunked transfer encoding
+* Fix: Response.ok raises Timeout Exception in safe_mode
+
+0.10.8 (2012-03-09)
++++++++++++++++++++
+
+* Generate chunked ValueError fix
+* Proxy configuration by environment variables
+* Simplification of iter_lines.
+* New `trust_env` configuration for disabling system/environment hints.
+* Suppress cookie errors.
+
+0.10.7 (2012-03-07)
++++++++++++++++++++
+
+* `encode_uri` = False
+
+0.10.6 (2012-02-25)
++++++++++++++++++++
+
+* Allow '=' in cookies.
+
+0.10.5 (2012-02-25)
++++++++++++++++++++
+
+* Response body with 0 content-length fix.
+* New async.imap.
+* Don't fail on netrc.
+
+
+0.10.4 (2012-02-20)
++++++++++++++++++++
+
+* Honor netrc.
+
+0.10.3 (2012-02-20)
++++++++++++++++++++
+
+* HEAD requests don't follow redirects anymore.
+* raise_for_status() doesn't raise for 3xx anymore.
+* Make Session objects picklable.
+* ValueError for invalid schema URLs.
+
+0.10.2 (2012-01-15)
++++++++++++++++++++
+
+* Vastly improved URL quoting.
+* Additional allowed cookie key values.
+* Attempted fix for "Too many open files" Error
+* Replace unicode errors on first pass, no need for second pass.
+* Append '/' to bare-domain urls before query insertion.
+* Exceptions now inherit from RuntimeError.
+* Binary uploads + auth fix.
+* Bugfixes.
+
+
+0.10.1 (2012-01-23)
++++++++++++++++++++
+
+* PYTHON 3 SUPPORT!
+* Dropped 2.5 Support. (*Backwards Incompatible*)
+
+0.10.0 (2012-01-21)
++++++++++++++++++++
+
+* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+* New ``Response.text`` is unicode-only.
+* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+* Removal of `decode_unicode`. (*Backwards Incompatible*)
+* New multiple-hooks system.
+* New ``Response.register_hook`` for registering hooks within the pipeline.
+* ``Response.url`` is now Unicode.
+
+0.9.3 (2012-01-18)
+++++++++++++++++++
+
+* SSL verify=False bugfix (apparent on windows machines).
+
+0.9.2 (2012-01-18)
+++++++++++++++++++
+
+* Asynchronous async.send method.
+* Support for proper chunk streams with boundaries.
+* session argument for Session classes.
+* Print entire hook tracebacks, not just exception instance.
+* Fix response.iter_lines from pending next line.
+* Fix bug in HTTP-digest auth w/ URI having query strings.
+* Fix in Event Hooks section.
+* Urllib3 update.
+
+
+0.9.1 (2012-01-06)
+++++++++++++++++++
+
+* danger_mode for automatic Response.raise_for_status()
+* Response.iter_lines refactor
+
+0.9.0 (2011-12-28)
+++++++++++++++++++
+
+* verify ssl is default.
+
+
+0.8.9 (2011-12-28)
+++++++++++++++++++
+
+* Packaging fix.
+
+
+0.8.8 (2011-12-28)
+++++++++++++++++++
+
+* SSL CERT VERIFICATION!
+* Release of Certifi: Mozilla's cert list.
+* New 'verify' argument for SSL requests.
+* Urllib3 update.
+
+0.8.7 (2011-12-24)
+++++++++++++++++++
+
+* iter_lines last-line truncation fix
+* Force safe_mode for async requests
+* Handle safe_mode exceptions more consistently
+* Fix iteration on null responses in safe_mode
+
+0.8.6 (2011-12-18)
+++++++++++++++++++
+
+* Socket timeout fixes.
+* Proxy Authorization support.
+
+0.8.5 (2011-12-14)
+++++++++++++++++++
+
+* Response.iter_lines!
+
+0.8.4 (2011-12-11)
+++++++++++++++++++
+
+* Prefetch bugfix.
+* Added license to installed version.
+
+0.8.3 (2011-11-27)
+++++++++++++++++++
+
+* Converted auth system to use simpler callable objects.
+* New session parameter to API methods.
+* Display full URL while logging.
+
+0.8.2 (2011-11-19)
+++++++++++++++++++
+
+* New Unicode decoding system, based on overridable `Response.encoding`.
+* Proper URL slash-quote handling.
+* Cookies with ``[``, ``]``, and ``_`` allowed.
+
+0.8.1 (2011-11-15)
+++++++++++++++++++
+
+* URL Request path fix
+* Proxy fix.
+* Timeouts fix.
+
+0.8.0 (2011-11-13)
+++++++++++++++++++
+
+* Keep-alive support!
+* Complete removal of Urllib2
+* Complete removal of Poster
+* Complete removal of CookieJars
+* New ConnectionError raising
+* Safe_mode for error catching
+* prefetch parameter for request methods
+* OPTION method
+* Async pool size throttling
+* File uploads send real names
+* Vendored in urllib3
+
+0.7.6 (2011-11-07)
+++++++++++++++++++
+
+* Digest authentication bugfix (attach query data to path)
+
+0.7.5 (2011-11-04)
+++++++++++++++++++
+
+* Response.content = None if there was an invalid response.
+* Redirection auth handling.
+
+0.7.4 (2011-10-26)
+++++++++++++++++++
+
+* Session Hooks fix.
+
+0.7.3 (2011-10-23)
+++++++++++++++++++
+
+* Digest Auth fix.
+
+
+0.7.2 (2011-10-23)
+++++++++++++++++++
+
+* PATCH Fix.
+
+
+0.7.1 (2011-10-23)
+++++++++++++++++++
+
+* Move away from urllib2 authentication handling.
+* Fully Remove AuthManager, AuthObject, &c.
+* New tuple-based auth system with handler callbacks.
+
+
+0.7.0 (2011-10-22)
+++++++++++++++++++
+
+* Sessions are now the primary interface.
+* Deprecated InvalidMethodException.
+* PATCH fix.
+* New config system (no more global settings).
+
+
+0.6.6 (2011-10-19)
+++++++++++++++++++
+
+* Session parameter bugfix (params merging).
+
+
+0.6.5 (2011-10-18)
+++++++++++++++++++
+
+* Offline (fast) test suite.
+* Session dictionary argument merging.
+
+
+0.6.4 (2011-10-13)
+++++++++++++++++++
+
+* Automatic decoding of unicode, based on HTTP Headers.
+* New ``decode_unicode`` setting.
+* Removal of ``r.read/close`` methods.
+* New ``r.raw`` interface for advanced response usage.
+* Automatic expansion of parameterized headers.
+
+
+0.6.3 (2011-10-13)
+++++++++++++++++++
+
+* Beautiful ``requests.async`` module, for making async requests w/ gevent.
+
+
+0.6.2 (2011-10-09)
+++++++++++++++++++
+
+* GET/HEAD obeys allow_redirects=False.
+
+
+0.6.1 (2011-08-20)
+++++++++++++++++++
+
+* Enhanced status codes experience ``\o/``
+* Set a maximum number of redirects (``settings.max_redirects``)
+* Full Unicode URL support
+* Support for protocol-less redirects.
+* Allow for arbitrary request types.
+* Bugfixes
+
+
+0.6.0 (2011-08-17)
+++++++++++++++++++
+
+* New callback hook system
+* New persistent sessions object and context manager
+* Transparent Dict-cookie handling
+* Status code reference object
+* Removed Response.cached
+* Added Response.request
+* All args are kwargs
+* Relative redirect support
+* HTTPError handling improvements
+* Improved https testing
+* Bugfixes
+
+
+0.5.1 (2011-07-23)
+++++++++++++++++++
+
+* International Domain Name Support!
+* Access headers without fetching entire body (``read()``)
+* Use lists as dicts for parameters
+* Add Forced Basic Authentication
+* Forced Basic is default authentication type
+* ``python-requests.org`` default User-Agent header
+* CaseInsensitiveDict lower-case caching
+* Response.history bugfix
+
+
+0.5.0 (2011-06-21)
+++++++++++++++++++
+
+* PATCH Support
+* Support for Proxies
+* HTTPBin Test Suite
+* Redirect Fixes
+* settings.verbose stream writing
+* Querystrings for all methods
+* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised:
+  ``r = requests.get('hwe://blah'); r.raise_for_status()``
+
+
+0.4.1 (2011-05-22)
+++++++++++++++++++
+
+* Improved Redirection Handling
+* New 'allow_redirects' param for following non-GET/HEAD Redirects
+* Settings module refactoring
+
+
+0.4.0 (2011-05-15)
+++++++++++++++++++
+
+* Response.history: list of redirected responses
+* Case-Insensitive Header Dictionaries!
+* Unicode URLs
+
+
+0.3.4 (2011-05-14)
+++++++++++++++++++
+
+* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
+* Internal Refactor
+* Bytes data upload Bugfix
+
+
+
+0.3.3 (2011-05-12)
+++++++++++++++++++
+
+* Request timeouts
+* Unicode url-encoded data
+* Settings context manager and module
+
+
+0.3.2 (2011-04-15)
+++++++++++++++++++
+
+* Automatic Decompression of GZip Encoded Content
+* AutoAuth Support for Tupled HTTP Auth
+
+
+0.3.1 (2011-04-01)
+++++++++++++++++++
+
+* Cookie Changes
+* Response.read()
+* Poster fix
+
+
+0.3.0 (2011-02-25)
+++++++++++++++++++
+
+* Automatic Authentication API Change
+* Smarter Query URL Parameterization
+* Allow file uploads and POST data together
+* New Authentication Manager System
+ - Simpler Basic HTTP System
+    - Supports all built-in urllib2 Auths
+ - Allows for custom Auth Handlers
+
+
+0.2.4 (2011-02-19)
+++++++++++++++++++
+
+* Python 2.5 Support
+* PyPy-c v1.4 Support
+* Auto-Authentication tests
+* Improved Request object constructor
+
+0.2.3 (2011-02-15)
+++++++++++++++++++
+
+* New HTTPHandling Methods
+ - Response.__nonzero__ (false if bad HTTP Status)
+ - Response.ok (True if expected HTTP Status)
+ - Response.error (Logged HTTPError if bad HTTP Status)
+ - Response.raise_for_status() (Raises stored HTTPError)
+
+
+0.2.2 (2011-02-14)
+++++++++++++++++++
+
+* Still handles request in the event of an HTTPError. (Issue #2)
+* Eventlet and Gevent Monkeypatch support.
+* Cookie Support (Issue #1)
+
+
+0.2.1 (2011-02-14)
+++++++++++++++++++
+
+* Added file attribute to POST and PUT requests for multipart-encode file uploads.
+* Added Request.url attribute for context and redirects
+
+
+0.2.0 (2011-02-14)
+++++++++++++++++++
+
+* Birth!
+
+
+0.0.1 (2011-02-13)
+++++++++++++++++++
+
+* Frustration
+* Conception
+
diff --git a/python/requests/LICENSE b/python/requests/LICENSE
new file mode 100644
index 000000000..a103fc915
--- /dev/null
+++ b/python/requests/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2015 Kenneth Reitz
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/python/requests/MANIFEST.in b/python/requests/MANIFEST.in
new file mode 100644
index 000000000..439de496f
--- /dev/null
+++ b/python/requests/MANIFEST.in
@@ -0,0 +1 @@
+include README.rst LICENSE NOTICE HISTORY.rst test_requests.py requirements.txt requests/cacert.pem
diff --git a/python/requests/NOTICE b/python/requests/NOTICE
new file mode 100644
index 000000000..f583e47ab
--- /dev/null
+++ b/python/requests/NOTICE
@@ -0,0 +1,54 @@
+Requests includes some vendorized python libraries to ease installation.
+
+Urllib3 License
+===============
+
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
+Modifications copyright 2012 Kenneth Reitz.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Chardet License
+===============
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+02110-1301 USA
+
+
+CA Bundle License
+=================
+
+This Source Code Form is subject to the terms of the Mozilla Public
+License, v. 2.0. If a copy of the MPL was not distributed with this
+file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/requests/PKG-INFO b/python/requests/PKG-INFO
new file mode 100644
index 000000000..d75ebac87
--- /dev/null
+++ b/python/requests/PKG-INFO
@@ -0,0 +1,1238 @@
+Metadata-Version: 1.1
+Name: requests
+Version: 2.9.1
+Summary: Python HTTP for Humans.
+Home-page: http://python-requests.org
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: Apache 2.0
+Description: Requests: HTTP for Humans
+ =========================
+
+ .. image:: https://img.shields.io/pypi/v/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+ .. image:: https://img.shields.io/pypi/dm/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+
+
+
+ Requests is an Apache2 Licensed HTTP library, written in Python, for human
+ beings.
+
+ Most existing Python modules for sending HTTP requests are extremely
+ verbose and cumbersome. Python's builtin urllib2 module provides most of
+ the HTTP capabilities you should need, but the api is thoroughly broken.
+ It requires an enormous amount of work (even method overrides) to
+ perform the simplest of tasks.
+
+ Things shouldn't be this way. Not in Python.
+
+ .. code-block:: python
+
+ >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
+ >>> r.status_code
+ 204
+ >>> r.headers['content-type']
+ 'application/json'
+ >>> r.text
+ ...
+
+ See `the same code, without Requests <https://gist.github.com/973705>`_.
+
+        Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
+ multipart files, and parameters with simple Python dictionaries, and access the
+ response data in the same way. It's powered by httplib and `urllib3
+ <https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+ hacks for you.
+
+
+ Features
+ --------
+
+ - International Domains and URLs
+ - Keep-Alive & Connection Pooling
+ - Sessions with Cookie Persistence
+ - Browser-style SSL Verification
+ - Basic/Digest Authentication
+ - Elegant Key/Value Cookies
+ - Automatic Decompression
+ - Unicode Response Bodies
+ - Multipart File Uploads
+ - Connection Timeouts
+ - Thread-safety
+ - HTTP(S) proxy support
+
+
+ Installation
+ ------------
+
+ To install Requests, simply:
+
+ .. code-block:: bash
+
+ $ pip install requests
+
+
+ Documentation
+ -------------
+
+ Documentation is available at http://docs.python-requests.org/.
+
+
+ Contribute
+ ----------
+
+ #. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
+ #. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
+ #. Write a test which shows that the bug was fixed or that the feature works as expected.
+ #. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
+
+ .. _`the repository`: http://github.com/kennethreitz/requests
+ .. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
+ .. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
+
+
+ .. :changelog:
+
+ Release History
+ ---------------
+
+ 2.9.1 (2015-12-21)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+ - Fixed errors when calculating cookie expiration dates in certain locales.
+
+ **Miscellaneous**
+
+ - Updated bundled urllib3 to 1.13.1.
+
+ 2.9.0 (2015-12-15)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards compatible)
+
+        - The ``verify`` keyword argument now supports being passed a path to a
+          directory of CA certificates, not just a single-file bundle (see the
+          sketch after this list).
+ - Warnings are now emitted when sending files opened in text mode.
+ - Added the 511 Network Authentication Required status code to the status code
+ registry.
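+
+        A minimal sketch of the directory form (the path is illustrative, and
+        the directory is assumed to have been prepared with OpenSSL's
+        ``c_rehash`` tool):
+
+        .. code-block:: python
+
+            import requests
+
+            # verify may now point at a directory of CA certificates
+            # rather than a single bundle file.
+            requests.get('https://example.com/', verify='/etc/ssl/certs/')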
+
+ **Bugfixes**
+
+ - For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+ - When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+ - We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+ - We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+ - Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+ - Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+ - Minor performance improvements when removing specific cookies by name.
+
+ **Miscellaneous**
+
+ - Updated urllib3 to 1.13.
+
+ 2.8.1 (2015-10-13)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate
+ bundle.
+ - Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of
+ ``ConnectionError``
+ - When using the PreparedRequest flow, requests will now correctly respect the
+ ``json`` parameter. Broken in 2.8.0.
+ - When using the PreparedRequest flow, requests will now correctly handle a
+ Unicode-string method name on Python 2. Broken in 2.8.0.
+
+ 2.8.0 (2015-10-05)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards Compatible)
+
+        - Requests now supports per-host proxies. This allows the ``proxies``
+          dictionary to have entries of the form
+          ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be
+          used in preference to the previously-supported scheme-specific ones, but
+          the previous syntax will continue to work (see the sketch after this
+          list).
+ - ``Response.raise_for_status`` now prints the URL that failed as part of the
+ exception message.
+        - ``requests.utils.get_netrc_auth`` now takes a ``raise_errors`` kwarg,
+          defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files
+          raise exceptions.
+ - Change to bundled projects import logic to make it easier to unbundle
+ requests downstream.
+ - Changed the default User-Agent string to avoid leaking data on Linux: now
+ contains only the requests version.
+
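+        A hypothetical ``proxies`` mapping mixing both forms (the hostnames and
+        proxy URLs are illustrative):
+
+        .. code-block:: python
+
+            proxies = {
+                # host-specific entry, preferred when it matches
+                'http://internal.example.com': 'http://proxy-a.local:3128',
+                # scheme-specific entry, the previously-supported syntax
+                'https': 'http://proxy-b.local:3128',
+            }
+            requests.get('http://internal.example.com/', proxies=proxies)
+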
+ **Bugfixes**
+
+ - The ``json`` parameter to ``post()`` and friends will now only be used if
+ neither ``data`` nor ``files`` are present, consistent with the
+ documentation.
+ - We now ignore empty fields in the ``NO_PROXY`` environment variable.
+ - Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
+ ``stream=True`` with ``contextlib.closing``.
+ - Prevented bugs where we would attempt to return the same connection back to
+ the connection pool twice when sending a Chunked body.
+ - Miscellaneous minor internal changes.
+ - Digest Auth support is now thread safe.
+
+ **Updates**
+
+ - Updated urllib3 to 1.12.
+
+ 2.7.0 (2015-05-03)
+ ++++++++++++++++++
+
+ This is the first release that follows our new release process. For more, see
+ `our documentation
+ <http://docs.python-requests.org/en/latest/community/release-process/>`_.
+
+ **Bugfixes**
+
+ - Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer
+ encoding and response framing.
+
+ 2.6.2 (2015-04-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fix regression where compressed data that was sent as chunked data was not
+ properly decompressed. (#2561)
+
+ 2.6.1 (2015-04-22)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Remove VendorAlias import machinery introduced in v2.5.2.
+
+ - Simplify the PreparedRequest.prepare API: We no longer require the user to
+ pass an empty list to the hooks keyword argument. (c.f. #2552)
+
+ - Resolve redirects now receives and forwards all of the original arguments to
+ the adapter. (#2503)
+
+ - Handle UnicodeDecodeErrors when trying to deal with a unicode URL that
+ cannot be encoded in ASCII. (#2540)
+
+ - Populate the parsed path of the URI field when performing Digest
+ Authentication. (#2426)
+
+ - Copy a PreparedRequest's CookieJar more reliably when it is not an instance
+ of RequestsCookieJar. (#2527)
+
+ 2.6.0 (2015-03-14)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie
+ without a host value set would use the hostname for the redirected URL
+ exposing requests users to session fixation attacks and potentially cookie
+ stealing. This was disclosed privately by Matthew Daley of
+ `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from
+ v2.1.0 to v2.5.3 (inclusive on both ends).
+
+ - Fix error when requests is an ``install_requires`` dependency and ``python
+ setup.py test`` is run. (#2462)
+
+ - Fix error when urllib3 is unbundled and requests continues to use the
+ vendored import location.
+
+ - Include fixes to ``urllib3``'s header handling.
+
+ - Requests' handling of unvendored dependencies is now more restrictive.
+
+ **Features and Improvements**
+
+ - Support bytearrays when passed as parameters in the ``files`` argument.
+ (#2468)
+
+ - Avoid data duplication when creating a request with ``str``, ``bytes``, or
+ ``bytearray`` input to the ``files`` argument.
+
+ 2.5.3 (2015-02-24)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Revert changes to our vendored certificate bundle. For more context see
+ (#2455, #2456, and http://bugs.python.org/issue23476)
+
+ 2.5.2 (2015-02-23)
+ ++++++++++++++++++
+
+ **Features and Improvements**
+
+ - Add sha256 fingerprint support. (`shazow/urllib3#540`_)
+
+ - Improve the performance of headers. (`shazow/urllib3#544`_)
+
+ **Bugfixes**
+
+ - Copy pip's import machinery. When downstream redistributors remove
+ requests.packages.urllib3 the import machinery will continue to let those
+ same symbols work. Example usage in requests' documentation and 3rd-party
+ libraries relying on the vendored copies of urllib3 will work without having
+          to fall back to the system urllib3.
+
+ - Attempt to quote parts of the URL on redirect if unquoting and then quoting
+ fails. (#2356)
+
+ - Fix filename type check for multipart form-data uploads. (#2411)
+
+ - Properly handle the case where a server issuing digest authentication
+ challenges provides both auth and auth-int qop-values. (#2408)
+
+ - Fix a socket leak. (`shazow/urllib3#549`_)
+
+ - Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_)
+
+ - Disable the built-in hostname verification. (`shazow/urllib3#526`_)
+
+ - Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_)
+
+ **Security**
+
+ - Pulled in an updated ``cacert.pem``.
+
+ - Drop RC4 from the default cipher list. (`shazow/urllib3#551`_)
+
+ .. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551
+ .. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549
+ .. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544
+ .. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540
+ .. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535
+ .. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534
+ .. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526
+
+ 2.5.1 (2014-12-23)
+ ++++++++++++++++++
+
+ **Behavioural Changes**
+
+ - Only catch HTTPErrors in raise_for_status (#2382)
+
+ **Bugfixes**
+
+ - Handle LocationParseError from urllib3 (#2344)
+ - Handle file-like object filenames that are not strings (#2379)
+ - Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389)
+
+ 2.5.0 (2014-12-01)
+ ++++++++++++++++++
+
+ **Improvements**
+
+ - Allow usage of urllib3's Retry object with HTTPAdapters (#2216)
+        - The ``iter_lines`` method on a response now accepts a delimiter with
+          which to split the content (#2295; see the sketch after this list)
+
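+        For example, a streamed body can be split on a custom delimiter rather
+        than on newlines (sketch; the URL and the NUL delimiter are
+        illustrative):
+
+        .. code-block:: python
+
+            r = requests.get('http://example.com/export', stream=True)
+            for record in r.iter_lines(delimiter=b'\x00'):
+                print(record)
+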
+ **Behavioural Changes**
+
+ - Add deprecation warnings to functions in requests.utils that will be removed
+ in 3.0 (#2309)
+ - Sessions used by the functional API are always closed (#2326)
+ - Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323)
+
+ **Bugfixes**
+
+ - Only parse the URL once (#2353)
+ - Allow Content-Length header to always be overridden (#2332)
+ - Properly handle files in HTTPDigestAuth (#2333)
+ - Cap redirect_cache size to prevent memory abuse (#2299)
+ - Fix HTTPDigestAuth handling of redirects after authenticating successfully
+ (#2253)
+ - Fix crash with custom method parameter to Session.request (#2317)
+ - Fix how Link headers are parsed using the regular expression library (#2271)
+
+ **Documentation**
+
+ - Add more references for interlinking (#2348)
+ - Update CSS for theme (#2290)
+ - Update width of buttons and sidebar (#2289)
+ - Replace references of Gittip with Gratipay (#2282)
+ - Add link to changelog in sidebar (#2273)
+
+ 2.4.3 (2014-10-06)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Unicode URL improvements for Python 2.
+ - Re-order JSON param for backwards compat.
+ - Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
+
+
+ 2.4.2 (2014-10-05)
+ ++++++++++++++++++
+
+ **Improvements**
+
+ - FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
+ - Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
+
+ **Bugfixes**
+
+ - Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
+ - Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
+
+ **Documentation**
+
+ - Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
+ - Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
+ - Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
+
+
+
+ 2.4.1 (2014-09-09)
+ ++++++++++++++++++
+
+ - Now has a "security" package extras set, ``$ pip install requests[security]``
+ - Requests will now use Certifi if it is available.
+ - Capture and re-raise urllib3 ProtocolError
+ - Bugfix for responses that attempt to redirect to themselves forever (wtf?).
+
+
+ 2.4.0 (2014-08-29)
+ ++++++++++++++++++
+
+ **Behavioral Changes**
+
+ - ``Connection: keep-alive`` header is now sent automatically.
+
+ **Improvements**
+
+        - Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts (see the sketch after this list).
+ - Allow copying of PreparedRequests without headers/cookies.
+ - Updated bundled urllib3 version.
+ - Refactored settings loading from environment -- new `Session.merge_environment_settings`.
+ - Handle socket errors in iter_content.
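+
+        A minimal sketch of the tuple form (the values are illustrative):
+
+        .. code-block:: python
+
+            # Up to 3.05s to establish the connection, up to 27s between
+            # bytes received from the server.
+            requests.get('http://httpbin.org/get', timeout=(3.05, 27))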
+
+
+ 2.3.0 (2014-05-16)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New ``Response`` property ``is_redirect``, which is true when the
+ library could have processed this response as a redirection (whether
+ or not it actually did).
+ - The ``timeout`` parameter now affects requests with both ``stream=True`` and
+ ``stream=False`` equally.
+ - The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
+ Proxy schemes now default to ``http://``.
+ - The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
+          dictionary when referenced as a string or viewed in the interpreter.
+
+ **Bugfixes**
+
+ - No longer expose Authorization or Proxy-Authorization headers on redirect.
+ Fix CVE-2014-1829 and CVE-2014-1830 respectively.
+ - Authorization is re-evaluated each redirect.
+ - On redirect, pass url as native strings.
+ - Fall-back to autodetected encoding for JSON when Unicode detection fails.
+ - Headers set to ``None`` on the ``Session`` are now correctly not sent.
+ - Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
+ response.
+ - Stop advertising ``compress`` as a supported Content-Encoding.
+ - The ``Response.history`` parameter is now always a list.
+ - Many, many ``urllib3`` bugfixes.
+
+ 2.2.1 (2014-01-23)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
+ - Assorted urllib3 fixes.
+
+ 2.2.0 (2014-01-09)
+ ++++++++++++++++++
+
+ **API Changes**
+
+ - New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
+ ``DecodeError`` exceptions.
+
+ **Bugfixes**
+
+ - Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
+ - Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
+ - Use the correct pool size for pools of connections to proxies.
+ - Fix iteration of ``CookieJar`` objects.
+ - Ensure that cookies are persisted over redirect.
+ - Switch back to using chardet, since it has merged with charade.
+
+ 2.1.0 (2013-12-05)
+ ++++++++++++++++++
+
+ - Updated CA Bundle, of course.
+ - Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
+ - Clean up connections when we hit problems during chunked upload, rather than leaking them.
+ - Return connections to the pool when a chunked upload is successful, rather than leaking it.
+ - Match the HTTPbis recommendation for HTTP 301 redirects.
+ - Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
+ - Values of headers set by Requests are now always the native string type.
+ - Fix previously broken SNI support.
+ - Fix accessing HTTP proxies using proxy authentication.
+ - Unencode HTTP Basic usernames and passwords extracted from URLs.
+ - Support for IP address ranges for no_proxy environment variable
+ - Parse headers correctly when users override the default ``Host:`` header.
+ - Avoid munging the URL in case of case-sensitive servers.
+ - Looser URL handling for non-HTTP/HTTPS urls.
+ - Accept unicode methods in Python 2.6 and 2.7.
+ - More resilient cookie handling.
+ - Make ``Response`` objects pickleable.
+ - Actually added MD5-sess to Digest Auth instead of pretending to like last time.
+ - Updated internal urllib3.
+ - Fixed @Lukasa's lack of taste.
+
+ 2.0.1 (2013-10-24)
+ ++++++++++++++++++
+
+ - Updated included CA Bundle with new mistrusts and automated process for the future
+ - Added MD5-sess to Digest Auth
+ - Accept per-file headers in multipart file POST messages.
+ - Fixed: Don't send the full URL on CONNECT messages.
+ - Fixed: Correctly lowercase a redirect scheme.
+ - Fixed: Cookies not persisted when set via functional API.
+ - Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
+ - Updated internal urllib3 and chardet.
+
+ 2.0.0 (2013-09-24)
+ ++++++++++++++++++
+
+ **API Changes:**
+
+ - Keys in the Headers dictionary are now native strings on all Python versions,
+ i.e. bytestrings on Python 2, unicode on Python 3.
+ - Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
+ will be raised if they don't.
+        - Timeouts now apply to read time if ``stream=False``.
+ - ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
+ - Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
+ - Added new method to ``Session`` objects: ``Session.update_request()``. This
+ method updates a ``Request`` object with the data (e.g. cookies) stored on
+ the ``Session``.
+        - Added new method to ``Session`` objects: ``Session.prepare_request()``.
+          This method updates and prepares a ``Request`` object, and returns the
+          corresponding ``PreparedRequest`` object (see the sketch after this
+          list).
+ - Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
+ This should not be called directly, but improves the subclass interface.
+ - ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
+ will now raise a Requests ``ChunkedEncodingError`` instead.
+ - Invalid percent-escape sequences now cause a Requests ``InvalidURL``
+ exception to be raised.
+ - HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
+ ``"already_reported"``.
+ - HTTP 226 reason added (``"im_used"``).
+
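+        A short sketch of the ``Session.prepare_request()`` flow (the URL is
+        illustrative):
+
+        .. code-block:: python
+
+            from requests import Request, Session
+
+            s = Session()
+            # Merge session state (cookies, headers) into the request,
+            # then send the resulting PreparedRequest.
+            prepped = s.prepare_request(Request('GET', 'http://httpbin.org/get'))
+            resp = s.send(prepped)
+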
+ **Bugfixes:**
+
+ - Vastly improved proxy support, including the CONNECT verb. Special thanks to
+ the many contributors who worked towards this improvement.
+ - Cookies are now properly managed when 401 authentication responses are
+ received.
+ - Chunked encoding fixes.
+ - Support for mixed case schemes.
+ - Better handling of streaming downloads.
+ - Retrieve environment proxies from more locations.
+ - Minor cookies fixes.
+ - Improved redirect behaviour.
+ - Improved streaming behaviour, particularly for compressed data.
+ - Miscellaneous small Python 3 text encoding bugs.
+ - ``.netrc`` no longer overrides explicit auth.
+ - Cookies set by hooks are now correctly persisted on Sessions.
+ - Fix problem with cookies that specify port numbers in their host field.
+ - ``BytesIO`` can be used to perform streaming uploads.
+ - More generous parsing of the ``no_proxy`` environment variable.
+ - Non-string objects can be passed in data values alongside files.
+
+ 1.2.3 (2013-05-25)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.2 (2013-05-23)
+ ++++++++++++++++++
+
+ - Simple packaging fix
+
+
+ 1.2.1 (2013-05-20)
+ ++++++++++++++++++
+
+ - 301 and 302 redirects now change the verb to GET for all verbs, not just
+ POST, improving browser compatibility.
+ - Python 3.3.2 compatibility
+ - Always percent-encode location headers
+ - Fix connection adapter matching to be most-specific first
+ - new argument to the default connection adapter for passing a block argument
+ - prevent a KeyError when there's no link headers
+
+ 1.2.0 (2013-03-31)
+ ++++++++++++++++++
+
+ - Fixed cookies on sessions and on requests
+ - Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+ - certifi support was removed
+ - Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+ - Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+ - Fix DigestAuth handling too many 401s
+ - Update vendored urllib3 to include SSL bug fixes
+        - Allow keyword arguments to be passed to ``json.loads()`` via the
+          ``Response.json()`` method (see the sketch after this list)
+ - Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+ - Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+ - Fix ``RequestsCookieJar``
+ - Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+ - Update charade to version 1.0.3
+
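+        For example, keyword arguments to ``Response.json()`` now pass straight
+        through to ``json.loads()`` (sketch; the URL is illustrative):
+
+        .. code-block:: python
+
+            from decimal import Decimal
+
+            r = requests.get('http://httpbin.org/json')
+            # parse_float is forwarded to json.loads()
+            data = r.json(parse_float=Decimal)
+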
+ The change in how hooks are dispatched will likely cause a great deal of
+ issues.
+
+ 1.1.0 (2013-01-10)
+ ++++++++++++++++++
+
+ - CHUNKED REQUESTS
+ - Support for iterable response bodies
+ - Assume servers persist redirect params
+ - Allow explicit content types to be specified for file data
+ - Make merge_kwargs case-insensitive when looking up keys
+
+ 1.0.3 (2012-12-18)
+ ++++++++++++++++++
+
+ - Fix file upload encoding bug
+ - Fix cookie behavior
+
+ 1.0.2 (2012-12-17)
+ ++++++++++++++++++
+
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.1 (2012-12-17)
+ ++++++++++++++++++
+
+ - Cert verification exception bug.
+ - Proxy fix for HTTPAdapter.
+
+ 1.0.0 (2012-12-17)
+ ++++++++++++++++++
+
+ - Massive Refactor and Simplification
+ - Switch to Apache 2.0 license
+ - Swappable Connection Adapters
+ - Mountable Connection Adapters
+ - Mutable ProcessedRequest chain
+ - /s/prefetch/stream
+ - Removal of all configuration
+ - Standard library logging
+ - Make Response.json() callable, not property.
+ - Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
+ - Removal of all hooks except 'response'
+ - Removal of all authentication helpers (OAuth, Kerberos)
+
+ This is not a backwards compatible change.
+
+ 0.14.2 (2012-10-27)
+ +++++++++++++++++++
+
+ - Improved mime-compatible JSON handling
+ - Proxy fixes
+ - Path hack fixes
+ - Case-Insensitive Content-Encoding headers
+ - Support for CJK parameters in form posts
+
+
+ 0.14.1 (2012-10-01)
+ +++++++++++++++++++
+
+ - Python 3.3 Compatibility
+        - Simplified default Accept-Encoding
+ - Bugfixes
+
+
+ 0.14.0 (2012-09-02)
+ ++++++++++++++++++++
+
+ - No more iter_content errors if already downloaded.
+
+ 0.13.9 (2012-08-25)
+ +++++++++++++++++++
+
+ - Fix for OAuth + POSTs
+ - Remove exception eating from dispatch_hook
+ - General bugfixes
+
+ 0.13.8 (2012-08-21)
+ +++++++++++++++++++
+
+ - Incredible Link header support :)
+
+ 0.13.7 (2012-08-19)
+ +++++++++++++++++++
+
+ - Support for (key, value) lists everywhere.
+ - Digest Authentication improvements.
+ - Ensure proxy exclusions work properly.
+ - Clearer UnicodeError exceptions.
+ - Automatic casting of URLs to strings (fURL and such)
+ - Bugfixes.
+
+ 0.13.6 (2012-08-06)
+ +++++++++++++++++++
+
+ - Long awaited fix for hanging connections!
+
+ 0.13.5 (2012-07-27)
+ +++++++++++++++++++
+
+ - Packaging fix
+
+ 0.13.4 (2012-07-27)
+ +++++++++++++++++++
+
+ - GSSAPI/Kerberos authentication!
+ - App Engine 2.7 Fixes!
+ - Fix leaking connections (from urllib3 update)
+ - OAuthlib path hack fix
+ - OAuthlib URL parameters fix.
+
+ 0.13.3 (2012-07-12)
+ +++++++++++++++++++
+
+ - Use simplejson if available.
+ - Do not hide SSLErrors behind Timeouts.
+ - Fixed param handling with urls containing fragments.
+ - Significantly improved information in User Agent.
+ - client certificates are ignored when verify=False
+
+ 0.13.2 (2012-06-28)
+ +++++++++++++++++++
+
+ - Zero dependencies (once again)!
+ - New: Response.reason
+ - Sign querystring parameters in OAuth 1.0
+ - Client certificates no longer ignored when verify=False
+ - Add openSUSE certificate support
+
+ 0.13.1 (2012-06-07)
+ +++++++++++++++++++
+
+ - Allow passing a file or file-like object as data.
+ - Allow hooks to return responses that indicate errors.
+ - Fix Response.text and Response.json for body-less responses.
+
+ 0.13.0 (2012-05-29)
+ +++++++++++++++++++
+
+ - Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+ - Allow disabling of cookie persistence.
+ - New implementation of safe_mode
+ - cookies.get now supports default argument
+ - Session cookies not saved when Session.request is called with return_response=False
+ - Env: no_proxy support.
+ - RequestsCookieJar improvements.
+ - Various bug fixes.
+
+ 0.12.1 (2012-05-08)
+ +++++++++++++++++++
+
+ - New ``Response.json`` property.
+ - Ability to add string file uploads.
+ - Fix out-of-range issue with iter_lines.
+ - Fix iter_content default size.
+ - Fix POST redirects containing files.
+
+ 0.12.0 (2012-05-02)
+ +++++++++++++++++++
+
+ - EXPERIMENTAL OAUTH SUPPORT!
+ - Proper CookieJar-backed cookies interface with awesome dict-like interface.
+ - Speed fix for non-iterated content chunks.
+ - Move ``pre_request`` to a more usable place.
+ - New ``pre_send`` hook.
+ - Lazily encode data, params, files.
+ - Load system Certificate Bundle if ``certify`` isn't available.
+ - Cleanups, fixes.
+
+ 0.11.2 (2012-04-22)
+ +++++++++++++++++++
+
+ - Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+ - Infinite digest auth redirect fix.
+ - Multi-part file upload improvements.
+ - Fix decoding of invalid %encodings in URLs.
+        - If a response has no content, don't raise an error the second time the content is read.
+ - Upload data on redirects.
+
+ 0.11.1 (2012-03-30)
+ +++++++++++++++++++
+
+ * POST redirects now break RFC to do what browsers do: Follow up with a GET.
+ * New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+ 0.11.0 (2012-03-14)
+ +++++++++++++++++++
+
+ * Private SSL Certificate support
+ * Remove select.poll from Gevent monkeypatching
+ * Remove redundant generator for chunked transfer encoding
+ * Fix: Response.ok raises Timeout Exception in safe_mode
+
+ 0.10.8 (2012-03-09)
+ +++++++++++++++++++
+
+ * Generate chunked ValueError fix
+ * Proxy configuration by environment variables
+ * Simplification of iter_lines.
+ * New `trust_env` configuration for disabling system/environment hints.
+ * Suppress cookie errors.
+
+ 0.10.7 (2012-03-07)
+ +++++++++++++++++++
+
+ * `encode_uri` = False
+
+ 0.10.6 (2012-02-25)
+ +++++++++++++++++++
+
+ * Allow '=' in cookies.
+
+ 0.10.5 (2012-02-25)
+ +++++++++++++++++++
+
+ * Response body with 0 content-length fix.
+ * New async.imap.
+ * Don't fail on netrc.
+
+
+ 0.10.4 (2012-02-20)
+ +++++++++++++++++++
+
+ * Honor netrc.
+
+ 0.10.3 (2012-02-20)
+ +++++++++++++++++++
+
+ * HEAD requests don't follow redirects anymore.
+ * raise_for_status() doesn't raise for 3xx anymore.
+ * Make Session objects picklable.
+ * ValueError for invalid schema URLs.
+
+ 0.10.2 (2012-01-15)
+ +++++++++++++++++++
+
+ * Vastly improved URL quoting.
+ * Additional allowed cookie key values.
+        * Attempted fix for "Too many open files" error.
+ * Replace unicode errors on first pass, no need for second pass.
+ * Append '/' to bare-domain urls before query insertion.
+ * Exceptions now inherit from RuntimeError.
+ * Binary uploads + auth fix.
+ * Bugfixes.
+
+
+ 0.10.1 (2012-01-23)
+ +++++++++++++++++++
+
+ * PYTHON 3 SUPPORT!
+ * Dropped 2.5 Support. (*Backwards Incompatible*)
+
+ 0.10.0 (2012-01-21)
+ +++++++++++++++++++
+
+ * ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+ * New ``Response.text`` is unicode-only.
+ * If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+ * Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+ * Removal of `decode_unicode`. (*Backwards Incompatible*)
+ * New multiple-hooks system.
+ * New ``Response.register_hook`` for registering hooks within the pipeline.
+ * ``Response.url`` is now Unicode.
+
+ 0.9.3 (2012-01-18)
+ ++++++++++++++++++
+
+        * SSL verify=False bugfix (apparent on Windows machines).
+
+ 0.9.2 (2012-01-18)
+ ++++++++++++++++++
+
+ * Asynchronous async.send method.
+ * Support for proper chunk streams with boundaries.
+ * session argument for Session classes.
+ * Print entire hook tracebacks, not just exception instance.
+ * Fix response.iter_lines from pending next line.
+        * Fix bug in HTTP Digest auth with URIs that have query strings.
+ * Fix in Event Hooks section.
+ * Urllib3 update.
+
+
+ 0.9.1 (2012-01-06)
+ ++++++++++++++++++
+
+ * danger_mode for automatic Response.raise_for_status()
+ * Response.iter_lines refactor
+
+ 0.9.0 (2011-12-28)
+ ++++++++++++++++++
+
+ * verify ssl is default.
+
+
+ 0.8.9 (2011-12-28)
+ ++++++++++++++++++
+
+ * Packaging fix.
+
+
+ 0.8.8 (2011-12-28)
+ ++++++++++++++++++
+
+ * SSL CERT VERIFICATION!
+        * Release of Certifi: Mozilla's cert list.
+ * New 'verify' argument for SSL requests.
+ * Urllib3 update.
+
+ 0.8.7 (2011-12-24)
+ ++++++++++++++++++
+
+ * iter_lines last-line truncation fix
+ * Force safe_mode for async requests
+ * Handle safe_mode exceptions more consistently
+ * Fix iteration on null responses in safe_mode
+
+ 0.8.6 (2011-12-18)
+ ++++++++++++++++++
+
+ * Socket timeout fixes.
+ * Proxy Authorization support.
+
+ 0.8.5 (2011-12-14)
+ ++++++++++++++++++
+
+ * Response.iter_lines!
+
+ 0.8.4 (2011-12-11)
+ ++++++++++++++++++
+
+ * Prefetch bugfix.
+ * Added license to installed version.
+
+ 0.8.3 (2011-11-27)
+ ++++++++++++++++++
+
+ * Converted auth system to use simpler callable objects.
+ * New session parameter to API methods.
+ * Display full URL while logging.
+
+ 0.8.2 (2011-11-19)
+ ++++++++++++++++++
+
+        * New Unicode decoding system, based on overridable `Response.encoding`.
+ * Proper URL slash-quote handling.
+ * Cookies with ``[``, ``]``, and ``_`` allowed.
+
+ 0.8.1 (2011-11-15)
+ ++++++++++++++++++
+
+ * URL Request path fix
+ * Proxy fix.
+ * Timeouts fix.
+
+ 0.8.0 (2011-11-13)
+ ++++++++++++++++++
+
+ * Keep-alive support!
+ * Complete removal of Urllib2
+ * Complete removal of Poster
+ * Complete removal of CookieJars
+ * New ConnectionError raising
+ * Safe_mode for error catching
+ * prefetch parameter for request methods
+ * OPTION method
+ * Async pool size throttling
+ * File uploads send real names
+ * Vendored in urllib3
+
+ 0.7.6 (2011-11-07)
+ ++++++++++++++++++
+
+ * Digest authentication bugfix (attach query data to path)
+
+ 0.7.5 (2011-11-04)
+ ++++++++++++++++++
+
+ * Response.content = None if there was an invalid response.
+ * Redirection auth handling.
+
+ 0.7.4 (2011-10-26)
+ ++++++++++++++++++
+
+ * Session Hooks fix.
+
+ 0.7.3 (2011-10-23)
+ ++++++++++++++++++
+
+ * Digest Auth fix.
+
+
+ 0.7.2 (2011-10-23)
+ ++++++++++++++++++
+
+ * PATCH Fix.
+
+
+ 0.7.1 (2011-10-23)
+ ++++++++++++++++++
+
+ * Move away from urllib2 authentication handling.
+ * Fully Remove AuthManager, AuthObject, &c.
+ * New tuple-based auth system with handler callbacks.
+
+
+ 0.7.0 (2011-10-22)
+ ++++++++++++++++++
+
+ * Sessions are now the primary interface.
+ * Deprecated InvalidMethodException.
+ * PATCH fix.
+ * New config system (no more global settings).
+
+
+ 0.6.6 (2011-10-19)
+ ++++++++++++++++++
+
+ * Session parameter bugfix (params merging).
+
+
+ 0.6.5 (2011-10-18)
+ ++++++++++++++++++
+
+ * Offline (fast) test suite.
+ * Session dictionary argument merging.
+
+
+ 0.6.4 (2011-10-13)
+ ++++++++++++++++++
+
+ * Automatic decoding of unicode, based on HTTP Headers.
+ * New ``decode_unicode`` setting.
+ * Removal of ``r.read/close`` methods.
+        * New ``r.raw`` interface for advanced response usage.
+ * Automatic expansion of parameterized headers.
+
+
+ 0.6.3 (2011-10-13)
+ ++++++++++++++++++
+
+ * Beautiful ``requests.async`` module, for making async requests w/ gevent.
+
+
+ 0.6.2 (2011-10-09)
+ ++++++++++++++++++
+
+ * GET/HEAD obeys allow_redirects=False.
+
+
+ 0.6.1 (2011-08-20)
+ ++++++++++++++++++
+
+ * Enhanced status codes experience ``\o/``
+ * Set a maximum number of redirects (``settings.max_redirects``)
+ * Full Unicode URL support
+ * Support for protocol-less redirects.
+ * Allow for arbitrary request types.
+ * Bugfixes
+
+
+ 0.6.0 (2011-08-17)
+ ++++++++++++++++++
+
+ * New callback hook system
+ * New persistent sessions object and context manager
+ * Transparent Dict-cookie handling
+ * Status code reference object
+ * Removed Response.cached
+ * Added Response.request
+ * All args are kwargs
+ * Relative redirect support
+ * HTTPError handling improvements
+ * Improved https testing
+ * Bugfixes
+
+
+ 0.5.1 (2011-07-23)
+ ++++++++++++++++++
+
+ * International Domain Name Support!
+ * Access headers without fetching entire body (``read()``)
+ * Use lists as dicts for parameters
+ * Add Forced Basic Authentication
+ * Forced Basic is default authentication type
+ * ``python-requests.org`` default User-Agent header
+ * CaseInsensitiveDict lower-case caching
+ * Response.history bugfix
+
+
+ 0.5.0 (2011-06-21)
+ ++++++++++++++++++
+
+ * PATCH Support
+ * Support for Proxies
+ * HTTPBin Test Suite
+ * Redirect Fixes
+ * settings.verbose stream writing
+ * Querystrings for all methods
+        * URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised:
+          ``r = requests.get('hwe://blah'); r.raise_for_status()``
+
+
+ 0.4.1 (2011-05-22)
+ ++++++++++++++++++
+
+ * Improved Redirection Handling
+ * New 'allow_redirects' param for following non-GET/HEAD Redirects
+ * Settings module refactoring
+
+
+ 0.4.0 (2011-05-15)
+ ++++++++++++++++++
+
+ * Response.history: list of redirected responses
+ * Case-Insensitive Header Dictionaries!
+ * Unicode URLs
+
+
+ 0.3.4 (2011-05-14)
+ ++++++++++++++++++
+
+ * Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
+ * Internal Refactor
+ * Bytes data upload Bugfix
+
+
+
+ 0.3.3 (2011-05-12)
+ ++++++++++++++++++
+
+ * Request timeouts
+ * Unicode url-encoded data
+ * Settings context manager and module
+
+
+ 0.3.2 (2011-04-15)
+ ++++++++++++++++++
+
+ * Automatic Decompression of GZip Encoded Content
+ * AutoAuth Support for Tupled HTTP Auth
+
+
+ 0.3.1 (2011-04-01)
+ ++++++++++++++++++
+
+ * Cookie Changes
+ * Response.read()
+ * Poster fix
+
+
+ 0.3.0 (2011-02-25)
+ ++++++++++++++++++
+
+ * Automatic Authentication API Change
+ * Smarter Query URL Parameterization
+ * Allow file uploads and POST data together
+ * New Authentication Manager System
+ - Simpler Basic HTTP System
+            - Supports all built-in urllib2 Auths
+ - Allows for custom Auth Handlers
+
+
+ 0.2.4 (2011-02-19)
+ ++++++++++++++++++
+
+ * Python 2.5 Support
+ * PyPy-c v1.4 Support
+ * Auto-Authentication tests
+ * Improved Request object constructor
+
+ 0.2.3 (2011-02-15)
+ ++++++++++++++++++
+
+ * New HTTPHandling Methods
+ - Response.__nonzero__ (false if bad HTTP Status)
+ - Response.ok (True if expected HTTP Status)
+ - Response.error (Logged HTTPError if bad HTTP Status)
+ - Response.raise_for_status() (Raises stored HTTPError)
+
+
+ 0.2.2 (2011-02-14)
+ ++++++++++++++++++
+
+ * Still handles request in the event of an HTTPError. (Issue #2)
+ * Eventlet and Gevent Monkeypatch support.
+ * Cookie Support (Issue #1)
+
+
+ 0.2.1 (2011-02-14)
+ ++++++++++++++++++
+
+ * Added file attribute to POST and PUT requests for multipart-encode file uploads.
+ * Added Request.url attribute for context and redirects
+
+
+ 0.2.0 (2011-02-14)
+ ++++++++++++++++++
+
+ * Birth!
+
+
+ 0.0.1 (2011-02-13)
+ ++++++++++++++++++
+
+ * Frustration
+ * Conception
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/python/requests/README.rst b/python/requests/README.rst
new file mode 100644
index 000000000..99d30e724
--- /dev/null
+++ b/python/requests/README.rst
@@ -0,0 +1,86 @@
+Requests: HTTP for Humans
+=========================
+
+.. image:: https://img.shields.io/pypi/v/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+.. image:: https://img.shields.io/pypi/dm/requests.svg
+ :target: https://pypi.python.org/pypi/requests
+
+
+
+
+Requests is an Apache2 Licensed HTTP library, written in Python, for human
+beings.
+
+Most existing Python modules for sending HTTP requests are extremely
+verbose and cumbersome. Python's builtin urllib2 module provides most of
+the HTTP capabilities you should need, but the api is thoroughly broken.
+It requires an enormous amount of work (even method overrides) to
+perform the simplest of tasks.
+
+Things shouldn't be this way. Not in Python.
+
+.. code-block:: python
+
+ >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
+ >>> r.status_code
+ 204
+ >>> r.headers['content-type']
+ 'application/json'
+ >>> r.text
+ ...
+
+See `the same code, without Requests <https://gist.github.com/973705>`_.
+
+Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
+multipart files, and parameters with simple Python dictionaries, and access the
+response data in the same way. It's powered by httplib and `urllib3
+<https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+hacks for you.
+
+
+Features
+--------
+
+- International Domains and URLs
+- Keep-Alive & Connection Pooling
+- Sessions with Cookie Persistence
+- Browser-style SSL Verification
+- Basic/Digest Authentication
+- Elegant Key/Value Cookies
+- Automatic Decompression
+- Unicode Response Bodies
+- Multipart File Uploads
+- Connection Timeouts
+- Thread-safety
+- HTTP(S) proxy support
+
+
+Installation
+------------
+
+To install Requests, simply:
+
+.. code-block:: bash
+
+ $ pip install requests
+
+
+Documentation
+-------------
+
+Documentation is available at http://docs.python-requests.org/.
+
+
+Contribute
+----------
+
+#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
+#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
+#. Write a test which shows that the bug was fixed or that the feature works as expected.
+#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
+
+.. _`the repository`: http://github.com/kennethreitz/requests
+.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
+.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
diff --git a/python/requests/requests/__init__.py b/python/requests/requests/__init__.py
new file mode 100644
index 000000000..bd5b5b974
--- /dev/null
+++ b/python/requests/requests/__init__.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+Requests HTTP library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings. Basic GET
+usage:
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
+ >>> 'Python is a programming language' in r.content
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
+ >>> r = requests.post('http://httpbin.org/post', data=payload)
+ >>> print(r.text)
+ {
+ ...
+ "form": {
+ "key2": "value2",
+ "key1": "value1"
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <http://python-requests.org>.
+
+:copyright: (c) 2015 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+
+"""
+
+__title__ = 'requests'
+__version__ = '2.9.1'
+__build__ = 0x020901
+__author__ = 'Kenneth Reitz'
+__license__ = 'Apache 2.0'
+__copyright__ = 'Copyright 2015 Kenneth Reitz'
+
+# Attempt to enable urllib3's SNI support, if possible
+try:
+ from .packages.urllib3.contrib import pyopenssl
+ pyopenssl.inject_into_urllib3()
+except ImportError:
+ pass
+
+from . import utils
+from .models import Request, Response, PreparedRequest
+from .api import request, get, head, post, patch, put, delete, options
+from .sessions import session, Session
+from .status_codes import codes
+from .exceptions import (
+ RequestException, Timeout, URLRequired,
+ TooManyRedirects, HTTPError, ConnectionError,
+ FileModeWarning,
+)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+import warnings
+
+# FileModeWarnings go off per the 'default' warnings filter (once per location).
+warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/python/requests/requests/adapters.py b/python/requests/requests/adapters.py
new file mode 100644
index 000000000..6266d5be3
--- /dev/null
+++ b/python/requests/requests/adapters.py
@@ -0,0 +1,453 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket
+
+from .models import Response
+from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
+from .packages.urllib3.response import HTTPResponse
+from .packages.urllib3.util import Timeout as TimeoutSauce
+from .packages.urllib3.util.retry import Retry
+from .compat import urlparse, basestring
+from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
+ prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
+ select_proxy)
+from .structures import CaseInsensitiveDict
+from .packages.urllib3.exceptions import ClosedPoolError
+from .packages.urllib3.exceptions import ConnectTimeoutError
+from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3.exceptions import MaxRetryError
+from .packages.urllib3.exceptions import NewConnectionError
+from .packages.urllib3.exceptions import ProxyError as _ProxyError
+from .packages.urllib3.exceptions import ProtocolError
+from .packages.urllib3.exceptions import ReadTimeoutError
+from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ResponseError
+from .cookies import extract_cookies_to_jar
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+ ProxyError, RetryError)
+from .auth import _basic_auth_str
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+class BaseAdapter(object):
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super(BaseAdapter, self).__init__()
+
+ def send(self):
+ raise NotImplementedError
+
+ def close(self):
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
+ :param int max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
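+
+      Or, for granular control, pass a urllib3 ``Retry`` instance as the
+      docstring above describes (the retry parameters here are illustrative):
+
+      >>> from requests.packages.urllib3.util.retry import Retry
+      >>> retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[502, 503])
+      >>> s.mount('http://', requests.adapters.HTTPAdapter(max_retries=retries))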
+ """
+ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
+ '_pool_block']
+
+ def __init__(self, pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super(HTTPAdapter, self).__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
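A sketch of tuning the pool knobs alongside the retry example in the class docstring (httpbin.org is just a stand-in host):

    >>> import requests
    >>> s = requests.Session()
    >>> s.mount('https://', HTTPAdapter(pool_connections=20, pool_maxsize=20))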
+ def __getstate__(self):
+ return dict((attr, getattr(self, attr, None)) for attr in
+ self.__attrs__)
+
+ def __setstate__(self, state):
+        # 'proxy_manager' can't simply be added to self.__attrs__ because
+        # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(self._pool_connections, self._pool_maxsize,
+ block=self._pool_block)
+
+ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
+ block=block, strict=True, **pool_kwargs)
+
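Since ``pool_kwargs`` is forwarded verbatim, a subclass can thread extra urllib3 options through; a minimal sketch, assuming the vendored urllib3 accepts ``source_address`` (the address here is made up):

    >>> class SourceAddressAdapter(HTTPAdapter):
    ...     def init_poolmanager(self, *args, **kwargs):
    ...         # Bind outgoing connections to a specific local address.
    ...         kwargs['source_address'] = ('10.0.0.42', 0)
    ...         super(SourceAddressAdapter, self).init_poolmanager(*args, **kwargs)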
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ """
+        if proxy not in self.proxy_manager:
+ proxy_headers = self.proxy_headers(proxy)
+ self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs)
+
+ return self.proxy_manager[proxy]
+
+ def cert_verify(self, conn, url, verify, cert):
+ """Verify a SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
+ :param verify: Whether we should actually verify the certificate.
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith('https') and verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
+ cert_loc = DEFAULT_CA_BUNDLE_PATH
+
+ if not cert_loc:
+ raise Exception("Could not find a suitable SSL CA certificate bundle.")
+
+ conn.cert_reqs = 'CERT_REQUIRED'
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+ else:
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+ conn.ca_cert_dir = None
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
+
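These connection attributes are driven by the ``verify`` and ``cert`` arguments of the request; a sketch with hypothetical file paths:

    >>> s = requests.Session()
    >>> s.get('https://example.org',
    ...       verify='/etc/ssl/my-ca.pem',
    ...       cert=('/path/client.crt', '/path/client.key'))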
+ def build_response(self, req, resp):
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
+ """
+ response = Response()
+
+ # Fallback to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, 'status', None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode('utf-8')
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def get_connection(self, url, proxies=None):
+ """Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ """
+ proxy = select_proxy(url, proxies)
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, 'http')
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
+ Currently, this just closes the PoolManager, which closes pooled
+ connections.
+ """
+ self.poolmanager.clear()
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+        If the message is being sent through an HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+ """
+ proxy = select_proxy(request.url, proxies)
+ scheme = urlparse(request.url).scheme
+ if proxy and scheme != 'https':
+ url = urldefragauth(request.url)
+ else:
+ url = request.path_url
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
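A sketch of the intended override point (the header name is made up):

    >>> class TracingAdapter(HTTPAdapter):
    ...     def add_headers(self, request, **kwargs):
    ...         request.headers['X-Trace-Id'] = 'abc123'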
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param proxy: The URL of the proxy being used for this request.
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username,
+ password)
+
+ return headers
+
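For a proxy URL carrying userinfo this resolves to a Basic auth header, e.g.:

    >>> HTTPAdapter().proxy_headers('http://user:pass@10.0.0.1:3128/')
    {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}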
+ def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param verify: (optional) Whether to verify SSL certificates.
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ """
+
+ conn = self.get_connection(request.url, proxies)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
+ self.add_headers(request)
+
+ chunked = not (request.body is None or 'Content-Length' in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError as e:
+ # this may raise a string formatting error.
+ err = ("Invalid timeout {0}. Pass a (connect, read) "
+ "timeout tuple, or a single float to set "
+ "both timeouts to the same value".format(timeout))
+ raise ValueError(err)
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ if not chunked:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout
+ )
+
+            else:
+                # Send the request manually, chunk by chunk.
+ if hasattr(conn, 'proxy_pool'):
+ conn = conn.proxy_pool
+
+ low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
+
+ try:
+ low_conn.putrequest(request.method,
+ url,
+ skip_accept_encoding=True)
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+                        # Write the body with chunked transfer encoding:
+                        # hex chunk size, CRLF, chunk data, CRLF, then the
+                        # zero-length terminating chunk.
+                        for chunk in request.body:
+                            low_conn.send(hex(len(chunk))[2:].encode('utf-8'))
+                            low_conn.send(b'\r\n')
+                            low_conn.send(chunk)
+                            low_conn.send(b'\r\n')
+                        low_conn.send(b'0\r\n\r\n')
+
+ # Receive the response from the server
+ try:
+ # For Python 2.7+ versions, use buffering of HTTP
+ # responses
+ r = low_conn.getresponse(buffering=True)
+ except TypeError:
+ # For compatibility with Python 2.6 versions and back
+ r = low_conn.getresponse()
+
+ resp = HTTPResponse.from_httplib(
+ r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
+ except:
+ # If we hit any problems here, clean up the connection.
+ # Then, reraise so that we can handle the actual exception.
+ low_conn.close()
+ raise
+
+ except (ProtocolError, socket.error) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ # TODO: Remove this in 3.0.0: see #2811
+ if not isinstance(e.reason, NewConnectionError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
+ raise ConnectionError(e, request=request)
+
+ except ClosedPoolError as e:
+ raise ConnectionError(e, request=request)
+
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
+ else:
+ raise
+
+ return self.build_response(request, resp)
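The ``(connect, read)`` tuple handled above is the same form accepted by the public API, e.g.:

    >>> requests.get('http://httpbin.org/get', timeout=(3.05, 27))
    <Response [200]>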
diff --git a/python/requests/requests/api.py b/python/requests/requests/api.py
new file mode 100644
index 000000000..b21a1a4fa
--- /dev/null
+++ b/python/requests/requests/api.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request <Request>`.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send data
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+
+ Usage::
+
+ >>> import requests
+      >>> req = requests.request('GET', 'http://httpbin.org/get')
+      >>> req
+      <Response [200]>
+ """
+
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, params=None, **kwargs):
+ """Sends a GET request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('get', url, params=params, **kwargs)
+
+
+def options(url, **kwargs):
+ """Sends a OPTIONS request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('options', url, **kwargs)
+
+
+def head(url, **kwargs):
+ """Sends a HEAD request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return request('head', url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ """Sends a POST request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('post', url, data=data, json=json, **kwargs)
+
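For example, posting a JSON body (httpbin echoes it back):

    >>> r = requests.post('http://httpbin.org/post', json={'key': 'value'})
    >>> r.status_code
    200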
+
+def put(url, data=None, **kwargs):
+ """Sends a PUT request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('put', url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ """Sends a PATCH request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('patch', url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ """Sends a DELETE request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('delete', url, **kwargs)
diff --git a/python/requests/requests/auth.py b/python/requests/requests/auth.py
new file mode 100644
index 000000000..2af55fb5e
--- /dev/null
+++ b/python/requests/requests/auth.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import os
+import re
+import time
+import hashlib
+import threading
+
+from base64 import b64encode
+
+from .compat import urlparse, str
+from .cookies import extract_cookies_to_jar
+from .utils import parse_dict_header, to_native_string
+from .status_codes import codes
+
+CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
+CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ authstr = 'Basic ' + to_native_string(
+ b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
+ )
+
+ return authstr
+
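For example:

    >>> _basic_auth_str('user', 'pass')
    'Basic dXNlcjpwYXNz'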
+
+class AuthBase(object):
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError('Auth hooks must be callable.')
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __call__(self, r):
+ r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
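This is the object behind the ``auth=('user', 'pass')`` shorthand; passing it explicitly is equivalent:

    >>> requests.get('http://httpbin.org/basic-auth/user/pass',
    ...              auth=HTTPBasicAuth('user', 'pass'))
    <Response [200]>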
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+ def __call__(self, r):
+ r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+ # Keep state in per-thread local storage
+ self._thread_local = threading.local()
+
+ def init_per_thread_state(self):
+ # Ensure state is initialized just once per-thread
+ if not hasattr(self._thread_local, 'init'):
+ self._thread_local.init = True
+ self._thread_local.last_nonce = ''
+ self._thread_local.nonce_count = 0
+ self._thread_local.chal = {}
+ self._thread_local.pos = None
+ self._thread_local.num_401_calls = None
+
+ def build_digest_header(self, method, url):
+
+ realm = self._thread_local.chal['realm']
+ nonce = self._thread_local.chal['nonce']
+ qop = self._thread_local.chal.get('qop')
+ algorithm = self._thread_local.chal.get('algorithm')
+ opaque = self._thread_local.chal.get('opaque')
+
+ if algorithm is None:
+ _algorithm = 'MD5'
+ else:
+            _algorithm = algorithm.upper()
+
+        # Default hash_utf8 to None so the unsupported-algorithm check
+        # below returns None instead of raising NameError.
+        hash_utf8 = None
+        # lambdas assume digest modules are imported at the top level
+ if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.md5(x).hexdigest()
+ hash_utf8 = md5_utf8
+ elif _algorithm == 'SHA':
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha1(x).hexdigest()
+ hash_utf8 = sha_utf8
+
+ KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
+        #: path is the request-uri defined in RFC 2616, which should not be empty
+ path = p_parsed.path or "/"
+ if p_parsed.query:
+ path += '?' + p_parsed.query
+
+ A1 = '%s:%s:%s' % (self.username, realm, self.password)
+ A2 = '%s:%s' % (method, path)
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
+ if nonce == self._thread_local.last_nonce:
+ self._thread_local.nonce_count += 1
+ else:
+ self._thread_local.nonce_count = 1
+ ncvalue = '%08x' % self._thread_local.nonce_count
+ s = str(self._thread_local.nonce_count).encode('utf-8')
+ s += nonce.encode('utf-8')
+ s += time.ctime().encode('utf-8')
+ s += os.urandom(8)
+
+ cnonce = (hashlib.sha1(s).hexdigest()[:16])
+ if _algorithm == 'MD5-SESS':
+ HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
+
+ if not qop:
+ respdig = KD(HA1, "%s:%s" % (nonce, HA2))
+ elif qop == 'auth' or 'auth' in qop.split(','):
+ noncebit = "%s:%s:%s:%s:%s" % (
+ nonce, ncvalue, cnonce, 'auth', HA2
+ )
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
+ self._thread_local.last_nonce = nonce
+
+ # XXX should the partial digests be encoded too?
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
+ if opaque:
+ base += ', opaque="%s"' % opaque
+ if algorithm:
+ base += ', algorithm="%s"' % algorithm
+ if entdig:
+ base += ', digest="%s"' % entdig
+ if qop:
+ base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+
+        return 'Digest %s' % base
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self._thread_local.num_401_calls = 1
+
+ def handle_401(self, r, **kwargs):
+ """Takes the given response and tries digest-auth, if needed."""
+
+ if self._thread_local.pos is not None:
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
+ r.request.body.seek(self._thread_local.pos)
+ s_auth = r.headers.get('www-authenticate', '')
+
+ if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
+
+ self._thread_local.num_401_calls += 1
+ pat = re.compile(r'digest ', flags=re.IGNORECASE)
+ self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
+ r.close()
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers['Authorization'] = self.build_digest_header(
+ prep.method, prep.url)
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
+ self._thread_local.num_401_calls = 1
+ return r
+
+ def __call__(self, r):
+ # Initialize per-thread state, if needed
+ self.init_per_thread_state()
+ # If we have a saved nonce, skip the 401
+ if self._thread_local.last_nonce:
+ r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
+ try:
+ self._thread_local.pos = r.body.tell()
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self._thread_local.pos = None
+ r.register_hook('response', self.handle_401)
+ r.register_hook('response', self.handle_redirect)
+ self._thread_local.num_401_calls = 1
+
+ return r
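Usage mirrors basic auth; for example, against httpbin's digest endpoint:

    >>> import requests
    >>> requests.get('http://httpbin.org/digest-auth/auth/user/pass',
    ...              auth=HTTPDigestAuth('user', 'pass'))
    <Response [200]>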
diff --git a/python/requests/requests/cacert.pem b/python/requests/requests/cacert.pem
new file mode 100644
index 000000000..6a66daa99
--- /dev/null
+++ b/python/requests/requests/cacert.pem
@@ -0,0 +1,5616 @@
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3
+# Subject: O=RSA Security Inc OU=RSA Security 2048 V3
+# Label: "RSA Security 2048 v3"
+# Serial: 13297492616345471454730593562152402946
+# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e
+# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42
+# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c
+-----BEGIN CERTIFICATE-----
+MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
+MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
+dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
+BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
+MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
+eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
+/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
+wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
+AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
+PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
+AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
+MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
+HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
+Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
+f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
+rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
+6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
+7CAFYd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Label: "Visa eCommerce Root"
+# Serial: 25952180776285836048024890241505565794
+# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
+# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
+# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.
+# Subject: CN=Certum CA O=Unizeto Sp. z o.o.
+# Label: "Certum Root CA"
+# Serial: 65568
+# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9
+# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18
+# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24
+-----BEGIN CERTIFICATE-----
+MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
+jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
+ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
+ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
+Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
+AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
+HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
+uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
+TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
+xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
+CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
+O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
+6GAqm4VKQPNriiTsBhYscw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA"
+# Serial: 10000010
+# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0
+# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04
+# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
+TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
+MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
+ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
+ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
+9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
+hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
+tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
+BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
+SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
+OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
+cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
+7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
+/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
+eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
+u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
+7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
+iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Label: "Camerfirma Chambers of Commerce Root"
+# Serial: 0
+# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84
+# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1
+# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
+b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
+MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
+ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
+IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
+AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
+unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
+BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
+7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
+0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
+roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
+A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
+aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
+26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
+BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
+EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
+BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
+aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
+AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
+p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
+1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
+XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
+eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
+tGWaIZDgqtCYvDi1czyL+Nw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
+# Label: "Camerfirma Global Chambersign Root"
+# Serial: 0
+# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19
+# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9
+# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed
+-----BEGIN CERTIFICATE-----
+MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
+YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
+MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
+NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
+A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
+A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
+Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
+QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
+eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
+B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
+z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
+AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
+ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
+TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
+MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
+VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
+VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
+bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
+AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
+bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
+ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
+VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
+ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
+AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Notary (Class A) Root"
+# Serial: 259
+# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
+# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
+# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
+-----BEGIN CERTIFICATE-----
+MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
+MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
+TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
+dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
+KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
+N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
+dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
+MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
+b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
+zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
+3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
+WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
+Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
+NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
+ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
+QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
+YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
+aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
+IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
+ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
+ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
+amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
+IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
+Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
+ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
+YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
+dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
+b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
+CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
+xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
+0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
+QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
+f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
+8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root CA 1"
+# Serial: 122348795730808398873664200247279986742
+# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9
+# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51
+# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e
+-----BEGIN CERTIFICATE-----
+MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
+MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
+YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
+Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
+AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
+Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
+m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
+FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
+TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
+EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
+kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
+HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
+vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
+19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
+L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
+bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
+JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
+FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
+BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
+K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
+ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
+Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
+sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
+3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
+ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
+mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
+b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
+rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
+hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
+zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
+MBr1mmz0DlP5OlvRHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
+# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
+# Label: "DST ACES CA X6"
+# Serial: 17771143917277623872238992636097467865
+# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8
+# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d
+# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
+ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
+MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
+VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
+FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
+ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
+gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
+fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
+ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
+ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
+c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
+dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
+aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
+hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
+QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
+h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
+nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
+rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
+9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
+# Label: "TURKTRUST Certificate Services Provider Root 2"
+# Serial: 1
+# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00
+# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7
+# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6
+-----BEGIN CERTIFICATE-----
+MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
+S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
+SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
+WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
+bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
+UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
+bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
+LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
+J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
+R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
+Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
+JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
+zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
+Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
+KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
+ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
+Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
+gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
+uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
+y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
+# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
+# Label: "WellsSecure Public Root Certificate Authority"
+# Serial: 1
+# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36
+# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee
+# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
+IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
+cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
+dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
+MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
+bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
+DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
+WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
+Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
+HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
+z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
+SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
+AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
+KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
+AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
+BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
+VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
+ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
+Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
+ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
+/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
+A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
+k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
+iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
+2G0xffX8oRAHh84vWdw+WNs=
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI
+# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI
+# Label: "IGC/A"
+# Serial: 245102874772
+# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37
+# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c
+# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32
+-----BEGIN CERTIFICATE-----
+MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
+AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
+TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
+9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
+MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
+BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
+MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
+LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
+s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
+xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
+u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
+F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
+Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
+PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
+HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
+NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
+AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
+L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
+YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
+Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
+NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
+0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
+# Label: "Security Communication EV RootCA1"
+# Serial: 0
+# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3
+# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d
+# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37
+-----BEGIN CERTIFICATE-----
+MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
+MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
+IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
+bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
+RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
+zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
+bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
+MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
+VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
+OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
+CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
+tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
+q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
+EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
+VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
+# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
+# Label: "Microsec e-Szigno Root CA"
+# Serial: 272122594155480254301341951808045322001
+# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5
+# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d
+# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0
+-----BEGIN CERTIFICATE-----
+MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
+cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
+b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
+ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
+NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
+TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
+Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
+uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
+vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
+Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
+62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
+AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
+LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
+BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
+AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
+MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
+ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
+AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
+AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
+ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
+AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
+AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
+bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
+Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
+PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
+Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
+EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
+w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
+cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
+HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
+VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
+BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
+b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
+8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
+ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
+7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
+86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
+hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
+MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
+# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
+# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
+# Serial: 17
+# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
+# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
+# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
+-----BEGIN CERTIFICATE-----
+MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
+MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
+bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
+VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
+YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
+dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
+ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
+Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
+aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
+QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
+xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
+aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
+IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
+gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
+O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
+fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
+lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
+hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
+AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
+NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
+wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
+7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
+gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
+oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
+yZyQ2uypQjyttgI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
+# Label: "Buypass Class 2 CA 1"
+# Serial: 1
+# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
+# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
+# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
+-----BEGIN CERTIFICATE-----
+MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
+Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
+MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
+VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
+ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
+l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
+HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
+5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
+WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
+AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
+gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
+DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
+BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
+h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
+LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
+-----END CERTIFICATE-----
+
+# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
+# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
+# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
+# Serial: 5525761995591021570
+# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37
+# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58
+# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2
+-----BEGIN CERTIFICATE-----
+MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
+BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
+ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
+MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
+a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
+4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
+tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
+tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
+dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
+c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
+TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
++kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
+Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
+OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
+fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
+l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
+/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
+FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
+8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
+6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
+TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
+wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
+Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
+xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
+DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
+Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
+hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
+7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
+QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=CNNIC ROOT O=CNNIC
+# Subject: CN=CNNIC ROOT O=CNNIC
+# Label: "CNNIC ROOT"
+# Serial: 1228079105
+# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19
+# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f
+# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7
+-----BEGIN CERTIFICATE-----
+MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
+TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
+MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
+Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
+IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
+dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
+V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
+GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
+v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
+AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
+Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
+76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
+OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
+ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
+yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
+buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
+2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
+-----END CERTIFICATE-----
+
+# Issuer: O=Japanese Government OU=ApplicationCA
+# Subject: O=Japanese Government OU=ApplicationCA
+# Label: "ApplicationCA - Japanese Government"
+# Serial: 49
+# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6
+# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74
+# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19
+-----BEGIN CERTIFICATE-----
+MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
+MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
+b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
+AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
+aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
+j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
+f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
+IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
+FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
+QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
+/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
+k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
+MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
+seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
+hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
+DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
+B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
+rosot4LKGAfmt1t06SAZf7IbiVQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
+# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig O=Disig a.s.
+# Subject: CN=CA Disig O=Disig a.s.
+# Label: "CA Disig"
+# Serial: 1
+# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6
+# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41
+# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
+MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
+AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
+CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
+YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
+Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
+mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
+XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
+S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
+FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
+AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
+ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
+ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
+Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
+DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
+yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
+EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
+CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
+EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
+PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus
+# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus
+# Label: "Juur-SK"
+# Serial: 999181308
+# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55
+# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89
+# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39
+-----BEGIN CERTIFICATE-----
+MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
+AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
+dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
+MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
+CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
+MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
+SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
+ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
+LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
+PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
+2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
+ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
+MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
+AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
+AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
+AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
+AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
+BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
+FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
+P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
+CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
+kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
+HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
+na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
+qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
+TbvGRNs2yyqcjg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI
+# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI
+# Label: "ACEDICOM Root"
+# Serial: 7029493972724711941
+# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6
+# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84
+# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a
+-----BEGIN CERTIFICATE-----
+MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
+AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
+CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
+MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
+RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
+AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
+09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
+XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
+Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
+t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
+X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
+MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
+fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
+2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
+K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
+ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
+BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
+MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
+RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
+bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
+fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
+gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
+I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
+5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
+ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
+MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
+o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
+zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
+GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
+r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
+Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Autorité Racine"
+# Serial: 1
+# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a
+# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3
+# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17
+-----BEGIN CERTIFICATE-----
+MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
+BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
+Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
+cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
+aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
+F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
+8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
+rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
+/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
+7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
+28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
+lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
+nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
+0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
+5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
+WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
+jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
+KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
+ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
+OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
+619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
+2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
+o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
+nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
+5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
+pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
+dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
+BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
+# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
+# Label: "Root CA Generalitat Valenciana"
+# Serial: 994436456
+# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2
+# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46
+# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e
+-----BEGIN CERTIFICATE-----
+MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
+UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
+R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
+MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
+A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
+JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
+WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
+SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
+u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
+A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
+Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
+MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
+aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
+IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
+cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
+YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
+bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
+bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
+aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
+aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
+ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
+YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
+ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
+LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
+Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
+eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
+CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
+A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
+Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
+lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
+b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
+9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
+ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
+IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
+# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
+# Label: "A-Trust-nQual-03"
+# Serial: 93214
+# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53
+# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2
+# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb
+-----BEGIN CERTIFICATE-----
+MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
+VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
+bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
+dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
+MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
+dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
+ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
+EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
+lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
+znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
+2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
+k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
+2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
+VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
+KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
+FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
+mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
+DNuxUCAKGkq6ahq97BvIxYSazQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
+# Label: "TURKTRUST Certificate Services Provider Root 2007"
+# Serial: 1
+# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72
+# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33
+# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50
+-----BEGIN CERTIFICATE-----
+MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS
+S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
+SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx
+OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry
+b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC
+VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE
+sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F
+ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY
+KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG
++7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG
+HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P
+IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M
+733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk
+Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
+CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW
+AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I
+aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5
+mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa
+XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ
+qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica
+# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT
+# Label: "PSCProcert"
+# Serial: 11
+# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec
+# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74
+# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0
+-----BEGIN CERTIFICATE-----
+MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1
+dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s
+YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz
+dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0
+aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh
+IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ
+KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw
+MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy
+b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx
+KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG
+A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u
+aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9
+7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74
+BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G
+ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9
+JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0
+PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2
+0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH
+0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/
+6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m
+v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7
+K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev
+bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw
+MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w
+MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD
+gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0
+b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh
+bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0
+cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp
+ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg
+ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq
+hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD
+AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w
+MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag
+RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t
+UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl
+cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v
+Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG
+AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN
+AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS
+1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB
+3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv
+Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh
+HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm
+pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz
+sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE
+qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb
+mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9
+opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H
+YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km
+-----END CERTIFICATE-----
+
+# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
+# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
+# Label: "China Internet Network Information Center EV Certificates Root"
+# Serial: 1218379777
+# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15
+# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e
+# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7
+-----BEGIN CERTIFICATE-----
+MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC
+Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g
+Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0
+aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa
+Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg
+SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo
+aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp
+ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z
+7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//
+DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx
+zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8
+hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs
+4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u
+gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY
+NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
+FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3
+j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG
+52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB
+echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws
+ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI
+zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy
+wy39FCqQmbkHzJ8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root CA 2"
+# Serial: 40698052477090394928831521023204026294
+# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19
+# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec
+# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41
+-----BEGIN CERTIFICATE-----
+MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk
+MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
+YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
+Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT
+AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
+Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr
+jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r
+0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f
+2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP
+ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF
+y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA
+tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL
+6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0
+uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL
+acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh
+k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q
+VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
+FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O
+BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh
+b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R
+fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv
+/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI
+REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx
+srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv
+aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT
+woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n
+Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W
+t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N
+8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2
+9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5
+wSsSnqaeG8XmDtkx2Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
+# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
+# Label: "Swisscom Root EV CA 2"
+# Serial: 322973295377129385374608406479535262296
+# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec
+# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b
+# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d
+-----BEGIN CERTIFICATE-----
+MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw
+ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp
+dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290
+IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD
+VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy
+dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg
+MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx
+UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD
+1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH
+oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR
+HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/
+5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv
+idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL
+OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC
+NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f
+46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB
+UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth
+7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G
+A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED
+MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB
+bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x
+XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T
+PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0
+Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70
+WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL
+Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm
+7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S
+nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN
+vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB
+WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI
+fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb
+I+2ksx0WckNLIOFZfsLorSa/ovc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R1 O=Disig a.s.
+# Subject: CN=CA Disig Root R1 O=Disig a.s.
+# Label: "CA Disig Root R1"
+# Serial: 14052245610670616104
+# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a
+# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6
+# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy
+MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk
+D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o
+OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A
+fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe
+IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n
+oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK
+/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj
+rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD
+3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE
+7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC
+yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd
+qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI
+hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR
+xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA
+SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo
+HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB
+emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC
+AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb
+7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x
+DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk
+F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF
+a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT
+Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited
+# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited
+# Label: "WoSign"
+# Serial: 125491772294754854453622855443212256657
+# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d
+# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb
+# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08
+-----BEGIN CERTIFICATE-----
+MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV
+MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV
+BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw
+MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX
+b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN
+rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U
+fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc
+f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2
+ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M
+x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR
+aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch
+zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar
+uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K
+mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA
+Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv
+HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H
+EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1
+LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ
+MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e
+JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN
+g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp
+dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab
+R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ
+PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce
+xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+
+J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl
+OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT
+ee5Ehr7XHuQe+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited
+# Subject: CN=CA 沃通根证书 O=WoSign CA Limited
+# Label: "WoSign China"
+# Serial: 106921963437422998931660691310149453965
+# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93
+# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6
+# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54
+-----BEGIN CERTIFICATE-----
+MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG
+MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV
+BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw
+MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl
+ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r
+D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1
+9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf
+v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk
+UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L
+NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb
++gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V
+qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K
+yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G
+AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK
+J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC
+AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4
+WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6
+yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj
+/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6
+jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2
+ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX
+X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n
+FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D
+u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l
+O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le
+ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1
+2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
+# Serial: 156233699172481
+# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
+# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
+# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
+-----BEGIN CERTIFICATE-----
+MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
+BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
+aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
+QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
+MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
+VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
+dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
+bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
+/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
+Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
+4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
+5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
+hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
+AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
+SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
+VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
+URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
+peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
+Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
++qtB4Uu2NQvAmxU=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
+# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
+# Serial: 138134509972618
+# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
+# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
+# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
+-----BEGIN CERTIFICATE-----
+MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
+EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
+IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
+LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
+aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
+NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
+BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
+ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
+ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
+eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
++bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
+z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
+u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
+lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
+AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
+FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
+QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
+o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
+gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
+9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
+tAuYSyher4hYyw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Root CA"
+# Serial: 1
+# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
+# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
+# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+-----BEGIN CERTIFICATE-----
+MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
+BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
+MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
+Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
+fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
+LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
+WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
+TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
+5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
+CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
+wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
+wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
+F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
+WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
+2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
+AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
+0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
+F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
+g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
+qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
+h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
+ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
+btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
+Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
+8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
+gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Express (Class C) Root"
+# Serial: 104
+# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
+# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
+# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
+-----BEGIN CERTIFICATE-----
+MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
+EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
+DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
+DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
+c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
+TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
+BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
+OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
+2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
+RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
+AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
+ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
+YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
+b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
+ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
+IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
+b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
+ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
+YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
+a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
+SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
+aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
+YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
+Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
+ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
+pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
+Fp1hBWeAyNDYpQcCNJgEjTME1A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
+# Label: "NetLock Business (Class B) Root"
+# Serial: 105
+# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
+# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
+# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
+-----BEGIN CERTIFICATE-----
+MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
+EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
+OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
+A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
+Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
+dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
+SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
+gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
+iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
+Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
+BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
+SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
+b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
+bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
+Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
+aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
+IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
+c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
+biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
+ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
+UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
+YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
+dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
+bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
+sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
+n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
+NitjrFgBazMpUIaD8QFI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 149843929435818692848040365716851702463
+# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
+# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
+# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
+lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
+AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 80507572722862485515306429940691309246
+# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
+# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
+# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
+2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
+2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
diff --git a/python/requests/requests/certs.py b/python/requests/requests/certs.py
new file mode 100644
index 000000000..07e647507
--- /dev/null
+++ b/python/requests/requests/certs.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+certs.py
+~~~~~~~~
+
+This module returns the preferred default CA certificate bundle.
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
+import os.path
+
+try:
+ from certifi import where
+except ImportError:
+ def where():
+ """Return the preferred certificate bundle."""
+ # vendored bundle inside Requests
+ return os.path.join(os.path.dirname(__file__), 'cacert.pem')
+
+if __name__ == '__main__':
+ print(where())
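+
+# A minimal sketch of the packaging override described in the module
+# docstring above; the bundle path is an assumption for illustration,
+# not something Requests ships:
+#
+#     def where():
+#         """Return the distribution-provided CA bundle."""
+#         return '/etc/ssl/certs/ca-certificates.crt'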
diff --git a/python/requests/requests/compat.py b/python/requests/requests/compat.py
new file mode 100644
index 000000000..70edff784
--- /dev/null
+++ b/python/requests/requests/compat.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+"""
+pythoncompat
+"""
+
+from .packages import chardet
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = (_ver[0] == 2)
+
+#: Python 3.x?
+is_py3 = (_ver[0] == 3)
+
+try:
+ import simplejson as json
+except (ImportError, SyntaxError):
+    # simplejson does not support Python 3.2; it throws a SyntaxError
+    # because of u'...' Unicode literals.
+ import json
+
+# ---------
+# Specifics
+# ---------
+
+if is_py2:
+ from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
+ from urllib2 import parse_http_list
+ import cookielib
+ from Cookie import Morsel
+ from StringIO import StringIO
+ from .packages.urllib3.packages.ordered_dict import OrderedDict
+
+ builtin_str = str
+ bytes = str
+ str = unicode
+ basestring = basestring
+ numeric_types = (int, long, float)
+
+elif is_py3:
+ from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
+ from urllib.request import parse_http_list, getproxies, proxy_bypass
+ from http import cookiejar as cookielib
+ from http.cookies import Morsel
+ from io import StringIO
+ from collections import OrderedDict
+
+ builtin_str = str
+ str = str
+ bytes = bytes
+ basestring = (str, bytes)
+ numeric_types = (int, float)
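+
+# A hypothetical consumer, to make the aliasing above concrete: once the
+# branches have run, code elsewhere in the package can do version-agnostic
+# imports such as
+#
+#     from .compat import urlparse, str, bytes
+#     host = urlparse('http://example.com/a').netloc
+#
+# and get the same text/bytes semantics on Python 2 and Python 3.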
diff --git a/python/requests/requests/cookies.py b/python/requests/requests/cookies.py
new file mode 100644
index 000000000..b85fd2b62
--- /dev/null
+++ b/python/requests/requests/cookies.py
@@ -0,0 +1,487 @@
+# -*- coding: utf-8 -*-
+
+"""
+Compatibility code to be able to use `cookielib.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
+import copy
+import time
+import calendar
+import collections
+from .compat import cookielib, urlparse, urlunparse, Morsel
+
+try:
+ import threading
+ # grr, pyflakes: this fixes "redefinition of unused 'threading'"
+ threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest(object):
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `cookielib.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+        # Only return the response's URL if the user did not set the Host
+        # header.
+ if not self._r.headers.get('Host'):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
+ host = self._r.headers['Host']
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse([
+ parsed.scheme, host, parsed.path, parsed.params, parsed.query,
+ parsed.fragment
+ ])
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookielib has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse(object):
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `cookielib` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookielib` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+ def getheaders(self, name):
+        return self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, '_original_response') and
+ response._original_response):
+ return
+ # the _original_response field is the wrapped httplib.HTTPResponse object,
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+ """Produce an appropriate Cookie header string to be sent with `request`, or None."""
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get('Cookie')
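+
+# Sketch of intended use (the request name is illustrative): given a jar
+# and a prepared request,
+#
+#     header = get_cookie_header(jar, prepared_request)
+#
+# yields e.g. 'sessionid=abc123', or None if no cookie applies.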
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+    Wraps CookieJar.clear(); the operation is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
+ Use .get and .set and include domain and path args in order to be more specific."""
+
+
+class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
+
+ Unlike a regular CookieJar, this class is pickleable.
+
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1)."""
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains."""
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar. See itervalues() and iteritems()."""
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar. See values() and items()."""
+ return list(self.iterkeys())
+
+ def itervalues(self):
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar. See iterkeys() and iteritems()."""
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
+ """Dict-like values() that returns a list of values of cookies from the
+ jar. See keys() and items()."""
+ return list(self.itervalues())
+
+ def iteritems(self):
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar. See iterkeys() and itervalues()."""
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
+ """Dict-like items() that returns a list of name-value tuples from the
+        jar. See keys() and values(). Allows client code to call
+ ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
+ pairs."""
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
+ Returns False otherwise."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+ return False # there is only one domain in jar
+
+ def get_dict(self, domain=None, path=None):
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements."""
+ dictionary = {}
+ for cookie in iter(self):
+ if (domain is None or cookie.domain == domain) and (path is None
+ or cookie.path == path):
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
+ def __getitem__(self, name):
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1)."""
+
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead."""
+
+ self.set(name, value)
+
+ def __delitem__(self, name):
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``."""
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
+ if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
+ cookie.value = cookie.value.replace('\\"', '')
+ return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(copy.copy(cookie))
+ else:
+ super(RequestsCookieJar, self).update(other)
+
+ def _find(self, name, domain=None, path=None):
+ """Requests uses this method internally to get cookie values. Takes as
+ args name and optional domain and path. Returns a cookie.value. If
+ there are conflicting cookies, _find arbitrarily chooses one. See
+ _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies."""
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+    def _find_no_duplicates(self, name, domain=None, path=None):
+        """Both ``__getitem__`` and ``get`` call this function: it's never
+        used elsewhere in Requests. Takes as args name and optional domain and
+        path. Returns a cookie.value. Throws KeyError if the cookie is not
+        found and CookieConflictError if there are multiple cookies that match
+        name and optionally domain and path."""
+        toReturn = None
+        for cookie in iter(self):
+            if cookie.name == name:
+                if domain is None or cookie.domain == domain:
+                    if path is None or cookie.path == path:
+                        if toReturn is not None:
+                            # more than one cookie matched the given criteria
+                            raise CookieConflictError('There are multiple cookies with name %r' % (name,))
+                        # remember the value; keep scanning for conflicts
+                        toReturn = cookie.value
+
+        if toReturn is not None:
+            return toReturn
+        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop('_cookies_lock')
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if '_cookies_lock' not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.update(self)
+ return new_cj
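+
+    # Illustrative dict-style usage enabled by this class (the cookie
+    # names here are made up):
+    #
+    #     jar = RequestsCookieJar()
+    #     jar.set('token', 'abc', domain='example.com', path='/')
+    #     jar['token']   # -> 'abc'
+    #     dict(jar)      # -> {'token': 'abc'}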
+
+
+def _copy_cookie_jar(jar):
+ if jar is None:
+ return None
+
+ if hasattr(jar, 'copy'):
+ # We're dealing with an instance of RequestsCookieJar
+ return jar.copy()
+ # We're dealing with a generic CookieJar instance
+ new_jar = copy.copy(jar)
+ new_jar.clear()
+ for cookie in jar:
+ new_jar.set_cookie(copy.copy(cookie))
+ return new_jar
+
+
+def create_cookie(name, value, **kwargs):
+ """Make a cookie from underspecified parameters.
+
+ By default, the pair of `name` and `value` will be set for the domain ''
+ and sent on every request (this is sometimes called a "supercookie").
+ """
+ result = dict(
+ version=0,
+ name=name,
+ value=value,
+ port=None,
+ domain='',
+ path='/',
+ secure=False,
+ expires=None,
+ discard=True,
+ comment=None,
+ comment_url=None,
+ rest={'HttpOnly': None},
+ rfc2109=False,)
+
+ badargs = set(kwargs) - set(result)
+ if badargs:
+ err = 'create_cookie() got unexpected keyword arguments: %s'
+ raise TypeError(err % list(badargs))
+
+ result.update(kwargs)
+ result['port_specified'] = bool(result['port'])
+ result['domain_specified'] = bool(result['domain'])
+ result['domain_initial_dot'] = result['domain'].startswith('.')
+ result['path_specified'] = bool(result['path'])
+
+ return cookielib.Cookie(**result)
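+
+# For illustration (values are made up): a cookie scoped to a single host,
+# rather than the default '' supercookie domain, would be built as
+#
+#     c = create_cookie('sessionid', 'abc123', domain='example.com',
+#                       path='/', secure=True)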
+
+
+def morsel_to_cookie(morsel):
+ """Convert a Morsel object into a Cookie containing the one k/v pair."""
+
+ expires = None
+ if morsel['max-age']:
+ try:
+ expires = int(time.time() + int(morsel['max-age']))
+ except ValueError:
+ raise TypeError('max-age: %s must be integer' % morsel['max-age'])
+ elif morsel['expires']:
+ time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
+ expires = calendar.timegm(
+ time.strptime(morsel['expires'], time_template)
+ )
+ return create_cookie(
+ comment=morsel['comment'],
+ comment_url=bool(morsel['comment']),
+ discard=False,
+ domain=morsel['domain'],
+ expires=expires,
+ name=morsel.key,
+ path=morsel['path'],
+ port=None,
+ rest={'HttpOnly': morsel['httponly']},
+ rfc2109=False,
+ secure=bool(morsel['secure']),
+ value=morsel.value,
+ version=morsel['version'] or 0,
+ )
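+
+# Example of the conversion above, assuming a standard library Morsel
+# (the cookie values are illustrative):
+#
+#     from http.cookies import SimpleCookie   # 'Cookie' on Python 2
+#     morsel = SimpleCookie('sid=abc; Max-Age=60')['sid']
+#     cookie = morsel_to_cookie(morsel)   # expires ~= now + 60 seconds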
+
+
+def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :param cookiejar: (optional) A cookiejar to add the cookies to.
+ :param overwrite: (optional) If False, will not replace cookies
+ already in the jar with new ones.
+ """
+ if cookiejar is None:
+ cookiejar = RequestsCookieJar()
+
+ if cookie_dict is not None:
+ names_from_jar = [cookie.name for cookie in cookiejar]
+ for name in cookie_dict:
+ if overwrite or (name not in names_from_jar):
+ cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
+
+ return cookiejar
+
+
+def merge_cookies(cookiejar, cookies):
+ """Add cookies to cookiejar and returns a merged CookieJar.
+
+ :param cookiejar: CookieJar object to add the cookies to.
+ :param cookies: Dictionary or CookieJar object to be added.
+ """
+ if not isinstance(cookiejar, cookielib.CookieJar):
+ raise ValueError('You can only merge into CookieJar')
+
+ if isinstance(cookies, dict):
+ cookiejar = cookiejar_from_dict(
+ cookies, cookiejar=cookiejar, overwrite=False)
+ elif isinstance(cookies, cookielib.CookieJar):
+ try:
+ cookiejar.update(cookies)
+ except AttributeError:
+ for cookie_in_jar in cookies:
+ cookiejar.set_cookie(cookie_in_jar)
+
+ return cookiejar
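+
+# Usage sketch for the two helpers above (dict contents are made up):
+#
+#     jar = cookiejar_from_dict({'a': '1'})
+#     jar = merge_cookies(jar, {'b': '2'})   # adds without overwriting 'a'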
diff --git a/python/requests/requests/exceptions.py b/python/requests/requests/exceptions.py
new file mode 100644
index 000000000..ba0b910e3
--- /dev/null
+++ b/python/requests/requests/exceptions.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+
+"""
+from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
+
+
+class RequestException(IOError):
+ """There was an ambiguous exception that occurred while handling your
+ request."""
+
+ def __init__(self, *args, **kwargs):
+ """
+ Initialize RequestException with `request` and `response` objects.
+ """
+ response = kwargs.pop('response', None)
+ self.response = response
+ self.request = kwargs.pop('request', None)
+ if (response is not None and not self.request and
+ hasattr(response, 'request')):
+ self.request = self.response.request
+ super(RequestException, self).__init__(*args, **kwargs)
+
+
+class HTTPError(RequestException):
+ """An HTTP error occurred."""
+
+
+class ConnectionError(RequestException):
+ """A Connection error occurred."""
+
+
+class ProxyError(ConnectionError):
+ """A proxy error occurred."""
+
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
+
+
+class Timeout(RequestException):
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
+
+
+class URLRequired(RequestException):
+ """A valid URL is required to make a request."""
+
+
+class TooManyRedirects(RequestException):
+ """Too many redirects."""
+
+
+class MissingSchema(RequestException, ValueError):
+ """The URL schema (e.g. http or https) is missing."""
+
+
+class InvalidSchema(RequestException, ValueError):
+ """See defaults.py for valid schemas."""
+
+
+class InvalidURL(RequestException, ValueError):
+ """ The URL provided was somehow invalid. """
+
+
+class ChunkedEncodingError(RequestException):
+ """The server declared chunked encoding but sent an invalid chunk."""
+
+
+class ContentDecodingError(RequestException, BaseHTTPError):
+ """Failed to decode response content"""
+
+
+class StreamConsumedError(RequestException, TypeError):
+ """The content for this response was already consumed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
+
+
+# Warnings
+
+
+class RequestsWarning(Warning):
+ """Base warning for Requests."""
+ pass
+
+
+class FileModeWarning(RequestsWarning, DeprecationWarning):
+ """
+ A file was opened in text mode, but Requests determined its binary length.
+ """
+ pass
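+
+# Catching example for the hierarchy above (the URL is illustrative):
+#
+#     try:
+#         requests.get('http://example.com/', timeout=1)
+#     except requests.exceptions.Timeout:
+#         ...  # covers both ConnectTimeout and ReadTimeout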
diff --git a/python/requests/requests/hooks.py b/python/requests/requests/hooks.py
new file mode 100644
index 000000000..9da94366d
--- /dev/null
+++ b/python/requests/requests/hooks.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+
+"""
+HOOKS = ['response']
+
+
+def default_hooks():
+    return dict((event, []) for event in HOOKS)
+
+# TODO: response is the only one
+
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
+ """Dispatches a hook dictionary on a given piece of data."""
+ hooks = hooks or dict()
+ hooks = hooks.get(key)
+ if hooks:
+ if hasattr(hooks, '__call__'):
+ hooks = [hooks]
+ for hook in hooks:
+ _hook_data = hook(hook_data, **kwargs)
+ if _hook_data is not None:
+ hook_data = _hook_data
+ return hook_data
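+
+# Dispatch sketch (the hook function is hypothetical): a hook may return a
+# replacement for the data, or None to leave it unchanged.
+#
+#     def log_url(response, **kwargs):
+#         print(response.url)
+#
+#     response = dispatch_hook('response', {'response': [log_url]}, response)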
diff --git a/python/requests/requests/models.py b/python/requests/requests/models.py
new file mode 100644
index 000000000..4bcbc5484
--- /dev/null
+++ b/python/requests/requests/models.py
@@ -0,0 +1,851 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import collections
+import datetime
+
+from io import BytesIO, UnsupportedOperation
+from .hooks import default_hooks
+from .structures import CaseInsensitiveDict
+
+from .auth import HTTPBasicAuth
+from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
+from .packages.urllib3.fields import RequestField
+from .packages.urllib3.filepost import encode_multipart_formdata
+from .packages.urllib3.util import parse_url
+from .packages.urllib3.exceptions import (
+ DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
+from .exceptions import (
+ HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
+ ContentDecodingError, ConnectionError, StreamConsumedError)
+from .utils import (
+ guess_filename, get_auth_from_url, requote_uri,
+ stream_decode_response_unicode, to_key_val_list, parse_header_links,
+ iter_slices, guess_json_utf, super_len, to_native_string)
+from .compat import (
+ cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
+ is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
+from .status_codes import codes
+
+#: The set of HTTP status codes that indicate an automatically
+#: processable redirect.
+REDIRECT_STATI = (
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_redirect, # 307
+ codes.permanent_redirect, # 308
+)
+
+DEFAULT_REDIRECT_LIMIT = 30
+CONTENT_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
+
+
+class RequestEncodingMixin(object):
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.url)
+
+ path = p.path
+ if not path:
+ path = '/'
+
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append('?')
+ url.append(query)
+
+ return ''.join(url)
+
+ @staticmethod
+ def _encode_params(data):
+ """Encode parameters in a piece of data.
+
+ Will successfully encode parameters when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+ """
+
+ if isinstance(data, (str, bytes)):
+ return data
+ elif hasattr(data, 'read'):
+ return data
+ elif hasattr(data, '__iter__'):
+ result = []
+ for k, vs in to_key_val_list(data):
+ if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
+ vs = [vs]
+ for v in vs:
+ if v is not None:
+ result.append(
+ (k.encode('utf-8') if isinstance(k, str) else k,
+ v.encode('utf-8') if isinstance(v, str) else v))
+ return urlencode(result, doseq=True)
+ else:
+ return data
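+
+ # Illustrative behaviour (a sketch, not a doctest shipped with Requests):
+ #   _encode_params({'a': '1'})                  -> 'a=1'
+ #   _encode_params([('k', 'v1'), ('k', 'v2')])  -> 'k=v1&k=v2'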
+
+ @staticmethod
+ def _encode_files(files, data):
+ """Build the body for a multipart/form-data request.
+
+ Will successfully encode files when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+
+ """
+ if (not files):
+ raise ValueError("Files must be provided.")
+ elif isinstance(data, basestring):
+ raise ValueError("Data must not be a string.")
+
+ new_fields = []
+ fields = to_key_val_list(data or {})
+ files = to_key_val_list(files or {})
+
+ for field, val in fields:
+ if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+ val = [val]
+ for v in val:
+ if v is not None:
+ # Don't call str() on bytestrings: in Py3 it all goes wrong.
+ if not isinstance(v, bytes):
+ v = str(v)
+
+ new_fields.append(
+ (field.decode('utf-8') if isinstance(field, bytes) else field,
+ v.encode('utf-8') if isinstance(v, str) else v))
+
+ for (k, v) in files:
+ # support for explicit filename
+ ft = None
+ fh = None
+ if isinstance(v, (tuple, list)):
+ if len(v) == 2:
+ fn, fp = v
+ elif len(v) == 3:
+ fn, fp, ft = v
+ else:
+ fn, fp, ft, fh = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+
+ if isinstance(fp, (str, bytes, bytearray)):
+ fdata = fp
+ else:
+ fdata = fp.read()
+
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
+ rf.make_multipart(content_type=ft)
+ new_fields.append(rf)
+
+ body, content_type = encode_multipart_formdata(new_fields)
+
+ return body, content_type
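+
+ # Each value in ``files`` may take one of these shapes (illustrative,
+ # matching the 2-/3-/4-tuple branches above):
+ #   fileobj
+ #   ('name.txt', fileobj)
+ #   ('name.txt', fileobj, 'text/plain')
+ #   ('name.txt', fileobj, 'text/plain', {'X-Custom': 'value'})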
+
+
+class RequestHooksMixin(object):
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ if event not in self.hooks:
+ raise ValueError('Unsupported event specified, with event name "%s"' % (event))
+
+ if isinstance(hook, collections.Callable):
+ self.hooks[event].append(hook)
+ elif hasattr(hook, '__iter__'):
+ self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
+
+ def deregister_hook(self, event, hook):
+ """Deregister a previously registered hook.
+ Returns True if the hook existed, False if not.
+ """
+
+ try:
+ self.hooks[event].remove(hook)
+ return True
+ except ValueError:
+ return False
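+
+ # Illustrative use on a Request instance; ``log_response`` is a
+ # hypothetical callback:
+ #   req.register_hook('response', log_response)
+ #   req.deregister_hook('response', log_response)  # -> True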
+
+
+class Request(RequestHooksMixin):
+ """A user-created :class:`Request <Request>` object.
+
+ Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+ :param method: HTTP method to use.
+ :param url: URL to send.
+ :param headers: dictionary of headers to send.
+    :param files: dictionary of {fieldname: fileobject} files for multipart upload.
+ :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
+ :param json: json for the body to attach to the request (if files or data is not specified).
+ :param params: dictionary of URL parameters to append to the URL.
+ :param auth: Auth handler or (user, pass) tuple.
+ :param cookies: dictionary or CookieJar of cookies to attach to this request.
+ :param hooks: dictionary of callback hooks, for internal usage.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'http://httpbin.org/get')
+ >>> req.prepare()
+ <PreparedRequest [GET]>
+
+ """
+ def __init__(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+
+ # Default empty dicts for dict params.
+ data = [] if data is None else data
+ files = [] if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+
+ self.hooks = default_hooks()
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
+
+ self.method = method
+ self.url = url
+ self.headers = headers
+ self.files = files
+ self.data = data
+ self.json = json
+ self.params = params
+ self.auth = auth
+ self.cookies = cookies
+
+ def __repr__(self):
+ return '<Request [%s]>' % (self.method)
+
+ def prepare(self):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
+ p = PreparedRequest()
+ p.prepare(
+ method=self.method,
+ url=self.url,
+ headers=self.headers,
+ files=self.files,
+ data=self.data,
+ json=self.json,
+ params=self.params,
+ auth=self.auth,
+ cookies=self.cookies,
+ hooks=self.hooks,
+ )
+ return p
+
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+ """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
+ containing the exact bytes that will be sent to the server.
+
+ Generated from either a :class:`Request <Request>` object or manually.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'http://httpbin.org/get')
+ >>> r = req.prepare()
+ <PreparedRequest [GET]>
+
+ >>> s = requests.Session()
+ >>> s.send(r)
+ <Response [200]>
+
+ """
+
+ def __init__(self):
+ #: HTTP verb to send to the server.
+ self.method = None
+ #: HTTP URL to send the request to.
+ self.url = None
+ #: dictionary of HTTP headers.
+ self.headers = None
+ # The `CookieJar` used to create the Cookie header will be stored here
+ # after prepare_cookies is called
+ self._cookies = None
+ #: request body to send to the server.
+ self.body = None
+ #: dictionary of callback hooks, for internal usage.
+ self.hooks = default_hooks()
+
+ def prepare(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
+ """Prepares the entire request with the given parameters."""
+
+ self.prepare_method(method)
+ self.prepare_url(url, params)
+ self.prepare_headers(headers)
+ self.prepare_cookies(cookies)
+ self.prepare_body(data, files, json)
+ self.prepare_auth(auth, url)
+
+ # Note that prepare_auth must be last to enable authentication schemes
+ # such as OAuth to work on a fully prepared request.
+
+ # This MUST go after prepare_auth. Authenticators could add a hook
+ self.prepare_hooks(hooks)
+
+ def __repr__(self):
+ return '<PreparedRequest [%s]>' % (self.method)
+
+ def copy(self):
+ p = PreparedRequest()
+ p.method = self.method
+ p.url = self.url
+ p.headers = self.headers.copy() if self.headers is not None else None
+ p._cookies = _copy_cookie_jar(self._cookies)
+ p.body = self.body
+ p.hooks = self.hooks
+ return p
+
+ def prepare_method(self, method):
+ """Prepares the given HTTP method."""
+ self.method = method
+ if self.method is not None:
+ self.method = to_native_string(self.method.upper())
+
+ def prepare_url(self, url, params):
+ """Prepares the given HTTP URL."""
+        # Accept objects that have string representations.
+        # We're unable to blindly call unicode/str functions
+        # as this will include the bytestring indicator (b'')
+        # on python 3.x.
+        # https://github.com/kennethreitz/requests/pull/2238
+ if isinstance(url, bytes):
+ url = url.decode('utf8')
+ else:
+ url = unicode(url) if is_py2 else str(url)
+
+ # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+ # `data` etc to work around exceptions from `url_parse`, which
+ # handles RFC 3986 only.
+ if ':' in url and not url.lower().startswith('http'):
+ self.url = url
+ return
+
+ # Support for unicode domain names and paths.
+ try:
+ scheme, auth, host, port, path, query, fragment = parse_url(url)
+ except LocationParseError as e:
+ raise InvalidURL(*e.args)
+
+ if not scheme:
+ error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+ error = error.format(to_native_string(url, 'utf8'))
+
+ raise MissingSchema(error)
+
+ if not host:
+ raise InvalidURL("Invalid URL %r: No host supplied" % url)
+
+ # Only want to apply IDNA to the hostname
+ try:
+ host = host.encode('idna').decode('utf-8')
+ except UnicodeError:
+ raise InvalidURL('URL has an invalid label.')
+
+ # Carefully reconstruct the network location
+ netloc = auth or ''
+ if netloc:
+ netloc += '@'
+ netloc += host
+ if port:
+ netloc += ':' + str(port)
+
+ # Bare domains aren't valid URLs.
+ if not path:
+ path = '/'
+
+ if is_py2:
+ if isinstance(scheme, str):
+ scheme = scheme.encode('utf-8')
+ if isinstance(netloc, str):
+ netloc = netloc.encode('utf-8')
+ if isinstance(path, str):
+ path = path.encode('utf-8')
+ if isinstance(query, str):
+ query = query.encode('utf-8')
+ if isinstance(fragment, str):
+ fragment = fragment.encode('utf-8')
+
+ if isinstance(params, (str, bytes)):
+ params = to_native_string(params)
+
+ enc_params = self._encode_params(params)
+ if enc_params:
+ if query:
+ query = '%s&%s' % (query, enc_params)
+ else:
+ query = enc_params
+
+ url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+ self.url = url
+
+ def prepare_headers(self, headers):
+ """Prepares the given HTTP headers."""
+
+ if headers:
+ self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
+ else:
+ self.headers = CaseInsensitiveDict()
+
+ def prepare_body(self, data, files, json=None):
+ """Prepares the given HTTP body data."""
+
+        # Check whether data is a file, file-like object, generator or
+        # iterator. If not, run it through the normal form-encoding process.
+
+ body = None
+ content_type = None
+ length = None
+
+ if not data and json is not None:
+ content_type = 'application/json'
+ body = complexjson.dumps(json)
+
+ is_stream = all([
+ hasattr(data, '__iter__'),
+ not isinstance(data, (basestring, list, tuple, dict))
+ ])
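+
+        # Generators and other non-basic iterables are streamed below; strings,
+        # lists, tuples and dicts fall through to form-encoding instead.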
+
+ try:
+ length = super_len(data)
+ except (TypeError, AttributeError, UnsupportedOperation):
+ length = None
+
+ if is_stream:
+ body = data
+
+ if files:
+ raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
+
+ if length:
+ self.headers['Content-Length'] = builtin_str(length)
+ else:
+ self.headers['Transfer-Encoding'] = 'chunked'
+ else:
+ # Multi-part file uploads.
+ if files:
+ (body, content_type) = self._encode_files(files, data)
+ else:
+ if data:
+ body = self._encode_params(data)
+ if isinstance(data, basestring) or hasattr(data, 'read'):
+ content_type = None
+ else:
+ content_type = 'application/x-www-form-urlencoded'
+
+ self.prepare_content_length(body)
+
+ # Add content-type if it wasn't explicitly provided.
+ if content_type and ('content-type' not in self.headers):
+ self.headers['Content-Type'] = content_type
+
+ self.body = body
+
+ def prepare_content_length(self, body):
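+        """Prepares the Content-Length header (a summary of the logic below):
+        seekable bodies are measured via seek/tell, other bodies via
+        ``super_len``, and bodiless non-GET/HEAD requests are given an
+        explicit ``Content-Length: 0``."""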
+ if hasattr(body, 'seek') and hasattr(body, 'tell'):
+ body.seek(0, 2)
+ self.headers['Content-Length'] = builtin_str(body.tell())
+ body.seek(0, 0)
+ elif body is not None:
+ l = super_len(body)
+ if l:
+ self.headers['Content-Length'] = builtin_str(l)
+ elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
+ self.headers['Content-Length'] = '0'
+
+ def prepare_auth(self, auth, url=''):
+ """Prepares the given HTTP auth data."""
+
+ # If no Auth is explicitly provided, extract it from the URL first.
+ if auth is None:
+ url_auth = get_auth_from_url(self.url)
+ auth = url_auth if any(url_auth) else None
+
+ if auth:
+ if isinstance(auth, tuple) and len(auth) == 2:
+ # special-case basic HTTP auth
+ auth = HTTPBasicAuth(*auth)
+
+ # Allow auth to make its changes.
+ r = auth(self)
+
+ # Update self to reflect the auth changes.
+ self.__dict__.update(r.__dict__)
+
+ # Recompute Content-Length
+ self.prepare_content_length(self.body)
+
+ def prepare_cookies(self, cookies):
+ """Prepares the given HTTP cookie data.
+
+ This function eventually generates a ``Cookie`` header from the
+ given cookies using cookielib. Due to cookielib's design, the header
+ will not be regenerated if it already exists, meaning this function
+ can only be called once for the life of the
+ :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
+ to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
+ header is removed beforehand."""
+
+ if isinstance(cookies, cookielib.CookieJar):
+ self._cookies = cookies
+ else:
+ self._cookies = cookiejar_from_dict(cookies)
+
+ cookie_header = get_cookie_header(self._cookies, self)
+ if cookie_header is not None:
+ self.headers['Cookie'] = cookie_header
+
+ def prepare_hooks(self, hooks):
+ """Prepares the given hooks."""
+        # hooks can be passed as None to the prepare method and to this
+        # method. To prevent iterating over None, simply use an empty list
+        # if hooks is falsy.
+ hooks = hooks or []
+ for event in hooks:
+ self.register_hook(event, hooks[event])
+
+
+class Response(object):
+ """The :class:`Response <Response>` object, which contains a
+ server's response to an HTTP request.
+ """
+
+ __attrs__ = [
+ '_content', 'status_code', 'headers', 'url', 'history',
+ 'encoding', 'reason', 'cookies', 'elapsed', 'request'
+ ]
+
+ def __init__(self):
+ super(Response, self).__init__()
+
+ self._content = False
+ self._content_consumed = False
+
+ #: Integer Code of responded HTTP Status, e.g. 404 or 200.
+ self.status_code = None
+
+ #: Case-insensitive Dictionary of Response Headers.
+ #: For example, ``headers['content-encoding']`` will return the
+ #: value of a ``'Content-Encoding'`` response header.
+ self.headers = CaseInsensitiveDict()
+
+ #: File-like object representation of response (for advanced usage).
+ #: Use of ``raw`` requires that ``stream=True`` be set on the request.
+ # This requirement does not apply for use internally to Requests.
+ self.raw = None
+
+ #: Final URL location of Response.
+ self.url = None
+
+ #: Encoding to decode with when accessing r.text.
+ self.encoding = None
+
+ #: A list of :class:`Response <Response>` objects from
+ #: the history of the Request. Any redirect responses will end
+ #: up here. The list is sorted from the oldest to the most recent request.
+ self.history = []
+
+ #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
+ self.reason = None
+
+ #: A CookieJar of Cookies the server sent back.
+ self.cookies = cookiejar_from_dict({})
+
+ #: The amount of time elapsed between sending the request
+ #: and the arrival of the response (as a timedelta).
+ #: This property specifically measures the time taken between sending
+ #: the first byte of the request and finishing parsing the headers. It
+ #: is therefore unaffected by consuming the response content or the
+ #: value of the ``stream`` keyword argument.
+ self.elapsed = datetime.timedelta(0)
+
+ #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+ #: is a response.
+ self.request = None
+
+ def __getstate__(self):
+ # Consume everything; accessing the content attribute makes
+ # sure the content has been fully read.
+ if not self._content_consumed:
+ self.content
+
+ return dict(
+ (attr, getattr(self, attr, None))
+ for attr in self.__attrs__
+ )
+
+ def __setstate__(self, state):
+ for name, value in state.items():
+ setattr(self, name, value)
+
+ # pickled objects do not have .raw
+ setattr(self, '_content_consumed', True)
+ setattr(self, 'raw', None)
+
+ def __repr__(self):
+ return '<Response [%s]>' % (self.status_code)
+
+ def __bool__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
+ def __nonzero__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
+ @property
+ def ok(self):
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
+
+ @property
+ def is_redirect(self):
+ """True if this Response is a well-formed HTTP redirect that could have
+ been processed automatically (by :meth:`Session.resolve_redirects`).
+ """
+ return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+
+ @property
+ def is_permanent_redirect(self):
+ """True if this Response one of the permanent versions of redirect"""
+ return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+
+ @property
+ def apparent_encoding(self):
+ """The apparent encoding, provided by the chardet library"""
+ return chardet.detect(self.content)['encoding']
+
+ def iter_content(self, chunk_size=1, decode_unicode=False):
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+        read into memory. This is not necessarily the length of each item
+        returned, as decoding can take place.
+
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
+
+ def generate():
+ # Special case for urllib3.
+ if hasattr(self.raw, 'stream'):
+ try:
+ for chunk in self.raw.stream(chunk_size, decode_content=True):
+ yield chunk
+ except ProtocolError as e:
+ raise ChunkedEncodingError(e)
+ except DecodeError as e:
+ raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
+ else:
+ # Standard file-like object.
+ while True:
+ chunk = self.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ self._content_consumed = True
+
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
+
+ if decode_unicode:
+ chunks = stream_decode_response_unicode(chunks, self)
+
+ return chunks
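+
+    # Illustrative streaming pattern (assumes a response ``r`` fetched with
+    # ``stream=True``; ``fd`` is a hypothetical open file object):
+    #   for chunk in r.iter_content(chunk_size=1024):
+    #       fd.write(chunk)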
+
+ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
+
+        .. note:: This method is not reentrant-safe.
+ """
+
+ pending = None
+
+ for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
+
+ if pending is not None:
+ chunk = pending + chunk
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
+
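+            # If the chunk did not end on a line boundary, its last element is
+            # an incomplete line: hold it back and prepend it to the next chunk.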
+ if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
+ else:
+ pending = None
+
+ for line in lines:
+ yield line
+
+ if pending is not None:
+ yield pending
+
+ @property
+ def content(self):
+ """Content of the response, in bytes."""
+
+ if self._content is False:
+ # Read the contents.
+ try:
+ if self._content_consumed:
+ raise RuntimeError(
+ 'The content for this response was already consumed')
+
+ if self.status_code == 0:
+ self._content = None
+ else:
+ self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
+
+ except AttributeError:
+ self._content = None
+
+ self._content_consumed = True
+ # don't need to release the connection; that's been handled by urllib3
+ # since we exhausted the data.
+ return self._content
+
+ @property
+ def text(self):
+ """Content of the response, in unicode.
+
+ If Response.encoding is None, encoding will be guessed using
+ ``chardet``.
+
+ The encoding of the response content is determined based solely on HTTP
+ headers, following RFC 2616 to the letter. If you can take advantage of
+ non-HTTP knowledge to make a better guess at the encoding, you should
+ set ``r.encoding`` appropriately before accessing this property.
+ """
+
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
+
+ if not self.content:
+ return str('')
+
+ # Fallback to auto-detected encoding.
+ if self.encoding is None:
+ encoding = self.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding, errors='replace')
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(self.content, errors='replace')
+
+ return content
+
+ def json(self, **kwargs):
+ """Returns the json-encoded content of a response, if any.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+ """
+
+ if not self.encoding and len(self.content) > 3:
+ # No encoding set. JSON RFC 4627 section 3 states we should expect
+ # UTF-8, -16 or -32. Detect which one to use; If the detection or
+ # decoding fails, fall back to `self.text` (using chardet to make
+ # a best guess).
+ encoding = guess_json_utf(self.content)
+ if encoding is not None:
+ try:
+ return complexjson.loads(
+ self.content.decode(encoding), **kwargs
+ )
+ except UnicodeDecodeError:
+ # Wrong UTF codec detected; usually because it's not UTF-8
+ # but some other 8-bit codec. This is an RFC violation,
+ # and the server didn't bother to tell us what codec *was*
+ # used.
+ pass
+ return complexjson.loads(self.text, **kwargs)
+
+ @property
+ def links(self):
+ """Returns the parsed header links of the response, if any."""
+
+ header = self.headers.get('link')
+
+ # l = MultiDict()
+ l = {}
+
+ if header:
+ links = parse_header_links(header)
+
+ for link in links:
+ key = link.get('rel') or link.get('url')
+ l[key] = link
+
+ return l
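+
+    # Illustrative result for a ``Link: <http://example.com/2>; rel="next"``
+    # response header:
+    #   r.links == {'next': {'url': 'http://example.com/2', 'rel': 'next'}}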
+
+ def raise_for_status(self):
+ """Raises stored :class:`HTTPError`, if one occurred."""
+
+ http_error_msg = ''
+
+ if 400 <= self.status_code < 500:
+ http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+
+ elif 500 <= self.status_code < 600:
+ http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
+
+ if http_error_msg:
+ raise HTTPError(http_error_msg, response=self)
+
+ def close(self):
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
+
+ *Note: Should not normally need to be called explicitly.*
+ """
+ if not self._content_consumed:
+ return self.raw.close()
+
+ return self.raw.release_conn()
diff --git a/python/requests/requests/packages/__init__.py b/python/requests/requests/packages/__init__.py
new file mode 100644
index 000000000..971c2ad02
--- /dev/null
+++ b/python/requests/requests/packages/__init__.py
@@ -0,0 +1,36 @@
+'''
+Debian and other distributions "unbundle" requests' vendored dependencies, and
+rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
+The problem with this is that not only requests itself imports those
+dependencies, but third-party code outside of the distros' control too.
+
+In reaction to these problems, the distro maintainers replaced
+``requests.packages`` with a magical "stub module" that imports the correct
+modules. The implementations varied in quality and all had severe
+problems. For example, a symlink (or hardlink) that links the correct modules
+into place introduces problems regarding object identity, since you now have
+two modules in `sys.modules` with the same API, but different identities::
+
+ requests.packages.urllib3 is not urllib3
+
+With version ``2.5.2``, requests started to maintain its own stub, so that
+distro-specific breakage would be reduced to a minimum, even though the whole
+issue is not requests' fault in the first place. See
+https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
+request.
+'''
+
+from __future__ import absolute_import
+import sys
+
+try:
+ from . import urllib3
+except ImportError:
+ import urllib3
+ sys.modules['%s.urllib3' % __name__] = urllib3
+
+try:
+ from . import chardet
+except ImportError:
+ import chardet
+ sys.modules['%s.chardet' % __name__] = chardet
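+
+# A sketch of what the stub guarantees on an unbundled install, where the
+# vendored copy is absent and the system-wide package is used instead:
+#   >>> from requests.packages import urllib3 as vendored
+#   >>> import urllib3
+#   >>> vendored is urllib3
+#   True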
diff --git a/python/requests/requests/packages/chardet/__init__.py b/python/requests/requests/packages/chardet/__init__.py
new file mode 100644
index 000000000..82c2a48d2
--- /dev/null
+++ b/python/requests/requests/packages/chardet/__init__.py
@@ -0,0 +1,32 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+__version__ = "2.3.0"
+from sys import version_info
+
+
+def detect(aBuf):
+ if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
+ (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
+ raise ValueError('Expected a bytes object, not a unicode object')
+
+ from . import universaldetector
+ u = universaldetector.UniversalDetector()
+ u.reset()
+ u.feed(aBuf)
+ u.close()
+ return u.result
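+
+
+# Illustrative usage; the confidence value is an example, not a fixed result:
+#   >>> detect(b'\xe4\xbd\xa0\xe5\xa5\xbd')   # UTF-8 bytes for a CJK string
+#   {'confidence': 0.7525, 'encoding': 'utf-8'}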
diff --git a/python/requests/requests/packages/chardet/big5freq.py b/python/requests/requests/packages/chardet/big5freq.py
new file mode 100644
index 000000000..65bffc04b
--- /dev/null
+++ b/python/requests/requests/packages/chardet/big5freq.py
@@ -0,0 +1,925 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+#
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# The typical distribution ratio is about 25% of the ideal one, still much
+# higher than the random distribution ratio (RDR).
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+BIG5_TABLE_SIZE = 5376
+
+Big5CharToFreqOrder = (
+ 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
+3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
+ 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
+3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
+ 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
+2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
+1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
+3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
+ 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
+3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
+2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
+ 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
+5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
+1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
+ 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
+3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
+2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
+ 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
+ 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
+ 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
+5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
+4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
+ 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
+4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+ 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
+5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+ 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
+# Everything below is of no interest for detection purposes
+2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
+2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
+5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
+5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
+5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
+5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
+5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
+5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
+5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
+5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
+5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
+5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
+5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
+5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
+6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
+6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
+6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
+6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
+6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
+6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
+6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
+6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
+6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
+6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
+6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
+6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
+6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
+6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
+6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
+6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
+6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
+6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
+6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
+6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
+6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
+6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
+6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
+6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
+6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
+6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
+6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
+6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
+6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
+6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
+6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
+6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
+6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
+6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
+6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
+6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
+6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
+6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
+6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
+6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
+6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
+3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
+6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
+6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
+3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
+6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
+6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
+6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
+6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
+6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
+6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
+6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
+4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
+6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
+6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
+3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
+6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
+6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
+6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
+6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
+6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
+6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
+6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
+6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
+6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
+6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
+6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
+7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
+7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
+7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
+7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
+7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
+7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
+7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
+7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
+7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
+7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
+7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
+7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
+7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
+7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
+7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
+7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
+7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
+7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
+7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
+7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
+7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
+7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
+7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
+7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
+7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
+7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
+7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
+7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
+7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
+7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
+7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
+7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
+7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
+7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
+7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
+7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
+7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
+7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
+7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
+7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
+7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
+7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
+7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
+7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
+7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
+7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
+7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
+7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
+3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
+7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
+7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
+7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
+7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
+4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
+7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
+7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
+7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
+7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
+7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
+7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
+7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
+7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
+7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
+7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
+7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
+8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
+8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
+8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
+8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
+8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
+8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
+8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
+8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
+8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
+8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
+8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
+8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
+8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
+8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
+8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
+8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
+8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
+8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
+8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
+8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
+8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
+8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
+8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
+8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
+8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
+8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
+8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
+8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
+8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
+8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
+8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
+8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
+8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
+8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
+8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
+8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
+8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
+8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
+8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
+8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
+8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
+8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
+8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
+8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
+8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
+8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
+8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
+8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
+8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
+8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
+8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
+8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
+8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
+8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
+8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
+8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
+8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
+8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
+8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
+8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
+8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
+4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
+8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
+8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
+8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
+8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
+9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
+9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
+9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
+9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
+9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
+9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
+9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
+9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
+9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
+9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
+9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
+9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
+9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
+9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
+9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
+9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
+9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
+9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
+9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
+9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
+9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
+9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
+9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
+9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
+9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
+9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
+9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
+9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
+9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
+9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
+9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
+9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
+9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
+9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
+9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
+9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
+9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
+9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
+3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
+9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
+9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
+9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
+4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
+9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
+9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
+9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
+9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
+9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
+9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
+9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
+9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
+9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
+9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
+9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
+9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
+9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
+9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
+9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
+9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
+9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
+9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
+9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
+9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
+9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
+9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
+9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
+10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
+10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
+10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
+10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
+10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
+10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
+10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
+10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
+10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
+4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
+10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
+10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
+10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
+10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
+10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
+10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
+10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
+10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
+4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
+10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
+10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
+10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
+10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
+10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
+10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
+10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
+10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
+10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
+10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
+10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
+10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
+10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
+10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
+10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
+10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
+10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
+4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
+10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
+10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
+10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
+10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
+10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
+10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
+10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
+10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
+10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
+10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
+10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
+10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
+10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
+10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
+10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
+10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
+10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
+10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
+10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
+10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
+10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
+10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
+10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
+10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
+10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
+10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
+10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
+10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
+10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
+10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
+11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
+11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
+11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
+4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
+11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
+11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
+11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
+11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
+11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
+11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
+11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
+11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
+11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
+11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
+11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
+11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
+11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
+11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
+11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
+11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
+11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
+11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
+11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
+11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
+11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
+11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
+11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
+11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
+11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
+11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
+11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
+11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
+11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
+11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
+11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
+11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
+4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
+11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
+11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
+11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
+11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
+11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
+11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
+11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
+11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
+11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
+11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
+11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
+11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
+11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
+11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
+11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
+11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
+11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
+11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
+11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
+11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
+11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
+11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
+11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
+11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
+11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
+11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
+11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
+11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
+12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
+12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
+12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
+12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
+12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
+12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
+12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
+12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
+12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
+12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
+12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
+12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
+12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
+12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
+12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
+4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
+4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
+4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
+12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
+12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
+12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
+12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
+12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
+12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
+12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
+12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
+12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
+12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
+12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
+12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
+12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
+12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
+12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
+12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
+12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
+12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
+12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
+12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
+12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
+12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
+12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
+12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
+12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
+12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
+12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
+12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
+12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
+12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
+12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
+12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
+12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
+12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
+12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
+12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
+12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
+12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
+12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
+12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
+12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
+12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
+12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
+12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
+12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
+12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
+12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
+4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
+13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
+13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
+13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
+13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
+13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
+13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
+13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
+4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
+13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
+13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
+13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
+13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
+13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
+13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
+13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
+13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
+13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
+13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
+13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
+13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
+13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
+13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
+13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
+5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
+13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
+13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
+13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
+13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
+13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
+13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
+13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
+13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
+13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
+13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
+13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
+13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
+13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
+13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
+13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
+13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
+13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
+13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
+13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
+13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
+13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
+13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
+13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
+13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
+13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
+13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
+13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
+13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
+13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
+13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
+13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
+13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
+13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
+13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
+13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
+13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
+13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
+13968,13969,13970,13971,13972) #13973
+
+# flake8: noqa
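
The long tuple above maps a character's order to its frequency rank. Per
the "#last 512" marker, only ranks below 512 count toward the detector's
frequent-character tally; the tail exists only so indexing stays valid.
A minimal sketch of how such a table is consumed (the tally() helper is
hypothetical; the 512 cutoff and table shape come from this file):

    def tally(orders, char_to_freq_order, freq_cutoff=512):
        """Count in-table characters and those with a frequent rank."""
        total_chars = freq_chars = 0
        for order in orders:          # orders come from a prober's get_order()
            if order < 0:             # character not covered by the table
                continue
            total_chars += 1
            if (order < len(char_to_freq_order)
                    and char_to_freq_order[order] < freq_cutoff):
                freq_chars += 1
        return total_chars, freq_chars
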
diff --git a/python/requests/requests/packages/chardet/big5prober.py b/python/requests/requests/packages/chardet/big5prober.py
new file mode 100644
index 000000000..becce81e5
--- /dev/null
+++ b/python/requests/requests/packages/chardet/big5prober.py
@@ -0,0 +1,42 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import Big5DistributionAnalysis
+from .mbcssm import Big5SMModel
+
+
+class Big5Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(Big5SMModel)
+ self._mDistributionAnalyzer = Big5DistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "Big5"
diff --git a/python/requests/requests/packages/chardet/chardetect.py b/python/requests/requests/packages/chardet/chardetect.py
new file mode 100755
index 000000000..ffe892f25
--- /dev/null
+++ b/python/requests/requests/packages/chardet/chardetect.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+"""
+Script which takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import sys
+from io import open
+
+from chardet import __version__
+from chardet.universaldetector import UniversalDetector
+
+
+def description_of(lines, name='stdin'):
+ """
+    Return a string describing the probable encoding of a file or
+    iterable of byte strings.
+
+ :param lines: The lines to get the encoding of.
+ :type lines: Iterable of bytes
+ :param name: Name of file or collection of lines
+ :type name: str
+ """
+ u = UniversalDetector()
+ for line in lines:
+ u.feed(line)
+ u.close()
+ result = u.result
+ if result['encoding']:
+ return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
+ result['confidence'])
+ else:
+ return '{0}: no result'.format(name)
+
+
+def main(argv=None):
+ '''
+ Handles command line arguments and gets things started.
+
+ :param argv: List of arguments, as if specified on the command-line.
+ If None, ``sys.argv[1:]`` is used instead.
+ :type argv: list of str
+ '''
+ # Get command line arguments
+ parser = argparse.ArgumentParser(
+ description="Takes one or more file paths and reports their detected \
+ encodings",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ conflict_handler='resolve')
+    parser.add_argument('input',
+                        help='File(s) whose encoding we would like to determine.',
+                        type=argparse.FileType('rb'), nargs='*',
+                        # The detector needs bytes; on Python 3 a plain
+                        # sys.stdin yields str, so use its buffer instead.
+                        default=[sys.stdin if sys.version_info[0] < 3
+                                 else sys.stdin.buffer])
+ parser.add_argument('--version', action='version',
+ version='%(prog)s {0}'.format(__version__))
+ args = parser.parse_args(argv)
+
+ for f in args.input:
+ if f.isatty():
+ print("You are running chardetect interactively. Press " +
+ "CTRL-D twice at the start of a blank line to signal the " +
+ "end of your input. If you want help, run chardetect " +
+ "--help\n", file=sys.stderr)
+ print(description_of(f, f.name))
+
+
+if __name__ == '__main__':
+ main()
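
Besides the CLI entry point, description_of() is usable as a library call
on any iterable of byte strings, since iterating a binary file yields
bytes. A small sketch under that assumption (the file name is
hypothetical, and the import path assumes the standalone chardet layout):

    from chardet.chardetect import description_of

    with open('somefile', 'rb') as fp:
        print(description_of(fp, name='somefile'))
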
diff --git a/python/requests/requests/packages/chardet/chardistribution.py b/python/requests/requests/packages/chardet/chardistribution.py
new file mode 100644
index 000000000..4e64a00be
--- /dev/null
+++ b/python/requests/requests/packages/chardet/chardistribution.py
@@ -0,0 +1,231 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
+ EUCTW_TYPICAL_DISTRIBUTION_RATIO)
+from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
+ EUCKR_TYPICAL_DISTRIBUTION_RATIO)
+from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
+ GB2312_TYPICAL_DISTRIBUTION_RATIO)
+from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
+ BIG5_TYPICAL_DISTRIBUTION_RATIO)
+from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
+ JIS_TYPICAL_DISTRIBUTION_RATIO)
+from .compat import wrap_ord
+
+ENOUGH_DATA_THRESHOLD = 1024
+SURE_YES = 0.99
+SURE_NO = 0.01
+MINIMUM_DATA_THRESHOLD = 3
+
+
+class CharDistributionAnalysis:
+ def __init__(self):
+ # Mapping table to get frequency order from char order (as
+ # returned by get_order())
+ self._mCharToFreqOrder = None
+ self._mTableSize = None # Size of above table
+ # This is a constant value specific to each language, used in
+ # calculating confidence. See
+ # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
+ # for further detail.
+ self._mTypicalDistributionRatio = None
+ self.reset()
+
+ def reset(self):
+ """reset analyser, clear any state"""
+ # If this flag is set to True, detection is done and a conclusion
+ # has been made
+ self._mDone = False
+ self._mTotalChars = 0 # Total characters encountered
+ # The number of characters whose frequency order is less than 512
+ self._mFreqChars = 0
+
+ def feed(self, aBuf, aCharLen):
+ """feed a character with known length"""
+ if aCharLen == 2:
+ # we only care about 2-byte characters in our distribution analysis
+ order = self.get_order(aBuf)
+ else:
+ order = -1
+ if order >= 0:
+ self._mTotalChars += 1
+ # order is valid
+ if order < self._mTableSize:
+ if 512 > self._mCharToFreqOrder[order]:
+ self._mFreqChars += 1
+
+ def get_confidence(self):
+ """return confidence based on existing data"""
+ # if we didn't receive any characters in our consideration range,
+ # return a negative answer
+ if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
+ return SURE_NO
+
+ if self._mTotalChars != self._mFreqChars:
+ r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
+ * self._mTypicalDistributionRatio))
+ if r < SURE_YES:
+ return r
+
+ # normalize confidence (we don't want to be 100% sure)
+ return SURE_YES
+
+ def got_enough_data(self):
+ # It is not necessary to receive all of the data to draw a conclusion.
+ # For charset detection, a certain amount of data is enough
+ return self._mTotalChars > ENOUGH_DATA_THRESHOLD
+
+ def get_order(self, aBuf):
+ # We do not handle characters in their original encoded form, but
+ # convert each encoded character to a number, here called order.
+ # This allows multiple encodings of a language to share one
+ # frequency table.
+ return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = EUCTWCharToFreqOrder
+ self._mTableSize = EUCTW_TABLE_SIZE
+ self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-TW encoding, we are interested
+ # first byte range: 0xc4 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = wrap_ord(aBuf[0])
+ if first_char >= 0xC4:
+ return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
+ else:
+ return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = EUCKRCharToFreqOrder
+ self._mTableSize = EUCKR_TABLE_SIZE
+ self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-KR encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = wrap_ord(aBuf[0])
+ if first_char >= 0xB0:
+ return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
+ else:
+ return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = GB2312CharToFreqOrder
+ self._mTableSize = GB2312_TABLE_SIZE
+ self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for GB2312 encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if (first_char >= 0xB0) and (second_char >= 0xA1):
+ return 94 * (first_char - 0xB0) + second_char - 0xA1
+ else:
+ return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = Big5CharToFreqOrder
+ self._mTableSize = BIG5_TABLE_SIZE
+ self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for big5 encoding, we are interested
+ # first byte range: 0xa4 -- 0xfe
+ # second byte range: 0x40 -- 0x7e, 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if first_char >= 0xA4:
+ if second_char >= 0xA1:
+ return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+ else:
+ return 157 * (first_char - 0xA4) + second_char - 0x40
+ else:
+ return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = JISCharToFreqOrder
+ self._mTableSize = JIS_TABLE_SIZE
+ self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for sjis encoding, we are interested
+ # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+ # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
+ if (first_char >= 0x81) and (first_char <= 0x9F):
+ order = 188 * (first_char - 0x81)
+ elif (first_char >= 0xE0) and (first_char <= 0xEF):
+ order = 188 * (first_char - 0xE0 + 31)
+ else:
+ return -1
+ order = order + second_char - 0x40
+ if second_char > 0x7F:
+ order = -1
+ return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ CharDistributionAnalysis.__init__(self)
+ self._mCharToFreqOrder = JISCharToFreqOrder
+ self._mTableSize = JIS_TABLE_SIZE
+ self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, aBuf):
+ # for euc-JP encoding, we are interested
+ # first byte range: 0xa0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ char = wrap_ord(aBuf[0])
+ if char >= 0xA0:
+ return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xA1
+ else:
+ return -1
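
The order arithmetic above maps each two-byte character onto a dense index into the per-language frequency table. A minimal sketch checking it for Big5 (stand-alone chardet assumed; the counts and ratio in the trailing comment are illustrative):

    from chardet.chardistribution import Big5DistributionAnalysis

    dist = Big5DistributionAnalysis()
    # low second byte:  157*(0xA4-0xA4) + (0x40-0x40) = 0
    assert dist.get_order(b'\xa4\x40') == 0
    # high second byte: 157*(0xA4-0xA4) + (0xA1-0xA1) + 63 = 63
    assert dist.get_order(b'\xa4\xa1') == 63
    # get_confidence(): with, say, 90 frequent chars out of 100 and a
    # typical ratio of 0.75, r = 90 / ((100-90) * 0.75) = 12.0, which is
    # capped at SURE_YES.
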
diff --git a/python/requests/requests/packages/chardet/charsetgroupprober.py b/python/requests/requests/packages/chardet/charsetgroupprober.py
new file mode 100644
index 000000000..85e7a1c67
--- /dev/null
+++ b/python/requests/requests/packages/chardet/charsetgroupprober.py
@@ -0,0 +1,106 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+from .charsetprober import CharSetProber
+
+
+class CharSetGroupProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mActiveNum = 0
+ self._mProbers = []
+ self._mBestGuessProber = None
+
+ def reset(self):
+ CharSetProber.reset(self)
+ self._mActiveNum = 0
+ for prober in self._mProbers:
+ if prober:
+ prober.reset()
+ prober.active = True
+ self._mActiveNum += 1
+ self._mBestGuessProber = None
+
+ def get_charset_name(self):
+ if not self._mBestGuessProber:
+ self.get_confidence()
+ if not self._mBestGuessProber:
+ return None
+# self._mBestGuessProber = self._mProbers[0]
+ return self._mBestGuessProber.get_charset_name()
+
+ def feed(self, aBuf):
+ for prober in self._mProbers:
+ if not prober:
+ continue
+ if not prober.active:
+ continue
+ st = prober.feed(aBuf)
+ if not st:
+ continue
+ if st == constants.eFoundIt:
+ self._mBestGuessProber = prober
+ return self.get_state()
+ elif st == constants.eNotMe:
+ prober.active = False
+ self._mActiveNum -= 1
+ if self._mActiveNum <= 0:
+ self._mState = constants.eNotMe
+ return self.get_state()
+ return self.get_state()
+
+ def get_confidence(self):
+ st = self.get_state()
+ if st == constants.eFoundIt:
+ return 0.99
+ elif st == constants.eNotMe:
+ return 0.01
+ bestConf = 0.0
+ self._mBestGuessProber = None
+ for prober in self._mProbers:
+ if not prober:
+ continue
+ if not prober.active:
+ if constants._debug:
+ sys.stderr.write(prober.get_charset_name()
+ + ' not active\n')
+ continue
+ cf = prober.get_confidence()
+ if constants._debug:
+ sys.stderr.write('%s confidence = %s\n' %
+ (prober.get_charset_name(), cf))
+ if bestConf < cf:
+ bestConf = cf
+ self._mBestGuessProber = prober
+ if not self._mBestGuessProber:
+ return 0.0
+ return bestConf
+# else:
+# self._mBestGuessProber = self._mProbers[0]
+# return self._mBestGuessProber.get_confidence()
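
The selection logic above is an argmax over the confidences of the still-active probers. A minimal sketch with a hypothetical Stub prober exposing just enough surface (stand-alone chardet assumed):

    from chardet.charsetgroupprober import CharSetGroupProber

    class Stub:  # hypothetical stand-in, not part of chardet
        def __init__(self, name, conf): self._name, self._conf = name, conf
        def reset(self): self.active = True
        def get_charset_name(self): return self._name
        def get_confidence(self): return self._conf

    group = CharSetGroupProber()
    group._mProbers = [Stub('A', 0.3), Stub('B', 0.7)]
    group.reset()
    print(group.get_charset_name())  # 'B' -- the most confident prober wins
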
diff --git a/python/requests/requests/packages/chardet/charsetprober.py b/python/requests/requests/packages/chardet/charsetprober.py
new file mode 100644
index 000000000..97581712c
--- /dev/null
+++ b/python/requests/requests/packages/chardet/charsetprober.py
@@ -0,0 +1,62 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import re
+
+
+class CharSetProber:
+ def __init__(self):
+ pass
+
+ def reset(self):
+ self._mState = constants.eDetecting
+
+ def get_charset_name(self):
+ return None
+
+ def feed(self, aBuf):
+ pass
+
+ def get_state(self):
+ return self._mState
+
+ def get_confidence(self):
+ return 0.0
+
+ def filter_high_bit_only(self, aBuf):
+ aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
+ return aBuf
+
+ def filter_without_english_letters(self, aBuf):
+ aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
+ return aBuf
+
+ def filter_with_english_letters(self, aBuf):
+ # TODO
+ return aBuf
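
filter_high_bit_only() underpins the preprocessing used by several probers: every run of ASCII bytes collapses to a single space, leaving only high-bit bytes. A minimal sketch (stand-alone chardet assumed):

    from chardet.charsetprober import CharSetProber

    p = CharSetProber()
    print(p.filter_high_bit_only(b'abc\xe4\xb8\xadxyz\xe6\x96\x87'))
    # b' \xe4\xb8\xad \xe6\x96\x87' -- each ASCII run became one space
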
diff --git a/python/requests/requests/packages/chardet/codingstatemachine.py b/python/requests/requests/packages/chardet/codingstatemachine.py
new file mode 100644
index 000000000..8dd8c9179
--- /dev/null
+++ b/python/requests/requests/packages/chardet/codingstatemachine.py
@@ -0,0 +1,61 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart
+from .compat import wrap_ord
+
+
+class CodingStateMachine:
+ def __init__(self, sm):
+ self._mModel = sm
+ self._mCurrentBytePos = 0
+ self._mCurrentCharLen = 0
+ self.reset()
+
+ def reset(self):
+ self._mCurrentState = eStart
+
+ def next_state(self, c):
+ # for each byte we get its class
+ # if it is the first byte, we also get the character length
+ # PY3K: aBuf is a byte stream, so c is an int, not a byte
+ byteCls = self._mModel['classTable'][wrap_ord(c)]
+ if self._mCurrentState == eStart:
+ self._mCurrentBytePos = 0
+ self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
+ # from byte's class and stateTable, we get its next state
+ curr_state = (self._mCurrentState * self._mModel['classFactor']
+ + byteCls)
+ self._mCurrentState = self._mModel['stateTable'][curr_state]
+ self._mCurrentBytePos += 1
+ return self._mCurrentState
+
+ def get_current_charlen(self):
+ return self._mCurrentCharLen
+
+ def get_coding_state_machine(self):
+ return self._mModel['name']
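
A minimal sketch of stepping a machine byte by byte, using the HZ model defined later in this diff (stand-alone chardet assumed):

    from chardet.codingstatemachine import CodingStateMachine
    from chardet.escsm import HZSMModel

    sm = CodingStateMachine(HZSMModel)
    for byte in b'~{':                 # the HZ-GB-2312 shift-in sequence
        state = sm.next_state(byte)
    print(state)                       # 4: the machine is now in GB-coded text
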
diff --git a/python/requests/requests/packages/chardet/compat.py b/python/requests/requests/packages/chardet/compat.py
new file mode 100644
index 000000000..d9e30addf
--- /dev/null
+++ b/python/requests/requests/packages/chardet/compat.py
@@ -0,0 +1,34 @@
+######################## BEGIN LICENSE BLOCK ########################
+# Contributor(s):
+# Ian Cordasco - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+
+
+if sys.version_info < (3, 0):
+ base_str = (str, unicode)
+else:
+ base_str = (bytes, str)
+
+
+def wrap_ord(a):
+ if sys.version_info < (3, 0) and isinstance(a, base_str):
+ return ord(a)
+ else:
+ return a
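
A minimal sketch of why wrap_ord() exists: iterating over a byte string yields one-character str objects on Python 2 but ints on Python 3, and wrap_ord normalizes both to an int:

    from chardet.compat import wrap_ord

    for c in b'\xa4':                  # str on Python 2, int on Python 3
        print(wrap_ord(c))             # 164 either way
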
diff --git a/python/requests/requests/packages/chardet/constants.py b/python/requests/requests/packages/chardet/constants.py
new file mode 100644
index 000000000..e4d148b3c
--- /dev/null
+++ b/python/requests/requests/packages/chardet/constants.py
@@ -0,0 +1,39 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+_debug = 0
+
+eDetecting = 0
+eFoundIt = 1
+eNotMe = 2
+
+eStart = 0
+eError = 1
+eItsMe = 2
+
+SHORTCUT_THRESHOLD = 0.95
diff --git a/python/requests/requests/packages/chardet/cp949prober.py b/python/requests/requests/packages/chardet/cp949prober.py
new file mode 100644
index 000000000..ff4272f82
--- /dev/null
+++ b/python/requests/requests/packages/chardet/cp949prober.py
@@ -0,0 +1,44 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import CP949SMModel
+
+
+class CP949Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(CP949SMModel)
+ # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
+ # be different.
+ self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "CP949"
diff --git a/python/requests/requests/packages/chardet/escprober.py b/python/requests/requests/packages/chardet/escprober.py
new file mode 100644
index 000000000..80a844ff3
--- /dev/null
+++ b/python/requests/requests/packages/chardet/escprober.py
@@ -0,0 +1,86 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
+ ISO2022KRSMModel)
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .compat import wrap_ord
+
+
+class EscCharSetProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mCodingSM = [
+ CodingStateMachine(HZSMModel),
+ CodingStateMachine(ISO2022CNSMModel),
+ CodingStateMachine(ISO2022JPSMModel),
+ CodingStateMachine(ISO2022KRSMModel)
+ ]
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ for codingSM in self._mCodingSM:
+ if not codingSM:
+ continue
+ codingSM.active = True
+ codingSM.reset()
+ self._mActiveSM = len(self._mCodingSM)
+ self._mDetectedCharset = None
+
+ def get_charset_name(self):
+ return self._mDetectedCharset
+
+ def get_confidence(self):
+ if self._mDetectedCharset:
+ return 0.99
+ else:
+ return 0.00
+
+ def feed(self, aBuf):
+ for c in aBuf:
+ # PY3K: aBuf is a byte array, so c is an int, not a byte
+ for codingSM in self._mCodingSM:
+ if not codingSM:
+ continue
+ if not codingSM.active:
+ continue
+ codingState = codingSM.next_state(wrap_ord(c))
+ if codingState == constants.eError:
+ codingSM.active = False
+ self._mActiveSM -= 1
+ if self._mActiveSM <= 0:
+ self._mState = constants.eNotMe
+ return self.get_state()
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
+ return self.get_state()
+
+ return self.get_state()
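
A minimal sketch of the fan-out above: each byte is offered to every still-active state machine, and the first to reach eItsMe names the charset (stand-alone chardet assumed):

    from chardet.escprober import EscCharSetProber

    prober = EscCharSetProber()
    prober.feed(b'\x1b$B')             # ESC $ B -- a JIS X 0208 designation
    print(prober.get_charset_name())   # 'ISO-2022-JP' once its SM hits eItsMe
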
diff --git a/python/requests/requests/packages/chardet/escsm.py b/python/requests/requests/packages/chardet/escsm.py
new file mode 100644
index 000000000..bd302b4c6
--- /dev/null
+++ b/python/requests/requests/packages/chardet/escsm.py
@@ -0,0 +1,242 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart, eError, eItsMe
+
+HZ_cls = (
+1,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,0,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,4,0,5,2,0, # 78 - 7f
+1,1,1,1,1,1,1,1, # 80 - 87
+1,1,1,1,1,1,1,1, # 88 - 8f
+1,1,1,1,1,1,1,1, # 90 - 97
+1,1,1,1,1,1,1,1, # 98 - 9f
+1,1,1,1,1,1,1,1, # a0 - a7
+1,1,1,1,1,1,1,1, # a8 - af
+1,1,1,1,1,1,1,1, # b0 - b7
+1,1,1,1,1,1,1,1, # b8 - bf
+1,1,1,1,1,1,1,1, # c0 - c7
+1,1,1,1,1,1,1,1, # c8 - cf
+1,1,1,1,1,1,1,1, # d0 - d7
+1,1,1,1,1,1,1,1, # d8 - df
+1,1,1,1,1,1,1,1, # e0 - e7
+1,1,1,1,1,1,1,1, # e8 - ef
+1,1,1,1,1,1,1,1, # f0 - f7
+1,1,1,1,1,1,1,1, # f8 - ff
+)
+
+HZ_st = (
+eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
+eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
+ 5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
+ 4,eError, 4, 4, 4,eError, 4,eError,# 20-27
+ 4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
+)
+
+HZCharLenTable = (0, 0, 0, 0, 0, 0)
+
+HZSMModel = {'classTable': HZ_cls,
+ 'classFactor': 6,
+ 'stateTable': HZ_st,
+ 'charLenTable': HZCharLenTable,
+ 'name': "HZ-GB-2312"}
+
+ISO2022CN_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,3,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,4,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022CN_st = (
+eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
+eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
+eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
+eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
+ 5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
+eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
+)
+
+ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
+ 'classFactor': 9,
+ 'stateTable': ISO2022CN_st,
+ 'charLenTable': ISO2022CNCharLenTable,
+ 'name': "ISO-2022-CN"}
+
+ISO2022JP_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,2,2, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,7,0,0,0, # 20 - 27
+3,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+6,0,4,0,8,0,0,0, # 40 - 47
+0,9,5,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022JP_st = (
+eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
+eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
+eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
+eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
+eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
+eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
+eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
+eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
+)
+
+ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
+ 'classFactor': 10,
+ 'stateTable': ISO2022JP_st,
+ 'charLenTable': ISO2022JPCharLenTable,
+ 'name': "ISO-2022-JP"}
+
+ISO2022KR_cls = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,3,0,0,0, # 20 - 27
+0,4,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,5,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022KR_st = (
+eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
+eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
+eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
+eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
+eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
+)
+
+ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
+
+ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
+ 'classFactor': 6,
+ 'stateTable': ISO2022KR_st,
+ 'charLenTable': ISO2022KRCharLenTable,
+ 'name': "ISO-2022-KR"}
+
+# flake8: noqa
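
A minimal sketch of how these model dicts are indexed, mirroring CodingStateMachine.next_state(): the flat state table is addressed at state * classFactor + byte_class (stand-alone chardet assumed):

    from chardet.escsm import HZSMModel

    state, byte = 0, 0x7E                          # eStart, '~'
    cls = HZSMModel['classTable'][byte]            # -> 2
    state = HZSMModel['stateTable'][state * HZSMModel['classFactor'] + cls]
    print(cls, state)                              # 2 3
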
diff --git a/python/requests/requests/packages/chardet/eucjpprober.py b/python/requests/requests/packages/chardet/eucjpprober.py
new file mode 100644
index 000000000..8e64fdcc2
--- /dev/null
+++ b/python/requests/requests/packages/chardet/eucjpprober.py
@@ -0,0 +1,90 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJPSMModel
+
+
+class EUCJPProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCJPSMModel)
+ self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
+ self._mContextAnalyzer = EUCJPContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ MultiByteCharSetProber.reset(self)
+ self._mContextAnalyzer.reset()
+
+ def get_charset_name(self):
+ return "EUC-JP"
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mContextAnalyzer.feed(self._mLastChar, charLen)
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mContextAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ contxtCf = self._mContextAnalyzer.get_confidence()
+ distribCf = self._mDistributionAnalyzer.get_confidence()
+ return max(contxtCf, distribCf)
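
The prober combines two signals and trusts whichever analyser is more confident. A minimal sketch (Python 3 and stand-alone chardet assumed; the printed confidence depends on how much kana context accrues):

    from chardet.eucjpprober import EUCJPProber

    prober = EUCJPProber()
    prober.feed('\u306e\u306e\u306e'.encode('euc-jp'))   # Hiragana 'no' x3
    print(prober.get_charset_name(), prober.get_confidence())
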
diff --git a/python/requests/requests/packages/chardet/euckrfreq.py b/python/requests/requests/packages/chardet/euckrfreq.py
new file mode 100644
index 000000000..a179e4c21
--- /dev/null
+++ b/python/requests/requests/packages/chardet/euckrfreq.py
@@ -0,0 +1,596 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer technology
+
+# 128 --> 0.79
+# 256 --> 0.92
+# 512 --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+EUCKRCharToFreqOrder = ( \
+ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
+ 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+ 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
+ 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+ 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
+ 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+ 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
+1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
+2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
+1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
+2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
+1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+ 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+ 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
+ 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
+# Everything below is of no interest for detection purposes
+2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
+2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
+2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
+2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
+2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
+2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
+2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
+2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
+2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
+2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
+2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
+2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
+2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
+2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
+1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
+2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
+2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
+2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
+2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
+2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
+2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
+2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
+2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
+2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
+3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
+3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
+3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
+3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
+3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
+3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
+3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
+3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
+3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
+3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
+3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
+3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
+3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
+3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
+3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
+3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
+3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
+3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
+3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
+3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
+3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
+3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
+3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
+3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
+3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
+3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
+3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
+3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
+3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
+3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
+3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
+3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
+1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
+1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
+3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
+3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
+3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
+3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
+3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
+3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
+3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
+3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
+3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
+3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
+3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
+3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
+3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
+1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
+3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
+3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
+3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
+3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
+3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
+3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
+3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
+1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
+3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
+3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
+3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
+3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
+1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
+3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
+3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
+3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
+3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
+3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
+3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
+3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
+4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
+4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
+1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
+4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
+4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
+4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
+4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
+4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
+4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
+4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
+4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
+4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
+4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
+4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
+4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
+4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
+4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
+4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
+4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
+4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
+4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
+4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
+4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
+4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
+4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
+4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
+4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
+4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
+4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
+4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
+4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
+4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
+4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
+4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
+4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
+4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
+4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
+4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
+4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
+4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
+4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
+4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
+4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
+4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
+4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
+1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
+4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
+4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
+4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
+4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
+4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
+4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
+4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
+4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
+4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
+4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
+4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
+4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
+4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
+4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
+4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
+4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
+4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
+4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
+4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
+4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
+5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
+5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
+1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
+5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
+5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
+5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
+5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
+5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
+1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
+5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
+5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
+5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
+5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
+5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
+1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
+5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
+5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
+5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
+5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
+5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
+5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
+5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
+5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
+5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
+5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
+5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
+5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
+5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
+5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
+5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
+5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
+5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
+5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
+5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
+5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
+5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
+5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
+5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
+1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
+5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
+5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
+5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
+5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
+1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
+5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
+5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
+5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
+5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
+5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
+1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
+5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
+1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
+5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
+5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
+5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
+5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
+5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
+5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
+5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
+5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
+5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
+5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
+5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
+5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
+5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
+5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
+6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
+6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
+6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
+6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
+6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
+6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
+6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
+6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
+6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
+6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
+6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
+6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
+6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
+6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
+6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
+6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
+6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
+6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
+6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
+6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
+6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
+6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
+6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
+6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
+6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
+6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
+6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
+6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
+6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
+6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
+6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
+6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
+6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
+6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
+6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
+1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
+6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
+6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
+6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
+6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
+6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
+1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
+6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
+1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
+6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
+6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
+6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
+1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
+6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
+6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
+6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
+6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
+6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
+6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
+6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
+6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
+6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
+6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
+6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
+6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
+6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
+6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
+6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
+6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
+6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
+7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
+7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
+7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
+7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
+7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
+7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
+7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
+7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
+7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
+7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
+7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
+7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
+7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
+7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
+7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
+7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
+7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
+7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
+7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
+7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
+7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
+7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
+7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
+7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
+7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
+7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
+7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
+7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
+7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
+7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
+7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
+7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
+7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
+7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
+7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
+7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
+7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
+7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
+7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
+7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
+7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
+7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
+7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
+7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
+7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
+7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
+7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
+7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
+7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
+7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
+7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
+7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
+7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
+7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
+7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
+7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
+7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
+7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
+7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
+7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
+7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
+7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
+8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
+8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
+8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
+8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
+8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
+8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
+8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
+8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
+8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
+8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
+8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
+8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
+8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
+8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
+8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
+8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
+8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
+8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
+8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
+8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
+8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
+8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
+8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
+8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
+8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
+8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
+8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
+8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
+8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
+8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
+8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
+8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
+8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
+8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
+8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
+8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
+8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
+8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
+8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
+8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
+8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
+8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
+8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
+8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
+8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
+8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
+8736,8737,8738,8739,8740,8741)
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/euckrprober.py b/python/requests/requests/packages/chardet/euckrprober.py
new file mode 100644
index 000000000..5982a46b6
--- /dev/null
+++ b/python/requests/requests/packages/chardet/euckrprober.py
@@ -0,0 +1,42 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import EUCKRSMModel
+
+
+class EUCKRProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCKRSMModel)
+ self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "EUC-KR"
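+
+# Usage sketch (assuming the chardet 2.x prober API inherited from
+# MultiByteCharSetProber; this comment is not part of the upstream module):
+#
+#   prober = EUCKRProber()
+#   prober.feed(b"\xbe\xc8\xb3\xe7")   # "annyeong" encoded as EUC-KR
+#   prober.get_charset_name()          # "EUC-KR"
+#   prober.get_confidence()            # float in [0.0, 1.0]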
diff --git a/python/requests/requests/packages/chardet/euctwfreq.py b/python/requests/requests/packages/chardet/euctwfreq.py
new file mode 100644
index 000000000..576e7504d
--- /dev/null
+++ b/python/requests/requests/packages/chardet/euctwfreq.py
@@ -0,0 +1,428 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from big5 work
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# The Typical Distribution Ratio is about 25% of the Ideal one, but still much higher than the RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
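+
+# Reading the coverage figures above as the cumulative share of text covered
+# by the N most frequent characters, a quick sanity check of the quoted
+# ratios (a sketch, not part of the upstream module):
+#
+#   ideal_ratio   = 0.74851 / (1 - 0.74851)   # ~2.98
+#   random_ratio  = 512 / (5401 - 512)        # ~0.105
+#   typical_ratio = 0.25 * ideal_ratio        # ~0.75, the constant above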
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 8102
+
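+# A sketch of how the matching distribution analyser is assumed to index
+# this table (chardet's EUCTWDistributionAnalysis; two-byte EUC-TW
+# characters, first byte 0xC4-0xFE, second byte 0xA1-0xFE):
+#
+#   order = 94 * (first_byte - 0xC4) + (second_byte - 0xA1)
+#   rank  = EUCTWCharToFreqOrder[order]  # low rank means frequent character
+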
+EUCTWCharToFreqOrder = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
+ 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
+3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
+3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
+ 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
+2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
+2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
+ 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
+ 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
+3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
+1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
+1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
+1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
+2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
+ 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
+4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
+1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
+7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
+2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
+ 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
+ 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
+ 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
+ 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
+7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
+ 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
+1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
+ 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
+ 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
+7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
+1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
+ 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
+3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
+4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
+3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
+ 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
+ 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
+1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
+4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
+3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
+3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
+2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
+7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
+3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
+7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
+1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
+2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
+1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
+ 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
+1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
+4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
+3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
+ 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
+ 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
+ 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
+2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
+7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
+1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
+2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
+1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
+1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
+7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
+7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
+7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
+3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
+4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
+1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
+7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
+2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
+7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
+3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
+3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
+7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
+2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
+7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
+ 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
+4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
+2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
+7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
+3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
+2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
+2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
+ 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
+2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
+1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
+1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
+2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
+1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
+7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
+7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
+2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
+4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
+1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
+7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
+ 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
+4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
+ 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
+2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
+ 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
+1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
+1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
+ 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
+3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
+3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
+1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
+3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
+7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
+7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
+1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
+2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
+1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
+3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
+2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
+3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
+2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
+4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
+4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
+3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
+ 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
+3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
+ 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
+3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
+3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
+3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
+1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
+7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
+ 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
+7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
+1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
+ 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
+4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
+3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
+ 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
+2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
+2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
+3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
+1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
+4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
+2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
+1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
+1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
+2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
+3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
+1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
+7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
+1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
+4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
+1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
+ 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
+1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
+3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
+3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
+2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
+1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
+4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
+ 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
+7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
+2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
+3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
+4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
+ 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
+7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
+7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
+1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
+4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
+3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
+2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
+3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
+2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
+1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
+4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
+3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
+3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
+2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
+4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
+7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
+3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
+2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
+3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
+1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
+2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
+3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
+4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
+2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
+2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
+7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
+1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
+2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
+1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
+3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
+4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
+2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
+3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
+3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
+2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
+4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
+2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
+3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
+4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
+7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
+3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
+ 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
+1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
+4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
+1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
+4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
+7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
+ 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
+7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
+2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
+1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
+1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
+3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
+ 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
+ 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
+ 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
+3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
+2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
+ 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
+7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
+1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
+3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
+7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
+1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
+7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
+4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
+1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
+2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
+2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
+4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
+ 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
+ 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
+3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
+3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
+1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
+2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
+7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
+1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
+1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
+3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
+ 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
+1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
+4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
+7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
+2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
+3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
+ 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
+1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
+2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
+2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
+7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
+7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
+7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
+2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
+2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
+1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
+4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
+3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
+3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
+4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
+4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
+2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
+2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
+7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
+4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
+7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
+2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
+1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
+3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
+4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
+2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
+ 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
+2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
+1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
+2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
+2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
+4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
+7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
+1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
+3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
+7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
+1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
+8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
+2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
+8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
+2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
+2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
+8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
+8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
+8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
+ 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
+8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
+4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
+3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
+8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
+1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
+8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
+ 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
+1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
+ 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
+4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
+1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
+4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
+1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
+ 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
+3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
+4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
+8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
+ 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
+3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
+ 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
+2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
+# Everything below is of no interest for detection purposes
+2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
+2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
+8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
+8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
+8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
+8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
+8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
+8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
+8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
+8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
+8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
+8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
+8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
+8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
+8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
+8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
+8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
+8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
+8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
+8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
+8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
+8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
+8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
+8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
+8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
+8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
+8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
+8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
+8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
+8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
+8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
+8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
+8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
+8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
+8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
+8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
+8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
+8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
+8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
+8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/euctwprober.py b/python/requests/requests/packages/chardet/euctwprober.py
new file mode 100644
index 000000000..fe652fe37
--- /dev/null
+++ b/python/requests/requests/packages/chardet/euctwprober.py
@@ -0,0 +1,41 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCTWDistributionAnalysis
+from .mbcssm import EUCTWSMModel
+
+class EUCTWProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(EUCTWSMModel)
+ self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "EUC-TW"
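+
+# A minimal usage sketch (an added illustration; in normal operation the
+# prober is created and fed by MBCSGroupProber rather than used directly).
+# `sample` stands for a hypothetical EUC-TW encoded byte string:
+#
+#     prober = EUCTWProber()
+#     prober.feed(sample)          # accumulate evidence from the buffer
+#     prober.get_charset_name()    # -> "EUC-TW"
+#     prober.get_confidence()     # float in [0.0, 1.0]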
diff --git a/python/requests/requests/packages/chardet/gb2312freq.py b/python/requests/requests/packages/chardet/gb2312freq.py
new file mode 100644
index 000000000..1238f510f
--- /dev/null
+++ b/python/requests/requests/packages/chardet/gb2312freq.py
@@ -0,0 +1,472 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# GB2312 most frequently used character table
+#
+# Char to FreqOrder table, from hz6763
+
+# top N chars --> cumulative coverage -- marginal gain:
+#  512 --> 0.79 -- 0.79
+# 1024 --> 0.92 -- 0.13
+# 2048 --> 0.98 -- 0.06
+# 6768 --> 1.00 -- 0.02
+#
+# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
+# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
+#
+# The typical distribution ratio is about 25% of the ideal one, but still much higher than the RDR
+
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
+
+GB2312_TABLE_SIZE = 3760
+
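+# A worked reading of the ratios above (an added illustration): the 512 most
+# frequent characters cover about 79.135% of typical GB2312 text, so
+#     ideal  = 0.79135 / (1 - 0.79135)  ~ 3.79
+# while a uniform draw over the 3755 level-1 hanzi gives
+#     random = 512 / (3755 - 512)       ~ 0.157
+# GB2312DistributionAnalysis uses GB2312_TYPICAL_DISTRIBUTION_RATIO when
+# scoring how GB2312-like an observed frequent/rare character split is.
+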
+GB2312CharToFreqOrder = (
+1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
+2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
+2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
+ 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
+1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
+ 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
+1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
+2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
+3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
+ 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
+1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
+ 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
+2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
+ 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
+2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
+1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
+3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
+ 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
+1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
+ 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
+2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
+1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
+3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
+1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
+2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
+1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
+ 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
+3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
+3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
+ 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
+3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
+ 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
+1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
+3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
+2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
+1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
+ 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
+1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
+4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
+ 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
+3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
+3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
+ 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
+1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
+2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
+1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
+1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
+ 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
+3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
+3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
+4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
+ 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
+3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
+1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
+1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
+4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
+ 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
+ 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
+3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
+1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
+ 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
+1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
+2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
+ 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
+ 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
+ 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
+3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
+4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
+3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
+ 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
+2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
+2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
+2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
+ 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
+2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
+ 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
+ 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
+ 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
+3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
+2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
+2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
+1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
+ 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
+2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
+ 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
+ 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
+1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
+1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
+ 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
+ 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
+1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
+2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
+3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
+2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
+2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
+2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
+3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
+1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
+1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
+2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
+1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
+3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
+1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
+1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
+3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
+ 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
+2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
+1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
+4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
+1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
+1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
+3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
+1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
+ 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
+ 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
+1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
+ 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
+1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
+1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
+ 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
+3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
+4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
+3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
+2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
+2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
+1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
+3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
+2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
+1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
+1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
+ 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
+2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
+2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
+3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
+4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
+3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
+ 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
+3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
+2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
+1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
+ 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
+ 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
+3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
+4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
+2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
+1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
+1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
+ 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
+1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
+3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
+ 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
+ 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
+1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
+ 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
+1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
+ 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
+2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
+ 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
+2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
+2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
+1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
+1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
+2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
+ 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
+1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
+1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
+2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
+2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
+3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
+1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
+4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
+ 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
+ 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
+3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
+1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
+ 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
+3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
+1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
+4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
+1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
+2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
+1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
+ 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
+1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
+3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
+ 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
+2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
+ 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
+1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
+1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
+1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
+3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
+2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
+3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
+3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
+3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
+ 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
+2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
+ 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
+2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
+ 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
+1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
+ 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
+ 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
+1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
+3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
+3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
+1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
+1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
+3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
+2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
+2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
+1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
+3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
+ 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
+4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
+1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
+2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
+3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
+3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
+1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
+ 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
+ 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
+2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
+ 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
+1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
+ 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
+1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
+1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
+1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
+1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
+1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
+ 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
+ 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
+# Everything below is of no interest for detection purposes
+5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
+5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
+5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
+3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
+4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
+5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
+5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
+4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
+4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
+4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
+4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
+3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
+6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
+4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
+6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
+4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
+4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
+4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
+5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
+3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
+4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
+3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
+4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
+4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
+6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
+6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
+5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
+4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
+6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
+4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
+5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
+5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
+5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
+6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
+3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
+6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
+4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
+5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
+6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
+6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
+4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
+5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
+4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
+5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
+5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
+4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
+4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
+5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
+4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
+4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
+5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
+4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
+4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
+4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
+5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
+5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
+4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
+3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
+4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
+6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
+5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
+5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
+4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
+6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
+5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
+6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
+4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
+5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
+5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
+3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
+5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
+6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
+4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
+6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
+4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
+4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
+6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
+3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
+6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
+4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
+3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
+3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
+3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
+4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
+2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
+5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
+4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
+5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
+5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
+5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
+4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
+5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
+4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
+5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
+1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
+3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
+4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
+4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
+6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
+4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
+5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
+3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
+5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
+5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
+5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
+3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
+5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
+5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
+3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
+5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
+5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
+5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
+6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
+4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
+6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
+4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
+3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
+4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
+5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
+5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
+5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
+3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
+3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
+6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
+6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
+5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
+6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
+6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
+6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
+6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
+6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
+5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
+6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
+6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
+3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
+3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
+4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
+4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
+3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
+5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
+5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
+5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
+5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
+5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
+4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
+5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
+6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
+5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
+4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
+4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
+6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
+3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
+4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
+4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
+5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
+6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
+6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
+4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
+6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
+5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
+5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
+5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
+5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
+5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
+4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
+5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
+5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
+5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
+5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
+6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
+4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
+5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
+4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
+4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
+6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
+4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
+6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
+3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
+5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
+6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
+6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
+6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
+5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
+6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
+6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
+3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
+5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
+4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/gb2312prober.py b/python/requests/requests/packages/chardet/gb2312prober.py
new file mode 100644
index 000000000..0325a2d86
--- /dev/null
+++ b/python/requests/requests/packages/chardet/gb2312prober.py
@@ -0,0 +1,41 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import GB2312DistributionAnalysis
+from .mbcssm import GB2312SMModel
+
+class GB2312Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(GB2312SMModel)
+ self._mDistributionAnalyzer = GB2312DistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "GB2312"
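+
+# In normal use this prober is instantiated by MBCSGroupProber and driven
+# through the UniversalDetector front end. A sketch (an added illustration;
+# `data` stands for a hypothetical GB2312-encoded byte string):
+#
+#     from chardet.universaldetector import UniversalDetector
+#     detector = UniversalDetector()
+#     detector.feed(data)
+#     detector.close()
+#     detector.result   # e.g. {'encoding': 'GB2312', 'confidence': 0.99}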
diff --git a/python/requests/requests/packages/chardet/hebrewprober.py b/python/requests/requests/packages/chardet/hebrewprober.py
new file mode 100644
index 000000000..ba225c5ef
--- /dev/null
+++ b/python/requests/requests/packages/chardet/hebrewprober.py
@@ -0,0 +1,283 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Shy Shalom
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .constants import eNotMe, eDetecting
+from .compat import wrap_ord
+
+# This prober doesn't actually recognize a language or a charset.
+# It is a helper prober for the use of the Hebrew model probers.
+
+### General ideas of the Hebrew charset recognition ###
+#
+# Four main charsets exist in Hebrew:
+# "ISO-8859-8" - Visual Hebrew
+# "windows-1255" - Logical Hebrew
+# "ISO-8859-8-I" - Logical Hebrew
+# "x-mac-hebrew" - ?? Logical Hebrew ??
+#
+# Both "ISO" charsets use a completely identical set of code points, whereas
+# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
+# these code points. windows-1255 defines additional characters in the range
+# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
+# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
+# x-mac-hebrew defines similar additional code points but with a different
+# mapping.
+#
+# As far as an average Hebrew text with no diacritics is concerned, all four
+# charsets are identical with respect to code points, meaning that for the
+# main Hebrew alphabet all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc."
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual", i.e. ordered from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+# backwards while line order is natural. For charset recognition purposes
+#   the line order is unimportant (in fact, for this implementation, even
+# word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+# specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+# that contains special punctuation marks or diacritics is displayed with
+# some unconverted characters showing as question marks. This problem might
+# be corrected using another model prober for x-mac-hebrew. Due to the fact
+# that x-mac-hebrew texts are so rare, writing another model prober isn't
+# worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, queried and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision between
+# the two is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. Disjointed from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the maintained final-letter scores and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
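+#
+# A small worked example of the final-letter scoring (an added illustration,
+# not part of the original comment): in logically ordered Hebrew, a word such
+# as "shalom" ends in a final Mem (0xed), so when the following space is seen
+# case (1) in feed() below fires and the logical score gains a point. In a
+# visually ordered buffer the same word is stored reversed, the final Mem
+# sits at the *start* of the word, and case (3) fires for the visual score
+# instead.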
+
+# windows-1255 / ISO-8859-8 code points of interest
+FINAL_KAF = 0xea
+NORMAL_KAF = 0xeb
+FINAL_MEM = 0xed
+NORMAL_MEM = 0xee
+FINAL_NUN = 0xef
+NORMAL_NUN = 0xf0
+FINAL_PE = 0xf3
+NORMAL_PE = 0xf4
+FINAL_TSADI = 0xf5
+NORMAL_TSADI = 0xf6
+
+# Minimum Visual vs Logical final letter score difference.
+# If the difference is below this, don't rely solely on the final letter score
+# distance.
+MIN_FINAL_CHAR_DISTANCE = 5
+
+# Minimum Visual vs Logical model score difference.
+# If the difference is below this, don't rely at all on the model score
+# distance.
+MIN_MODEL_DISTANCE = 0.01
+
+VISUAL_HEBREW_NAME = "ISO-8859-8"
+LOGICAL_HEBREW_NAME = "windows-1255"
+
+
+class HebrewProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mLogicalProber = None
+ self._mVisualProber = None
+ self.reset()
+
+ def reset(self):
+ self._mFinalCharLogicalScore = 0
+ self._mFinalCharVisualScore = 0
+        # The last two characters seen in the previous buffer;
+        # mPrev and mBeforePrev are initialized to a space in order to
+        # simulate a word delimiter at the beginning of the data.
+ self._mPrev = ' '
+ self._mBeforePrev = ' '
+ # These probers are owned by the group prober.
+
+ def set_model_probers(self, logicalProber, visualProber):
+ self._mLogicalProber = logicalProber
+ self._mVisualProber = visualProber
+
+ def is_final(self, c):
+ return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
+ FINAL_TSADI]
+
+ def is_non_final(self, c):
+ # The normal Tsadi is not a good Non-Final letter due to words like
+ # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
+ # apostrophe is converted to a space in FilterWithoutEnglishLetters
+ # causing the Non-Final tsadi to appear at an end of a word even
+ # though this is not the case in the original text.
+        # The letters Pe and Kaf occasionally show a related behavior and are
+        # not perfect Non-Final letters either: words like 'Pop', 'Winamp' and
+        # 'Mubarak', for example, legitimately end with a Non-Final Pe or Kaf.
+        # However, the benefit of keeping these letters as Non-Final
+        # indicators outweighs the damage, since such words are quite rare.
+ return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
+
+ def feed(self, aBuf):
+ # Final letter analysis for logical-visual decision.
+ # Look for evidence that the received buffer is either logical Hebrew
+ # or visual Hebrew.
+ # The following cases are checked:
+ # 1) A word longer than 1 letter, ending with a final letter. This is
+ # an indication that the text is laid out "naturally" since the
+ # final letter really appears at the end. +1 for logical score.
+ # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+ # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
+ # should not end with the Non-Final form of that letter. Exceptions
+ # to this rule are mentioned above in isNonFinal(). This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score
+ # 3) A word longer than 1 letter, starting with a final letter. Final
+ # letters should not appear at the beginning of a word. This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score.
+ #
+ # The visual score and logical score are accumulated throughout the
+ # text and are finally checked against each other in GetCharSetName().
+ # No checking for final letters in the middle of words is done since
+ # that case is not an indication for either Logical or Visual text.
+ #
+ # We automatically filter out all 7-bit characters (replace them with
+ # spaces) so the word boundary detection works properly. [MAP]
+
+ if self.get_state() == eNotMe:
+ # Both model probers say it's not them. No reason to continue.
+ return eNotMe
+
+ aBuf = self.filter_high_bit_only(aBuf)
+
+ for cur in aBuf:
+ if cur == ' ':
+ # We stand on a space - a word just ended
+ if self._mBeforePrev != ' ':
+ # next-to-last char was not a space, so the word ending at
+ # self._mPrev is longer than one letter
+ if self.is_final(self._mPrev):
+ # case (1) [-2:not space][-1:final letter][cur:space]
+ self._mFinalCharLogicalScore += 1
+ elif self.is_non_final(self._mPrev):
+ # case (2) [-2:not space][-1:Non-Final letter][
+ # cur:space]
+ self._mFinalCharVisualScore += 1
+ else:
+ # Not standing on a space
+ if ((self._mBeforePrev == ' ') and
+ (self.is_final(self._mPrev)) and (cur != ' ')):
+ # case (3) [-2:space][-1:final letter][cur:not space]
+ self._mFinalCharVisualScore += 1
+ self._mBeforePrev = self._mPrev
+ self._mPrev = cur
+
+ # Keep detecting until the end of the data, or until both model probers
+ # return eNotMe (handled above).
+ return eDetecting
+
+ def get_charset_name(self):
+ # Make the decision: is it Logical or Visual?
+ # If the final letter score distance is dominant enough, rely on it.
+ finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
+ if finalsub >= MIN_FINAL_CHAR_DISTANCE:
+ return LOGICAL_HEBREW_NAME
+ if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
+ return VISUAL_HEBREW_NAME
+
+ # It's not dominant enough, try to rely on the model scores instead.
+ modelsub = (self._mLogicalProber.get_confidence()
+ - self._mVisualProber.get_confidence())
+ if modelsub > MIN_MODEL_DISTANCE:
+ return LOGICAL_HEBREW_NAME
+ if modelsub < -MIN_MODEL_DISTANCE:
+ return VISUAL_HEBREW_NAME
+
+ # Still no good, back to final letter distance, maybe it'll save the
+ # day.
+ if finalsub < 0.0:
+ return VISUAL_HEBREW_NAME
+
+ # Either finalsub > 0 (Logical), or we don't know what to do;
+ # default to Logical.
+ return LOGICAL_HEBREW_NAME
+
+ def get_state(self):
+ # Remain active as long as either of the model probers is still active.
+ if (self._mLogicalProber.get_state() == eNotMe) and \
+ (self._mVisualProber.get_state() == eNotMe):
+ return eNotMe
+ return eDetecting
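+
+
+# A minimal, self-contained sketch of the final-letter heuristic documented
+# in feed() above, assuming a Python 3 bytes buffer already filtered down to
+# high-bit characters and spaces. The helper below is illustrative only (it
+# is not part of the chardet API) and ignores the model probers; it simply
+# applies cases (1)-(3) and returns the (logical, visual) score pair that
+# get_charset_name() would compare against MIN_FINAL_CHAR_DISTANCE.
+def _final_letter_scores(buf):
+    logical = visual = 0
+    before_prev = prev = 0x20
+    finals = (FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE, FINAL_TSADI)
+    non_finals = (NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE)
+    for cur in buf:
+        if cur == 0x20:  # a word just ended
+            if before_prev != 0x20:  # the word had more than one letter
+                if prev in finals:
+                    logical += 1  # case (1): word ends with a final letter
+                elif prev in non_finals:
+                    visual += 1  # case (2): word ends with a non-final letter
+        elif before_prev == 0x20 and prev in finals:
+            visual += 1  # case (3): word starts with a final letter
+        before_prev, prev = prev, cur
+    return logical, visual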
diff --git a/python/requests/requests/packages/chardet/jisfreq.py b/python/requests/requests/packages/chardet/jisfreq.py
new file mode 100644
index 000000000..064345b08
--- /dev/null
+++ b/python/requests/requests/packages/chardet/jisfreq.py
@@ -0,0 +1,569 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer
+# technology.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP.
+# Entries are sorted by frequency order.
+
+# 128 --> 0.77094
+# 256 --> 0.85710
+# 512 --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio, about 25% of the Ideal Distribution Ratio
+
+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
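+
+# A short sketch of how the companion distribution analyser (in
+# chardistribution.py) uses this ratio: characters that map into the frequent
+# region of the order table below are tallied, and confidence is
+# freq / ((total - freq) * ratio), capped below 1.0. The function name and
+# the minimum-sample guard value are illustrative, not the vendored API.
+def _jis_distribution_confidence(freq_chars, total_chars):
+    if total_chars <= 0 or freq_chars <= 3:  # not enough data yet
+        return 0.01
+    if freq_chars == total_chars:
+        return 0.99
+    r = freq_chars / float((total_chars - freq_chars)
+                           * JIS_TYPICAL_DISTRIBUTION_RATIO)
+    return min(r, 0.99)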
+
+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368
+
+JISCharToFreqOrder = (
+ 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
+2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
+3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
+ 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
+ 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
+1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
+ 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
+5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
+ 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
+ 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
+ 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
+ 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
+ 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
+4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
+5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
+5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
+5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
+5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
+5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
+5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
+3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
+5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
+5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
+5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
+5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
+5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
+5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
+5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
+5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
+5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
+5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
+5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
+5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
+5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
+5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
+5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
+5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
+5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
+5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
+5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
+5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
+5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
+5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
+5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
+5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
+5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
+5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
+5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
+5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
+5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
+5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
+5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
+5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
+5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
+5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
+5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
+6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
+6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
+6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
+6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
+6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
+6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
+6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
+6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
+4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
+ 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
+ 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
+1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
+1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
+ 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
+3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
+3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
+ 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
+3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
+3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
+ 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
+2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
+ 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
+3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
+1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
+ 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
+1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
+ 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
+2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
+2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
+2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
+2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
+1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
+1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
+1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
+1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
+2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
+1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
+2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
+1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
+1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
+1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
+1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
+1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
+1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
+ 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
+ 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
+1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
+2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
+2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
+2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
+3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
+3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
+ 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
+3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
+1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
+ 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
+2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
+1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
+ 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
+3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
+4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
+2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
+1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
+2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
+1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
+ 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
+ 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
+1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
+2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
+2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
+2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
+3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
+1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
+2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
+ 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
+ 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
+ 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
+1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
+2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
+ 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
+1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
+1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
+ 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
+1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
+1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
+1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
+ 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
+2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
+ 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
+2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
+3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
+2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
+1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
+6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
+1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
+2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
+1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
+ 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
+ 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
+3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
+3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
+1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
+1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
+1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
+1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
+ 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
+ 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
+2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
+ 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
+3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
+2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
+ 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
+1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
+2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
+ 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
+1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
+ 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
+4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
+2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
+1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
+ 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
+1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
+2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
+ 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
+6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
+1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
+1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
+2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
+3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
+ 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
+3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
+1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
+ 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
+1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
+ 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
+3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
+ 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
+2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
+ 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
+4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
+2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
+1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
+1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
+1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
+ 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
+1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
+3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
+1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
+3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
+ 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
+ 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
+ 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
+2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
+1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
+ 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
+1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
+ 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
+1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
+ 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
+ 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
+ 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
+1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
+1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
+2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
+4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
+ 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
+1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
+ 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
+1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
+3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
+1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
+2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
+2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
+1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
+1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
+2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
+ 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
+2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
+1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
+1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
+1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
+1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
+3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
+2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
+2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
+ 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
+3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
+3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
+1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
+2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
+1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
+2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
+# Everything below is of no interest for detection purposes
+2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
+6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
+6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
+6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
+6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
+4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
+4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
+3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
+3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
+4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
+3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
+6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
+4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
+6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
+6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
+6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
+6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
+6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
+6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
+3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
+3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
+6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
+2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
+4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
+4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
+4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
+6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
+3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
+4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
+4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
+6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
+4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
+6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
+3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
+2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
+4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
+2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
+6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
+4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
+6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
+6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
+6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
+4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
+6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
+2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
+6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
+4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
+6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
+4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
+4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
+6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
+6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
+6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
+3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
+1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
+3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
+3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
+4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
+6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
+3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
+6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
+3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
+3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
+2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
+6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
+6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
+3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
+6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
+3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
+6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
+6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
+6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
+4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
+6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
+4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
+3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
+3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
+6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
+6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
+4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
+6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
+6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
+6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
+6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
+6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
+6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
+4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
+4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
+3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
+6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
+4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
+2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
+6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
+6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
+4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
+2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
+4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
+2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
+4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
+4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
+4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
+6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
+3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
+6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
+3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
+6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
+2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
+3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
+7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
+2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
+3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
+3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
+3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
+3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
+7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
+7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
+7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
+7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
+7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
+4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
+3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
+3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
+4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
+3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
+3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
+7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
+4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
+7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
+7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
+7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
+7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
+7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
+4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
+4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
+7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
+3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
+4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
+7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
+7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
+4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
+3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
+3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
+7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
+4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
+4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
+4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
+4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
+4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
+4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
+7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
+7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
+7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
+7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
+7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
+2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
+3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
+7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
+7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
+3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
+4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
+3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
+3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
+2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
+7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
+7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
+4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
+3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
+3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
+7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
+7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
+7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
+4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
+7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
+2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
+3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
+4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
+7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
+4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
+4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
+7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
+7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
+5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
+7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
+7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
+7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
+7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
+7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
+5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
+5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
+7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
+3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
+7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
+7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
+3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
+7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
+7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
+1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
+3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
+4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
+2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
+3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
+2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
+5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
+4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
+4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
+5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
+7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
+7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
+7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
+7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
+3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
+7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
+3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
+7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
+4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
+7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
+7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
+7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
+7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
+7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
+7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
+7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
+7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
+7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
+7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
+7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
+7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
+8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
+8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
+8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
+8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
+8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
+8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
+8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
+8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
+8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
+8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
+8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
+8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
+8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
+8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
+8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
+8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
+8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/jpcntx.py b/python/requests/requests/packages/chardet/jpcntx.py
new file mode 100644
index 000000000..59aeb6a87
--- /dev/null
+++ b/python/requests/requests/packages/chardet/jpcntx.py
@@ -0,0 +1,227 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .compat import wrap_ord
+
+NUM_OF_CATEGORY = 6
+DONT_KNOW = -1
+ENOUGH_REL_THRESHOLD = 100
+MAX_REL_THRESHOLD = 1000
+MINIMUM_DATA_THRESHOLD = 4
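+
+# A sketch of how these thresholds gate the context score: the analyser walks
+# hiragana pairs, bumps the category counter given by jp2CharContext below,
+# stops sampling once MAX_REL_THRESHOLD pairs are seen, and reports DONT_KNOW
+# until at least MINIMUM_DATA_THRESHOLD pairs have been observed. The helper
+# name is illustrative; category_counts[0] is the "never adjacent" bucket.
+def _context_confidence(total_rel, category_counts):
+    if total_rel > MINIMUM_DATA_THRESHOLD:
+        return (total_rel - category_counts[0]) / float(total_rel)
+    return DONT_KNOW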
+
+# This is the hiragana 2-char sequence table; the number in each cell
+# represents the frequency category of that 2-char sequence.
+jp2CharContext = (
+(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
+(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
+(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
+(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
+(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
+(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
+(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
+(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
+(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
+(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
+(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
+(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
+(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
+(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
+(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
+(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
+(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
+(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
+(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
+(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
+(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
+(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
+(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
+(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
+(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
+(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
+(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
+(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
+(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
+(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
+(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
+(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
+(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
+(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
+(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
+(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
+(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
+(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
+(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
+(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
+(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
+(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
+(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
+(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
+(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
+(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
+(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
+(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
+(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
+(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
+(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
+(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
+(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
+(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
+(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
+(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
+(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
+(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
+(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
+(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
+(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
+(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
+(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
+(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
+(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
+(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
+(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
+(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
+(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
+)
+
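+# Annotation (editorial, not part of the original port): jp2CharContext above
+# is an 83 x 83 table of context categories (0 to NUM_OF_CATEGORY - 1) for
+# pairs of Japanese kana. The feed() loop below indexes it as
+# jp2CharContext[previous_order][current_order] and tallies the resulting
+# category in self._mRelSample.
+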
+class JapaneseContextAnalysis:
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+        self._mTotalRel = 0  # total number of sequences received
+        # category counters; each integer counts the sequences in its category
+        self._mRelSample = [0] * NUM_OF_CATEGORY
+        # if the last byte in the current buffer is not the last byte of a
+        # character, we need to know how many bytes to skip in the next buffer
+        self._mNeedToSkipCharNum = 0
+        self._mLastCharOrder = -1  # the order of the previous character
+        # if this flag is set to True, detection is done and a conclusion has
+        # been made
+        self._mDone = False
+
+ def feed(self, aBuf, aLen):
+ if self._mDone:
+ return
+
+        # The buffer we receive is byte oriented, and a character may span
+        # more than one buffer. If the last one or two bytes of the previous
+        # buffer did not form a complete character, we recorded how many
+        # bytes were needed to complete it, and we skip those bytes here.
+        # We could keep those bytes and analyse the character once it is
+        # complete, but one character makes little difference, so simply
+        # skipping it keeps the logic simple and improves performance.
+        i = self._mNeedToSkipCharNum
+        # the pending skip is consumed now; clear it so a buffer that ends
+        # exactly on a character boundary does not carry a stale skip count
+        # into the next call
+        self._mNeedToSkipCharNum = 0
+ while i < aLen:
+ order, charLen = self.get_order(aBuf[i:i + 2])
+ i += charLen
+ if i > aLen:
+ self._mNeedToSkipCharNum = i - aLen
+ self._mLastCharOrder = -1
+ else:
+ if (order != -1) and (self._mLastCharOrder != -1):
+ self._mTotalRel += 1
+ if self._mTotalRel > MAX_REL_THRESHOLD:
+ self._mDone = True
+ break
+ self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
+ self._mLastCharOrder = order
+
+ def got_enough_data(self):
+ return self._mTotalRel > ENOUGH_REL_THRESHOLD
+
+ def get_confidence(self):
+ # This is just one way to calculate confidence. It works well for me.
+ if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
+            # force true division so the ratio is not truncated to zero
+            # under Python 2
+            return ((self._mTotalRel - self._mRelSample[0])
+                    / float(self._mTotalRel))
+ else:
+ return DONT_KNOW
+
+    def get_order(self, aBuf):
+        # overridden by the encoding-specific subclasses below
+        return -1, 1
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+    def __init__(self):
+        # run the base initialiser so the shared counters exist even when
+        # this class is instantiated on its own
+        JapaneseContextAnalysis.__init__(self)
+        self.charset_name = "SHIFT_JIS"
+
+ def get_charset_name(self):
+ return self.charset_name
+
+ def get_order(self, aBuf):
+ if not aBuf:
+ return -1, 1
+ # find out current char's byte length
+ first_char = wrap_ord(aBuf[0])
+ if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
+ charLen = 2
+ if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+ self.charset_name = "CP932"
+ else:
+ charLen = 1
+
+ # return its order if it is hiragana
+ if len(aBuf) > 1:
+ second_char = wrap_ord(aBuf[1])
+ if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+ return second_char - 0x9F, charLen
+
+ return -1, charLen
+
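+# A small illustrative check (editorial addition, not part of the original
+# port) of the byte-length logic above: 0x82 is a Shift_JIS lead byte, so a
+# two-byte length is reported; a plain ASCII byte reports length one.
+if __name__ == '__main__':
+    sjis = SJISContextAnalysis()
+    print(sjis.get_order(b'\x82\xa0'))   # -> (-1, 2): double-byte character
+    print(sjis.get_order(b'A'))          # -> (-1, 1): single-byte character
+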
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+ def get_order(self, aBuf):
+ if not aBuf:
+ return -1, 1
+ # find out current char's byte length
+ first_char = wrap_ord(aBuf[0])
+ if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+ charLen = 2
+ elif first_char == 0x8F:
+ charLen = 3
+ else:
+ charLen = 1
+
+ # return its order if it is hiragana
+ if len(aBuf) > 1:
+ second_char = wrap_ord(aBuf[1])
+ if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+ return second_char - 0xA1, charLen
+
+ return -1, charLen
+
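+# A minimal usage sketch (editorial addition, not part of the original port).
+# It relies on the module-level constants defined above (NUM_OF_CATEGORY,
+# MINIMUM_DATA_THRESHOLD and friends) and on EUC-JP hiragana bytes:
+# 0xA4 0xA2 encodes one kana.
+if __name__ == '__main__':
+    analyser = EUCJPContextAnalysis()
+    analyser.reset()
+    buf = b'\xa4\xa2' * 8            # eight identical hiragana characters
+    analyser.feed(buf, len(buf))
+    # seven adjacent kana pairs were seen, enough for a confidence estimate
+    print(analyser._mTotalRel, analyser.get_confidence())
+
+    # a character split across two buffers is skipped rather than mis-parsed
+    analyser.reset()
+    analyser.feed(buf[:3], 3)        # first buffer ends mid-character
+    analyser.feed(buf[3:], len(buf) - 3)
+    # two pairs fewer than the single feed: the split character is skipped,
+    # breaking one adjacency on each side of it
+    print(analyser._mTotalRel)
+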
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langbulgarianmodel.py b/python/requests/requests/packages/chardet/langbulgarianmodel.py
new file mode 100644
index 000000000..e5788fc64
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langbulgarianmodel.py
@@ -0,0 +1,229 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage return / line feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Character Mapping Table:
+# this table is modified based on win1251BulgarianCharToOrderMap, so
+# only numbers below 64 are known to be valid
+
+Latin5_BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
+210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
+ 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
+ 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
+)
+
+win1251BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
+221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
+ 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
+ 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
+)
+
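+# A quick editorial sketch (not part of the original module) of how the maps
+# above are meant to be read: each is indexed by the raw byte value and
+# yields the frequency order of that character, with 252-255 reserved as
+# described in the header comment.
+if __name__ == '__main__':
+    print(win1251BulgarianCharToOrderMap[0x30])   # '0' -> 252 (a digit)
+    print(win1251BulgarianCharToOrderMap[0xE0])   # -> 1, a very frequent letter
+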
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 96.9392%
+# first 1024 sequences: 3.0618%
+# rest sequences: 0.2992%
+# negative sequences: 0.0020%
+BulgarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
+3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
+0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
+0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
+0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
+0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
+0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
+2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
+3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
+1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
+3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
+1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
+2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
+2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
+3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
+1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
+2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
+2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
+1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
+2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
+2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
+1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
+2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
+1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
+3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
+1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
+3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
+1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
+2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
+1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
+2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
+1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
+2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
+1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
+2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
+1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
+0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
+1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
+1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
+1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
+0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
+1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
+1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+)
+
+Latin5BulgarianModel = {
+ 'charToOrderMap': Latin5_BulgarianCharToOrderMap,
+ 'precedenceMatrix': BulgarianLangModel,
+ 'mTypicalPositiveRatio': 0.969392,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-5"
+}
+
+Win1251BulgarianModel = {
+ 'charToOrderMap': win1251BulgarianCharToOrderMap,
+ 'precedenceMatrix': BulgarianLangModel,
+ 'mTypicalPositiveRatio': 0.969392,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1251"
+}
+
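+# A hedged sketch (editorial addition) of how these dicts are consumed by
+# chardet's single-byte prober, assuming its SAMPLE_SIZE of 64: the flat
+# precedenceMatrix is treated as a 64 x 64 table of sequence categories,
+# indexed by the orders of two consecutive characters (orders of 64 and
+# above are ignored).
+if __name__ == '__main__':
+    prev_order, cur_order = 1, 2      # two very frequent letters
+    idx = prev_order * 64 + cur_order
+    category = Win1251BulgarianModel['precedenceMatrix'][idx]
+    print(category)   # -> 3 here: 3 marks a very common pair, 0 a rare one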
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langcyrillicmodel.py b/python/requests/requests/packages/chardet/langcyrillicmodel.py
new file mode 100644
index 000000000..a86f54bd5
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langcyrillicmodel.py
@@ -0,0 +1,329 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# KOI8-R language model
+# Character Mapping Table:
+KOI8R_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
+223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
+238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
+ 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
+ 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
+ 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
+ 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
+)
+
+win1251_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+)
+
+latin5_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+macCyrillic_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
+)
+
+IBM855_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
+206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
+ 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
+220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
+230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
+ 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
+ 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
+250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
+)
+
+IBM866_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 97.6601%
+# first 1024 sequences: 2.3389%
+# rest sequences: 0.1237%
+# negative sequences: 0.0009%
+RussianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
+1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
+1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
+2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
+1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
+3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
+1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
+2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
+1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
+1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
+1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
+1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
+3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
+1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
+2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
+1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
+2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
+1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
+1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
+1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
+3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
+3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
+1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
+1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
+0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
+1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
+1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
+0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
+1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
+2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
+1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
+1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
+2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
+1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
+1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
+1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
+0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
+0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
+0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
+2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
+0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+)
+
+Koi8rModel = {
+ 'charToOrderMap': KOI8R_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "KOI8-R"
+}
+
+Win1251CyrillicModel = {
+ 'charToOrderMap': win1251_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1251"
+}
+
+Latin5CyrillicModel = {
+ 'charToOrderMap': latin5_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-5"
+}
+
+MacCyrillicModel = {
+ 'charToOrderMap': macCyrillic_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "MacCyrillic"
+}
+
+Ibm866Model = {
+ 'charToOrderMap': IBM866_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "IBM866"
+}
+
+Ibm855Model = {
+ 'charToOrderMap': IBM855_CharToOrderMap,
+ 'precedenceMatrix': RussianLangModel,
+ 'mTypicalPositiveRatio': 0.976601,
+ 'keepEnglishLetter': False,
+ 'charsetName': "IBM855"
+}
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langgreekmodel.py b/python/requests/requests/packages/chardet/langgreekmodel.py
new file mode 100644
index 000000000..ddb583765
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langgreekmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage return / line feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin7_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+win1253_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.2851%
+# first 1024 sequences: 1.7001%
+# rest sequences: 0.0359%
+# negative sequences: 0.0148%
+GreekLangModel = (
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
+2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
+2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
+2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
+0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
+3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
+2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
+0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
+0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
+0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
+0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
+0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
+0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
+0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
+0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
+0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
+0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
+0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
+0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
+0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
+0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
+0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
+0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
+0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
+0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin7GreekModel = {
+ 'charToOrderMap': Latin7_CharToOrderMap,
+ 'precedenceMatrix': GreekLangModel,
+ 'mTypicalPositiveRatio': 0.982851,
+ 'keepEnglishLetter': False,
+ 'charsetName': "ISO-8859-7"
+}
+
+Win1253GreekModel = {
+ 'charToOrderMap': win1253_CharToOrderMap,
+ 'precedenceMatrix': GreekLangModel,
+ 'mTypicalPositiveRatio': 0.982851,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1253"
+}
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langhebrewmodel.py b/python/requests/requests/packages/chardet/langhebrewmodel.py
new file mode 100644
index 000000000..75f2bc7fe
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langhebrewmodel.py
@@ -0,0 +1,201 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Simon Montagu
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Shoshannah Forbes - original C code (?)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Windows-1255 language model
+# Character Mapping Table:
+win1255_CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
+ 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
+253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
+ 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
+124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
+215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
+ 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
+106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
+ 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
+238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
+ 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
+ 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.4004%
+# first 1024 sequences: 1.5981%
+# rest sequences: 0.087%
+# negative sequences: 0.0015%
+HebrewLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
+3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
+1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
+1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
+1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
+1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
+0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
+0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
+0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
+0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
+0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
+0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
+0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
+0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
+0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
+1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
+0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
+0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
+0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
+0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
+0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
+1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
+1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
+2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
+0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
+0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
+)
+
+Win1255HebrewModel = {
+ 'charToOrderMap': win1255_CharToOrderMap,
+ 'precedenceMatrix': HebrewLangModel,
+ 'mTypicalPositiveRatio': 0.984004,
+ 'keepEnglishLetter': False,
+ 'charsetName': "windows-1255"
+}
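+
+
+# Illustrative sketch, not part of the upstream chardet sources: roughly how
+# a single-byte prober consumes a model like Win1255HebrewModel above. Bytes
+# are mapped through 'charToOrderMap' to frequency orders; each pair of
+# frequent orders indexes 'precedenceMatrix' to classify the two-byte
+# sequence into one of four likelihood categories. The helper name and the
+# 64-order sample size are assumptions about how the tables are indexed.
+def _score_sketch(buf, model, sample_size=64, positive_cat=3):
+    counters = [0, 0, 0, 0]   # one counter per sequence likelihood category
+    last_order = 255          # sentinel: no previous frequent character
+    for byte_value in bytearray(buf):
+        order = model['charToOrderMap'][byte_value]
+        if order < sample_size and last_order < sample_size:
+            cat = model['precedenceMatrix'][last_order * sample_size + order]
+            counters[cat] += 1
+        last_order = order
+    total = sum(counters)
+    if not total:
+        return 0.0
+    # Ratio of "positive" sequences, normalized by the model's typical ratio.
+    return counters[positive_cat] / float(total) / model['mTypicalPositiveRatio']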
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langhungarianmodel.py b/python/requests/requests/packages/chardet/langhungarianmodel.py
new file mode 100644
index 000000000..49d2f0fe7
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langhungarianmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin2_HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
+175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
+ 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
+ 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
+245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+win1250HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
+177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
+ 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
+ 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
+245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 94.7368%
+# first 1024 sequences: 5.2623%
+# rest sequences: 0.8894%
+# negative sequences: 0.0009%
+HungarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
+3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
+0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
+1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
+1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
+3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
+2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
+2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
+2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
+2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
+1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
+1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
+3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
+1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
+1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
+2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
+2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
+2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
+3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
+1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
+1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
+1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
+2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
+1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
+2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
+1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
+1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
+0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
+2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
+2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
+1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
+1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
+2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
+2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
+2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
+1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
+0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin2HungarianModel = {
+ 'charToOrderMap': Latin2_HungarianCharToOrderMap,
+ 'precedenceMatrix': HungarianLangModel,
+ 'mTypicalPositiveRatio': 0.947368,
+ 'keepEnglishLetter': True,
+ 'charsetName': "ISO-8859-2"
+}
+
+Win1250HungarianModel = {
+ 'charToOrderMap': win1250HungarianCharToOrderMap,
+ 'precedenceMatrix': HungarianLangModel,
+ 'mTypicalPositiveRatio': 0.947368,
+ 'keepEnglishLetter': True,
+ 'charsetName': "windows-1250"
+}
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/langthaimodel.py b/python/requests/requests/packages/chardet/langthaimodel.py
new file mode 100644
index 000000000..0508b1b1a
--- /dev/null
+++ b/python/requests/requests/packages/chardet/langthaimodel.py
@@ -0,0 +1,200 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# The following results for Thai were collected from a limited sample (1M).
+
+# Character Mapping Table:
+TIS620CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
+188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
+253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
+ 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
+209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
+223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
+236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
+ 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
+ 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
+ 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
+ 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
+ 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 92.6386%
+# first 1024 sequences: 7.3177%
+# rest sequences: 1.0230%
+# negative sequences: 0.0436%
+ThaiLangModel = (
+0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
+0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
+3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
+0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
+3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
+3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
+3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
+3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
+2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
+3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
+1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
+3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
+1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
+0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
+2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
+0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
+3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
+2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
+3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
+2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
+3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
+3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
+3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
+3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
+1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
+0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
+0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
+3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
+3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
+1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
+3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
+3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
+0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
+0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
+1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
+1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
+3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
+0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
+3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
+0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
+0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
+0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
+0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
+0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
+0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
+0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
+0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
+0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
+3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
+2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
+0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
+3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
+1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
+1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+TIS620ThaiModel = {
+ 'charToOrderMap': TIS620CharToOrderMap,
+ 'precedenceMatrix': ThaiLangModel,
+ 'mTypicalPositiveRatio': 0.926386,
+ 'keepEnglishLetter': False,
+ 'charsetName': "TIS-620"
+}
+
+# flake8: noqa
diff --git a/python/requests/requests/packages/chardet/latin1prober.py b/python/requests/requests/packages/chardet/latin1prober.py
new file mode 100644
index 000000000..eef357354
--- /dev/null
+++ b/python/requests/requests/packages/chardet/latin1prober.py
@@ -0,0 +1,139 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .constants import eNotMe
+from .compat import wrap_ord
+
+FREQ_CAT_NUM = 4
+
+UDF = 0 # undefined
+OTH = 1 # other
+ASC = 2 # ascii capital letter
+ASS = 3 # ascii small letter
+ACV = 4 # accent capital vowel
+ACO = 5 # accent capital other
+ASV = 6 # accent small vowel
+ASO = 7 # accent small other
+CLASS_NUM = 8 # total classes
+
+Latin1_CharToClass = (
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
+ OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
+ ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
+ OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
+ ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
+ OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
+ OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
+ UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
+ OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
+ ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
+ ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
+ ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
+ ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
+ ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
+ ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
+ ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
+ ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+Latin1ClassModel = (
+ # UDF OTH ASC ASS ACV ACO ASV ASO
+ 0, 0, 0, 0, 0, 0, 0, 0, # UDF
+ 0, 3, 3, 3, 3, 3, 3, 3, # OTH
+ 0, 3, 3, 3, 3, 3, 3, 3, # ASC
+ 0, 3, 3, 3, 1, 1, 3, 3, # ASS
+ 0, 3, 3, 3, 1, 2, 1, 2, # ACV
+ 0, 3, 3, 3, 3, 3, 3, 3, # ACO
+ 0, 3, 1, 3, 1, 1, 1, 3, # ASV
+ 0, 3, 1, 3, 1, 1, 3, 3, # ASO
+)
+
+
+class Latin1Prober(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self.reset()
+
+ def reset(self):
+ self._mLastCharClass = OTH
+ self._mFreqCounter = [0] * FREQ_CAT_NUM
+ CharSetProber.reset(self)
+
+ def get_charset_name(self):
+ return "windows-1252"
+
+ def feed(self, aBuf):
+ aBuf = self.filter_with_english_letters(aBuf)
+ for c in aBuf:
+ charClass = Latin1_CharToClass[wrap_ord(c)]
+ freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ + charClass]
+ if freq == 0:
+ self._mState = eNotMe
+ break
+ self._mFreqCounter[freq] += 1
+ self._mLastCharClass = charClass
+
+ return self.get_state()
+
+ def get_confidence(self):
+ if self.get_state() == eNotMe:
+ return 0.01
+
+ total = sum(self._mFreqCounter)
+ if total < 0.01:
+ confidence = 0.0
+ else:
+ confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
+ / total)
+ if confidence < 0.0:
+ confidence = 0.0
+            # Lower the confidence of Latin-1 so that other, more accurate
+            # detectors can take priority.
+ confidence = confidence * 0.73
+ return confidence
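+
+
+# Illustrative, hedged usage sketch; this guard block is not part of the
+# upstream file, and only runs when the module itself is executed (e.g. via
+# python -m, so the relative imports above still resolve). The byte string
+# below is assumed to be windows-1252 text ("façade à la mode", with the
+# accented bytes 0xE7 and 0xE0).
+if __name__ == '__main__':
+    _prober = Latin1Prober()
+    _prober.feed(b'fa\xe7ade \xe0 la mode')
+    print(_prober.get_charset_name(), _prober.get_confidence())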
diff --git a/python/requests/requests/packages/chardet/mbcharsetprober.py b/python/requests/requests/packages/chardet/mbcharsetprober.py
new file mode 100644
index 000000000..bb42f2fb5
--- /dev/null
+++ b/python/requests/requests/packages/chardet/mbcharsetprober.py
@@ -0,0 +1,86 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .charsetprober import CharSetProber
+
+
+class MultiByteCharSetProber(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mDistributionAnalyzer = None
+ self._mCodingSM = None
+ self._mLastChar = [0, 0]
+
+ def reset(self):
+ CharSetProber.reset(self)
+ if self._mCodingSM:
+ self._mCodingSM.reset()
+ if self._mDistributionAnalyzer:
+ self._mDistributionAnalyzer.reset()
+ self._mLastChar = [0, 0]
+
+ def get_charset_name(self):
+ pass
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mDistributionAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ return self._mDistributionAnalyzer.get_confidence()
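+
+
+# Illustrative sketch, not part of the upstream file: a concrete prober is
+# expected to install a coding state machine and a distribution analyzer in
+# its own __init__ (compare sjisprober.py in this package). The wiring below
+# is an assumption for illustration; the imports are kept local so the
+# sketch stays out of this module's import graph.
+def _make_sjis_prober_sketch():
+    from .codingstatemachine import CodingStateMachine
+    from .chardistribution import SJISDistributionAnalysis
+    from .mbcssm import SJISSMModel
+
+    class SJISProberSketch(MultiByteCharSetProber):
+        def __init__(self):
+            MultiByteCharSetProber.__init__(self)
+            self._mCodingSM = CodingStateMachine(SJISSMModel)
+            self._mDistributionAnalyzer = SJISDistributionAnalysis()
+            self.reset()
+
+    return SJISProberSketch()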
diff --git a/python/requests/requests/packages/chardet/mbcsgroupprober.py b/python/requests/requests/packages/chardet/mbcsgroupprober.py
new file mode 100644
index 000000000..03c9dcf3e
--- /dev/null
+++ b/python/requests/requests/packages/chardet/mbcsgroupprober.py
@@ -0,0 +1,54 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .utf8prober import UTF8Prober
+from .sjisprober import SJISProber
+from .eucjpprober import EUCJPProber
+from .gb2312prober import GB2312Prober
+from .euckrprober import EUCKRProber
+from .cp949prober import CP949Prober
+from .big5prober import Big5Prober
+from .euctwprober import EUCTWProber
+
+
+class MBCSGroupProber(CharSetGroupProber):
+ def __init__(self):
+ CharSetGroupProber.__init__(self)
+ self._mProbers = [
+ UTF8Prober(),
+ SJISProber(),
+ EUCJPProber(),
+ GB2312Prober(),
+ EUCKRProber(),
+ CP949Prober(),
+ Big5Prober(),
+ EUCTWProber()
+ ]
+ self.reset()
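+
+
+# Illustrative, hedged usage sketch; this guard block is not part of the
+# upstream file, and only runs when the module itself is executed (e.g. via
+# python -m, so the relative imports above still resolve). The bytes below
+# are UTF-8 (Japanese "konnichiwa"), so the UTF8Prober is expected to win
+# the race among the probers above.
+if __name__ == '__main__':
+    _group = MBCSGroupProber()
+    _group.feed(b'\xe3\x81\x93\xe3\x82\x93\xe3\x81\xab\xe3\x81\xa1\xe3\x81\xaf')
+    print(_group.get_charset_name(), _group.get_confidence())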
diff --git a/python/requests/requests/packages/chardet/mbcssm.py b/python/requests/requests/packages/chardet/mbcssm.py
new file mode 100644
index 000000000..efe678ca0
--- /dev/null
+++ b/python/requests/requests/packages/chardet/mbcssm.py
@@ -0,0 +1,572 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .constants import eStart, eError, eItsMe
+
+# BIG5
+
+BIG5_cls = (
+    1,1,1,1,1,1,1,1, # 00 - 07  # allow 0x00 as a legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 4,4,4,4,4,4,4,4, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 4,3,3,3,3,3,3,3, # a0 - a7
+ 3,3,3,3,3,3,3,3, # a8 - af
+ 3,3,3,3,3,3,3,3, # b0 - b7
+ 3,3,3,3,3,3,3,3, # b8 - bf
+ 3,3,3,3,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+BIG5_st = (
+ eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
+ eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
+ eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
+)
+
+Big5CharLenTable = (0, 1, 1, 2, 0)
+
+Big5SMModel = {'classTable': BIG5_cls,
+ 'classFactor': 5,
+ 'stateTable': BIG5_st,
+ 'charLenTable': Big5CharLenTable,
+ 'name': 'Big5'}
+
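+# Illustrative sketch (an assumption; in this package the real stepping
+# logic lives in codingstatemachine.py): how a state-machine model such as
+# Big5SMModel above is advanced one byte at a time. The byte's class and
+# the current state index the flattened state table; 'charLenTable' gives
+# the expected character length per class when a new character starts.
+def _next_state_sketch(model, current_state, byte_value):
+    byte_class = model['classTable'][byte_value]
+    return model['stateTable'][current_state * model['classFactor'] + byte_class]
+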
+# CP949
+
+CP949_cls = (
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
+ 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
+ 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
+ 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
+ 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
+ 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
+ 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
+ 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
+ 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
+ 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
+ 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
+)
+
+CP949_st = (
+#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
+ eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
+ eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
+ eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
+ eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
+ eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
+ eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
+)
+
+CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949SMModel = {'classTable': CP949_cls,
+ 'classFactor': 10,
+ 'stateTable': CP949_st,
+ 'charLenTable': CP949CharLenTable,
+ 'name': 'CP949'}
+
+# EUC-JP
+
+EUCJP_cls = (
+ 4,4,4,4,4,4,4,4, # 00 - 07
+ 4,4,4,4,4,4,5,5, # 08 - 0f
+ 4,4,4,4,4,4,4,4, # 10 - 17
+ 4,4,4,5,4,4,4,4, # 18 - 1f
+ 4,4,4,4,4,4,4,4, # 20 - 27
+ 4,4,4,4,4,4,4,4, # 28 - 2f
+ 4,4,4,4,4,4,4,4, # 30 - 37
+ 4,4,4,4,4,4,4,4, # 38 - 3f
+ 4,4,4,4,4,4,4,4, # 40 - 47
+ 4,4,4,4,4,4,4,4, # 48 - 4f
+ 4,4,4,4,4,4,4,4, # 50 - 57
+ 4,4,4,4,4,4,4,4, # 58 - 5f
+ 4,4,4,4,4,4,4,4, # 60 - 67
+ 4,4,4,4,4,4,4,4, # 68 - 6f
+ 4,4,4,4,4,4,4,4, # 70 - 77
+ 4,4,4,4,4,4,4,4, # 78 - 7f
+ 5,5,5,5,5,5,5,5, # 80 - 87
+ 5,5,5,5,5,5,1,3, # 88 - 8f
+ 5,5,5,5,5,5,5,5, # 90 - 97
+ 5,5,5,5,5,5,5,5, # 98 - 9f
+ 5,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,0,5 # f8 - ff
+)
+
+EUCJP_st = (
+ 3, 4, 3, 5,eStart,eError,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
+ eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
+ 3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
+)
+
+EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
+
+EUCJPSMModel = {'classTable': EUCJP_cls,
+ 'classFactor': 6,
+ 'stateTable': EUCJP_st,
+ 'charLenTable': EUCJPCharLenTable,
+ 'name': 'EUC-JP'}
+
+# EUC-KR
+
+EUCKR_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,3,3,3, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,3,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 2,2,2,2,2,2,2,2, # e0 - e7
+ 2,2,2,2,2,2,2,2, # e8 - ef
+ 2,2,2,2,2,2,2,2, # f0 - f7
+ 2,2,2,2,2,2,2,0 # f8 - ff
+)
+
+EUCKR_st = (
+ eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
+ eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
+)
+
+EUCKRCharLenTable = (0, 1, 2, 0)
+
+EUCKRSMModel = {'classTable': EUCKR_cls,
+ 'classFactor': 4,
+ 'stateTable': EUCKR_st,
+ 'charLenTable': EUCKRCharLenTable,
+ 'name': 'EUC-KR'}
+
+# EUC-TW
+
+EUCTW_cls = (
+ 2,2,2,2,2,2,2,2, # 00 - 07
+ 2,2,2,2,2,2,0,0, # 08 - 0f
+ 2,2,2,2,2,2,2,2, # 10 - 17
+ 2,2,2,0,2,2,2,2, # 18 - 1f
+ 2,2,2,2,2,2,2,2, # 20 - 27
+ 2,2,2,2,2,2,2,2, # 28 - 2f
+ 2,2,2,2,2,2,2,2, # 30 - 37
+ 2,2,2,2,2,2,2,2, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,2, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,6,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,3,4,4,4,4,4,4, # a0 - a7
+ 5,5,1,1,1,1,1,1, # a8 - af
+ 1,1,1,1,1,1,1,1, # b0 - b7
+ 1,1,1,1,1,1,1,1, # b8 - bf
+ 1,1,3,1,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+EUCTW_st = (
+ eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
+ eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
+ eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
+ 5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
+ eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTWSMModel = {'classTable': EUCTW_cls,
+ 'classFactor': 7,
+ 'stateTable': EUCTW_st,
+ 'charLenTable': EUCTWCharLenTable,
+ 'name': 'x-euc-tw'}
+
+# GB2312
+
+GB2312_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 3,3,3,3,3,3,3,3, # 30 - 37
+ 3,3,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,4, # 78 - 7f
+ 5,6,6,6,6,6,6,6, # 80 - 87
+ 6,6,6,6,6,6,6,6, # 88 - 8f
+ 6,6,6,6,6,6,6,6, # 90 - 97
+ 6,6,6,6,6,6,6,6, # 98 - 9f
+ 6,6,6,6,6,6,6,6, # a0 - a7
+ 6,6,6,6,6,6,6,6, # a8 - af
+ 6,6,6,6,6,6,6,6, # b0 - b7
+ 6,6,6,6,6,6,6,6, # b8 - bf
+ 6,6,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 6,6,6,6,6,6,6,6, # e0 - e7
+ 6,6,6,6,6,6,6,6, # e8 - ef
+ 6,6,6,6,6,6,6,6, # f0 - f7
+ 6,6,6,6,6,6,6,0 # f8 - ff
+)
+
+GB2312_st = (
+ eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
+ eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
+ 4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
+ eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
+ eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
+)
+
+# To be accurate, the length of class 6 can be either 2 or 4.
+# But it is not necessary to discriminate between the two, since
+# it is used for frequency analysis only and we are validating
+# each code range there as well. So it is safe to set it to
+# 2 here.
+GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312SMModel = {'classTable': GB2312_cls,
+ 'classFactor': 7,
+ 'stateTable': GB2312_st,
+ 'charLenTable': GB2312CharLenTable,
+ 'name': 'GB2312'}
+
+# Shift_JIS
+
+SJIS_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 3,3,3,3,3,2,2,3, # 80 - 87
+ 3,3,3,3,3,3,3,3, # 88 - 8f
+ 3,3,3,3,3,3,3,3, # 90 - 97
+ 3,3,3,3,3,3,3,3, # 98 - 9f
+ #0xa0 is illegal in sjis encoding, but some pages do
+ #contain such bytes. We need to be more forgiving of errors.
+ 2,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,4,4,4, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,0,0,0) # f8 - ff
+
+
+SJIS_st = (
+ eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
+)
+
+SJISCharLenTable = (0, 1, 1, 2, 0, 0)
+
+SJISSMModel = {'classTable': SJIS_cls,
+ 'classFactor': 6,
+ 'stateTable': SJIS_st,
+ 'charLenTable': SJISCharLenTable,
+ 'name': 'Shift_JIS'}
+
+# UCS2-BE
+
+UCS2BE_cls = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2BE_st = (
+ 5, 7, 7,eError, 4, 3,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
+ 6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
+ 6, 6, 6, 6, 5, 7, 7,eError,#20-27
+ 5, 8, 6, 6,eError, 6, 6, 6,#28-2f
+ 6, 6, 6, 6,eError,eError,eStart,eStart #30-37
+)
+
+UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
+
+UCS2BESMModel = {'classTable': UCS2BE_cls,
+ 'classFactor': 6,
+ 'stateTable': UCS2BE_st,
+ 'charLenTable': UCS2BECharLenTable,
+ 'name': 'UTF-16BE'}
+
+# UCS2-LE
+
+UCS2LE_cls = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2LE_st = (
+ 6, 6, 7, 6, 4, 3,eError,eError,#00-07
+ eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
+ eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
+ 5, 5, 5,eError, 5,eError, 6, 6,#18-1f
+ 7, 6, 8, 8, 5, 5, 5,eError,#20-27
+ 5, 5, 5,eError,eError,eError, 5, 5,#28-2f
+ 5, 5, 5,eError, 5,eError,eStart,eStart #30-37
+)
+
+UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
+
+UCS2LESMModel = {'classTable': UCS2LE_cls,
+ 'classFactor': 6,
+ 'stateTable': UCS2LE_st,
+ 'charLenTable': UCS2LECharLenTable,
+ 'name': 'UTF-16LE'}
+
+# UTF-8
+
+UTF8_cls = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 2,2,2,2,3,3,3,3, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 5,5,5,5,5,5,5,5, # a0 - a7
+ 5,5,5,5,5,5,5,5, # a8 - af
+ 5,5,5,5,5,5,5,5, # b0 - b7
+ 5,5,5,5,5,5,5,5, # b8 - bf
+ 0,0,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 7,8,8,8,8,8,8,8, # e0 - e7
+ 8,8,8,8,8,9,8,8, # e8 - ef
+ 10,11,11,11,11,11,11,11, # f0 - f7
+ 12,13,13,13,14,15,0,0 # f8 - ff
+)
+
+UTF8_st = (
+ eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
+ 9, 11, 8, 7, 6, 5, 4, 3,#08-0f
+ eError,eError,eError,eError,eError,eError,eError,eError,#10-17
+ eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
+ eError,eError, 5, 5, 5, 5,eError,eError,#30-37
+ eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
+ eError,eError,eError, 5, 5, 5,eError,eError,#40-47
+ eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
+ eError,eError, 7, 7, 7, 7,eError,eError,#50-57
+ eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
+ eError,eError,eError,eError, 7, 7,eError,eError,#60-67
+ eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
+ eError,eError, 9, 9, 9, 9,eError,eError,#70-77
+ eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
+ eError,eError,eError,eError,eError, 9,eError,eError,#80-87
+ eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
+ eError,eError, 12, 12, 12, 12,eError,eError,#90-97
+ eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
+ eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
+ eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
+ eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
+ eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
+ eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
+ eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
+)
+
+UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
+
+UTF8SMModel = {'classTable': UTF8_cls,
+ 'classFactor': 16,
+ 'stateTable': UTF8_st,
+ 'charLenTable': UTF8CharLenTable,
+ 'name': 'UTF-8'}
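
For orientation: each *SMModel dict above is consumed by
codingstatemachine.CodingStateMachine, which classifies every input byte via
classTable and then indexes the flattened stateTable with
(state * classFactor + byte_class). A minimal sketch of that lookup, assuming
the vendored package is importable as requests.packages.chardet (the step()
helper is illustrative, not part of the package):

    from requests.packages.chardet import constants
    from requests.packages.chardet.mbcssm import UTF8SMModel

    def step(model, state, byte_val):
        # Classify the byte, then look up the next state in the flat table.
        byte_cls = model['classTable'][byte_val]
        return model['stateTable'][state * model['classFactor'] + byte_cls]

    state = constants.eStart
    for b in bytearray(b'\xe4\xbd\xa0'):    # U+4F60 encoded as UTF-8
        state = step(UTF8SMModel, state, b)
    assert state == constants.eStart        # complete, legal sequence
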
diff --git a/python/requests/requests/packages/chardet/sbcharsetprober.py b/python/requests/requests/packages/chardet/sbcharsetprober.py
new file mode 100644
index 000000000..37291bd27
--- /dev/null
+++ b/python/requests/requests/packages/chardet/sbcharsetprober.py
@@ -0,0 +1,120 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from . import constants
+from .charsetprober import CharSetProber
+from .compat import wrap_ord
+
+SAMPLE_SIZE = 64
+SB_ENOUGH_REL_THRESHOLD = 1024
+POSITIVE_SHORTCUT_THRESHOLD = 0.95
+NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+SYMBOL_CAT_ORDER = 250
+NUMBER_OF_SEQ_CAT = 4
+POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
+#NEGATIVE_CAT = 0
+
+
+class SingleByteCharSetProber(CharSetProber):
+ def __init__(self, model, reversed=False, nameProber=None):
+ CharSetProber.__init__(self)
+ self._mModel = model
+ # TRUE if we need to reverse every pair in the model lookup
+ self._mReversed = reversed
+ # Optional auxiliary prober for name decision
+ self._mNameProber = nameProber
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ # char order of last character
+ self._mLastOrder = 255
+ self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
+ self._mTotalSeqs = 0
+ self._mTotalChar = 0
+ # characters that fall in our sampling range
+ self._mFreqChar = 0
+
+ def get_charset_name(self):
+ if self._mNameProber:
+ return self._mNameProber.get_charset_name()
+ else:
+ return self._mModel['charsetName']
+
+ def feed(self, aBuf):
+ if not self._mModel['keepEnglishLetter']:
+ aBuf = self.filter_without_english_letters(aBuf)
+ aLen = len(aBuf)
+ if not aLen:
+ return self.get_state()
+ for c in aBuf:
+ order = self._mModel['charToOrderMap'][wrap_ord(c)]
+ if order < SYMBOL_CAT_ORDER:
+ self._mTotalChar += 1
+ if order < SAMPLE_SIZE:
+ self._mFreqChar += 1
+ if self._mLastOrder < SAMPLE_SIZE:
+ self._mTotalSeqs += 1
+ if not self._mReversed:
+ i = (self._mLastOrder * SAMPLE_SIZE) + order
+ model = self._mModel['precedenceMatrix'][i]
+ else: # reverse the order of the letters in the lookup
+ i = (order * SAMPLE_SIZE) + self._mLastOrder
+ model = self._mModel['precedenceMatrix'][i]
+ self._mSeqCounters[model] += 1
+ self._mLastOrder = order
+
+ if self.get_state() == constants.eDetecting:
+ if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
+ cf = self.get_confidence()
+ if cf > POSITIVE_SHORTCUT_THRESHOLD:
+ if constants._debug:
+ sys.stderr.write('%s confidence = %s, we have a '
+ 'winner\n' %
+ (self._mModel['charsetName'], cf))
+ self._mState = constants.eFoundIt
+ elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
+ if constants._debug:
+ sys.stderr.write('%s confidence = %s, below negative '
+ 'shortcut threshold %s\n' %
+ (self._mModel['charsetName'], cf,
+ NEGATIVE_SHORTCUT_THRESHOLD))
+ self._mState = constants.eNotMe
+
+ return self.get_state()
+
+ def get_confidence(self):
+ r = 0.01
+ if self._mTotalSeqs > 0:
+ r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
+ / self._mModel['mTypicalPositiveRatio'])
+ r = r * self._mFreqChar / self._mTotalChar
+ if r >= 1.0:
+ r = 0.99
+ return r
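
The confidence above multiplies two ratios: how often observed character
pairs land in the model's positive category (normalized by
mTypicalPositiveRatio), and what fraction of characters fall inside the
sampled frequency range. A worked example of the same arithmetic, with
invented counter values:

    # All numbers below are invented for illustration.
    positive_seqs, total_seqs = 700, 1200
    typical_positive_ratio = 0.97        # model['mTypicalPositiveRatio']
    freq_char, total_char = 900, 1000

    r = (1.0 * positive_seqs / total_seqs) / typical_positive_ratio
    r = r * freq_char / total_char
    print(round(min(r, 0.99), 3))        # -> 0.541
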
diff --git a/python/requests/requests/packages/chardet/sbcsgroupprober.py b/python/requests/requests/packages/chardet/sbcsgroupprober.py
new file mode 100644
index 000000000..1b6196cd1
--- /dev/null
+++ b/python/requests/requests/packages/chardet/sbcsgroupprober.py
@@ -0,0 +1,69 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .sbcharsetprober import SingleByteCharSetProber
+from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
+ Latin5CyrillicModel, MacCyrillicModel,
+ Ibm866Model, Ibm855Model)
+from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
+from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
+from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
+from .langthaimodel import TIS620ThaiModel
+from .langhebrewmodel import Win1255HebrewModel
+from .hebrewprober import HebrewProber
+
+
+class SBCSGroupProber(CharSetGroupProber):
+ def __init__(self):
+ CharSetGroupProber.__init__(self)
+ self._mProbers = [
+ SingleByteCharSetProber(Win1251CyrillicModel),
+ SingleByteCharSetProber(Koi8rModel),
+ SingleByteCharSetProber(Latin5CyrillicModel),
+ SingleByteCharSetProber(MacCyrillicModel),
+ SingleByteCharSetProber(Ibm866Model),
+ SingleByteCharSetProber(Ibm855Model),
+ SingleByteCharSetProber(Latin7GreekModel),
+ SingleByteCharSetProber(Win1253GreekModel),
+ SingleByteCharSetProber(Latin5BulgarianModel),
+ SingleByteCharSetProber(Win1251BulgarianModel),
+ SingleByteCharSetProber(Latin2HungarianModel),
+ SingleByteCharSetProber(Win1250HungarianModel),
+ SingleByteCharSetProber(TIS620ThaiModel),
+ ]
+ hebrewProber = HebrewProber()
+ logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
+ False, hebrewProber)
+ visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
+ hebrewProber)
+ hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
+ self._mProbers.extend([hebrewProber, logicalHebrewProber,
+ visualHebrewProber])
+
+ self.reset()
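
A quick smoke test of the group prober, assuming the vendored package layout
used in this diff is importable:

    from requests.packages.chardet.sbcsgroupprober import SBCSGroupProber

    prober = SBCSGroupProber()
    # "Privet, mir" in Cyrillic, encoded as windows-1251.
    prober.feed(u'\u041f\u0440\u0438\u0432\u0435\u0442, '
                u'\u043c\u0438\u0440'.encode('windows-1251'))
    print(prober.get_charset_name(), prober.get_confidence())
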
diff --git a/python/requests/requests/packages/chardet/sjisprober.py b/python/requests/requests/packages/chardet/sjisprober.py
new file mode 100644
index 000000000..cd0e9e707
--- /dev/null
+++ b/python/requests/requests/packages/chardet/sjisprober.py
@@ -0,0 +1,91 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJISSMModel
+from . import constants
+
+
+class SJISProber(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(SJISSMModel)
+ self._mDistributionAnalyzer = SJISDistributionAnalysis()
+ self._mContextAnalyzer = SJISContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ MultiByteCharSetProber.reset(self)
+ self._mContextAnalyzer.reset()
+
+ def get_charset_name(self):
+ return self._mContextAnalyzer.get_charset_name()
+
+ def feed(self, aBuf):
+ aLen = len(aBuf)
+ for i in range(0, aLen):
+ codingState = self._mCodingSM.next_state(aBuf[i])
+ if codingState == constants.eError:
+ if constants._debug:
+ sys.stderr.write(self.get_charset_name()
+ + ' prober hit error at byte ' + str(i)
+ + '\n')
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ charLen = self._mCodingSM.get_current_charlen()
+ if i == 0:
+ self._mLastChar[1] = aBuf[0]
+ self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
+ charLen)
+ self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
+ else:
+ self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
+ - charLen], charLen)
+ self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
+ charLen)
+
+ self._mLastChar[0] = aBuf[aLen - 1]
+
+ if self.get_state() == constants.eDetecting:
+ if (self._mContextAnalyzer.got_enough_data() and
+ (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ contxtCf = self._mContextAnalyzer.get_confidence()
+ distribCf = self._mDistributionAnalyzer.get_confidence()
+ return max(contxtCf, distribCf)
diff --git a/python/requests/requests/packages/chardet/universaldetector.py b/python/requests/requests/packages/chardet/universaldetector.py
new file mode 100644
index 000000000..476522b99
--- /dev/null
+++ b/python/requests/requests/packages/chardet/universaldetector.py
@@ -0,0 +1,170 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+import sys
+import codecs
+from .latin1prober import Latin1Prober # windows-1252
+from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
+from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
+from .escprober import EscCharSetProber # ISO-2022, etc.
+import re
+
+MINIMUM_THRESHOLD = 0.20
+ePureAscii = 0
+eEscAscii = 1
+eHighbyte = 2
+
+
+class UniversalDetector:
+ def __init__(self):
+ self._highBitDetector = re.compile(b'[\x80-\xFF]')
+ self._escDetector = re.compile(b'(\033|~{)')
+ self._mEscCharSetProber = None
+ self._mCharSetProbers = []
+ self.reset()
+
+ def reset(self):
+ self.result = {'encoding': None, 'confidence': 0.0}
+ self.done = False
+ self._mStart = True
+ self._mGotData = False
+ self._mInputState = ePureAscii
+ self._mLastChar = b''
+ if self._mEscCharSetProber:
+ self._mEscCharSetProber.reset()
+ for prober in self._mCharSetProbers:
+ prober.reset()
+
+ def feed(self, aBuf):
+ if self.done:
+ return
+
+ aLen = len(aBuf)
+ if not aLen:
+ return
+
+ if not self._mGotData:
+ # If the data starts with a BOM, we know it is UTF
+ if aBuf[:3] == codecs.BOM_UTF8:
+ # EF BB BF UTF-8 with BOM
+ self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
+ elif aBuf[:4] == codecs.BOM_UTF32_LE:
+ # FF FE 00 00 UTF-32, little-endian BOM
+ self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
+ elif aBuf[:4] == codecs.BOM_UTF32_BE:
+ # 00 00 FE FF UTF-32, big-endian BOM
+ self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
+ elif aBuf[:4] == b'\xFE\xFF\x00\x00':
+ # FE FF 00 00 UCS-4, unusual octet order BOM (3412)
+ self.result = {
+ 'encoding': "X-ISO-10646-UCS-4-3412",
+ 'confidence': 1.0
+ }
+ elif aBuf[:4] == b'\x00\x00\xFF\xFE':
+ # 00 00 FF FE UCS-4, unusual octet order BOM (2143)
+ self.result = {
+ 'encoding': "X-ISO-10646-UCS-4-2143",
+ 'confidence': 1.0
+ }
+ elif aBuf[:2] == codecs.BOM_LE:
+ # FF FE UTF-16, little endian BOM
+ self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
+ elif aBuf[:2] == codecs.BOM_BE:
+ # FE FF UTF-16, big endian BOM
+ self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
+
+ self._mGotData = True
+ if self.result['encoding'] and (self.result['confidence'] > 0.0):
+ self.done = True
+ return
+
+ if self._mInputState == ePureAscii:
+ if self._highBitDetector.search(aBuf):
+ self._mInputState = eHighbyte
+ elif ((self._mInputState == ePureAscii) and
+ self._escDetector.search(self._mLastChar + aBuf)):
+ self._mInputState = eEscAscii
+
+ self._mLastChar = aBuf[-1:]
+
+ if self._mInputState == eEscAscii:
+ if not self._mEscCharSetProber:
+ self._mEscCharSetProber = EscCharSetProber()
+ if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
+ self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
+ 'confidence': self._mEscCharSetProber.get_confidence()}
+ self.done = True
+ elif self._mInputState == eHighbyte:
+ if not self._mCharSetProbers:
+ self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
+ Latin1Prober()]
+ for prober in self._mCharSetProbers:
+ if prober.feed(aBuf) == constants.eFoundIt:
+ self.result = {'encoding': prober.get_charset_name(),
+ 'confidence': prober.get_confidence()}
+ self.done = True
+ break
+
+ def close(self):
+ if self.done:
+ return
+ if not self._mGotData:
+ if constants._debug:
+ sys.stderr.write('no data received!\n')
+ return
+ self.done = True
+
+ if self._mInputState == ePureAscii:
+ self.result = {'encoding': 'ascii', 'confidence': 1.0}
+ return self.result
+
+ if self._mInputState == eHighbyte:
+ proberConfidence = None
+ maxProberConfidence = 0.0
+ maxProber = None
+ for prober in self._mCharSetProbers:
+ if not prober:
+ continue
+ proberConfidence = prober.get_confidence()
+ if proberConfidence > maxProberConfidence:
+ maxProberConfidence = proberConfidence
+ maxProber = prober
+ if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
+ self.result = {'encoding': maxProber.get_charset_name(),
+ 'confidence': maxProber.get_confidence()}
+ return self.result
+
+ if constants._debug:
+ sys.stderr.write('no probers hit minimum threshold\n')
+ for prober in self._mCharSetProbers[0].mProbers:
+ if not prober:
+ continue
+ sys.stderr.write('%s confidence = %s\n' %
+ (prober.get_charset_name(),
+ prober.get_confidence()))
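
UniversalDetector is the module's top-level entry point. The usual pattern is
to stream chunks into feed() until done flips, then call close() and read
result (a sketch; the file name here is hypothetical):

    from requests.packages.chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open('unknown.txt', 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            detector.feed(chunk)
            if detector.done:
                break
    detector.close()
    print(detector.result)  # e.g. {'encoding': 'utf-8', 'confidence': 0.87}
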
diff --git a/python/requests/requests/packages/chardet/utf8prober.py b/python/requests/requests/packages/chardet/utf8prober.py
new file mode 100644
index 000000000..1c0bb5d8f
--- /dev/null
+++ b/python/requests/requests/packages/chardet/utf8prober.py
@@ -0,0 +1,76 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from . import constants
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .mbcssm import UTF8SMModel
+
+ONE_CHAR_PROB = 0.5
+
+
+class UTF8Prober(CharSetProber):
+ def __init__(self):
+ CharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(UTF8SMModel)
+ self.reset()
+
+ def reset(self):
+ CharSetProber.reset(self)
+ self._mCodingSM.reset()
+ self._mNumOfMBChar = 0
+
+ def get_charset_name(self):
+ return "utf-8"
+
+ def feed(self, aBuf):
+ for c in aBuf:
+ codingState = self._mCodingSM.next_state(c)
+ if codingState == constants.eError:
+ self._mState = constants.eNotMe
+ break
+ elif codingState == constants.eItsMe:
+ self._mState = constants.eFoundIt
+ break
+ elif codingState == constants.eStart:
+ if self._mCodingSM.get_current_charlen() >= 2:
+ self._mNumOfMBChar += 1
+
+ if self.get_state() == constants.eDetecting:
+ if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
+ self._mState = constants.eFoundIt
+
+ return self.get_state()
+
+ def get_confidence(self):
+ unlike = 0.99
+ if self._mNumOfMBChar < 6:
+ for i in range(0, self._mNumOfMBChar):
+ unlike = unlike * ONE_CHAR_PROB
+ return 1.0 - unlike
+ else:
+ return unlike
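
The confidence formula says: starting from a 0.99 chance that the input is
*not* UTF-8, every multi-byte sequence that survives the state machine halves
that chance. Tabulated below (a direct transcription of get_confidence):

    ONE_CHAR_PROB = 0.5

    def utf8_confidence(num_mb_chars):
        # Mirrors UTF8Prober.get_confidence for a given multi-byte count.
        if num_mb_chars < 6:
            return 1.0 - 0.99 * ONE_CHAR_PROB ** num_mb_chars
        return 0.99

    for n in (0, 1, 3, 6):
        print(n, round(utf8_confidence(n), 3))  # 0.01, 0.505, 0.876, 0.99
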
diff --git a/python/requests/requests/packages/urllib3/__init__.py b/python/requests/requests/packages/urllib3/__init__.py
new file mode 100644
index 000000000..e43991a97
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/__init__.py
@@ -0,0 +1,93 @@
+"""
+urllib3 - Thread-safe connection pooling and re-use.
+"""
+
+from __future__ import absolute_import
+import warnings
+
+from .connectionpool import (
+ HTTPConnectionPool,
+ HTTPSConnectionPool,
+ connection_from_url
+)
+
+from . import exceptions
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.url import get_host
+from .util.timeout import Timeout
+from .util.retry import Retry
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.13.1'
+
+__all__ = (
+ 'HTTPConnectionPool',
+ 'HTTPSConnectionPool',
+ 'PoolManager',
+ 'ProxyManager',
+ 'HTTPResponse',
+ 'Retry',
+ 'Timeout',
+ 'add_stderr_logger',
+ 'connection_from_url',
+ 'disable_warnings',
+ 'encode_multipart_formdata',
+ 'get_host',
+ 'make_headers',
+ 'proxy_from_url',
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug('Added a stderr logging handler to logger: %s' % __name__)
+ return handler
+
+# ... Clean up.
+del NullHandler
+
+
+# SecurityWarnings always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarnings should go off once per host.
+warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
+# InsecurePlatformWarnings don't vary between requests, so we keep them default.
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter('default', exceptions.SNIMissingWarning)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter('ignore', category)
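
Typical use of the public surface re-exported here (host and header values
are placeholders):

    from requests.packages import urllib3

    urllib3.add_stderr_logger()     # verbose pool activity on stderr
    urllib3.disable_warnings()      # silence all HTTPWarning subclasses

    http = urllib3.PoolManager(maxsize=4)
    headers = urllib3.make_headers(user_agent='demo/1.0')
    r = http.request('GET', 'http://example.com/', headers=headers)
    print(r.status, len(r.data))
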
diff --git a/python/requests/requests/packages/urllib3/_collections.py b/python/requests/requests/packages/urllib3/_collections.py
new file mode 100644
index 000000000..67f3ce994
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/_collections.py
@@ -0,0 +1,324 @@
+from __future__ import absolute_import
+from collections import Mapping, MutableMapping
+try:
+ from threading import RLock
+except ImportError: # Platform-specific: No threads available
+ class RLock:
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
+try: # Python 2.7+
+ from collections import OrderedDict
+except ImportError:
+ from .packages.ordered_dict import OrderedDict
+from .packages.six import iterkeys, itervalues, PY3
+
+
+__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container, the callback
+ ``dispose_func(value)`` is invoked.
+ """
+
+ ContainerCls = OrderedDict
+
+ def __init__(self, maxsize=10, dispose_func=None):
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+
+ self._container = self.ContainerCls()
+ self.lock = RLock()
+
+ def __getitem__(self, key):
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key, value):
+ evicted_value = _Null
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ evicted_value = self._container.get(key, _Null)
+ self._container[key] = value
+
+ # If we didn't evict an existing value, we might have to evict the
+ # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize:
+ _key, evicted_value = self._container.popitem(last=False)
+
+ if self.dispose_func and evicted_value is not _Null:
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key):
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self):
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self):
+ raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
+
+ def clear(self):
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(itervalues(self._container))
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self):
+ with self.lock:
+ return list(iterkeys(self._container))
+
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict``-like container for storing HTTP headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s API. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ super(HTTPHeaderDict, self).__init__()
+ self._container = {}
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ self._container[key.lower()] = (key, val)
+ return self._container[key.lower()]
+
+ def __getitem__(self, key):
+ val = self._container[key.lower()]
+ return ', '.join(val[1:])
+
+ def __delitem__(self, key):
+ del self._container[key.lower()]
+
+ def __contains__(self, key):
+ return key.lower() in self._container
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return (dict((k.lower(), v) for k, v in self.itermerged()) ==
+ dict((k.lower(), v) for k, v in other.itermerged()))
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ if not PY3: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def __len__(self):
+ return len(self._container)
+
+ def __iter__(self):
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def pop(self, key, default=__marker):
+ '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ '''
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ key_lower = key.lower()
+ new_vals = key, val
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ # new_vals was not inserted, as there was a previous one
+ if isinstance(vals, list):
+ # If already several items got inserted, we have a list
+ vals.append(val)
+ else:
+ # vals should be a tuple then, i.e. only one item so far
+ # Need to convert the tuple to list for further extension
+ self._container[key_lower] = [vals[0], vals[1], val]
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError("extend() takes at most 1 positional "
+ "arguments ({0} given)".format(len(args)))
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ def getlist(self, key):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ return []
+ else:
+ if isinstance(vals, tuple):
+ return [vals[1]]
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+ def _copy_from(self, other):
+ for key in other:
+ val = other.getlist(key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ self._container[key.lower()] = [key] + val
+
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ', '.join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # python2.7 does not expose a proper API for exporting multiheaders
+ # efficiently. This function re-reads raw lines from the message
+ # object and extracts the multiheaders properly.
+ headers = []
+
+ for line in message.headers:
+ if line.startswith((' ', '\t')):
+ key, value = headers[-1]
+ headers[-1] = (key, value + '\r\n' + line.rstrip())
+ continue
+
+ key, value = line.split(':', 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
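
The two containers in action: add() accumulates duplicate header fields while
__setitem__ replaces, and dispose_func fires on LRU eviction (values below
are arbitrary):

    from requests.packages.urllib3._collections import (
        HTTPHeaderDict, RecentlyUsedContainer)

    h = HTTPHeaderDict()
    h['Content-Length'] = '7'
    h.add('Set-Cookie', 'foo=bar')
    h.add('Set-Cookie', 'baz=qux')
    print(h['set-cookie'])          # 'foo=bar, baz=qux'
    print(h.getlist('Set-Cookie'))  # ['foo=bar', 'baz=qux']

    def on_evict(value):
        print('evicted:', value)

    lru = RecentlyUsedContainer(maxsize=2, dispose_func=on_evict)
    lru['a'] = 1
    lru['b'] = 2
    lru['c'] = 3                    # evicts 'a' -> "evicted: 1"
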
diff --git a/python/requests/requests/packages/urllib3/connection.py b/python/requests/requests/packages/urllib3/connection.py
new file mode 100644
index 000000000..1e4cd4175
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/connection.py
@@ -0,0 +1,288 @@
+from __future__ import absolute_import
+import datetime
+import os
+import sys
+import socket
+from socket import error as SocketError, timeout as SocketTimeout
+import warnings
+from .packages import six
+
+try: # Python 3
+ from http.client import HTTPConnection as _HTTPConnection
+ from http.client import HTTPException # noqa: unused in this module
+except ImportError:
+ from httplib import HTTPConnection as _HTTPConnection
+ from httplib import HTTPException # noqa: unused in this module
+
+try: # Compiled with SSL?
+ import ssl
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
+
+ class BaseSSLError(BaseException):
+ pass
+
+
+try: # Python 3:
+ # Not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError: # Python 2:
+ class ConnectionError(Exception):
+ pass
+
+
+from .exceptions import (
+ NewConnectionError,
+ ConnectTimeoutError,
+ SubjectAltNameWarning,
+ SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import match_hostname
+
+from .util.ssl_ import (
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+ assert_fingerprint,
+)
+
+
+from .util import connection
+
+port_by_scheme = {
+ 'http': 80,
+ 'https': 443,
+}
+
+RECENT_DATE = datetime.date(2014, 1, 1)
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+ pass
+
+
+class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on httplib.HTTPConnection but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+
+ .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
+
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass::
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port = port_by_scheme['http']
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if six.PY3: # Python 3
+ kw.pop('strict', None)
+
+ # Pre-set source_address in case we have an older Python like 2.6.
+ self.source_address = kw.get('source_address')
+
+ if sys.version_info < (2, 7): # Python 2.6
+ # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
+ # not newer versions. We can still use it when creating a
+ # connection though, so we pop it *after* we have saved it as
+ # self.source_address.
+ kw.pop('source_address', None)
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop('socket_options', self.default_socket_options)
+
+ # Superclass also sets self.source_address in Python 2.7+.
+ _HTTPConnection.__init__(self, *args, **kw)
+
+ def _new_conn(self):
+ """ Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw['source_address'] = self.source_address
+
+ if self.socket_options:
+ extra_kw['socket_options'] = self.socket_options
+
+ try:
+ conn = connection.create_connection(
+ (self.host, self.port), self.timeout, **extra_kw)
+
+ except SocketTimeout as e:
+ raise ConnectTimeoutError(
+ self, "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout))
+
+ except SocketError as e:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e)
+
+ return conn
+
+ def _prepare_conn(self, conn):
+ self.sock = conn
+ # the _tunnel_host attribute was added in python 2.6.3 (via
+ # http://hg.python.org/cpython/rev/0f57b30a152f) so Pythons 2.6.0-2.6.2 do
+ # not have it.
+ if getattr(self, '_tunnel_host', None):
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+
+class HTTPSConnection(HTTPConnection):
+ default_port = port_by_scheme['https']
+
+ def __init__(self, host, port=None, key_file=None, cert_file=None,
+ strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
+
+ HTTPConnection.__init__(self, host, port, strict=strict,
+ timeout=timeout, **kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = 'https'
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+ self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
+
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+ """
+ Based on httplib.HTTPSConnection but wraps the socket with
+ SSL certification.
+ """
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ssl_version = None
+ assert_fingerprint = None
+
+ def set_cert(self, key_file=None, cert_file=None,
+ cert_reqs=None, ca_certs=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None):
+
+ if (ca_certs or ca_cert_dir) and cert_reqs is None:
+ cert_reqs = 'CERT_REQUIRED'
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+
+ def connect(self):
+ # Add certificate verification
+ conn = self._new_conn()
+
+ resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
+ resolved_ssl_version = resolve_ssl_version(self.ssl_version)
+
+ hostname = self.host
+ if getattr(self, '_tunnel_host', None):
+ # _tunnel_host was added in Python 2.6.3
+ # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
+
+ self.sock = conn
+ # Calls self._set_hostport(), so self.host is
+ # self._tunnel_host below.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn((
+ 'System time is way off (before {0}). This will probably '
+ 'lead to SSL verification errors').format(RECENT_DATE),
+ SystemTimeWarning
+ )
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
+ cert_reqs=resolved_cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ server_hostname=hostname,
+ ssl_version=resolved_ssl_version)
+
+ if self.assert_fingerprint:
+ assert_fingerprint(self.sock.getpeercert(binary_form=True),
+ self.assert_fingerprint)
+ elif resolved_cert_reqs != ssl.CERT_NONE \
+ and self.assert_hostname is not False:
+ cert = self.sock.getpeercert()
+ if not cert.get('subjectAltName', ()):
+ warnings.warn((
+ 'Certificate for {0} has no `subjectAltName`, falling back to check for a '
+ '`commonName` for now. This feature is being removed by major browsers and '
+ 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
+ 'for details.)'.format(hostname)),
+ SubjectAltNameWarning
+ )
+
+ # In case the hostname is an IPv6 address, strip the square
+ # brackets from it before using it to validate. This is because
+ # a certificate with an IPv6 address in it won't have square
+ # brackets around that address. Sadly, match_hostname won't do this
+ # for us: it expects the plain host part without any extra work
+ # that might have been done to make it palatable to httplib.
+ asserted_hostname = self.assert_hostname or hostname
+ asserted_hostname = asserted_hostname.strip('[]')
+ match_hostname(cert, asserted_hostname)
+
+ self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
+ self.assert_fingerprint is not None)
+
+
+if ssl:
+ # Make a copy for testing.
+ UnverifiedHTTPSConnection = HTTPSConnection
+ HTTPSConnection = VerifiedHTTPSConnection
+else:
+ HTTPSConnection = DummyConnection
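
Extending the default socket options as the HTTPConnection docstring above
describes, then issuing a bare request (the host is a placeholder):

    import socket
    from requests.packages.urllib3.connection import HTTPConnection

    opts = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),  # add TCP keep-alive
    ]
    conn = HTTPConnection('example.com', 80, socket_options=opts)
    conn.request('GET', '/')
    print(conn.getresponse().status)
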
diff --git a/python/requests/requests/packages/urllib3/connectionpool.py b/python/requests/requests/packages/urllib3/connectionpool.py
new file mode 100644
index 000000000..995b4167b
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/connectionpool.py
@@ -0,0 +1,818 @@
+from __future__ import absolute_import
+import errno
+import logging
+import sys
+import warnings
+
+from socket import error as SocketError, timeout as SocketTimeout
+import socket
+
+try: # Python 3
+ from queue import LifoQueue, Empty, Full
+except ImportError:
+ from Queue import LifoQueue, Empty, Full
+ # Queue is imported for side effects on MS Windows
+ import Queue as _unused_module_Queue # noqa: unused
+
+
+from .exceptions import (
+ ClosedPoolError,
+ ProtocolError,
+ EmptyPoolError,
+ HeaderParsingError,
+ HostChangedError,
+ LocationValueError,
+ MaxRetryError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+ InsecureRequestWarning,
+ NewConnectionError,
+)
+from .packages.ssl_match_hostname import CertificateError
+from .packages import six
+from .connection import (
+ port_by_scheme,
+ DummyConnection,
+ HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
+ HTTPException, BaseSSLError,
+)
+from .request import RequestMethods
+from .response import HTTPResponse
+
+from .util.connection import is_connection_dropped
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host, Url
+
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ self.host = host
+ self.port = port
+
+ def __str__(self):
+ return '%s(host=%r, port=%r)' % (type(self).__name__,
+ self.host, self.port)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
+
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ .. note::
+ Only works in Python 2. This parameter is ignored in Python 3.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+ fine-grained control over request timeouts. After the constructor
+ runs, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+ Parsed proxy URL; should not be used directly, instead see
+ :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param _proxy_headers:
+ A dictionary with proxy headers; should not be used directly,
+ instead see :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param \**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
+ scheme = 'http'
+ ConnectionCls = HTTPConnection
+
+ def __init__(self, host, port=None, strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
+ headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ **conn_kw):
+ ConnectionPool.__init__(self, host, port)
+ RequestMethods.__init__(self, headers)
+
+ self.strict = strict
+
+ if not isinstance(timeout, Timeout):
+ timeout = Timeout.from_float(timeout)
+
+ if retries is None:
+ retries = Retry.DEFAULT
+
+ self.timeout = timeout
+ self.retries = retries
+
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+
+ self.proxy = _proxy
+ self.proxy_headers = _proxy_headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+ # We cannot know if the user has added default socket options, so we cannot replace the
+ # list.
+ self.conn_kw.setdefault('socket_options', [])
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTP connection (%d): %s" %
+ (self.num_connections, self.host))
+
+ conn = self.ConnectionCls(host=self.host, port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, **self.conn_kw)
+ return conn
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ except Empty:
+ if self.block:
+ raise EmptyPoolError(self,
+ "Pool reached maximum size and no more "
+ "connections are allowed.")
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.info("Resetting dropped connection: %s" % self.host)
+ conn.close()
+ if getattr(conn, 'auto_open', 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # httplib._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is closed and discarded
+ because we exceeded maxsize. If connections are discarded frequently,
+ then maxsize should be increased.
+
+ If the pool is closed, then the connection will be closed and discarded.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ return # Everything is dandy, done.
+ except AttributeError:
+ # self.pool is None.
+ pass
+ except Full:
+ # This should never happen if self.block == True
+ log.warning(
+ "Connection pool is full, discarding connection: %s" %
+ self.host)
+
+ # Connection never got put back into the pool, close it.
+ if conn:
+ conn.close()
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
+ def _get_timeout(self, timeout):
+ """ Helper that always returns a :class:`urllib3.util.Timeout` """
+ if timeout is _Default:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
+
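+    # The three accepted forms, as handled above (sketch):
+    #
+    #   self._get_timeout(_Default)                    # clone of the pool default
+    #   self._get_timeout(Timeout(connect=2, read=7))  # clone of that Timeout
+    #   self._get_timeout(3.5)                         # Timeout.from_float(3.5)
+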
+ def _raise_timeout(self, err, url, timeout_value):
+        """Is the error actually a timeout? Will raise a ReadTimeoutError or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
+ # to specifically catch it and throw the timeout error
+ if hasattr(err, 'errno') and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ def _make_request(self, conn, method, url, timeout=_Default,
+ **httplib_request_kw):
+ """
+ Perform a request on a given urllib connection object taken from our
+ pool.
+
+ :param conn:
+ a connection from one of our connection pools
+
+ :param timeout:
+ Socket timeout in seconds for the request. This can be a
+ float or integer, which will set the same timeout value for
+ the socket connect and the socket read, or an instance of
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
+ control over your timeouts.
+ """
+ self.num_requests += 1
+
+ timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+
+ # Trigger any extra validation we need to do.
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls httplib.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ conn.request(method, url, **httplib_request_kw)
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ # App Engine doesn't have a sock attr
+ if getattr(conn, 'sock', None):
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout)
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
+ conn.sock.settimeout(socket.getdefaulttimeout())
+ else: # None or a value
+ conn.sock.settimeout(read_timeout)
+
+ # Receive the response from the server
+ try:
+ try: # Python 2.7, use buffering of HTTP responses
+ httplib_response = conn.getresponse(buffering=True)
+ except TypeError: # Python 2.6 and older
+ httplib_response = conn.getresponse()
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # AppEngine doesn't have a version attr.
+ http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
+ log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
+ httplib_response.status,
+ httplib_response.length))
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except HeaderParsingError as hpe: # Platform-specific: Python 3
+ log.warning(
+ 'Failed to parse headers (url=%s): %s',
+ self._absolute_url(url), hpe, exc_info=True)
+
+ return httplib_response
+
+ def _absolute_url(self, path):
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ try:
+ while True:
+ conn = old_pool.get(block=False)
+ if conn:
+ conn.close()
+
+ except Empty:
+ pass # Done.
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith('/'):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
+
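+    # Comparison sketch for a pool built as HTTPConnectionPool('example.com'),
+    # where 'example.com' is a placeholder host:
+    #
+    #   pool.is_same_host('/relative/path')            # True
+    #   pool.is_same_host('http://example.com/')       # True
+    #   pool.is_same_host('http://example.com:8080/')  # False (port differs)
+    #   pool.is_same_host('https://example.com/')      # False (scheme differs)
+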
+ def urlopen(self, method, url, body=None, headers=None, retries=None,
+ redirect=True, assert_same_host=True, timeout=_Default,
+ pool_timeout=None, release_conn=None, **response_kw):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param body:
+            Data to send in the request body (useful for creating
+            POST requests; see :meth:`.RequestMethods.request` for a
+            more convenient interface).
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+            will disable redirects, too.
+
+ :param assert_same_host:
+            If ``True``, will make sure that the host of each request matches
+            the pool's host, raising HostChangedError otherwise. When
+            ``False``, you can use the pool on an HTTP proxy and request
+            foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param \**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = response_kw.get('preload_content', True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ conn = None
+
+ # Merge the proxy headers. Only do this in HTTP. We have to copy the
+ # headers dict so we can safely change it without those changes being
+ # reflected in anyone else's copy.
+ if self.scheme == 'http':
+ headers = headers.copy()
+ headers.update(self.proxy_headers)
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
+ httplib_response = self._make_request(conn, method, url,
+ timeout=timeout_obj,
+ body=body, headers=headers)
+
+ # If we're going to release the connection in ``finally:``, then
+ # the request doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = not release_conn and conn
+
+ # Import httplib's response into our own wrapper object
+ response = HTTPResponse.from_httplib(httplib_response,
+ pool=self,
+ connection=response_conn,
+ **response_kw)
+
+ # else:
+ # The connection will be put back into the pool when
+ # ``response.release_conn()`` is called (implicitly by
+ # ``response.read()``)
+
+ except Empty:
+ # Timed out by queue.
+ raise EmptyPoolError(self, "No pool connections are available.")
+
+ except (BaseSSLError, CertificateError) as e:
+ # Close the connection. If a connection is reused on which there
+ # was a Certificate error, the next request will certainly raise
+ # another Certificate error.
+ conn = conn and conn.close()
+ release_conn = True
+ raise SSLError(e)
+
+ except SSLError:
+ # Treat SSLError separately from BaseSSLError to preserve
+ # traceback.
+ conn = conn and conn.close()
+ release_conn = True
+ raise
+
+ except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
+            # Discard the connection for these exceptions. It will be
+            # replaced during the next _get_conn() call.
+ conn = conn and conn.close()
+ release_conn = True
+
+ if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+ e = ProxyError('Cannot connect to proxy.', e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError('Connection aborted.', e)
+
+ retries = retries.increment(method, url, error=e, _pool=self,
+ _stacktrace=sys.exc_info()[2])
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
+ err = e
+
+ finally:
+ if release_conn:
+ # Put the connection back to be reused. If the connection is
+ # expired then it will be None, which will get replaced with a
+ # fresh connection during _get_conn.
+ self._put_conn(conn)
+
+ if not conn:
+ # Try again
+ log.warning("Retrying (%r) after connection "
+ "broken by '%r': %s" % (retries, err, url))
+ return self.urlopen(method, url, body, headers, retries,
+ redirect, assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ if response.status == 303:
+ method = 'GET'
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ # Release the connection for this response, since we're not
+ # returning it to be released manually.
+ response.release_conn()
+ raise
+ return response
+
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(
+ method, redirect_location, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ # Check if we should retry the HTTP response.
+ if retries.is_forced_retry(method, status_code=response.status):
+ retries = retries.increment(method, url, response=response, _pool=self)
+ retries.sleep()
+ log.info("Forced retry: %s" % url)
+ return self.urlopen(
+ method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ return response
+
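+    # A minimal usage sketch of urlopen(), assuming 'example.com' is a
+    # reachable placeholder host:
+    #
+    #   pool = HTTPConnectionPool('example.com', maxsize=2)
+    #   r = pool.urlopen('GET', '/', retries=Retry(total=3, redirect=2))
+    #   print(r.status)
+    #   print(r.data)
+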
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+ instead of :class:`.HTTPSConnection`.
+
+ :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
+ available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
+ """
+
+ scheme = 'https'
+ ConnectionCls = HTTPSConnection
+
+ def __init__(self, host, port=None,
+ strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
+ block=False, headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ key_file=None, cert_file=None, cert_reqs=None,
+ ca_certs=None, ssl_version=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None, **conn_kw):
+
+ HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
+ block, headers, retries, _proxy, _proxy_headers,
+ **conn_kw)
+
+ if ca_certs and cert_reqs is None:
+ cert_reqs = 'CERT_REQUIRED'
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_conn(self, conn):
+ """
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+ and establish the tunnel if proxy is used.
+ """
+
+ if isinstance(conn, VerifiedHTTPSConnection):
+ conn.set_cert(key_file=self.key_file,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint)
+ conn.ssl_version = self.ssl_version
+
+ return conn
+
+ def _prepare_proxy(self, conn):
+ """
+        Establish a tunnel connection early, because otherwise httplib
+        would improperly set the Host: header to the proxy's IP:port.
+ """
+ # Python 2.7+
+ try:
+ set_tunnel = conn.set_tunnel
+ except AttributeError: # Platform-specific: Python 2.6
+ set_tunnel = conn._set_tunnel
+
+ if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
+ set_tunnel(self.host, self.port)
+ else:
+ set_tunnel(self.host, self.port, self.proxy_headers)
+
+ conn.connect()
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTPS connection (%d): %s"
+ % (self.num_connections, self.host))
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+ raise SSLError("Can't connect to HTTPS URL because the SSL "
+ "module is not available.")
+
+ actual_host = self.host
+ actual_port = self.port
+ if self.proxy is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ conn = self.ConnectionCls(host=actual_host, port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, **self.conn_kw)
+
+ return self._prepare_conn(conn)
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn((
+ 'Unverified HTTPS request is being made. '
+ 'Adding certificate verification is strongly advised. See: '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'),
+ InsecureRequestWarning)
+
+
+def connection_from_url(url, **kw):
+ """
+    Given a url, return a :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+    of the url before creating a :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ if scheme == 'https':
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
diff --git a/python/requests/requests/packages/urllib3/contrib/__init__.py b/python/requests/requests/packages/urllib3/contrib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/contrib/__init__.py
diff --git a/python/requests/requests/packages/urllib3/contrib/appengine.py b/python/requests/requests/packages/urllib3/contrib/appengine.py
new file mode 100644
index 000000000..884cdb220
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/contrib/appengine.py
@@ -0,0 +1,223 @@
+from __future__ import absolute_import
+import logging
+import os
+import warnings
+
+from ..exceptions import (
+ HTTPError,
+ HTTPWarning,
+ MaxRetryError,
+ ProtocolError,
+ TimeoutError,
+ SSLError
+)
+
+from ..packages.six import BytesIO
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.timeout import Timeout
+from ..util.retry import Retry
+
+try:
+ from google.appengine.api import urlfetch
+except ImportError:
+ urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+ pass
+
+
+class AppEnginePlatformError(HTTPError):
+ pass
+
+
+class AppEngineManager(RequestMethods):
+ """
+ Connection manager for Google App Engine sandbox applications.
+
+ This manager uses the URLFetch service directly instead of using the
+ emulated httplib, and is subject to URLFetch limitations as described in
+ the App Engine documentation here:
+
+ https://cloud.google.com/appengine/docs/python/urlfetch
+
+    Notably it will raise an AppEnginePlatformError if:
+        * URLFetch is not available.
+        * You attempt to use this on GAEv2 (Managed VMs), where full socket
+          support is available.
+        * A request size is more than 10 megabytes.
+        * A response size is more than 32 megabytes.
+        * You use an unsupported request method such as OPTIONS.
+
+ Beyond those cases, it will raise normal urllib3 errors.
+ """
+
+ def __init__(self, headers=None, retries=None, validate_certificate=True):
+ if not urlfetch:
+ raise AppEnginePlatformError(
+ "URLFetch is not available in this environment.")
+
+ if is_prod_appengine_mvms():
+ raise AppEnginePlatformError(
+                "Use normal urllib3.PoolManager instead of AppEngineManager "
+ "on Managed VMs, as using URLFetch is not necessary in "
+ "this environment.")
+
+ warnings.warn(
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
+ "of sockets. To use sockets directly instead of URLFetch see "
+ "https://urllib3.readthedocs.org/en/latest/contrib.html.",
+ AppEnginePlatformWarning)
+
+ RequestMethods.__init__(self, headers)
+ self.validate_certificate = validate_certificate
+
+ self.retries = retries or Retry.DEFAULT
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def urlopen(self, method, url, body=None, headers=None,
+ retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw):
+
+ retries = self._get_retries(retries, redirect)
+
+ try:
+ response = urlfetch.fetch(
+ url,
+ payload=body,
+ method=method,
+ headers=headers or {},
+ allow_truncated=False,
+ follow_redirects=(
+ redirect and
+ retries.redirect != 0 and
+ retries.total),
+ deadline=self._get_absolute_timeout(timeout),
+ validate_certificate=self.validate_certificate,
+ )
+ except urlfetch.DeadlineExceededError as e:
+ raise TimeoutError(self, e)
+
+ except urlfetch.InvalidURLError as e:
+ if 'too large' in str(e):
+ raise AppEnginePlatformError(
+ "URLFetch request too large, URLFetch only "
+ "supports requests up to 10mb in size.", e)
+ raise ProtocolError(e)
+
+ except urlfetch.DownloadError as e:
+ if 'Too many redirects' in str(e):
+ raise MaxRetryError(self, url, reason=e)
+ raise ProtocolError(e)
+
+ except urlfetch.ResponseTooLargeError as e:
+ raise AppEnginePlatformError(
+                "URLFetch response too large, URLFetch only supports "
+ "responses up to 32mb in size.", e)
+
+ except urlfetch.SSLCertificateError as e:
+ raise SSLError(e)
+
+ except urlfetch.InvalidMethodError as e:
+ raise AppEnginePlatformError(
+ "URLFetch does not support method: %s" % method, e)
+
+ http_response = self._urlfetch_response_to_http_response(
+ response, **response_kw)
+
+ # Check for redirect response
+ if (http_response.get_redirect_location() and
+ retries.raise_on_redirect and redirect):
+ raise MaxRetryError(self, url, "too many redirects")
+
+ # Check if we should retry the HTTP response.
+ if retries.is_forced_retry(method, status_code=http_response.status):
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self)
+ log.info("Forced retry: %s" % url)
+ retries.sleep()
+ return self.urlopen(
+ method, url,
+ body=body, headers=headers,
+ retries=retries, redirect=redirect,
+ timeout=timeout, **response_kw)
+
+ return http_response
+
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+ if is_prod_appengine():
+ # Production GAE handles deflate encoding automatically, but does
+ # not remove the encoding header.
+ content_encoding = urlfetch_resp.headers.get('content-encoding')
+
+ if content_encoding == 'deflate':
+ del urlfetch_resp.headers['content-encoding']
+
+ return HTTPResponse(
+ # In order for decoding to work, we must present the content as
+ # a file-like object.
+ body=BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ **response_kw
+ )
+
+ def _get_absolute_timeout(self, timeout):
+ if timeout is Timeout.DEFAULT_TIMEOUT:
+ return 5 # 5s is the default timeout for URLFetch.
+ if isinstance(timeout, Timeout):
+ if timeout.read is not timeout.connect:
+ warnings.warn(
+ "URLFetch does not support granular timeout settings, "
+ "reverting to total timeout.", AppEnginePlatformWarning)
+ return timeout.total
+ return timeout
+
+ def _get_retries(self, retries, redirect):
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(
+ retries, redirect=redirect, default=self.retries)
+
+ if retries.connect or retries.read or retries.redirect:
+ warnings.warn(
+ "URLFetch only supports total retries and does not "
+ "recognize connect, read, or redirect retry parameters.",
+ AppEnginePlatformWarning)
+
+ return retries
+
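+# A minimal usage sketch (works only inside the App Engine sandbox, where
+# the urlfetch service exists; the URL below is a placeholder):
+#
+#   manager = AppEngineManager()
+#   response = manager.urlopen('GET', 'http://example.com/')
+#   print(response.status)
+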
+
+def is_appengine():
+ return (is_local_appengine() or
+ is_prod_appengine() or
+ is_prod_appengine_mvms())
+
+
+def is_appengine_sandbox():
+ return is_appengine() and not is_prod_appengine_mvms()
+
+
+def is_local_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Development/' in os.environ['SERVER_SOFTWARE'])
+
+
+def is_prod_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
+ not is_prod_appengine_mvms())
+
+
+def is_prod_appengine_mvms():
+ return os.environ.get('GAE_VM', False) == 'true'
diff --git a/python/requests/requests/packages/urllib3/contrib/ntlmpool.py b/python/requests/requests/packages/urllib3/contrib/ntlmpool.py
new file mode 100644
index 000000000..c136a238d
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,115 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+try:
+ from http.client import HTTPSConnection
+except ImportError:
+ from httplib import HTTPSConnection
+from logging import getLogger
+from ntlm import ntlm
+
+from urllib3 import HTTPSConnectionPool
+
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = 'https'
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split('\\', 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
+ (self.num_connections, self.host, self.authurl))
+
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ req_header = 'Authorization'
+ resp_header = 'www-authenticate'
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = (
+ 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % reshdr)
+ log.debug('Response data: %s [...]' % res.read(100))
+
+ # Remove the reference to the socket, so that it can not be closed by
+ # the response object (we want to keep the socket open)
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(', ')
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == 'NTLM ':
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception('Unexpected %s response header: %s' %
+ (resp_header, reshdr[resp_header]))
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = \
+ ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
+ self.user,
+ self.domain,
+ self.pw,
+ NegotiateFlags)
+ headers[req_header] = 'NTLM %s' % auth_msg
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % dict(res.getheaders()))
+ log.debug('Response data: %s [...]' % res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception('Server rejected request: wrong '
+ 'username or password')
+ raise Exception('Wrong server response: %s %s' %
+ (res.status, res.reason))
+
+ res.fp = None
+ log.debug('Connection established')
+ return conn
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True):
+ if headers is None:
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ return super(NTLMConnectionPool, self).urlopen(method, url, body,
+ headers, retries,
+ redirect,
+ assert_same_host)
diff --git a/python/requests/requests/packages/urllib3/contrib/pyopenssl.py b/python/requests/requests/packages/urllib3/contrib/pyopenssl.py
new file mode 100644
index 000000000..5996153af
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,310 @@
+'''SSL with SNI_-support for Python 2. Follow these instructions if you would
+like to verify SSL certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* pyOpenSSL (tested with 0.13)
+* ndg-httpsclient (tested with 0.3.2)
+* pyasn1 (tested with 0.1.6)
+
+You can install them with the following command:
+
+ pip install pyopenssl ndg-httpsclient pyasn1
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this::
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+If you want to configure the default list of supported cipher suites, you can
+set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+
+Module Variables
+----------------
+
+:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+
+'''
+from __future__ import absolute_import
+
+try:
+ from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
+ from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
+except SyntaxError as e:
+ raise ImportError(e)
+
+import OpenSSL.SSL
+from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.type import univ, constraint
+from socket import _fileobject, timeout, error as SocketError
+import ssl
+import select
+
+from .. import connection
+from .. import util
+
+__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
+
+# SNI only *really* works if we can read the subjectAltName of certificates.
+HAS_SNI = SUBJ_ALT_NAME_SUPPORT
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+ ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+try:
+ _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
+except AttributeError:
+ pass
+
+_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+
+DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
+
+
+def inject_into_urllib3():
+ 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
+
+ connection.ssl_wrap_socket = ssl_wrap_socket
+ util.HAS_SNI = HAS_SNI
+
+
+def extract_from_urllib3():
+ 'Undo monkey-patching by :func:`inject_into_urllib3`.'
+
+ connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
+ util.HAS_SNI = orig_util_HAS_SNI
+
+
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+class SubjectAltName(BaseSubjectAltName):
+ '''ASN.1 implementation for subjectAltNames support'''
+
+    # There is no limit to how many subjectAltNames a certificate may have;
+    # however, the decoder needs some bound, so we set an arbitrarily high
+    # limit.
+ sizeSpec = univ.SequenceOf.sizeSpec + \
+ constraint.ValueSizeConstraint(1, 1024)
+
+
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+def get_subj_alt_name(peer_cert):
+ # Search through extensions
+ dns_name = []
+ if not SUBJ_ALT_NAME_SUPPORT:
+ return dns_name
+
+ general_names = SubjectAltName()
+ for i in range(peer_cert.get_extension_count()):
+ ext = peer_cert.get_extension(i)
+ ext_name = ext.get_short_name()
+ if ext_name != 'subjectAltName':
+ continue
+
+ # PyOpenSSL returns extension data in ASN.1 encoded form
+ ext_dat = ext.get_data()
+ decoded_dat = der_decoder.decode(ext_dat,
+ asn1Spec=general_names)
+
+ for name in decoded_dat:
+ if not isinstance(name, SubjectAltName):
+ continue
+ for entry in range(len(name)):
+ component = name.getComponentByPosition(entry)
+ if component.getName() != 'dNSName':
+ continue
+ dns_name.append(str(component.getComponent()))
+
+ return dns_name
+
+
+class WrappedSocket(object):
+ '''API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+ collector of pypy.
+ '''
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
+ return b''
+ else:
+ raise SocketError(e)
+ except OpenSSL.SSL.ZeroReturnError as e:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ rd, wd, ed = select.select(
+ [self.socket], [], [], self.socket.gettimeout())
+ if not rd:
+ raise timeout('The read operation timed out')
+ else:
+ return self.recv(*args, **kwargs)
+ else:
+ return data
+
+ def settimeout(self, timeout):
+ return self.socket.settimeout(timeout)
+
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ _, wlist, _ = select.select([], [self.socket], [],
+ self.socket.gettimeout())
+ if not wlist:
+ raise timeout()
+ continue
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ # FIXME rethrow compatible exceptions should we ever use this
+ self.connection.shutdown()
+
+ def close(self):
+ if self._makefile_refs < 1:
+ try:
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ x509 = self.connection.get_peer_certificate()
+
+ if not x509:
+ return x509
+
+ if binary_form:
+ return OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_ASN1,
+ x509)
+
+ return {
+ 'subject': (
+ (('commonName', x509.get_subject().CN),),
+ ),
+ 'subjectAltName': [
+ ('DNS', value)
+ for value in get_subj_alt_name(x509)
+ ]
+ }
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+ return err_no == 0
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ca_cert_dir=None):
+ ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
+ if certfile:
+ keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
+ ctx.use_certificate_file(certfile)
+ if keyfile:
+ ctx.use_privatekey_file(keyfile)
+ if cert_reqs != ssl.CERT_NONE:
+ ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
+ if ca_certs or ca_cert_dir:
+ try:
+ ctx.load_verify_locations(ca_certs, ca_cert_dir)
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
+ else:
+ ctx.set_default_verify_paths()
+
+    # Disable TLS compression to mitigate CRIME attack (issue #309)
+ OP_NO_COMPRESSION = 0x20000
+ ctx.set_options(OP_NO_COMPRESSION)
+
+ # Set list of supported ciphersuites.
+ ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
+
+ cnx = OpenSSL.SSL.Connection(ctx, sock)
+ cnx.set_tlsext_host_name(server_hostname)
+ cnx.set_connect_state()
+ while True:
+ try:
+ cnx.do_handshake()
+ except OpenSSL.SSL.WantReadError:
+ rd, _, _ = select.select([sock], [], [], sock.gettimeout())
+ if not rd:
+ raise timeout('select timed out')
+ continue
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError('bad handshake: %r' % e)
+ break
+
+ return WrappedSocket(cnx, sock)
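+
+
+# Direct-use sketch (normally this function is reached indirectly, after
+# inject_into_urllib3() swaps it into place; 'example.com' is a placeholder):
+#
+#   import socket
+#   raw = socket.create_connection(('example.com', 443))
+#   wrapped = ssl_wrap_socket(raw, cert_reqs=ssl.CERT_REQUIRED,
+#                             server_hostname='example.com',
+#                             ssl_version=ssl.PROTOCOL_SSLv23)
+#   print(wrapped.getpeercert()['subjectAltName'])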
diff --git a/python/requests/requests/packages/urllib3/exceptions.py b/python/requests/requests/packages/urllib3/exceptions.py
new file mode 100644
index 000000000..8e07eb619
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/exceptions.py
@@ -0,0 +1,201 @@
+from __future__ import absolute_import
+# Base Exceptions
+
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class HTTPWarning(Warning):
+ "Base warning used by this module."
+ pass
+
+
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+ "Base exception for PoolErrors that have associated URLs."
+ def __init__(self, pool, url, message):
+ self.url = url
+ PoolError.__init__(self, pool, message)
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+ "Raised when SSL certificate fails in an HTTPS connection."
+ pass
+
+
+class ProxyError(HTTPError):
+ "Raised when the connection to a proxy fails."
+ pass
+
+
+class DecodeError(HTTPError):
+ "Raised when automatic decoding based on Content-Type fails."
+ pass
+
+
+class ProtocolError(HTTPError):
+ "Raised when something unexpected happens mid-request/response."
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+class MaxRetryError(RequestError):
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+ :param exceptions.Exception reason: The underlying error
+
+ """
+
+ def __init__(self, pool, url, reason=None):
+ self.reason = reason
+
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (
+ url, reason)
+
+ RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+ "Raised when an existing pool gets a request for a foreign host."
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ RequestError.__init__(self, pool, url, message)
+ self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+ """ Raised when passing an invalid state to a timeout """
+ pass
+
+
+class TimeoutError(HTTPError):
+ """ Raised when a socket timeout error occurs.
+
+ Catching this error will catch both :exc:`ReadTimeoutErrors
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+ """
+ pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+ "Raised when a socket timeout occurs while receiving data from a server"
+ pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+ "Raised when a socket timeout occurs while connecting to a server"
+ pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+ "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
+ pass
+
+
+class EmptyPoolError(PoolError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
+
+
+class ClosedPoolError(PoolError):
+ "Raised when a request enters a pool after the pool has been closed."
+ pass
+
+
+class LocationValueError(ValueError, HTTPError):
+ "Raised when there is something wrong with a given URL input."
+ pass
+
+
+class LocationParseError(LocationValueError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ HTTPError.__init__(self, message)
+
+ self.location = location
+
+
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = 'too many error responses'
+ SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
+class SecurityWarning(HTTPWarning):
+    "Warned when performing security-reducing actions"
+ pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+ "Warned when connecting to a host with a certificate missing a SAN."
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ "Warned when making an unverified HTTPS request."
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ "Warned when system time is suspected to be wrong"
+ pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ "Warned when certain SSL configuration is not available on a platform."
+ pass
+
+
+class SNIMissingWarning(HTTPWarning):
+    "Warned when making an HTTPS request without SNI available."
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ "Response needs to be chunked in order to read it as chunks."
+ pass
+
+
+class ProxySchemeUnknown(AssertionError, ValueError):
+ "ProxyManager does not support the supplied scheme"
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+ def __init__(self, scheme):
+ message = "Not supported proxy scheme %s" % scheme
+ super(ProxySchemeUnknown, self).__init__(message)
+
+
+class HeaderParsingError(HTTPError):
+ "Raised by assert_header_parsing, but we convert it to a log.warning statement."
+ def __init__(self, defects, unparsed_data):
+ message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
+ super(HeaderParsingError, self).__init__(message)
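+
+
+# A typical handling sketch (the pool and URL below are placeholders):
+#
+#   try:
+#       pool.urlopen('GET', '/')
+#   except MaxRetryError as e:
+#       log.error('Gave up on %s: %r', e.url, e.reason)
+#   except TimeoutError:
+#       ...  # catches both ConnectTimeoutError and ReadTimeoutError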
diff --git a/python/requests/requests/packages/urllib3/fields.py b/python/requests/requests/packages/urllib3/fields.py
new file mode 100644
index 000000000..c7d48113b
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/fields.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import
+import email.utils
+import mimetypes
+
+from .packages import six
+
+
+def guess_content_type(filename, default='application/octet-stream'):
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
+
+def format_header_param(name, value):
+ """
+ Helper function to format and quote a single header parameter.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows RFC 2231, as
+ suggested by RFC 2388 Section 4.4.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+ if not any(ch in value for ch in '"\\\r\n'):
+ result = '%s="%s"' % (name, value)
+ try:
+ result.encode('ascii')
+ except UnicodeEncodeError:
+ pass
+ else:
+ return result
+ if not six.PY3: # Python 2:
+ value = value.encode('utf-8')
+ value = email.utils.encode_rfc2231(value, 'utf-8')
+ value = '%s*=%s' % (name, value)
+ return value
+
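+# A sketch of the two paths above (the values shown are illustrative):
+#
+#   format_header_param('filename', u'report.txt')
+#   # -> 'filename="report.txt"'
+#   format_header_param('filename', u'r\xe9sum\xe9.txt')
+#   # -> "filename*=utf-8''r%C3%A9sum%C3%A9.txt"  (RFC 2231 form)
+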
+
+class RequestField(object):
+ """
+ A data container for request body parameters.
+
+ :param name:
+ The name of this request field.
+ :param data:
+ The data/value body.
+ :param filename:
+ An optional filename of the request field.
+ :param headers:
+ An optional dict-like object of headers to initially use for the field.
+ """
+ def __init__(self, name, data, filename=None, headers=None):
+ self._name = name
+ self._filename = filename
+ self.data = data
+ self.headers = {}
+ if headers:
+ self.headers = dict(headers)
+
+ @classmethod
+ def from_tuples(cls, fieldname, value):
+ """
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+        Supports constructing :class:`~urllib3.fields.RequestField` from
+        key/value string parameters and key/filetuple parameters. A filetuple
+        is a (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
+
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+
+ Field names and filenames must be unicode.
+ """
+ if isinstance(value, tuple):
+ if len(value) == 3:
+ filename, data, content_type = value
+ else:
+ filename, data = value
+ content_type = guess_content_type(filename)
+ else:
+ filename = None
+ content_type = None
+ data = value
+
+ request_param = cls(fieldname, data, filename=filename)
+ request_param.make_multipart(content_type=content_type)
+
+ return request_param
+
+ def _render_part(self, name, value):
+ """
+ Overridable helper function to format a single header parameter.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+ return format_header_param(name, value)
+
+ def _render_parts(self, header_parts):
+ """
+ Helper function to format and quote a single header.
+
+ Useful for single headers that are composed of multiple items. E.g.,
+ 'Content-Disposition' fields.
+
+ :param header_parts:
+            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
+ """
+ parts = []
+ iterable = header_parts
+ if isinstance(header_parts, dict):
+ iterable = header_parts.items()
+
+ for name, value in iterable:
+ if value:
+ parts.append(self._render_part(name, value))
+
+ return '; '.join(parts)
+
+ def render_headers(self):
+ """
+ Renders the headers for this request field.
+ """
+ lines = []
+
+ sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
+ for sort_key in sort_keys:
+ if self.headers.get(sort_key, False):
+ lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
+
+ for header_name, header_value in self.headers.items():
+ if header_name not in sort_keys:
+ if header_value:
+ lines.append('%s: %s' % (header_name, header_value))
+
+ lines.append('\r\n')
+ return '\r\n'.join(lines)
+
+ def make_multipart(self, content_disposition=None, content_type=None,
+ content_location=None):
+ """
+ Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on the request field.
+
+        :param content_disposition:
+            The 'Content-Disposition' of the request body. Defaults to
+            'form-data'.
+        :param content_type:
+            The 'Content-Type' of the request body.
+        :param content_location:
+            The 'Content-Location' of the request body.
+
+ """
+ self.headers['Content-Disposition'] = content_disposition or 'form-data'
+ self.headers['Content-Disposition'] += '; '.join([
+ '', self._render_parts(
+ (('name', self._name), ('filename', self._filename))
+ )
+ ])
+ self.headers['Content-Type'] = content_type
+ self.headers['Content-Location'] = content_location
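+
+
+# A short end-to-end sketch (the field and file names are illustrative):
+#
+#   field = RequestField('userfile', b'contents', filename='report.txt')
+#   field.make_multipart(content_type='text/plain')
+#   field.render_headers()
+#   # -> 'Content-Disposition: form-data; name="userfile"; '
+#   #    'filename="report.txt"\r\nContent-Type: text/plain\r\n\r\n'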
diff --git a/python/requests/requests/packages/urllib3/filepost.py b/python/requests/requests/packages/urllib3/filepost.py
new file mode 100644
index 000000000..97a2843ca
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/filepost.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+import codecs
+
+from uuid import uuid4
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
+from .fields import RequestField
+
+writer = codecs.lookup('utf-8')[3]
+
+
+def choose_boundary():
+ """
+    Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ return uuid4().hex
+
+
+def iter_field_objects(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ if isinstance(fields, dict):
+ i = six.iteritems(fields)
+ else:
+ i = iter(fields)
+
+ for field in i:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+ """
+ .. deprecated:: 1.6
+
+ Iterate over fields.
+
+ The addition of :class:`~urllib3.fields.RequestField` makes this function
+ obsolete. Instead, use :func:`iter_field_objects`, which returns
+ :class:`~urllib3.fields.RequestField` objects.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+        :func:`.choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(b('--%s\r\n' % (boundary)))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b'\r\n')
+
+ body.write(b('--%s--\r\n' % (boundary)))
+
+ content_type = str('multipart/form-data; boundary=%s' % boundary)
+
+ return body.getvalue(), content_type
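+
+
+# Usage sketch (the field values are illustrative):
+#
+#   body, content_type = encode_multipart_formdata({
+#       'token': 'abc123',
+#       'upload': ('notes.txt', 'file contents', 'text/plain'),
+#   })
+#   # content_type -> 'multipart/form-data; boundary=<random hex>'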
diff --git a/python/requests/requests/packages/urllib3/packages/__init__.py b/python/requests/requests/packages/urllib3/packages/__init__.py
new file mode 100644
index 000000000..170e974c1
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ('ssl_match_hostname', )
diff --git a/python/requests/requests/packages/urllib3/packages/ordered_dict.py b/python/requests/requests/packages/urllib3/packages/ordered_dict.py
new file mode 100644
index 000000000..4479363cc
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/packages/ordered_dict.py
@@ -0,0 +1,259 @@
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python 2.7's test suite and incorporates all the latest updates.
+# Copyright 2009 Raymond Hettinger, released under the MIT License.
+# http://code.activestate.com/recipes/576693/
+try:
+ from thread import get_ident as _get_ident
+except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+except ImportError:
+ pass
+
+
+class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+ 'od.itervalues -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+ 'od.iteritems -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running={}):
+ 'od.__repr__() <==> repr(od)'
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
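The backport above threads a doubly linked list through self.__map to track
insertion order, so iteration, reversed(), and popitem() all honor it. A
minimal sketch of the observable behavior (keys are hypothetical; the same
calls work against the stdlib collections.OrderedDict)::

    od = OrderedDict()
    od['a'] = 1
    od['b'] = 2
    od['c'] = 3

    # Iteration follows insertion order, not hash order.
    assert list(od) == ['a', 'b', 'c']

    # Re-assigning an existing key keeps its original position.
    od['a'] = 10
    assert list(od) == ['a', 'b', 'c']

    # popitem() is LIFO by default, FIFO with last=False.
    assert od.popitem() == ('c', 3)
    assert od.popitem(last=False) == ('a', 10)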
diff --git a/python/requests/requests/packages/urllib3/packages/six.py b/python/requests/requests/packages/urllib3/packages/six.py
new file mode 100644
index 000000000..27d80112b
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/packages/six.py
@@ -0,0 +1,385 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+#Copyright (c) 2010-2011 Benjamin Peterson
+
+#Permission is hereby granted, free of charge, to any person obtaining a copy of
+#this software and associated documentation files (the "Software"), to deal in
+#the Software without restriction, including without limitation the rights to
+#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+#the Software, and to permit persons to whom the Software is furnished to do so,
+#subject to the following conditions:
+
+#The above copyright notice and this permission notice shall be included in all
+#copies or substantial portions of the Software.
+
+#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.2.0" # Revision 41c74fef2ded
+
+
+# True if we are running on Python 3.
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result)
+ # This is a bit ugly, but it avoids running this again.
+ delattr(tp, self.name)
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+
+class _MovedItems(types.ModuleType):
+ """Lazy loading of moved objects"""
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+del attr
+
+moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+
+ _iterkeys = "keys"
+ _itervalues = "values"
+ _iteritems = "items"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+
+ _iterkeys = "iterkeys"
+ _itervalues = "itervalues"
+ _iteritems = "iteritems"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ Iterator = object
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+
+
+def iterkeys(d):
+ """Return an iterator over the keys of a dictionary."""
+ return iter(getattr(d, _iterkeys)())
+
+def itervalues(d):
+ """Return an iterator over the values of a dictionary."""
+ return iter(getattr(d, _itervalues)())
+
+def iteritems(d):
+ """Return an iterator over the (key, value) pairs of a dictionary."""
+ return iter(getattr(d, _iteritems)())
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+ def u(s):
+ return s
+ if sys.version_info[1] <= 1:
+ def int2byte(i):
+ return bytes((i,))
+ else:
+ # This is about 2x faster than the implementation above on 3.2+
+ int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+else:
+ def b(s):
+ return s
+ def u(s):
+ return unicode(s, "unicode_escape")
+ int2byte = chr
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+ import builtins
+ exec_ = getattr(builtins, "exec")
+
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+ print_ = getattr(builtins, "print")
+ del builtins
+
+else:
+ def exec_(code, globs=None, locs=None):
+ """Execute code in a namespace."""
+ if globs is None:
+ frame = sys._getframe(1)
+ globs = frame.f_globals
+ if locs is None:
+ locs = frame.f_locals
+ del frame
+ elif locs is None:
+ locs = globs
+ exec("""exec code in globs, locs""")
+
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+ def print_(*args, **kwargs):
+ """The new-style print function."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("NewBase", (base,), {})
diff --git a/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 000000000..dd59a75fd
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,13 @@
+try:
+ # Python 3.2+
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ # Backport of the function from a pypi module
+ from backports.ssl_match_hostname import CertificateError, match_hostname
+ except ImportError:
+ # Our vendored copy
+ from ._implementation import CertificateError, match_hostname
+
+# Not needed, but documenting what we provide.
+__all__ = ('CertificateError', 'match_hostname')
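Callers never need to know which backend the fallback chain found; the two
names behave identically. A minimal sketch with a hand-built certificate
dict in the format returned by SSLSocket.getpeercert() (hostnames are
hypothetical)::

    from requests.packages.urllib3.packages.ssl_match_hostname import (
        CertificateError, match_hostname)

    cert = {'subjectAltName': (('DNS', 'example.com'),)}

    match_hostname(cert, 'example.com')   # returns None on success
    try:
        match_hostname(cert, 'other.test')
    except CertificateError as e:
        print(e)   # hostname 'other.test' doesn't match 'example.com'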
diff --git a/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 000000000..52f428733
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,105 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+
+import re
+
+__version__ = '3.4.0.2'
+
+class CertificateError(ValueError):
+ pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
diff --git a/python/requests/requests/packages/urllib3/poolmanager.py b/python/requests/requests/packages/urllib3/poolmanager.py
new file mode 100644
index 000000000..f13e673d1
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/poolmanager.py
@@ -0,0 +1,281 @@
+from __future__ import absolute_import
+import logging
+
+try: # Python 3
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import port_by_scheme
+from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
+from .request import RequestMethods
+from .util.url import parse_url
+from .util.retry import Retry
+
+
+__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
+
+
+pool_classes_by_scheme = {
+ 'http': HTTPConnectionPool,
+ 'https': HTTPSConnectionPool,
+}
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
+ 'ssl_version', 'ca_cert_dir')
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example::
+
+ >>> manager = PoolManager(num_pools=2)
+ >>> r = manager.request('GET', 'http://google.com/')
+ >>> r = manager.request('GET', 'http://google.com/mail')
+ >>> r = manager.request('GET', 'http://yahoo.com/')
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ proxy = None
+
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+ RequestMethods.__init__(self, headers)
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools,
+ dispose_func=lambda p: p.close())
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(self, scheme, host, port):
+ """
+ Create a new :class:`ConnectionPool` based on host, port and scheme.
+
+ This method is used to actually create the connection pools handed out
+ by :meth:`connection_from_url` and companion methods. It is intended
+ to be overridden for customization.
+ """
+ pool_cls = pool_classes_by_scheme[scheme]
+ kwargs = self.connection_pool_kw
+ if scheme == 'http':
+ kwargs = self.connection_pool_kw.copy()
+ for kw in SSL_KEYWORDS:
+ kwargs.pop(kw, None)
+
+ return pool_cls(host, port, **kwargs)
+
+ def clear(self):
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(self, host, port=None, scheme='http'):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``.
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ scheme = scheme or 'http'
+ port = port or port_by_scheme.get(scheme, 80)
+ pool_key = (scheme, host, port)
+
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ pool = self._new_pool(scheme, host, port)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url` but
+ doesn't pass any additional parameters to the
+ :class:`urllib3.connectionpool.ConnectionPool` constructor.
+
+ Additional parameters are taken from the :class:`.PoolManager`
+ constructor.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
+ with custom cross-host redirect logic and only sends the request-uri
+ portion of the ``url``.
+
+ The given ``url`` parameter must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ u = parse_url(url)
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ kw['assert_same_host'] = False
+ kw['redirect'] = False
+ if 'headers' not in kw:
+ kw['headers'] = self.headers
+
+ if self.proxy is not None and u.scheme == "http":
+ response = conn.urlopen(method, url, **kw)
+ else:
+ response = conn.urlopen(method, u.request_uri, **kw)
+
+ redirect_location = redirect and response.get_redirect_location()
+ if not redirect_location:
+ return response
+
+ # Support relative URLs for redirecting.
+ redirect_location = urljoin(url, redirect_location)
+
+ # RFC 7231, Section 6.4.4
+ if response.status == 303:
+ method = 'GET'
+
+ retries = kw.get('retries')
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise
+ return response
+
+ kw['retries'] = retries
+ kw['redirect'] = redirect
+
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+ """
+ Behaves just like :class:`PoolManager`, but sends all requests through
+ the defined proxy, using the CONNECT method for HTTPS URLs.
+
+ :param proxy_url:
+ The URL of the proxy to be used.
+
+ :param proxy_headers:
+        A dictionary containing headers that will be sent to the proxy. For
+        HTTP requests they are sent with each request, while for
+        HTTPS/CONNECT they are sent only once. Can be used for proxy
+        authentication.
+
+ Example:
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+ >>> r1 = proxy.request('GET', 'http://google.com/')
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+ >>> len(proxy.pools)
+ 1
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
+ >>> len(proxy.pools)
+ 3
+
+ """
+
+ def __init__(self, proxy_url, num_pools=10, headers=None,
+ proxy_headers=None, **connection_pool_kw):
+
+ if isinstance(proxy_url, HTTPConnectionPool):
+ proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
+ proxy_url.port)
+ proxy = parse_url(proxy_url)
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ self.proxy = proxy
+ self.proxy_headers = proxy_headers or {}
+
+ connection_pool_kw['_proxy'] = self.proxy
+ connection_pool_kw['_proxy_headers'] = self.proxy_headers
+
+ super(ProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw)
+
+ def connection_from_host(self, host, port=None, scheme='http'):
+ if scheme == "https":
+ return super(ProxyManager, self).connection_from_host(
+ host, port, scheme)
+
+ return super(ProxyManager, self).connection_from_host(
+ self.proxy.host, self.proxy.port, self.proxy.scheme)
+
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
+ headers_ = {'Accept': '*/*'}
+
+ netloc = parse_url(url).netloc
+ if netloc:
+ headers_['Host'] = netloc
+
+ if headers:
+ headers_.update(headers)
+ return headers_
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ u = parse_url(url)
+
+ if u.scheme == "http":
+ # For proxied HTTPS requests, httplib sets the necessary headers
+ # on the CONNECT to the proxy. For HTTP, we'll definitely
+ # need to set 'Host' at the very least.
+ headers = kw.get('headers', self.headers)
+ kw['headers'] = self._set_proxy_headers(url, headers)
+
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+ return ProxyManager(proxy_url=url, **kw)
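Pools are cached under a (scheme, host, port) key, so repeated requests to
the same origin share connections. A minimal usage sketch (URLs are
hypothetical; the package is importable as ``urllib3`` or via
requests.packages.urllib3)::

    import urllib3

    http = urllib3.PoolManager(num_pools=10, maxsize=4)

    # Both hit the single pool keyed by ('http', 'example.com', 80).
    r1 = http.request('GET', 'http://example.com/')
    r2 = http.request('GET', 'http://example.com/about')
    print(len(http.pools))  # 1

    # Same API, but everything is routed through a proxy.
    proxy = urllib3.proxy_from_url('http://localhost:3128/')
    r3 = proxy.request('GET', 'http://example.com/')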
diff --git a/python/requests/requests/packages/urllib3/request.py b/python/requests/requests/packages/urllib3/request.py
new file mode 100644
index 000000000..d5aa62d88
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/request.py
@@ -0,0 +1,151 @@
+from __future__ import absolute_import
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
+
+from .filepost import encode_multipart_formdata
+
+
+__all__ = ['RequestMethods']
+
+
+class RequestMethods(object):
+ """
+    Convenience mixin for classes that implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+    :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
+
+ def __init__(self, headers=None):
+ self.headers = headers or {}
+
+ def urlopen(self, method, url, body=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **kw): # Abstract
+        raise NotImplementedError("Classes extending RequestMethods must implement "
+                                  "their own ``urlopen`` method.")
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+ else:
+ return self.request_encode_body(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+
+ def request_encode_url(self, method, url, fields=None, headers=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += '?' + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(self, method, url, fields=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+        Multipart encoding must be used when posting files, and it's reasonably
+        safe to use it at other times too. However, it may break request
+        signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+        tuple) is optional but recommended to best mimic the behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': {}}
+
+ if fields:
+ if 'body' in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one.")
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+ else:
+ body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+ extra_kw['body'] = body
+ extra_kw['headers'] = {'Content-Type': content_type}
+
+ extra_kw['headers'].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
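In practice the split is: DELETE/GET/HEAD/OPTIONS encode ``fields`` into the
query string, everything else into the body (multipart by default). A
minimal sketch (URLs hypothetical)::

    import urllib3

    http = urllib3.PoolManager()

    # GET -> request_encode_url(): fields become ?q=...
    r = http.request('GET', 'http://example.com/search',
                     fields={'q': 'urllib3'})

    # POST -> request_encode_body(): multipart/form-data by default,
    # including file-style (filename, data, mimetype) tuples.
    r = http.request('POST', 'http://example.com/upload',
                     fields={
                         'name': 'value',
                         'upload': ('report.txt', 'file contents',
                                    'text/plain'),
                     })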
diff --git a/python/requests/requests/packages/urllib3/response.py b/python/requests/requests/packages/urllib3/response.py
new file mode 100644
index 000000000..8f2a1b5c2
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/response.py
@@ -0,0 +1,514 @@
+from __future__ import absolute_import
+from contextlib import contextmanager
+import zlib
+import io
+from socket import timeout as SocketTimeout
+from socket import error as SocketError
+
+from ._collections import HTTPHeaderDict
+from .exceptions import (
+ ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
+)
+from .packages.six import string_types as basestring, binary_type, PY3
+from .packages.six.moves import http_client as httplib
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed, is_response_to_head
+
+
+class DeflateDecoder(object):
+
+ def __init__(self):
+ self._first_try = True
+ self._data = binary_type()
+ self._obj = zlib.decompressobj()
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+
+ if not self._first_try:
+ return self._obj.decompress(data)
+
+ self._data += data
+ try:
+ return self._obj.decompress(data)
+ except zlib.error:
+ self._first_try = False
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+ try:
+ return self.decompress(self._data)
+ finally:
+ self._data = None
+
+
+class GzipDecoder(object):
+
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+ return self._obj.decompress(data)
+
+
+def _get_decoder(mode):
+ if mode == 'gzip':
+ return GzipDecoder()
+
+ return DeflateDecoder()
+
+
+class HTTPResponse(io.IOBase):
+ """
+ HTTP Response container.
+
+    Backwards-compatible with httplib's HTTPResponse, but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+    :param decode_content:
+        If True, the body will be decoded according to the response's
+        'content-encoding' header (e.g. 'gzip' and 'deflate'); if False,
+        the raw data is returned instead.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+ """
+
+ CONTENT_DECODERS = ['gzip', 'deflate']
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ def __init__(self, body='', headers=None, status=0, version=0, reason=None,
+ strict=0, preload_content=True, decode_content=True,
+ original_response=None, pool=None, connection=None):
+
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+ self.decode_content = decode_content
+
+ self._decoder = None
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+ self._fp_bytes_read = 0
+
+ if body and isinstance(body, (basestring, binary_type)):
+ self._body = body
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, 'read'):
+ self._fp = body
+
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get('transfer-encoding', '').lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # If requested, preload the body.
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get('location')
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ @property
+ def data(self):
+        # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ def tell(self):
+ """
+ Obtain the number of bytes pulled over the wire so far. May differ from
+ the amount of content returned by :meth:``HTTPResponse.read`` if bytes
+ are encoded on the wire (e.g, compressed).
+ """
+ return self._fp_bytes_read
+
+ def _init_decoder(self):
+ """
+        Set up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ try:
+ if decode_content and self._decoder:
+ data = self._decoder.decompress(data)
+ except (IOError, zlib.error) as e:
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding, e)
+
+ if flush_decoder and decode_content:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b'')
+ return buf + self._decoder.flush()
+
+ return b''
+
+ @contextmanager
+ def _error_catcher(self):
+ """
+ Catch low-level python exceptions, instead re-raising urllib3
+ variants, so that low-level exceptions are not leaked in the
+ high-level api.
+
+ On exit, release the connection back to the pool.
+ """
+ try:
+ try:
+ yield
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if 'read operation timed out' not in str(e): # Defensive:
+ # This shouldn't happen but just in case we're missing an edge
+ # case, let's avoid swallowing SSL errors.
+ raise
+
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except (HTTPException, SocketError) as e:
+ # This includes IncompleteRead.
+ raise ProtocolError('Connection broken: %r' % e, e)
+
+ except Exception:
+ # The response may not be closed but we're not going to use it anymore
+ # so close it now to ensure that the connection is released back to the pool.
+ if self._original_response and not self._original_response.isclosed():
+ self._original_response.close()
+
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection is not None:
+ self._connection.close()
+
+ raise
+ finally:
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+            returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ self._init_decoder()
+ if decode_content is None:
+ decode_content = self.decode_content
+
+ if self._fp is None:
+ return
+
+ flush_decoder = False
+ data = None
+
+ with self._error_catcher():
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
+ flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt)
+ if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+
+ if data:
+ self._fp_bytes_read += len(data)
+
+ data = self._decode(data, decode_content, flush_decoder)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ def stream(self, amt=2**16, decode_content=None):
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to
+            this much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ if self.chunked:
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+ headers = r.msg
+
+ if not isinstance(headers, HTTPHeaderDict):
+ if PY3: # Python 3
+ headers = HTTPHeaderDict(headers.items())
+ else: # Python 2
+ headers = HTTPHeaderDict.from_httplib(headers)
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, 'strict', 0)
+ resp = ResponseCls(body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw)
+ return resp
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ # Overrides from io.IOBase
+ def close(self):
+ if not self.closed:
+ self._fp.close()
+
+ @property
+ def closed(self):
+ if self._fp is None:
+ return True
+ elif hasattr(self._fp, 'closed'):
+ return self._fp.closed
+ elif hasattr(self._fp, 'isclosed'): # Python 2
+ return self._fp.isclosed()
+ else:
+ return True
+
+ def fileno(self):
+ if self._fp is None:
+ raise IOError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise IOError("The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor")
+
+ def flush(self):
+ if self._fp is not None and hasattr(self._fp, 'flush'):
+ return self._fp.flush()
+
+ def readable(self):
+ # This method is required for `io` module compatibility.
+ return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[:len(temp)] = temp
+ return len(temp)
+
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b';', 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked(
+ "Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing.")
+
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ with self._error_catcher():
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ decoded = self._decode(chunk, decode_content=decode_content,
+ flush_decoder=False)
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # lets defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b'\r\n':
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
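For large bodies the memory-friendly path is ``preload_content=False`` plus
``stream()``, which also handles chunked transfer-encoding via
read_chunked(). A minimal sketch (URL and consumer are hypothetical)::

    import urllib3

    http = urllib3.PoolManager()
    r = http.request('GET', 'http://example.com/big-download',
                     preload_content=False)

    for chunk in r.stream(2**16):   # decoded chunks of up to 64 KiB
        handle(chunk)               # hypothetical consumer

    r.release_conn()                # return the socket to the pool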
diff --git a/python/requests/requests/packages/urllib3/util/__init__.py b/python/requests/requests/packages/urllib3/util/__init__.py
new file mode 100644
index 000000000..c6c6243cf
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/__init__.py
@@ -0,0 +1,44 @@
+from __future__ import absolute_import
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import make_headers
+from .response import is_fp_closed
+from .ssl_ import (
+ SSLContext,
+ HAS_SNI,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .timeout import (
+ current_time,
+ Timeout,
+)
+
+from .retry import Retry
+from .url import (
+ get_host,
+ parse_url,
+ split_first,
+ Url,
+)
+
+__all__ = (
+ 'HAS_SNI',
+ 'SSLContext',
+ 'Retry',
+ 'Timeout',
+ 'Url',
+ 'assert_fingerprint',
+ 'current_time',
+ 'is_connection_dropped',
+ 'is_fp_closed',
+ 'get_host',
+ 'parse_url',
+ 'make_headers',
+ 'resolve_cert_reqs',
+ 'resolve_ssl_version',
+ 'split_first',
+ 'ssl_wrap_socket',
+)
diff --git a/python/requests/requests/packages/urllib3/util/connection.py b/python/requests/requests/packages/urllib3/util/connection.py
new file mode 100644
index 000000000..01a4812f2
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/connection.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+import socket
+try:
+ from select import poll, POLLIN
+except ImportError: # `poll` doesn't exist on OSX and other platforms
+ poll = False
+ try:
+ from select import select
+ except ImportError: # `select` doesn't exist on AppEngine.
+ select = False
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`httplib.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, 'sock', False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+
+ if not poll:
+ if not select: # Platform-specific: AppEngine
+ return False
+
+ try:
+ return select([sock], [], [], 0.0)[0]
+ except socket.error:
+ return True
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, socket_options=None):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+    A host of '' or port 0 tells the OS to use the default.
+ """
+
+ host, port = address
+ if host.startswith('['):
+ host = host.strip('[]')
+ err = None
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ # This is the only addition urllib3 makes to this function.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as e:
+ err = e
+ if sock is not None:
+ sock.close()
+ sock = None
+
+ if err is not None:
+ raise err
+
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
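The only delta from the stdlib version is ``socket_options``, applied before
connect(). A minimal sketch that disables Nagle's algorithm on the new
socket (host is hypothetical)::

    import socket
    from requests.packages.urllib3.util.connection import create_connection

    sock = create_connection(
        ('example.com', 80),
        timeout=3.0,
        socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
    )
    sock.sendall(b'HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n')
    print(sock.recv(128))
    sock.close()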
diff --git a/python/requests/requests/packages/urllib3/util/request.py b/python/requests/requests/packages/urllib3/util/request.py
new file mode 100644
index 000000000..73779315f
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/request.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+from base64 import b64encode
+
+from ..packages.six import b
+
+ACCEPT_ENCODING = 'gzip,deflate'
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None, proxy_basic_auth=None, disable_cache=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(b(basic_auth)).decode('utf-8')
+
+ if proxy_basic_auth:
+ headers['proxy-authorization'] = 'Basic ' + \
+ b64encode(b(proxy_basic_auth)).decode('utf-8')
+
+ if disable_cache:
+ headers['cache-control'] = 'no-cache'
+
+ return headers
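Beyond the doctest in the docstring, the auth helpers take care of the
base64 step. A minimal sketch (credentials are hypothetical)::

    from requests.packages.urllib3.util.request import make_headers

    headers = make_headers(
        keep_alive=True,
        accept_encoding=['gzip', 'deflate'],
        basic_auth='user:secret',
    )
    # {'connection': 'keep-alive',
    #  'accept-encoding': 'gzip,deflate',
    #  'authorization': 'Basic dXNlcjpzZWNyZXQ='}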
diff --git a/python/requests/requests/packages/urllib3/util/response.py b/python/requests/requests/packages/urllib3/util/response.py
new file mode 100644
index 000000000..bc7232720
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/response.py
@@ -0,0 +1,74 @@
+from __future__ import absolute_import
+from ..packages.six.moves import http_client as httplib
+
+from ..exceptions import HeaderParsingError
+
+
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+ """
+ Asserts whether all headers have been successfully parsed.
+ Extracts encountered errors from the result of parsing headers.
+
+ Only works on Python 3.
+
+ :param headers: Headers to verify.
+ :type headers: `httplib.HTTPMessage`.
+
+ :raises urllib3.exceptions.HeaderParsingError:
+ If parsing errors are found.
+ """
+
+ # This will fail silently if we pass in the wrong kind of parameter.
+ # To make debugging easier add an explicit check.
+ if not isinstance(headers, httplib.HTTPMessage):
+        raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
+ type(headers)))
+
+ defects = getattr(headers, 'defects', None)
+ get_payload = getattr(headers, 'get_payload', None)
+
+ unparsed_data = None
+ if get_payload: # Platform-specific: Python 3.
+ unparsed_data = get_payload()
+
+ if defects or unparsed_data:
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+ """
+    Checks whether the request that produced a response was a HEAD request.
+    Handles the quirks of AppEngine.
+
+    :param response:
+    :type response: :class:`httplib.HTTPResponse`
+ """
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ method = response._method
+ if isinstance(method, int): # Platform-specific: Appengine
+ return method == 3
+ return method.upper() == 'HEAD'
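is_fp_closed() probes ``.closed`` first and falls back to the
HTTPResponse-style ``.fp is None`` check; anything exposing neither signal
is rejected. A minimal sketch::

    from io import BytesIO
    from requests.packages.urllib3.util.response import is_fp_closed

    fp = BytesIO(b'data')
    print(is_fp_closed(fp))   # False: .closed reports open
    fp.close()
    print(is_fp_closed(fp))   # True

    class Opaque(object):     # hypothetical object with neither signal
        pass

    try:
        is_fp_closed(Opaque())
    except ValueError as e:
        print(e)              # Unable to determine whether fp is closed.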
diff --git a/python/requests/requests/packages/urllib3/util/retry.py b/python/requests/requests/packages/urllib3/util/retry.py
new file mode 100644
index 000000000..03a01249d
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/retry.py
@@ -0,0 +1,286 @@
+from __future__ import absolute_import
+import time
+import logging
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+class Retry(object):
+ """ Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts. It's a good idea to set this to some sensibly-high value to
+ account for unexpected edge cases and avoid infinite retry loops.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ so we assume the server has not begun processing the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is an HTTP response with a status code of 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param iterable method_whitelist:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+ :param iterable status_forcelist:
+ A set of HTTP status codes that we should force a retry on.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts. urllib3 will sleep for::
+
+ {backoff factor} * (2 ^ ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.BACKOFF_MAX`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether to raise a MaxRetryError when the
+ number of redirects is exhausted, rather than returning a response with a
+ response code in the 3xx range.
+ """
+
+ DEFAULT_METHOD_WHITELIST = frozenset([
+ 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(self, total=10, connect=None, read=None, redirect=None,
+ method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
+ backoff_factor=0, raise_on_redirect=True, _observed_errors=0):
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.method_whitelist = method_whitelist
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self._observed_errors = _observed_errors # TODO: use .history instead?
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect, read=self.read, redirect=self.redirect,
+ method_whitelist=self.method_whitelist,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ _observed_errors=self._observed_errors,
+ )
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """ Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
+ return new_retries
+
+ def get_backoff_time(self):
+ """ Formula for computing the current backoff
+
+ :rtype: float
+ """
+ if self._observed_errors <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
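+
+ # Editor's note -- a worked example of the formula above, assuming a
+ # Retry constructed with backoff_factor=0.1:
+ #
+ #     _observed_errors:    1    2    3    4    5
+ #     get_backoff_time():  0    0.2  0.4  0.8  1.6
+ #
+ # The first observed error never sleeps; after that the delay doubles
+ # until it is capped at BACKOFF_MAX (120 seconds).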
+
+ def sleep(self):
+ """ Sleep between retry attempts using an exponential backoff.
+
+ By default, the backoff factor is 0 and this method will return
+ immediately.
+ """
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def _is_connection_error(self, err):
+ """ Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def is_forced_retry(self, method, status_code):
+ """ Is this method/status code retryable? (Based on method/codes whitelists)
+ """
+ if self.method_whitelist and method.upper() not in self.method_whitelist:
+ return False
+
+ return self.status_forcelist and status_code in self.status_forcelist
+
+ def is_exhausted(self):
+ """ Are we out of retries? """
+ retry_counts = (self.total, self.connect, self.read, self.redirect)
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
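+ # Editor's note -- ``filter(None, ...)`` drops counters that are 0, None,
+ # or False; a counter set to 0 therefore only registers as exhausted once
+ # increment() has pushed it to -1.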
+
+ def increment(self, method=None, url=None, response=None, error=None,
+ _pool=None, _stacktrace=None):
+ """ Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ _observed_errors = self._observed_errors
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ cause = 'unknown'
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+ _observed_errors += 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+ _observed_errors += 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = 'too many redirects'
+
+ else:
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the whitelist.
+ _observed_errors += 1
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ cause = ResponseError.SPECIFIC_ERROR.format(
+ status_code=response.status)
+
+ new_retry = self.new(
+ total=total,
+ connect=connect, read=read, redirect=redirect,
+ _observed_errors=_observed_errors)
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
+
+ return new_retry
+
+ def __repr__(self):
+ return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
+ 'read={self.read}, redirect={self.redirect})').format(
+ cls=type(self), self=self)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
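+
+# Editor's sketch -- how the class is typically wired up; PoolManager lives
+# in the package root, and the URL and counts here are illustrative:
+#
+#     from requests.packages.urllib3 import PoolManager
+#
+#     retries = Retry(total=5, connect=2, backoff_factor=0.2,
+#                     status_forcelist={500, 502, 503})
+#     http = PoolManager(retries=retries)
+#     response = http.request('GET', 'http://example.com/')
+#
+# Each failure path calls increment(), which returns a *new* Retry with
+# decremented counters and raises MaxRetryError once is_exhausted() is True.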
diff --git a/python/requests/requests/packages/urllib3/util/ssl_.py b/python/requests/requests/packages/urllib3/util/ssl_.py
new file mode 100644
index 000000000..67f83441e
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/ssl_.py
@@ -0,0 +1,317 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+
+
+SSLContext = None
+HAS_SNI = False
+create_default_context = None
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {
+ 32: md5,
+ 40: sha1,
+ 64: sha256,
+}
+
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
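+
+# Editor's note -- the bitwise OR accumulates every differing bit, so the
+# comparison takes the same time wherever the digests differ:
+#
+#     >>> _const_compare_digest_backport(b'\x01\x02', b'\x01\x02')
+#     True
+#     >>> _const_compare_digest_backport(b'\x01\x02', b'\x01\x03')
+#     False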
+
+
+_const_compare_digest = getattr(hmac, 'compare_digest',
+ _const_compare_digest_backport)
+
+
+try: # Test for SSL features
+ import ssl
+ from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_CIPHERS = (
+ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+ '!eNULL:!MD5'
+)
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+ import sys
+
+ class SSLContext(object): # Platform-specific: Python 2 & 3.1
+ supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
+ (3, 2) <= sys.version_info)
+
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, cafile=None, capath=None):
+ self.ca_certs = cafile
+
+ if capath is not None:
+ raise SSLError("CA directories not supported in older Pythons")
+
+ def set_ciphers(self, cipher_suite):
+ if not self.supports_set_ciphers:
+ raise TypeError(
+ 'Your version of Python does not support setting '
+ 'a custom cipher suite. Please upgrade to Python '
+ '2.7, 3.2, or later if you need this functionality.'
+ )
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None):
+ warnings.warn(
+ 'A true SSLContext object is not available. This prevents '
+ 'urllib3 from configuring SSL appropriately and may cause '
+ 'certain SSL connections to fail. For more information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#insecureplatformwarning.',
+ InsecurePlatformWarning
+ )
+ kwargs = {
+ 'keyfile': self.keyfile,
+ 'certfile': self.certfile,
+ 'ca_certs': self.ca_certs,
+ 'cert_reqs': self.verify_mode,
+ 'ssl_version': self.protocol,
+ }
+ if self.supports_set_ciphers: # Platform-specific: Python 2.7+
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+ else: # Platform-specific: Python 2.6
+ return wrap_socket(socket, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
+ """
+
+ fingerprint = fingerprint.replace(':', '').lower()
+ digest_length = len(fingerprint)
+ hashfunc = HASHFUNC_MAP.get(digest_length)
+ if not hashfunc:
+ raise SSLError(
+ 'Fingerprint of invalid length: {0}'.format(fingerprint))
+
+ # We need encode() here for py32; works on py2 and py3.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
+ .format(fingerprint, hexlify(cert_digest)))
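+
+# Editor's note -- a self-contained check (the 'certificate' bytes below are
+# obviously fake; real callers pass sock.getpeercert(binary_form=True)):
+#
+#     >>> from binascii import hexlify
+#     >>> from hashlib import sha256
+#     >>> cert = b'not-a-real-certificate'
+#     >>> assert_fingerprint(cert, hexlify(sha256(cert).digest()).decode())
+#     >>> # no exception raised -> fingerprints matched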
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_NONE`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_NONE
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'CERT_' + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+ Like resolve_cert_reqs, but resolves the argument to a numeric
+ :mod:`ssl` protocol constant. Defaults to :data:`ssl.PROTOCOL_SSLv23`.
+ """
+ if candidate is None:
+ return PROTOCOL_SSLv23
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'PROTOCOL_' + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(ssl_version=None, cert_reqs=None,
+ options=None, ciphers=None):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+ context.verify_mode = cert_reqs
+ if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+ return context
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ciphers=None, ssl_context=None,
+ ca_cert_dir=None):
+ """
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support. This is not
+ supported on Python 2.6 as the ssl module does not support it.
+ :param ca_cert_dir:
+ A directory containing CA certificates in multiple separate files, as
+ supported by OpenSSL's -CApath flag or the capath argument to
+ SSLContext.load_verify_locations().
+ """
+ context = ssl_context
+ if context is None:
+ context = create_urllib3_context(ssl_version, cert_reqs,
+ ciphers=ciphers)
+
+ if ca_certs or ca_cert_dir:
+ try:
+ context.load_verify_locations(ca_certs, ca_cert_dir)
+ except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
+ raise SSLError(e)
+ raise
+
+ if certfile:
+ context.load_cert_chain(certfile, keyfile)
+ if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ 'An HTTPS request has been made, but the SNI (Subject Name '
+ 'Indication) extension to TLS is not available on this platform. '
+ 'This may cause the server to present an incorrect TLS '
+ 'certificate, which can cause validation failures. For more '
+ 'information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#snimissingwarning.',
+ SNIMissingWarning
+ )
+ return context.wrap_socket(sock)
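+
+# Editor's sketch -- wrapping a plain TCP socket by hand; the host and CA
+# bundle path are illustrative:
+#
+#     import socket
+#     sock = socket.create_connection(('example.com', 443))
+#     ssl_sock = ssl_wrap_socket(
+#         sock,
+#         cert_reqs=resolve_cert_reqs('REQUIRED'),
+#         ca_certs='/etc/ssl/certs/ca-certificates.crt',
+#         server_hostname='example.com')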
diff --git a/python/requests/requests/packages/urllib3/util/timeout.py b/python/requests/requests/packages/urllib3/util/timeout.py
new file mode 100644
index 000000000..ff62f4764
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/timeout.py
@@ -0,0 +1,242 @@
+from __future__ import absolute_import
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+import time
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+def current_time():
+ """
+ Retrieve the current time. This function is mocked out in unit testing.
+ """
+ return time.time()
+
+
+class Timeout(object):
+ """ Timeout configuration.
+
+ Timeouts can be defined as a default for a pool::
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``::
+
+ no_timeout = Timeout(connect=None, read=None)
+ response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: integer, float, or None
+
+ :param connect:
+ The maximum amount of time to wait for a connection attempt to a server
+ to succeed. Omitting the parameter will default the connect timeout to
+ the system default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: integer, float, or None
+
+ :param read:
+ The maximum amount of time to wait between consecutive
+ read operations for a response from the server. Omitting
+ the parameter will default the read timeout to the system
+ default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout.
+
+ :type read: integer, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, 'connect')
+ self._read = self._validate_timeout(read, 'read')
+ self.total = self._validate_timeout(total, 'total')
+ self._start_connect = None
+
+ def __str__(self):
+ return '%s(connect=%r, read=%r, total=%r)' % (
+ type(self).__name__, self._connect, self._read, self.total)
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """ Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+ used to specify in error messages.
+ :return: The validated and casted version of the given value.
+ :raises ValueError: If the type is not an integer or a float, or if it
+ is a numeric value less than zero.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ try:
+ if value < 0:
+ raise ValueError("Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than 0." % (name, value))
+ except TypeError: # Python 3
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """ Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+ connect(), and recv() socket requests. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """ Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read,
+ total=self.total)
+
+ def start_connect(self):
+ """ Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """ Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError("Can't get connect duration for timer "
+ "that has not started.")
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """ Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """ Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+ time taken by the connect timeout. If the connection time has not been
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (self.total is not None and
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(),
+ self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
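+
+# Editor's note -- how ``total`` interacts with the per-phase values; the
+# numbers are illustrative:
+#
+#     t = Timeout(connect=5.0, read=30.0, total=10.0)
+#     t.connect_timeout          # min(5.0, 10.0) -> 5.0
+#     t.start_connect()
+#     # ... suppose the connect phase takes ~2 seconds ...
+#     t.read_timeout             # max(0, min(10.0 - 2.0, 30.0)) -> ~8.0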
diff --git a/python/requests/requests/packages/urllib3/util/url.py b/python/requests/requests/packages/urllib3/util/url.py
new file mode 100644
index 000000000..e996204a0
--- /dev/null
+++ b/python/requests/requests/packages/urllib3/util/url.py
@@ -0,0 +1,217 @@
+from __future__ import absolute_import
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+
+
+url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
+
+
+class Url(namedtuple('Url', url_attrs)):
+ """
+ Datastructure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`.
+ """
+ __slots__ = ()
+
+ def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
+ query=None, fragment=None):
+ if path and not path.startswith('/'):
+ path = '/' + path
+ return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
+ query, fragment)
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or '/'
+
+ if self.query is not None:
+ uri += '?' + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return '%s:%d' % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url passed to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = ''
+
+ # We use "is not None" we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + '://'
+ if auth is not None:
+ url += auth + '@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += ':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += '?' + query
+ if fragment is not None:
+ url += '#' + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+
+def split_first(s, delims):
+ """
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+ Scales linearly with the number of delims; not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, '', None
+
+ return s[:min_idx], s[min_idx + 1:], min_delim
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+
+ # While this code has overlap with stdlib's urlparse, it is much
+ # simplified for our needs and less annoying.
+ # Additionally, this implementation does silly things to be optimal
+ # on CPython.
+
+ if not url:
+ # Empty
+ return Url()
+
+ scheme = None
+ auth = None
+ host = None
+ port = None
+ path = None
+ fragment = None
+ query = None
+
+ # Scheme
+ if '://' in url:
+ scheme, url = url.split('://', 1)
+
+ # Find the earliest Authority Terminator
+ # (http://tools.ietf.org/html/rfc3986#section-3.2)
+ url, path_, delim = split_first(url, ['/', '?', '#'])
+
+ if delim:
+ # Reassemble the path
+ path = delim + path_
+
+ # Auth
+ if '@' in url:
+ # Last '@' denotes end of auth part
+ auth, url = url.rsplit('@', 1)
+
+ # IPv6
+ if url and url[0] == '[':
+ host, url = url.split(']', 1)
+ host += ']'
+
+ # Port
+ if ':' in url:
+ _host, port = url.split(':', 1)
+
+ if not host:
+ host = _host
+
+ if port:
+ # If given, ports must be integers.
+ if not port.isdigit():
+ raise LocationParseError(url)
+ port = int(port)
+ else:
+ # Blank ports are cool, too. (rfc3986#section-3.2.3)
+ port = None
+
+ elif not host and url:
+ host = url
+
+ if not path:
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+ # Fragment
+ if '#' in path:
+ path, fragment = path.split('#', 1)
+
+ # Query
+ if '?' in path:
+ path, query = path.split('?', 1)
+
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`.parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or 'http', p.hostname, p.port
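+
+# Editor's note -- a quick round trip through the helpers above:
+#
+#     >>> get_host('http://google.com:80/mail/')
+#     ('http', 'google.com', 80)
+#     >>> parse_url('google.com:80').url
+#     'google.com:80'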
diff --git a/python/requests/requests/sessions.py b/python/requests/requests/sessions.py
new file mode 100644
index 000000000..9eaa36ae4
--- /dev/null
+++ b/python/requests/requests/sessions.py
@@ -0,0 +1,680 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.session
+~~~~~~~~~~~~~~~~
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+
+"""
+import os
+from collections import Mapping
+from datetime import datetime
+
+from .auth import _basic_auth_str
+from .compat import cookielib, OrderedDict, urljoin, urlparse
+from .cookies import (
+ cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
+from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
+from .hooks import default_hooks, dispatch_hook
+from .utils import to_key_val_list, default_headers, to_native_string
+from .exceptions import (
+ TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
+from .packages.urllib3._collections import RecentlyUsedContainer
+from .structures import CaseInsensitiveDict
+
+from .adapters import HTTPAdapter
+
+from .utils import (
+ requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+ get_auth_from_url
+)
+
+from .status_codes import codes
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import REDIRECT_STATI
+
+REDIRECT_CACHE_SIZE = 1000
+
+
+def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
+ """
+ Determines appropriate setting for a given request, taking into account the
+ explicit setting on that request, and the setting in the session. If a
+ setting is a dictionary, they will be merged together using `dict_class`
+ """
+
+ if session_setting is None:
+ return request_setting
+
+ if request_setting is None:
+ return session_setting
+
+ # Bypass if not a dictionary (e.g. verify)
+ if not (
+ isinstance(session_setting, Mapping) and
+ isinstance(request_setting, Mapping)
+ ):
+ return request_setting
+
+ merged_setting = dict_class(to_key_val_list(session_setting))
+ merged_setting.update(to_key_val_list(request_setting))
+
+ # Remove keys that are set to None. Extract keys first to avoid altering
+ # the dictionary during iteration.
+ none_keys = [k for (k, v) in merged_setting.items() if v is None]
+ for key in none_keys:
+ del merged_setting[key]
+
+ return merged_setting
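+
+# Editor's note -- e.g. merging per-request headers over session headers:
+#
+#     merge_setting({'X-A': '1', 'X-B': None},   # request setting
+#                   {'X-B': '2', 'X-C': '3'})    # session setting
+#
+# yields {'X-A': '1', 'X-C': '3'}: the request wins on conflicts, and a
+# request value of None removes the session's key entirely.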
+
+
+def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
+ """
+ Properly merges both requests and session hooks.
+
+ This is necessary because when request_hooks == {'response': []}, the
+ merge breaks Session hooks entirely.
+ """
+ if session_hooks is None or session_hooks.get('response') == []:
+ return request_hooks
+
+ if request_hooks is None or request_hooks.get('response') == []:
+ return session_hooks
+
+ return merge_setting(request_hooks, session_hooks, dict_class)
+
+
+class SessionRedirectMixin(object):
+ def resolve_redirects(self, resp, req, stream=False, timeout=None,
+ verify=True, cert=None, proxies=None, **adapter_kwargs):
+ """Receives a Response. Returns a generator of Responses."""
+
+ i = 0
+ hist = [] # keep track of history
+
+ while resp.is_redirect:
+ prepared_request = req.copy()
+
+ if i > 0:
+ # Update history and keep track of redirects.
+ hist.append(resp)
+ new_hist = list(hist)
+ resp.history = new_hist
+
+ try:
+ resp.content # Consume socket so it can be released
+ except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+ resp.raw.read(decode_content=False)
+
+ if i >= self.max_redirects:
+ raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
+
+ # Release the connection back into the pool.
+ resp.close()
+
+ url = resp.headers['location']
+ method = req.method
+
+ # Handle redirection without scheme (see: RFC 1808 Section 4)
+ if url.startswith('//'):
+ parsed_rurl = urlparse(resp.url)
+ url = '%s:%s' % (parsed_rurl.scheme, url)
+
+ # The scheme should be lower case...
+ parsed = urlparse(url)
+ url = parsed.geturl()
+
+ # Facilitate relative 'location' headers, as allowed by RFC 7231.
+ # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+ # Compliant with RFC3986, we percent encode the url.
+ if not parsed.netloc:
+ url = urljoin(resp.url, requote_uri(url))
+ else:
+ url = requote_uri(url)
+
+ prepared_request.url = to_native_string(url)
+ # Cache the url, unless it redirects to itself.
+ if resp.is_permanent_redirect and req.url != prepared_request.url:
+ self.redirect_cache[req.url] = prepared_request.url
+
+ # http://tools.ietf.org/html/rfc7231#section-6.4.4
+ if (resp.status_code == codes.see_other and
+ method != 'HEAD'):
+ method = 'GET'
+
+ # Do what the browsers do, despite standards...
+ # First, turn 302s into GETs.
+ if resp.status_code == codes.found and method != 'HEAD':
+ method = 'GET'
+
+ # Second, if a POST is responded to with a 301, turn it into a GET.
+ # This bizarre behaviour is explained in Issue 1704.
+ if resp.status_code == codes.moved and method == 'POST':
+ method = 'GET'
+
+ prepared_request.method = method
+
+ # https://github.com/kennethreitz/requests/issues/1084
+ if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
+ if 'Content-Length' in prepared_request.headers:
+ del prepared_request.headers['Content-Length']
+
+ prepared_request.body = None
+
+ headers = prepared_request.headers
+ try:
+ del headers['Cookie']
+ except KeyError:
+ pass
+
+ # Extract any cookies sent on the response to the cookiejar
+ # in the new request. Because we've mutated our copied prepared
+ # request, use the old one that we haven't yet touched.
+ extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
+ prepared_request._cookies.update(self.cookies)
+ prepared_request.prepare_cookies(prepared_request._cookies)
+
+ # Rebuild auth and proxy information.
+ proxies = self.rebuild_proxies(prepared_request, proxies)
+ self.rebuild_auth(prepared_request, resp)
+
+ # Override the original request.
+ req = prepared_request
+
+ resp = self.send(
+ req,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ allow_redirects=False,
+ **adapter_kwargs
+ )
+
+ extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
+
+ i += 1
+ yield resp
+
+ def rebuild_auth(self, prepared_request, response):
+ """
+ When being redirected we may want to strip authentication from the
+ request to avoid leaking credentials. This method intelligently removes
+ and reapplies authentication where possible to avoid credential loss.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+
+ if 'Authorization' in headers:
+ # If we get redirected to a new host, we should strip out any
+ # authentication headers.
+ original_parsed = urlparse(response.request.url)
+ redirect_parsed = urlparse(url)
+
+ if (original_parsed.hostname != redirect_parsed.hostname):
+ del headers['Authorization']
+
+ # .netrc might have more auth for us on our new host.
+ new_auth = get_netrc_auth(url) if self.trust_env else None
+ if new_auth is not None:
+ prepared_request.prepare_auth(new_auth)
+
+ return
+
+ def rebuild_proxies(self, prepared_request, proxies):
+ """
+ This method re-evaluates the proxy configuration by considering the
+ environment variables. If we are redirected to a URL covered by
+ NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+ proxy keys for this URL (in case they were stripped by a previous
+ redirect).
+
+ This method also replaces the Proxy-Authorization header where
+ necessary.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+ scheme = urlparse(url).scheme
+ new_proxies = proxies.copy() if proxies is not None else {}
+
+ if self.trust_env and not should_bypass_proxies(url):
+ environ_proxies = get_environ_proxies(url)
+
+ proxy = environ_proxies.get(scheme)
+
+ if proxy:
+ new_proxies.setdefault(scheme, environ_proxies[scheme])
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
+
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> s.get('http://httpbin.org/get')
+ <Response [200]>
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ ...     s.get('http://httpbin.org/get')
+ <Response [200]>
+ """
+
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ 'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
+ 'max_redirects',
+ ]
+
+ def __init__(self):
+
+ #: A case-insensitive dictionary of headers to be sent on each
+ #: :class:`Request <Request>` sent from this
+ #: :class:`Session <Session>`.
+ self.headers = default_headers()
+
+ #: Default Authentication tuple or object to attach to
+ #: :class:`Request <Request>`.
+ self.auth = None
+
+ #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+ #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+ #: be used on each :class:`Request <Request>`.
+ self.proxies = {}
+
+ #: Event-handling hooks.
+ self.hooks = default_hooks()
+
+ #: Dictionary of querystring data to attach to each
+ #: :class:`Request <Request>`. The dictionary values may be lists for
+ #: representing multivalued query parameters.
+ self.params = {}
+
+ #: Stream response content default.
+ self.stream = False
+
+ #: SSL Verification default.
+ self.verify = True
+
+ #: SSL certificate default.
+ self.cert = None
+
+ #: Maximum number of redirects allowed. If the request exceeds this
+ #: limit, a :class:`TooManyRedirects` exception is raised.
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT
+
+ #: Trust environment settings for proxy configuration, default
+ #: authentication and similar.
+ self.trust_env = True
+
+ #: A CookieJar containing all currently outstanding cookies set on this
+ #: session. By default it is a
+ #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
+ #: may be any other ``cookielib.CookieJar`` compatible object.
+ self.cookies = cookiejar_from_dict({})
+
+ # Default connection adapters.
+ self.adapters = OrderedDict()
+ self.mount('https://', HTTPAdapter())
+ self.mount('http://', HTTPAdapter())
+
+ # Only store 1000 redirects to prevent using infinite memory
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def prepare_request(self, request):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+ transmission and returns it. The :class:`PreparedRequest` has settings
+ merged from the :class:`Request <Request>` instance and those of the
+ :class:`Session`.
+
+ :param request: :class:`Request` instance to prepare with this
+ session's settings.
+ """
+ cookies = request.cookies or {}
+
+ # Bootstrap CookieJar.
+ if not isinstance(cookies, cookielib.CookieJar):
+ cookies = cookiejar_from_dict(cookies)
+
+ # Merge with session cookies
+ merged_cookies = merge_cookies(
+ merge_cookies(RequestsCookieJar(), self.cookies), cookies)
+
+ # Set environment's basic authentication if not explicitly set.
+ auth = request.auth
+ if self.trust_env and not auth and not self.auth:
+ auth = get_netrc_auth(request.url)
+
+ p = PreparedRequest()
+ p.prepare(
+ method=request.method.upper(),
+ url=request.url,
+ files=request.files,
+ data=request.data,
+ json=request.json,
+ headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+ params=merge_setting(request.params, self.params),
+ auth=merge_setting(auth, self.auth),
+ cookies=merged_cookies,
+ hooks=merge_hooks(request.hooks, self.hooks),
+ )
+ return p
+
+ def request(self, method, url,
+ params=None,
+ data=None,
+ headers=None,
+ cookies=None,
+ files=None,
+ auth=None,
+ timeout=None,
+ allow_redirects=True,
+ proxies=None,
+ hooks=None,
+ stream=None,
+ verify=None,
+ cert=None,
+ json=None):
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
+ :param data: (optional) Dictionary, bytes, or file-like object to send
+ in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol or protocol and
+ hostname to the URL of the proxy.
+ :param stream: (optional) whether to immediately download the response
+ content. Defaults to ``False``.
+ :param verify: (optional) whether the SSL cert will be verified.
+ A CA_BUNDLE path can also be provided. Defaults to ``True``.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
+ """
+ # Create the Request.
+ req = Request(
+ method=method.upper(),
+ url=url,
+ headers=headers,
+ files=files,
+ data=data or {},
+ json=json,
+ params=params or {},
+ auth=auth,
+ cookies=cookies,
+ hooks=hooks,
+ )
+ prep = self.prepare_request(req)
+
+ proxies = proxies or {}
+
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
+
+ # Send the request.
+ send_kwargs = {
+ 'timeout': timeout,
+ 'allow_redirects': allow_redirects,
+ }
+ send_kwargs.update(settings)
+ resp = self.send(prep, **send_kwargs)
+
+ return resp
+
+ def get(self, url, **kwargs):
+ """Sends a GET request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('GET', url, **kwargs)
+
+ def options(self, url, **kwargs):
+ """Sends a OPTIONS request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('OPTIONS', url, **kwargs)
+
+ def head(self, url, **kwargs):
+ """Sends a HEAD request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return self.request('HEAD', url, **kwargs)
+
+ def post(self, url, data=None, json=None, **kwargs):
+ """Sends a POST request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('POST', url, data=data, json=json, **kwargs)
+
+ def put(self, url, data=None, **kwargs):
+ """Sends a PUT request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('PUT', url, data=data, **kwargs)
+
+ def patch(self, url, data=None, **kwargs):
+ """Sends a PATCH request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('PATCH', url, data=data, **kwargs)
+
+ def delete(self, url, **kwargs):
+ """Sends a DELETE request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('DELETE', url, **kwargs)
+
+ def send(self, request, **kwargs):
+ """Send a given PreparedRequest."""
+ # Set defaults that the hooks can utilize to ensure they always have
+ # the correct parameters to reproduce the previous request.
+ kwargs.setdefault('stream', self.stream)
+ kwargs.setdefault('verify', self.verify)
+ kwargs.setdefault('cert', self.cert)
+ kwargs.setdefault('proxies', self.proxies)
+
+ # It's possible that users might accidentally send a Request object.
+ # Guard against that specific failure case.
+ if not isinstance(request, PreparedRequest):
+ raise ValueError('You can only send PreparedRequests.')
+
+ checked_urls = set()
+ while request.url in self.redirect_cache:
+ checked_urls.add(request.url)
+ new_url = self.redirect_cache.get(request.url)
+ if new_url in checked_urls:
+ break
+ request.url = new_url
+
+ # Set up variables needed for resolve_redirects and dispatching of hooks
+ allow_redirects = kwargs.pop('allow_redirects', True)
+ stream = kwargs.get('stream')
+ hooks = request.hooks
+
+ # Get the appropriate adapter to use
+ adapter = self.get_adapter(url=request.url)
+
+ # Start time (approximately) of the request
+ start = datetime.utcnow()
+
+ # Send the request
+ r = adapter.send(request, **kwargs)
+
+ # Total elapsed time of the request (approximately)
+ r.elapsed = datetime.utcnow() - start
+
+ # Response manipulation hooks
+ r = dispatch_hook('response', hooks, r, **kwargs)
+
+ # Persist cookies
+ if r.history:
+
+ # If the hooks create history then we want those cookies too
+ for resp in r.history:
+ extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
+
+ extract_cookies_to_jar(self.cookies, request, r.raw)
+
+ # Redirect resolving generator.
+ gen = self.resolve_redirects(r, request, **kwargs)
+
+ # Resolve redirects if allowed.
+ history = [resp for resp in gen] if allow_redirects else []
+
+ # Shuffle things around if there's history.
+ if history:
+ # Insert the first (original) request at the start
+ history.insert(0, r)
+ # Get the last request made
+ r = history.pop()
+ r.history = history
+
+ if not stream:
+ r.content
+
+ return r
+
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ """Check the environment and merge it with some settings."""
+ # Gather clues from the surrounding environment.
+ if self.trust_env:
+ # Set environment's proxies.
+ env_proxies = get_environ_proxies(url) or {}
+ for (k, v) in env_proxies.items():
+ proxies.setdefault(k, v)
+
+ # Look for requests environment configuration and be compatible
+ # with cURL.
+ if verify is True or verify is None:
+ verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
+ os.environ.get('CURL_CA_BUNDLE'))
+
+ # Merge all the kwargs.
+ proxies = merge_setting(proxies, self.proxies)
+ stream = merge_setting(stream, self.stream)
+ verify = merge_setting(verify, self.verify)
+ cert = merge_setting(cert, self.cert)
+
+ return {'verify': verify, 'proxies': proxies, 'stream': stream,
+ 'cert': cert}
+
+ def get_adapter(self, url):
+ """Returns the appropriate connection adapter for the given URL."""
+ for (prefix, adapter) in self.adapters.items():
+
+ if url.lower().startswith(prefix):
+ return adapter
+
+ # Nothing matches :-/
+ raise InvalidSchema("No connection adapters were found for '%s'" % url)
+
+ def close(self):
+ """Closes all adapters and as such the session"""
+ for v in self.adapters.values():
+ v.close()
+
+ def mount(self, prefix, adapter):
+ """Registers a connection adapter to a prefix.
+
+ Adapters are sorted in descending order by key length."""
+
+ self.adapters[prefix] = adapter
+ keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
+
+ for key in keys_to_move:
+ self.adapters[key] = self.adapters.pop(key)
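+
+ # Editor's note -- because mount() keeps longer prefixes ahead of shorter
+ # ones, get_adapter() returns the most specific match. E.g. with a
+ # hypothetical CustomAdapter:
+ #
+ #     s = Session()
+ #     s.mount('http://example.com/', CustomAdapter())
+ #     # URLs under http://example.com/ use CustomAdapter; everything
+ #     # else falls through to the default HTTPAdapter.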
+
+ def __getstate__(self):
+ state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+ state['redirect_cache'] = dict(self.redirect_cache)
+ return state
+
+ def __setstate__(self, state):
+ redirect_cache = state.pop('redirect_cache', {})
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+ for redirect, to in redirect_cache.items():
+ self.redirect_cache[redirect] = to
+
+
+def session():
+ """Returns a :class:`Session` for context-management."""
+
+ return Session()
diff --git a/python/requests/requests/status_codes.py b/python/requests/requests/status_codes.py
new file mode 100644
index 000000000..a852574a4
--- /dev/null
+++ b/python/requests/requests/status_codes.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+from .structures import LookupDict
+
+_codes = {
+
+ # Informational.
+ 100: ('continue',),
+ 101: ('switching_protocols',),
+ 102: ('processing',),
+ 103: ('checkpoint',),
+ 122: ('uri_too_long', 'request_uri_too_long'),
+ 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
+ 201: ('created',),
+ 202: ('accepted',),
+ 203: ('non_authoritative_info', 'non_authoritative_information'),
+ 204: ('no_content',),
+ 205: ('reset_content', 'reset'),
+ 206: ('partial_content', 'partial'),
+ 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
+ 208: ('already_reported',),
+ 226: ('im_used',),
+
+ # Redirection.
+ 300: ('multiple_choices',),
+ 301: ('moved_permanently', 'moved', '\\o-'),
+ 302: ('found',),
+ 303: ('see_other', 'other'),
+ 304: ('not_modified',),
+ 305: ('use_proxy',),
+ 306: ('switch_proxy',),
+ 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
+ 308: ('permanent_redirect',
+ 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
+
+ # Client Error.
+ 400: ('bad_request', 'bad'),
+ 401: ('unauthorized',),
+ 402: ('payment_required', 'payment'),
+ 403: ('forbidden',),
+ 404: ('not_found', '-o-'),
+ 405: ('method_not_allowed', 'not_allowed'),
+ 406: ('not_acceptable',),
+ 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
+ 408: ('request_timeout', 'timeout'),
+ 409: ('conflict',),
+ 410: ('gone',),
+ 411: ('length_required',),
+ 412: ('precondition_failed', 'precondition'),
+ 413: ('request_entity_too_large',),
+ 414: ('request_uri_too_large',),
+ 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
+ 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
+ 417: ('expectation_failed',),
+ 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
+ 422: ('unprocessable_entity', 'unprocessable'),
+ 423: ('locked',),
+ 424: ('failed_dependency', 'dependency'),
+ 425: ('unordered_collection', 'unordered'),
+ 426: ('upgrade_required', 'upgrade'),
+ 428: ('precondition_required', 'precondition'),
+ 429: ('too_many_requests', 'too_many'),
+ 431: ('header_fields_too_large', 'fields_too_large'),
+ 444: ('no_response', 'none'),
+ 449: ('retry_with', 'retry'),
+ 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
+ 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
+ 499: ('client_closed_request',),
+
+ # Server Error.
+ 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
+ 501: ('not_implemented',),
+ 502: ('bad_gateway',),
+ 503: ('service_unavailable', 'unavailable'),
+ 504: ('gateway_timeout',),
+ 505: ('http_version_not_supported', 'http_version'),
+ 506: ('variant_also_negotiates',),
+ 507: ('insufficient_storage',),
+ 509: ('bandwidth_limit_exceeded', 'bandwidth'),
+ 510: ('not_extended',),
+ 511: ('network_authentication_required', 'network_auth', 'network_authentication'),
+}
+
+codes = LookupDict(name='status_codes')
+
+for code, titles in _codes.items():
+ for title in titles:
+ setattr(codes, title, code)
+ if not title.startswith('\\'):
+ setattr(codes, title.upper(), code)
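+
+# Editor's note -- the loop above registers every alias, in both cases, as an
+# attribute of ``codes``; LookupDict also allows item access:
+#
+#     >>> codes.ok, codes.OK, codes['ok']
+#     (200, 200, 200)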
diff --git a/python/requests/requests/structures.py b/python/requests/requests/structures.py
new file mode 100644
index 000000000..3e5f2faa2
--- /dev/null
+++ b/python/requests/requests/structures.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.structures
+~~~~~~~~~~~~~~~~~~~
+
+Data structures that power Requests.
+
+"""
+
+import collections
+
+
+class CaseInsensitiveDict(collections.MutableMapping):
+ """
+ A case-insensitive ``dict``-like object.
+
+ Implements all methods and operations of
+ ``collections.MutableMapping`` as well as dict's ``copy``. Also
+ provides ``lower_items``.
+
+ All keys are expected to be strings. The structure remembers the
+ case of the last key to be set, and ``iter(instance)``,
+ ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
+    will contain case-sensitive keys. However, querying and contains
+    testing are case insensitive::
+
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ cid['aCCEPT'] == 'application/json' # True
+ list(cid) == ['Accept'] # True
+
+ For example, ``headers['content-encoding']`` will return the
+ value of a ``'Content-Encoding'`` response header, regardless
+ of how the header name was originally stored.
+
+ If the constructor, ``.update``, or equality comparison
+ operations are given keys that have equal ``.lower()``s, the
+ behavior is undefined.
+
+ """
+ def __init__(self, data=None, **kwargs):
+ self._store = dict()
+ if data is None:
+ data = {}
+ self.update(data, **kwargs)
+
+ def __setitem__(self, key, value):
+ # Use the lowercased key for lookups, but store the actual
+ # key alongside the value.
+ self._store[key.lower()] = (key, value)
+
+ def __getitem__(self, key):
+ return self._store[key.lower()][1]
+
+ def __delitem__(self, key):
+ del self._store[key.lower()]
+
+ def __iter__(self):
+ return (casedkey for casedkey, mappedvalue in self._store.values())
+
+ def __len__(self):
+ return len(self._store)
+
+ def lower_items(self):
+ """Like iteritems(), but with all lowercase keys."""
+ return (
+ (lowerkey, keyval[1])
+ for (lowerkey, keyval)
+ in self._store.items()
+ )
+
+ def __eq__(self, other):
+ if isinstance(other, collections.Mapping):
+ other = CaseInsensitiveDict(other)
+ else:
+ return NotImplemented
+ # Compare insensitively
+ return dict(self.lower_items()) == dict(other.lower_items())
+
+ # Copy is required
+ def copy(self):
+ return CaseInsensitiveDict(self._store.values())
+
+ def __repr__(self):
+ return str(dict(self.items()))
+
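+# Editorial sketch (not in the original source): lookups ignore case while
+# iteration keeps the spelling of the last key set; ``copy`` and
+# ``lower_items`` behave accordingly:
+#
+#     >>> cid = CaseInsensitiveDict()
+#     >>> cid['Accept'] = 'application/json'
+#     >>> cid['aCCEPT']
+#     'application/json'
+#     >>> list(cid.lower_items())
+#     [('accept', 'application/json')]
+#     >>> cid.copy() == cid
+#     True
+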
+class LookupDict(dict):
+ """Dictionary lookup object."""
+
+ def __init__(self, name=None):
+ self.name = name
+ super(LookupDict, self).__init__()
+
+ def __repr__(self):
+ return '<lookup \'%s\'>' % (self.name)
+
+ def __getitem__(self, key):
+ # We allow fall-through here, so values default to None
+
+ return self.__dict__.get(key, None)
+
+ def get(self, key, default=None):
+ return self.__dict__.get(key, default)
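+
+# Editorial sketch (not in the original source): LookupDict keeps entries in
+# the instance __dict__, so item access falls through to None instead of
+# raising KeyError:
+#
+#     >>> d = LookupDict(name='example')
+#     >>> d.answer = 42
+#     >>> d['answer']
+#     42
+#     >>> d['missing'] is None
+#     True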
diff --git a/python/requests/requests/utils.py b/python/requests/requests/utils.py
new file mode 100644
index 000000000..c5c3fd01d
--- /dev/null
+++ b/python/requests/requests/utils.py
@@ -0,0 +1,721 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+
+"""
+
+import cgi
+import codecs
+import collections
+import io
+import os
+import platform
+import re
+import sys
+import socket
+import struct
+import warnings
+
+from . import __version__
+from . import certs
+from .compat import parse_http_list as _parse_list_header
+from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
+ builtin_str, getproxies, proxy_bypass, urlunparse,
+ basestring)
+from .cookies import RequestsCookieJar, cookiejar_from_dict
+from .structures import CaseInsensitiveDict
+from .exceptions import InvalidURL, FileModeWarning
+
+_hush_pyflakes = (RequestsCookieJar,)
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+
+def dict_to_sequence(d):
+ """Returns an internal sequence dictionary update."""
+
+ if hasattr(d, 'items'):
+ d = d.items()
+
+ return d
+
+
+def super_len(o):
+ total_length = 0
+ current_position = 0
+
+ if hasattr(o, '__len__'):
+ total_length = len(o)
+
+ elif hasattr(o, 'len'):
+ total_length = o.len
+
+ elif hasattr(o, 'getvalue'):
+ # e.g. BytesIO, cStringIO.StringIO
+ total_length = len(o.getvalue())
+
+ elif hasattr(o, 'fileno'):
+ try:
+ fileno = o.fileno()
+ except io.UnsupportedOperation:
+ pass
+ else:
+ total_length = os.fstat(fileno).st_size
+
+ # Having used fstat to determine the file length, we need to
+ # confirm that this file was opened up in binary mode.
+ if 'b' not in o.mode:
+ warnings.warn((
+ "Requests has determined the content-length for this "
+ "request using the binary size of the file: however, the "
+ "file has been opened in text mode (i.e. without the 'b' "
+ "flag in the mode). This may lead to an incorrect "
+ "content-length. In Requests 3.0, support will be removed "
+ "for files in text mode."),
+ FileModeWarning
+ )
+
+ if hasattr(o, 'tell'):
+ current_position = o.tell()
+
+ return max(0, total_length - current_position)
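+
+# Editorial sketch (not in the original source): super_len reports the
+# *remaining* length, subtracting the current position from the total size:
+#
+#     >>> import io
+#     >>> b = io.BytesIO(b'0123456789')
+#     >>> super_len(b)
+#     10
+#     >>> _ = b.read(4)
+#     >>> super_len(b)
+#     6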
+
+
+def get_netrc_auth(url, raise_errors=False):
+ """Returns the Requests tuple auth for a given url from netrc."""
+
+ try:
+ from netrc import netrc, NetrcParseError
+
+ netrc_path = None
+
+ for f in NETRC_FILES:
+ try:
+ loc = os.path.expanduser('~/{0}'.format(f))
+ except KeyError:
+ # os.path.expanduser can fail when $HOME is undefined and
+ # getpwuid fails. See http://bugs.python.org/issue20164 &
+ # https://github.com/kennethreitz/requests/issues/1846
+ return
+
+ if os.path.exists(loc):
+ netrc_path = loc
+ break
+
+ # Abort early if there isn't one.
+ if netrc_path is None:
+ return
+
+ ri = urlparse(url)
+
+        # Strip port numbers from netloc. This weird ``if...encode`` dance is
+ # used for Python 3.2, which doesn't support unicode literals.
+ splitstr = b':'
+ if isinstance(url, str):
+ splitstr = splitstr.decode('ascii')
+ host = ri.netloc.split(splitstr)[0]
+
+ try:
+ _netrc = netrc(netrc_path).authenticators(host)
+ if _netrc:
+ # Return with login / password
+ login_i = (0 if _netrc[0] else 1)
+ return (_netrc[login_i], _netrc[2])
+ except (NetrcParseError, IOError):
+ # If there was a parsing error or a permissions issue reading the file,
+ # we'll just skip netrc auth unless explicitly asked to raise errors.
+ if raise_errors:
+ raise
+
+ # AppEngine hackiness.
+ except (ImportError, AttributeError):
+ pass
+
+
+def guess_filename(obj):
+ """Tries to guess the filename of the given object."""
+ name = getattr(obj, 'name', None)
+ if (name and isinstance(name, basestring) and name[0] != '<' and
+ name[-1] != '>'):
+ return os.path.basename(name)
+
+
+def from_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. Unless it can not be represented as such, return an
+ OrderedDict, e.g.,
+
+ ::
+
+ >>> from_key_val_list([('key', 'val')])
+ OrderedDict([('key', 'val')])
+ >>> from_key_val_list('string')
+ ValueError: need more than 1 value to unpack
+ >>> from_key_val_list({'key': 'val'})
+ OrderedDict([('key', 'val')])
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ return OrderedDict(value)
+
+
+def to_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. If it can be, return a list of tuples, e.g.,
+
+ ::
+
+ >>> to_key_val_list([('key', 'val')])
+ [('key', 'val')]
+ >>> to_key_val_list({'key': 'val'})
+ [('key', 'val')]
+ >>> to_key_val_list('string')
+ ValueError: cannot encode objects that are not 2-tuples.
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ if isinstance(value, collections.Mapping):
+ value = value.items()
+
+ return list(value)
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_list_header(value):
+ """Parse lists as described by RFC 2068 Section 2.
+
+ In particular, parse comma-separated lists where the elements of
+ the list may include quoted-strings. A quoted-string could
+ contain a comma. A non-quoted string could have quotes in the
+ middle. Quotes are removed automatically after parsing.
+
+ It basically works like :func:`parse_set_header` just that items
+ may appear multiple times and case sensitivity is preserved.
+
+ The return value is a standard :class:`list`:
+
+ >>> parse_list_header('token, "quoted value"')
+ ['token', 'quoted value']
+
+ To create a header from the :class:`list` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a list header.
+ :return: :class:`list`
+ """
+ result = []
+ for item in _parse_list_header(value):
+ if item[:1] == item[-1:] == '"':
+ item = unquote_header_value(item[1:-1])
+ result.append(item)
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_dict_header(value):
+ """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+    convert them into a Python dict:
+
+ >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+ >>> type(d) is dict
+ True
+ >>> sorted(d.items())
+ [('bar', 'as well'), ('foo', 'is a fish')]
+
+ If there is no value for a key it will be `None`:
+
+ >>> parse_dict_header('key_without_value')
+ {'key_without_value': None}
+
+ To create a header from the :class:`dict` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a dict header.
+ :return: :class:`dict`
+ """
+ result = {}
+ for item in _parse_list_header(value):
+ if '=' not in item:
+ result[item] = None
+ continue
+ name, value = item.split('=', 1)
+ if value[:1] == value[-1:] == '"':
+ value = unquote_header_value(value[1:-1])
+ result[name] = value
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def unquote_header_value(value, is_filename=False):
+ r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
+    This does not perform real RFC unquoting; instead it mirrors what
+    browsers actually do when quoting.
+
+ :param value: the header value to unquote.
+ """
+ if value and value[0] == value[-1] == '"':
+ # this is not the real unquoting, but fixing this so that the
+ # RFC is met will result in bugs with internet explorer and
+ # probably some other browsers as well. IE for example is
+ # uploading files with "C:\foo\bar.txt" as filename
+ value = value[1:-1]
+
+ # if this is a filename and the starting characters look like
+ # a UNC path, then just return the value without quotes. Using the
+ # replace sequence below on a UNC path has the effect of turning
+ # the leading double slash into a single slash and then
+ # _fix_ie_filename() doesn't work correctly. See #458.
+ if not is_filename or value[:2] != '\\\\':
+ return value.replace('\\\\', '\\').replace('\\"', '"')
+ return value
+
+
+def dict_from_cookiejar(cj):
+ """Returns a key/value dictionary from a CookieJar.
+
+ :param cj: CookieJar object to extract cookies from.
+ """
+
+ cookie_dict = {}
+
+ for cookie in cj:
+ cookie_dict[cookie.name] = cookie.value
+
+ return cookie_dict
+
+
+def add_dict_to_cookiejar(cj, cookie_dict):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cj: CookieJar to insert cookies into.
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ """
+
+ cj2 = cookiejar_from_dict(cookie_dict)
+ cj.update(cj2)
+ return cj
+
+
+def get_encodings_from_content(content):
+ """Returns encodings from given content string.
+
+ :param content: bytestring to extract encodings from.
+ """
+ warnings.warn((
+ 'In requests 3.0, get_encodings_from_content will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
+ pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+ xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
+
+ return (charset_re.findall(content) +
+ pragma_re.findall(content) +
+ xml_re.findall(content))
+
+
+def get_encoding_from_headers(headers):
+ """Returns encodings from given HTTP Header Dict.
+
+ :param headers: dictionary to extract encoding from.
+ """
+
+ content_type = headers.get('content-type')
+
+ if not content_type:
+ return None
+
+ content_type, params = cgi.parse_header(content_type)
+
+ if 'charset' in params:
+ return params['charset'].strip("'\"")
+
+ if 'text' in content_type:
+ return 'ISO-8859-1'
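+
+# Editorial sketch (not in the original source):
+#
+#     >>> get_encoding_from_headers({'content-type': 'text/html; charset=utf-8'})
+#     'utf-8'
+#     >>> get_encoding_from_headers({'content-type': 'text/html'})
+#     'ISO-8859-1'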
+
+
+def stream_decode_response_unicode(iterator, r):
+ """Stream decodes a iterator."""
+
+ if r.encoding is None:
+ for item in iterator:
+ yield item
+ return
+
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+ for chunk in iterator:
+ rv = decoder.decode(chunk)
+ if rv:
+ yield rv
+ rv = decoder.decode(b'', final=True)
+ if rv:
+ yield rv
+
+
+def iter_slices(string, slice_length):
+ """Iterate over slices of a string."""
+ pos = 0
+ while pos < len(string):
+ yield string[pos:pos + slice_length]
+ pos += slice_length
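+
+# Editorial sketch (not in the original source):
+#
+#     >>> list(iter_slices('abcdef', 4))
+#     ['abcd', 'ef']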
+
+
+def get_unicode_from_response(r):
+ """Returns the requested content back in unicode.
+
+ :param r: Response object to get unicode content from.
+
+    Encodings are tried in order:
+
+    1. the charset from the content-type header
+    2. a fallback decode that replaces all undecodable characters
+
+ """
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ tried_encodings = []
+
+ # Try charset from content-type
+ encoding = get_encoding_from_headers(r.headers)
+
+ if encoding:
+ try:
+ return str(r.content, encoding)
+ except UnicodeError:
+ tried_encodings.append(encoding)
+
+ # Fall back:
+ try:
+ return str(r.content, encoding, errors='replace')
+ except TypeError:
+ return r.content
+
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ + "0123456789-._~")
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
+ """
+ parts = uri.split('%')
+ for i in range(1, len(parts)):
+ h = parts[i][0:2]
+ if len(h) == 2 and h.isalnum():
+ try:
+ c = chr(int(h, 16))
+ except ValueError:
+ raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
+
+ if c in UNRESERVED_SET:
+ parts[i] = c + parts[i][2:]
+ else:
+ parts[i] = '%' + parts[i]
+ else:
+ parts[i] = '%' + parts[i]
+ return ''.join(parts)
+
+
+def requote_uri(uri):
+ """Re-quote the given URI.
+
+ This function passes the given URI through an unquote/quote cycle to
+ ensure that it is fully and consistently quoted.
+ """
+ safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
+ safe_without_percent = "!#$&'()*+,/:;=?@[]~"
+ try:
+ # Unquote only the unreserved characters
+ # Then quote only illegal characters (do not quote reserved,
+ # unreserved, or '%')
+ return quote(unquote_unreserved(uri), safe=safe_with_percent)
+ except InvalidURL:
+ # We couldn't unquote the given URI, so let's try quoting it, but
+ # there may be unquoted '%'s in the URI. We need to make sure they're
+ # properly quoted so they do not cause issues elsewhere.
+ return quote(uri, safe=safe_without_percent)
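+
+# Editorial sketch (not in the original source): unquote_unreserved decodes
+# only unreserved characters, and requote_uri then escapes anything illegal:
+#
+#     >>> unquote_unreserved('http://example.com/%7Euser/%2Fpath')
+#     'http://example.com/~user/%2Fpath'
+#     >>> requote_uri('http://example.com/a b')
+#     'http://example.com/a%20b'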
+
+
+def address_in_network(ip, net):
+ """
+    This function allows you to check if an IP belongs to a network subnet.
+ Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
+ returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
+ """
+ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
+ netaddr, bits = net.split('/')
+ netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
+ network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
+ return (ipaddr & netmask) == (network & netmask)
+
+
+def dotted_netmask(mask):
+ """
+ Converts mask from /xx format to xxx.xxx.xxx.xxx
+ Example: if mask is 24 function returns 255.255.255.0
+ """
+ bits = 0xffffffff ^ (1 << 32 - mask) - 1
+ return socket.inet_ntoa(struct.pack('>I', bits))
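+
+# Editorial sketch (not in the original source): together the two helpers
+# above answer CIDR membership questions:
+#
+#     >>> dotted_netmask(24)
+#     '255.255.255.0'
+#     >>> address_in_network('192.168.1.1', '192.168.1.0/24')
+#     True
+#     >>> address_in_network('192.168.1.1', '192.168.100.0/24')
+#     False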
+
+
+def is_ipv4_address(string_ip):
+ try:
+ socket.inet_aton(string_ip)
+ except socket.error:
+ return False
+ return True
+
+
+def is_valid_cidr(string_network):
+ """Very simple check of the cidr format in no_proxy variable"""
+ if string_network.count('/') == 1:
+ try:
+ mask = int(string_network.split('/')[1])
+ except ValueError:
+ return False
+
+ if mask < 1 or mask > 32:
+ return False
+
+ try:
+ socket.inet_aton(string_network.split('/')[0])
+ except socket.error:
+ return False
+ else:
+ return False
+ return True
+
+
+def should_bypass_proxies(url):
+ """
+ Returns whether we should bypass proxies or not.
+ """
+ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
+
+ # First check whether no_proxy is defined. If it is, check that the URL
+ # we're getting isn't in the no_proxy list.
+ no_proxy = get_proxy('no_proxy')
+ netloc = urlparse(url).netloc
+
+ if no_proxy:
+ # We need to check whether we match here. We need to see if we match
+ # the end of the netloc, both with and without the port.
+ no_proxy = (
+ host for host in no_proxy.replace(' ', '').split(',') if host
+ )
+
+ ip = netloc.split(':')[0]
+ if is_ipv4_address(ip):
+ for proxy_ip in no_proxy:
+ if is_valid_cidr(proxy_ip):
+ if address_in_network(ip, proxy_ip):
+ return True
+ else:
+ for host in no_proxy:
+ if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
+ # The URL does match something in no_proxy, so we don't want
+ # to apply the proxies on this URL.
+ return True
+
+ # If the system proxy settings indicate that this URL should be bypassed,
+ # don't proxy.
+ # The proxy_bypass function is incredibly buggy on OS X in early versions
+ # of Python 2.6, so allow this call to fail. Only catch the specific
+ # exceptions we've seen, though: this call failing in other ways can reveal
+ # legitimate problems.
+ try:
+ bypass = proxy_bypass(netloc)
+ except (TypeError, socket.gaierror):
+ bypass = False
+
+ if bypass:
+ return True
+
+ return False
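+
+# Editorial sketch (not in the original source, environment-dependent): a
+# host listed in no_proxy is matched by suffix, so subdomains bypass too:
+#
+#     >>> os.environ['no_proxy'] = 'example.com'
+#     >>> should_bypass_proxies('http://www.example.com/')
+#     True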
+
+def get_environ_proxies(url):
+ """Return a dict of environment proxies."""
+ if should_bypass_proxies(url):
+ return {}
+ else:
+ return getproxies()
+
+def select_proxy(url, proxies):
+ """Select a proxy for the url, if applicable.
+
+    :param url: The url being requested
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ """
+ proxies = proxies or {}
+ urlparts = urlparse(url)
+ proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname)
+ if proxy is None:
+ proxy = proxies.get(urlparts.scheme)
+ return proxy
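+
+# Editorial sketch (not in the original source): a scheme://host entry wins
+# over a bare scheme entry:
+#
+#     >>> proxies = {'http': 'http://proxy:3128',
+#     ...            'http://example.com': 'http://special:3128'}
+#     >>> select_proxy('http://example.com/path', proxies)
+#     'http://special:3128'
+#     >>> select_proxy('http://other.com/', proxies)
+#     'http://proxy:3128'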
+
+def default_user_agent(name="python-requests"):
+ """Return a string representing the default user agent."""
+ return '%s/%s' % (name, __version__)
+
+
+def default_headers():
+ return CaseInsensitiveDict({
+ 'User-Agent': default_user_agent(),
+ 'Accept-Encoding': ', '.join(('gzip', 'deflate')),
+ 'Accept': '*/*',
+ 'Connection': 'keep-alive',
+ })
+
+
+def parse_header_links(value):
+ """Return a dict of parsed link headers proxies.
+
+ i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+ """
+
+ links = []
+
+ replace_chars = " '\""
+
+ for val in re.split(", *<", value):
+ try:
+ url, params = val.split(";", 1)
+ except ValueError:
+ url, params = val, ''
+
+ link = {}
+
+ link["url"] = url.strip("<> '\"")
+
+ for param in params.split(";"):
+ try:
+ key, value = param.split("=")
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
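+
+# Editorial sketch (not in the original source):
+#
+#     >>> parse_header_links('<http://example.com/2>; rel="next"')
+#     [{'url': 'http://example.com/2', 'rel': 'next'}]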
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and from their location and count
+ # determine the encoding. Also detect a BOM, if present.
+ sample = data[:4]
+    # Note: codecs.BOM32_BE is a legacy 2-byte UTF-16 alias; the 4-byte
+    # UTF-32 constant is needed for this comparison.
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+ return 'utf-32' # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return 'utf-8-sig' # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return 'utf-16' # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return 'utf-8'
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return 'utf-16-be'
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return 'utf-16-le'
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return 'utf-32-be'
+ if sample[1:] == _null3:
+ return 'utf-32-le'
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
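+
+# Editorial sketch (not in the original source): the null-byte pattern of
+# the first four bytes distinguishes the UTF families, since JSON must
+# begin with two ASCII characters:
+#
+#     >>> guess_json_utf(b'{"a": 1}')
+#     'utf-8'
+#     >>> guess_json_utf('{}'.encode('utf-16-le'))
+#     'utf-16-le'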
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+    """Given a URL that may or may not have a scheme, prepend the given scheme.
+    Does not replace a present scheme with the one provided as an argument."""
+ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
+
+ # urlparse is a finicky beast, and sometimes decides that there isn't a
+ # netloc present. Assume that it's being over-cautious, and switch netloc
+ # and path if urlparse decided there was no netloc.
+ if not netloc:
+ netloc, path = path, netloc
+
+ return urlunparse((scheme, netloc, path, params, query, fragment))
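+
+# Editorial sketch (not in the original source):
+#
+#     >>> prepend_scheme_if_needed('example.com/path', 'http')
+#     'http://example.com/path'
+#     >>> prepend_scheme_if_needed('https://example.com', 'http')
+#     'https://example.com'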
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username,password."""
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ('', '')
+
+ return auth
+
+
+def to_native_string(string, encoding='ascii'):
+ """
+ Given a string object, regardless of type, returns a representation of that
+ string in the native string type, encoding and decoding where necessary.
+ This assumes ASCII unless told otherwise.
+ """
+ out = None
+
+ if isinstance(string, builtin_str):
+ out = string
+ else:
+ if is_py2:
+ out = string.encode(encoding)
+ else:
+ out = string.decode(encoding)
+
+ return out
+
+
+def urldefragauth(url):
+ """
+ Given a url remove the fragment and the authentication part
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit('@', 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ''))
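+
+# Editorial sketch (not in the original source):
+#
+#     >>> urldefragauth('http://user:pass@example.com/path#frag')
+#     'http://example.com/path'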
diff --git a/python/requests/requirements.txt b/python/requests/requirements.txt
new file mode 100644
index 000000000..ad5da7615
--- /dev/null
+++ b/python/requests/requirements.txt
@@ -0,0 +1,6 @@
+py==1.4.30
+pytest==2.8.1
+pytest-cov==2.1.0
+pytest-httpbin==0.0.7
+httpbin==0.4.0
+wheel
diff --git a/python/requests/setup.cfg b/python/requests/setup.cfg
new file mode 100644
index 000000000..6c71b612d
--- /dev/null
+++ b/python/requests/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/requests/setup.py b/python/requests/setup.py
new file mode 100755
index 000000000..b7ed12ba2
--- /dev/null
+++ b/python/requests/setup.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+import os
+import re
+import sys
+
+from codecs import open
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
+ sys.exit()
+
+packages = [
+ 'requests',
+ 'requests.packages',
+ 'requests.packages.chardet',
+ 'requests.packages.urllib3',
+ 'requests.packages.urllib3.packages',
+ 'requests.packages.urllib3.contrib',
+ 'requests.packages.urllib3.util',
+ 'requests.packages.urllib3.packages.ssl_match_hostname',
+]
+
+requires = []
+
+version = ''
+with open('requests/__init__.py', 'r') as fd:
+ version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+with open('README.rst', 'r', 'utf-8') as f:
+ readme = f.read()
+with open('HISTORY.rst', 'r', 'utf-8') as f:
+ history = f.read()
+
+setup(
+ name='requests',
+ version=version,
+ description='Python HTTP for Humans.',
+ long_description=readme + '\n\n' + history,
+ author='Kenneth Reitz',
+ author_email='me@kennethreitz.com',
+ url='http://python-requests.org',
+ packages=packages,
+ package_data={'': ['LICENSE', 'NOTICE'], 'requests': ['*.pem']},
+ package_dir={'requests': 'requests'},
+ include_package_data=True,
+ install_requires=requires,
+ license='Apache 2.0',
+ zip_safe=False,
+ classifiers=(
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ),
+ extras_require={
+ 'security': ['pyOpenSSL>=0.13', 'ndg-httpsclient', 'pyasn1'],
+ },
+)
diff --git a/python/requests/test_requests.py b/python/requests/test_requests.py
new file mode 100755
index 000000000..079524186
--- /dev/null
+++ b/python/requests/test_requests.py
@@ -0,0 +1,1746 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Tests for Requests."""
+
+from __future__ import division
+import json
+import os
+import pickle
+import unittest
+import collections
+import contextlib
+
+import io
+import requests
+import pytest
+from requests.adapters import HTTPAdapter
+from requests.auth import HTTPDigestAuth, _basic_auth_str
+from requests.compat import (
+ Morsel, cookielib, getproxies, str, urljoin, urlparse, is_py3,
+ builtin_str, OrderedDict
+ )
+from requests.cookies import cookiejar_from_dict, morsel_to_cookie
+from requests.exceptions import (ConnectionError, ConnectTimeout,
+ InvalidSchema, InvalidURL, MissingSchema,
+ ReadTimeout, Timeout, RetryError)
+from requests.models import PreparedRequest
+from requests.structures import CaseInsensitiveDict
+from requests.sessions import SessionRedirectMixin
+from requests.models import urlencode
+from requests.hooks import default_hooks
+
+try:
+ import StringIO
+except ImportError:
+ import io as StringIO
+
+try:
+ from multiprocessing.pool import ThreadPool
+except ImportError:
+ ThreadPool = None
+
+if is_py3:
+ def u(s):
+ return s
+else:
+ def u(s):
+ return s.decode('unicode-escape')
+
+
+@pytest.fixture
+def httpbin(httpbin):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin.url.rstrip('/') + '/'
+
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
+
+ return inner
+
+
+@pytest.fixture
+def httpsbin_url(httpbin_secure):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin_secure.url.rstrip('/') + '/'
+
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
+
+ return inner
+
+
+# Requests to this URL should always fail with a connection timeout (nothing
+# listening on that port)
+TARPIT = "http://10.255.255.1"
+
+class TestRequests(object):
+
+ _multiprocess_can_split_ = True
+
+ def setUp(self):
+ """Create simple data set with headers."""
+ pass
+
+ def tearDown(self):
+ """Teardown."""
+ pass
+
+ def test_entry_points(self):
+
+ requests.session
+ requests.session().get
+ requests.session().head
+ requests.get
+ requests.head
+ requests.put
+ requests.patch
+ requests.post
+
+ def test_invalid_url(self):
+ with pytest.raises(MissingSchema):
+ requests.get('hiwpefhipowhefopw')
+ with pytest.raises(InvalidSchema):
+ requests.get('localhost:3128')
+ with pytest.raises(InvalidSchema):
+ requests.get('localhost.localdomain:3128/')
+ with pytest.raises(InvalidSchema):
+ requests.get('10.122.1.1:3128/')
+ with pytest.raises(InvalidURL):
+ requests.get('http://')
+
+ def test_basic_building(self):
+ req = requests.Request()
+ req.url = 'http://kennethreitz.org/'
+ req.data = {'life': '42'}
+
+ pr = req.prepare()
+ assert pr.url == req.url
+ assert pr.body == 'life=42'
+
+ def test_no_content_length(self, httpbin):
+ get_req = requests.Request('GET', httpbin('get')).prepare()
+ assert 'Content-Length' not in get_req.headers
+ head_req = requests.Request('HEAD', httpbin('head')).prepare()
+ assert 'Content-Length' not in head_req.headers
+
+ def test_override_content_length(self, httpbin):
+ headers = {
+ 'Content-Length': 'not zero'
+ }
+ r = requests.Request('POST', httpbin('post'), headers=headers).prepare()
+ assert 'Content-Length' in r.headers
+ assert r.headers['Content-Length'] == 'not zero'
+
+ def test_path_is_not_double_encoded(self):
+ request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare()
+
+ assert request.path_url == '/get/test%20case'
+
+ def test_params_are_added_before_fragment(self):
+ request = requests.Request('GET',
+ "http://example.com/path#fragment", params={"a": "b"}).prepare()
+ assert request.url == "http://example.com/path?a=b#fragment"
+ request = requests.Request('GET',
+ "http://example.com/path?key=value#fragment", params={"a": "b"}).prepare()
+ assert request.url == "http://example.com/path?key=value&a=b#fragment"
+
+ def test_params_original_order_is_preserved_by_default(self):
+ param_ordered_dict = OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))
+ session = requests.Session()
+ request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)
+ prep = session.prepare_request(request)
+ assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'
+
+ def test_params_bytes_are_encoded(self):
+ request = requests.Request('GET', 'http://example.com',
+ params=b'test=foo').prepare()
+ assert request.url == 'http://example.com/?test=foo'
+
+ def test_binary_put(self):
+ request = requests.Request('PUT', 'http://example.com',
+ data=u"ööö".encode("utf-8")).prepare()
+ assert isinstance(request.body, bytes)
+
+ def test_mixed_case_scheme_acceptable(self, httpbin):
+ s = requests.Session()
+ s.proxies = getproxies()
+ parts = urlparse(httpbin('get'))
+ schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://']
+ for scheme in schemes:
+ url = scheme + parts.netloc + parts.path
+ r = requests.Request('GET', url)
+ r = s.send(r.prepare())
+ assert r.status_code == 200, 'failed for scheme {0}'.format(scheme)
+
+ def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):
+ r = requests.Request('GET', httpbin('get'))
+ s = requests.Session()
+ s.proxies = getproxies()
+
+ r = s.send(r.prepare())
+
+ assert r.status_code == 200
+
+ def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):
+ r = requests.get(httpbin('redirect', '1'))
+ assert r.status_code == 200
+ assert r.history[0].status_code == 302
+ assert r.history[0].is_redirect
+
+ # def test_HTTP_302_ALLOW_REDIRECT_POST(self):
+ # r = requests.post(httpbin('status', '302'), data={'some': 'data'})
+ # self.assertEqual(r.status_code, 200)
+
+ def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):
+ heads = {'User-agent': 'Mozilla/5.0'}
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+
+ assert heads['User-agent'] in r.text
+ assert r.status_code == 200
+
+ def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):
+ heads = {'User-agent': 'Mozilla/5.0'}
+
+ r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
+ assert r.status_code == 200
+
+ def test_set_cookie_on_301(self, httpbin):
+ s = requests.session()
+ url = httpbin('cookies/set?foo=bar')
+ s.get(url)
+ assert s.cookies['foo'] == 'bar'
+
+ def test_cookie_sent_on_redirect(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo=bar'))
+ r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')
+ assert 'Cookie' in r.json()['headers']
+
+ def test_cookie_removed_on_expire(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo=bar'))
+ assert s.cookies['foo'] == 'bar'
+ s.get(
+ httpbin('response-headers'),
+ params={
+ 'Set-Cookie':
+ 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'
+ }
+ )
+ assert 'foo' not in s.cookies
+
+ def test_cookie_quote_wrapped(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo="bar:baz"'))
+ assert s.cookies['foo'] == '"bar:baz"'
+
+ def test_cookie_persists_via_api(self, httpbin):
+ s = requests.session()
+ r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})
+ assert 'foo' in r.request.headers['Cookie']
+ assert 'foo' in r.history[0].request.headers['Cookie']
+
+ def test_request_cookie_overrides_session_cookie(self, httpbin):
+ s = requests.session()
+ s.cookies['foo'] = 'bar'
+ r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})
+ assert r.json()['cookies']['foo'] == 'baz'
+ # Session cookie should not be modified
+ assert s.cookies['foo'] == 'bar'
+
+ def test_request_cookies_not_persisted(self, httpbin):
+ s = requests.session()
+ s.get(httpbin('cookies'), cookies={'foo': 'baz'})
+ # Sending a request with cookies should not add cookies to the session
+ assert not s.cookies
+
+ def test_generic_cookiejar_works(self, httpbin):
+ cj = cookielib.CookieJar()
+ cookiejar_from_dict({'foo': 'bar'}, cj)
+ s = requests.session()
+ s.cookies = cj
+ r = s.get(httpbin('cookies'))
+ # Make sure the cookie was sent
+ assert r.json()['cookies']['foo'] == 'bar'
+ # Make sure the session cj is still the custom one
+ assert s.cookies is cj
+
+ def test_param_cookiejar_works(self, httpbin):
+ cj = cookielib.CookieJar()
+ cookiejar_from_dict({'foo': 'bar'}, cj)
+ s = requests.session()
+ r = s.get(httpbin('cookies'), cookies=cj)
+ # Make sure the cookie was sent
+ assert r.json()['cookies']['foo'] == 'bar'
+
+ def test_requests_in_history_are_not_overridden(self, httpbin):
+ resp = requests.get(httpbin('redirect/3'))
+ urls = [r.url for r in resp.history]
+ req_urls = [r.request.url for r in resp.history]
+ assert urls == req_urls
+
+ def test_history_is_always_a_list(self, httpbin):
+ """
+ Show that even with redirects, Response.history is always a list.
+ """
+ resp = requests.get(httpbin('get'))
+ assert isinstance(resp.history, list)
+ resp = requests.get(httpbin('redirect/1'))
+ assert isinstance(resp.history, list)
+ assert not isinstance(resp.history, tuple)
+
+ def test_headers_on_session_with_None_are_not_sent(self, httpbin):
+ """Do not send headers in Session.headers with None values."""
+ ses = requests.Session()
+ ses.headers['Accept-Encoding'] = None
+ req = requests.Request('GET', httpbin('get'))
+ prep = ses.prepare_request(req)
+ assert 'Accept-Encoding' not in prep.headers
+
+ def test_user_agent_transfers(self, httpbin):
+
+ heads = {
+ 'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ }
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+ assert heads['User-agent'] in r.text
+
+ heads = {
+ 'user-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ }
+
+ r = requests.get(httpbin('user-agent'), headers=heads)
+ assert heads['user-agent'] in r.text
+
+ def test_HTTP_200_OK_HEAD(self, httpbin):
+ r = requests.head(httpbin('get'))
+ assert r.status_code == 200
+
+ def test_HTTP_200_OK_PUT(self, httpbin):
+ r = requests.put(httpbin('put'))
+ assert r.status_code == 200
+
+ def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):
+ auth = ('user', 'pass')
+ url = httpbin('basic-auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = auth
+ r = s.get(url)
+ assert r.status_code == 200
+
+ def test_connection_error_invalid_domain(self):
+ """Connecting to an unknown domain should raise a ConnectionError"""
+ with pytest.raises(ConnectionError):
+ requests.get("http://doesnotexist.google.com")
+
+ def test_connection_error_invalid_port(self):
+ """Connecting to an invalid port should raise a ConnectionError"""
+ with pytest.raises(ConnectionError):
+ requests.get("http://localhost:1", timeout=1)
+
+ def test_LocationParseError(self):
+ """Inputing a URL that cannot be parsed should raise an InvalidURL error"""
+ with pytest.raises(InvalidURL):
+ requests.get("http://fe80::5054:ff:fe5a:fc0")
+
+ def test_basicauth_with_netrc(self, httpbin):
+ auth = ('user', 'pass')
+ wrong_auth = ('wronguser', 'wrongpass')
+ url = httpbin('basic-auth', 'user', 'pass')
+
+ old_auth = requests.sessions.get_netrc_auth
+
+ try:
+ def get_netrc_auth_mock(url):
+ return auth
+ requests.sessions.get_netrc_auth = get_netrc_auth_mock
+
+ # Should use netrc and work.
+ r = requests.get(url)
+ assert r.status_code == 200
+
+ # Given auth should override and fail.
+ r = requests.get(url, auth=wrong_auth)
+ assert r.status_code == 401
+
+ s = requests.session()
+
+ # Should use netrc and work.
+ r = s.get(url)
+ assert r.status_code == 200
+
+ # Given auth should override and fail.
+ s.auth = wrong_auth
+ r = s.get(url)
+ assert r.status_code == 401
+ finally:
+ requests.sessions.get_netrc_auth = old_auth
+
+ def test_DIGEST_HTTP_200_OK_GET(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = HTTPDigestAuth('user', 'pass')
+ r = s.get(url)
+ assert r.status_code == 200
+
+ def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+ auth = HTTPDigestAuth('user', 'pass')
+ r = requests.get(url)
+ assert r.cookies['fake'] == 'fake_value'
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 200
+
+ def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+ auth = HTTPDigestAuth('user', 'pass')
+ s = requests.Session()
+ s.get(url, auth=auth)
+ assert s.cookies['fake'] == 'fake_value'
+
+ def test_DIGEST_STREAM(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth, stream=True)
+ assert r.raw.read() != b''
+
+ r = requests.get(url, auth=auth, stream=False)
+ assert r.raw.read() == b''
+
+ def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'wrongpass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert r.status_code == 401
+
+ r = requests.get(url)
+ assert r.status_code == 401
+
+ s = requests.session()
+ s.auth = auth
+ r = s.get(url)
+ assert r.status_code == 401
+
+ def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ assert '"auth"' in r.request.headers['Authorization']
+
+ def test_POSTBIN_GET_POST_FILES(self, httpbin):
+
+ url = httpbin('post')
+ post1 = requests.post(url).raise_for_status()
+
+ post1 = requests.post(url, data={'some': 'data'})
+ assert post1.status_code == 200
+
+ with open('requirements.txt') as f:
+ post2 = requests.post(url, files={'some': f})
+ assert post2.status_code == 200
+
+ post4 = requests.post(url, data='[{"some": "json"}]')
+ assert post4.status_code == 200
+
+ with pytest.raises(ValueError):
+ requests.post(url, files=['bad file data'])
+
+ def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):
+
+ url = httpbin('post')
+ post1 = requests.post(url).raise_for_status()
+
+ post1 = requests.post(url, data={'some': 'data'})
+ assert post1.status_code == 200
+
+ with open('requirements.txt') as f:
+ post2 = requests.post(url,
+ data={'some': 'data'}, files={'some': f})
+ assert post2.status_code == 200
+
+ post4 = requests.post(url, data='[{"some": "json"}]')
+ assert post4.status_code == 200
+
+ with pytest.raises(ValueError):
+ requests.post(url, files=['bad file data'])
+
+ def test_conflicting_post_params(self, httpbin):
+ url = httpbin('post')
+ with open('requirements.txt') as f:
+ pytest.raises(ValueError, "requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})")
+ pytest.raises(ValueError, "requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})")
+
+ def test_request_ok_set(self, httpbin):
+ r = requests.get(httpbin('status', '404'))
+ assert not r.ok
+
+ def test_status_raising(self, httpbin):
+ r = requests.get(httpbin('status', '404'))
+ with pytest.raises(requests.exceptions.HTTPError):
+ r.raise_for_status()
+
+ r = requests.get(httpbin('status', '500'))
+ assert not r.ok
+
+ def test_decompress_gzip(self, httpbin):
+ r = requests.get(httpbin('gzip'))
+ r.content.decode('ascii')
+
+ def test_unicode_get(self, httpbin):
+ url = httpbin('/get')
+ requests.get(url, params={'foo': 'føø'})
+ requests.get(url, params={'føø': 'føø'})
+ requests.get(url, params={'føø': 'føø'})
+ requests.get(url, params={'foo': 'foo'})
+ requests.get(httpbin('ø'), params={'foo': 'foo'})
+
+ def test_unicode_header_name(self, httpbin):
+ requests.put(
+ httpbin('put'),
+ headers={str('Content-Type'): 'application/octet-stream'},
+ data='\xff') # compat.str is unicode.
+
+ def test_pyopenssl_redirect(self, httpsbin_url, httpbin_ca_bundle):
+ requests.get(httpsbin_url('status', '301'), verify=httpbin_ca_bundle)
+
+ def test_urlencoded_get_query_multivalued_param(self, httpbin):
+
+ r = requests.get(httpbin('get'), params=dict(test=['foo', 'baz']))
+ assert r.status_code == 200
+ assert r.url == httpbin('get?test=foo&test=baz')
+
+ def test_different_encodings_dont_break_post(self, httpbin):
+ r = requests.post(httpbin('post'),
+ data={'stuff': json.dumps({'a': 123})},
+ params={'blah': 'asdf1234'},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ def test_unicode_multipart_post(self, httpbin):
+ r = requests.post(httpbin('post'),
+ data={'stuff': u('ëlïxr')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': u('ëlïxr').encode('utf-8')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': 'elixr'},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ r = requests.post(httpbin('post'),
+ data={'stuff': 'elixr'.encode('utf-8')},
+ files={'file': ('test_requests.py', open(__file__, 'rb'))})
+ assert r.status_code == 200
+
+ def test_unicode_multipart_post_fieldnames(self, httpbin):
+ filename = os.path.splitext(__file__)[0] + '.py'
+ r = requests.Request(method='POST',
+ url=httpbin('post'),
+ data={'stuff'.encode('utf-8'): 'elixr'},
+ files={'file': ('test_requests.py',
+ open(filename, 'rb'))})
+ prep = r.prepare()
+ assert b'name="stuff"' in prep.body
+ assert b'name="b\'stuff\'"' not in prep.body
+
+ def test_unicode_method_name(self, httpbin):
+ files = {'file': open('test_requests.py', 'rb')}
+ r = requests.request(
+ method=u('POST'), url=httpbin('post'), files=files)
+ assert r.status_code == 200
+
+ def test_unicode_method_name_with_request_object(self, httpbin):
+ files = {'file': open('test_requests.py', 'rb')}
+ s = requests.Session()
+ req = requests.Request(u("POST"), httpbin('post'), files=files)
+ prep = s.prepare_request(req)
+ assert isinstance(prep.method, builtin_str)
+ assert prep.method == "POST"
+
+ resp = s.send(prep)
+ assert resp.status_code == 200
+
+ def test_custom_content_type(self, httpbin):
+ r = requests.post(
+ httpbin('post'),
+ data={'stuff': json.dumps({'a': 123})},
+ files={'file1': ('test_requests.py', open(__file__, 'rb')),
+ 'file2': ('test_requests', open(__file__, 'rb'),
+ 'text/py-content-type')})
+ assert r.status_code == 200
+ assert b"text/py-content-type" in r.request.body
+
+ def test_hook_receives_request_arguments(self, httpbin):
+ def hook(resp, **kwargs):
+ assert resp is not None
+ assert kwargs != {}
+
+ requests.Request('GET', httpbin(), hooks={'response': hook})
+
+ def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):
+ hook = lambda x, *args, **kwargs: x
+ s = requests.Session()
+ s.hooks['response'].append(hook)
+ r = requests.Request('GET', httpbin())
+ prep = s.prepare_request(r)
+ assert prep.hooks['response'] != []
+ assert prep.hooks['response'] == [hook]
+
+ def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):
+ hook1 = lambda x, *args, **kwargs: x
+ hook2 = lambda x, *args, **kwargs: x
+ assert hook1 is not hook2
+ s = requests.Session()
+ s.hooks['response'].append(hook2)
+ r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})
+ prep = s.prepare_request(r)
+ assert prep.hooks['response'] == [hook1]
+
+ def test_prepared_request_hook(self, httpbin):
+ def hook(resp, **kwargs):
+ resp.hook_working = True
+ return resp
+
+ req = requests.Request('GET', httpbin(), hooks={'response': hook})
+ prep = req.prepare()
+
+ s = requests.Session()
+ s.proxies = getproxies()
+ resp = s.send(prep)
+
+ assert hasattr(resp, 'hook_working')
+
+ def test_prepared_from_session(self, httpbin):
+ class DummyAuth(requests.auth.AuthBase):
+ def __call__(self, r):
+ r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'
+ return r
+
+ req = requests.Request('GET', httpbin('headers'))
+ assert not req.auth
+
+ s = requests.Session()
+ s.auth = DummyAuth()
+
+ prep = s.prepare_request(req)
+ resp = s.send(prep)
+
+ assert resp.json()['headers'][
+ 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'
+
+ def test_prepare_request_with_bytestring_url(self):
+ req = requests.Request('GET', b'https://httpbin.org/')
+ s = requests.Session()
+ prep = s.prepare_request(req)
+ assert prep.url == "https://httpbin.org/"
+
+ def test_links(self):
+ r = requests.Response()
+ r.headers = {
+ 'cache-control': 'public, max-age=60, s-maxage=60',
+ 'connection': 'keep-alive',
+ 'content-encoding': 'gzip',
+ 'content-type': 'application/json; charset=utf-8',
+ 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',
+ 'etag': '"6ff6a73c0e446c1f61614769e3ceb778"',
+ 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',
+ 'link': ('<https://api.github.com/users/kennethreitz/repos?'
+ 'page=2&per_page=10>; rel="next", <https://api.github.'
+ 'com/users/kennethreitz/repos?page=7&per_page=10>; '
+ ' rel="last"'),
+ 'server': 'GitHub.com',
+ 'status': '200 OK',
+ 'vary': 'Accept',
+ 'x-content-type-options': 'nosniff',
+ 'x-github-media-type': 'github.beta',
+ 'x-ratelimit-limit': '60',
+ 'x-ratelimit-remaining': '57'
+ }
+ assert r.links['next']['rel'] == 'next'
+
+ def test_cookie_parameters(self):
+ key = 'some_cookie'
+ value = 'some_value'
+ secure = True
+ domain = 'test.com'
+ rest = {'HttpOnly': True}
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value, secure=secure, domain=domain, rest=rest)
+
+ assert len(jar) == 1
+ assert 'some_cookie' in jar
+
+ cookie = list(jar)[0]
+ assert cookie.secure == secure
+ assert cookie.domain == domain
+ assert cookie._rest['HttpOnly'] == rest['HttpOnly']
+
+ def test_cookie_as_dict_keeps_len(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ d1 = dict(jar)
+ d2 = dict(jar.iteritems())
+ d3 = dict(jar.items())
+
+ assert len(jar) == 2
+ assert len(d1) == 2
+ assert len(d2) == 2
+ assert len(d3) == 2
+
+ def test_cookie_as_dict_keeps_items(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ d1 = dict(jar)
+ d2 = dict(jar.iteritems())
+ d3 = dict(jar.items())
+
+ assert d1['some_cookie'] == 'some_value'
+ assert d2['some_cookie'] == 'some_value'
+ assert d3['some_cookie1'] == 'some_value1'
+
+ def test_cookie_as_dict_keys(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ keys = jar.keys()
+ assert keys == list(keys)
+ # make sure one can use keys multiple times
+ assert list(keys) == list(keys)
+
+ def test_cookie_as_dict_values(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ values = jar.values()
+ assert values == list(values)
+ # make sure one can use values multiple times
+ assert list(values) == list(values)
+
+ def test_cookie_as_dict_items(self):
+ key = 'some_cookie'
+ value = 'some_value'
+
+ key1 = 'some_cookie1'
+ value1 = 'some_value1'
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value)
+ jar.set(key1, value1)
+
+ items = jar.items()
+ assert items == list(items)
+ # make sure one can use items multiple times
+ assert list(items) == list(items)
+
+ def test_time_elapsed_blank(self, httpbin):
+ r = requests.get(httpbin('get'))
+ td = r.elapsed
+ total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600)
+ * 10**6) / 10**6)
+ assert total_seconds > 0.0
+
+ def test_response_is_iterable(self):
+ r = requests.Response()
+ io = StringIO.StringIO('abc')
+ read_ = io.read
+
+ def read_mock(amt, decode_content=None):
+ return read_(amt)
+ setattr(io, 'read', read_mock)
+ r.raw = io
+ assert next(iter(r))
+ io.close()
+
+ def test_response_decode_unicode(self):
+ """
+ When called with decode_unicode, Response.iter_content should always
+ return unicode.
+ """
+ r = requests.Response()
+ r._content_consumed = True
+ r._content = b'the content'
+ r.encoding = 'ascii'
+
+ chunks = r.iter_content(decode_unicode=True)
+ assert all(isinstance(chunk, str) for chunk in chunks)
+
+ # also for streaming
+ r = requests.Response()
+ r.raw = io.BytesIO(b'the content')
+ r.encoding = 'ascii'
+ chunks = r.iter_content(decode_unicode=True)
+ assert all(isinstance(chunk, str) for chunk in chunks)
+
+ def test_request_and_response_are_pickleable(self, httpbin):
+ r = requests.get(httpbin('get'))
+
+ # verify we can pickle the original request
+ assert pickle.loads(pickle.dumps(r.request))
+
+ # verify we can pickle the response and that we have access to
+ # the original request.
+ pr = pickle.loads(pickle.dumps(r))
+ assert r.request.url == pr.request.url
+ assert r.request.headers == pr.request.headers
+
+ def test_get_auth_from_url(self):
+ url = 'http://user:pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_encoded_spaces(self):
+ url = 'http://user:pass%20pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_not_encoded_spaces(self):
+ url = 'http://user:pass pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_percent_chars(self):
+ url = 'http://user%25user:pass@complex.url.com/path?query=yes'
+ assert ('user%user', 'pass') == requests.utils.get_auth_from_url(url)
+
+ def test_get_auth_from_url_encoded_hashes(self):
+ url = 'http://user:pass%23pass@complex.url.com/path?query=yes'
+ assert ('user', 'pass#pass') == requests.utils.get_auth_from_url(url)
+
+ def test_cannot_send_unprepared_requests(self, httpbin):
+ r = requests.Request(url=httpbin())
+ with pytest.raises(ValueError):
+ requests.Session().send(r)
+
+ def test_http_error(self):
+ error = requests.exceptions.HTTPError()
+ assert not error.response
+ response = requests.Response()
+ error = requests.exceptions.HTTPError(response=response)
+ assert error.response == response
+ error = requests.exceptions.HTTPError('message', response=response)
+ assert str(error) == 'message'
+ assert error.response == response
+
+ def test_session_pickling(self, httpbin):
+ r = requests.Request('GET', httpbin('get'))
+ s = requests.Session()
+
+ s = pickle.loads(pickle.dumps(s))
+ s.proxies = getproxies()
+
+ r = s.send(r.prepare())
+ assert r.status_code == 200
+
+ def test_fixes_1329(self, httpbin):
+ """
+ Ensure that header updates are done case-insensitively.
+ """
+ s = requests.Session()
+ s.headers.update({'ACCEPT': 'BOGUS'})
+ s.headers.update({'accept': 'application/json'})
+ r = s.get(httpbin('get'))
+ headers = r.request.headers
+ assert headers['accept'] == 'application/json'
+ assert headers['Accept'] == 'application/json'
+ assert headers['ACCEPT'] == 'application/json'
+
+ def test_uppercase_scheme_redirect(self, httpbin):
+ parts = urlparse(httpbin('html'))
+ url = "HTTP://" + parts.netloc + parts.path
+ r = requests.get(httpbin('redirect-to'), params={'url': url})
+ assert r.status_code == 200
+ assert r.url.lower() == url.lower()
+
+ def test_transport_adapter_ordering(self):
+ s = requests.Session()
+ order = ['https://', 'http://']
+ assert order == list(s.adapters)
+ s.mount('http://git', HTTPAdapter())
+ s.mount('http://github', HTTPAdapter())
+ s.mount('http://github.com', HTTPAdapter())
+ s.mount('http://github.com/about/', HTTPAdapter())
+ order = [
+ 'http://github.com/about/',
+ 'http://github.com',
+ 'http://github',
+ 'http://git',
+ 'https://',
+ 'http://',
+ ]
+ assert order == list(s.adapters)
+ s.mount('http://gittip', HTTPAdapter())
+ s.mount('http://gittip.com', HTTPAdapter())
+ s.mount('http://gittip.com/about/', HTTPAdapter())
+ order = [
+ 'http://github.com/about/',
+ 'http://gittip.com/about/',
+ 'http://github.com',
+ 'http://gittip.com',
+ 'http://github',
+ 'http://gittip',
+ 'http://git',
+ 'https://',
+ 'http://',
+ ]
+ assert order == list(s.adapters)
+ s2 = requests.Session()
+ s2.adapters = {'http://': HTTPAdapter()}
+ s2.mount('https://', HTTPAdapter())
+ assert 'http://' in s2.adapters
+ assert 'https://' in s2.adapters
+
+ def test_header_remove_is_case_insensitive(self, httpbin):
+ # From issue #1321
+ s = requests.Session()
+ s.headers['foo'] = 'bar'
+ r = s.get(httpbin('get'), headers={'FOO': None})
+ assert 'foo' not in r.request.headers
+
+ def test_params_are_merged_case_sensitive(self, httpbin):
+ s = requests.Session()
+ s.params['foo'] = 'bar'
+ r = s.get(httpbin('get'), params={'FOO': 'bar'})
+ assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}
+
+ def test_long_authinfo_in_url(self):
+ url = 'http://{0}:{1}@{2}:9000/path?query#frag'.format(
+ 'E8A3BE87-9E3F-4620-8858-95478E385B5B',
+ 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',
+ 'exactly-------------sixty-----------three------------characters',
+ )
+ r = requests.Request('GET', url).prepare()
+ assert r.url == url
+
+ def test_header_keys_are_native(self, httpbin):
+ headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}
+ r = requests.Request('GET', httpbin('get'), headers=headers)
+ p = r.prepare()
+
+ # This is testing that they are builtin strings. A bit weird, but there
+ # we go.
+ assert 'unicode' in p.headers.keys()
+ assert 'byte' in p.headers.keys()
+
+ def test_can_send_nonstring_objects_with_files(self, httpbin):
+ data = {'a': 0.0}
+ files = {'b': 'foo'}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_can_send_bytes_bytearray_objects_with_files(self, httpbin):
+ # Test bytes:
+ data = {'a': 'this is a string'}
+ files = {'b': b'foo'}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+ assert 'multipart/form-data' in p.headers['Content-Type']
+ # Test bytearrays:
+ files = {'b': bytearray(b'foo')}
+ r = requests.Request('POST', httpbin('post'), data=data, files=files)
+ p = r.prepare()
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_can_send_file_object_with_non_string_filename(self, httpbin):
+ f = io.BytesIO()
+ f.name = 2
+ r = requests.Request('POST', httpbin('post'), files={'f': f})
+ p = r.prepare()
+
+ assert 'multipart/form-data' in p.headers['Content-Type']
+
+ def test_autoset_header_values_are_native(self, httpbin):
+ data = 'this is a string'
+ length = '16'
+ req = requests.Request('POST', httpbin('post'), data=data)
+ p = req.prepare()
+
+ assert p.headers['Content-Length'] == length
+
+ def test_nonhttp_schemes_dont_check_URLs(self):
+ test_urls = (
+ 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',
+ 'file:///etc/passwd',
+ 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',
+ )
+ for test_url in test_urls:
+ req = requests.Request('GET', test_url)
+ preq = req.prepare()
+ assert test_url == preq.url
+
+ def test_auth_is_stripped_on_redirect_off_host(self, httpbin):
+ r = requests.get(
+ httpbin('redirect-to'),
+ params={'url': 'http://www.google.co.uk'},
+ auth=('user', 'pass'),
+ )
+ assert r.history[0].request.headers['Authorization']
+ assert not r.request.headers.get('Authorization', '')
+
+ def test_auth_is_retained_for_redirect_on_host(self, httpbin):
+ r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))
+ h1 = r.history[0].request.headers['Authorization']
+ h2 = r.request.headers['Authorization']
+
+ assert h1 == h2
+
+ def test_manual_redirect_with_partial_body_read(self, httpbin):
+ s = requests.Session()
+ r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)
+ assert r1.is_redirect
+ rg = s.resolve_redirects(r1, r1.request, stream=True)
+
+ # read only the first eight bytes of the response body,
+ # then follow the redirect
+ r1.iter_content(8)
+ r2 = next(rg)
+ assert r2.is_redirect
+
+ # read all of the response via iter_content,
+ # then follow the redirect
+ for _ in r2.iter_content():
+ pass
+ r3 = next(rg)
+ assert not r3.is_redirect
+
+ def _patch_adapter_gzipped_redirect(self, session, url):
+ adapter = session.get_adapter(url=url)
+ org_build_response = adapter.build_response
+ self._patched_response = False
+
+ def build_response(*args, **kwargs):
+ resp = org_build_response(*args, **kwargs)
+ if not self._patched_response:
+ resp.raw.headers['content-encoding'] = 'gzip'
+ self._patched_response = True
+ return resp
+
+ adapter.build_response = build_response
+
+ def test_redirect_with_wrong_gzipped_header(self, httpbin):
+ s = requests.Session()
+ url = httpbin('redirect/1')
+ self._patch_adapter_gzipped_redirect(s, url)
+ s.get(url)
+
+ def test_basic_auth_str_is_always_native(self):
+ s = _basic_auth_str("test", "test")
+ assert isinstance(s, builtin_str)
+ assert s == "Basic dGVzdDp0ZXN0"
+
+ def test_requests_history_is_saved(self, httpbin):
+ r = requests.get(httpbin('redirect/5'))
+ total = r.history[-1].history
+ i = 0
+ for item in r.history:
+ assert item.history == total[0:i]
+ i = i + 1
+
+ def test_json_param_post_content_type_works(self, httpbin):
+ r = requests.post(
+ httpbin('post'),
+ json={'life': 42}
+ )
+ assert r.status_code == 200
+ assert 'application/json' in r.request.headers['Content-Type']
+ assert {'life': 42} == r.json()['json']
+
+ def test_json_param_post_should_not_override_data_param(self, httpbin):
+ r = requests.Request(method='POST', url=httpbin('post'),
+ data={'stuff': 'elixr'},
+ json={'music': 'flute'})
+ prep = r.prepare()
+ assert 'stuff=elixr' == prep.body
+
+ def test_response_iter_lines(self, httpbin):
+ r = requests.get(httpbin('stream/4'), stream=True)
+ assert r.status_code == 200
+
+ it = r.iter_lines()
+ next(it)
+ assert len(list(it)) == 3
+
+ def test_unconsumed_session_response_closes_connection(self, httpbin):
+ s = requests.session()
+
+ with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:
+ pass
+
+ assert response._content_consumed is False
+ assert response.raw.closed
+
+ @pytest.mark.xfail
+ def test_response_iter_lines_reentrant(self, httpbin):
+ """Response.iter_lines() is not reentrant safe"""
+ r = requests.get(httpbin('stream/4'), stream=True)
+ assert r.status_code == 200
+
+ next(r.iter_lines())
+ assert len(list(r.iter_lines())) == 3
+
+
+class TestContentEncodingDetection(unittest.TestCase):
+
+ def test_none(self):
+ encodings = requests.utils.get_encodings_from_content('')
+ assert not len(encodings)
+
+ def test_html_charset(self):
+ """HTML5 meta charset attribute"""
+ content = '<meta charset="UTF-8">'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_html4_pragma(self):
+ """HTML4 pragma directive"""
+ content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8">'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_xhtml_pragma(self):
+ """XHTML 1.x served with text/html MIME type"""
+ content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_xml(self):
+ """XHTML 1.x served as XML"""
+ content = '<?xml version="1.0" encoding="UTF-8"?>'
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert len(encodings) == 1
+ assert encodings[0] == 'UTF-8'
+
+ def test_precedence(self):
+ content = '''
+ <?xml version="1.0" encoding="XML"?>
+ <meta charset="HTML5">
+ <meta http-equiv="Content-type" content="text/html;charset=HTML4" />
+ '''.strip()
+ encodings = requests.utils.get_encodings_from_content(content)
+ assert encodings == ['HTML5', 'HTML4', 'XML']
+
+
+class TestCaseInsensitiveDict(unittest.TestCase):
+
+ def test_mapping_init(self):
+ cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_iterable_init(self):
+ cid = CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')])
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_kwargs_init(self):
+ cid = CaseInsensitiveDict(FOO='foo', BAr='bar')
+ assert len(cid) == 2
+ assert 'foo' in cid
+ assert 'bar' in cid
+
+ def test_docstring_example(self):
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ assert cid['aCCEPT'] == 'application/json'
+ assert list(cid) == ['Accept']
+
+ def test_len(self):
+ cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})
+ cid['A'] = 'a'
+ assert len(cid) == 2
+
+ def test_getitem(self):
+ cid = CaseInsensitiveDict({'Spam': 'blueval'})
+ assert cid['spam'] == 'blueval'
+ assert cid['SPAM'] == 'blueval'
+
+ def test_fixes_649(self):
+ """__setitem__ should behave case-insensitively."""
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'oneval'
+ cid['Spam'] = 'twoval'
+ cid['sPAM'] = 'redval'
+ cid['SPAM'] = 'blueval'
+ assert cid['spam'] == 'blueval'
+ assert cid['SPAM'] == 'blueval'
+ assert list(cid.keys()) == ['SPAM']
+
+ def test_delitem(self):
+ cid = CaseInsensitiveDict()
+ cid['Spam'] = 'someval'
+ del cid['sPam']
+ assert 'spam' not in cid
+ assert len(cid) == 0
+
+ def test_contains(self):
+ cid = CaseInsensitiveDict()
+ cid['Spam'] = 'someval'
+ assert 'Spam' in cid
+ assert 'spam' in cid
+ assert 'SPAM' in cid
+ assert 'sPam' in cid
+ assert 'notspam' not in cid
+
+ def test_get(self):
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'oneval'
+ cid['SPAM'] = 'blueval'
+ assert cid.get('spam') == 'blueval'
+ assert cid.get('SPAM') == 'blueval'
+ assert cid.get('sPam') == 'blueval'
+ assert cid.get('notspam', 'default') == 'default'
+
+ def test_update(self):
+ cid = CaseInsensitiveDict()
+ cid['spam'] = 'blueval'
+ cid.update({'sPam': 'notblueval'})
+ assert cid['spam'] == 'notblueval'
+ cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
+ cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})
+ assert len(cid) == 2
+ assert cid['foo'] == 'anotherfoo'
+ assert cid['bar'] == 'anotherbar'
+
+ def test_update_retains_unchanged(self):
+ cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})
+ cid.update({'foo': 'newfoo'})
+ assert cid['bar'] == 'bar'
+
+ def test_iter(self):
+ cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})
+ keys = frozenset(['Spam', 'Eggs'])
+ assert frozenset(iter(cid)) == keys
+
+ def test_equality(self):
+ cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})
+ othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})
+ assert cid == othercid
+ del othercid['spam']
+ assert cid != othercid
+ assert cid == {'spam': 'blueval', 'eggs': 'redval'}
+ assert cid != object()
+
+ def test_setdefault(self):
+ cid = CaseInsensitiveDict({'Spam': 'blueval'})
+ assert cid.setdefault('spam', 'notblueval') == 'blueval'
+ assert cid.setdefault('notspam', 'notblueval') == 'notblueval'
+
+ def test_lower_items(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())
+ lowerkeyset = frozenset(['accept', 'user-agent'])
+ assert keyset == lowerkeyset
+
+ def test_preserve_key_case(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ keyset = frozenset(['Accept', 'user-Agent'])
+ assert frozenset(i[0] for i in cid.items()) == keyset
+ assert frozenset(cid.keys()) == keyset
+ assert frozenset(cid) == keyset
+
+ def test_preserve_last_key_case(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ cid.update({'ACCEPT': 'application/json'})
+ cid['USER-AGENT'] = 'requests'
+ keyset = frozenset(['ACCEPT', 'USER-AGENT'])
+ assert frozenset(i[0] for i in cid.items()) == keyset
+ assert frozenset(cid.keys()) == keyset
+ assert frozenset(cid) == keyset
+
+ def test_copy(self):
+ cid = CaseInsensitiveDict({
+ 'Accept': 'application/json',
+ 'user-Agent': 'requests',
+ })
+ cid_copy = cid.copy()
+ assert cid == cid_copy
+ cid['changed'] = True
+ assert cid != cid_copy
+
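+# The behaviour exercised above is consistent with storing each entry under
+# its lowercased key alongside the cased key most recently used to set it.
+# A minimal sketch of that strategy (illustrative only, not requests' actual
+# implementation):
+#
+#     class CaseInsensitiveStore(object):
+#         def __init__(self):
+#             self._store = {}  # lowercased key -> (cased key, value)
+#
+#         def __setitem__(self, key, value):
+#             self._store[key.lower()] = (key, value)
+#
+#         def __getitem__(self, key):
+#             return self._store[key.lower()][1]
+#
+#         def keys(self):
+#             return [cased for cased, _ in self._store.values()]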
+
+class UtilsTestCase(unittest.TestCase):
+
+ def test_super_len_io_streams(self):
+ """ Ensures that we properly deal with different kinds of IO streams. """
+ # uses StringIO or io.StringIO (see import above)
+ from io import BytesIO
+ from requests.utils import super_len
+
+ assert super_len(StringIO.StringIO()) == 0
+ assert super_len(
+ StringIO.StringIO('with so much drama in the LBC')) == 29
+
+ assert super_len(BytesIO()) == 0
+ assert super_len(
+ BytesIO(b"it's kinda hard bein' snoop d-o-double-g")) == 40
+
+ try:
+ import cStringIO
+ except ImportError:
+ pass
+ else:
+ assert super_len(
+ cStringIO.StringIO('but some how, some way...')) == 25
+
+ def test_super_len_correctly_calculates_len_of_partially_read_file(self):
+ """Ensure that we handle partially consumed file like objects."""
+ from requests.utils import super_len
+ s = StringIO.StringIO()
+ s.write('foobarbogus')
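+ # After the write the stream position sits at the end of the buffer,
+ # so the remaining (readable) length is 0.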
+ assert super_len(s) == 0
+
+ def test_get_environ_proxies_ip_ranges(self):
+ """Ensures that IP addresses are correctly matches with ranges
+ in no_proxy variable."""
+ from requests.utils import get_environ_proxies
+ os.environ['no_proxy'] = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
+ assert get_environ_proxies('http://192.168.0.1:5000/') == {}
+ assert get_environ_proxies('http://192.168.0.1/') == {}
+ assert get_environ_proxies('http://172.16.1.1/') == {}
+ assert get_environ_proxies('http://172.16.1.1:5000/') == {}
+ assert get_environ_proxies('http://192.168.1.1:5000/') != {}
+ assert get_environ_proxies('http://192.168.1.1/') != {}
+
+ def test_get_environ_proxies(self):
+ """Ensures that IP addresses are correctly matches with ranges
+ in no_proxy variable."""
+ from requests.utils import get_environ_proxies
+ os.environ['no_proxy'] = "127.0.0.1,localhost.localdomain,192.168.0.0/24,172.16.1.1"
+ assert get_environ_proxies(
+ 'http://localhost.localdomain:5000/v1.0/') == {}
+ assert get_environ_proxies('http://www.requests.com/') != {}
+
+ def test_select_proxies(self):
+ """Make sure we can select per-host proxies correctly."""
+ from requests.utils import select_proxy
+ proxies = {'http': 'http://http.proxy',
+ 'http://some.host': 'http://some.host.proxy'}
+ assert select_proxy('hTTp://u:p@Some.Host/path', proxies) == 'http://some.host.proxy'
+ assert select_proxy('hTTp://u:p@Other.Host/path', proxies) == 'http://http.proxy'
+ assert select_proxy('hTTps://Other.Host', proxies) is None
+
+ def test_guess_filename_when_int(self):
+ from requests.utils import guess_filename
+ assert None is guess_filename(1)
+
+ def test_guess_filename_when_filename_is_an_int(self):
+ from requests.utils import guess_filename
+ fake = type('Fake', (object,), {'name': 1})()
+ assert None is guess_filename(fake)
+
+ def test_guess_filename_with_file_like_obj(self):
+ from requests.utils import guess_filename
+ from requests import compat
+ fake = type('Fake', (object,), {'name': b'value'})()
+ guessed_name = guess_filename(fake)
+ assert b'value' == guessed_name
+ assert isinstance(guessed_name, compat.bytes)
+
+ def test_guess_filename_with_unicode_name(self):
+ from requests.utils import guess_filename
+ from requests import compat
+ filename = b'value'.decode('utf-8')
+ fake = type('Fake', (object,), {'name': filename})()
+ guessed_name = guess_filename(fake)
+ assert filename == guessed_name
+ assert isinstance(guessed_name, compat.str)
+
+ def test_is_ipv4_address(self):
+ from requests.utils import is_ipv4_address
+ assert is_ipv4_address('8.8.8.8')
+ assert not is_ipv4_address('8.8.8.8.8')
+ assert not is_ipv4_address('localhost.localdomain')
+
+ def test_is_valid_cidr(self):
+ from requests.utils import is_valid_cidr
+ assert not is_valid_cidr('8.8.8.8')
+ assert is_valid_cidr('192.168.1.0/24')
+
+ def test_dotted_netmask(self):
+ from requests.utils import dotted_netmask
+ assert dotted_netmask(8) == '255.0.0.0'
+ assert dotted_netmask(24) == '255.255.255.0'
+ assert dotted_netmask(25) == '255.255.255.128'
+
+ def test_address_in_network(self):
+ from requests.utils import address_in_network
+ assert address_in_network('192.168.1.1', '192.168.1.0/24')
+ assert not address_in_network('172.16.0.1', '192.168.1.0/24')
+
+ def test_get_auth_from_url(self):
+ """Ensures that username and password in well-encoded URI as per
+ RFC 3986 are correclty extracted."""
+ from requests.utils import get_auth_from_url
+ from requests.compat import quote
+ percent_encoding_test_chars = "%!*'();:@&=+$,/?#[] "
+ url_address = "request.com/url.html#test"
+ url = "http://" + quote(
+ percent_encoding_test_chars, '') + ':' + quote(
+ percent_encoding_test_chars, '') + '@' + url_address
+ (username, password) = get_auth_from_url(url)
+ assert username == percent_encoding_test_chars
+ assert password == percent_encoding_test_chars
+
+ def test_requote_uri_with_unquoted_percents(self):
+ """Ensure we handle unquoted percent signs in redirects.
+
+ See: https://github.com/kennethreitz/requests/issues/2356
+ """
+ from requests.utils import requote_uri
+ bad_uri = 'http://example.com/fiz?buz=%ppicture'
+ quoted = 'http://example.com/fiz?buz=%25ppicture'
+ assert quoted == requote_uri(bad_uri)
+
+ def test_requote_uri_properly_requotes(self):
+ """Ensure requoting doesn't break expectations."""
+ from requests.utils import requote_uri
+ quoted = 'http://example.com/fiz?buz=%25ppicture'
+ assert quoted == requote_uri(quoted)
+
+
+class TestMorselToCookieExpires(unittest.TestCase):
+
+ """Tests for morsel_to_cookie when morsel contains expires."""
+
+ def test_expires_valid_str(self):
+ """Test case where we convert expires from string time."""
+
+ morsel = Morsel()
+ morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'
+ cookie = morsel_to_cookie(morsel)
+ assert cookie.expires == 1
+
+ def test_expires_invalid_int(self):
+ """Test case where an invalid type is passed for expires."""
+
+ morsel = Morsel()
+ morsel['expires'] = 100
+ with pytest.raises(TypeError):
+ morsel_to_cookie(morsel)
+
+ def test_expires_invalid_str(self):
+ """Test case where an invalid string is input."""
+
+ morsel = Morsel()
+ morsel['expires'] = 'woops'
+ with pytest.raises(ValueError):
+ morsel_to_cookie(morsel)
+
+ def test_expires_none(self):
+ """Test case where expires is None."""
+
+ morsel = Morsel()
+ morsel['expires'] = None
+ cookie = morsel_to_cookie(morsel)
+ assert cookie.expires is None
+
+
+class TestMorselToCookieMaxAge(unittest.TestCase):
+
+ """Tests for morsel_to_cookie when morsel contains max-age."""
+
+ def test_max_age_valid_int(self):
+ """Test case where a valid max age in seconds is passed."""
+
+ morsel = Morsel()
+ morsel['max-age'] = 60
+ cookie = morsel_to_cookie(morsel)
+ assert isinstance(cookie.expires, int)
+
+ def test_max_age_invalid_str(self):
+ """Test case where a invalid max age is passed."""
+
+ morsel = Morsel()
+ morsel['max-age'] = 'woops'
+ with pytest.raises(TypeError):
+ morsel_to_cookie(morsel)
+
+
+class TestTimeout:
+ def test_stream_timeout(self, httpbin):
+ try:
+ requests.get(httpbin('delay/10'), timeout=2.0)
+ except requests.exceptions.Timeout as e:
+ assert 'Read timed out' in e.args[0].args[0]
+
+ def test_invalid_timeout(self, httpbin):
+ with pytest.raises(ValueError) as e:
+ requests.get(httpbin('get'), timeout=(3, 4, 5))
+ assert '(connect, read)' in str(e)
+
+ with pytest.raises(ValueError) as e:
+ requests.get(httpbin('get'), timeout="foo")
+ assert 'must be an int or float' in str(e)
+
+ def test_none_timeout(self, httpbin):
+ """ Check that you can set None as a valid timeout value.
+
+ To actually test this behavior, we'd want to check that setting the
+ timeout to None actually lets the request block past the system default
+ timeout. However, this would make the test suite unbearably slow.
+ Instead we verify that setting the timeout to None does not prevent the
+ request from succeeding.
+ """
+ r = requests.get(httpbin('get'), timeout=None)
+ assert r.status_code == 200
+
+ def test_read_timeout(self, httpbin):
+ try:
+ requests.get(httpbin('delay/10'), timeout=(None, 0.1))
+ assert False, "The recv() request should time out."
+ except ReadTimeout:
+ pass
+
+ def test_connect_timeout(self):
+ try:
+ requests.get(TARPIT, timeout=(0.1, None))
+ assert False, "The connect() request should time out."
+ except ConnectTimeout as e:
+ assert isinstance(e, ConnectionError)
+ assert isinstance(e, Timeout)
+
+ def test_total_timeout_connect(self):
+ try:
+ requests.get(TARPIT, timeout=(0.1, 0.1))
+ assert False, "The connect() request should time out."
+ except ConnectTimeout:
+ pass
+
+ def test_encoded_methods(self, httpbin):
+ """See: https://github.com/kennethreitz/requests/issues/2316"""
+ r = requests.request(b'GET', httpbin('get'))
+ assert r.ok
+
+
+SendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))
+
+
+class RedirectSession(SessionRedirectMixin):
+ def __init__(self, order_of_redirects):
+ self.redirects = order_of_redirects
+ self.calls = []
+ self.max_redirects = 30
+ self.cookies = {}
+ self.trust_env = False
+
+ def send(self, *args, **kwargs):
+ self.calls.append(SendCall(args, kwargs))
+ return self.build_response()
+
+ def build_response(self):
+ request = self.calls[-1].args[0]
+ r = requests.Response()
+
+ try:
+ r.status_code = int(self.redirects.pop(0))
+ except IndexError:
+ r.status_code = 200
+
+ r.headers = CaseInsensitiveDict({'Location': '/'})
+ r.raw = self._build_raw()
+ r.request = request
+ return r
+
+ def _build_raw(self):
+ string = StringIO.StringIO('')
+ setattr(string, 'release_conn', lambda *args: args)
+ return string
+
+
+class TestRedirects:
+ default_keyword_args = {
+ 'stream': False,
+ 'verify': True,
+ 'cert': None,
+ 'timeout': None,
+ 'allow_redirects': False,
+ 'proxies': {},
+ }
+
+ def test_requests_are_updated_each_time(self, httpbin):
+ session = RedirectSession([303, 307])
+ prep = requests.Request('POST', httpbin('post')).prepare()
+ r0 = session.send(prep)
+ assert r0.request.method == 'POST'
+ assert session.calls[-1] == SendCall((r0.request,), {})
+ redirect_generator = session.resolve_redirects(r0, prep)
+ for response in redirect_generator:
+ assert response.request.method == 'GET'
+ send_call = SendCall((response.request,),
+ TestRedirects.default_keyword_args)
+ assert session.calls[-1] == send_call
+
+
+
+@pytest.fixture
+def list_of_tuples():
+ return [
+ (('a', 'b'), ('c', 'd')),
+ (('c', 'd'), ('a', 'b')),
+ (('a', 'b'), ('c', 'd'), ('e', 'f')),
+ ]
+
+
+def test_data_argument_accepts_tuples(list_of_tuples):
+ """
+ Ensure that the data argument will accept tuples of strings
+ and properly encode them.
+ """
+ for data in list_of_tuples:
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data=data,
+ hooks=default_hooks()
+ )
+ assert p.body == urlencode(data)
+
+
+def assert_copy(p, p_copy):
+ for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):
+ assert getattr(p, attr) == getattr(p_copy, attr)
+
+
+def test_prepared_request_empty_copy():
+ p = PreparedRequest()
+ assert_copy(p, p.copy())
+
+
+def test_prepared_request_no_cookies_copy():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data='foo=bar',
+ hooks=default_hooks()
+ )
+ assert_copy(p, p.copy())
+
+
+def test_prepared_request_complete_copy():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url='http://www.example.com',
+ data='foo=bar',
+ hooks=default_hooks(),
+ cookies={'foo': 'bar'}
+ )
+ assert_copy(p, p.copy())
+
+
+def test_prepare_unicode_url():
+ p = PreparedRequest()
+ p.prepare(
+ method='GET',
+ url=u('http://www.example.com/üniçø∂é'),
+ )
+ assert_copy(p, p.copy())
+
+
+def test_urllib3_retries(httpbin):
+ from requests.packages.urllib3.util import Retry
+ s = requests.Session()
+ s.mount('http://', HTTPAdapter(max_retries=Retry(
+ total=2, status_forcelist=[500]
+ )))
+
+ with pytest.raises(RetryError):
+ s.get(httpbin('status/500'))
+
+
+def test_urllib3_pool_connection_closed(httpbin):
+ s = requests.Session()
+ s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))
+
+ try:
+ s.get(httpbin('status/200'))
+ except ConnectionError as e:
+ assert u"Pool is closed." in str(e)
+
+
+def test_vendor_aliases():
+ from requests.packages import urllib3
+ from requests.packages import chardet
+
+ with pytest.raises(ImportError):
+ from requests.packages import webbrowser
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/python/rsa/LICENSE b/python/rsa/LICENSE
new file mode 100644
index 000000000..da76c9d7f
--- /dev/null
+++ b/python/rsa/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/python/rsa/MANIFEST.in b/python/rsa/MANIFEST.in
new file mode 100644
index 000000000..8cf0021b9
--- /dev/null
+++ b/python/rsa/MANIFEST.in
@@ -0,0 +1,5 @@
+include README
+include LICENSE
+include *.py
+recursive-include rsa *.py
+recursive-include tests *.py
diff --git a/python/rsa/PKG-INFO b/python/rsa/PKG-INFO
new file mode 100644
index 000000000..399ba7b3e
--- /dev/null
+++ b/python/rsa/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.1
+Name: rsa
+Version: 3.1.4
+Summary: Pure-Python RSA implementation
+Home-page: http://stuvel.eu/rsa
+Author: Sybren A. Stuvel
+Author-email: sybren@stuvel.eu
+License: ASL 2
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Security :: Cryptography
diff --git a/python/rsa/README.rst b/python/rsa/README.rst
new file mode 100644
index 000000000..9f348636d
--- /dev/null
+++ b/python/rsa/README.rst
@@ -0,0 +1,31 @@
+Pure Python RSA implementation
+==============================
+
+`Python-RSA`_ is a pure-Python RSA implementation. It supports
+encryption and decryption, signing and verifying signatures, and key
+generation according to PKCS#1 version 1.5. It can be used as a Python
+library as well as on the commandline. The code was mostly written by
+Sybren A. Stüvel.
+
+Documentation can be found at the Python-RSA homepage:
+http://stuvel.eu/rsa
+
+Download and install using::
+
+ pip install rsa
+
+or::
+
+ easy_install rsa
+
+or download it from the `Python Package Index`_.
+
+The source code is maintained in a `Mercurial repository`_ and is
+licensed under the `Apache License, version 2.0`_
+
+
+.. _`Python-RSA`: http://stuvel.eu/rsa
+.. _`Mercurial repository`: https://bitbucket.org/sybren/python-rsa
+.. _`Python Package Index`: http://pypi.python.org/pypi/rsa
+.. _`Apache License, version 2.0`: http://www.apache.org/licenses/LICENSE-2.0
+
diff --git a/python/rsa/create_timing_table.py b/python/rsa/create_timing_table.py
new file mode 100755
index 000000000..b1b2871b3
--- /dev/null
+++ b/python/rsa/create_timing_table.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+import time
+import rsa
+
+poolsize = 8
+accurate = True
+
+def run_speed_test(bitsize):
+
+ iterations = 0
+ start = end = time.time()
+
+ # At least a number of iterations, and at least 2 seconds
+ while iterations < 10 or end - start < 2:
+ iterations += 1
+ rsa.newkeys(bitsize, accurate=accurate, poolsize=poolsize)
+ end = time.time()
+
+ duration = end - start
+ dur_per_call = duration / iterations
+
+ print '%5i bit: %9.3f sec. (%i iterations over %.1f seconds)' % (bitsize,
+ dur_per_call, iterations, duration)
+
+for bitsize in (128, 256, 384, 512, 1024, 2048, 3072, 4096):
+ run_speed_test(bitsize)
+
+
diff --git a/python/rsa/playstuff.py b/python/rsa/playstuff.py
new file mode 100755
index 000000000..bfb941b88
--- /dev/null
+++ b/python/rsa/playstuff.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+import re
+import rsa
+
+def _logon( username, password ):
+ # Retrieve the public key
+ # network stuff # req = urllib2.Request(AAA_GET_KEY, headers={'User-Agent': CLIENT_ID})
+ # network stuff # response = urllib2.urlopen(req)
+ # network stuff # html = response.read()
+ # network stuff # print response.info() # DEBUG
+ # network stuff # print html # DEBUG
+
+ # replacement for network stuff #
+ html="<x509PublicKey>30820122300d06092a864886f70d01010105000382010f003082010a0282010100dad8e3c084137bab285e869ae99a5de9752a095753680e9128adbe981e8141225704e558b8ee437836ec8c5460514efae61550bfdd883549981458bae388c9490b5ab43475068b169b32da446b0aae2dfbb3a5f425c74b284ced3f57ed33b30ec7b4b95a8216f8b063e34af2c84fef58bab381f3b79b80d06b687e0b5fc7aaeb311a88389ab7aa1422ae0b58956bb9e91c5cbf2b98422b05e1eacb82e29938566f6f05274294a8c596677c950ce97dcd003709d008f1ae6418ce5bf55ad2bf921318c6e31b324bdda4b4f12ff1fd86b5b71e647d1fc175aea137ba0ff869d5fbcf9ed0289fe7da3619c1204fc42d616462ac1b6a4e6ca2655d44bce039db519d0203010001</x509PublicKey>"
+ # end replacement for network stuff #
+
+ # Regular expression that picks out the hex-encoded key
+ hexstring = re.compile('<x509PublicKey[^>]*>([0-9a-fA-F]+)</x509PublicKey>')
+
+ # Pick out the key and convert it to DER format
+ hex_pub_der = hexstring.search(html).group(1)
+ pub_der = hex_pub_der.decode('hex')
+
+ # Convert it to a public key
+ pub_key = rsa.PublicKey.load_pkcs1_openssl_der(pub_der)
+
+ # Encrypt the password
+ enc_pass = rsa.encrypt(password, pub_key)
+
+ # and hex-encode it
+ hex_pass = enc_pass.encode('hex')
+
+# _logon('me', 'MyPass')
+
+import timeit
+timeit.timeit('_logon( "me", "MyPass" )',
+ setup='from __main__ import _logon',
+ number=1000)
+
+
diff --git a/python/rsa/rsa/__init__.py b/python/rsa/rsa/__init__.py
new file mode 100644
index 000000000..2d01c12e0
--- /dev/null
+++ b/python/rsa/rsa/__init__.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""RSA module
+
+Module for calculating large primes, and RSA encryption, decryption, signing
+and verification. Includes generating public and private keys.
+
+WARNING: this implementation does not use random padding, compression of the
+cleartext input to prevent repetitions, or other common security improvements.
+Use with care.
+
+If you want to have a more secure implementation, use the functions from the
+``rsa.pkcs1`` module.
+
+"""
+
+__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly"
+__date__ = "2014-02-22"
+__version__ = '3.1.4'
+
+from rsa.key import newkeys, PrivateKey, PublicKey
+from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \
+ VerificationError
+
+# Do doctest if we're run directly
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey',
+ 'PrivateKey', 'DecryptionError', 'VerificationError']
+
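+# Typical round trip (illustrative sketch; the pkcs1 functions re-exported
+# above operate on byte strings):
+#
+# >>> (pub, priv) = newkeys(512)
+# >>> crypto = encrypt(b'hello', pub)
+# >>> assert decrypt(crypto, priv) == b'hello'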
diff --git a/python/rsa/rsa/_compat.py b/python/rsa/rsa/_compat.py
new file mode 100644
index 000000000..3c4eb81b1
--- /dev/null
+++ b/python/rsa/rsa/_compat.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python compatibility wrappers."""
+
+
+from __future__ import absolute_import
+
+import sys
+from struct import pack
+
+try:
+ MAX_INT = sys.maxsize
+except AttributeError:
+ MAX_INT = sys.maxint
+
+MAX_INT64 = (1 << 63) - 1
+MAX_INT32 = (1 << 31) - 1
+MAX_INT16 = (1 << 15) - 1
+
+# Determine the word size of the processor.
+if MAX_INT == MAX_INT64:
+ # 64-bit processor.
+ MACHINE_WORD_SIZE = 64
+elif MAX_INT == MAX_INT32:
+ # 32-bit processor.
+ MACHINE_WORD_SIZE = 32
+else:
+ # Otherwise assume a 64-bit processor, keeping up with modern times.
+ MACHINE_WORD_SIZE = 64
+
+
+try:
+ # < Python3
+ unicode_type = unicode
+ have_python3 = False
+except NameError:
+ # Python3.
+ unicode_type = str
+ have_python3 = True
+
+# Fake byte literals.
+if str is unicode_type:
+ def byte_literal(s):
+ return s.encode('latin1')
+else:
+ def byte_literal(s):
+ return s
+
+# ``long`` is no more. Do type detection using this instead.
+try:
+ integer_types = (int, long)
+except NameError:
+ integer_types = (int,)
+
+b = byte_literal
+
+try:
+ # Python 2.6 or higher.
+ bytes_type = bytes
+except NameError:
+ # Python 2.5
+ bytes_type = str
+
+
+# To avoid calling b() multiple times in tight loops.
+ZERO_BYTE = b('\x00')
+EMPTY_BYTE = b('')
+
+
+def is_bytes(obj):
+ """
+ Determines whether the given value is a byte string.
+
+ :param obj:
+ The value to test.
+ :returns:
+ ``True`` if ``value`` is a byte string; ``False`` otherwise.
+ """
+ return isinstance(obj, bytes_type)
+
+
+def is_integer(obj):
+ """
+ Determines whether the given value is an integer.
+
+ :param obj:
+ The value to test.
+ :returns:
+ ``True`` if ``value`` is an integer; ``False`` otherwise.
+ """
+ return isinstance(obj, integer_types)
+
+
+def byte(num):
+ """
+ Converts a number between 0 and 255 (both inclusive) to a base-256 (byte)
+ representation.
+
+ Use it as a replacement for ``chr`` where you are expecting a byte
+ because this will work on all current versions of Python.
+
+ :param num:
+ An unsigned integer between 0 and 255 (both inclusive).
+ :returns:
+ A single byte.
+ """
+ return pack("B", num)
+
+
+def get_word_alignment(num, force_arch=64,
+ _machine_word_size=MACHINE_WORD_SIZE):
+ """
+ Returns alignment details for the given number based on the platform
+ Python is running on.
+
+ :param num:
+ Unsigned integral number.
+ :param force_arch:
+ If you don't want to use 64-bit unsigned chunks, set this to
+ anything other than 64. 32-bit chunks will be preferred then.
+ Default 64 will be used when on a 64-bit machine.
+ :param _machine_word_size:
+ (Internal) The machine word size used for alignment.
+ :returns:
+ 4-tuple::
+
+ (word_bits, word_bytes,
+ max_uint, packing_format_type)
+ """
+ max_uint64 = 0xffffffffffffffff
+ max_uint32 = 0xffffffff
+ max_uint16 = 0xffff
+ max_uint8 = 0xff
+
+ if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
+ # 64-bit unsigned integer.
+ return 64, 8, max_uint64, "Q"
+ elif num > max_uint16:
+ # 32-bit unsigned integer
+ return 32, 4, max_uint32, "L"
+ elif num > max_uint8:
+ # 16-bit unsigned integer.
+ return 16, 2, max_uint16, "H"
+ else:
+ # 8-bit unsigned integer.
+ return 8, 1, max_uint8, "B"
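+
+# Example (illustrative): values picked by the branches above.
+#
+# >>> assert get_word_alignment(300) == (16, 2, 0xffff, "H")
+# >>> assert get_word_alignment(70000) == (32, 4, 0xffffffff, "L")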
diff --git a/python/rsa/rsa/_version133.py b/python/rsa/rsa/_version133.py
new file mode 100644
index 000000000..230a03c84
--- /dev/null
+++ b/python/rsa/rsa/_version133.py
@@ -0,0 +1,442 @@
+"""RSA module
+
+Module for calculating large primes, and RSA encryption, decryption,
+signing and verification. Includes generating public and private keys.
+
+WARNING: this code implements the mathematics of RSA. It is not suitable for
+real-world secure cryptography purposes. It has not been reviewed by a security
+expert. It does not include padding of data. There are many ways in which the
+output of this module, when used without any modification, can be successfully
+attacked.
+"""
+
+__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer"
+__date__ = "2010-02-05"
+__version__ = '1.3.3'
+
+# NOTE: Python's modulo can return negative numbers. We compensate for
+# this behaviour using the abs() function
+
+from cPickle import dumps, loads
+import base64
+import math
+import os
+import random
+import sys
+import types
+import zlib
+
+from rsa._compat import byte
+
+# Display a warning that this insecure version is imported.
+import warnings
+warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
+ % __name__)
+
+def gcd(p, q):
+ """Returns the greatest common divisor of p and q
+
+
+ >>> gcd(42, 6)
+ 6
+ """
+ if p<q: return gcd(q, p)
+ if q == 0: return p
+ return gcd(q, abs(p%q))
+
+def bytes2int(bytes):
+ """Converts a list of bytes or a string to an integer
+
+ >>> ((128 * 256) + 64) * 256 + 15
+ 8405007
+ >>> l = [128, 64, 15]
+ >>> bytes2int(l)
+ 8405007
+ """
+
+ if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ # Convert byte stream to integer
+ integer = 0
+ for byte in bytes:
+ integer *= 256
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += byte
+
+ return integer
+
+def int2bytes(number):
+ """Converts a number to a string of bytes
+
+ >>> bytes2int(int2bytes(123456789))
+ 123456789
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (byte(number & 0xFF), string)
+ number /= 256
+
+ return string
+
+def fast_exponentiation(a, p, n):
+ """Calculates r = a^p mod n
+ """
+ result = a % n
+ remainders = []
+ while p != 1:
+ remainders.append(p & 1)
+ p = p >> 1
+ while remainders:
+ rem = remainders.pop()
+ result = ((a ** rem) * result ** 2) % n
+ return result
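+
+# Example (illustrative): this is the classic square-and-multiply loop, so
+# it agrees with the built-in three-argument pow():
+#
+# >>> fast_exponentiation(5, 3, 13)
+# 8
+# >>> pow(5, 3, 13)
+# 8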
+
+def read_random_int(nbits):
+ """Reads a random integer of approximately nbits bits rounded up
+ to whole bytes"""
+
+ nbytes = ceil(nbits/8.)
+ randomdata = os.urandom(nbytes)
+ return bytes2int(randomdata)
+
+def ceil(x):
+ """ceil(x) -> int(math.ceil(x))"""
+
+ return int(math.ceil(x))
+
+def randint(minvalue, maxvalue):
+ """Returns a random integer x with minvalue <= x <= maxvalue"""
+
+ # Safety - get a lot of random data even if the range is fairly
+ # small
+ min_nbits = 32
+
+ # The range of the random numbers we need to generate
+ range = maxvalue - minvalue
+
+ # Which is this number of bytes
+ rangebytes = ceil(math.log(range, 2) / 8.)
+
+ # Convert to bits, but make sure it's always at least min_nbits*2
+ rangebits = max(rangebytes * 8, min_nbits * 2)
+
+ # Take a random number of bits between min_nbits and rangebits
+ nbits = random.randint(min_nbits, rangebits)
+
+ return (read_random_int(nbits) % range) + minvalue
+
+def fermat_little_theorem(p):
+ """Returns 1 if p may be prime, and something else if p definitely
+ is not prime"""
+
+ a = randint(1, p-1)
+ return fast_exponentiation(a, p-1, p)
+
+def jacobi(a, b):
+ """Calculates the value of the Jacobi symbol (a/b)
+ """
+
+ if a % b == 0:
+ return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ b, a = a, b % a
+ else:
+ if ((b ** 2 - 1) >> 3) & 1:
+ result = -result
+ a = a >> 1
+ return result
+
+def jacobi_witness(x, n):
+ """Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ """
+
+ j = jacobi(x, n) % n
+ f = fast_exponentiation(x, (n-1)/2, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ """Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ """
+
+ q = 0.5 # Property of the jacobi_witness function
+
+ # t = int(math.ceil(k / math.log(1/q, 2)))
+ t = ceil(k / math.log(1/q, 2))
+ for i in range(t+1):
+ x = randint(1, n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
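+
+# Example (illustrative): each witness misses a composite with probability
+# at most q = 0.5, so k = 5 gives t + 1 = 6 trials and an error probability
+# of at most 2 ** -6.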
+
+def is_prime(number):
+ """Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ 0
+ >>> is_prime(41)
+ 1
+ """
+
+ """
+ if not fermat_little_theorem(number) == 1:
+ # Not prime, according to Fermat's little theorem
+ return False
+ """
+
+ if randomized_primality_testing(number, 5):
+ # Prime, according to Jacobi
+ return True
+
+ # Not prime
+ return False
+
+
+def getprime(nbits):
+ """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
+ other words: nbits is rounded up to whole bytes.
+
+ >>> p = getprime(8)
+ >>> is_prime(p-1)
+ 0
+ >>> is_prime(p)
+ 1
+ >>> is_prime(p+1)
+ 0
+ """
+
+ nbytes = int(math.ceil(nbits/8.))
+
+ while True:
+ integer = read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer): break
+
+ # Retry if not prime
+
+ return integer
+
+def are_relatively_prime(a, b):
+ """Returns True if a and b are relatively prime, and False if they
+ are not.
+
+ >>> are_relatively_prime(2, 3)
+ 1
+ >>> are_relatively_prime(2, 4)
+ 0
+ """
+
+ d = gcd(a, b)
+ return (d == 1)
+
+def find_p_q(nbits):
+ """Returns a tuple of two different primes of nbits bits"""
+
+ p = getprime(nbits)
+ while True:
+ q = getprime(nbits)
+ if not q == p: break
+
+ return (p, q)
+
+def extended_euclid_gcd(a, b):
+ """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb
+ """
+
+ if b == 0:
+ return (a, 1, 0)
+
+ q = abs(a % b)
+ r = long(a / b)
+ (d, k, l) = extended_euclid_gcd(b, q)
+
+ return (d, l, k - l*r)
+
+# Main function: calculate encryption and decryption keys
+def calculate_keys(p, q, nbits):
+ """Calculates an encryption and a decryption key for p and q, and
+ returns them as a tuple (e, d)"""
+
+ n = p * q
+ phi_n = (p-1) * (q-1)
+
+ while True:
+ # Make sure e has enough bits so we ensure "wrapping" through
+ # modulo n
+ e = getprime(max(8, nbits/2))
+ if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
+
+ (d, i, j) = extended_euclid_gcd(e, phi_n)
+
+ if not d == 1:
+ raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
+
+ if not (e * i) % phi_n == 1:
+ raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
+
+ return (e, i)
+
+
+def gen_keys(nbits):
+ """Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+ """
+
+ while True:
+ (p, q) = find_p_q(nbits)
+ (e, d) = calculate_keys(p, q, nbits)
+
+ # For some reason, d is sometimes negative. We don't know how
+ # to fix it (yet), so we keep trying until everything is shiny
+ if d > 0: break
+
+ return (p, q, e, d)
+
+def gen_pubpriv_keys(nbits):
+ """Generates public and private keys, and returns them as (pub,
+ priv).
+
+ The public key consists of a dict {'e': ..., 'n': ...}. The private
+ key consists of a dict {'d': ..., 'p': ..., 'q': ...}.
+ """
+
+ (p, q, e, d) = gen_keys(nbits)
+
+ return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
+
+def encrypt_int(message, ekey, n):
+ """Encrypts a message using encryption key 'ekey', working modulo
+ n"""
+
+ if type(message) is types.IntType:
+ return encrypt_int(long(message), ekey, n)
+
+ if not type(message) is types.LongType:
+ raise TypeError("You must pass a long or an int")
+
+ if message > 0 and \
+ math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)):
+ raise OverflowError("The message is too long")
+
+ return fast_exponentiation(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ """Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n"""
+
+ return encrypt_int(cyphertext, dkey, n)
+
+def sign_int(message, dkey, n):
+ """Signs 'message' using key 'dkey', working modulo n"""
+
+ return decrypt_int(message, dkey, n)
+
+def verify_int(signed, ekey, n):
+ """verifies 'signed' using key 'ekey', working modulo n"""
+
+ return encrypt_int(signed, ekey, n)
+
+def picklechops(chops):
+ """Pickles and base64encodes it's argument chops"""
+
+ value = zlib.compress(dumps(chops))
+ encoded = base64.encodestring(value)
+ return encoded.strip()
+
+def unpicklechops(string):
+ """base64decodes and unpickes it's argument string into chops"""
+
+ return loads(zlib.decompress(base64.decodestring(string)))
+
+def chopstring(message, key, n, funcref):
+ """Splits 'message' into chops that are at most as long as n,
+ converts these into integers, and calls funcref(integer, key, n)
+ for each chop.
+
+ Used by 'encrypt' and 'sign'.
+ """
+
+ msglen = len(message)
+ mbits = msglen * 8
+ nbits = int(math.floor(math.log(n, 2)))
+ nbytes = nbits / 8
+ blocks = msglen / nbytes
+
+ if msglen % nbytes > 0:
+ blocks += 1
+
+ cypher = []
+
+ for bindex in range(blocks):
+ offset = bindex * nbytes
+ block = message[offset:offset+nbytes]
+ value = bytes2int(block)
+ cypher.append(funcref(value, key, n))
+
+ return picklechops(cypher)
+
+def gluechops(chops, key, n, funcref):
+ """Glues chops back together into a string. calls
+ funcref(integer, key, n) for each chop.
+
+ Used by 'decrypt' and 'verify'.
+ """
+ message = ""
+
+ chops = unpicklechops(chops)
+
+ for cpart in chops:
+ mpart = funcref(cpart, key, n)
+ message += int2bytes(mpart)
+
+ return message
+
+def encrypt(message, key):
+ """Encrypts a string 'message' with the public key 'key'"""
+
+ return chopstring(message, key['e'], key['n'], encrypt_int)
+
+def sign(message, key):
+ """Signs a string 'message' with the private key 'key'"""
+
+ return chopstring(message, key['d'], key['p']*key['q'], decrypt_int)
+
+def decrypt(cypher, key):
+ """Decrypts a cypher with the private key 'key'"""
+
+ return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
+
+def verify(cypher, key):
+ """Verifies a cypher with the public key 'key'"""
+
+ return gluechops(cypher, key['e'], key['n'], encrypt_int)
+
+# Do doctest if we're not imported
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"]
+
diff --git a/python/rsa/rsa/_version200.py b/python/rsa/rsa/_version200.py
new file mode 100644
index 000000000..f91565385
--- /dev/null
+++ b/python/rsa/rsa/_version200.py
@@ -0,0 +1,529 @@
+"""RSA module
+
+Module for calculating large primes, and RSA encryption, decryption,
+signing and verification. Includes generating public and private keys.
+
+WARNING: this implementation does not use random padding, compression of the
+cleartext input to prevent repetitions, or other common security improvements.
+Use with care.
+
+"""
+
+__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead"
+__date__ = "2010-02-08"
+__version__ = '2.0'
+
+import math
+import os
+import random
+import sys
+import types
+from rsa._compat import byte
+
+# Display a warning that this insecure version is imported.
+import warnings
+warnings.warn('Insecure version of the RSA module is imported as %s' % __name__)
+
+
+def bit_size(number):
+ """Returns the number of bits required to hold a specific long number"""
+
+ return int(math.ceil(math.log(number,2)))
+
+def gcd(p, q):
+ """Returns the greatest common divisor of p and q
+ >>> gcd(48, 180)
+ 12
+ """
+ # The iterative version is faster and uses much less stack space
+ while q != 0:
+ if p < q: (p,q) = (q,p)
+ (p,q) = (q, p % q)
+ return p
+
+
+def bytes2int(bytes):
+ """Converts a list of bytes or a string to an integer
+
+ >>> (((128 * 256) + 64) * 256) + 15
+ 8405007
+ >>> l = [128, 64, 15]
+ >>> bytes2int(l) #same as bytes2int('\x80@\x0f')
+ 8405007
+ """
+
+ if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ # Convert byte stream to integer
+ integer = 0
+ for byte in bytes:
+ integer *= 256
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += byte
+
+ return integer
+
+def int2bytes(number):
+ """
+ Converts a number to a string of bytes
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (byte(number & 0xFF), string)
+ number /= 256
+
+ return string
+
+def to64(number):
+ """Converts a number in the range of 0 to 63 into base 64 digit
+ character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'.
+
+ >>> to64(10)
+ 'A'
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ if 0 <= number <= 9: #00-09 translates to '0' - '9'
+ return byte(number + 48)
+
+ if 10 <= number <= 35:
+ return byte(number + 55) #10-35 translates to 'A' - 'Z'
+
+ if 36 <= number <= 61:
+ return byte(number + 61) #36-61 translates to 'a' - 'z'
+
+ if number == 62: # 62 translates to '-' (minus)
+ return byte(45)
+
+ if number == 63: # 63 translates to '_' (underscore)
+ return byte(95)
+
+ raise ValueError('Invalid Base64 value: %i' % number)
+
+
+def from64(number):
+ """Converts an ordinal character value in the range of
+ 0-9,A-Z,a-z,-,_ to a number in the range of 0-63.
+
+ >>> from64(49)
+ 1
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9
+ return(number - 48)
+
+ if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35
+ return(number - 55)
+
+ if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61
+ return(number - 61)
+
+ if number == 45: #ord('-') translates to 62
+ return(62)
+
+ if number == 95: #ord('_') translates to 63
+ return(63)
+
+ raise ValueError('Invalid Base64 value: %i' % number)
+
+
+def int2str64(number):
+ """Converts a number to a string of base64 encoded characters in
+ the range of '0'-'9','A'-'Z,'a'-'z','-','_'.
+
+ >>> int2str64(123456789)
+ '7MyqL'
+ """
+
+ if not (type(number) is types.LongType or type(number) is types.IntType):
+ raise TypeError("You must pass a long or an int")
+
+ string = ""
+
+ while number > 0:
+ string = "%s%s" % (to64(number & 0x3F), string)
+ number /= 64
+
+ return string
+
+
+def str642int(string):
+ """Converts a base64 encoded string into an integer.
+ The chars of this string are in the range '0'-'9','A'-'Z','a'-'z','-','_'
+
+ >>> str642int('7MyqL')
+ 123456789
+ """
+
+ if not (type(string) is types.ListType or type(string) is types.StringType):
+ raise TypeError("You must pass a string or a list")
+
+ integer = 0
+ for byte in string:
+ integer *= 64
+ if type(byte) is types.StringType: byte = ord(byte)
+ integer += from64(byte)
+
+ return integer
+
+def read_random_int(nbits):
+ """Reads a random integer of approximately nbits bits rounded up
+ to whole bytes"""
+
+ nbytes = int(math.ceil(nbits/8.))
+ randomdata = os.urandom(nbytes)
+ return bytes2int(randomdata)
+
+def randint(minvalue, maxvalue):
+ """Returns a random integer x with minvalue <= x <= maxvalue"""
+
+ # Safety - get a lot of random data even if the range is fairly
+ # small
+ min_nbits = 32
+
+ # The range of the random numbers we need to generate
+ range = (maxvalue - minvalue) + 1
+
+ # Which is this number of bytes
+ rangebytes = ((bit_size(range) + 7) / 8)
+
+ # Convert to bits, but make sure it's always at least min_nbits*2
+ rangebits = max(rangebytes * 8, min_nbits * 2)
+
+ # Take a random number of bits between min_nbits and rangebits
+ nbits = random.randint(min_nbits, rangebits)
+
+ return (read_random_int(nbits) % range) + minvalue
+
+def jacobi(a, b):
+ """Calculates the value of the Jacobi symbol (a/b)
+ where both a and b are positive integers, and b is odd
+ """
+
+ if a == 0: return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ a, b = b % a, a
+ else:
+ if (((b * b) - 1) >> 3) & 1:
+ result = -result
+ a >>= 1
+ if a == 0: return 0
+ return result
+
+def jacobi_witness(x, n):
+ """Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ """
+
+ j = jacobi(x, n) % n
+ f = pow(x, (n-1)/2, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ """Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ """
+
+ # 50% of Jacobi witnesses can report compositeness of non-prime numbers
+
+ for i in range(k):
+ x = randint(1, n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
+
+def is_prime(number):
+ """Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ 0
+ >>> is_prime(41)
+ 1
+ """
+
+ if randomized_primality_testing(number, 6):
+ # Prime, according to Jacobi
+ return True
+
+ # Not prime
+ return False
+
+
+def getprime(nbits):
+ """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
+ other words: nbits is rounded up to whole bytes.
+
+ >>> p = getprime(8)
+ >>> is_prime(p-1)
+ 0
+ >>> is_prime(p)
+ 1
+ >>> is_prime(p+1)
+ 0
+ """
+
+ while True:
+ integer = read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer): break
+
+ # Retry if not prime
+
+ return integer
+
+def are_relatively_prime(a, b):
+ """Returns True if a and b are relatively prime, and False if they
+ are not.
+
+ >>> are_relatively_prime(2, 3)
+ 1
+ >>> are_relatively_prime(2, 4)
+ 0
+ """
+
+ d = gcd(a, b)
+ return (d == 1)
+
+def find_p_q(nbits):
+ """Returns a tuple of two different primes of nbits bits"""
+ pbits = nbits + (nbits/16) #Make sure that p and q aren't too close
+ qbits = nbits - (nbits/16) #or the factoring programs can factor n
+ p = getprime(pbits)
+ while True:
+ q = getprime(qbits)
+ #Make sure p and q are different.
+ if not q == p: break
+ return (p, q)
+
+def extended_gcd(a, b):
+ """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
+ """
+ # r = gcd(a,b) i = multiplicative inverse of a mod b
+ # or j = multiplicative inverse of b mod a
+ # Neg return values for i or j are made positive mod b or a respectively
+ # The iterative version is faster and uses much less stack space
+ x = 0
+ y = 1
+ lx = 1
+ ly = 0
+ oa = a #Remember original a/b to remove
+ ob = b #negative values from return results
+ while b != 0:
+ q = long(a/b)
+ (a, b) = (b, a % b)
+ (x, lx) = ((lx - (q * x)),x)
+ (y, ly) = ((ly - (q * y)),y)
+ if (lx < 0): lx += ob #If negative, wrap modulo the original b
+ if (ly < 0): ly += oa #If negative, wrap modulo the original a
+ return (a, lx, ly) #Return only positive values
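+
+# Example (illustrative): when gcd(a, b) == 1, the second value returned is
+# the multiplicative inverse of a modulo b.
+#
+# >>> d, i, j = extended_gcd(3, 11)
+# >>> assert d == 1 and (3 * i) % 11 == 1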
+
+# Main function: calculate encryption and decryption keys
+def calculate_keys(p, q, nbits):
+ """Calculates an encryption and a decryption key for p and q, and
+ returns them as a tuple (e, d)"""
+
+ n = p * q
+ phi_n = (p-1) * (q-1)
+
+ while True:
+ # Make sure e has enough bits so we ensure "wrapping" through
+ # modulo n
+ e = max(65537,getprime(nbits/4))
+ if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
+
+ (d, i, j) = extended_gcd(e, phi_n)
+
+ if not d == 1:
+ raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
+ if (i < 0):
+ raise Exception("New extended_gcd shouldn't return negative values")
+ if not (e * i) % phi_n == 1:
+ raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
+
+ return (e, i)
+
+
+def gen_keys(nbits):
+ """Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+ """
+
+ (p, q) = find_p_q(nbits)
+ (e, d) = calculate_keys(p, q, nbits)
+
+ return (p, q, e, d)
+
+def newkeys(nbits):
+ """Generates public and private keys, and returns them as (pub,
+ priv).
+
+ The public key consists of a dict {'e': ..., 'n': ...}. The private
+ key consists of a dict {'d': ..., 'p': ..., 'q': ...}.
+ """
+ nbits = max(9,nbits) # Don't let nbits go below 9 bits
+ (p, q, e, d) = gen_keys(nbits)
+
+ return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
+
+def encrypt_int(message, ekey, n):
+ """Encrypts a message using encryption key 'ekey', working modulo n"""
+
+ if type(message) is types.IntType:
+ message = long(message)
+
+ if not type(message) is types.LongType:
+ raise TypeError("You must pass a long or int")
+
+ if message < 0 or message > n:
+ raise OverflowError("The message is too long")
+
+ #Note: bit exponents start at zero (bit counts start at 1), so this is correct
+ safebit = bit_size(n) - 2 #compute safe bit (MSB - 1)
+ message += (1 << safebit) #add safebit to ensure folding
+
+ return pow(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ """Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n"""
+
+ message = pow(cyphertext, dkey, n)
+
+ safebit = bit_size(n) - 2 #compute safe bit (MSB - 1)
+ message -= (1 << safebit) #remove safebit before decode
+
+ return message
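+
+# Worked example (illustrative, toy modulus n = 3 * 11 = 33 with e = 3 and
+# d = 7): bit_size(33) is 6, so safebit = 4 and the fold adds 1 << 4 = 16.
+# Encrypting 5 computes pow(5 + 16, 3, 33) == 21; decrypting computes
+# pow(21, 7, 33) == 21 and removes 16 again.
+#
+# >>> assert decrypt_int(encrypt_int(5, 3, 33), 7, 33) == 5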
+
+def encode64chops(chops):
+ """base64encodes chops and combines them into a ',' delimited string"""
+
+ chips = [] #chips are character chops
+
+ for value in chops:
+ chips.append(int2str64(value))
+
+ #delimit chops with comma
+ encoded = ','.join(chips)
+
+ return encoded
+
+def decode64chops(string):
+ """base64decodes and makes a ',' delimited string into chops"""
+
+ chips = string.split(',') #split chops at commas
+
+ chops = []
+
+ for string in chips: #make char chops (chips) into chops
+ chops.append(str642int(string))
+
+ return chops
+
+def chopstring(message, key, n, funcref):
+ """Chops the 'message' into integers that fit into n,
+ leaving room for a safebit to be added to ensure that all
+ messages fold during exponentiation. The MSB of the number n
+ is not independent modulo n (setting it could cause overflow), so
+ use the next lower bit for the safebit. Therefore reserve 2-bits
+ in the number n for non-data bits. Calls specified encryption
+ function for each chop.
+
+ Used by 'encrypt' and 'sign'.
+ """
+
+ msglen = len(message)
+ mbits = msglen * 8
+ #Set aside 2-bits so setting of safebit won't overflow modulo n.
+ nbits = bit_size(n) - 2 # leave room for safebit
+ nbytes = nbits / 8
+ blocks = msglen / nbytes
+
+ if msglen % nbytes > 0:
+ blocks += 1
+
+ cypher = []
+
+ for bindex in range(blocks):
+ offset = bindex * nbytes
+ block = message[offset:offset+nbytes]
+ value = bytes2int(block)
+ cypher.append(funcref(value, key, n))
+
+ return encode64chops(cypher) #Encode encrypted ints to base64 strings
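+
+# Example (illustrative): with a 64-bit modulus, nbits = 64 - 2 = 62 and
+# nbytes = 7, so a 20-byte message is chopped into 3 blocks of at most
+# 7 bytes each.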
+
+def gluechops(string, key, n, funcref):
+ """Glues chops back together into a string. calls
+ funcref(integer, key, n) for each chop.
+
+ Used by 'decrypt' and 'verify'.
+ """
+ message = ""
+
+ chops = decode64chops(string) #Decode base64 strings into integer chops
+
+ for cpart in chops:
+ mpart = funcref(cpart, key, n) #Decrypt each chop
+ message += int2bytes(mpart) #Combine decrypted strings into a msg
+
+ return message
+
+def encrypt(message, key):
+ """Encrypts a string 'message' with the public key 'key'"""
+ if 'n' not in key:
+ raise Exception("You must use the public key with encrypt")
+
+ return chopstring(message, key['e'], key['n'], encrypt_int)
+
+def sign(message, key):
+ """Signs a string 'message' with the private key 'key'"""
+ if 'p' not in key:
+ raise Exception("You must use the private key with sign")
+
+ return chopstring(message, key['d'], key['p']*key['q'], encrypt_int)
+
+def decrypt(cypher, key):
+ """Decrypts a string 'cypher' with the private key 'key'"""
+ if 'p' not in key:
+ raise Exception("You must use the private key with decrypt")
+
+ return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
+
+def verify(cypher, key):
+ """Verifies a string 'cypher' with the public key 'key'"""
+ if 'n' not in key:
+ raise Exception("You must use the public key with verify")
+
+ return gluechops(cypher, key['e'], key['n'], decrypt_int)
+
+# Run doctests when executed as a script rather than imported
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
+
+__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"]
+
diff --git a/python/rsa/rsa/asn1.py b/python/rsa/rsa/asn1.py
new file mode 100644
index 000000000..706e6cf22
--- /dev/null
+++ b/python/rsa/rsa/asn1.py
@@ -0,0 +1,35 @@
+'''ASN.1 definitions.
+
+Not all ASN.1-handling code uses these definitions, but when it does, they should be here.
+'''
+
+from pyasn1.type import univ, namedtype, tag
+
+class PubKeyHeader(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('oid', univ.ObjectIdentifier()),
+ namedtype.NamedType('parameters', univ.Null()),
+ )
+
+class OpenSSLPubKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('header', PubKeyHeader()),
+
+ # This little hack (the implicit tag) allows us to get a Bit String as Octet String
+ namedtype.NamedType('key', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))),
+ )
+
+
+class AsnPubKey(univ.Sequence):
+ '''ASN.1 contents of DER encoded public key:
+
+ RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER, -- n
+ publicExponent INTEGER, -- e
+    }
+    '''
+
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ )
diff --git a/python/rsa/rsa/bigfile.py b/python/rsa/rsa/bigfile.py
new file mode 100644
index 000000000..516cf56b5
--- /dev/null
+++ b/python/rsa/rsa/bigfile.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Large file support
+
+ - break a file into smaller blocks, encrypt them, and store the
+   encrypted blocks in another file.
+
+ - take such an encrypted file, decrypt its blocks, and reconstruct the
+   original file.
+
+The encrypted file format is as follows, where || denotes byte concatenation:
+
+ FILE := VERSION || BLOCK || BLOCK ...
+
+ BLOCK := LENGTH || DATA
+
+ LENGTH := varint-encoded length of the subsequent data. Varint comes from
+ Google Protobuf, and encodes an integer into a variable number of bytes.
+ Each byte uses the 7 lowest bits to encode the value. The highest bit set
+ to 1 indicates the next byte is also part of the varint. The last byte will
+ have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
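+For example (an illustrative sketch): a 300-byte block is stored as the two
+length bytes AC 02 followed by the 300 data bytes, because 300 is
+0b100101100: the low seven bits (0101100, 0x2C) go into the first byte with
+the continuation bit set (0xAC), and the remaining bits (10, 0x02) go into
+the second byte.
+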
+'''
+
+from rsa import key, common, pkcs1, varblock
+from rsa._compat import byte
+
+def encrypt_bigfile(infile, outfile, pub_key):
+ '''Encrypts a file, writing it to 'outfile' in VARBLOCK format.
+
+ :param infile: file-like object to read the cleartext from
+ :param outfile: file-like object to write the crypto in VARBLOCK format to
+ :param pub_key: :py:class:`rsa.PublicKey` to encrypt with
+
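+    Usage sketch (file names are illustrative)::
+
+        infile = open('cleartext.file', 'rb')
+        outfile = open('crypto.varblock', 'wb')
+        encrypt_bigfile(infile, outfile, pub_key)
+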
+ '''
+
+ if not isinstance(pub_key, key.PublicKey):
+ raise TypeError('Public key required, but got %r' % pub_key)
+
+ key_bytes = common.bit_size(pub_key.n) // 8
+ blocksize = key_bytes - 11 # keep space for PKCS#1 padding
+
+ # Write the version number to the VARBLOCK file
+ outfile.write(byte(varblock.VARBLOCK_VERSION))
+
+ # Encrypt and write each block
+ for block in varblock.yield_fixedblocks(infile, blocksize):
+ crypto = pkcs1.encrypt(block, pub_key)
+
+ varblock.write_varint(outfile, len(crypto))
+ outfile.write(crypto)
+
+def decrypt_bigfile(infile, outfile, priv_key):
+ '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile'
+
+ :param infile: file-like object to read the crypto in VARBLOCK format from
+ :param outfile: file-like object to write the cleartext to
+ :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with
+
+ '''
+
+ if not isinstance(priv_key, key.PrivateKey):
+ raise TypeError('Private key required, but got %r' % priv_key)
+
+ for block in varblock.yield_varblocks(infile):
+ cleartext = pkcs1.decrypt(block, priv_key)
+ outfile.write(cleartext)
+
+__all__ = ['encrypt_bigfile', 'decrypt_bigfile']
+
diff --git a/python/rsa/rsa/cli.py b/python/rsa/rsa/cli.py
new file mode 100644
index 000000000..527cc4979
--- /dev/null
+++ b/python/rsa/rsa/cli.py
@@ -0,0 +1,379 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Commandline scripts.
+
+These scripts are called by the executables defined in setup.py.
+'''
+
+from __future__ import with_statement, print_function
+
+import abc
+import sys
+from optparse import OptionParser
+
+import rsa
+import rsa.bigfile
+import rsa.pkcs1
+
+HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys())
+
+def keygen():
+ '''Key generator.'''
+
+ # Parse the CLI options
+ parser = OptionParser(usage='usage: %prog [options] keysize',
+ description='Generates a new RSA keypair of "keysize" bits.')
+
+ parser.add_option('--pubout', type='string',
+ help='Output filename for the public key. The public key is '
+ 'not saved if this option is not present. You can use '
+ 'pyrsa-priv2pub to create the public key file later.')
+
+ parser.add_option('-o', '--out', type='string',
+ help='Output filename for the private key. The key is '
+ 'written to stdout if this option is not present.')
+
+ parser.add_option('--form',
+ help='key format of the private and public keys - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv[1:])
+
+ if len(cli_args) != 1:
+ parser.print_help()
+ raise SystemExit(1)
+
+ try:
+ keysize = int(cli_args[0])
+ except ValueError:
+ parser.print_help()
+ print('Not a valid number: %s' % cli_args[0], file=sys.stderr)
+ raise SystemExit(1)
+
+ print('Generating %i-bit key' % keysize, file=sys.stderr)
+ (pub_key, priv_key) = rsa.newkeys(keysize)
+
+
+ # Save public key
+ if cli.pubout:
+ print('Writing public key to %s' % cli.pubout, file=sys.stderr)
+ data = pub_key.save_pkcs1(format=cli.form)
+ with open(cli.pubout, 'wb') as outfile:
+ outfile.write(data)
+
+ # Save private key
+ data = priv_key.save_pkcs1(format=cli.form)
+
+ if cli.out:
+ print('Writing private key to %s' % cli.out, file=sys.stderr)
+ with open(cli.out, 'wb') as outfile:
+ outfile.write(data)
+ else:
+ print('Writing private key to stdout', file=sys.stderr)
+ sys.stdout.write(data)
+
+
+class CryptoOperation(object):
+ '''CLI callable that operates with input, output, and a key.'''
+
+ __metaclass__ = abc.ABCMeta
+
+ keyname = 'public' # or 'private'
+ usage = 'usage: %%prog [options] %(keyname)s_key'
+ description = None
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \
+ 'not specified.'
+ output_help = 'Name of the file to write the %(operation_past)s file ' \
+ 'to. Written to stdout if this option is not present.'
+ expected_cli_args = 1
+ has_output = True
+
+ key_class = rsa.PublicKey
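+
+    # Subclasses customise the behaviour by overriding the class attributes
+    # above (keyname, operation, key_class, and so on); see EncryptOperation
+    # below for a minimal example.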
+
+ def __init__(self):
+ self.usage = self.usage % self.__class__.__dict__
+ self.input_help = self.input_help % self.__class__.__dict__
+ self.output_help = self.output_help % self.__class__.__dict__
+
+ @abc.abstractmethod
+ def perform_operation(self, indata, key, cli_args=None):
+ '''Performs the program's operation.
+
+ Implement in a subclass.
+
+ :returns: the data to write to the output.
+ '''
+
+ def __call__(self):
+ '''Runs the program.'''
+
+ (cli, cli_args) = self.parse_cli()
+
+ key = self.read_key(cli_args[0], cli.keyform)
+
+ indata = self.read_infile(cli.input)
+
+ print(self.operation_progressive.title(), file=sys.stderr)
+ outdata = self.perform_operation(indata, key, cli_args)
+
+ if self.has_output:
+ self.write_outfile(outdata, cli.output)
+
+ def parse_cli(self):
+ '''Parse the CLI options
+
+ :returns: (cli_opts, cli_args)
+ '''
+
+ parser = OptionParser(usage=self.usage, description=self.description)
+
+ parser.add_option('-i', '--input', type='string', help=self.input_help)
+
+ if self.has_output:
+ parser.add_option('-o', '--output', type='string', help=self.output_help)
+
+ parser.add_option('--keyform',
+ help='Key format of the %s key - default PEM' % self.keyname,
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv[1:])
+
+ if len(cli_args) != self.expected_cli_args:
+ parser.print_help()
+ raise SystemExit(1)
+
+ return (cli, cli_args)
+
+ def read_key(self, filename, keyform):
+ '''Reads a public or private key.'''
+
+ print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr)
+ with open(filename, 'rb') as keyfile:
+ keydata = keyfile.read()
+
+ return self.key_class.load_pkcs1(keydata, keyform)
+
+ def read_infile(self, inname):
+ '''Read the input file'''
+
+ if inname:
+ print('Reading input from %s' % inname, file=sys.stderr)
+ with open(inname, 'rb') as infile:
+ return infile.read()
+
+ print('Reading input from stdin', file=sys.stderr)
+ return sys.stdin.read()
+
+ def write_outfile(self, outdata, outname):
+ '''Write the output file'''
+
+ if outname:
+ print('Writing output to %s' % outname, file=sys.stderr)
+ with open(outname, 'wb') as outfile:
+ outfile.write(outdata)
+ else:
+ print('Writing output to stdout', file=sys.stderr)
+ sys.stdout.write(outdata)
+
+class EncryptOperation(CryptoOperation):
+ '''Encrypts a file.'''
+
+ keyname = 'public'
+ description = ('Encrypts a file. The file must be shorter than the key '
+ 'length in order to be encrypted. For larger files, use the '
+ 'pyrsa-encrypt-bigfile command.')
+ operation = 'encrypt'
+ operation_past = 'encrypted'
+ operation_progressive = 'encrypting'
+
+
+ def perform_operation(self, indata, pub_key, cli_args=None):
+ '''Encrypts files.'''
+
+ return rsa.encrypt(indata, pub_key)
+
+class DecryptOperation(CryptoOperation):
+ '''Decrypts a file.'''
+
+ keyname = 'private'
+ description = ('Decrypts a file. The original file must be shorter than '
+ 'the key length in order to have been encrypted. For larger '
+ 'files, use the pyrsa-decrypt-bigfile command.')
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ key_class = rsa.PrivateKey
+
+ def perform_operation(self, indata, priv_key, cli_args=None):
+ '''Decrypts files.'''
+
+ return rsa.decrypt(indata, priv_key)
+
+class SignOperation(CryptoOperation):
+ '''Signs a file.'''
+
+ keyname = 'private'
+ usage = 'usage: %%prog [options] private_key hash_method'
+ description = ('Signs a file, outputs the signature. Choose the hash '
+ 'method from %s' % ', '.join(HASH_METHODS))
+ operation = 'sign'
+ operation_past = 'signature'
+ operation_progressive = 'Signing'
+ key_class = rsa.PrivateKey
+ expected_cli_args = 2
+
+ output_help = ('Name of the file to write the signature to. Written '
+ 'to stdout if this option is not present.')
+
+ def perform_operation(self, indata, priv_key, cli_args):
+        '''Signs the file.'''
+
+ hash_method = cli_args[1]
+ if hash_method not in HASH_METHODS:
+ raise SystemExit('Invalid hash method, choose one of %s' %
+ ', '.join(HASH_METHODS))
+
+ return rsa.sign(indata, priv_key, hash_method)
+
+class VerifyOperation(CryptoOperation):
+ '''Verify a signature.'''
+
+ keyname = 'public'
+ usage = 'usage: %%prog [options] public_key signature_file'
+ description = ('Verifies a signature, exits with status 0 upon success, '
+ 'prints an error message and exits with status 1 upon error.')
+ operation = 'verify'
+ operation_past = 'verified'
+ operation_progressive = 'Verifying'
+ key_class = rsa.PublicKey
+ expected_cli_args = 2
+ has_output = False
+
+ def perform_operation(self, indata, pub_key, cli_args):
+        '''Verifies the signature against the file.'''
+
+ signature_file = cli_args[1]
+
+ with open(signature_file, 'rb') as sigfile:
+ signature = sigfile.read()
+
+ try:
+ rsa.verify(indata, signature, pub_key)
+ except rsa.VerificationError:
+ raise SystemExit('Verification failed.')
+
+ print('Verification OK', file=sys.stderr)
+
+
+class BigfileOperation(CryptoOperation):
+ '''CryptoOperation that doesn't read the entire file into memory.'''
+
+ def __init__(self):
+ CryptoOperation.__init__(self)
+
+ self.file_objects = []
+
+ def __del__(self):
+ '''Closes any open file handles.'''
+
+ for fobj in self.file_objects:
+ fobj.close()
+
+ def __call__(self):
+ '''Runs the program.'''
+
+ (cli, cli_args) = self.parse_cli()
+
+ key = self.read_key(cli_args[0], cli.keyform)
+
+ # Get the file handles
+ infile = self.get_infile(cli.input)
+ outfile = self.get_outfile(cli.output)
+
+ # Call the operation
+ print(self.operation_progressive.title(), file=sys.stderr)
+ self.perform_operation(infile, outfile, key, cli_args)
+
+ def get_infile(self, inname):
+ '''Returns the input file object'''
+
+ if inname:
+ print('Reading input from %s' % inname, file=sys.stderr)
+ fobj = open(inname, 'rb')
+ self.file_objects.append(fobj)
+ else:
+ print('Reading input from stdin', file=sys.stderr)
+ fobj = sys.stdin
+
+ return fobj
+
+ def get_outfile(self, outname):
+ '''Returns the output file object'''
+
+ if outname:
+ print('Will write output to %s' % outname, file=sys.stderr)
+ fobj = open(outname, 'wb')
+ self.file_objects.append(fobj)
+ else:
+ print('Will write output to stdout', file=sys.stderr)
+ fobj = sys.stdout
+
+ return fobj
+
+class EncryptBigfileOperation(BigfileOperation):
+ '''Encrypts a file to VARBLOCK format.'''
+
+ keyname = 'public'
+ description = ('Encrypts a file to an encrypted VARBLOCK file. The file '
+ 'can be larger than the key length, but the output file is only '
+ 'compatible with Python-RSA.')
+ operation = 'encrypt'
+ operation_past = 'encrypted'
+ operation_progressive = 'encrypting'
+
+ def perform_operation(self, infile, outfile, pub_key, cli_args=None):
+ '''Encrypts files to VARBLOCK.'''
+
+ return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key)
+
+class DecryptBigfileOperation(BigfileOperation):
+ '''Decrypts a file in VARBLOCK format.'''
+
+ keyname = 'private'
+ description = ('Decrypts an encrypted VARBLOCK file that was encrypted '
+ 'with pyrsa-encrypt-bigfile')
+ operation = 'decrypt'
+ operation_past = 'decrypted'
+ operation_progressive = 'decrypting'
+ key_class = rsa.PrivateKey
+
+ def perform_operation(self, infile, outfile, priv_key, cli_args=None):
+ '''Decrypts a VARBLOCK file.'''
+
+ return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key)
+
+
+encrypt = EncryptOperation()
+decrypt = DecryptOperation()
+sign = SignOperation()
+verify = VerifyOperation()
+encrypt_bigfile = EncryptBigfileOperation()
+decrypt_bigfile = DecryptBigfileOperation()
+
diff --git a/python/rsa/rsa/common.py b/python/rsa/rsa/common.py
new file mode 100644
index 000000000..39feb8c22
--- /dev/null
+++ b/python/rsa/rsa/common.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Common functionality shared by several modules.'''
+
+
+def bit_size(num):
+ '''
+    Number of bits needed to represent an integer, excluding any leading
+    zero bits.
+
+ As per definition from http://wiki.python.org/moin/BitManipulation and
+ to match the behavior of the Python 3 API.
+
+ Usage::
+
+ >>> bit_size(1023)
+ 10
+ >>> bit_size(1024)
+ 11
+ >>> bit_size(1025)
+ 11
+
+ :param num:
+        Integer value. If num is 0, returns 0. Only the absolute value of
+        the number is considered, so negative integers are treated as
+        abs(num) when determining the bit length.
+ :returns:
+ Returns the number of bits in the integer.
+ '''
+ if num == 0:
+ return 0
+ if num < 0:
+ num = -num
+
+    # Make sure this is an int and not a float (bitwise AND raises a
+    # TypeError for floats).
+ num & 1
+
+ hex_num = "%x" % num
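+    # E.g. num = 0x1ff: the two trailing hex digits contribute (3 - 1) * 4 = 8
+    # bits, and the leading digit '1' contributes 1 more, giving bit_size 9.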
+ return ((len(hex_num) - 1) * 4) + {
+ '0':0, '1':1, '2':2, '3':2,
+ '4':3, '5':3, '6':3, '7':3,
+ '8':4, '9':4, 'a':4, 'b':4,
+ 'c':4, 'd':4, 'e':4, 'f':4,
+ }[hex_num[0]]
+
+
+def _bit_size(number):
+ '''
+ Returns the number of bits required to hold a specific long number.
+ '''
+ if number < 0:
+ raise ValueError('Only nonnegative numbers possible: %s' % number)
+
+ if number == 0:
+ return 0
+
+ # This works, even with very large numbers. When using math.log(number, 2),
+ # you'll get rounding errors and it'll fail.
+ bits = 0
+ while number:
+ bits += 1
+ number >>= 1
+
+ return bits
+
+
+def byte_size(number):
+ '''
+ Returns the number of bytes required to hold a specific long number.
+
+ The number of bytes is rounded up.
+
+ Usage::
+
+ >>> byte_size(1 << 1023)
+ 128
+ >>> byte_size((1 << 1024) - 1)
+ 128
+ >>> byte_size(1 << 1024)
+ 129
+
+ :param number:
+ An unsigned integer
+ :returns:
+ The number of bytes required to hold a specific long number.
+ '''
+ quanta, mod = divmod(bit_size(number), 8)
+ if mod or number == 0:
+ quanta += 1
+ return quanta
+ #return int(math.ceil(bit_size(number) / 8.0))
+
+
+def extended_gcd(a, b):
+ '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
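+
+    Example (a sketch; i and j are reduced to non-negative values, so the
+    identity above only holds modulo b and a respectively):
+
+    >>> extended_gcd(10, 4)
+    (2, 1, 8)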
+ '''
+    # r = gcd(a,b); i = multiplicative inverse of a mod b,
+    # or j = multiplicative inverse of b mod a.
+    # Negative return values for i or j are made positive mod b or a respectively.
+    # The iterative version is faster and uses much less stack space.
+ x = 0
+ y = 1
+ lx = 1
+ ly = 0
+ oa = a #Remember original a/b to remove
+ ob = b #negative values from return results
+ while b != 0:
+ q = a // b
+ (a, b) = (b, a % b)
+ (x, lx) = ((lx - (q * x)),x)
+ (y, ly) = ((ly - (q * y)),y)
+        if (lx < 0): lx += ob #If negative, wrap modulo original b
+        if (ly < 0): ly += oa #If negative, wrap modulo original a
+ return (a, lx, ly) #Return only positive values
+
+
+def inverse(x, n):
+ '''Returns x^-1 (mod n)
+
+ >>> inverse(7, 4)
+ 3
+ >>> (inverse(143, 4) * 143) % 4
+ 1
+ '''
+
+ (divider, inv, _) = extended_gcd(x, n)
+
+ if divider != 1:
+ raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n))
+
+ return inv
+
+
+def crt(a_values, modulo_values):
+ '''Chinese Remainder Theorem.
+
+ Calculates x such that x = a[i] (mod m[i]) for each i.
+
+ :param a_values: the a-values of the above equation
+ :param modulo_values: the m-values of the above equation
+ :returns: x such that x = a[i] (mod m[i]) for each i
+
+
+ >>> crt([2, 3], [3, 5])
+ 8
+
+ >>> crt([2, 3, 2], [3, 5, 7])
+ 23
+
+ >>> crt([2, 3, 0], [7, 11, 15])
+ 135
+ '''
+
+ m = 1
+ x = 0
+
+ for modulo in modulo_values:
+ m *= modulo
+
+ for (m_i, a_i) in zip(modulo_values, a_values):
+ M_i = m // m_i
+ inv = inverse(M_i, m_i)
+
+ x = (x + a_i * M_i * inv) % m
+
+ return x
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
diff --git a/python/rsa/rsa/core.py b/python/rsa/rsa/core.py
new file mode 100644
index 000000000..90dfee8e5
--- /dev/null
+++ b/python/rsa/rsa/core.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Core mathematical operations.
+
+This is the actual core RSA implementation, which is only defined
+mathematically on integers.
+'''
+
+
+from rsa._compat import is_integer
+
+def assert_int(var, name):
+
+ if is_integer(var):
+ return
+
+ raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
+
+def encrypt_int(message, ekey, n):
+ '''Encrypts a message using encryption key 'ekey', working modulo n'''
+
+ assert_int(message, 'message')
+ assert_int(ekey, 'ekey')
+ assert_int(n, 'n')
+
+ if message < 0:
+ raise ValueError('Only non-negative numbers are supported')
+
+ if message > n:
+ raise OverflowError("The message %i is too long for n=%i" % (message, n))
+
+ return pow(message, ekey, n)
+
+def decrypt_int(cyphertext, dkey, n):
+ '''Decrypts a cypher text using the decryption key 'dkey', working
+ modulo n'''
+
+ assert_int(cyphertext, 'cyphertext')
+ assert_int(dkey, 'dkey')
+ assert_int(n, 'n')
+
+ message = pow(cyphertext, dkey, n)
+ return message
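+
+# Worked example (the textbook toy key p=3, q=11, so n=33, e=3, d=7):
+#
+#   encrypt_int(5, 3, 33) == pow(5, 3, 33) == 26
+#   decrypt_int(26, 7, 33) == pow(26, 7, 33) == 5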
+
diff --git a/python/rsa/rsa/key.py b/python/rsa/rsa/key.py
new file mode 100644
index 000000000..b6de7b3f3
--- /dev/null
+++ b/python/rsa/rsa/key.py
@@ -0,0 +1,612 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''RSA key generation code.
+
+Create new keys with the newkeys() function. It will give you a PublicKey and a
+PrivateKey object.
+
+Loading and saving keys requires the pyasn1 module. This module is imported as
+late as possible, such that other functionality will remain working in absence
+of pyasn1.
+
+'''
+
+import logging
+from rsa._compat import b, bytes_type
+
+import rsa.prime
+import rsa.pem
+import rsa.common
+
+log = logging.getLogger(__name__)
+
+
+
+class AbstractKey(object):
+ '''Abstract superclass for private and public keys.'''
+
+ @classmethod
+ def load_pkcs1(cls, keyfile, format='PEM'):
+ r'''Loads a key in PKCS#1 DER or PEM format.
+
+ :param keyfile: contents of a DER- or PEM-encoded file that contains
+ the public key.
+ :param format: the format of the file to load; 'PEM' or 'DER'
+
+ :return: a PublicKey object
+
+ '''
+
+ methods = {
+ 'PEM': cls._load_pkcs1_pem,
+ 'DER': cls._load_pkcs1_der,
+ }
+
+ if format not in methods:
+ formats = ', '.join(sorted(methods.keys()))
+ raise ValueError('Unsupported format: %r, try one of %s' % (format,
+ formats))
+
+ method = methods[format]
+ return method(keyfile)
+
+ def save_pkcs1(self, format='PEM'):
+ '''Saves the public key in PKCS#1 DER or PEM format.
+
+ :param format: the format to save; 'PEM' or 'DER'
+ :returns: the DER- or PEM-encoded public key.
+
+ '''
+
+ methods = {
+ 'PEM': self._save_pkcs1_pem,
+ 'DER': self._save_pkcs1_der,
+ }
+
+ if format not in methods:
+ formats = ', '.join(sorted(methods.keys()))
+ raise ValueError('Unsupported format: %r, try one of %s' % (format,
+ formats))
+
+ method = methods[format]
+ return method()
+
+class PublicKey(AbstractKey):
+ '''Represents a public RSA key.
+
+ This key is also known as the 'encryption key'. It contains the 'n' and 'e'
+ values.
+
+    Supports attributes as well as dictionary-like access. Attribute access is
+ faster, though.
+
+ >>> PublicKey(5, 3)
+ PublicKey(5, 3)
+
+ >>> key = PublicKey(5, 3)
+ >>> key.n
+ 5
+ >>> key['n']
+ 5
+ >>> key.e
+ 3
+ >>> key['e']
+ 3
+
+ '''
+
+ __slots__ = ('n', 'e')
+
+ def __init__(self, n, e):
+ self.n = n
+ self.e = e
+
+ def __getitem__(self, key):
+ return getattr(self, key)
+
+ def __repr__(self):
+ return 'PublicKey(%i, %i)' % (self.n, self.e)
+
+ def __eq__(self, other):
+ if other is None:
+ return False
+
+ if not isinstance(other, PublicKey):
+ return False
+
+ return self.n == other.n and self.e == other.e
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @classmethod
+ def _load_pkcs1_der(cls, keyfile):
+ r'''Loads a key in PKCS#1 DER format.
+
+ @param keyfile: contents of a DER-encoded file that contains the public
+ key.
+ @return: a PublicKey object
+
+ First let's construct a DER encoded key:
+
+ >>> import base64
+ >>> b64der = 'MAwCBQCNGmYtAgMBAAE='
+ >>> der = base64.decodestring(b64der)
+
+ This loads the file:
+
+ >>> PublicKey._load_pkcs1_der(der)
+ PublicKey(2367317549, 65537)
+
+ '''
+
+ from pyasn1.codec.der import decoder
+ from rsa.asn1 import AsnPubKey
+
+ (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey())
+ return cls(n=int(priv['modulus']), e=int(priv['publicExponent']))
+
+ def _save_pkcs1_der(self):
+ '''Saves the public key in PKCS#1 DER format.
+
+ @returns: the DER-encoded public key.
+ '''
+
+ from pyasn1.codec.der import encoder
+ from rsa.asn1 import AsnPubKey
+
+ # Create the ASN object
+ asn_key = AsnPubKey()
+ asn_key.setComponentByName('modulus', self.n)
+ asn_key.setComponentByName('publicExponent', self.e)
+
+ return encoder.encode(asn_key)
+
+ @classmethod
+ def _load_pkcs1_pem(cls, keyfile):
+ '''Loads a PKCS#1 PEM-encoded public key file.
+
+ The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
+        after the "-----END RSA PUBLIC KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the public
+ key.
+ @return: a PublicKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY')
+ return cls._load_pkcs1_der(der)
+
+ def _save_pkcs1_pem(self):
+ '''Saves a PKCS#1 PEM-encoded public key file.
+
+ @return: contents of a PEM-encoded file that contains the public key.
+ '''
+
+ der = self._save_pkcs1_der()
+ return rsa.pem.save_pem(der, 'RSA PUBLIC KEY')
+
+ @classmethod
+ def load_pkcs1_openssl_pem(cls, keyfile):
+ '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.
+
+        These files can be recognised by the fact that they start with BEGIN PUBLIC KEY
+ rather than BEGIN RSA PUBLIC KEY.
+
+ The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
+        after the "-----END PUBLIC KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the public
+ key, from OpenSSL.
+ @return: a PublicKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
+ return cls.load_pkcs1_openssl_der(der)
+
+ @classmethod
+ def load_pkcs1_openssl_der(cls, keyfile):
+ '''Loads a PKCS#1 DER-encoded public key file from OpenSSL.
+
+ @param keyfile: contents of a DER-encoded file that contains the public
+ key, from OpenSSL.
+ @return: a PublicKey object
+ '''
+
+ from rsa.asn1 import OpenSSLPubKey
+ from pyasn1.codec.der import decoder
+ from pyasn1.type import univ
+
+ (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())
+
+ if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
+ raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")
+
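+        # keyinfo['key'] is a BIT STRING read as an OCTET STRING; its first
+        # byte holds the number of unused bits (0 for RSA keys), so skip it
+        # before parsing the embedded RSAPublicKey.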
+ return cls._load_pkcs1_der(keyinfo['key'][1:])
+
+
+
+
+class PrivateKey(AbstractKey):
+ '''Represents a private RSA key.
+
+ This key is also known as the 'decryption key'. It contains the 'n', 'e',
+ 'd', 'p', 'q' and other values.
+
+    Supports attributes as well as dictionary-like access. Attribute access is
+ faster, though.
+
+ >>> PrivateKey(3247, 65537, 833, 191, 17)
+ PrivateKey(3247, 65537, 833, 191, 17)
+
+    exp1, exp2 and coef don't have to be given; if omitted, they will be calculated:
+
+ >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ >>> pk.exp1
+ 55063
+ >>> pk.exp2
+ 10095
+ >>> pk.coef
+ 50797
+
+ If you give exp1, exp2 or coef, they will be used as-is:
+
+ >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8)
+ >>> pk.exp1
+ 6
+ >>> pk.exp2
+ 7
+ >>> pk.coef
+ 8
+
+ '''
+
+ __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef')
+
+ def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None):
+ self.n = n
+ self.e = e
+ self.d = d
+ self.p = p
+ self.q = q
+
+ # Calculate the other values if they aren't supplied
+ if exp1 is None:
+ self.exp1 = int(d % (p - 1))
+ else:
+ self.exp1 = exp1
+
+        if exp2 is None:
+ self.exp2 = int(d % (q - 1))
+ else:
+ self.exp2 = exp2
+
+ if coef is None:
+ self.coef = rsa.common.inverse(q, p)
+ else:
+ self.coef = coef
+
+ def __getitem__(self, key):
+ return getattr(self, key)
+
+ def __repr__(self):
+ return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self
+
+ def __eq__(self, other):
+ if other is None:
+ return False
+
+ if not isinstance(other, PrivateKey):
+ return False
+
+ return (self.n == other.n and
+ self.e == other.e and
+ self.d == other.d and
+ self.p == other.p and
+ self.q == other.q and
+ self.exp1 == other.exp1 and
+ self.exp2 == other.exp2 and
+ self.coef == other.coef)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @classmethod
+ def _load_pkcs1_der(cls, keyfile):
+ r'''Loads a key in PKCS#1 DER format.
+
+ @param keyfile: contents of a DER-encoded file that contains the private
+ key.
+ @return: a PrivateKey object
+
+ First let's construct a DER encoded key:
+
+ >>> import base64
+ >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt'
+ >>> der = base64.decodestring(b64der)
+
+ This loads the file:
+
+ >>> PrivateKey._load_pkcs1_der(der)
+ PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ '''
+
+ from pyasn1.codec.der import decoder
+ (priv, _) = decoder.decode(keyfile)
+
+ # ASN.1 contents of DER encoded private key:
+ #
+ # RSAPrivateKey ::= SEQUENCE {
+ # version Version,
+ # modulus INTEGER, -- n
+ # publicExponent INTEGER, -- e
+ # privateExponent INTEGER, -- d
+ # prime1 INTEGER, -- p
+ # prime2 INTEGER, -- q
+ # exponent1 INTEGER, -- d mod (p-1)
+ # exponent2 INTEGER, -- d mod (q-1)
+ # coefficient INTEGER, -- (inverse of q) mod p
+ # otherPrimeInfos OtherPrimeInfos OPTIONAL
+ # }
+
+ if priv[0] != 0:
+ raise ValueError('Unable to read this file, version %s != 0' % priv[0])
+
+ as_ints = tuple(int(x) for x in priv[1:9])
+ return cls(*as_ints)
+
+ def _save_pkcs1_der(self):
+ '''Saves the private key in PKCS#1 DER format.
+
+ @returns: the DER-encoded private key.
+ '''
+
+ from pyasn1.type import univ, namedtype
+ from pyasn1.codec.der import encoder
+
+ class AsnPrivKey(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer()),
+ namedtype.NamedType('modulus', univ.Integer()),
+ namedtype.NamedType('publicExponent', univ.Integer()),
+ namedtype.NamedType('privateExponent', univ.Integer()),
+ namedtype.NamedType('prime1', univ.Integer()),
+ namedtype.NamedType('prime2', univ.Integer()),
+ namedtype.NamedType('exponent1', univ.Integer()),
+ namedtype.NamedType('exponent2', univ.Integer()),
+ namedtype.NamedType('coefficient', univ.Integer()),
+ )
+
+ # Create the ASN object
+ asn_key = AsnPrivKey()
+ asn_key.setComponentByName('version', 0)
+ asn_key.setComponentByName('modulus', self.n)
+ asn_key.setComponentByName('publicExponent', self.e)
+ asn_key.setComponentByName('privateExponent', self.d)
+ asn_key.setComponentByName('prime1', self.p)
+ asn_key.setComponentByName('prime2', self.q)
+ asn_key.setComponentByName('exponent1', self.exp1)
+ asn_key.setComponentByName('exponent2', self.exp2)
+ asn_key.setComponentByName('coefficient', self.coef)
+
+ return encoder.encode(asn_key)
+
+ @classmethod
+ def _load_pkcs1_pem(cls, keyfile):
+ '''Loads a PKCS#1 PEM-encoded private key file.
+
+ The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and
+        after the "-----END RSA PRIVATE KEY-----" lines are ignored.
+
+ @param keyfile: contents of a PEM-encoded file that contains the private
+ key.
+ @return: a PrivateKey object
+ '''
+
+ der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY'))
+ return cls._load_pkcs1_der(der)
+
+ def _save_pkcs1_pem(self):
+ '''Saves a PKCS#1 PEM-encoded private key file.
+
+ @return: contents of a PEM-encoded file that contains the private key.
+ '''
+
+ der = self._save_pkcs1_der()
+ return rsa.pem.save_pem(der, b('RSA PRIVATE KEY'))
+
+def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True):
+    '''Returns a tuple of two different primes of nbits bits each.
+
+    The resulting p * q has exactly 2 * nbits bits, and the returned p and q
+ will not be equal.
+
+ :param nbits: the number of bits in each of p and q.
+ :param getprime_func: the getprime function, defaults to
+ :py:func:`rsa.prime.getprime`.
+
+ *Introduced in Python-RSA 3.1*
+
+ :param accurate: whether to enable accurate mode or not.
+ :returns: (p, q), where p > q
+
+ >>> (p, q) = find_p_q(128)
+ >>> from rsa import common
+ >>> common.bit_size(p * q)
+ 256
+
+ When not in accurate mode, the number of bits can be slightly less
+
+ >>> (p, q) = find_p_q(128, accurate=False)
+ >>> from rsa import common
+ >>> common.bit_size(p * q) <= 256
+ True
+ >>> common.bit_size(p * q) > 240
+ True
+
+ '''
+
+ total_bits = nbits * 2
+
+ # Make sure that p and q aren't too close or the factoring programs can
+ # factor n.
+ shift = nbits // 16
+ pbits = nbits + shift
+ qbits = nbits - shift
+
+ # Choose the two initial primes
+ log.debug('find_p_q(%i): Finding p', nbits)
+ p = getprime_func(pbits)
+ log.debug('find_p_q(%i): Finding q', nbits)
+ q = getprime_func(qbits)
+
+ def is_acceptable(p, q):
+ '''Returns True iff p and q are acceptable:
+
+ - p and q differ
+        - (p * q) has the right number of bits (when accurate=True)
+ '''
+
+ if p == q:
+ return False
+
+ if not accurate:
+ return True
+
+ # Make sure we have just the right amount of bits
+ found_size = rsa.common.bit_size(p * q)
+ return total_bits == found_size
+
+ # Keep choosing other primes until they match our requirements.
+ change_p = False
+ while not is_acceptable(p, q):
+ # Change p on one iteration and q on the other
+ if change_p:
+ p = getprime_func(pbits)
+ else:
+ q = getprime_func(qbits)
+
+ change_p = not change_p
+
+ # We want p > q as described on
+ # http://www.di-mgt.com.au/rsa_alg.html#crt
+ return (max(p, q), min(p, q))
+
+def calculate_keys(p, q, nbits):
+ '''Calculates an encryption and a decryption key given p and q, and
+ returns them as a tuple (e, d)
+
+ '''
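+
+    # Worked example (the same numbers as the PrivateKey doctest above):
+    # p = 191, q = 17 gives phi_n = 190 * 16 = 3040, and
+    # d = inverse(65537, 3040) = 833, since (65537 * 833) % 3040 == 1.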
+
+ phi_n = (p - 1) * (q - 1)
+
+ # A very common choice for e is 65537
+ e = 65537
+
+ try:
+ d = rsa.common.inverse(e, phi_n)
+ except ValueError:
+ raise ValueError("e (%d) and phi_n (%d) are not relatively prime" %
+ (e, phi_n))
+
+ if (e * d) % phi_n != 1:
+ raise ValueError("e (%d) and d (%d) are not mult. inv. modulo "
+ "phi_n (%d)" % (e, d, phi_n))
+
+ return (e, d)
+
+def gen_keys(nbits, getprime_func, accurate=True):
+ '''Generate RSA keys of nbits bits. Returns (p, q, e, d).
+
+ Note: this can take a long time, depending on the key size.
+
+ :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
+ ``q`` will use ``nbits/2`` bits.
+ :param getprime_func: either :py:func:`rsa.prime.getprime` or a function
+ with similar signature.
+ '''
+
+ (p, q) = find_p_q(nbits // 2, getprime_func, accurate)
+ (e, d) = calculate_keys(p, q, nbits // 2)
+
+ return (p, q, e, d)
+
+def newkeys(nbits, accurate=True, poolsize=1):
+ '''Generates public and private keys, and returns them as (pub, priv).
+
+ The public key is also known as the 'encryption key', and is a
+ :py:class:`rsa.PublicKey` object. The private key is also known as the
+ 'decryption key' and is a :py:class:`rsa.PrivateKey` object.
+
+ :param nbits: the number of bits required to store ``n = p*q``.
+ :param accurate: when True, ``n`` will have exactly the number of bits you
+ asked for. However, this makes key generation much slower. When False,
+        ``n`` may have slightly fewer bits.
+ :param poolsize: the number of processes to use to generate the prime
+ numbers. If set to a number > 1, a parallel algorithm will be used.
+ This requires Python 2.6 or newer.
+
+ :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)
+
+ The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
+ Python 2.6 or newer.
+
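+    Usage sketch::
+
+        (pub_key, priv_key) = newkeys(512)
+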
+ '''
+
+ if nbits < 16:
+ raise ValueError('Key too small')
+
+ if poolsize < 1:
+ raise ValueError('Pool size (%i) should be >= 1' % poolsize)
+
+ # Determine which getprime function to use
+ if poolsize > 1:
+ from rsa import parallel
+ import functools
+
+ getprime_func = functools.partial(parallel.getprime, poolsize=poolsize)
+    else:
+        getprime_func = rsa.prime.getprime
+
+ # Generate the key components
+ (p, q, e, d) = gen_keys(nbits, getprime_func)
+
+ # Create the key objects
+ n = p * q
+
+ return (
+ PublicKey(n, e),
+ PrivateKey(n, e, d, p, q)
+ )
+
+__all__ = ['PublicKey', 'PrivateKey', 'newkeys']
+
+if __name__ == '__main__':
+ import doctest
+
+ try:
+ for count in range(100):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if (count and count % 10 == 0) or count == 1:
+ print('%i times' % count)
+ except KeyboardInterrupt:
+ print('Aborted')
+ else:
+ print('Doctests done')
diff --git a/python/rsa/rsa/parallel.py b/python/rsa/rsa/parallel.py
new file mode 100644
index 000000000..e5034ac70
--- /dev/null
+++ b/python/rsa/rsa/parallel.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for parallel computation on multiple cores.
+
+Introduced in Python-RSA 3.1.
+
+.. note::
+
+ Requires Python 2.6 or newer.
+
+'''
+
+from __future__ import print_function
+
+import multiprocessing as mp
+
+import rsa.prime
+import rsa.randnum
+
+def _find_prime(nbits, pipe):
+ while True:
+ integer = rsa.randnum.read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if rsa.prime.is_prime(integer):
+ pipe.send(integer)
+ return
+
+def getprime(nbits, poolsize):
+ '''Returns a prime number that can be stored in 'nbits' bits.
+
+    Works in multiple processes at the same time.
+
+ >>> p = getprime(128, 3)
+ >>> rsa.prime.is_prime(p-1)
+ False
+ >>> rsa.prime.is_prime(p)
+ True
+ >>> rsa.prime.is_prime(p+1)
+ False
+
+ >>> from rsa import common
+ >>> common.bit_size(p) == 128
+ True
+
+ '''
+
+ (pipe_recv, pipe_send) = mp.Pipe(duplex=False)
+
+ # Create processes
+ procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send))
+ for _ in range(poolsize)]
+    for p in procs:
+        p.start()
+
+ result = pipe_recv.recv()
+
+    for p in procs:
+        p.terminate()
+
+ return result
+
+__all__ = ['getprime']
+
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(100):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 10 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
+
diff --git a/python/rsa/rsa/pem.py b/python/rsa/rsa/pem.py
new file mode 100644
index 000000000..b1c3a0edb
--- /dev/null
+++ b/python/rsa/rsa/pem.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions that load and write PEM-encoded files.'''
+
+import base64
+from rsa._compat import b, is_bytes
+
+def _markers(pem_marker):
+ '''
+ Returns the start and end PEM markers
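+
+    For example (a sketch), _markers('RSA PRIVATE KEY') returns the byte
+    strings '-----BEGIN RSA PRIVATE KEY-----' and
+    '-----END RSA PRIVATE KEY-----'.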
+ '''
+
+ if is_bytes(pem_marker):
+ pem_marker = pem_marker.decode('utf-8')
+
+ return (b('-----BEGIN %s-----' % pem_marker),
+ b('-----END %s-----' % pem_marker))
+
+def load_pem(contents, pem_marker):
+ '''Loads a PEM file.
+
+ @param contents: the contents of the file to interpret
+ @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
+ when your file has '-----BEGIN RSA PRIVATE KEY-----' and
+ '-----END RSA PRIVATE KEY-----' markers.
+
+ @return the base64-decoded content between the start and end markers.
+
+ @raise ValueError: when the content is invalid, for example when the start
+ marker cannot be found.
+
+ '''
+
+ (pem_start, pem_end) = _markers(pem_marker)
+
+ pem_lines = []
+ in_pem_part = False
+
+ for line in contents.splitlines():
+ line = line.strip()
+
+ # Skip empty lines
+ if not line:
+ continue
+
+ # Handle start marker
+ if line == pem_start:
+ if in_pem_part:
+ raise ValueError('Seen start marker "%s" twice' % pem_start)
+
+ in_pem_part = True
+ continue
+
+ # Skip stuff before first marker
+ if not in_pem_part:
+ continue
+
+ # Handle end marker
+ if in_pem_part and line == pem_end:
+ in_pem_part = False
+ break
+
+        # Skip header fields (lines containing ':'), such as 'Proc-Type:'
+ if b(':') in line:
+ continue
+
+ pem_lines.append(line)
+
+ # Do some sanity checks
+ if not pem_lines:
+ raise ValueError('No PEM start marker "%s" found' % pem_start)
+
+ if in_pem_part:
+ raise ValueError('No PEM end marker "%s" found' % pem_end)
+
+ # Base64-decode the contents
+ pem = b('').join(pem_lines)
+ return base64.decodestring(pem)
+
+
+def save_pem(contents, pem_marker):
+ '''Saves a PEM file.
+
+ @param contents: the contents to encode in PEM format
+ @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
+ when your file has '-----BEGIN RSA PRIVATE KEY-----' and
+ '-----END RSA PRIVATE KEY-----' markers.
+
+ @return the base64-encoded content between the start and end markers.
+
+ '''
+
+ (pem_start, pem_end) = _markers(pem_marker)
+
+ b64 = base64.encodestring(contents).replace(b('\n'), b(''))
+ pem_lines = [pem_start]
+
+ for block_start in range(0, len(b64), 64):
+ block = b64[block_start:block_start + 64]
+ pem_lines.append(block)
+
+ pem_lines.append(pem_end)
+ pem_lines.append(b(''))
+
+ return b('\n').join(pem_lines)
+
diff --git a/python/rsa/rsa/pkcs1.py b/python/rsa/rsa/pkcs1.py
new file mode 100644
index 000000000..15e4cf639
--- /dev/null
+++ b/python/rsa/rsa/pkcs1.py
@@ -0,0 +1,391 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for PKCS#1 version 1.5 encryption and signing
+
+This module implements certain functionality from PKCS#1 version 1.5. For a
+very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes
+
+At least 8 bytes of random padding are used when encrypting a message. This makes
+these methods much more secure than the ones in the ``rsa`` module.
+
+WARNING: this module leaks information when decryption or verification fails.
+The exceptions that are raised contain the Python traceback information, which
+can be used to deduce where in the process the failure occurred. DO NOT PASS
+SUCH INFORMATION to your users.
+'''
+
+import hashlib
+import os
+
+from rsa._compat import b
+from rsa import common, transform, core, varblock
+
+# ASN.1 codes that describe the hash algorithm used.
+HASH_ASN1 = {
+ 'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'),
+ 'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'),
+ 'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'),
+ 'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'),
+ 'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'),
+}
+
+HASH_METHODS = {
+ 'MD5': hashlib.md5,
+ 'SHA-1': hashlib.sha1,
+ 'SHA-256': hashlib.sha256,
+ 'SHA-384': hashlib.sha384,
+ 'SHA-512': hashlib.sha512,
+}
+
+class CryptoError(Exception):
+ '''Base class for all exceptions in this module.'''
+
+class DecryptionError(CryptoError):
+ '''Raised when decryption fails.'''
+
+class VerificationError(CryptoError):
+ '''Raised when verification fails.'''
+
+def _pad_for_encryption(message, target_length):
+ r'''Pads the message for encryption, returning the padded message.
+
+ :return: 00 02 RANDOM_DATA 00 MESSAGE
+
+ >>> block = _pad_for_encryption('hello', 16)
+ >>> len(block)
+ 16
+ >>> block[0:2]
+ '\x00\x02'
+ >>> block[-6:]
+ '\x00hello'
+
+ '''
+
+ max_msglength = target_length - 11
+ msglength = len(message)
+
+ if msglength > max_msglength:
+ raise OverflowError('%i bytes needed for message, but there is only'
+ ' space for %i' % (msglength, max_msglength))
+
+ # Get random padding
+ padding = b('')
+ padding_length = target_length - msglength - 3
+
+ # We remove 0-bytes, so we'll end up with less padding than we've asked for,
+ # so keep adding data until we're at the correct length.
+ while len(padding) < padding_length:
+ needed_bytes = padding_length - len(padding)
+
+        # Always read at least 5 bytes more than we need, and trim off the rest
+ # after removing the 0-bytes. This increases the chance of getting
+ # enough bytes, especially when needed_bytes is small
+ new_padding = os.urandom(needed_bytes + 5)
+ new_padding = new_padding.replace(b('\x00'), b(''))
+ padding = padding + new_padding[:needed_bytes]
+
+ assert len(padding) == padding_length
+
+ return b('').join([b('\x00\x02'),
+ padding,
+ b('\x00'),
+ message])
+
+
+def _pad_for_signing(message, target_length):
+ r'''Pads the message for signing, returning the padded message.
+
+ The padding is always a repetition of FF bytes.
+
+ :return: 00 01 PADDING 00 MESSAGE
+
+ >>> block = _pad_for_signing('hello', 16)
+ >>> len(block)
+ 16
+ >>> block[0:2]
+ '\x00\x01'
+ >>> block[-6:]
+ '\x00hello'
+ >>> block[2:-6]
+ '\xff\xff\xff\xff\xff\xff\xff\xff'
+
+ '''
+
+ max_msglength = target_length - 11
+ msglength = len(message)
+
+ if msglength > max_msglength:
+ raise OverflowError('%i bytes needed for message, but there is only'
+ ' space for %i' % (msglength, max_msglength))
+
+ padding_length = target_length - msglength - 3
+
+ return b('').join([b('\x00\x01'),
+ padding_length * b('\xff'),
+ b('\x00'),
+ message])
+
+
+def encrypt(message, pub_key):
+ '''Encrypts the given message using PKCS#1 v1.5
+
+ :param message: the message to encrypt. Must be a byte string no longer than
+ ``k-11`` bytes, where ``k`` is the number of bytes needed to encode
+ the ``n`` component of the public key.
+ :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with.
+ :raise OverflowError: when the message is too large to fit in the padded
+ block.
+
+ >>> from rsa import key, common
+ >>> (pub_key, priv_key) = key.newkeys(256)
+ >>> message = 'hello'
+ >>> crypto = encrypt(message, pub_key)
+
+ The crypto text should be just as long as the public key 'n' component:
+
+ >>> len(crypto) == common.byte_size(pub_key.n)
+ True
+
+ '''
+
+ keylength = common.byte_size(pub_key.n)
+ padded = _pad_for_encryption(message, keylength)
+
+ payload = transform.bytes2int(padded)
+ encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n)
+ block = transform.int2bytes(encrypted, keylength)
+
+ return block
+
+def decrypt(crypto, priv_key):
+ r'''Decrypts the given message using PKCS#1 v1.5
+
+ The decryption is considered 'failed' when the resulting cleartext doesn't
+ start with the bytes 00 02, or when the 00 byte between the padding and
+ the message cannot be found.
+
+ :param crypto: the crypto text as returned by :py:func:`rsa.encrypt`
+ :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with.
+ :raise DecryptionError: when the decryption fails. No details are given as
+ to why the code thinks the decryption fails, as this would leak
+ information about the private key.
+
+
+ >>> import rsa
+ >>> (pub_key, priv_key) = rsa.newkeys(256)
+
+ It works with strings:
+
+ >>> crypto = encrypt('hello', pub_key)
+ >>> decrypt(crypto, priv_key)
+ 'hello'
+
+ And with binary data:
+
+ >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key)
+ >>> decrypt(crypto, priv_key)
+ '\x00\x00\x00\x00\x01'
+
+ Altering the encrypted information will *likely* cause a
+ :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use
+ :py:func:`rsa.sign`.
+
+
+ .. warning::
+
+ Never display the stack trace of a
+ :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the
+ code the exception occurred, and thus leaks information about the key.
+ It's only a tiny bit of information, but every bit makes cracking the
+ keys easier.
+
+ >>> crypto = encrypt('hello', pub_key)
+ >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte
+ >>> decrypt(crypto, priv_key)
+ Traceback (most recent call last):
+ ...
+ DecryptionError: Decryption failed
+
+ '''
+
+ blocksize = common.byte_size(priv_key.n)
+ encrypted = transform.bytes2int(crypto)
+ decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n)
+ cleartext = transform.int2bytes(decrypted, blocksize)
+
+ # If we can't find the cleartext marker, decryption failed.
+ if cleartext[0:2] != b('\x00\x02'):
+ raise DecryptionError('Decryption failed')
+
+ # Find the 00 separator between the padding and the message
+ try:
+ sep_idx = cleartext.index(b('\x00'), 2)
+ except ValueError:
+ raise DecryptionError('Decryption failed')
+
+ return cleartext[sep_idx+1:]
+
+def sign(message, priv_key, hash):
+ '''Signs the message with the private key.
+
+ Hashes the message, then signs the hash with the given key. This is known
+ as a "detached signature", because the message itself isn't altered.
+
+ :param message: the message to sign. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param priv_key: the :py:class:`rsa.PrivateKey` to sign with
+ :param hash: the hash method used on the message. Use 'MD5', 'SHA-1',
+ 'SHA-256', 'SHA-384' or 'SHA-512'.
+ :return: a message signature block.
+ :raise OverflowError: if the private key is too small to contain the
+ requested hash.
+
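+    Usage sketch (key size and message are illustrative)::
+
+        (pub_key, priv_key) = rsa.newkeys(512)
+        signature = sign('hello', priv_key, 'SHA-256')
+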
+ '''
+
+ # Get the ASN1 code for this hash method
+ if hash not in HASH_ASN1:
+ raise ValueError('Invalid hash method: %s' % hash)
+ asn1code = HASH_ASN1[hash]
+
+ # Calculate the hash
+ hash = _hash(message, hash)
+
+ # Encrypt the hash with the private key
+ cleartext = asn1code + hash
+ keylength = common.byte_size(priv_key.n)
+ padded = _pad_for_signing(cleartext, keylength)
+
+ payload = transform.bytes2int(padded)
+ encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n)
+ block = transform.int2bytes(encrypted, keylength)
+
+ return block
+
+def verify(message, signature, pub_key):
+ '''Verifies that the signature matches the message.
+
+ The hash method is detected automatically from the signature.
+
+ :param message: the signed message. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param signature: the signature block, as created with :py:func:`rsa.sign`.
+ :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message.
+ :raise VerificationError: when the signature doesn't match the message.
+
+ .. warning::
+
+ Never display the stack trace of a
+ :py:class:`rsa.pkcs1.VerificationError` exception. It shows where in
+ the code the exception occurred, and thus leaks information about the
+ key. It's only a tiny bit of information, but every bit makes cracking
+ the keys easier.
+
+ '''
+
+ blocksize = common.byte_size(pub_key.n)
+ encrypted = transform.bytes2int(signature)
+ decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n)
+ clearsig = transform.int2bytes(decrypted, blocksize)
+
+ # If we can't find the signature marker, verification failed.
+ if clearsig[0:2] != b('\x00\x01'):
+ raise VerificationError('Verification failed')
+
+ # Find the 00 separator between the padding and the payload
+ try:
+ sep_idx = clearsig.index(b('\x00'), 2)
+ except ValueError:
+ raise VerificationError('Verification failed')
+
+ # Get the hash and the hash method
+ (method_name, signature_hash) = _find_method_hash(clearsig[sep_idx+1:])
+ message_hash = _hash(message, method_name)
+
+ # Compare the real hash to the hash in the signature
+ if message_hash != signature_hash:
+ raise VerificationError('Verification failed')
+
+ return True
+
+def _hash(message, method_name):
+ '''Returns the message digest.
+
+ :param message: the signed message. Can be an 8-bit string or a file-like
+ object. If ``message`` has a ``read()`` method, it is assumed to be a
+ file-like object.
+ :param method_name: the hash method, must be a key of
+ :py:const:`HASH_METHODS`.
+
+ '''
+
+ if method_name not in HASH_METHODS:
+ raise ValueError('Invalid hash method: %s' % method_name)
+
+ method = HASH_METHODS[method_name]
+ hasher = method()
+
+ if hasattr(message, 'read') and hasattr(message.read, '__call__'):
+ # read as 1K blocks
+ for block in varblock.yield_fixedblocks(message, 1024):
+ hasher.update(block)
+ else:
+ # hash the message object itself.
+ hasher.update(message)
+
+ return hasher.digest()
+
+
+def _find_method_hash(method_hash):
+ '''Finds the hash method and the hash itself.
+
+ :param method_hash: ASN1 code for the hash method concatenated with the
+ hash itself.
+
+ :return: tuple (method, hash) where ``method`` is the used hash method, and
+ ``hash`` is the hash itself.
+
+ :raise VerificationFailed: when the hash method cannot be found
+
+ '''
+
+ for (hashname, asn1code) in HASH_ASN1.items():
+ if not method_hash.startswith(asn1code):
+ continue
+
+ return (hashname, method_hash[len(asn1code):])
+
+ raise VerificationError('Verification failed')
+
+
+__all__ = ['encrypt', 'decrypt', 'sign', 'verify',
+ 'DecryptionError', 'VerificationError', 'CryptoError']
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(1000):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 100 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
diff --git a/python/rsa/rsa/prime.py b/python/rsa/rsa/prime.py
new file mode 100644
index 000000000..7422eb1d2
--- /dev/null
+++ b/python/rsa/rsa/prime.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Numerical functions related to primes.
+
+Implementation based on the book Algorithm Design by Michael T. Goodrich and
+Roberto Tamassia, 2002.
+'''
+
+__all__ = ['getprime', 'are_relatively_prime']
+
+import rsa.randnum
+
+def gcd(p, q):
+ '''Returns the greatest common divisor of p and q
+
+ >>> gcd(48, 180)
+ 12
+ '''
+
+ while q != 0:
+ if p < q: (p,q) = (q,p)
+ (p,q) = (q, p % q)
+ return p
+
+
+def jacobi(a, b):
+ '''Calculates the value of the Jacobi symbol (a/b) where both a and b are
+ positive integers, and b is odd
+
+ :returns: -1, 0 or 1
+ '''
+
+    assert a >= 0
+    assert b > 0
+
+    if a == 0: return 0
+ result = 1
+ while a > 1:
+ if a & 1:
+ if ((a-1)*(b-1) >> 2) & 1:
+ result = -result
+ a, b = b % a, a
+ else:
+ if (((b * b) - 1) >> 3) & 1:
+ result = -result
+ a >>= 1
+ if a == 0: return 0
+ return result
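+
+# Spot-checks (illustrative only, not part of the API): jacobi(2, 3) == -1,
+# since 2 is a quadratic non-residue mod 3, while jacobi(4, 7) == 1.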
+
+def jacobi_witness(x, n):
+ '''Returns False if n is an Euler pseudo-prime with base x, and
+ True otherwise.
+ '''
+
+ j = jacobi(x, n) % n
+
+ f = pow(x, n >> 1, n)
+
+ if j == f: return False
+ return True
+
+def randomized_primality_testing(n, k):
+ '''Calculates whether n is composite (which is always correct) or
+ prime (which is incorrect with error probability 2**-k)
+
+ Returns False if the number is composite, and True if it's
+ probably prime.
+ '''
+
+    # For a composite number, at least 50% of the candidate bases are
+    # Jacobi witnesses, i.e. they report compositeness.
+
+    # The implemented algorithm using the Jacobi witness function has error
+    # probability q <= 0.5, according to Goodrich et al.
+ #
+ # q = 0.5
+ # t = int(math.ceil(k / log(1 / q, 2)))
+ # So t = k / log(2, 2) = k / 1 = k
+ # this means we can use range(k) rather than range(t)
+
+ for _ in range(k):
+ x = rsa.randnum.randint(n-1)
+ if jacobi_witness(x, n): return False
+
+ return True
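+
+# With q <= 0.5 per round, k rounds give error probability <= 2**-k; the
+# k=6 used by is_prime() below therefore accepts a composite with
+# probability at most 1/64 (about 1.6%).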
+
+def is_prime(number):
+ '''Returns True if the number is prime, and False otherwise.
+
+ >>> is_prime(42)
+ False
+ >>> is_prime(41)
+ True
+ '''
+
+ return randomized_primality_testing(number, 6)
+
+def getprime(nbits):
+ '''Returns a prime number that can be stored in 'nbits' bits.
+
+ >>> p = getprime(128)
+ >>> is_prime(p-1)
+ False
+ >>> is_prime(p)
+ True
+ >>> is_prime(p+1)
+ False
+
+ >>> from rsa import common
+ >>> common.bit_size(p) == 128
+ True
+
+ '''
+
+ while True:
+ integer = rsa.randnum.read_random_int(nbits)
+
+ # Make sure it's odd
+ integer |= 1
+
+ # Test for primeness
+ if is_prime(integer):
+ return integer
+
+ # Retry if not prime
+
+
+def are_relatively_prime(a, b):
+ '''Returns True if a and b are relatively prime, and False if they
+ are not.
+
+    >>> are_relatively_prime(2, 3)
+    True
+    >>> are_relatively_prime(2, 4)
+    False
+ '''
+
+ d = gcd(a, b)
+ return (d == 1)
+
+if __name__ == '__main__':
+ print('Running doctests 1000x or until failure')
+ import doctest
+
+ for count in range(1000):
+ (failures, tests) = doctest.testmod()
+ if failures:
+ break
+
+ if count and count % 100 == 0:
+ print('%i times' % count)
+
+ print('Doctests done')
diff --git a/python/rsa/rsa/randnum.py b/python/rsa/rsa/randnum.py
new file mode 100644
index 000000000..0e782744c
--- /dev/null
+++ b/python/rsa/rsa/randnum.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Functions for generating random numbers.'''
+
+# Source inspired by code by Yesudeep Mangalapilly <yesudeep@gmail.com>
+
+import os
+
+from rsa import common, transform
+from rsa._compat import byte
+
+def read_random_bits(nbits):
+ '''Reads 'nbits' random bits.
+
+    If nbits isn't a whole multiple of 8, an extra byte will be prepended
+    with only the lower bits set.
+ '''
+
+ nbytes, rbits = divmod(nbits, 8)
+
+ # Get the random bytes
+ randomdata = os.urandom(nbytes)
+
+ # Add the remaining random bits
+ if rbits > 0:
+ randomvalue = ord(os.urandom(1))
+ randomvalue >>= (8 - rbits)
+ randomdata = byte(randomvalue) + randomdata
+
+ return randomdata
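+
+# For example, read_random_bits(12) returns two bytes: one fully random
+# byte, prefixed by an extra byte whose value lies in 0x00..0x0F (only its
+# lower 4 bits may be set).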
+
+
+def read_random_int(nbits):
+ '''Reads a random integer of approximately nbits bits.
+ '''
+
+ randomdata = read_random_bits(nbits)
+ value = transform.bytes2int(randomdata)
+
+ # Ensure that the number is large enough to just fill out the required
+ # number of bits.
+ value |= 1 << (nbits - 1)
+
+ return value
+
+def randint(maxvalue):
+ '''Returns a random integer x with 1 <= x <= maxvalue
+
+ May take a very long time in specific situations. If maxvalue needs N bits
+ to store, the closer maxvalue is to (2 ** N) - 1, the faster this function
+ is.
+ '''
+
+ bit_size = common.bit_size(maxvalue)
+
+ tries = 0
+ while True:
+ value = read_random_int(bit_size)
+ if value <= maxvalue:
+ break
+
+ if tries and tries % 10 == 0:
+ # After a lot of tries to get the right number of bits but still
+ # smaller than maxvalue, decrease the number of bits by 1. That'll
+ # dramatically increase the chances to get a large enough number.
+ bit_size -= 1
+ tries += 1
+
+ return value
+
+
diff --git a/python/rsa/rsa/transform.py b/python/rsa/rsa/transform.py
new file mode 100644
index 000000000..c740b2d27
--- /dev/null
+++ b/python/rsa/rsa/transform.py
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Data transformation functions.
+
+From bytes to a number, number to bytes, etc.
+'''
+
+from __future__ import absolute_import
+
+try:
+ # We'll use psyco if available on 32-bit architectures to speed up code.
+ # Using psyco (if available) cuts down the execution time on Python 2.5
+ # at least by half.
+ import psyco
+ psyco.full()
+except ImportError:
+ pass
+
+import binascii
+from struct import pack
+from rsa import common
+from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE
+
+
+def bytes2int(raw_bytes):
+    r'''Converts a byte string (8-bit string) to an integer.
+
+    When using unicode strings, encode them to an 8-bit encoding such as
+    UTF-8 first.
+
+ >>> (((128 * 256) + 64) * 256) + 15
+ 8405007
+ >>> bytes2int('\x80@\x0f')
+ 8405007
+
+ '''
+
+ return int(binascii.hexlify(raw_bytes), 16)
+
+
+def _int2bytes(number, block_size=None):
+ r'''Converts a number to a string of bytes.
+
+ Usage::
+
+ >>> _int2bytes(123456789)
+ '\x07[\xcd\x15'
+ >>> bytes2int(_int2bytes(123456789))
+ 123456789
+
+ >>> _int2bytes(123456789, 6)
+ '\x00\x00\x07[\xcd\x15'
+ >>> bytes2int(_int2bytes(123456789, 128))
+ 123456789
+
+ >>> _int2bytes(123456789, 3)
+ Traceback (most recent call last):
+ ...
+ OverflowError: Needed 4 bytes for number, but block size is 3
+
+ @param number: the number to convert
+ @param block_size: the number of bytes to output. If the number encoded to
+ bytes is less than this, the block will be zero-padded. When not given,
+ the returned block is not padded.
+
+ @throws OverflowError when block_size is given and the number takes up more
+ bytes than fit into the block.
+ '''
+ # Type checking
+ if not is_integer(number):
+ raise TypeError("You must pass an integer for 'number', not %s" %
+ number.__class__)
+
+ if number < 0:
+ raise ValueError('Negative numbers cannot be used: %i' % number)
+
+ # Do some bounds checking
+ if number == 0:
+ needed_bytes = 1
+ raw_bytes = [ZERO_BYTE]
+ else:
+ needed_bytes = common.byte_size(number)
+ raw_bytes = []
+
+    # You cannot compare None > 0 in Python 3.x. It will fail with a TypeError.
+ if block_size and block_size > 0:
+ if needed_bytes > block_size:
+ raise OverflowError('Needed %i bytes for number, but block size '
+ 'is %i' % (needed_bytes, block_size))
+
+ # Convert the number to bytes.
+ while number > 0:
+ raw_bytes.insert(0, byte(number & 0xFF))
+ number >>= 8
+
+ # Pad with zeroes to fill the block
+ if block_size and block_size > 0:
+ padding = (block_size - needed_bytes) * ZERO_BYTE
+ else:
+ padding = EMPTY_BYTE
+
+ return padding + EMPTY_BYTE.join(raw_bytes)
+
+
+def bytes_leading(raw_bytes, needle=ZERO_BYTE):
+ '''
+ Finds the number of prefixed byte occurrences in the haystack.
+
+ Useful when you want to deal with padding.
+
+ :param raw_bytes:
+ Raw bytes.
+    :param needle:
+        The byte to count. Defaults to ``ZERO_BYTE`` (the NUL byte).
+ :returns:
+ The number of leading needle bytes.
+ '''
+ leading = 0
+ # Indexing keeps compatibility between Python 2.x and Python 3.x
+ _byte = needle[0]
+ for x in raw_bytes:
+ if x == _byte:
+ leading += 1
+ else:
+ break
+ return leading
+
+
+def int2bytes(number, fill_size=None, chunk_size=None, overflow=False):
+ '''
+    Convert an unsigned integer to bytes (base-256 representation).
+
+    Leading zeros are not preserved unless a chunk size or fill size is
+    specified.
+
+ .. NOTE:
+ You must not specify both fill_size and chunk_size. Only one
+ of them is allowed.
+
+ :param number:
+ Integer value
+ :param fill_size:
+ If the optional fill size is given the length of the resulting
+ byte string is expected to be the fill size and will be padded
+ with prefix zero bytes to satisfy that length.
+ :param chunk_size:
+ If optional chunk size is given and greater than zero, pad the front of
+ the byte string with binary zeros so that the length is a multiple of
+ ``chunk_size``.
+ :param overflow:
+ ``False`` (default). If this is ``True``, no ``OverflowError``
+ will be raised when the fill_size is shorter than the length
+ of the generated byte sequence. Instead the byte sequence will
+ be returned as is.
+ :returns:
+ Raw bytes (base-256 representation).
+    :raises:
+        ``OverflowError`` when fill_size is given and the number takes up more
+        bytes than fit into the block. This is only raised when the
+        ``overflow`` argument is ``False``; otherwise the byte sequence is
+        returned as is.
+ '''
+ if number < 0:
+ raise ValueError("Number must be an unsigned integer: %d" % number)
+
+ if fill_size and chunk_size:
+ raise ValueError("You can either fill or pad chunks, but not both")
+
+    # Ensure the value is an integer; this raises a TypeError otherwise.
+    number & 1
+
+ raw_bytes = b('')
+
+ # Pack the integer one machine word at a time into bytes.
+ num = number
+ word_bits, _, max_uint, pack_type = get_word_alignment(num)
+ pack_format = ">%s" % pack_type
+ while num > 0:
+ raw_bytes = pack(pack_format, num & max_uint) + raw_bytes
+ num >>= word_bits
+ # Obtain the index of the first non-zero byte.
+ zero_leading = bytes_leading(raw_bytes)
+ if number == 0:
+ raw_bytes = ZERO_BYTE
+ # De-padding.
+ raw_bytes = raw_bytes[zero_leading:]
+
+ length = len(raw_bytes)
+ if fill_size and fill_size > 0:
+ if not overflow and length > fill_size:
+ raise OverflowError(
+ "Need %d bytes for number, but fill size is %d" %
+ (length, fill_size)
+ )
+ raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE)
+ elif chunk_size and chunk_size > 0:
+ remainder = length % chunk_size
+ if remainder:
+ padding_size = chunk_size - remainder
+ raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE)
+ return raw_bytes
+
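+# Worked examples (hedged; the fill_size case mirrors the test suite, the
+# chunk_size case follows from the padding rule above):
+#     int2bytes(123456789, fill_size=6) == b('\x00\x00\x07[\xcd\x15')
+#     int2bytes(123456789, chunk_size=5) == b('\x00\x07[\xcd\x15')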
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
diff --git a/python/rsa/rsa/util.py b/python/rsa/rsa/util.py
new file mode 100644
index 000000000..5bbb70be1
--- /dev/null
+++ b/python/rsa/rsa/util.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Utility functions.'''
+
+from __future__ import with_statement, print_function
+
+import sys
+from optparse import OptionParser
+
+import rsa.key
+
+def private_to_public():
+ '''Reads a private key and outputs the corresponding public key.'''
+
+ # Parse the CLI options
+ parser = OptionParser(usage='usage: %prog [options]',
+ description='Reads a private key and outputs the '
+ 'corresponding public key. Both private and public keys use '
+ 'the format described in PKCS#1 v1.5')
+
+ parser.add_option('-i', '--input', dest='infilename', type='string',
+ help='Input filename. Reads from stdin if not specified')
+ parser.add_option('-o', '--output', dest='outfilename', type='string',
+                      help='Output filename. Writes to stdout if not specified')
+
+ parser.add_option('--inform', dest='inform',
+ help='key format of input - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ parser.add_option('--outform', dest='outform',
+ help='key format of output - default PEM',
+ choices=('PEM', 'DER'), default='PEM')
+
+ (cli, cli_args) = parser.parse_args(sys.argv)
+
+ # Read the input data
+ if cli.infilename:
+ print('Reading private key from %s in %s format' % \
+ (cli.infilename, cli.inform), file=sys.stderr)
+ with open(cli.infilename, 'rb') as infile:
+ in_data = infile.read()
+ else:
+ print('Reading private key from stdin in %s format' % cli.inform,
+ file=sys.stderr)
+ in_data = sys.stdin.read().encode('ascii')
+
+ assert type(in_data) == bytes, type(in_data)
+
+
+ # Take the public fields and create a public key
+ priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
+ pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
+
+ # Save to the output file
+ out_data = pub_key.save_pkcs1(cli.outform)
+
+ if cli.outfilename:
+ print('Writing public key to %s in %s format' % \
+ (cli.outfilename, cli.outform), file=sys.stderr)
+ with open(cli.outfilename, 'wb') as outfile:
+ outfile.write(out_data)
+ else:
+ print('Writing public key to stdout in %s format' % cli.outform,
+ file=sys.stderr)
+ sys.stdout.write(out_data.decode('ascii'))
+
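+# Typical invocation, via the 'pyrsa-priv2pub' console script that setup.py
+# maps to this function (the filenames here are illustrative):
+#     pyrsa-priv2pub -i private.pem -o public.pem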
+
diff --git a/python/rsa/rsa/varblock.py b/python/rsa/rsa/varblock.py
new file mode 100644
index 000000000..c7d96ae6a
--- /dev/null
+++ b/python/rsa/rsa/varblock.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''VARBLOCK file support
+
+The VARBLOCK file format is as follows, where || denotes byte concatenation:
+
+ FILE := VERSION || BLOCK || BLOCK ...
+
+ BLOCK := LENGTH || DATA
+
+ LENGTH := varint-encoded length of the subsequent data. Varint comes from
+ Google Protobuf, and encodes an integer into a variable number of bytes.
+ Each byte uses the 7 lowest bits to encode the value. The highest bit set
+ to 1 indicates the next byte is also part of the varint. The last byte will
+ have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
+'''
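+
+# Worked example: a 300-byte block is stored as the two-byte varint
+# b'\xac\x02' (300 & 0x7F = 0x2C, set the continuation bit -> 0xAC;
+# 300 >> 7 = 2 -> 0x02) followed by the 300 data bytes.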
+
+from rsa._compat import byte, b
+
+
+ZERO_BYTE = b('\x00')
+VARBLOCK_VERSION = 1
+
+def read_varint(infile):
+ '''Reads a varint from the file.
+
+ When the first byte to be read indicates EOF, (0, 0) is returned. When an
+ EOF occurs when at least one byte has been read, an EOFError exception is
+ raised.
+
+ @param infile: the file-like object to read from. It should have a read()
+ method.
+ @returns (varint, length), the read varint and the number of read bytes.
+ '''
+
+ varint = 0
+ read_bytes = 0
+
+ while True:
+ char = infile.read(1)
+ if len(char) == 0:
+ if read_bytes == 0:
+ return (0, 0)
+ raise EOFError('EOF while reading varint, value is %i so far' %
+ varint)
+
+        # Use a distinct name; 'byte' would shadow the helper imported
+        # from rsa._compat above.
+        byte_value = ord(char)
+        varint += (byte_value & 0x7F) << (7 * read_bytes)
+
+        read_bytes += 1
+
+        if not byte_value & 0x80:
+            return (varint, read_bytes)
+
+
+def write_varint(outfile, value):
+ '''Writes a varint to a file.
+
+ @param outfile: the file-like object to write to. It should have a write()
+ method.
+ @returns the number of written bytes.
+ '''
+
+ # there is a big difference between 'write the value 0' (this case) and
+ # 'there is nothing left to write' (the false-case of the while loop)
+
+ if value == 0:
+ outfile.write(ZERO_BYTE)
+ return 1
+
+ written_bytes = 0
+ while value > 0:
+ to_write = value & 0x7f
+ value = value >> 7
+
+ if value > 0:
+ to_write |= 0x80
+
+ outfile.write(byte(to_write))
+ written_bytes += 1
+
+ return written_bytes
+
+
+def yield_varblocks(infile):
+ '''Generator, yields each block in the input file.
+
+ @param infile: file to read, is expected to have the VARBLOCK format as
+ described in the module's docstring.
+ @yields the contents of each block.
+ '''
+
+ # Check the version number
+ first_char = infile.read(1)
+ if len(first_char) == 0:
+ raise EOFError('Unable to read VARBLOCK version number')
+
+ version = ord(first_char)
+ if version != VARBLOCK_VERSION:
+ raise ValueError('VARBLOCK version %i not supported' % version)
+
+ while True:
+ (block_size, read_bytes) = read_varint(infile)
+
+ # EOF at block boundary, that's fine.
+ if read_bytes == 0 and block_size == 0:
+ break
+
+ block = infile.read(block_size)
+
+ read_size = len(block)
+ if read_size != block_size:
+ raise EOFError('Block size is %i, but could read only %i bytes' %
+ (block_size, read_size))
+
+ yield block
+
+
+def yield_fixedblocks(infile, blocksize):
+ '''Generator, yields each block of ``blocksize`` bytes in the input file.
+
+    :param infile: file to read and separate into blocks.
+    :param blocksize: the number of bytes per block.
+    :returns: a generator that yields the contents of each block
+ '''
+
+ while True:
+ block = infile.read(blocksize)
+
+ read_bytes = len(block)
+ if read_bytes == 0:
+ break
+
+ yield block
+
+ if read_bytes < blocksize:
+ break
+
diff --git a/python/rsa/run_tests.py b/python/rsa/run_tests.py
new file mode 100644
index 000000000..e0f249081
--- /dev/null
+++ b/python/rsa/run_tests.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+import unittest2 as unittest
+
+current_path = os.path.abspath(os.path.dirname(__file__))
+tests_path = os.path.join(current_path, 'tests')
+sys.path[0:0] = [
+ current_path,
+ tests_path,
+]
+
+all_tests = [f[:-3] for f in os.listdir(tests_path)
+ if f.startswith('test_') and f.endswith(".py")]
+
+def get_suite(tests):
+ tests = sorted(tests)
+ suite = unittest.TestSuite()
+ loader = unittest.TestLoader()
+ for test in tests:
+ suite.addTest(loader.loadTestsFromName(test))
+ return suite
+
+if __name__ == '__main__':
+ """
+ To run all tests:
+ $ python run_tests.py
+ To run a single test:
+ $ python run_tests.py app
+ To run a couple of tests:
+ $ python run_tests.py app config sessions
+ To run code coverage:
+ $ coverage run run_tests.py
+ $ coverage report -m
+ """
+ tests = sys.argv[1:]
+ if not tests:
+ tests = all_tests
+ tests = ['%s' % t for t in tests]
+ suite = get_suite(tests)
+ unittest.TextTestRunner(verbosity=1).run(suite)
diff --git a/python/rsa/setup.cfg b/python/rsa/setup.cfg
new file mode 100644
index 000000000..2675c2767
--- /dev/null
+++ b/python/rsa/setup.cfg
@@ -0,0 +1,8 @@
+[nosetests]
+verbosity = 2
+
+[egg_info]
+tag_date = 0
+tag_build =
+tag_svn_revision = 0
+
diff --git a/python/rsa/setup.py b/python/rsa/setup.py
new file mode 100755
index 000000000..8a2df8d1f
--- /dev/null
+++ b/python/rsa/setup.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+from setuptools import setup
+
+import rsa
+
+setup(name='rsa',
+ version=rsa.__version__,
+ description='Pure-Python RSA implementation',
+ author='Sybren A. Stuvel',
+ author_email='sybren@stuvel.eu',
+ maintainer='Sybren A. Stuvel',
+ maintainer_email='sybren@stuvel.eu',
+ url='http://stuvel.eu/rsa',
+ packages=['rsa'],
+ license='ASL 2',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Education',
+ 'Intended Audience :: Information Technology',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Security :: Cryptography',
+ ],
+ install_requires=[
+ 'pyasn1 >= 0.1.3',
+ ],
+ entry_points={ 'console_scripts': [
+ 'pyrsa-priv2pub = rsa.util:private_to_public',
+ 'pyrsa-keygen = rsa.cli:keygen',
+ 'pyrsa-encrypt = rsa.cli:encrypt',
+ 'pyrsa-decrypt = rsa.cli:decrypt',
+ 'pyrsa-sign = rsa.cli:sign',
+ 'pyrsa-verify = rsa.cli:verify',
+ 'pyrsa-encrypt-bigfile = rsa.cli:encrypt_bigfile',
+ 'pyrsa-decrypt-bigfile = rsa.cli:decrypt_bigfile',
+ ]},
+
+)
diff --git a/python/rsa/tests/__init__.py b/python/rsa/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/rsa/tests/__init__.py
diff --git a/python/rsa/tests/constants.py b/python/rsa/tests/constants.py
new file mode 100644
index 000000000..6a0d08183
--- /dev/null
+++ b/python/rsa/tests/constants.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+
+from rsa._compat import have_python3
+
+if have_python3:
+ from py3kconstants import *
+else:
+ from py2kconstants import *
+
diff --git a/python/rsa/tests/py2kconstants.py b/python/rsa/tests/py2kconstants.py
new file mode 100644
index 000000000..5f695dd22
--- /dev/null
+++ b/python/rsa/tests/py2kconstants.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+unicode_string = u"Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ"
diff --git a/python/rsa/tests/py3kconstants.py b/python/rsa/tests/py3kconstants.py
new file mode 100644
index 000000000..83b67129c
--- /dev/null
+++ b/python/rsa/tests/py3kconstants.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+unicode_string = "Euro=\u20ac ABCDEFGHIJKLMNOPQRSTUVWXYZ"
diff --git a/python/rsa/tests/test_bigfile.py b/python/rsa/tests/test_bigfile.py
new file mode 100644
index 000000000..86bcbbac6
--- /dev/null
+++ b/python/rsa/tests/test_bigfile.py
@@ -0,0 +1,60 @@
+'''Tests block operations.'''
+from rsa._compat import b
+
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+import unittest2
+
+import rsa
+from rsa import bigfile, varblock, pkcs1
+
+class BigfileTest(unittest2.TestCase):
+
+ def test_encrypt_decrypt_bigfile(self):
+
+ # Expected block size + 11 bytes padding
+ pub_key, priv_key = rsa.newkeys((6 + 11) * 8)
+
+ # Encrypt the file
+ message = b('123456Sybren')
+ infile = BytesIO(message)
+ outfile = BytesIO()
+
+ bigfile.encrypt_bigfile(infile, outfile, pub_key)
+
+ # Test
+ crypto = outfile.getvalue()
+
+ cryptfile = BytesIO(crypto)
+ clearfile = BytesIO()
+
+ bigfile.decrypt_bigfile(cryptfile, clearfile, priv_key)
+        self.assertEqual(clearfile.getvalue(), message)
+
+        # We have 2x6 bytes in the message, so that should result in two
+        # varblocks.
+ cryptfile.seek(0)
+ varblocks = list(varblock.yield_varblocks(cryptfile))
+ self.assertEqual(2, len(varblocks))
+
+
+ def test_sign_verify_bigfile(self):
+
+ # Large enough to store MD5-sum and ASN.1 code for MD5
+ pub_key, priv_key = rsa.newkeys((34 + 11) * 8)
+
+ # Sign the file
+ msgfile = BytesIO(b('123456Sybren'))
+ signature = pkcs1.sign(msgfile, priv_key, 'MD5')
+
+ # Check the signature
+ msgfile.seek(0)
+ self.assertTrue(pkcs1.verify(msgfile, signature, pub_key))
+
+ # Alter the message, re-check
+ msgfile = BytesIO(b('123456sybren'))
+ self.assertRaises(pkcs1.VerificationError,
+ pkcs1.verify, msgfile, signature, pub_key)
+
diff --git a/python/rsa/tests/test_common.py b/python/rsa/tests/test_common.py
new file mode 100644
index 000000000..d105dc020
--- /dev/null
+++ b/python/rsa/tests/test_common.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import unittest2
+import struct
+from rsa._compat import byte, b
+from rsa.common import byte_size, bit_size, _bit_size
+
+
+class Test_byte(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(byte(0), b('\x00'))
+ self.assertEqual(byte(255), b('\xff'))
+
+ def test_struct_error_when_out_of_bounds(self):
+ self.assertRaises(struct.error, byte, 256)
+ self.assertRaises(struct.error, byte, -1)
+
+class Test_byte_size(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(byte_size(1 << 1023), 128)
+ self.assertEqual(byte_size((1 << 1024) - 1), 128)
+ self.assertEqual(byte_size(1 << 1024), 129)
+ self.assertEqual(byte_size(255), 1)
+ self.assertEqual(byte_size(256), 2)
+ self.assertEqual(byte_size(0xffff), 2)
+ self.assertEqual(byte_size(0xffffff), 3)
+ self.assertEqual(byte_size(0xffffffff), 4)
+ self.assertEqual(byte_size(0xffffffffff), 5)
+ self.assertEqual(byte_size(0xffffffffffff), 6)
+ self.assertEqual(byte_size(0xffffffffffffff), 7)
+ self.assertEqual(byte_size(0xffffffffffffffff), 8)
+
+ def test_zero(self):
+ self.assertEqual(byte_size(0), 1)
+
+ def test_bad_type(self):
+ self.assertRaises(TypeError, byte_size, [])
+ self.assertRaises(TypeError, byte_size, ())
+ self.assertRaises(TypeError, byte_size, dict())
+ self.assertRaises(TypeError, byte_size, "")
+ self.assertRaises(TypeError, byte_size, None)
+
+class Test_bit_size(unittest2.TestCase):
+ def test_zero(self):
+ self.assertEqual(bit_size(0), 0)
+
+ def test_values(self):
+ self.assertEqual(bit_size(1023), 10)
+ self.assertEqual(bit_size(1024), 11)
+ self.assertEqual(bit_size(1025), 11)
+ self.assertEqual(bit_size(1 << 1024), 1025)
+ self.assertEqual(bit_size((1 << 1024) + 1), 1025)
+ self.assertEqual(bit_size((1 << 1024) - 1), 1024)
+
+ self.assertEqual(_bit_size(1023), 10)
+ self.assertEqual(_bit_size(1024), 11)
+ self.assertEqual(_bit_size(1025), 11)
+ self.assertEqual(_bit_size(1 << 1024), 1025)
+ self.assertEqual(_bit_size((1 << 1024) + 1), 1025)
+ self.assertEqual(_bit_size((1 << 1024) - 1), 1024)
diff --git a/python/rsa/tests/test_compat.py b/python/rsa/tests/test_compat.py
new file mode 100644
index 000000000..3652c82d5
--- /dev/null
+++ b/python/rsa/tests/test_compat.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+import unittest2
+import struct
+
+from rsa._compat import is_bytes, byte
+
+class Test_byte(unittest2.TestCase):
+ def test_byte(self):
+ for i in range(256):
+ byt = byte(i)
+ self.assertTrue(is_bytes(byt))
+ self.assertEqual(ord(byt), i)
+
+ def test_raises_StructError_on_overflow(self):
+ self.assertRaises(struct.error, byte, 256)
+ self.assertRaises(struct.error, byte, -1)
diff --git a/python/rsa/tests/test_integers.py b/python/rsa/tests/test_integers.py
new file mode 100644
index 000000000..0a712aa0f
--- /dev/null
+++ b/python/rsa/tests/test_integers.py
@@ -0,0 +1,36 @@
+'''Tests integer operations.'''
+
+import unittest2
+
+import rsa.core
+
+class IntegerTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(64)
+
+ def test_enc_dec(self):
+
+ message = 42
+ print("\tMessage: %d" % message)
+
+ encrypted = rsa.core.encrypt_int(message, self.pub.e, self.pub.n)
+ print("\tEncrypted: %d" % encrypted)
+
+ decrypted = rsa.core.decrypt_int(encrypted, self.priv.d, self.pub.n)
+ print("\tDecrypted: %d" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
+ def test_sign_verify(self):
+
+ message = 42
+
+ signed = rsa.core.encrypt_int(message,self.priv.d, self.pub.n)
+ print("\tSigned: %d" % signed)
+
+ verified = rsa.core.decrypt_int(signed, self.pub.e,self.pub.n)
+ print("\tVerified: %d" % verified)
+
+ self.assertEqual(message, verified)
+
diff --git a/python/rsa/tests/test_load_save_keys.py b/python/rsa/tests/test_load_save_keys.py
new file mode 100644
index 000000000..fc1a1aaae
--- /dev/null
+++ b/python/rsa/tests/test_load_save_keys.py
@@ -0,0 +1,127 @@
+'''Unittest for saving and loading keys.'''
+
+import base64
+import unittest2
+from rsa._compat import b
+
+import rsa.key
+
+B64PRIV_DER = b('MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt')
+PRIVATE_DER = base64.decodestring(B64PRIV_DER)
+
+B64PUB_DER = b('MAwCBQDeKYlRAgMBAAE=')
+PUBLIC_DER = base64.decodestring(B64PUB_DER)
+
+PRIVATE_PEM = b('''
+-----BEGIN CONFUSING STUFF-----
+Cruft before the key
+
+-----BEGIN RSA PRIVATE KEY-----
+Comment: something blah
+
+%s
+-----END RSA PRIVATE KEY-----
+
+Stuff after the key
+-----END CONFUSING STUFF-----
+''' % B64PRIV_DER.decode("utf-8"))
+
+CLEAN_PRIVATE_PEM = b('''\
+-----BEGIN RSA PRIVATE KEY-----
+%s
+-----END RSA PRIVATE KEY-----
+''' % B64PRIV_DER.decode("utf-8"))
+
+PUBLIC_PEM = b('''
+-----BEGIN CONFUSING STUFF-----
+Cruft before the key
+
+-----BEGIN RSA PUBLIC KEY-----
+Comment: something blah
+
+%s
+-----END RSA PUBLIC KEY-----
+
+Stuff after the key
+-----END CONFUSING STUFF-----
+''' % B64PUB_DER.decode("utf-8"))
+
+CLEAN_PUBLIC_PEM = b('''\
+-----BEGIN RSA PUBLIC KEY-----
+%s
+-----END RSA PUBLIC KEY-----
+''' % B64PUB_DER.decode("utf-8"))
+
+
+class DerTest(unittest2.TestCase):
+ '''Test saving and loading DER keys.'''
+
+ def test_load_private_key(self):
+ '''Test loading private DER keys.'''
+
+ key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_DER, 'DER')
+ expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ self.assertEqual(expected, key)
+
+ def test_save_private_key(self):
+ '''Test saving private DER keys.'''
+
+ key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ der = key.save_pkcs1('DER')
+
+ self.assertEqual(PRIVATE_DER, der)
+
+ def test_load_public_key(self):
+ '''Test loading public DER keys.'''
+
+ key = rsa.key.PublicKey.load_pkcs1(PUBLIC_DER, 'DER')
+ expected = rsa.key.PublicKey(3727264081, 65537)
+
+ self.assertEqual(expected, key)
+
+ def test_save_public_key(self):
+ '''Test saving public DER keys.'''
+
+ key = rsa.key.PublicKey(3727264081, 65537)
+ der = key.save_pkcs1('DER')
+
+ self.assertEqual(PUBLIC_DER, der)
+
+class PemTest(unittest2.TestCase):
+ '''Test saving and loading PEM keys.'''
+
+
+ def test_load_private_key(self):
+ '''Test loading private PEM files.'''
+
+ key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_PEM, 'PEM')
+ expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+
+ self.assertEqual(expected, key)
+
+ def test_save_private_key(self):
+ '''Test saving private PEM files.'''
+
+ key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
+ pem = key.save_pkcs1('PEM')
+
+ self.assertEqual(CLEAN_PRIVATE_PEM, pem)
+
+ def test_load_public_key(self):
+ '''Test loading public PEM files.'''
+
+ key = rsa.key.PublicKey.load_pkcs1(PUBLIC_PEM, 'PEM')
+ expected = rsa.key.PublicKey(3727264081, 65537)
+
+ self.assertEqual(expected, key)
+
+ def test_save_public_key(self):
+ '''Test saving public PEM files.'''
+
+ key = rsa.key.PublicKey(3727264081, 65537)
+ pem = key.save_pkcs1('PEM')
+
+ self.assertEqual(CLEAN_PUBLIC_PEM, pem)
+
+
diff --git a/python/rsa/tests/test_pem.py b/python/rsa/tests/test_pem.py
new file mode 100644
index 000000000..867f678a0
--- /dev/null
+++ b/python/rsa/tests/test_pem.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import unittest2
+from rsa._compat import b
+from rsa.pem import _markers
+
+
+class Test__markers(unittest2.TestCase):
+ def test_values(self):
+ self.assertEqual(_markers('RSA PRIVATE KEY'),
+ (b('-----BEGIN RSA PRIVATE KEY-----'),
+ b('-----END RSA PRIVATE KEY-----')))
diff --git a/python/rsa/tests/test_pkcs1.py b/python/rsa/tests/test_pkcs1.py
new file mode 100644
index 000000000..d5882dfd1
--- /dev/null
+++ b/python/rsa/tests/test_pkcs1.py
@@ -0,0 +1,94 @@
+'''Tests string operations.'''
+
+import struct
+import unittest2
+
+import rsa
+from rsa import pkcs1
+from rsa._compat import byte, is_integer, b, is_bytes
+
+class BinaryTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(256)
+
+ def test_enc_dec(self):
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ print("\tMessage: %r" % message)
+
+ encrypted = pkcs1.encrypt(message, self.pub)
+ print("\tEncrypted: %r" % encrypted)
+
+ decrypted = pkcs1.decrypt(encrypted, self.priv)
+ print("\tDecrypted: %r" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
+ def test_decoding_failure(self):
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ encrypted = pkcs1.encrypt(message, self.pub)
+
+ # Alter the encrypted stream
+ a = encrypted[5]
+ if is_bytes(a):
+ a = ord(a)
+ encrypted = encrypted[:5] + byte(a + 1) + encrypted[6:]
+
+ self.assertRaises(pkcs1.DecryptionError, pkcs1.decrypt, encrypted,
+ self.priv)
+
+ def test_randomness(self):
+ '''Encrypting the same message twice should result in different
+ cryptos.
+ '''
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ encrypted1 = pkcs1.encrypt(message, self.pub)
+ encrypted2 = pkcs1.encrypt(message, self.pub)
+
+ self.assertNotEqual(encrypted1, encrypted2)
+
+class SignatureTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(512)
+
+ def test_sign_verify(self):
+ '''Test happy flow of sign and verify'''
+
+ message = b('je moeder')
+ print("\tMessage: %r" % message)
+
+ signature = pkcs1.sign(message, self.priv, 'SHA-256')
+ print("\tSignature: %r" % signature)
+
+ self.assertTrue(pkcs1.verify(message, signature, self.pub))
+
+ def test_alter_message(self):
+ '''Altering the message should let the verification fail.'''
+
+ signature = pkcs1.sign(b('je moeder'), self.priv, 'SHA-256')
+ self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
+ b('mijn moeder'), signature, self.pub)
+
+ def test_sign_different_key(self):
+ '''Signing with another key should let the verification fail.'''
+
+ (otherpub, _) = rsa.newkeys(512)
+
+ message = b('je moeder')
+ signature = pkcs1.sign(message, self.priv, 'SHA-256')
+ self.assertRaises(pkcs1.VerificationError, pkcs1.verify,
+ message, signature, otherpub)
+
+ def test_multiple_signings(self):
+ '''Signing the same message twice should return the same signatures.'''
+
+ message = struct.pack('>IIII', 0, 0, 0, 1)
+ signature1 = pkcs1.sign(message, self.priv, 'SHA-1')
+ signature2 = pkcs1.sign(message, self.priv, 'SHA-1')
+
+ self.assertEqual(signature1, signature2)
+
diff --git a/python/rsa/tests/test_strings.py b/python/rsa/tests/test_strings.py
new file mode 100644
index 000000000..4af06291d
--- /dev/null
+++ b/python/rsa/tests/test_strings.py
@@ -0,0 +1,28 @@
+'''Tests string operations.'''
+
+from __future__ import absolute_import
+
+import unittest2
+
+import rsa
+
+from constants import unicode_string
+
+class StringTest(unittest2.TestCase):
+
+ def setUp(self):
+ (self.pub, self.priv) = rsa.newkeys(384)
+
+ def test_enc_dec(self):
+
+ message = unicode_string.encode('utf-8')
+ print("\tMessage: %s" % message)
+
+ encrypted = rsa.encrypt(message, self.pub)
+ print("\tEncrypted: %s" % encrypted)
+
+ decrypted = rsa.decrypt(encrypted, self.priv)
+ print("\tDecrypted: %s" % decrypted)
+
+ self.assertEqual(message, decrypted)
+
diff --git a/python/rsa/tests/test_transform.py b/python/rsa/tests/test_transform.py
new file mode 100644
index 000000000..ffd9ec892
--- /dev/null
+++ b/python/rsa/tests/test_transform.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+
+import unittest2
+from rsa._compat import b
+from rsa.transform import int2bytes, bytes2int, _int2bytes
+
+
+class Test_int2bytes(unittest2.TestCase):
+ def test_accuracy(self):
+ self.assertEqual(int2bytes(123456789), b('\x07[\xcd\x15'))
+ self.assertEqual(_int2bytes(123456789), b('\x07[\xcd\x15'))
+
+ def test_codec_identity(self):
+ self.assertEqual(bytes2int(int2bytes(123456789, 128)), 123456789)
+ self.assertEqual(bytes2int(_int2bytes(123456789, 128)), 123456789)
+
+ def test_chunk_size(self):
+ self.assertEqual(int2bytes(123456789, 6), b('\x00\x00\x07[\xcd\x15'))
+ self.assertEqual(int2bytes(123456789, 7),
+ b('\x00\x00\x00\x07[\xcd\x15'))
+
+ self.assertEqual(_int2bytes(123456789, 6),
+ b('\x00\x00\x07[\xcd\x15'))
+ self.assertEqual(_int2bytes(123456789, 7),
+ b('\x00\x00\x00\x07[\xcd\x15'))
+
+ def test_zero(self):
+ self.assertEqual(int2bytes(0, 4), b('\x00') * 4)
+ self.assertEqual(int2bytes(0, 7), b('\x00') * 7)
+ self.assertEqual(int2bytes(0), b('\x00'))
+
+ self.assertEqual(_int2bytes(0, 4), b('\x00') * 4)
+ self.assertEqual(_int2bytes(0, 7), b('\x00') * 7)
+ self.assertEqual(_int2bytes(0), b('\x00'))
+
+ def test_correctness_against_base_implementation(self):
+ # Slow test.
+ values = [
+ 1 << 512,
+ 1 << 8192,
+ 1 << 77,
+ ]
+ for value in values:
+ self.assertEqual(int2bytes(value), _int2bytes(value),
+ "Boom %d" % value)
+ self.assertEqual(bytes2int(int2bytes(value)),
+ value,
+ "Boom %d" % value)
+ self.assertEqual(bytes2int(_int2bytes(value)),
+ value,
+ "Boom %d" % value)
+
+ def test_raises_OverflowError_when_chunk_size_is_insufficient(self):
+ self.assertRaises(OverflowError, int2bytes, 123456789, 3)
+ self.assertRaises(OverflowError, int2bytes, 299999999999, 4)
+
+ self.assertRaises(OverflowError, _int2bytes, 123456789, 3)
+ self.assertRaises(OverflowError, _int2bytes, 299999999999, 4)
+
+ def test_raises_ValueError_when_negative_integer(self):
+ self.assertRaises(ValueError, int2bytes, -1)
+ self.assertRaises(ValueError, _int2bytes, -1)
+
+ def test_raises_TypeError_when_not_integer(self):
+ self.assertRaises(TypeError, int2bytes, None)
+ self.assertRaises(TypeError, _int2bytes, None)
diff --git a/python/rsa/tests/test_varblock.py b/python/rsa/tests/test_varblock.py
new file mode 100644
index 000000000..24ea50f1f
--- /dev/null
+++ b/python/rsa/tests/test_varblock.py
@@ -0,0 +1,82 @@
+'''Tests varblock operations.'''
+
+
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+import unittest
+
+import rsa
+from rsa._compat import b
+from rsa import varblock
+
+class VarintTest(unittest.TestCase):
+
+ def test_read_varint(self):
+
+ encoded = b('\xac\x02crummy')
+ infile = BytesIO(encoded)
+
+ (decoded, read) = varblock.read_varint(infile)
+
+ # Test the returned values
+ self.assertEqual(300, decoded)
+ self.assertEqual(2, read)
+
+ # The rest of the file should be untouched
+ self.assertEqual(b('crummy'), infile.read())
+
+ def test_read_zero(self):
+
+ encoded = b('\x00crummy')
+ infile = BytesIO(encoded)
+
+ (decoded, read) = varblock.read_varint(infile)
+
+ # Test the returned values
+ self.assertEqual(0, decoded)
+ self.assertEqual(1, read)
+
+ # The rest of the file should be untouched
+ self.assertEqual(b('crummy'), infile.read())
+
+ def test_write_varint(self):
+
+ expected = b('\xac\x02')
+ outfile = BytesIO()
+
+ written = varblock.write_varint(outfile, 300)
+
+ # Test the returned values
+ self.assertEqual(expected, outfile.getvalue())
+ self.assertEqual(2, written)
+
+
+ def test_write_zero(self):
+
+ outfile = BytesIO()
+ written = varblock.write_varint(outfile, 0)
+
+ # Test the returned values
+ self.assertEqual(b('\x00'), outfile.getvalue())
+ self.assertEqual(1, written)
+
+
+class VarblockTest(unittest.TestCase):
+
+ def test_yield_varblock(self):
+ infile = BytesIO(b('\x01\x0512345\x06Sybren'))
+
+ varblocks = list(varblock.yield_varblocks(infile))
+ self.assertEqual([b('12345'), b('Sybren')], varblocks)
+
+class FixedblockTest(unittest.TestCase):
+
+ def test_yield_fixedblock(self):
+
+ infile = BytesIO(b('123456Sybren'))
+
+ fixedblocks = list(varblock.yield_fixedblocks(infile, 6))
+ self.assertEqual([b('123456'), b('Sybren')], fixedblocks)
+
diff --git a/python/slugid/.gitignore b/python/slugid/.gitignore
new file mode 100644
index 000000000..ba7466050
--- /dev/null
+++ b/python/slugid/.gitignore
@@ -0,0 +1,57 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
diff --git a/python/slugid/.travis.yml b/python/slugid/.travis.yml
new file mode 100644
index 000000000..dcc430347
--- /dev/null
+++ b/python/slugid/.travis.yml
@@ -0,0 +1,27 @@
+language: python
+python:
+ - 2.7
+
+install:
+ - pip install -r requirements.txt
+
+script:
+ - tox
+
+after_script:
+ - tox -e coveralls
+
+# currently cannot customise per user fork, see:
+# https://github.com/travis-ci/travis-ci/issues/1094
+# please comment out this section in your personal fork!
+notifications:
+ irc:
+ channels:
+ - "irc.mozilla.org#taskcluster-bots"
+ on_success: always
+ on_failure: always
+ template:
+ - "\x02%{repository}\x0314#%{build_number}\x03\x02 (%{branch} - %{commit} : %{author}): \x02\x0312%{message}\x02\x03"
+ - "\x02Change view\x02 : \x0314%{compare_url}\x03"
+ - "\x02Build details\x02 : \x0314%{build_url}\x03"
+ - "\x02Commit message\x02 : \x0314%{commit_message}\x03"
diff --git a/python/slugid/LICENSE b/python/slugid/LICENSE
new file mode 100644
index 000000000..e87a115e4
--- /dev/null
+++ b/python/slugid/LICENSE
@@ -0,0 +1,363 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/python/slugid/README.rst b/python/slugid/README.rst
new file mode 100644
index 000000000..587cd7004
--- /dev/null
+++ b/python/slugid/README.rst
@@ -0,0 +1,121 @@
+slugid.py - Compressed UUIDs for python
+=======================================
+
+.. image:: https://tools.taskcluster.net/lib/assets/taskcluster-120.png
+
+|Build Status| |Coverage Status| |License| |pypi Version| |Downloads|
+
+A python module for generating v4 UUIDs and encoding them into a 22-character
+URL-safe base64 slug representation (see `RFC 4648 sec. 5`_).
+
+Slugs are url-safe base64 encoded v4 uuids, stripped of base64 ``=`` padding.
+
+There are two methods for generating slugs: ``slugid.v4()`` and
+``slugid.nice()``.
+
+- The ``slugid.v4()`` method returns a slug from a randomly generated v4 uuid.
+- The ``slugid.nice()`` method returns a v4 slug which conforms to a set of
+  "nice" properties. At the moment the only "nice" property is that the slug
+  starts with ``[A-Za-f]``, which in turn implies that the first (most
+  significant) bit of its associated uuid is set to 0 (illustrated in the
+  sketch below).
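+
+The sketch below (illustrative, not part of the library) shows why clearing
+that bit constrains the first character: the leading base64 character encodes
+the uuid's first 6 bits, so a 0 in the top bit limits its value to the range
+0-31, i.e. the characters ``A``-``Z`` followed by ``a``-``f``. Python 2
+byte-string semantics are assumed, as in the library itself:
+
+.. code-block:: python
+
+    import base64
+    import uuid
+
+    raw = uuid.uuid4().bytes
+    raw = chr(ord(raw[0]) & 0x7f) + raw[1:]  # clear the most significant bit
+    slug = base64.urlsafe_b64encode(raw)[:-2]  # drop '==' padding
+    assert slug[0] in 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef'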
+
+The purpose of the ``slugid.nice()`` method is to support having slugids which
+can safely be used in more contexts. Regular slugids can safely be used in
+urls, and for example in AMQP routing keys; however, slugs beginning with
+``-`` may be misinterpreted as options when used as command line parameters.
+
+In contrast, slugids generated by the ``slugid.nice()`` method can safely be
+used as command line parameters. This comes at a cost to entropy (121 bits vs
+122 bits for regular v4 slugs).
+
+Slug consumers should consider carefully which of these two slug generation
+methods to call. Is it more important to have maximum entropy, or to have
+slugids that do not need special treatment when used as command line
+parameters? This is especially important if you are providing a service which
+supplies slugs to unsuspecting tool developers downstream, who may not realise
+the risks of using your regular v4 slugs as command line parameters, especially
+since this would arise only as an intermittent issue (one time in 64).
+
+Generated slugs take the form ``[A-Za-z0-9_-]{22}``, or more precisely:
+
+- ``slugid.v4()`` slugs conform to
+ ``[A-Za-z0-9_-]{8}[Q-T][A-Za-z0-9_-][CGKOSWaeimquy26-][A-Za-z0-9_-]{10}[AQgw]``
+
+- ``slugid.nice()`` slugs conform to
+ ``[A-Za-f][A-Za-z0-9_-]{7}[Q-T][A-Za-z0-9_-][CGKOSWaeimquy26-][A-Za-z0-9_-]{10}[AQgw]``
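+
+As a quick, illustrative check (not part of the library), a sample of
+generated slugs can be verified against these forms:
+
+.. code-block:: python
+
+    import re
+    import slugid
+
+    nice_re = re.compile(
+        r'[A-Za-f][A-Za-z0-9_-]{7}[Q-T][A-Za-z0-9_-]'
+        r'[CGKOSWaeimquy26-][A-Za-z0-9_-]{10}[AQgw]$')
+    for _ in range(1000):
+        assert nice_re.match(slugid.nice())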
+
+RFC 4122 fixes the value of 6 bits of a v4 UUID, so v4 slugs provide
+128 - 6 = 122 bits of entropy. Since "nice" slugs additionally fix the first
+(most significant) bit to 0, they provide 121 bits of entropy.
+
+
+Usage
+-----
+
+.. code-block:: python
+
+ import slugid
+
+ # Generate "nice" URL-safe base64 encoded UUID version 4 (random)
+ slug = slugid.nice() # a8_YezW8T7e1jLxG7evy-A
+
+ # Alternative, if slugs will not be used as command line parameters
+ slug = slugid.v4() # -9OpXaCORAaFh4sJRk7PUA
+
+ # Get python uuid.UUID object
+ uuid = slugid.decode(slug)
+
+ # Compress to slug again
+ assert(slug == slugid.encode(uuid))
+
+
+RNG Characteristics
+-------------------
+UUID generation is performed by the built-in python `uuid library`_, which
+does not document its randomness guarantees: it uses system uuid-generation
+libraries where available, falling back to ``os.urandom``, and then to the
+``random`` module. Generated slugids therefore share these RNG
+characteristics.
+
+License
+-------
+The ``slugid`` library is released under the MPL 2.0 license; see the
+``LICENSE`` file for the complete license text.
+
+Testing
+-------
+
+.. code-block:: bash
+
+ pip install -r requirements.txt
+ tox
+
+Publishing
+----------
+To republish this library to pypi.python.org, update the version number in
+``slugid/__init__.py``, commit it, push to GitHub, and then run:
+
+.. code-block:: bash
+
+ # delete stale versions
+ rm -rf dist
+
+ # build source package
+ python setup.py sdist
+
+ # publish it
+ twine upload -s dist/*
+
+
+.. _RFC 4648 sec. 5: http://tools.ietf.org/html/rfc4648#section-5
+.. _uuid library: https://docs.python.org/2/library/uuid.html
+
+.. |Build Status| image:: https://travis-ci.org/taskcluster/slugid.py.svg?branch=master
+ :target: http://travis-ci.org/taskcluster/slugid.py
+.. |Coverage Status| image:: https://coveralls.io/repos/taskcluster/slugid.py/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/taskcluster/slugid.py?branch=master
+.. |License| image:: https://img.shields.io/badge/license-MPL%202.0-orange.svg
+ :target: https://github.com/taskcluster/slugid.py/blob/master/LICENSE
+.. |pypi Version| image:: https://img.shields.io/pypi/v/slugid.svg
+ :target: https://pypi.python.org/pypi/slugid
+.. |Downloads| image:: https://img.shields.io/pypi/dm/slugid.svg
+ :target: https://pypi.python.org/pypi/slugid
diff --git a/python/slugid/requirements.txt b/python/slugid/requirements.txt
new file mode 100644
index 000000000..16caa8d62
--- /dev/null
+++ b/python/slugid/requirements.txt
@@ -0,0 +1,2 @@
+tox
+twine
diff --git a/python/slugid/setup.py b/python/slugid/setup.py
new file mode 100644
index 000000000..d7c8b328b
--- /dev/null
+++ b/python/slugid/setup.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+import re
+
+from codecs import open
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+packages = [
+ 'slugid',
+]
+
+version = ''
+with open('slugid/__init__.py', 'r') as fd:
+ version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
+ fd.read(), re.MULTILINE).group(1)
+
+if not version:
+ raise RuntimeError('Cannot find version information')
+
+setup(
+ name='slugid',
+ version=version,
+ description='Base64 encoded uuid v4 slugs',
+ author='Pete Moore',
+ author_email='pmoore@mozilla.com',
+ url='http://taskcluster.github.io/slugid.py',
+ packages=packages,
+ package_data={'': ['LICENSE', 'README.md']},
+ license='MPL 2.0',
+ classifiers=(
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'Programming Language :: Python :: 2.7',
+ ),
+)
diff --git a/python/slugid/slugid/__init__.py b/python/slugid/slugid/__init__.py
new file mode 100644
index 000000000..ca7de07e2
--- /dev/null
+++ b/python/slugid/slugid/__init__.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+# **************
+# * Slugid API *
+# **************
+#
+# @)@)
+# _|_| ( )
+# _(___,`\ _,--------------._ (( /`, ))
+# `==` `*-_,' O `~._ ( ( _/ | ) )
+# `, : o } `~._.~` * ',
+# \ - _ O - ,'
+# | ; - - " ; o /
+# | O o ,-`
+# \ _,-:""""""'`:-._ - . O /
+# `""""""~'` `._ _,-`
+# """"""
+
+"""
+SlugID: Base 64 encoded v4 UUIDs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Usage:
+
+ >>> import slugid
+ >>> s = slugid.nice()
+ >>> s
+ eWIgwMgxSfeXQ36iPbOxiQ
+ >>> u = slugid.decode(s)
+ >>> u
+ UUID('796220c0-c831-49f7-9743-7ea23db3b189')
+ >>> slugid.encode(u)
+ eWIgwMgxSfeXQ36iPbOxiQ
+ >>> slugid.v4()
+ -9OpXaCORAaFh4sJRk7PUA
+"""
+
+__title__ = 'slugid'
+__version__ = '1.0.6'
+__author__ = 'Peter Moore'
+__license__ = 'MPL 2.0'
+
+from .slugid import decode, encode, nice, v4
diff --git a/python/slugid/slugid/slugid.py b/python/slugid/slugid/slugid.py
new file mode 100644
index 000000000..cd7dc9ab9
--- /dev/null
+++ b/python/slugid/slugid/slugid.py
@@ -0,0 +1,43 @@
+# Licensed under the Mozilla Public Licence 2.0.
+# https://www.mozilla.org/en-US/MPL/2.0
+
+import uuid
+import base64
+
+def encode(uuid_):
+ """
+ Returns the given uuid.UUID object as a 22 character slug. This can be a
+ regular v4 slug or a "nice" slug.
+ """
+ return base64.urlsafe_b64encode(uuid_.bytes)[:-2] # Drop '==' padding
+
+
+def decode(slug):
+ """
+ Returns the uuid.UUID object represented by the given v4 or "nice" slug
+ """
+    return uuid.UUID(bytes=base64.urlsafe_b64decode(slug + '==')) # restore the stripped '==' padding
+
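+# Illustrative round trip: encode and decode are mutual inverses.
+#   >>> u = uuid.uuid4()
+#   >>> decode(encode(u)) == u
+#   True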
+
+def v4():
+ """
+ Returns a randomly generated uuid v4 compliant slug
+ """
+ return base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2] # Drop '==' padding
+
+
+def nice():
+ """
+ Returns a randomly generated uuid v4 compliant slug which conforms to a set
+ of "nice" properties, at the cost of some entropy. Currently this means one
+ extra fixed bit (the first bit of the uuid is set to 0) which guarantees the
+ slug will begin with [A-Za-f]. For example such slugs don't require special
+ handling when used as command line parameters (whereas non-nice slugs may
+ start with `-` which can confuse command line tools).
+
+ Potentially other "nice" properties may be added in future to further
+ restrict the range of potential uuids that may be generated.
+ """
+ rawBytes = uuid.uuid4().bytes
+ rawBytes = chr(ord(rawBytes[0]) & 0x7f) + rawBytes[1:] # Ensure slug starts with [A-Za-f]
+ return base64.urlsafe_b64encode(rawBytes)[:-2] # Drop '==' padding
diff --git a/python/slugid/test.py b/python/slugid/test.py
new file mode 100644
index 000000000..55103453a
--- /dev/null
+++ b/python/slugid/test.py
@@ -0,0 +1,167 @@
+# Licensed under the Mozilla Public Licence 2.0.
+# https://www.mozilla.org/en-US/MPL/2.0
+
+import uuid
+import slugid
+
+
+def testEncode():
+ """ Test that we can correctly encode a "non-nice" uuid (with first bit
+ set) to its known slug. The specific uuid was chosen since it has a slug
+ which contains both `-` and `_` characters."""
+
+ # 10000000010011110011111111001000110111111100101101001011000001101000100111111011101011101111101011010101111000011000011101010100....
+ # <8 ><0 ><4 ><f ><3 ><f ><c ><8 ><d ><f ><c ><b ><4 ><b ><0 ><6 ><8 ><9 ><f ><b ><a ><e ><f ><a ><d ><5 ><e ><1 ><8 ><7 ><5 ><4 >
+ # < g >< E >< 8 >< _ >< y >< N >< _ >< L >< S >< w >< a >< J >< - >< 6 >< 7 >< 6 >< 1 >< e >< G >< H >< V >< A >
+ uuid_ = uuid.UUID('{804f3fc8-dfcb-4b06-89fb-aefad5e18754}')
+ expectedSlug = 'gE8_yN_LSwaJ-6761eGHVA'
+ actualSlug = slugid.encode(uuid_)
+
+ assert expectedSlug == actualSlug, "UUID not correctly encoded into slug: '" + expectedSlug + "' != '" + actualSlug + "'"
+
+
+def testDecode():
+ """ Test that we can decode a "non-nice" slug (first bit of uuid is set)
+ that begins with `-`"""
+
+ # 11111011111011111011111011111011111011111011111001000011111011111011111111111111111111111111111111111111111111111111111111111101....
+ # <f ><b ><e ><f ><b ><e ><f ><b ><e ><f ><b ><e ><4 ><3 ><e ><f ><b ><f ><f ><f ><f ><f ><f ><f ><f ><f ><f ><f ><f ><f ><f ><d >
+ # < - >< - >< - >< - >< - >< - >< - >< - >< Q >< - >< - >< - >< _ >< _ >< _ >< _ >< _ >< _ >< _ >< _ >< _ >< Q >
+ slug = '--------Q--__________Q'
+ expectedUuid = uuid.UUID('{fbefbefb-efbe-43ef-bfff-fffffffffffd}')
+ actualUuid = slugid.decode(slug)
+
+ assert expectedUuid == actualUuid, "Slug not correctly decoded into uuid: '" + str(expectedUuid) + "' != '" + str(actualUuid) + "'"
+
+
+def testUuidEncodeDecode():
+ """ Test that 10000 v4 uuids are unchanged after encoding and then decoding them"""
+
+ for i in range(0, 10000):
+ uuid1 = uuid.uuid4()
+ slug = slugid.encode(uuid1)
+ uuid2 = slugid.decode(slug)
+
+ assert uuid1 == uuid2, "Encode and decode isn't identity: '" + str(uuid1) + "' != '" + str(uuid2) + "'"
+
+
+def testSlugDecodeEncode():
+ """ Test that 10000 v4 slugs are unchanged after decoding and then encoding them."""
+
+ for i in range(0, 10000):
+ slug1 = slugid.v4()
+ uuid_ = slugid.decode(slug1)
+ slug2 = slugid.encode(uuid_)
+
+ assert slug1 == slug2, "Decode and encode isn't identity"
+
+
+def testSpreadNice():
+ """ Make sure that all allowed characters can appear in all allowed
+ positions within the "nice" slug. In this test we generate over a thousand
+ slugids, and make sure that every possible allowed character per position
+ appears at least once in the sample of all slugids generated. We also make
+ sure that no other characters appear in positions in which they are not
+ allowed.
+
+ base 64 encoding char -> value:
+ ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_
+ 0 1 2 3 4 5 6
+ 0123456789012345678901234567890123456789012345678901234567890123
+
+ e.g. from this we can see 'j' represents 35 in base64
+
+ The following comments show the 128 bits of the v4 uuid in binary, hex and
+ base 64 encodings. The 6 fixed bits (`0`/`1`) according to RFC 4122, plus
+ the first (most significant) fixed bit (`0`) are shown among the 121
+ arbitrary value bits (`.`/`x`). The `x` means the same as `.` but just
+ highlights which bits are grouped together for the respective encoding.
+
+ schema:
+ <..........time_low............><...time_mid...><time_hi_+_vers><clk_hi><clk_lo><.....................node.....................>
+
+ bin: 0xxx............................................0100............10xx............................................................
+ hex: $A <01><02><03><04><05><06><07><08><09><10><11> 4 <13><14><15> $B <17><18><19><20><21><22><23><24><25><26><27><28><29><30><31>
+
+ => $A in {0, 1, 2, 3, 4, 5, 6, 7} (0b0xxx)
+ => $B in {8, 9, A, B} (0b10xx)
+
+ bin: 0xxxxx..........................................0100xx......xxxx10............................................................xx0000
+ b64: $C < 01 >< 02 >< 03 >< 04 >< 05 >< 06 >< 07 > $D < 09 > $E < 11 >< 12 >< 13 >< 14 >< 15 >< 16 >< 17 >< 18 >< 19 >< 20 > $F
+
+ => $C in {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z, a, b, c, d, e, f} (0b0xxxxx)
+ => $D in {Q, R, S, T} (0b0100xx)
+ => $E in {C, G, K, O, S, W, a, e, i, m, q, u, y, 2, 6, -} (0bxxxx10)
+ => $F in {A, Q, g, w} (0bxx0000)"""
+
+ charsAll = ''.join(sorted('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'))
+ # 0 - 31: 0b0xxxxx
+ charsC = ''.join(sorted('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef'))
+ # 16, 17, 18, 19: 0b0100xx
+ charsD = ''.join(sorted('QRST'))
+ # 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 42, 46, 50, 54, 58, 62: 0bxxxx10
+ charsE = ''.join(sorted('CGKOSWaeimquy26-'))
+ # 0, 16, 32, 48: 0bxx0000
+ charsF = ''.join(sorted('AQgw'))
+ expected = [charsC, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsD, charsAll, charsE, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsF]
+ spreadTest(slugid.nice, expected)
+
+
+def testSpreadV4():
+ """ This test is the same as niceSpreadTest but for slugid.v4() rather than
+ slugid.nice(). The only difference is that a v4() slug can start with any of
+ the base64 characters since the first six bits of the uuid are random."""
+
+ charsAll = ''.join(sorted('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'))
+ # 16, 17, 18, 19: 0b0100xx
+ charsD = ''.join(sorted('QRST'))
+ # 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 42, 46, 50, 54, 58, 62: 0bxxxx10
+ charsE = ''.join(sorted('CGKOSWaeimquy26-'))
+ # 0, 16, 32, 48: 0bxx0000
+ charsF = ''.join(sorted('AQgw'))
+ expected = [charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsD, charsAll, charsE, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsAll, charsF]
+ spreadTest(slugid.v4, expected)
+
+
+def spreadTest(generator, expected):
+ """ `spreadTest` runs a test against the `generator` function, to check that
+ when calling it 64*40 times, the range of characters per string position it
+ returns matches the array `expected`, where each entry in `expected` is a
+ string of all possible characters that should appear in that position in the
+ string, at least once in the sample of 64*40 responses from the `generator`
+ function"""
+ # k is an array which stores which characters were found at which
+ # positions. It has one entry per slugid character, therefore 22 entries.
+ # Each entry is a dict with a key for each character found, and its value
+ # as the number of times that character appeared at that position in the
+ # slugid in the large sample of slugids generated in this test.
+ k = [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}]
+
+ # Generate a large sample of slugids, and record what characters appeared
+ # where... A monte-carlo test has demonstrated that with 64 * 20
+ # iterations, no failure occurred in 1000 simulations, so 64 * 40 should be
+ # suitably large to rule out false positives.
+ for i in range(0, 64 * 40):
+ slug = generator()
+ assert len(slug) == 22
+ for j in range(0, 22):
+ if slug[j] in k[j]:
+ k[j][slug[j]] = k[j][slug[j]] + 1
+ else:
+ k[j][slug[j]] = 1
+
+ # Compose results into an array `actual`, for comparison with `expected`
+ actual = []
+ for j in range(0, len(k)):
+ actual.append('')
+ for a in k[j].keys():
+ if k[j][a] > 0:
+ actual[j] += a
+ # sort for easy comparison
+ actual[j] = ''.join(sorted(actual[j]))
+
+ assert arraysEqual(expected, actual), "In a large sample of generated slugids, the range of characters found per character position in the sample did not match expected results.\n\nExpected: " + str(expected) + "\n\nActual: " + str(actual)
+
+def arraysEqual(a, b):
+ """ returns True if arrays a and b are equal"""
+    return a == b  # element-wise list equality; avoids the Python-2-only cmp()
diff --git a/python/slugid/tox.ini b/python/slugid/tox.ini
new file mode 100644
index 000000000..87326e4d4
--- /dev/null
+++ b/python/slugid/tox.ini
@@ -0,0 +1,26 @@
+[tox]
+envlist = py27
+
+
+[base]
+deps =
+ coverage
+ nose
+ rednose
+commands =
+ coverage run --source slugid --branch {envbindir}/nosetests -v --with-xunit --rednose --force-color
+
+
+[testenv:py27]
+deps=
+ {[base]deps}
+basepython = python2.7
+commands =
+ {[base]commands}
+
+
+[testenv:coveralls]
+deps=
+ python-coveralls
+commands=
+ coveralls
diff --git a/python/virtualenv/AUTHORS.txt b/python/virtualenv/AUTHORS.txt
new file mode 100644
index 000000000..272494163
--- /dev/null
+++ b/python/virtualenv/AUTHORS.txt
@@ -0,0 +1,91 @@
+Author
+------
+
+Ian Bicking
+
+Maintainers
+-----------
+
+Brian Rosner
+Carl Meyer
+Jannis Leidel
+Paul Moore
+Paul Nasrat
+Marcus Smith
+
+Contributors
+------------
+
+Alex Grönholm
+Anatoly Techtonik
+Antonio Cuni
+Antonio Valentino
+Armin Ronacher
+Barry Warsaw
+Benjamin Root
+Bradley Ayers
+Branden Rolston
+Brandon Carl
+Brian Kearns
+Cap Petschulat
+CBWhiz
+Chris Adams
+Chris McDonough
+Christos Kontas
+Christian Hudon
+Christian Stefanescu
+Christopher Nilsson
+Cliff Xuan
+Curt Micol
+Damien Nozay
+Dan Sully
+Daniel Hahler
+Daniel Holth
+David Schoonover
+Denis Costa
+Doug Hellmann
+Doug Napoleone
+Douglas Creager
+Eduard-Cristian Stefan
+Erik M. Bray
+Ethan Jucovy
+Gabriel de Perthuis
+Gunnlaugur Thor Briem
+Graham Dennis
+Greg Haskins
+Jason Penney
+Jason R. Coombs
+Jeff Hammel
+Jeremy Orem
+John Kleint
+Jonathan Griffin
+Jonathan Hitchcock
+Jorge Vargas
+Josh Bronson
+Kamil Kisiel
+Kyle Gibson
+Konstantin Zemlyak
+Kumar McMillan
+Lars Francke
+Marc Abramowitz
+Mika Laitio
+Mike Hommey
+Miki Tebeka
+Philip Jenvey
+Philippe Ombredanne
+Piotr Dobrogost
+Preston Holmes
+Ralf Schmitt
+Raul Leal
+Ronny Pfannschmidt
+Satrajit Ghosh
+Sergio de Carvalho
+Stefano Rivera
+Tarek Ziadé
+Thomas Aglassinger
+Vinay Sajip
+Vitaly Babiy
+Vladimir Rutsky
+Wang Xuerui \ No newline at end of file
diff --git a/python/virtualenv/LICENSE.txt b/python/virtualenv/LICENSE.txt
new file mode 100644
index 000000000..ab145001f
--- /dev/null
+++ b/python/virtualenv/LICENSE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2007 Ian Bicking and Contributors
+Copyright (c) 2009 Ian Bicking, The Open Planning Project
+Copyright (c) 2011-2016 The virtualenv developers
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/python/virtualenv/MANIFEST.in b/python/virtualenv/MANIFEST.in
new file mode 100644
index 000000000..49037ada6
--- /dev/null
+++ b/python/virtualenv/MANIFEST.in
@@ -0,0 +1,12 @@
+recursive-include docs *
+recursive-include tests *.py *.sh *.expected
+recursive-include virtualenv_support *.whl
+recursive-include virtualenv_embedded *
+recursive-exclude docs/_templates *
+recursive-exclude docs/_build *
+include virtualenv_support/__init__.py
+include bin/*
+include scripts/*
+include *.py
+include AUTHORS.txt
+include LICENSE.txt
diff --git a/python/virtualenv/PKG-INFO b/python/virtualenv/PKG-INFO
new file mode 100644
index 000000000..dbfda645d
--- /dev/null
+++ b/python/virtualenv/PKG-INFO
@@ -0,0 +1,87 @@
+Metadata-Version: 1.1
+Name: virtualenv
+Version: 15.0.1
+Summary: Virtual Python Environment builder
+Home-page: https://virtualenv.pypa.io/
+Author: Jannis Leidel, Carl Meyer and Brian Rosner
+Author-email: python-virtualenv@groups.google.com
+License: MIT
+Description: Virtualenv
+ ==========
+
+ `Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+ `Issues <https://github.com/pypa/virtualenv/issues>`_ |
+ `Github <https://github.com/pypa/virtualenv>`_ |
+ `PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+ User IRC: #pypa
+ Dev IRC: #pypa-dev
+
+ Introduction
+ ------------
+
+ ``virtualenv`` is a tool to create isolated Python environments.
+
+ The basic problem being addressed is one of dependencies and versions,
+ and indirectly permissions. Imagine you have an application that
+ needs version 1 of LibFoo, but another application requires version
+ 2. How can you use both these applications? If you install
+ everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+ platform's standard location is), it's easy to end up in a situation
+ where you unintentionally upgrade an application that shouldn't be
+ upgraded.
+
+ Or more generally, what if you want to install an application *and
+ leave it be*? If an application works, any change in its libraries or
+ the versions of those libraries can break the application.
+
+ Also, what if you can't install packages into the global
+ ``site-packages`` directory? For instance, on a shared host.
+
+ In all these cases, ``virtualenv`` can help you. It creates an
+ environment that has its own installation directories, that doesn't
+ share libraries with other virtualenv environments (and optionally
+ doesn't access the globally installed libraries either).
+
+ .. comment:
+
+ Release History
+ ===============
+
+ 15.0.1 (2016-03-17)
+ -------------------
+
+ * Print error message when DEST_DIR exists and is a file
+
+ * Upgrade setuptools to 20.3
+
+ * Upgrade pip to 8.1.1.
+
+
+ 15.0.0 (2016-03-05)
+ -------------------
+
+ * Remove the `virtualenv-N.N` script from the package; this can no longer be
+ correctly created from a wheel installation.
+ Resolves #851, #692
+
+ * Remove accidental runtime dependency on pip by extracting certificate in the
+ subprocess.
+
+        * Upgrade setuptools to 20.2.2.
+
+ * Upgrade pip to 8.1.0.
+
+
+ `Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.
+Keywords: setuptools deployment installation distutils
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/python/virtualenv/README.rst b/python/virtualenv/README.rst
new file mode 100644
index 000000000..0d5984dce
--- /dev/null
+++ b/python/virtualenv/README.rst
@@ -0,0 +1,31 @@
+virtualenv
+==========
+
+A tool for creating isolated 'virtual' python environments.
+
+.. image:: https://img.shields.io/pypi/v/virtualenv.svg
+ :target: https://pypi.python.org/pypi/virtualenv
+
+.. image:: https://img.shields.io/travis/pypa/virtualenv/develop.svg
+ :target: http://travis-ci.org/pypa/virtualenv
+
+* `Installation <https://virtualenv.pypa.io/en/latest/installation.html>`_
+* `Documentation <https://virtualenv.pypa.io/>`_
+* `Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_
+* `Issues <https://github.com/pypa/virtualenv/issues>`_
+* `PyPI <https://pypi.python.org/pypi/virtualenv/>`_
+* `Github <https://github.com/pypa/virtualenv>`_
+* `User mailing list <http://groups.google.com/group/python-virtualenv>`_
+* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_
+* User IRC: #pypa on Freenode.
+* Dev IRC: #pypa-dev on Freenode.
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the virtualenv project's codebases, issue trackers,
+chat rooms, and mailing lists is expected to follow the
+`PyPA Code of Conduct`_.
+
+.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
diff --git a/python/virtualenv/bin/rebuild-script.py b/python/virtualenv/bin/rebuild-script.py
new file mode 100755
index 000000000..a816af3eb
--- /dev/null
+++ b/python/virtualenv/bin/rebuild-script.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+"""
+Helper script to rebuild virtualenv.py from virtualenv_support
+"""
+from __future__ import print_function
+
+import os
+import re
+import codecs
+from zlib import crc32
+
+here = os.path.dirname(__file__)
+script = os.path.join(here, '..', 'virtualenv.py')
+
+gzip = codecs.lookup('zlib')
+b64 = codecs.lookup('base64')
+
+file_regex = re.compile(
+ br'##file (.*?)\n([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*convert\("""\n(.*?)"""\)',
+ re.S)
+file_template = b'##file %(filename)s\n%(varname)s = convert("""\n%(data)s""")'
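+
+# Each embedded file in virtualenv.py matches file_regex/file_template above;
+# an entry looks roughly like this (payload shown is illustrative):
+#
+#   ##file activate.sh
+#   ACTIVATE_SH = convert("""
+#   eJx... (zlib-compressed, base64-encoded data) ...
+#   """)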
+
+def rebuild(script_path):
+ with open(script_path, 'rb') as f:
+ script_content = f.read()
+ parts = []
+ last_pos = 0
+ match = None
+ for match in file_regex.finditer(script_content):
+ parts += [script_content[last_pos:match.start()]]
+ last_pos = match.end()
+ filename, fn_decoded = match.group(1), match.group(1).decode()
+ varname = match.group(2)
+ data = match.group(3)
+
+ print('Found file %s' % fn_decoded)
+ pathname = os.path.join(here, '..', 'virtualenv_embedded', fn_decoded)
+
+ with open(pathname, 'rb') as f:
+ embedded = f.read()
+ new_crc = crc32(embedded)
+ new_data = b64.encode(gzip.encode(embedded)[0])[0]
+
+ if new_data == data:
+ print(' File up to date (crc: %s)' % new_crc)
+ parts += [match.group(0)]
+ continue
+ # Else: content has changed
+ crc = crc32(gzip.decode(b64.decode(data)[0])[0])
+ print(' Content changed (crc: %s -> %s)' %
+ (crc, new_crc))
+ new_match = file_template % {
+ b'filename': filename,
+ b'varname': varname,
+ b'data': new_data
+ }
+ parts += [new_match]
+
+ parts += [script_content[last_pos:]]
+ new_content = b''.join(parts)
+
+ if new_content != script_content:
+ print('Content updated; overwriting... ', end='')
+ with open(script_path, 'wb') as f:
+ f.write(new_content)
+ print('done.')
+ else:
+ print('No changes in content')
+ if match is None:
+ print('No variables were matched/found')
+
+if __name__ == '__main__':
+ rebuild(script)
diff --git a/python/virtualenv/docs/Makefile b/python/virtualenv/docs/Makefile
new file mode 100644
index 000000000..e4de9f847
--- /dev/null
+++ b/python/virtualenv/docs/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-compressor.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-compressor.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/django-compressor"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-compressor"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/python/virtualenv/docs/changes.rst b/python/virtualenv/docs/changes.rst
new file mode 100644
index 000000000..2df19f666
--- /dev/null
+++ b/python/virtualenv/docs/changes.rst
@@ -0,0 +1,985 @@
+Release History
+===============
+
+15.0.1 (2016-03-17)
+-------------------
+
+* Print error message when DEST_DIR exists and is a file
+
+* Upgrade setuptools to 20.3
+
+* Upgrade pip to 8.1.1.
+
+
+15.0.0 (2016-03-05)
+-------------------
+
+* Remove the `virtualenv-N.N` script from the package; this can no longer be
+ correctly created from a wheel installation.
+ Resolves :issue:`851`, :issue:`692`
+
+* Remove accidental runtime dependency on pip by extracting certificate in the
+ subprocess.
+
+* Upgrade setuptools to 20.2.2.
+
+* Upgrade pip to 8.1.0.
+
+
+14.0.6 (2016-02-07)
+-------------------
+
+* Upgrade setuptools to 20.0
+
+* Upgrade wheel to 0.29.0
+
+* Fix an error where virtualenv didn't pass in a working ssl certificate for
+ pip, causing "weird" errors related to ssl.
+
+
+14.0.5 (2016-02-01)
+-------------------
+
+* Homogenize drive letter casing for both prefixes and filenames. :issue:`858`
+
+
+14.0.4 (2016-01-31)
+-------------------
+
+* Upgrade setuptools to 19.6.2
+
+* Revert ac4ea65; only correct drive letter case.
+ Fixes :issue:`856`, :issue:`815`
+
+
+14.0.3 (2016-01-28)
+-------------------
+
+* Upgrade setuptools to 19.6.1
+
+
+14.0.2 (2016-01-28)
+-------------------
+
+* Upgrade setuptools to 19.6
+
+* Suppress any errors from ``unset`` on different shells (:pull:`843`)
+
+* Normalize letter case for prefix path checking. Fixes :issue:`837`
+
+
+14.0.1 (2016-01-21)
+-------------------
+
+* Upgrade from pip 8.0.0 to 8.0.2.
+
+* Fix the default of ``--(no-)download`` to default to downloading.
+
+
+14.0.0 (2016-01-19)
+-------------------
+
+* **BACKWARDS INCOMPATIBLE** Drop support for Python 3.2.
+
+* Upgrade setuptools to 19.4
+
+* Upgrade wheel to 0.26.0
+
+* Upgrade pip to 8.0.0
+
+* Upgrade argparse to 1.4.0
+
+* Added support for ``python-config`` script (:pull:`798`)
+
+* Updated activate.fish (:pull:`589`) (:pull:`799`)
+
+* Account for a ``site.pyo`` correctly in some python implementations (:pull:`759`)
+
+* Properly restore an empty PS1 (:issue:`407`)
+
+* Properly remove ``pydoc`` when deactivating
+
+* Remove workaround for very old Mageia / Mandriva linuxes (:pull:`472`)
+
+* Added a space after virtualenv name in the prompt: ``(env) $PS1``
+
+* Make sure not to run a --user install when creating the virtualenv (:pull:`803`)
+
+* Remove virtualenv.py's path from sys.path when executing with a new
+ python. Fixes issue :issue:`779`, :issue:`763` (:pull:`805`)
+
+* Remove use of () in .bat files so ``Program Files (x86)`` works :issue:`35`
+
+* Download new releases of the preinstalled software from PyPI when there are
+ new releases available. This behavior can be disabled using
+ ``--no-download``.
+
+* Make ``--no-setuptools``, ``--no-pip``, and ``--no-wheel`` independent of
+ each other.
+
+
+13.1.2 (2015-08-23)
+-------------------
+
+* Upgrade pip to 7.1.2.
+
+
+13.1.1 (2015-08-20)
+-------------------
+
+* Upgrade pip to 7.1.1.
+
+* Upgrade setuptools to 18.2.
+
+* Make the activate script safe to use when bash is running with ``-u``.
+
+
+13.1.0 (2015-06-30)
+-------------------
+
+* Upgrade pip to 7.1.0
+
+* Upgrade setuptools to 18.0.1
+
+
+13.0.3 (2015-06-01)
+-------------------
+
+* Upgrade pip to 7.0.3
+
+
+13.0.2 (2015-06-01)
+-------------------
+
+* Upgrade pip to 7.0.2
+
+* Upgrade setuptools to 17.0
+
+
+13.0.1 (2015-05-22)
+-------------------
+
+* Upgrade pip to 7.0.1
+
+
+13.0.0 (2015-05-21)
+-------------------
+
+* Automatically install wheel when creating a new virtualenv. This can be
+ disabled by using the ``--no-wheel`` option.
+
+* Don't trust the current directory as a location to discover files to install
+ packages from.
+
+* Upgrade setuptools to 16.0.
+
+* Upgrade pip to 7.0.0.
+
+
+12.1.1 (2015-04-07)
+-------------------
+
+* Upgrade pip to 6.1.1
+
+
+12.1.0 (2015-04-07)
+-------------------
+
+* Upgrade setuptools to 15.0
+
+* Upgrade pip to 6.1.0
+
+
+12.0.7 (2015-02-04)
+-------------------
+
+* Upgrade pip to 6.0.8
+
+
+12.0.6 (2015-01-28)
+-------------------
+
+* Upgrade pip to 6.0.7
+
+* Upgrade setuptools to 12.0.5
+
+
+12.0.5 (2015-01-03)
+-------------------
+
+* Upgrade pip to 6.0.6
+
+* Upgrade setuptools to 11.0
+
+
+12.0.4 (2014-12-23)
+-------------------
+
+* Revert the fix to ``-p`` on Debian based pythons as it was broken in other
+ situations.
+
+* Revert several sys.path changes new in 12.0 which were breaking virtualenv.
+
+12.0.3 (2014-12-23)
+-------------------
+
+* Fix an issue where Debian based Pythons would fail when using -p with the
+ host Python.
+
+* Upgrade pip to 6.0.3
+
+12.0.2 (2014-12-23)
+-------------------
+
+* Upgraded pip to 6.0.2
+
+12.0.1 (2014-12-22)
+-------------------
+
+* Upgraded pip to 6.0.1
+
+
+12.0 (2014-12-22)
+-----------------
+
+* **PROCESS** Version numbers are now simply ``X.Y`` where the leading ``1``
+ has been dropped.
+* Split up documentation into structured pages
+* Now using pytest framework
+* Correct sys.path ordering for debian, issue #461
+* Correctly throws error on older Pythons, issue #619
+* Allow for empty $PATH, pull #601
+* Don't set prompt if $env:VIRTUAL_ENV_DISABLE_PROMPT is set for Powershell
+* Updated setuptools to 7.0
+
+1.11.6 (2014-05-16)
+-------------------
+
+* Updated setuptools to 3.6
+* Updated pip to 1.5.6
+
+1.11.5 (2014-05-03)
+-------------------
+
+* Updated setuptools to 3.4.4
+* Updated documentation to use https://virtualenv.pypa.io/
+* Updated pip to 1.5.5
+
+1.11.4 (2014-02-21)
+-------------------
+
+* Updated pip to 1.5.4
+
+
+1.11.3 (2014-02-20)
+-------------------
+
+* Updated setuptools to 2.2
+* Updated pip to 1.5.3
+
+
+1.11.2 (2014-01-26)
+-------------------
+
+* Fixed easy_install installed virtualenvs by updating pip to 1.5.2
+
+1.11.1 (2014-01-20)
+-------------------
+
+* Fixed an issue where pip and setuptools were not getting installed when using
+ the ``--system-site-packages`` flag.
+* Updated setuptools to fix an issue when installed with easy_install
+* Fixed an issue with Python 3.4 and sys.stdout encoding being set to ascii
+* Upgraded pip to v1.5.1
+* Upgraded setuptools to v2.1
+
+1.11 (2014-01-02)
+-----------------
+
+* **BACKWARDS INCOMPATIBLE** Switched to using wheels for the bundled copies of
+ setuptools and pip. Using sdists is no longer supported - users supplying
+ their own versions of pip/setuptools will need to provide wheels.
+* **BACKWARDS INCOMPATIBLE** Modified the handling of ``--extra-search-dirs``.
+ This option now works like pip's ``--find-links`` option, in that it adds
+ extra directories to search for compatible wheels for pip and setuptools.
+ The actual wheel selected is chosen based on version and compatibility, using
+ the same algorithm as ``pip install setuptools``.
+* Fixed #495, ``--always-copy`` was failing (PR #511)
+* Upgraded pip to v1.5
+* Upgraded setuptools to v1.4
+
+1.10.1 (2013-08-07)
+-------------------
+
+* **New Signing Key** Release 1.10.1 is using a different key than normal with
+ fingerprint: 7C6B 7C5D 5E2B 6356 A926 F04F 6E3C BCE9 3372 DCFA
+* Upgraded pip to v1.4.1
+* Upgraded setuptools to v0.9.8
+
+
+1.10 (2013-07-23)
+-----------------
+
+* **BACKWARDS INCOMPATIBLE** Dropped support for Python 2.5. The minimum
+ supported Python version is now Python 2.6.
+
+* **BACKWARDS INCOMPATIBLE** Using ``virtualenv.py`` as an isolated script
+ (i.e. without an associated ``virtualenv_support`` directory) is no longer
+ supported for security reasons and will fail with an error.
+
+ Along with this, ``--never-download`` is now always pinned to ``True``, and
+ is only being maintained in the short term for backward compatibility
+ (Pull #412).
+
+* **IMPORTANT** Switched to the new setuptools (v0.9.7) which has been merged
+ with Distribute_ again and works for Python 2 and 3 with one codebase.
+ The ``--distribute`` and ``--setuptools`` options are now no-op.
+
+* Updated to pip 1.4.
+
+* Added support for PyPy3k
+
+* Added the option to use a version number with the ``-p`` option to get the
+ system copy of that Python version (Windows only)
+
+* Removed embedded ``ez_setup.py``, ``distribute_setup.py`` and
+ ``distribute_from_egg.py`` files as part of switching to merged setuptools.
+
+* Fixed ``--relocatable`` to work better on Windows.
+
+* Fixed issue with readline on Windows.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+
+1.9.1 (2013-03-08)
+------------------
+
+* Updated to pip 1.3.1 that fixed a major backward incompatible change of
+  parsing URLs to externally hosted packages that got accidentally included
+ in pip 1.3.
+
+1.9 (2013-03-07)
+----------------
+
+* Unset VIRTUAL_ENV environment variable in deactivate.bat (Pull #364)
+* Upgraded distribute to 0.6.34.
+* Added ``--no-setuptools`` and ``--no-pip`` options (Pull #336).
+* Fixed Issue #373. virtualenv-1.8.4 was failing in cygwin (Pull #382).
+* Fixed Issue #378. virtualenv is now "multiarch" aware on debian/ubuntu (Pull #379).
+* Fixed issue with readline module path on pypy and OSX (Pull #374).
+* Made 64bit detection compatible with Python 2.5 (Pull #393).
+
+
+1.8.4 (2012-11-25)
+------------------
+
+* Updated distribute to 0.6.31. This fixes #359 (numpy install regression) on
+ UTF-8 platforms, and provides a workaround on other platforms:
+ ``PYTHONIOENCODING=utf8 pip install numpy``.
+
+* When installing virtualenv via curl, don't forget to filter out arguments
+ the distribute setup script won't understand. Fixes #358.
+
+* Added some more integration tests.
+
+* Removed the unsupported embedded setuptools egg for Python 2.4 to reduce
+ file size.
+
+1.8.3 (2012-11-21)
+------------------
+
+* Fixed readline on OS X. Thanks minrk
+
+* Updated distribute to 0.6.30 (improves our error reporting, plus new
+ distribute features and fixes). Thanks Gabriel (g2p)
+
+* Added compatibility with multiarch Python (Python 3.3 for example). Added an
+ integration test. Thanks Gabriel (g2p)
+
+* Added ability to install distribute from a user-provided egg, rather than the
+ bundled sdist, for better speed. Thanks Paul Moore.
+
+* Make the creation of the lib64 symlink smarter about already-existing symlinks,
+ and more explicit about full paths. Fixes #334 and #330. Thanks Jeremy Orem.
+
+* Give lib64 site-dir preference over lib on 64-bit systems, to avoid wrong
+ 32-bit compiles in the venv. Fixes #328. Thanks Damien Nozay.
+
+* Fix a bug with prompt-handling in ``activate.csh`` in non-interactive csh
+ shells. Fixes #332. Thanks Benjamin Root for report and patch.
+
+* Make it possible to create a virtualenv from within a Python
+  3.3 pyvenv. Thanks Chris McDonough for the report.
+
+* Add optional --setuptools option to be able to switch to it in case
+ distribute is the default (like in Debian).
+
+1.8.2 (2012-09-06)
+------------------
+
+* Updated the included pip version to 1.2.1 to fix regressions introduced
+ there in 1.2.
+
+
+1.8.1 (2012-09-03)
+------------------
+
+* Fixed distribute version used with `--never-download`. Thanks michr for
+ report and patch.
+
+* Fix creating Python 3.3 based virtualenvs by unsetting the
+ ``__PYVENV_LAUNCHER__`` environment variable in subprocesses.
+
+
+1.8 (2012-09-01)
+----------------
+
+* **Dropped support for Python 2.4** The minimum supported Python version is
+ now Python 2.5.
+
+* Fix `--relocatable` on systems that use lib64. Fixes #78. Thanks Branden
+ Rolston.
+
+* Symlink some additional modules under Python 3. Fixes #194. Thanks Vinay
+ Sajip, Ian Clelland, and Stefan Holek for the report.
+
+* Fix ``--relocatable`` when a script uses ``__future__`` imports. Thanks
+ Branden Rolston.
+
+* Fix a bug in the config option parser that prevented setting negative
+ options with environment variables. Thanks Ralf Schmitt.
+
+* Allow setting ``--no-site-packages`` from the config file.
+
+* Use ``/usr/bin/multiarch-platform`` if available to figure out the include
+ directory. Thanks for the patch, Mika Laitio.
+
+* Fix ``install_name_tool`` replacement to work on Python 3.X.
+
+* Handle paths of users' site-packages on Mac OS X correctly when changing
+ the prefix.
+
+* Updated the embedded version of distribute to 0.6.28 and pip to 1.2.
+
+
+1.7.2 (2012-06-22)
+------------------
+
+* Updated to distribute 0.6.27.
+
+* Fix activate.fish on OS X. Fixes #8. Thanks David Schoonover.
+
+* Create a virtualenv-x.x script with the Python version when installing, so
+ virtualenv for multiple Python versions can be installed to the same
+ script location. Thanks Miki Tebeka.
+
+* Restored ability to create a virtualenv with a path longer than 78
+ characters, without breaking creation of virtualenvs with non-ASCII paths.
+ Thanks, Bradley Ayers.
+
+* Added ability to create virtualenvs without having installed Apple's
+  developer tools (using its own implementation of ``install_name_tool``).
+ Thanks Mike Hommey.
+
+* Fixed PyPy and Jython support on Windows. Thanks Konstantin Zemlyak.
+
+* Added pydoc script to ease use. Thanks Marc Abramowitz. Fixes #149.
+
+* Fixed creating a bootstrap script on Python 3. Thanks Raul Leal. Fixes #280.
+
+* Fixed inconsistency when having set the ``PYTHONDONTWRITEBYTECODE`` env var
+ with the --distribute option or the ``VIRTUALENV_USE_DISTRIBUTE`` env var.
+ ``VIRTUALENV_USE_DISTRIBUTE`` is now considered again as a legacy alias.
+
+
+1.7.1.2 (2012-02-17)
+--------------------
+
+* Fixed minor issue in `--relocatable`. Thanks, Cap Petschulat.
+
+
+1.7.1.1 (2012-02-16)
+--------------------
+
+* Bumped the version string in ``virtualenv.py`` up, too.
+
+* Fixed rST rendering bug of long description.
+
+
+1.7.1 (2012-02-16)
+------------------
+
+* Update embedded pip to version 1.1.
+
+* Fix `--relocatable` under Python 3. Thanks Doug Hellmann.
+
+* Added environ PATH modification to activate_this.py. Thanks Doug
+ Napoleone. Fixes #14.
+
+* Support creating virtualenvs directly from a Python build directory on
+ Windows. Thanks CBWhiz. Fixes #139.
+
+* Use non-recursive symlinks to fix things up for posix_local install
+ scheme. Thanks michr.
+
+* Made activate script available for use with msys and cygwin on Windows.
+ Thanks Greg Haskins, Cliff Xuan, Jonathan Griffin and Doug Napoleone.
+ Fixes #176.
+
+* Fixed creation of virtualenvs on Windows when Python is not installed for
+ all users. Thanks Anatoly Techtonik for report and patch and Doug
+ Napoleone for testing and confirmation. Fixes #87.
+
+* Fixed creation of virtualenvs using -p in installs where some modules
+ that ought to be in the standard library (e.g. `readline`) are actually
+ installed in `site-packages` next to `virtualenv.py`. Thanks Greg Haskins
+ for report and fix. Fixes #167.
+
+* Added activation script for Powershell (signed by Jannis Leidel). Many
+ thanks to Jason R. Coombs.
+
+
+1.7 (2011-11-30)
+----------------
+
+* Gave user-provided ``--extra-search-dir`` priority over default dirs for
+ finding setuptools/distribute (it already had priority for finding pip).
+ Thanks Ethan Jucovy.
+
+* Updated embedded Distribute release to 0.6.24. Thanks Alex Gronholm.
+
+* Made ``--no-site-packages`` behavior the default behavior. The
+ ``--no-site-packages`` flag is still permitted, but displays a warning when
+ used. Thanks Chris McDonough.
+
+* New flag: ``--system-site-packages``; this flag should be passed to get the
+ previous default global-site-package-including behavior back.
+
+* Added ability to set command options as environment variables and options
+ in a ``virtualenv.ini`` file.
+
+* Fixed various encoding related issues with paths. Thanks Gunnlaugur Thor Briem.
+
+* Made ``virtualenv.py`` script executable.
+
+
+1.6.4 (2011-07-21)
+------------------
+
+* Restored ability to run on Python 2.4, too.
+
+
+1.6.3 (2011-07-16)
+------------------
+
+* Restored ability to run on Python < 2.7.
+
+
+1.6.2 (2011-07-16)
+------------------
+
+* Updated embedded distribute release to 0.6.19.
+
+* Updated embedded pip release to 1.0.2.
+
+* Fixed #141 - Be smarter about finding pkg_resources when using the
+ non-default Python interpreter (by using the ``-p`` option).
+
+* Fixed #112 - Fixed path in docs.
+
+* Fixed #109 - Corrected doctests of a Logger method.
+
+* Fixed #118 - Fixed creating virtualenvs on platforms that use the
+ "posix_local" install scheme, such as Ubuntu with Python 2.7.
+
+* Add missing library to Python 3 virtualenvs (``_dummy_thread``).
+
+
+1.6.1 (2011-04-30)
+------------------
+
+* Start to use git-flow.
+
+* Added support for PyPy 1.5
+
+* Fixed #121 -- added sanity-checking of the -p argument. Thanks Paul Nasrat.
+
+* Added progress meter for pip installation as well as setuptools. Thanks Ethan
+ Jucovy.
+
+* Added --never-download and --search-dir options. Thanks Ethan Jucovy.
+
+
+1.6
+---
+
+* Added Python 3 support! Huge thanks to Vinay Sajip and Vitaly Babiy.
+
+* Fixed creation of virtualenvs on Mac OS X when standard library modules
+ (readline) are installed outside the standard library.
+
+* Updated bundled pip to 1.0.
+
+
+1.5.2
+-----
+
+* Moved main repository to Github: https://github.com/pypa/virtualenv
+
+* Transferred primary maintenance from Ian to Jannis Leidel, Carl Meyer and Brian Rosner
+
+* Fixed a few more pypy related bugs.
+
+* Updated bundled pip to 0.8.2.
+
+
+1.5.1
+-----
+
+* Added ``_weakrefset`` requirement for Python 2.7.1.
+
+* Fixed Windows regression in 1.5
+
+
+1.5
+---
+
+* Include pip 0.8.1.
+
+* Add support for PyPy.
+
+* Uses a proper temporary dir when installing environment requirements.
+
+* Add ``--prompt`` option to be able to override the default prompt prefix.
+
+* Fix an issue with ``--relocatable`` on Windows.
+
+* Fix issue with installing the wrong version of distribute.
+
+* Add fish and csh activate scripts.
+
+
+1.4.9
+-----
+
+* Include pip 0.7.2
+
+
+1.4.8
+-----
+
+* Fix for Mac OS X Framework builds that use
+ ``--universal-archs=intel``
+
+* Fix ``activate_this.py`` on Windows.
+
+* Allow ``$PYTHONHOME`` to be set, so long as you use ``source
+ bin/activate`` it will get unset; if you leave it set and do not
+ activate the environment it will still break the environment.
+
+* Include pip 0.7.1
+
+
+1.4.7
+-----
+
+* Include pip 0.7
+
+
+1.4.6
+-----
+
+* Allow ``activate.sh`` to skip updating the prompt (by setting
+ ``$VIRTUAL_ENV_DISABLE_PROMPT``).
+
+
+1.4.5
+-----
+
+* Include pip 0.6.3
+
+* Fix ``activate.bat`` and ``deactivate.bat`` under Windows when
+ ``PATH`` contained a parenthesis
+
+
+1.4.4
+-----
+
+* Include pip 0.6.2 and Distribute 0.6.10
+
+* Create the ``virtualenv`` script even when Setuptools isn't
+ installed
+
+* Fix problem with ``virtualenv --relocate`` when ``bin/`` has
+ subdirectories (e.g., ``bin/.svn/``); from Alan Franzoni.
+
+* If you set ``$VIRTUALENV_DISTRIBUTE`` then virtualenv will use
+ Distribute by default (so you don't have to remember to use
+ ``--distribute``).
+
+
+1.4.3
+-----
+
+* Include pip 0.6.1
+
+
+1.4.2
+-----
+
+* Fix pip installation on Windows
+
+* Fix use of stand-alone ``virtualenv.py`` (and boot scripts)
+
+* Exclude ~/.local (user site-packages) from environments when using
+ ``--no-site-packages``
+
+
+1.4.1
+-----
+
+* Include pip 0.6
+
+
+1.4
+---
+
+* Updated setuptools to 0.6c11
+
+* Added the --distribute option
+
+* Fixed packaging problem of support-files
+
+
+1.3.4
+-----
+
+* Virtualenv now copies the actual embedded Python binary on
+ Mac OS X to fix a hang on Snow Leopard (10.6).
+
+* Fail more gracefully on Windows when ``win32api`` is not installed.
+
+* Fix site-packages taking precedence over Jython's ``__classpath__``
+ and also specially handle the new ``__pyclasspath__`` entry in
+ ``sys.path``.
+
+* Now copies Jython's ``registry`` file to the virtualenv if it exists.
+
+* Better find libraries when compiling extensions on Windows.
+
+* Create ``Scripts\pythonw.exe`` on Windows.
+
+* Added support for the Debian/Ubuntu
+ ``/usr/lib/pythonX.Y/dist-packages`` directory.
+
+* Set ``distutils.sysconfig.get_config_vars()['LIBDIR']`` (based on
+ ``sys.real_prefix``) which is reported to help building on Windows.
+
+* Make ``deactivate`` work on ksh
+
+* Fixes for ``--python``: make it work with ``--relocatable`` and the
+ symlink created to the exact Python version.
+
+
+1.3.3
+-----
+
+* Use Windows newlines in ``activate.bat``, which has been reported to help
+ when using non-ASCII directory names.
+
+* Fixed compatibility with Jython 2.5b1.
+
+* Added a function ``virtualenv.install_python`` for more fine-grained
+ access to what ``virtualenv.create_environment`` does.
+
+* Fix `a problem <https://bugs.launchpad.net/virtualenv/+bug/241581>`_
+ with Windows and paths that contain spaces.
+
+* If ``/path/to/env/.pydistutils.cfg`` exists (or
+ ``/path/to/env/pydistutils.cfg`` on Windows systems) then ignore
+ ``~/.pydistutils.cfg`` and use that other file instead.
+
+* Fix `a problem
+  <https://bugs.launchpad.net/virtualenv/+bug/340050>`_ picking up
+  some ``.so`` libraries in ``/usr/local``.
+
+
+1.3.2
+-----
+
+* Remove the ``[install] prefix = ...`` setting from the virtualenv
+ ``distutils.cfg`` -- this has been causing problems for a lot of
+ people, in rather obscure ways.
+
+* If you use a boot script it will attempt to import ``virtualenv``
+ and find a pre-downloaded Setuptools egg using that.
+
+* Added platform-specific paths, like ``/usr/lib/pythonX.Y/plat-linux2``
+
+
+1.3.1
+-----
+
+* Real Python 2.6 compatibility. Backported the Python 2.6 updates to
+ ``site.py``, including `user directories
+ <http://docs.python.org/dev/whatsnew/2.6.html#pep-370-per-user-site-packages-directory>`_
+ (this means older versions of Python will support user directories,
+ whether intended or not).
+
+* Always set ``[install] prefix`` in ``distutils.cfg`` -- previously
+ on some platforms where a system-wide ``distutils.cfg`` was present
+ with a ``prefix`` setting, packages would be installed globally
+ (usually in ``/usr/local/lib/pythonX.Y/site-packages``).
+
+* Sometimes Cygwin seems to leave ``.exe`` off ``sys.executable``; a
+ workaround is added.
+
+* Fix ``--python`` option.
+
+* Fixed handling of Jython environments that use a
+ jython-complete.jar.
+
+
+1.3
+---
+
+* Update to Setuptools 0.6c9
+* Added an option ``virtualenv --relocatable EXISTING_ENV``, which
+ will make an existing environment "relocatable" -- the paths will
+ not be absolute in scripts, ``.egg-info`` and ``.pth`` files. This
+ may assist in building environments that can be moved and copied.
+  You have to run this *after* installing any new packages.
+* Added ``bin/activate_this.py``, a file you can use like
+ ``execfile("path_to/activate_this.py",
+ dict(__file__="path_to/activate_this.py"))`` -- this will activate
+ the environment in place, similar to what `the mod_wsgi example
+ does <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+* For Mac framework builds of Python, the site-packages directory
+ ``/Library/Python/X.Y/site-packages`` is added to ``sys.path``, from
+ Andrea Rech.
+* Some platform-specific modules in Macs are added to the path now
+ (``plat-darwin/``, ``plat-mac/``, ``plat-mac/lib-scriptpackages``),
+ from Andrea Rech.
+* Fixed a small Bashism in the ``bin/activate`` shell script.
+* Added ``__future__`` to the list of required modules, for Python
+ 2.3. You'll still need to backport your own ``subprocess`` module.
+* Fixed the ``__classpath__`` entry in Jython's ``sys.path`` taking
+  precedence over virtualenv's libs.
+
+
+1.2
+---
+
+* Added a ``--python`` option to select the Python interpreter.
+* Add ``warnings`` to the modules copied over, for Python 2.6 support.
+* Add ``sets`` to the modules copied over for Python 2.3 (though Python
+  2.3 still probably doesn't work).
+
+
+1.1.1
+-----
+
+* Added support for Jython 2.5.
+
+
+1.1
+---
+
+* Added support for Python 2.6.
+* Fix a problem with missing ``DLLs/zlib.pyd`` on Windows.
+* Create ``bin/python`` (or ``bin/python.exe``) even when you run virtualenv
+  with an interpreter named, e.g., ``python2.4``.
+* Fix MacPorts Python
+* Added the ``--unzip-setuptools`` option
+* Update to Setuptools 0.6c8
+* If the current directory is not writable, run ``ez_setup.py`` in ``/tmp``
+* Copy or symlink over the ``include`` directory so that packages will
+ more consistently compile.
+
+
+1.0
+---
+
+* Fix build on systems that use ``/usr/lib64``, distinct from
+ ``/usr/lib`` (specifically CentOS x64).
+* Fixed bug in ``--clear``.
+* Fixed typos in ``deactivate.bat``.
+* Preserve ``$PYTHONPATH`` when calling subprocesses.
+
+
+0.9.2
+-----
+
+* Fix include dir copying on Windows (makes compiling possible).
+* Include the main ``lib-tk`` in the path.
+* Patch ``distutils.sysconfig``: ``get_python_inc`` and
+ ``get_python_lib`` to point to the global locations.
+* Install ``distutils.cfg`` before Setuptools, so that system
+  customizations of ``distutils.cfg`` won't affect the installation.
+* Add ``bin/pythonX.Y`` to the virtualenv (in addition to
+ ``bin/python``).
+* Fixed an issue with Mac Framework Python builds, and absolute paths
+ (from Ronald Oussoren).
+
+
+0.9.1
+-----
+
+* Improve ability to create a virtualenv from inside a virtualenv.
+* Fix a little bug in ``bin/activate``.
+* Actually get ``distutils.cfg`` to work reliably.
+
+
+0.9
+---
+
+* Added ``lib-dynload`` and ``config`` to things that need to be
+ copied over in an environment.
+* Copy over or symlink the ``include`` directory, so that you can
+ build packages that need the C headers.
+* Include a ``distutils`` package, so you can locally update
+ ``distutils.cfg`` (in ``lib/pythonX.Y/distutils/distutils.cfg``).
+* Better avoid downloading Setuptools, and hitting PyPI on environment
+ creation.
+* Fix a problem creating a ``lib64/`` directory.
+* Should work on Mac OS X Framework builds (the default Python
+  installations on Mac). Thanks to Ronald Oussoren.
+
+
+0.8.4
+-----
+
+* Windows installs would sometimes give errors about ``sys.prefix`` that
+ were inaccurate.
+* Slightly prettier output.
+
+
+0.8.3
+-----
+
+* Added support for Windows.
+
+
+0.8.2
+-----
+
+* Give a better warning if you are on an unsupported platform (Mac
+ Framework Pythons, and Windows).
+* Give error about running while inside a workingenv.
+* Give better error message about Python 2.3.
+
+
+0.8.1
+-----
+
+* Fixed packaging of the library.
+
+
+0.8
+---
+
+Initial release. Everything is changed and new!
diff --git a/python/virtualenv/docs/conf.py b/python/virtualenv/docs/conf.py
new file mode 100644
index 000000000..9332aa1bc
--- /dev/null
+++ b/python/virtualenv/docs/conf.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+#
+# Paste documentation build configuration file, created by
+# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import os
+import sys
+
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+# If your extensions are in another directory, add it here.
+sys.path.insert(0, os.path.abspath(os.pardir))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks']
+
+# Add any paths that contain templates here, relative to this directory.
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'virtualenv'
+copyright = '2007-2014, Ian Bicking, The Open Planning Project, PyPA'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+try:
+ from virtualenv import __version__
+ # The short X.Y version.
+ version = '.'.join(__version__.split('.')[:2])
+ # The full version, including alpha/beta/rc tags.
+ release = __version__
+except ImportError:
+ version = release = 'dev'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = []
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+extlinks = {
+ 'issue': ('https://github.com/pypa/virtualenv/issues/%s', '#'),
+ 'pull': ('https://github.com/pypa/virtualenv/pull/%s', 'PR #'),
+}
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'default.css'
+
+html_theme = 'default'
+if not on_rtd:
+ try:
+ import sphinx_rtd_theme
+ html_theme = 'sphinx_rtd_theme'
+ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+ except ImportError:
+ pass
+
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Content template for the index page.
+#html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pastedoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+#latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/python/virtualenv/docs/development.rst b/python/virtualenv/docs/development.rst
new file mode 100644
index 000000000..aba2785a3
--- /dev/null
+++ b/python/virtualenv/docs/development.rst
@@ -0,0 +1,61 @@
+Development
+===========
+
+Contributing
+------------
+
+Refer to the `pip development`_ documentation - it applies equally to
+virtualenv, except that virtualenv issues should be filed on the
+`virtualenv repo`_ at GitHub.
+
+Virtualenv's release schedule is tied to pip's -- each time there's a new pip
+release, there will be a new virtualenv release that bundles the new version of
+pip.
+
+Files in the ``virtualenv_embedded/`` subdirectory are embedded into
+``virtualenv.py`` itself as base64-encoded strings (in order to support
+single-file use of ``virtualenv.py`` without installing it). If your patch
+changes any file in ``virtualenv_embedded/``, run ``bin/rebuild-script.py`` to
+update the embedded version of that file in ``virtualenv.py``; commit that and
+submit it as part of your patch / pull request.
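+
+A typical workflow might look like this (a sketch, assuming the commands
+are run from the repository root)::
+
+    $ $EDITOR virtualenv_embedded/activate.sh
+    $ python bin/rebuild-script.py
+    $ git commit -am "Rebuild embedded activate.sh"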
+
+.. _pip development: http://www.pip-installer.org/en/latest/development.html
+.. _virtualenv repo: https://github.com/pypa/virtualenv/
+
+Running the tests
+-----------------
+
+Virtualenv's test suite is small and not yet comprehensive, but we aim
+to grow it.
+
+The easy way to run tests (handles test dependencies automatically)::
+
+ $ python setup.py test
+
+If you want to run only a selection of the tests, you'll need to run them
+directly with pytest instead. Create a virtualenv, and install required
+packages::
+
+ $ pip install pytest mock
+
+Run pytest::
+
+ $ pytest
+
+Or select just a single test file to run::
+
+    $ pytest tests/test_virtualenv.py
+
+Status and License
+------------------
+
+``virtualenv`` is a successor to `workingenv
+<http://cheeseshop.python.org/pypi/workingenv.py>`_, and an extension
+of `virtual-python
+<http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_.
+
+It was written by Ian Bicking, sponsored by the `Open Planning
+Project <http://openplans.org>`_ and is now maintained by a
+`group of developers <https://github.com/pypa/virtualenv/raw/master/AUTHORS.txt>`_.
+It is licensed under an
+`MIT-style permissive license <https://github.com/pypa/virtualenv/raw/master/LICENSE.txt>`_.
diff --git a/python/virtualenv/docs/index.rst b/python/virtualenv/docs/index.rst
new file mode 100644
index 000000000..e745a87b7
--- /dev/null
+++ b/python/virtualenv/docs/index.rst
@@ -0,0 +1,137 @@
+Virtualenv
+==========
+
+`Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+`Issues <https://github.com/pypa/virtualenv/issues>`_ |
+`Github <https://github.com/pypa/virtualenv>`_ |
+`PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+User IRC: #pypa
+Dev IRC: #pypa-dev
+
+Introduction
+------------
+
+``virtualenv`` is a tool to create isolated Python environments.
+
+The basic problem being addressed is one of dependencies and versions,
+and indirectly permissions. Imagine you have an application that
+needs version 1 of LibFoo, but another application requires version
+2. How can you use both these applications? If you install
+everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+platform's standard location is), it's easy to end up in a situation
+where you unintentionally upgrade an application that shouldn't be
+upgraded.
+
+Or more generally, what if you want to install an application *and
+leave it be*? If an application works, any change in its libraries or
+the versions of those libraries can break the application.
+
+Also, what if you can't install packages into the global
+``site-packages`` directory? For instance, on a shared host.
+
+In all these cases, ``virtualenv`` can help you. It creates an
+environment that has its own installation directories, that doesn't
+share libraries with other virtualenv environments (and optionally
+doesn't access the globally installed libraries either).
+
+.. comment: split here
+
+.. toctree::
+ :maxdepth: 2
+
+ installation
+ userguide
+ reference
+ development
+ changes
+
+.. warning::
+
+ Python bugfix releases 2.6.8, 2.7.3, 3.1.5 and 3.2.3 include a change that
+ will cause "import random" to fail with "cannot import name urandom" on any
+ virtualenv created on a Unix host with an earlier release of Python
+ 2.6/2.7/3.1/3.2, if the underlying system Python is upgraded. This is due to
+ the fact that a virtualenv uses the system Python's standard library but
+ contains its own copy of the Python interpreter, so an upgrade to the system
+ Python results in a mismatch between the version of the Python interpreter
+ and the version of the standard library. It can be fixed by removing
+ ``$ENV/bin/python`` and re-running virtualenv on the same target directory
+ with the upgraded Python.
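+
+   For example, assuming the environment lives at ``/path/to/ENV``::
+
+      $ rm /path/to/ENV/bin/python
+      $ virtualenv /path/to/ENV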
+
+Other Documentation and Links
+-----------------------------
+
+* `Blog announcement of virtualenv`__.
+
+ .. __: http://blog.ianbicking.org/2007/10/10/workingenv-is-dead-long-live-virtualenv/
+
+* James Gardner has written a tutorial on using `virtualenv with
+ Pylons
+ <http://wiki.pylonshq.com/display/pylonscookbook/Using+a+Virtualenv+Sandbox>`_.
+
+* Chris Perkins created a `showmedo video including virtualenv
+ <http://showmedo.com/videos/video?name=2910000&fromSeriesID=291>`_.
+
+* Doug Hellmann's `virtualenvwrapper`_ is a useful set of scripts to make
+ your workflow with many virtualenvs even easier. `His initial blog post on it`__.
+ He also wrote `an example of using virtualenv to try IPython`__.
+
+ .. _virtualenvwrapper: https://pypi.python.org/pypi/virtualenvwrapper/
+ .. __: https://doughellmann.com/blog/2008/05/01/virtualenvwrapper/
+ .. __: https://doughellmann.com/blog/2008/02/01/ipython-and-virtualenv/
+
+* `Pew`_ is another wrapper for virtualenv that makes use of a different
+ activation technique.
+
+ .. _Pew: https://pypi.python.org/pypi/pew/
+
+* `Using virtualenv with mod_wsgi
+ <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+
+* `virtualenv commands
+ <https://github.com/thisismedium/virtualenv-commands>`_ for some more
+ workflow-related tools around virtualenv.
+
+* PyCon US 2011 talk: `Reverse-engineering Ian Bicking's brain: inside pip and virtualenv
+ <http://pyvideo.org/video/568/reverse-engineering-ian-bicking--39-s-brain--insi>`_.
+ By the end of the talk, you'll have a good idea exactly how pip
+ and virtualenv do their magic, and where to go looking in the source
+ for particular behaviors or bug fixes.
+
+Compare & Contrast with Alternatives
+------------------------------------
+
+There are several alternatives that create isolated environments:
+
+* ``workingenv`` (which I do not suggest you use anymore) is the
+ predecessor to this library. It used the main Python interpreter,
+ but relied on setting ``$PYTHONPATH`` to activate the environment.
+ This causes problems when running Python scripts that aren't part of
+ the environment (e.g., a globally installed ``hg`` or ``bzr``). It
+ also conflicted a lot with Setuptools.
+
+* `virtual-python
+ <http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_
+ is also a predecessor to this library. It uses only symlinks, so it
+ couldn't work on Windows. It also symlinks over the *entire*
+ standard library and global ``site-packages``. As a result, it
+ won't see new additions to the global ``site-packages``.
+
+ This script only symlinks a small portion of the standard library
+ into the environment, and so on Windows it is feasible to simply
+ copy these files over. Also, it creates a new/empty
+ ``site-packages`` and also adds the global ``site-packages`` to the
+ path, so updates are tracked separately. This script also installs
+ Setuptools automatically, saving a step and avoiding the need for
+ network access.
+
+* `zc.buildout <http://pypi.python.org/pypi/zc.buildout>`_ doesn't
+ create an isolated Python environment in the same style, but
+ achieves similar results through a declarative config file that sets
+ up scripts with very particular packages. As a declarative system,
+ it is somewhat easier to repeat and manage, but more difficult to
+  experiment with. ``zc.buildout`` includes the ability to set up
+ non-Python systems (e.g., a database server or an Apache instance).
+
+I *strongly* recommend anyone doing application development or
+deployment use one of these tools.
diff --git a/python/virtualenv/docs/installation.rst b/python/virtualenv/docs/installation.rst
new file mode 100644
index 000000000..3006d7617
--- /dev/null
+++ b/python/virtualenv/docs/installation.rst
@@ -0,0 +1,58 @@
+Installation
+============
+
+.. warning::
+
+ We advise installing virtualenv-1.9 or greater. Prior to version 1.9, the
+ pip included in virtualenv did not download from PyPI over SSL.
+
+.. warning::
+
+ When using pip to install virtualenv, we advise using pip 1.3 or greater.
+ Prior to version 1.3, pip did not download from PyPI over SSL.
+
+.. warning::
+
+ We advise against using easy_install to install virtualenv when using
+ setuptools < 0.9.7, because easy_install didn't download from PyPI over SSL
+ and was broken in some subtle ways.
+
+To install globally with `pip` (if you have pip 1.3 or greater installed globally):
+
+::
+
+ $ [sudo] pip install virtualenv
+
+Or to get the latest unreleased dev version:
+
+::
+
+ $ [sudo] pip install https://github.com/pypa/virtualenv/tarball/develop
+
+
+To install version X.X globally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ [sudo] python setup.py install
+
+
+To *use* locally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ python virtualenv.py myVE
+
+.. note::
+
+ The ``virtualenv.py`` script is *not* supported if run without the
+ necessary pip/setuptools/virtualenv distributions available locally. All
+ of the installation methods above include a ``virtualenv_support``
+ directory alongside ``virtualenv.py`` which contains a complete set of
+ pip and setuptools distributions, and so are fully supported.
diff --git a/python/virtualenv/docs/make.bat b/python/virtualenv/docs/make.bat
new file mode 100644
index 000000000..aa5c189fc
--- /dev/null
+++ b/python/virtualenv/docs/make.bat
@@ -0,0 +1,170 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-compressor.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-compressor.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/python/virtualenv/docs/reference.rst b/python/virtualenv/docs/reference.rst
new file mode 100644
index 000000000..9249473c9
--- /dev/null
+++ b/python/virtualenv/docs/reference.rst
@@ -0,0 +1,261 @@
+Reference Guide
+===============
+
+``virtualenv`` Command
+----------------------
+
+.. _usage:
+
+Usage
+~~~~~
+
+:command:`virtualenv [OPTIONS] ENV_DIR`
+
+ Where ``ENV_DIR`` is an absolute or relative path to a directory to create
+ the virtual environment in.
+
+.. _options:
+
+Options
+~~~~~~~
+
+.. program:: virtualenv
+
+.. option:: --version
+
+ show program's version number and exit
+
+.. option:: -h, --help
+
+ show this help message and exit
+
+.. option:: -v, --verbose
+
+ Increase verbosity.
+
+.. option:: -q, --quiet
+
+ Decrease verbosity.
+
+.. option:: -p PYTHON_EXE, --python=PYTHON_EXE
+
+ The Python interpreter to use, e.g.,
+ --python=python2.5 will use the python2.5 interpreter
+ to create the new environment. The default is the
+ interpreter that virtualenv was installed with
+ (like ``/usr/bin/python``)
+
+.. option:: --clear
+
+ Clear out the non-root install and start from scratch.
+
+.. option:: --system-site-packages
+
+ Give the virtual environment access to the global
+ site-packages.
+
+.. option:: --always-copy
+
+ Always copy files rather than symlinking.
+
+.. option:: --relocatable
+
+ Make an EXISTING virtualenv environment relocatable.
+ This fixes up scripts and makes all .pth files relative.
+
+.. option:: --unzip-setuptools
+
+ Unzip Setuptools when installing it.
+
+.. option:: --no-setuptools
+
+ Do not install setuptools in the new virtualenv.
+
+.. option:: --no-pip
+
+ Do not install pip in the new virtualenv.
+
+.. option:: --no-wheel
+
+ Do not install wheel in the new virtualenv.
+
+.. option:: --extra-search-dir=DIR
+
+ Directory to look for setuptools/pip distributions in.
+ This option can be specified multiple times.
+
+.. option:: --prompt=PROMPT
+
+ Provides an alternative prompt prefix for this
+ environment.
+
+.. option:: --download
+
+ Download preinstalled packages from PyPI.
+
+.. option:: --no-download
+
+ Do not download preinstalled packages from PyPI.
+
+.. option:: --no-site-packages
+
+ DEPRECATED. Retained only for backward compatibility.
+ Not having access to global site-packages is now the
+ default behavior.
+
+.. option:: --distribute
+.. option:: --setuptools
+
+ Legacy; now have no effect. Before version 1.10 these could be used
+ to choose whether to install Distribute_ or Setuptools_ into the created
+ virtualenv. Distribute has now been merged into Setuptools, and the
+ latter is always installed.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+.. _Setuptools: https://pypi.python.org/pypi/setuptools
+
+
+Configuration
+-------------
+
+Environment Variables
+~~~~~~~~~~~~~~~~~~~~~
+
+Each command line option is automatically used to look for environment
+variables with the name format ``VIRTUALENV_<UPPER_NAME>``. That means
+the names of the command line options are uppercased and have dashes
+(``'-'``) replaced with underscores (``'_'``).
+
+For example, to automatically use a custom Python binary instead of the
+one virtualenv is run with you can also set an environment variable::
+
+ $ export VIRTUALENV_PYTHON=/opt/python-3.3/bin/python
+ $ virtualenv ENV
+
+It's the same as passing the option to virtualenv directly::
+
+ $ virtualenv --python=/opt/python-3.3/bin/python ENV
+
+This also works for appending command line options, like ``--find-links``.
+Just separate the passed values with a space, e.g.::
+
+ $ export VIRTUALENV_EXTRA_SEARCH_DIR="/path/to/dists /path/to/other/dists"
+ $ virtualenv ENV
+
+is the same as calling::
+
+ $ virtualenv --extra-search-dir=/path/to/dists --extra-search-dir=/path/to/other/dists ENV
+
+.. envvar:: VIRTUAL_ENV_DISABLE_PROMPT
+
+   Any virtualenv created while this is set to a non-empty value will
+   not have its :ref:`activate` script modify the shell prompt.
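+
+   For example::
+
+      $ export VIRTUAL_ENV_DISABLE_PROMPT=1
+      $ virtualenv ENV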
+
+
+Configuration File
+~~~~~~~~~~~~~~~~~~
+
+virtualenv also looks for a standard ini config file. On Unix and Mac OS X
+that's ``$HOME/.virtualenv/virtualenv.ini`` and on Windows, it's
+``%APPDATA%\virtualenv\virtualenv.ini``.
+
+The names of the settings are derived from the long command line option,
+e.g. the option :option:`--python <-p>` would look like this::
+
+ [virtualenv]
+ python = /opt/python-3.3/bin/python
+
+Options that can be given multiple times, like :option:`--extra-search-dir`,
+can be written on multiple lines::
+
+ [virtualenv]
+ extra-search-dir =
+ /path/to/dists
+ /path/to/other/dists
+
+Please have a look at the output of :option:`--help <-h>` for a full list
+of supported options.
+
+
+Extending Virtualenv
+--------------------
+
+
+Creating Your Own Bootstrap Scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While virtualenv creates an environment, it doesn't put anything into
+the environment. Developers may find it useful to distribute a script
+that sets up a particular environment, for example a script that
+installs a particular web application.
+
+To create a script like this, call
+:py:func:`virtualenv.create_bootstrap_script`, and write the
+result to your new bootstrapping script.
+
+.. py:function:: create_bootstrap_script(extra_text)
+
+ Creates a bootstrap script from ``extra_text``, which is like
+ this script but with extend_parser, adjust_options, and after_install hooks.
+
+This returns a string that (once written to disk) can be used as a
+bootstrap script with your own customizations. The script
+will be the standard virtualenv.py script, with your extra text
+added (your extra text should be Python code).
+
+If you include these functions, they will be called:
+
+.. py:function:: extend_parser(optparse_parser)
+
+ You can add or remove options from the parser here.
+
+.. py:function:: adjust_options(options, args)
+
+ You can change options here, or change the args (if you accept
+ different kinds of arguments, be sure you modify ``args`` so it is
+ only ``[DEST_DIR]``).
+
+.. py:function:: after_install(options, home_dir)
+
+ After everything is installed, this function is called. This
+ is probably the function you are most likely to use. An
+ example would be::
+
+ def after_install(options, home_dir):
+ if sys.platform == 'win32':
+ bin = 'Scripts'
+ else:
+ bin = 'bin'
+ subprocess.call([join(home_dir, bin, 'easy_install'),
+ 'MyPackage'])
+ subprocess.call([join(home_dir, bin, 'my-package-script'),
+ 'setup', home_dir])
+
+ This example immediately installs a package, and runs a setup
+ script from that package.
+
+Bootstrap Example
+~~~~~~~~~~~~~~~~~
+
+Here's a more concrete example of how you could use this::
+
+ import virtualenv, textwrap
+ output = virtualenv.create_bootstrap_script(textwrap.dedent("""
+ import os, subprocess
+ def after_install(options, home_dir):
+ etc = join(home_dir, 'etc')
+ if not os.path.exists(etc):
+ os.makedirs(etc)
+ subprocess.call([join(home_dir, 'bin', 'easy_install'),
+ 'BlogApplication'])
+ subprocess.call([join(home_dir, 'bin', 'paster'),
+ 'make-config', 'BlogApplication',
+ join(etc, 'blog.ini')])
+ subprocess.call([join(home_dir, 'bin', 'paster'),
+ 'setup-app', join(etc, 'blog.ini')])
+ """))
+    with open('blog-bootstrap.py', 'w') as f:
+        f.write(output)
+
+Another example is available `here`__.
+
+.. __: https://github.com/socialplanning/fassembler/blob/master/fassembler/create-venv-script.py
diff --git a/python/virtualenv/docs/userguide.rst b/python/virtualenv/docs/userguide.rst
new file mode 100644
index 000000000..35f0dc950
--- /dev/null
+++ b/python/virtualenv/docs/userguide.rst
@@ -0,0 +1,258 @@
+User Guide
+==========
+
+
+Usage
+-----
+
+Virtualenv has one basic command::
+
+ $ virtualenv ENV
+
+Where ``ENV`` is a directory to place the new virtual environment. It has
+a number of usual effects (modifiable by many :ref:`options`):
+
+ - :file:`ENV/lib/` and :file:`ENV/include/` are created, containing supporting
+ library files for a new virtualenv python. Packages installed in this
+ environment will live under :file:`ENV/lib/pythonX.X/site-packages/`.
+
+ - :file:`ENV/bin` is created, where executables live - notably a new
+   :command:`python`. Thus running a script with ``#! /path/to/ENV/bin/python``
+   would run that script under this virtualenv's python.
+
+ - The crucial packages pip_ and setuptools_ are installed, which allow other
+ packages to be easily installed to the environment. This associated pip
+ can be run from :file:`ENV/bin/pip`.
+
+The python in your new virtualenv is effectively isolated from the python that
+was used to create it.
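+
+For example, after creating and activating an environment, ``python``
+resolves to the environment's own interpreter (paths illustrative)::
+
+    $ virtualenv ENV
+    $ source ENV/bin/activate
+    (ENV)$ which python
+    /path/to/ENV/bin/python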
+
+.. _pip: https://pypi.python.org/pypi/pip
+.. _setuptools: https://pypi.python.org/pypi/setuptools
+
+
+.. _activate:
+
+activate script
+~~~~~~~~~~~~~~~
+
+In a newly created virtualenv there will also be an :command:`activate`
+shell script. For Windows systems, activation scripts are provided for
+the Command Prompt and Powershell.
+
+On Posix systems, this resides in :file:`/ENV/bin/`, so you can run::
+
+ $ source bin/activate
+
+In shells where :command:`source` does not exist (e.g. the original
+Bourne shell), you may need to use the :command:`.` command instead.
+There are also separate activate files for some other shells, like csh
+and fish. :file:`bin/activate` should work for bash/zsh/dash.
+
+This will change your ``$PATH`` so its first entry is the virtualenv's
+``bin/`` directory. (You have to use ``source`` because it changes your
+shell environment in-place.) This is all it does; it's purely a
+convenience. If you directly run a script or the python interpreter
+from the virtualenv's ``bin/`` directory (e.g. ``path/to/ENV/bin/pip``
+or ``/path/to/ENV/bin/python-script.py``) there's no need for
+activation.
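+
+For example, this installs a package into the environment without
+activating it first (the package name is illustrative)::
+
+    $ /path/to/ENV/bin/pip install SomePackage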
+
+The ``activate`` script will also modify your shell prompt to indicate
+which environment is currently active. To disable this behaviour, see
+:envvar:`VIRTUAL_ENV_DISABLE_PROMPT`.
+
+To undo these changes to your path (and prompt), just run::
+
+ $ deactivate
+
+On Windows, the equivalent ``activate`` script is in the ``Scripts`` folder::
+
+ > \path\to\env\Scripts\activate
+
+And type ``deactivate`` to undo the changes.
+
+Depending on your active shell (CMD.exe or Powershell.exe), Windows will use
+either ``activate.bat`` or ``activate.ps1`` to activate the virtual
+environment. If using Powershell, see the notes about code signing below.
+
+.. note::
+
+    If using Powershell, the ``activate`` script is subject to the
+    `execution policies`_ on the system. By default on Windows 7, the system's
+    execution policy is set to ``Restricted``, meaning scripts like the
+    ``activate`` script are not allowed to be executed. However, the policy
+    can be relaxed to allow them to run.
+
+ In order to use the script, you can relax your system's execution
+ policy to ``AllSigned``, meaning all scripts on the system must be
+ digitally signed to be executed. Since the virtualenv activation
+ script is signed by one of the authors (Jannis Leidel) this level of
+ the execution policy suffices. As an administrator run::
+
+ PS C:\> Set-ExecutionPolicy AllSigned
+
+ Then you'll be asked to trust the signer, when executing the script.
+ You will be prompted with the following::
+
+ PS C:\> virtualenv .\foo
+ New python executable in C:\foo\Scripts\python.exe
+ Installing setuptools................done.
+ Installing pip...................done.
+ PS C:\> .\foo\scripts\activate
+
+ Do you want to run software from this untrusted publisher?
+ File C:\foo\scripts\activate.ps1 is published by E=jannis@leidel.info,
+ CN=Jannis Leidel, L=Berlin, S=Berlin, C=DE, Description=581796-Gh7xfJxkxQSIO4E0
+ and is not trusted on your system. Only run scripts from trusted publishers.
+ [V] Never run [D] Do not run [R] Run once [A] Always run [?] Help
+ (default is "D"):A
+ (foo) PS C:\>
+
+    If you select ``[A] Always Run``, the certificate will be added to the
+    Trusted Publishers of your user account, and will be trusted in this
+    user's context henceforth. If you select ``[R] Run Once``, the script
+    will be run, but you will be prompted again on a subsequent invocation.
+    Advanced users can add the signer's certificate to the Trusted
+    Publishers of the Computer account to apply to all users (though this
+    technique is outside the scope of this document).
+
+ Alternatively, you may relax the system execution policy to allow running
+ of local scripts without verifying the code signature using the following::
+
+ PS C:\> Set-ExecutionPolicy RemoteSigned
+
+ Since the ``activate.ps1`` script is generated locally for each virtualenv,
+ it is not considered a remote script and can then be executed.
+
+.. _`execution policies`: http://technet.microsoft.com/en-us/library/dd347641.aspx
+
+Removing an Environment
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Removing a virtual environment is simply done by deactivating it and deleting the
+environment folder with all its contents::
+
+ (ENV)$ deactivate
+ $ rm -r /path/to/ENV
+
+The :option:`--system-site-packages` Option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you build with ``virtualenv --system-site-packages ENV``, your virtual
+environment will inherit packages from ``/usr/lib/python2.7/site-packages``
+(or wherever your global site-packages directory is).
+
+This can be used if you have control over the global site-packages directory,
+and you want to depend on the packages there. If you want isolation from the
+global system, do not use this flag.
+
+Windows Notes
+~~~~~~~~~~~~~
+
+Some paths within the virtualenv are slightly different on Windows: scripts and
+executables on Windows go in ``ENV\Scripts\`` instead of ``ENV/bin/`` and
+libraries go in ``ENV\Lib\`` rather than ``ENV/lib/``.
+
+To create a virtualenv under a path with spaces in it on Windows, you'll need
+the `win32api <http://sourceforge.net/projects/pywin32/>`_ library installed.
+
+
+Using Virtualenv without ``bin/python``
+---------------------------------------
+
+Sometimes you can't or don't want to use the Python interpreter
+created by the virtualenv. For instance, in a `mod_python
+<http://www.modpython.org/>`_ or `mod_wsgi <http://www.modwsgi.org/>`_
+environment, there is only one interpreter.
+
+Luckily, it's easy. You must use the custom Python interpreter to
+*install* libraries. But to *use* libraries, you just have to be sure
+the path is correct. A script is available to correct the path. You
+can set up the environment like this::
+
+ activate_this = '/path/to/env/bin/activate_this.py'
+ execfile(activate_this, dict(__file__=activate_this))
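+
+Under Python 3, where ``execfile`` was removed, an equivalent sketch is::
+
+    activate_this = '/path/to/env/bin/activate_this.py'
+    with open(activate_this) as f:
+        exec(f.read(), dict(__file__=activate_this))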
+
+This will change ``sys.path`` and even change ``sys.prefix``, but also allow
+you to use an existing interpreter. Items in your environment will show up
+first on ``sys.path``, before global items. However, global items will
+always be accessible (as if the :option:`--system-site-packages` flag had been
+used in creating the environment, whether it was or not). Also, this cannot undo
+the activation of other environments, or modules that have been imported.
+You shouldn't try to, for instance, activate an environment before a web
+request; you should activate *one* environment as early as possible, and not
+do it again in that process.
+
+Making Environments Relocatable
+-------------------------------
+
+**Note:** this option is somewhat experimental, and there are probably
+caveats that have not yet been identified.
+
+.. warning::
+
+ The ``--relocatable`` option currently has a number of issues,
+ and is not guaranteed to work in all circumstances. It is possible
+ that the option will be deprecated in a future version of ``virtualenv``.
+
+Normally environments are tied to a specific path. That means that
+you cannot move an environment around or copy it to another computer.
+You can fix up an environment to make it relocatable with the
+command::
+
+ $ virtualenv --relocatable ENV
+
+This will make some of the files created by setuptools use relative paths,
+and will change all the scripts to use ``activate_this.py`` instead of using
+the location of the Python interpreter to select the environment.
+
+**Note:** scripts which have been made relocatable will only work if
+the virtualenv is activated, specifically the python executable from
+the virtualenv must be the first one on the system PATH. Also note that
+the activate scripts are not currently made relocatable by
+``virtualenv --relocatable``.
+
+**Note:** you must run this after you've installed *any* packages into
+the environment. If you make an environment relocatable, then
+install a new package, you must run ``virtualenv --relocatable``
+again.
+
+Also, this **does not make your packages cross-platform**. You can
+move the directory around, but it can only be used on other similar
+computers. Some known environmental differences that can cause
+incompatibilities: a different version of Python; one platform using
+UCS2 for its internal unicode representation while another uses UCS4
+(a compile-time option); obvious platform changes like Windows vs.
+Linux or Intel vs. ARM; and libraries that bind to C libraries on the
+system, when those C libraries are located somewhere different (either
+different versions or a different filesystem layout).
+
+If you use this flag to create an environment, currently, the
+:option:`--system-site-packages` option will be implied.
+
+The :option:`--extra-search-dir` option
+---------------------------------------
+
+This option allows you to provide your own versions of setuptools and/or
+pip to use instead of the embedded versions that come with virtualenv.
+
+To use this feature, pass one or more ``--extra-search-dir`` options to
+virtualenv like this::
+
+ $ virtualenv --extra-search-dir=/path/to/distributions ENV
+
+The ``/path/to/distributions`` path should point to a directory that contains
+setuptools and/or pip wheels.
+
+virtualenv will look for wheels in the specified directories, but will use
+pip's standard algorithm for selecting the wheel to install, which looks for
+the latest compatible wheel.
+
+As well as the extra directories, the search order includes:
+
+#. The ``virtualenv_support`` directory relative to virtualenv.py
+#. The directory where virtualenv.py is located.
+#. The current directory.
+
diff --git a/python/virtualenv/scripts/virtualenv b/python/virtualenv/scripts/virtualenv
new file mode 100644
index 000000000..c961dd7db
--- /dev/null
+++ b/python/virtualenv/scripts/virtualenv
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+import virtualenv
+virtualenv.main()
diff --git a/python/virtualenv/setup.cfg b/python/virtualenv/setup.cfg
new file mode 100644
index 000000000..6662fa569
--- /dev/null
+++ b/python/virtualenv/setup.cfg
@@ -0,0 +1,8 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_date = 0
+tag_build =
+tag_svn_revision = 0
+
diff --git a/python/virtualenv/setup.py b/python/virtualenv/setup.py
new file mode 100644
index 000000000..ee03bc531
--- /dev/null
+++ b/python/virtualenv/setup.py
@@ -0,0 +1,123 @@
+import os
+import re
+import shutil
+import sys
+
+if sys.version_info[:2] < (2, 6):
+ sys.exit('virtualenv requires Python 2.6 or higher.')
+
+try:
+ from setuptools import setup
+ from setuptools.command.test import test as TestCommand
+
+ class PyTest(TestCommand):
+ user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+ def initialize_options(self):
+ TestCommand.initialize_options(self)
+ self.pytest_args = []
+
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ #self.test_args = []
+ #self.test_suite = True
+
+ def run_tests(self):
+ # import here, because outside the eggs aren't loaded
+ import pytest
+ sys.exit(pytest.main(self.pytest_args))
+
+ setup_params = {
+ 'entry_points': {
+ 'console_scripts': ['virtualenv=virtualenv:main'],
+ },
+ 'zip_safe': False,
+ 'cmdclass': {'test': PyTest},
+ 'tests_require': ['pytest', 'mock'],
+ }
+except ImportError:
+ from distutils.core import setup
+ if sys.platform == 'win32':
+ print('Note: without Setuptools installed you will '
+ 'have to use "python -m virtualenv ENV"')
+ setup_params = {}
+ else:
+ script = 'scripts/virtualenv'
+ setup_params = {'scripts': [script]}
+
+
+def read_file(*paths):
+ here = os.path.dirname(os.path.abspath(__file__))
+ with open(os.path.join(here, *paths)) as f:
+ return f.read()
+
+# Get long_description from index.rst:
+long_description = read_file('docs', 'index.rst')
+long_description = long_description.strip().split('split here', 1)[0]
+# Add release history
+changes = read_file('docs', 'changes.rst')
+# Only report last two releases for brevity
+releases_found = 0
+change_lines = []
+for line in changes.splitlines():
+ change_lines.append(line)
+ if line.startswith('--------------'):
+ releases_found += 1
+ if releases_found > 2:
+ break
+
+changes = '\n'.join(change_lines[:-2]) + '\n'
+changes += '`Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.'
+# Replace issue/pull directives
+changes = re.sub(r':pull:`(\d+)`', r'PR #\1', changes)
+changes = re.sub(r':issue:`(\d+)`', r'#\1', changes)
+
+long_description += '\n\n' + changes
+
+
+def get_version():
+ version_file = read_file('virtualenv.py')
+ version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
+ version_file, re.M)
+ if version_match:
+ return version_match.group(1)
+ raise RuntimeError("Unable to find version string.")
+
+
+# Hack to prevent stupid TypeError: 'NoneType' object is not callable error on
+# exit of python setup.py test # in multiprocessing/util.py _exit_function when
+# running python setup.py test (see
+# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
+try:
+ import multiprocessing # noqa
+except ImportError:
+ pass
+
+setup(
+ name='virtualenv',
+ version=get_version(),
+ description="Virtual Python Environment builder",
+ long_description=long_description,
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ],
+ keywords='setuptools deployment installation distutils',
+ author='Ian Bicking',
+ author_email='ianb@colorstudy.com',
+ maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',
+ maintainer_email='python-virtualenv@groups.google.com',
+ url='https://virtualenv.pypa.io/',
+ license='MIT',
+ py_modules=['virtualenv'],
+ packages=['virtualenv_support'],
+ package_data={'virtualenv_support': ['*.whl']},
+ **setup_params)
diff --git a/python/virtualenv/site.py b/python/virtualenv/site.py
new file mode 100644
index 000000000..4e426cdb6
--- /dev/null
+++ b/python/virtualenv/site.py
@@ -0,0 +1,760 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages. On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely). The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.X/site-packages/bar
+ /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+
+try:
+ import __builtin__ as builtins
+except ImportError:
+ import builtins
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+ ModuleType = type(os)
+
+def makepath(*paths):
+ dir = os.path.join(*paths)
+ if _is_jython and (dir == '__classpath__' or
+ dir.startswith('__pyclasspath__')):
+ return dir, dir
+ dir = os.path.abspath(dir)
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+    """Set each module's __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if ((_is_jython and not isinstance(m, ModuleType)) or
+ hasattr(m, '__loader__')):
+ # only modules need the abspath in Jython. and don't mess
+ # with a PEP 302-supplied __file__
+ continue
+ f = getattr(m, '__file__', None)
+ if f is None:
+ continue
+ m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ if hasattr(sys, 'gettotalrefcount'):
+ s += '-pydebug'
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+    """Add a new path to known_paths by combining sitedir and 'name',
+    or execute a line if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec(line)
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+ if exec_prefix != sys_prefix:
+ prefixes.append(os.path.join(exec_prefix, "local"))
+
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos') or _is_jython:
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif _is_pypy:
+ sitedirs = [os.path.join(prefix, 'site-packages')]
+ elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+ if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+ sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(prefix, "Extras", "lib", "python")]
+
+ else: # any other Python distros on OSX work this way
+ sitedirs = [os.path.join(prefix, "lib",
+ "python" + sys.version[:3], "site-packages")]
+
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python"),
+ os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+ lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+ if (os.path.exists(lib64_dir) and
+ os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+ if _is_64bit:
+ sitedirs.insert(0, lib64_dir)
+ else:
+ sitedirs.append(lib64_dir)
+ try:
+ # sys.getobjects only available in --with-pydebug build
+ sys.getobjects
+ sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+ except AttributeError:
+ pass
+ # Debian-specific dist-packages directories:
+ sitedirs.append(os.path.join(prefix, "local/lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ if sys.version[0] == '2':
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ else:
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[0],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+def check_enableusersite():
+ """Check if user site directory is safe for inclusion
+
+ The function tests for the command line flag (including environment var),
+ process uid/gid equal to effective uid/gid.
+
+ None: Disabled for security reasons
+ False: Disabled by user (command line option)
+ True: Safe and enabled
+ """
+ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+ return False
+
+ if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+ # check process uid == effective uid
+ if os.geteuid() != os.getuid():
+ return None
+ if hasattr(os, "getgid") and hasattr(os, "getegid"):
+ # check process gid == effective gid
+ if os.getegid() != os.getgid():
+ return None
+
+ return True
+
+def addusersitepackages(known_paths):
+ """Add a per user site-package to sys.path
+
+ Each user has its own python directory with site-packages in the
+ home directory.
+
+ USER_BASE is the root directory for all Python versions
+
+ USER_SITE is the user specific site-packages directory
+
+ USER_SITE/.. can be used for data.
+ """
+ global USER_BASE, USER_SITE, ENABLE_USER_SITE
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ #if sys.platform in ('os2emx', 'riscos'):
+ # # Don't know what to put here
+ # USER_BASE = ''
+ # USER_SITE = ''
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser(base, "Python")
+ USER_SITE = os.path.join(USER_BASE,
+ "Python" + sys.version[0] + sys.version[2],
+ "site-packages")
+ else:
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser("~", ".local")
+ USER_SITE = os.path.join(USER_BASE, "lib",
+ "python" + sys.version[:3],
+ "site-packages")
+
+ if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+ addsitedir(USER_SITE, known_paths)
+ if ENABLE_USER_SITE:
+ for dist_libdir in ("lib", "local/lib"):
+ user_site = os.path.join(USER_BASE, dist_libdir,
+ "python" + sys.version[:3],
+ "dist-packages")
+ if os.path.isdir(user_site):
+ addsitedir(user_site, known_paths)
+ return known_paths
+
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+    These are objects whose repr shows a hint on how to exit and which
+    raise SystemExit when called.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ builtins.quit = Quitter('quit')
+ builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = open(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print(self.__lines[i])
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ try:
+ key = raw_input(prompt)
+ except NameError:
+ key = input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ builtins.copyright = _Printer("copyright", sys.copyright)
+ if _is_jython:
+ builtins.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ elif _is_pypy:
+ builtins.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://pypy.org/")
+ else:
+ builtins.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ builtins.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ builtins.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+        if enc and enc.startswith('cp'): # "cp***" ? (enc may be None)
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0:
+ # Enable to support locale aware default string encodings.
+ import locale
+ loc = locale.getdefaultlocale()
+ if loc[1]:
+ encoding = loc[1]
+ if 0:
+ # Enable to switch off string to Unicode coercion and implicit
+ # Unicode to string conversion.
+ encoding = "undefined"
+ if encoding != "ascii":
+ # On Non-Unicode builds this will raise an AttributeError...
+ sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+ """Run custom site specific code, if available."""
+ try:
+ import sitecustomize
+ except ImportError:
+ pass
+
+def virtual_install_main_packages():
+ f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+ sys.real_prefix = f.read().strip()
+ f.close()
+ pos = 2
+ hardcoded_relative_dirs = []
+ if sys.path[0] == '':
+ pos += 1
+ if _is_jython:
+ paths = [os.path.join(sys.real_prefix, 'Lib')]
+ elif _is_pypy:
+ if sys.version_info > (3, 2):
+ cpyver = '%d' % sys.version_info[0]
+ elif sys.pypy_version_info >= (1, 5):
+ cpyver = '%d.%d' % sys.version_info[:2]
+ else:
+ cpyver = '%d.%d.%d' % sys.version_info[:3]
+ paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+ os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+ if sys.pypy_version_info < (1, 9):
+ paths.insert(1, os.path.join(sys.real_prefix,
+ 'lib-python', 'modified-%s' % cpyver))
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ #
+ # This is hardcoded in the Python executable, but relative to sys.prefix:
+ for path in paths[:]:
+ plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+    # MOZ: The MSYS2 and MinGW versions of Python have their main packages in
+    # the UNIX directory layout; this branch checks specifically for the native
+    # win32 python.
+ elif sys.platform == 'win32' and os.sep == '\\':
+ paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+ else:
+ paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+ if os.path.exists(lib64_path):
+ if _is_64bit:
+ paths.insert(0, lib64_path)
+ else:
+ paths.append(lib64_path)
+ # This is hardcoded in the Python executable, but relative to
+ # sys.prefix. Debian change: we need to add the multiarch triplet
+ # here, which is where the real stuff lives. As per PEP 421, in
+ # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+ # it lives in sys.
+ try:
+ arch = getattr(sys, 'implementation', sys)._multiarch
+ except AttributeError:
+ # This is a non-multiarch aware Python. Fallback to the old way.
+ arch = sys.platform
+ plat_path = os.path.join(sys.real_prefix, 'lib',
+ 'python'+sys.version[:3],
+ 'plat-%s' % arch)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
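+        # Editor's note (illustrative): on Debian amd64 the multiarch triplet
+        # is 'x86_64-linux-gnu', so this can pick up e.g.
+        # <real_prefix>/lib/python2.7/plat-x86_64-linux-gnu.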
+ # This is hardcoded in the Python executable, but
+ # relative to sys.prefix, so we have to fix up:
+ for path in list(paths):
+ tk_dir = os.path.join(path, 'lib-tk')
+ if os.path.exists(tk_dir):
+ paths.append(tk_dir)
+
+ # These are hardcoded in the Apple's Python executable,
+ # but relative to sys.prefix, so we have to fix them up:
+ if sys.platform == 'darwin':
+ hardcoded_paths = [os.path.join(relative_dir, module)
+ for relative_dir in hardcoded_relative_dirs
+ for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+ for path in hardcoded_paths:
+ if os.path.exists(path):
+ paths.append(path)
+
+ sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+ """
+ Force easy_installed eggs in the global environment to get placed
+ in sys.path after all packages inside the virtualenv. This
+ maintains the "least surprise" result that packages in the
+ virtualenv always mask global packages, never the other way
+ around.
+
+ """
+ egginsert = getattr(sys, '__egginsert', 0)
+ for i, path in enumerate(sys.path):
+ if i > egginsert and path.startswith(sys.prefix):
+ egginsert = i
+ sys.__egginsert = egginsert + 1
+
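+# Editor's sketch (illustrative, not in the vendored code): if sys.path is
+#   ['', '/venv/lib/python2.7/site-packages', '/usr/lib/python2.7/site-packages']
+# and sys.prefix is '/venv', the loop above sets __egginsert just past index 1,
+# so eggs that setuptools inserts later land *after* the virtualenv's own
+# site-packages and never shadow it.
+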
+def virtual_addsitepackages(known_paths):
+ force_global_eggs_after_local_site_packages()
+ return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+ """Adjust the special classpath sys.path entries for Jython. These
+ entries should follow the base virtualenv lib directories.
+ """
+ paths = []
+ classpaths = []
+ for path in sys.path:
+ if path == '__classpath__' or path.startswith('__pyclasspath__'):
+ classpaths.append(path)
+ else:
+ paths.append(path)
+ sys.path = paths
+ sys.path.extend(classpaths)
+
+def execusercustomize():
+ """Run custom user specific code, if available."""
+ try:
+ import usercustomize
+ except ImportError:
+ pass
+
+
+def main():
+ global ENABLE_USER_SITE
+ virtual_install_main_packages()
+ abs__file__()
+ paths_in_sys = removeduppaths()
+ if (os.name == "posix" and sys.path and
+ os.path.basename(sys.path[-1]) == "Modules"):
+ addbuilddir()
+ if _is_jython:
+ fixclasspath()
+ GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+ if not GLOBAL_SITE_PACKAGES:
+ ENABLE_USER_SITE = False
+ if ENABLE_USER_SITE is None:
+ ENABLE_USER_SITE = check_enableusersite()
+ paths_in_sys = addsitepackages(paths_in_sys)
+ paths_in_sys = addusersitepackages(paths_in_sys)
+ if GLOBAL_SITE_PACKAGES:
+ paths_in_sys = virtual_addsitepackages(paths_in_sys)
+ if sys.platform == 'os2emx':
+ setBEGINLIBPATH()
+ setquit()
+ setcopyright()
+ sethelper()
+ aliasmbcs()
+ setencoding()
+ execsitecustomize()
+ if ENABLE_USER_SITE:
+ execusercustomize()
+ # Remove sys.setdefaultencoding() so that users cannot change the
+ # encoding after initialization. The test for presence is needed when
+ # this module is run as a script, because this code is executed twice.
+ if hasattr(sys, "setdefaultencoding"):
+ del sys.setdefaultencoding
+
+main()
+
+def _script():
+ help = """\
+ %s [--user-base] [--user-site]
+
+ Without arguments print some useful information
+ With arguments print the value of USER_BASE and/or USER_SITE separated
+ by '%s'.
+
+ Exit codes with --user-base or --user-site:
+ 0 - user site directory is enabled
+ 1 - user site directory is disabled by user
+    2 - user site directory is disabled by super user
+ or for security reasons
+ >2 - unknown error
+ """
+ args = sys.argv[1:]
+ if not args:
+ print("sys.path = [")
+ for dir in sys.path:
+ print(" %r," % (dir,))
+ print("]")
+ def exists(path):
+ if os.path.isdir(path):
+ return "exists"
+ else:
+ return "doesn't exist"
+ print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+ print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))
+ print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
+ sys.exit(0)
+
+ buffer = []
+ if '--user-base' in args:
+ buffer.append(USER_BASE)
+ if '--user-site' in args:
+ buffer.append(USER_SITE)
+
+ if buffer:
+ print(os.pathsep.join(buffer))
+ if ENABLE_USER_SITE:
+ sys.exit(0)
+ elif ENABLE_USER_SITE is False:
+ sys.exit(1)
+ elif ENABLE_USER_SITE is None:
+ sys.exit(2)
+ else:
+ sys.exit(3)
+ else:
+ import textwrap
+ print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+ sys.exit(10)
+
+if __name__ == '__main__':
+ _script()
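+
+# Editor's note: a hypothetical session illustrating _script()'s exit codes
+# (paths are examples only):
+#   $ python site.py --user-site
+#   /home/user/.local/lib/python2.7/site-packages
+#   $ echo $?   # 0: user site directory is enabled
+#   0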
diff --git a/python/virtualenv/tests/__init__.py b/python/virtualenv/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/virtualenv/tests/__init__.py
diff --git a/python/virtualenv/tests/test_activate.sh b/python/virtualenv/tests/test_activate.sh
new file mode 100755
index 000000000..e27727386
--- /dev/null
+++ b/python/virtualenv/tests/test_activate.sh
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+set -u
+
+ROOT="$(dirname $0)/.."
+VIRTUALENV="${ROOT}/virtualenv.py"
+TESTENV="/tmp/test_virtualenv_activate.venv"
+
+rm -rf ${TESTENV}
+
+echo "$0: Creating virtualenv ${TESTENV}..." 1>&2
+
+${VIRTUALENV} ${TESTENV} | tee ${ROOT}/tests/test_activate_output.actual
+if ! diff ${ROOT}/tests/test_activate_output.expected ${ROOT}/tests/test_activate_output.actual; then
+ echo "$0: Failed to get expected output from ${VIRTUALENV}!" 1>&2
+ exit 1
+fi
+
+echo "$0: Created virtualenv ${TESTENV}." 1>&2
+
+echo "$0: Activating ${TESTENV}..." 1>&2
+. ${TESTENV}/bin/activate
+echo "$0: Activated ${TESTENV}." 1>&2
+
+echo "$0: Checking value of \$VIRTUAL_ENV..." 1>&2
+
+if [ "$VIRTUAL_ENV" != "${TESTENV}" ]; then
+ echo "$0: Expected \$VIRTUAL_ENV to be set to \"${TESTENV}\"; actual value: \"${VIRTUAL_ENV}\"!" 1>&2
+ exit 2
+fi
+
+echo "$0: \$VIRTUAL_ENV = \"${VIRTUAL_ENV}\" -- OK." 1>&2
+
+echo "$0: Checking output of \$(which python)..." 1>&2
+
+if [ "$(which python)" != "${TESTENV}/bin/python" ]; then
+ echo "$0: Expected \$(which python) to return \"${TESTENV}/bin/python\"; actual value: \"$(which python)\"!" 1>&2
+ exit 3
+fi
+
+echo "$0: Output of \$(which python) is OK." 1>&2
+
+echo "$0: Checking output of \$(which pip)..." 1>&2
+
+if [ "$(which pip)" != "${TESTENV}/bin/pip" ]; then
+ echo "$0: Expected \$(which pip) to return \"${TESTENV}/bin/pip\"; actual value: \"$(which pip)\"!" 1>&2
+ exit 4
+fi
+
+echo "$0: Output of \$(which pip) is OK." 1>&2
+
+echo "$0: Checking output of \$(which easy_install)..." 1>&2
+
+if [ "$(which easy_install)" != "${TESTENV}/bin/easy_install" ]; then
+ echo "$0: Expected \$(which easy_install) to return \"${TESTENV}/bin/easy_install\"; actual value: \"$(which easy_install)\"!" 1>&2
+ exit 5
+fi
+
+echo "$0: Output of \$(which easy_install) is OK." 1>&2
+
+echo "$0: Executing a simple Python program..." 1>&2
+
+TESTENV=${TESTENV} python <<__END__
+import os, sys
+
+expected_site_packages = os.path.join(os.environ['TESTENV'], 'lib','python%s' % sys.version[:3], 'site-packages')
+site_packages = os.path.join(os.environ['VIRTUAL_ENV'], 'lib', 'python%s' % sys.version[:3], 'site-packages')
+
+assert site_packages == expected_site_packages, 'site_packages did not have expected value; actual value: %r' % site_packages
+
+open(os.path.join(site_packages, 'pydoc_test.py'), 'w').write('"""This is pydoc_test.py"""\n')
+__END__
+
+if [ $? -ne 0 ]; then
+ echo "$0: Python script failed!" 1>&2
+ exit 6
+fi
+
+echo "$0: Execution of a simple Python program -- OK." 1>&2
+
+echo "$0: Testing pydoc..." 1>&2
+
+if ! PAGER=cat pydoc pydoc_test | grep 'This is pydoc_test.py' > /dev/null; then
+ echo "$0: pydoc test failed!" 1>&2
+ exit 7
+fi
+
+echo "$0: pydoc is OK." 1>&2
+
+echo "$0: Deactivating ${TESTENV}..." 1>&2
+deactivate
+echo "$0: Deactivated ${TESTENV}." 1>&2
+echo "$0: OK!" 1>&2
+
+rm -rf ${TESTENV}
+
diff --git a/python/virtualenv/tests/test_activate_output.expected b/python/virtualenv/tests/test_activate_output.expected
new file mode 100644
index 000000000..d49469feb
--- /dev/null
+++ b/python/virtualenv/tests/test_activate_output.expected
@@ -0,0 +1,2 @@
+New python executable in /tmp/test_virtualenv_activate.venv/bin/python
+Installing setuptools, pip, wheel...done.
diff --git a/python/virtualenv/tests/test_cmdline.py b/python/virtualenv/tests/test_cmdline.py
new file mode 100644
index 000000000..9682ef003
--- /dev/null
+++ b/python/virtualenv/tests/test_cmdline.py
@@ -0,0 +1,44 @@
+import sys
+import subprocess
+import virtualenv
+import pytest
+
+VIRTUALENV_SCRIPT = virtualenv.__file__
+
+def test_commandline_basic(tmpdir):
+ """Simple command line usage should work"""
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ str(tmpdir.join('venv'))
+ ])
+
+def test_commandline_explicit_interp(tmpdir):
+ """Specifying the Python interpreter should work"""
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ '-p', sys.executable,
+ str(tmpdir.join('venv'))
+ ])
+
+# The registry lookups to support the abbreviated "-p 3.5" form of specifying
+# a Python interpreter on Windows don't seem to work with Python 3.5. The
+# registry layout is not well documented, and it's not clear that the feature
+# is sufficiently widely used to be worth fixing.
+# See https://github.com/pypa/virtualenv/issues/864
+@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info[:2] >= (3,5)")
+def test_commandline_abbrev_interp(tmpdir):
+ """Specifying abbreviated forms of the Python interpreter should work"""
+ if sys.platform == 'win32':
+ fmt = '%s.%s'
+ else:
+ fmt = 'python%s.%s'
+ abbrev = fmt % (sys.version_info[0], sys.version_info[1])
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ '-p', abbrev,
+ str(tmpdir.join('venv'))
+ ])
+
diff --git a/python/virtualenv/tests/test_virtualenv.py b/python/virtualenv/tests/test_virtualenv.py
new file mode 100644
index 000000000..756cde936
--- /dev/null
+++ b/python/virtualenv/tests/test_virtualenv.py
@@ -0,0 +1,139 @@
+import virtualenv
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+import pytest
+import platform # noqa
+
+from mock import patch, Mock
+
+
+def test_version():
+ """Should have a version string"""
+ assert virtualenv.virtualenv_version, "Should have version"
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_absolute_path(mock_exists):
+ """Should return absolute path if given and exists"""
+ mock_exists.return_value = True
+ virtualenv.is_executable = Mock(return_value=True)
+ test_abs_path = os.path.abspath("/usr/bin/python53")
+
+ exe = virtualenv.resolve_interpreter(test_abs_path)
+
+ assert exe == test_abs_path, "Absolute path should return as is"
+ mock_exists.assert_called_with(test_abs_path)
+ virtualenv.is_executable.assert_called_with(test_abs_path)
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_nonexistent_interpreter(mock_exists):
+ """Should SystemExit with an nonexistent python interpreter path"""
+ mock_exists.return_value = False
+
+ with pytest.raises(SystemExit):
+ virtualenv.resolve_interpreter("/usr/bin/python53")
+
+ mock_exists.assert_called_with("/usr/bin/python53")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_invalid_interpreter(mock_exists):
+ """Should exit when with absolute path if not exists"""
+ mock_exists.return_value = True
+ virtualenv.is_executable = Mock(return_value=False)
+ invalid = os.path.abspath("/usr/bin/pyt_hon53")
+
+ with pytest.raises(SystemExit):
+ virtualenv.resolve_interpreter(invalid)
+
+ mock_exists.assert_called_with(invalid)
+ virtualenv.is_executable.assert_called_with(invalid)
+
+
+def test_activate_after_future_statements():
+ """Should insert activation line after last future statement"""
+ script = [
+ '#!/usr/bin/env python',
+ 'from __future__ import with_statement',
+ 'from __future__ import print_function',
+ 'print("Hello, world!")'
+ ]
+ assert virtualenv.relative_script(script) == [
+ '#!/usr/bin/env python',
+ 'from __future__ import with_statement',
+ 'from __future__ import print_function',
+ '',
+ "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this",
+ '',
+ 'print("Hello, world!")'
+ ]
+
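+
+# Editor's sketch (not part of the vendored suite): the complementary case to
+# the test above, assuming relative_script() places the activation line right
+# after the shebang when no __future__ imports are present.
+def test_activate_without_future_statements():
+    """Should insert activation line directly after the shebang"""
+    script = [
+        '#!/usr/bin/env python',
+        'print("Hello, world!")'
+    ]
+    rewritten = virtualenv.relative_script(script)
+    assert rewritten[0] == '#!/usr/bin/env python'
+    assert any('activate_this.py' in line for line in rewritten)
+    assert rewritten[-1] == 'print("Hello, world!")'
+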
+
+def test_cop_update_defaults_with_store_false():
+ """store_false options need reverted logic"""
+ class MyConfigOptionParser(virtualenv.ConfigOptionParser):
+ def __init__(self, *args, **kwargs):
+ self.config = virtualenv.ConfigParser.RawConfigParser()
+ self.files = []
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def get_environ_vars(self, prefix='VIRTUALENV_'):
+ yield ("no_site_packages", "1")
+
+ cop = MyConfigOptionParser()
+ cop.add_option(
+ '--no-site-packages',
+ dest='system_site_packages',
+ action='store_false',
+ help="Don't give access to the global site-packages dir to the "
+ "virtual environment (default)")
+
+ defaults = {}
+ cop.update_defaults(defaults)
+ assert defaults == {'system_site_packages': 0}
+
+def test_install_python_bin():
+ """Should create the right python executables and links"""
+ tmp_virtualenv = tempfile.mkdtemp()
+ try:
+ home_dir, lib_dir, inc_dir, bin_dir = \
+ virtualenv.path_locations(tmp_virtualenv)
+ virtualenv.install_python(home_dir, lib_dir, inc_dir, bin_dir, False,
+ False)
+
+ if virtualenv.is_win:
+ required_executables = [ 'python.exe', 'pythonw.exe']
+ else:
+ py_exe_no_version = 'python'
+ py_exe_version_major = 'python%s' % sys.version_info[0]
+ py_exe_version_major_minor = 'python%s.%s' % (
+ sys.version_info[0], sys.version_info[1])
+ required_executables = [ py_exe_no_version, py_exe_version_major,
+ py_exe_version_major_minor ]
+
+ for pth in required_executables:
+ assert os.path.exists(os.path.join(bin_dir, pth)), ("%s should "
+ "exist in bin_dir" % pth)
+ finally:
+ shutil.rmtree(tmp_virtualenv)
+
+
+@pytest.mark.skipif("platform.python_implementation() == 'PyPy'")
+def test_always_copy_option():
+ """Should be no symlinks in directory tree"""
+ tmp_virtualenv = tempfile.mkdtemp()
+ ve_path = os.path.join(tmp_virtualenv, 'venv')
+ try:
+ virtualenv.create_environment(ve_path, symlink=False)
+
+ for root, dirs, files in os.walk(tmp_virtualenv):
+ for f in files + dirs:
+ full_name = os.path.join(root, f)
+ assert not os.path.islink(full_name), "%s should not be a" \
+ " symlink (to %s)" % (full_name, os.readlink(full_name))
+ finally:
+ shutil.rmtree(tmp_virtualenv)
diff --git a/python/virtualenv/virtualenv.py b/python/virtualenv/virtualenv.py
new file mode 100755
index 000000000..e363021cc
--- /dev/null
+++ b/python/virtualenv/virtualenv.py
@@ -0,0 +1,2329 @@
+#!/usr/bin/env python
+"""Create a "virtual" Python installation"""
+
+import os
+import sys
+
+# If we are running in a new interpreter to create a virtualenv,
+# we do NOT want paths from our existing location interfering with anything,
+# So we remove this file's directory from sys.path - most likely to be
+# the previous interpreter's site-packages. Solves #705, #763, #779
+if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+ for path in sys.path[:]:
+ if os.path.realpath(os.path.dirname(__file__)) == os.path.realpath(path):
+ sys.path.remove(path)
+
+import base64
+import codecs
+import optparse
+import re
+import shutil
+import logging
+import zlib
+import errno
+import glob
+import distutils.sysconfig
+import struct
+import subprocess
+import pkgutil
+import tempfile
+import textwrap
+from distutils.util import strtobool
+from os.path import join
+
+try:
+ import ConfigParser
+except ImportError:
+ import configparser as ConfigParser
+
+__version__ = "15.0.1"
+virtualenv_version = __version__ # legacy
+
+if sys.version_info < (2, 6):
+ print('ERROR: %s' % sys.exc_info()[1])
+ print('ERROR: this script requires Python 2.6 or greater.')
+ sys.exit(101)
+
+try:
+ basestring
+except NameError:
+ basestring = str
+
+py_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
+
+is_jython = sys.platform.startswith('java')
+is_pypy = hasattr(sys, 'pypy_version_info')
+is_win = (sys.platform == 'win32' and os.sep == '\\')
+is_cygwin = (sys.platform == 'cygwin')
+is_msys2 = (sys.platform == 'win32' and os.sep == '/')
+is_darwin = (sys.platform == 'darwin')
+abiflags = getattr(sys, 'abiflags', '')
+
+user_dir = os.path.expanduser('~')
+if is_win:
+ default_storage_dir = os.path.join(user_dir, 'virtualenv')
+else:
+ default_storage_dir = os.path.join(user_dir, '.virtualenv')
+default_config_file = os.path.join(default_storage_dir, 'virtualenv.ini')
+
+if is_pypy:
+ expected_exe = 'pypy'
+elif is_jython:
+ expected_exe = 'jython'
+else:
+ expected_exe = 'python'
+
+# Return a mapping of version -> Python executable
+# Only provided for Windows, where the information in the registry is used
+if not is_win:
+ def get_installed_pythons():
+ return {}
+else:
+ try:
+ import winreg
+ except ImportError:
+ import _winreg as winreg
+
+ def get_installed_pythons():
+ try:
+ python_core = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
+ "Software\\Python\\PythonCore")
+ except WindowsError:
+ # No registered Python installations
+ return {}
+ i = 0
+ versions = []
+ while True:
+ try:
+ versions.append(winreg.EnumKey(python_core, i))
+ i = i + 1
+ except WindowsError:
+ break
+ exes = dict()
+ for ver in versions:
+ try:
+ path = winreg.QueryValue(python_core, "%s\\InstallPath" % ver)
+ except WindowsError:
+ continue
+ exes[ver] = join(path, "python.exe")
+
+ winreg.CloseKey(python_core)
+
+ # Add the major versions
+ # Sort the keys, then repeatedly update the major version entry
+ # Last executable (i.e., highest version) wins with this approach
+ for ver in sorted(exes):
+ exes[ver[0]] = exes[ver]
+
+ return exes
+
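+# Editor's note (illustrative): on Windows get_installed_pythons() returns a
+# mapping such as
+#   {'2.7': 'C:\\Python27\\python.exe', '3.5': 'C:\\Python35\\python.exe',
+#    '2': 'C:\\Python27\\python.exe', '3': 'C:\\Python35\\python.exe'}
+# where bare major versions point at the highest installed minor version;
+# elsewhere it returns an empty dict.
+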
+REQUIRED_MODULES = ['os', 'posix', 'posixpath', 'nt', 'ntpath', 'genericpath',
+ 'fnmatch', 'locale', 'encodings', 'codecs',
+ 'stat', 'UserDict', 'readline', 'copy_reg', 'types',
+ 're', 'sre', 'sre_parse', 'sre_constants', 'sre_compile',
+ 'zlib']
+
+REQUIRED_FILES = ['lib-dynload', 'config']
+
+majver, minver = sys.version_info[:2]
+if majver == 2:
+ if minver >= 6:
+ REQUIRED_MODULES.extend(['warnings', 'linecache', '_abcoll', 'abc'])
+ if minver >= 7:
+ REQUIRED_MODULES.extend(['_weakrefset'])
+ if is_msys2:
+ REQUIRED_MODULES.extend(['functools'])
+elif majver == 3:
+ # Some extra modules are needed for Python 3, but different ones
+ # for different versions.
+ REQUIRED_MODULES.extend([
+ '_abcoll', 'warnings', 'linecache', 'abc', 'io', '_weakrefset',
+ 'copyreg', 'tempfile', 'random', '__future__', 'collections',
+ 'keyword', 'tarfile', 'shutil', 'struct', 'copy', 'tokenize',
+ 'token', 'functools', 'heapq', 'bisect', 'weakref', 'reprlib'
+ ])
+ if minver >= 2:
+ REQUIRED_FILES[-1] = 'config-%s' % majver
+ if minver >= 3:
+ import sysconfig
+ platdir = sysconfig.get_config_var('PLATDIR')
+ REQUIRED_FILES.append(platdir)
+ REQUIRED_MODULES.extend([
+ 'base64', '_dummy_thread', 'hashlib', 'hmac',
+ 'imp', 'importlib', 'rlcompleter'
+ ])
+ if minver >= 4:
+ REQUIRED_MODULES.extend([
+ 'operator',
+ '_collections_abc',
+ '_bootlocale',
+ ])
+
+if is_pypy:
+ # these are needed to correctly display the exceptions that may happen
+ # during the bootstrap
+ REQUIRED_MODULES.extend(['traceback', 'linecache'])
+
+
+class Logger(object):
+
+ """
+    Logging object for use in command-line scripts. Allows ranges of
+ levels, to avoid some redundancy of displayed information.
+ """
+
+ DEBUG = logging.DEBUG
+ INFO = logging.INFO
+ NOTIFY = (logging.INFO+logging.WARN)/2
+ WARN = WARNING = logging.WARN
+ ERROR = logging.ERROR
+ FATAL = logging.FATAL
+
+ LEVELS = [DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
+
+ def __init__(self, consumers):
+ self.consumers = consumers
+ self.indent = 0
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def debug(self, msg, *args, **kw):
+ self.log(self.DEBUG, msg, *args, **kw)
+
+ def info(self, msg, *args, **kw):
+ self.log(self.INFO, msg, *args, **kw)
+
+ def notify(self, msg, *args, **kw):
+ self.log(self.NOTIFY, msg, *args, **kw)
+
+ def warn(self, msg, *args, **kw):
+ self.log(self.WARN, msg, *args, **kw)
+
+ def error(self, msg, *args, **kw):
+ self.log(self.ERROR, msg, *args, **kw)
+
+ def fatal(self, msg, *args, **kw):
+ self.log(self.FATAL, msg, *args, **kw)
+
+ def log(self, level, msg, *args, **kw):
+ if args:
+ if kw:
+ raise TypeError(
+ "You may give positional or keyword arguments, not both")
+ args = args or kw
+ rendered = None
+ for consumer_level, consumer in self.consumers:
+ if self.level_matches(level, consumer_level):
+ if (self.in_progress_hanging
+ and consumer in (sys.stdout, sys.stderr)):
+ self.in_progress_hanging = False
+ sys.stdout.write('\n')
+ sys.stdout.flush()
+ if rendered is None:
+ if args:
+ rendered = msg % args
+ else:
+ rendered = msg
+ rendered = ' '*self.indent + rendered
+ if hasattr(consumer, 'write'):
+ consumer.write(rendered+'\n')
+ else:
+ consumer(rendered)
+
+ def start_progress(self, msg):
+ assert not self.in_progress, (
+ "Tried to start_progress(%r) while in_progress %r"
+ % (msg, self.in_progress))
+ if self.level_matches(self.NOTIFY, self._stdout_level()):
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+ self.in_progress_hanging = True
+ else:
+ self.in_progress_hanging = False
+ self.in_progress = msg
+
+ def end_progress(self, msg='done.'):
+ assert self.in_progress, (
+ "Tried to end_progress without start_progress")
+ if self.stdout_level_matches(self.NOTIFY):
+ if not self.in_progress_hanging:
+ # Some message has been printed out since start_progress
+ sys.stdout.write('...' + self.in_progress + msg + '\n')
+ sys.stdout.flush()
+ else:
+ sys.stdout.write(msg + '\n')
+ sys.stdout.flush()
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def show_progress(self):
+ """If we are in a progress scope, and no log messages have been
+ shown, write out another '.'"""
+ if self.in_progress_hanging:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+
+ def stdout_level_matches(self, level):
+ """Returns true if a message at this level will go to stdout"""
+ return self.level_matches(level, self._stdout_level())
+
+ def _stdout_level(self):
+ """Returns the level that stdout runs at"""
+ for level, consumer in self.consumers:
+ if consumer is sys.stdout:
+ return level
+ return self.FATAL
+
+ def level_matches(self, level, consumer_level):
+ """
+ >>> l = Logger([])
+ >>> l.level_matches(3, 4)
+ False
+ >>> l.level_matches(3, 2)
+ True
+ >>> l.level_matches(slice(None, 3), 3)
+ False
+ >>> l.level_matches(slice(None, 3), 2)
+ True
+ >>> l.level_matches(slice(1, 3), 1)
+ True
+ >>> l.level_matches(slice(2, 3), 1)
+ False
+ """
+ if isinstance(level, slice):
+ start, stop = level.start, level.stop
+ if start is not None and start > consumer_level:
+ return False
+ if stop is not None and stop <= consumer_level:
+ return False
+ return True
+ else:
+ return level >= consumer_level
+
+ #@classmethod
+ def level_for_integer(cls, level):
+ levels = cls.LEVELS
+ if level < 0:
+ return levels[0]
+ if level >= len(levels):
+ return levels[-1]
+ return levels[level]
+
+ level_for_integer = classmethod(level_for_integer)
+
+# create a silent logger just to prevent this from being undefined
+# will be overridden with the requested verbosity when main() is called.
+logger = Logger([(Logger.LEVELS[-1], sys.stdout)])
+
+def mkdir(path):
+ if not os.path.exists(path):
+ logger.info('Creating %s', path)
+ os.makedirs(path)
+ else:
+ logger.info('Directory %s already exists', path)
+
+def copyfileordir(src, dest, symlink=True):
+ if os.path.isdir(src):
+ shutil.copytree(src, dest, symlink)
+ else:
+ shutil.copy2(src, dest)
+
+def copyfile(src, dest, symlink=True):
+ if not os.path.exists(src):
+ # Some bad symlink in the src
+ logger.warn('Cannot find file %s (bad symlink)', src)
+ return
+ if os.path.exists(dest):
+ logger.debug('File %s already exists', dest)
+ return
+ if not os.path.exists(os.path.dirname(dest)):
+ logger.info('Creating parent directories for %s', os.path.dirname(dest))
+ os.makedirs(os.path.dirname(dest))
+ if not os.path.islink(src):
+ srcpath = os.path.abspath(src)
+ else:
+ srcpath = os.readlink(src)
+ if symlink and hasattr(os, 'symlink') and not is_win:
+ logger.info('Symlinking %s', dest)
+ try:
+ os.symlink(srcpath, dest)
+ except (OSError, NotImplementedError):
+ logger.info('Symlinking failed, copying to %s', dest)
+ copyfileordir(src, dest, symlink)
+ else:
+ logger.info('Copying to %s', dest)
+ copyfileordir(src, dest, symlink)
+
+def writefile(dest, content, overwrite=True):
+ if not os.path.exists(dest):
+ logger.info('Writing %s', dest)
+ with open(dest, 'wb') as f:
+ f.write(content.encode('utf-8'))
+ return
+ else:
+ with open(dest, 'rb') as f:
+ c = f.read()
+ if c != content.encode("utf-8"):
+ if not overwrite:
+ logger.notify('File %s exists with different content; not overwriting', dest)
+ return
+ logger.notify('Overwriting %s with new content', dest)
+ with open(dest, 'wb') as f:
+ f.write(content.encode('utf-8'))
+ else:
+ logger.info('Content %s already in place', dest)
+
+def rmtree(dir):
+ if os.path.exists(dir):
+ logger.notify('Deleting tree %s', dir)
+ shutil.rmtree(dir)
+ else:
+ logger.info('Do not need to delete %s; already gone', dir)
+
+def make_exe(fn):
+ if hasattr(os, 'chmod'):
+        oldmode = os.stat(fn).st_mode & 0xFFF # keep permission bits (0xFFF == 0o7777)
+        newmode = (oldmode | 0x16D) & 0xFFF # ensure r-x for all (0x16D == 0o555)
+ os.chmod(fn, newmode)
+ logger.info('Changed mode of %s to %s', fn, oct(newmode))
+
+def _find_file(filename, dirs):
+ for dir in reversed(dirs):
+ files = glob.glob(os.path.join(dir, filename))
+ if files and os.path.isfile(files[0]):
+ return True, files[0]
+ return False, filename
+
+def file_search_dirs():
+ here = os.path.dirname(os.path.abspath(__file__))
+ dirs = [here, join(here, 'virtualenv_support')]
+ if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
+ # Probably some boot script; just in case virtualenv is installed...
+ try:
+ import virtualenv
+ except ImportError:
+ pass
+ else:
+ dirs.append(os.path.join(
+ os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
+ return [d for d in dirs if os.path.isdir(d)]
+
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+ """
+ Custom help formatter for use in ConfigOptionParser that updates
+ the defaults before expanding them, allowing them to show up correctly
+ in the help listing
+ """
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser.update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+ """
+ Custom option parser which updates its defaults by checking the
+ configuration files and environmental variables
+ """
+ def __init__(self, *args, **kwargs):
+ self.config = ConfigParser.RawConfigParser()
+ self.files = self.get_config_files()
+ self.config.read(self.files)
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def get_config_files(self):
+ config_file = os.environ.get('VIRTUALENV_CONFIG_FILE', False)
+ if config_file and os.path.exists(config_file):
+ return [config_file]
+ return [default_config_file]
+
+ def update_defaults(self, defaults):
+ """
+ Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists).
+ """
+ # Then go and look for the other sources of configuration:
+ config = {}
+ # 1. config files
+ config.update(dict(self.get_config_section('virtualenv')))
+ # 2. environmental variables
+ config.update(dict(self.get_environ_vars()))
+ # Then set the options with those values
+ for key, val in config.items():
+ key = key.replace('_', '-')
+ if not key.startswith('--'):
+ key = '--%s' % key # only prefer long opts
+ option = self.get_option(key)
+ if option is not None:
+ # ignore empty values
+ if not val:
+ continue
+ # handle multiline configs
+ if option.action == 'append':
+ val = val.split()
+ else:
+ option.nargs = 1
+ if option.action == 'store_false':
+ val = not strtobool(val)
+ elif option.action in ('store_true', 'count'):
+ val = strtobool(val)
+ try:
+ val = option.convert_value(key, val)
+ except optparse.OptionValueError:
+ e = sys.exc_info()[1]
+ print("An error occurred during configuration: %s" % e)
+ sys.exit(3)
+ defaults[option.dest] = val
+ return defaults
+
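+    # Editor's note (illustrative): with ~/.virtualenv/virtualenv.ini containing
+    #   [virtualenv]
+    #   always-copy = true
+    # or with VIRTUALENV_ALWAYS_COPY=1 in the environment, the '--always-copy'
+    # option (dest='symlink', action='store_false') resolves to
+    # defaults['symlink'] = False.
+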
+ def get_config_section(self, name):
+ """
+ Get a section of a configuration
+ """
+ if self.config.has_section(name):
+ return self.config.items(name)
+ return []
+
+ def get_environ_vars(self, prefix='VIRTUALENV_'):
+ """
+ Returns a generator with all environmental vars with prefix VIRTUALENV
+ """
+ for key, val in os.environ.items():
+ if key.startswith(prefix):
+ yield (key.replace(prefix, '').lower(), val)
+
+ def get_default_values(self):
+ """
+        Overriding to make updating the defaults after instantiation of
+        the option parser possible; update_defaults() does the dirty work.
+ """
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ defaults = self.update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, basestring):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+
+def main():
+ parser = ConfigOptionParser(
+ version=virtualenv_version,
+ usage="%prog [OPTIONS] DEST_DIR",
+ formatter=UpdatingDefaultsHelpFormatter())
+
+ parser.add_option(
+ '-v', '--verbose',
+ action='count',
+ dest='verbose',
+ default=0,
+ help="Increase verbosity.")
+
+ parser.add_option(
+ '-q', '--quiet',
+ action='count',
+ dest='quiet',
+ default=0,
+ help='Decrease verbosity.')
+
+ parser.add_option(
+ '-p', '--python',
+ dest='python',
+ metavar='PYTHON_EXE',
+ help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
+ 'interpreter to create the new environment. The default is the interpreter that '
+ 'virtualenv was installed with (%s)' % sys.executable)
+
+ parser.add_option(
+ '--clear',
+ dest='clear',
+ action='store_true',
+ help="Clear out the non-root install and start from scratch.")
+
+ parser.set_defaults(system_site_packages=False)
+ parser.add_option(
+ '--no-site-packages',
+ dest='system_site_packages',
+ action='store_false',
+ help="DEPRECATED. Retained only for backward compatibility. "
+ "Not having access to global site-packages is now the default behavior.")
+
+ parser.add_option(
+ '--system-site-packages',
+ dest='system_site_packages',
+ action='store_true',
+ help="Give the virtual environment access to the global site-packages.")
+
+ parser.add_option(
+ '--always-copy',
+ dest='symlink',
+ action='store_false',
+ default=True,
+ help="Always copy files rather than symlinking.")
+
+ parser.add_option(
+ '--unzip-setuptools',
+ dest='unzip_setuptools',
+ action='store_true',
+ help="Unzip Setuptools when installing it.")
+
+ parser.add_option(
+ '--relocatable',
+ dest='relocatable',
+ action='store_true',
+ help='Make an EXISTING virtualenv environment relocatable. '
+ 'This fixes up scripts and makes all .pth files relative.')
+
+ parser.add_option(
+ '--no-setuptools',
+ dest='no_setuptools',
+ action='store_true',
+ help='Do not install setuptools in the new virtualenv.')
+
+ parser.add_option(
+ '--no-pip',
+ dest='no_pip',
+ action='store_true',
+ help='Do not install pip in the new virtualenv.')
+
+ parser.add_option(
+ '--no-wheel',
+ dest='no_wheel',
+ action='store_true',
+ help='Do not install wheel in the new virtualenv.')
+
+ default_search_dirs = file_search_dirs()
+ parser.add_option(
+ '--extra-search-dir',
+ dest="search_dirs",
+ action="append",
+ metavar='DIR',
+ default=default_search_dirs,
+ help="Directory to look for setuptools/pip distributions in. "
+ "This option can be used multiple times.")
+
+ parser.add_option(
+ "--download",
+ dest="download",
+ default=True,
+ action="store_true",
+ help="Download preinstalled packages from PyPI.",
+ )
+
+ parser.add_option(
+ "--no-download",
+ '--never-download',
+ dest="download",
+ action="store_false",
+ help="Do not download preinstalled packages from PyPI.",
+ )
+
+ parser.add_option(
+ '--prompt',
+ dest='prompt',
+ help='Provides an alternative prompt prefix for this environment.')
+
+ parser.add_option(
+ '--setuptools',
+ dest='setuptools',
+ action='store_true',
+ help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+ parser.add_option(
+ '--distribute',
+ dest='distribute',
+ action='store_true',
+ help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+ if 'extend_parser' in globals():
+ extend_parser(parser)
+
+ options, args = parser.parse_args()
+
+ global logger
+
+ if 'adjust_options' in globals():
+ adjust_options(options, args)
+
+ verbosity = options.verbose - options.quiet
+ logger = Logger([(Logger.level_for_integer(2 - verbosity), sys.stdout)])
+
+ if options.python and not os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+ env = os.environ.copy()
+ interpreter = resolve_interpreter(options.python)
+ if interpreter == sys.executable:
+ logger.warn('Already using interpreter %s' % interpreter)
+ else:
+ logger.notify('Running virtualenv with interpreter %s' % interpreter)
+ env['VIRTUALENV_INTERPRETER_RUNNING'] = 'true'
+ file = __file__
+ if file.endswith('.pyc'):
+ file = file[:-1]
+ popen = subprocess.Popen([interpreter, file] + sys.argv[1:], env=env)
+ raise SystemExit(popen.wait())
+
+ if not args:
+ print('You must provide a DEST_DIR')
+ parser.print_help()
+ sys.exit(2)
+ if len(args) > 1:
+ print('There must be only one argument: DEST_DIR (you gave %s)' % (
+ ' '.join(args)))
+ parser.print_help()
+ sys.exit(2)
+
+ home_dir = args[0]
+
+ if os.path.exists(home_dir) and os.path.isfile(home_dir):
+ logger.fatal('ERROR: File already exists and is not a directory.')
+ logger.fatal('Please provide a different path or delete the file.')
+ sys.exit(3)
+
+ if os.environ.get('WORKING_ENV'):
+ logger.fatal('ERROR: you cannot run virtualenv while in a workingenv')
+ logger.fatal('Please deactivate your workingenv, then re-run this script')
+ sys.exit(3)
+
+ if 'PYTHONHOME' in os.environ:
+ logger.warn('PYTHONHOME is set. You *must* activate the virtualenv before using it')
+ del os.environ['PYTHONHOME']
+
+ if options.relocatable:
+ make_environment_relocatable(home_dir)
+ return
+
+ create_environment(home_dir,
+ site_packages=options.system_site_packages,
+ clear=options.clear,
+ unzip_setuptools=options.unzip_setuptools,
+ prompt=options.prompt,
+ search_dirs=options.search_dirs,
+ download=options.download,
+ no_setuptools=options.no_setuptools,
+ no_pip=options.no_pip,
+ no_wheel=options.no_wheel,
+ symlink=options.symlink and hasattr(os, 'symlink')) # MOZ: Make sure we don't use symlink when we don't have it
+ if 'after_install' in globals():
+ after_install(options, home_dir)
+
+def call_subprocess(cmd, show_stdout=True,
+ filter_stdout=None, cwd=None,
+ raise_on_returncode=True, extra_env=None,
+ remove_from_env=None, stdin=None):
+ cmd_parts = []
+ for part in cmd:
+ if len(part) > 45:
+ part = part[:20]+"..."+part[-20:]
+ if ' ' in part or '\n' in part or '"' in part or "'" in part:
+ part = '"%s"' % part.replace('"', '\\"')
+ if hasattr(part, 'decode'):
+ try:
+ part = part.decode(sys.getdefaultencoding())
+ except UnicodeDecodeError:
+ part = part.decode(sys.getfilesystemencoding())
+ cmd_parts.append(part)
+ cmd_desc = ' '.join(cmd_parts)
+ if show_stdout:
+ stdout = None
+ else:
+ stdout = subprocess.PIPE
+ logger.debug("Running command %s" % cmd_desc)
+ if extra_env or remove_from_env:
+ env = os.environ.copy()
+ if extra_env:
+ env.update(extra_env)
+ if remove_from_env:
+ for varname in remove_from_env:
+ env.pop(varname, None)
+ else:
+ env = None
+ try:
+ proc = subprocess.Popen(
+ cmd, stderr=subprocess.STDOUT,
+ stdin=None if stdin is None else subprocess.PIPE,
+ stdout=stdout,
+ cwd=cwd, env=env)
+ except Exception:
+ e = sys.exc_info()[1]
+ logger.fatal(
+ "Error %s while executing command %s" % (e, cmd_desc))
+ raise
+ all_output = []
+ if stdout is not None:
+ if stdin is not None:
+ proc.stdin.write(stdin)
+ proc.stdin.close()
+
+ stdout = proc.stdout
+ encoding = sys.getdefaultencoding()
+ fs_encoding = sys.getfilesystemencoding()
+ while 1:
+ line = stdout.readline()
+ try:
+ line = line.decode(encoding)
+ except UnicodeDecodeError:
+ line = line.decode(fs_encoding)
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line)
+ if filter_stdout:
+ level = filter_stdout(line)
+ if isinstance(level, tuple):
+ level, line = level
+ logger.log(level, line)
+ if not logger.stdout_level_matches(level):
+ logger.show_progress()
+ else:
+ logger.info(line)
+ else:
+ proc.communicate(stdin)
+ proc.wait()
+ if proc.returncode:
+ if raise_on_returncode:
+ if all_output:
+ logger.notify('Complete output from command %s:' % cmd_desc)
+ logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+ raise OSError(
+ "Command %s failed with error code %s"
+ % (cmd_desc, proc.returncode))
+ else:
+ logger.warn(
+ "Command %s had error code %s"
+ % (cmd_desc, proc.returncode))
+
+def filter_install_output(line):
+ if line.strip().startswith('running'):
+ return Logger.INFO
+ return Logger.DEBUG
+
+def find_wheels(projects, search_dirs):
+ """Find wheels from which we can import PROJECTS.
+
+ Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
+ a list of the first wheel found for each PROJECT
+ """
+
+ wheels = []
+
+ # Look through SEARCH_DIRS for the first suitable wheel. Don't bother
+    # with version checking here, as this is simply to get something we can
+ # then use to install the correct version.
+ for project in projects:
+ for dirname in search_dirs:
+ # This relies on only having "universal" wheels available.
+ # The pattern could be tightened to require -py2.py3-none-any.whl.
+ files = glob.glob(os.path.join(dirname, project + '-*.whl'))
+ if files:
+ wheels.append(os.path.abspath(files[0]))
+ break
+ else:
+ # We're out of luck, so quit with a suitable error
+ logger.fatal('Cannot find a wheel for %s' % (project,))
+
+ return wheels
+
+def install_wheel(project_names, py_executable, search_dirs=None,
+ download=False):
+ if search_dirs is None:
+ search_dirs = file_search_dirs()
+
+ wheels = find_wheels(['setuptools', 'pip'], search_dirs)
+ pythonpath = os.pathsep.join(wheels)
+
+ # PIP_FIND_LINKS uses space as the path separator and thus cannot have paths
+ # with spaces in them. Convert any of those to local file:// URL form.
+ try:
+ from urlparse import urljoin
+ from urllib import pathname2url
+ except ImportError:
+ from urllib.parse import urljoin
+ from urllib.request import pathname2url
+ def space_path2url(p):
+ if ' ' not in p:
+ return p
+ return urljoin('file:', pathname2url(os.path.abspath(p)))
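+    # Editor's note (illustrative): space_path2url('/opt/my wheels') returns
+    # 'file:///opt/my%20wheels', which survives the space-separated
+    # PIP_FIND_LINKS value built below.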
+ findlinks = ' '.join(space_path2url(d) for d in search_dirs)
+
+ SCRIPT = textwrap.dedent("""
+ import sys
+ import pkgutil
+ import tempfile
+ import os
+
+ import pip
+
+ cert_data = pkgutil.get_data("pip._vendor.requests", "cacert.pem")
+ if cert_data is not None:
+ cert_file = tempfile.NamedTemporaryFile(delete=False)
+ cert_file.write(cert_data)
+ cert_file.close()
+ else:
+ cert_file = None
+
+ try:
+ args = ["install", "--ignore-installed"]
+ if cert_file is not None:
+ args += ["--cert", cert_file.name]
+ args += sys.argv[1:]
+
+ sys.exit(pip.main(args))
+ finally:
+ if cert_file is not None:
+ os.remove(cert_file.name)
+ """).encode("utf8")
+
+ cmd = [py_executable, '-'] + project_names
+ logger.start_progress('Installing %s...' % (', '.join(project_names)))
+ logger.indent += 2
+
+ env = {
+ "PYTHONPATH": pythonpath,
+ "JYTHONPATH": pythonpath, # for Jython < 3.x
+ "PIP_FIND_LINKS": findlinks,
+ "PIP_USE_WHEEL": "1",
+ "PIP_ONLY_BINARY": ":all:",
+ "PIP_PRE": "1",
+ "PIP_USER": "0",
+ }
+
+ if not download:
+ env["PIP_NO_INDEX"] = "1"
+
+ try:
+ call_subprocess(cmd, show_stdout=False, extra_env=env, stdin=SCRIPT)
+ finally:
+ logger.indent -= 2
+ logger.end_progress()
+
+
+def create_environment(home_dir, site_packages=False, clear=False,
+ unzip_setuptools=False,
+ prompt=None, search_dirs=None, download=False,
+ no_setuptools=False, no_pip=False, no_wheel=False,
+ symlink=True):
+ """
+ Creates a new environment in ``home_dir``.
+
+ If ``site_packages`` is true, then the global ``site-packages/``
+ directory will be on the path.
+
+ If ``clear`` is true (default False) then the environment will
+ first be cleared.
+ """
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+
+ py_executable = os.path.abspath(install_python(
+ home_dir, lib_dir, inc_dir, bin_dir,
+ site_packages=site_packages, clear=clear, symlink=symlink))
+
+ install_distutils(home_dir)
+
+ to_install = []
+
+ if not no_setuptools:
+ to_install.append('setuptools')
+
+ if not no_pip:
+ to_install.append('pip')
+
+ if not no_wheel:
+ to_install.append('wheel')
+
+ if to_install:
+ install_wheel(
+ to_install,
+ py_executable,
+ search_dirs,
+ download=download,
+ )
+
+ install_activate(home_dir, bin_dir, prompt)
+
+ install_python_config(home_dir, bin_dir, prompt)
+
+def is_executable_file(fpath):
+ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+def path_locations(home_dir):
+ """Return the path locations for the environment (where libraries are,
+ where scripts go, etc)"""
+ home_dir = os.path.abspath(home_dir)
+ # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
+ # prefix arg is broken: http://bugs.python.org/issue3386
+ if is_win:
+ # Windows has lots of problems with executables with spaces in
+ # the name; this function will remove them (using the ~1
+ # format):
+ mkdir(home_dir)
+ if ' ' in home_dir:
+ import ctypes
+ GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
+ size = max(len(home_dir)+1, 256)
+ buf = ctypes.create_unicode_buffer(size)
+ try:
+ u = unicode
+ except NameError:
+ u = str
+ ret = GetShortPathName(u(home_dir), buf, size)
+ if not ret:
+ print('Error: the path "%s" has a space in it' % home_dir)
+ print('We could not determine the short pathname for it.')
+ print('Exiting.')
+ sys.exit(3)
+ home_dir = str(buf.value)
+ lib_dir = join(home_dir, 'Lib')
+ inc_dir = join(home_dir, 'Include')
+ bin_dir = join(home_dir, 'Scripts')
+ if is_jython:
+ lib_dir = join(home_dir, 'Lib')
+ inc_dir = join(home_dir, 'Include')
+ bin_dir = join(home_dir, 'bin')
+ elif is_pypy:
+ lib_dir = home_dir
+ inc_dir = join(home_dir, 'include')
+ bin_dir = join(home_dir, 'bin')
+ elif not is_win:
+ lib_dir = join(home_dir, 'lib', py_version)
+ inc_dir = join(home_dir, 'include', py_version + abiflags)
+ bin_dir = join(home_dir, 'bin')
+ return home_dir, lib_dir, inc_dir, bin_dir
+
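+# Editor's note (illustrative): on a POSIX CPython 2.7 build,
+#   path_locations('/tmp/venv') ==
+#   ('/tmp/venv', '/tmp/venv/lib/python2.7',
+#    '/tmp/venv/include/python2.7', '/tmp/venv/bin')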
+
+def change_prefix(filename, dst_prefix):
+ prefixes = [sys.prefix]
+
+ if is_darwin:
+ prefixes.extend((
+ os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(sys.prefix, "Extras", "lib", "python"),
+ os.path.join("~", "Library", "Python", sys.version[:3], "site-packages"),
+ # Python 2.6 no-frameworks
+ os.path.join("~", ".local", "lib","python", sys.version[:3], "site-packages"),
+ # System Python 2.7 on OSX Mountain Lion
+ os.path.join("~", "Library", "Python", sys.version[:3], "lib", "python", "site-packages")))
+
+ if hasattr(sys, 'real_prefix'):
+ prefixes.append(sys.real_prefix)
+ if hasattr(sys, 'base_prefix'):
+ prefixes.append(sys.base_prefix)
+ prefixes = list(map(os.path.expanduser, prefixes))
+ prefixes = list(map(os.path.abspath, prefixes))
+ # Check longer prefixes first so we don't split in the middle of a filename
+ prefixes = sorted(prefixes, key=len, reverse=True)
+ filename = os.path.abspath(filename)
+ # On Windows, make sure drive letter is uppercase
+ if is_win and filename[0] in 'abcdefghijklmnopqrstuvwxyz':
+ filename = filename[0].upper() + filename[1:]
+ for i, prefix in enumerate(prefixes):
+ if is_win and prefix[0] in 'abcdefghijklmnopqrstuvwxyz':
+ prefixes[i] = prefix[0].upper() + prefix[1:]
+ for src_prefix in prefixes:
+ if filename.startswith(src_prefix):
+ _, relpath = filename.split(src_prefix, 1)
+ if src_prefix != os.sep: # sys.prefix == "/"
+ assert relpath[0] == os.sep
+ relpath = relpath[1:]
+ return join(dst_prefix, relpath)
+ assert False, "Filename %s does not start with any of these prefixes: %s" % \
+ (filename, prefixes)
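+
+# Editor's note (illustrative): assuming sys.prefix == '/usr',
+#   change_prefix('/usr/lib/python2.7/os.py', '/home/me/venv')
+# returns '/home/me/venv/lib/python2.7/os.py'.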
+
+def copy_required_modules(dst_prefix, symlink):
+ import imp
+
+ for modname in REQUIRED_MODULES:
+ if modname in sys.builtin_module_names:
+ logger.info("Ignoring built-in bootstrap module: %s" % modname)
+ continue
+ try:
+ f, filename, _ = imp.find_module(modname)
+ except ImportError:
+ logger.info("Cannot import bootstrap module: %s" % modname)
+ else:
+ if f is not None:
+ f.close()
+ # special-case custom readline.so on OS X, but not for pypy:
+ if modname == 'readline' and sys.platform == 'darwin' and not (
+ is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
+ dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
+ elif modname == 'readline' and sys.platform == 'win32':
+ # special-case for Windows, where readline is not a
+ # standard module, though it may have been installed in
+ # site-packages by a third-party package
+ pass
+ else:
+ dst_filename = change_prefix(filename, dst_prefix)
+ copyfile(filename, dst_filename, symlink)
+ if filename.endswith('.pyc'):
+ pyfile = filename[:-1]
+ if os.path.exists(pyfile):
+ copyfile(pyfile, dst_filename[:-1], symlink)
+
+
+def subst_path(prefix_path, prefix, home_dir):
+ prefix_path = os.path.normpath(prefix_path)
+ prefix = os.path.normpath(prefix)
+ home_dir = os.path.normpath(home_dir)
+ if not prefix_path.startswith(prefix):
+ logger.warn('Path not in prefix %r %r', prefix_path, prefix)
+ return
+ return prefix_path.replace(prefix, home_dir, 1)
+
+
+def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
+ """Install just the base environment, no distutils patches etc"""
+ if sys.executable.startswith(bin_dir):
+ print('Please use the *system* python to run this script')
+ return
+
+ if clear:
+ rmtree(lib_dir)
+ ## FIXME: why not delete it?
+ ## Maybe it should delete everything with #!/path/to/venv/python in it
+ logger.notify('Not deleting %s', bin_dir)
+
+ if hasattr(sys, 'real_prefix'):
+ logger.notify('Using real prefix %r' % sys.real_prefix)
+ prefix = sys.real_prefix
+ elif hasattr(sys, 'base_prefix'):
+ logger.notify('Using base prefix %r' % sys.base_prefix)
+ prefix = sys.base_prefix
+ else:
+ prefix = sys.prefix
+ mkdir(lib_dir)
+ fix_lib64(lib_dir, symlink)
+ stdlib_dirs = [os.path.dirname(os.__file__)]
+ if is_win:
+ stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
+ elif is_darwin:
+ stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
+ if hasattr(os, 'symlink'):
+ logger.info('Symlinking Python bootstrap modules')
+ else:
+ logger.info('Copying Python bootstrap modules')
+ logger.indent += 2
+ try:
+ # copy required files...
+ for stdlib_dir in stdlib_dirs:
+ if not os.path.isdir(stdlib_dir):
+ continue
+ for fn in os.listdir(stdlib_dir):
+ bn = os.path.splitext(fn)[0]
+ if fn != 'site-packages' and bn in REQUIRED_FILES:
+ copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink)
+ # ...and modules
+ copy_required_modules(home_dir, symlink)
+ finally:
+ logger.indent -= 2
+ mkdir(join(lib_dir, 'site-packages'))
+ import site
+ site_filename = site.__file__
+ if site_filename.endswith('.pyc') or site_filename.endswith('.pyo'):
+ site_filename = site_filename[:-1]
+ elif site_filename.endswith('$py.class'):
+ site_filename = site_filename.replace('$py.class', '.py')
+ site_filename_dst = change_prefix(site_filename, home_dir)
+ site_dir = os.path.dirname(site_filename_dst)
+    # MOZ: Copies a site.py if it exists instead of using the one hex-encoded
+    # in this file. Necessary for some site.py fixes for the MinGW64 version
+    # of python.
+ site_py_src_path = os.path.join(os.path.dirname(__file__), 'site.py')
+ if os.path.isfile(site_py_src_path):
+ shutil.copy(site_py_src_path, site_filename_dst)
+ else:
+ writefile(site_filename_dst, SITE_PY)
+ writefile(join(site_dir, 'orig-prefix.txt'), prefix)
+ site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
+ if not site_packages:
+ writefile(site_packages_filename, '')
+
+ if is_pypy or is_win:
+ stdinc_dir = join(prefix, 'include')
+ else:
+ stdinc_dir = join(prefix, 'include', py_version + abiflags)
+ if os.path.exists(stdinc_dir):
+ copyfile(stdinc_dir, inc_dir, symlink)
+ else:
+ logger.debug('No include dir %s' % stdinc_dir)
+
+ platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
+ if platinc_dir != stdinc_dir:
+ platinc_dest = distutils.sysconfig.get_python_inc(
+ plat_specific=1, prefix=home_dir)
+ if platinc_dir == platinc_dest:
+ # Do platinc_dest manually due to a CPython bug;
+ # not http://bugs.python.org/issue3386 but a close cousin
+ platinc_dest = subst_path(platinc_dir, prefix, home_dir)
+ if platinc_dest:
+ # PyPy's stdinc_dir and prefix are relative to the original binary
+ # (traversing virtualenvs), whereas the platinc_dir is relative to
+ # the inner virtualenv and ignores the prefix argument.
+ # This seems more evolved than designed.
+ copyfile(platinc_dir, platinc_dest, symlink)
+
+ # pypy never uses exec_prefix, just ignore it
+ if sys.exec_prefix != prefix and not is_pypy:
+ if is_win:
+ exec_dir = join(sys.exec_prefix, 'lib')
+ elif is_jython:
+ exec_dir = join(sys.exec_prefix, 'Lib')
+ else:
+ exec_dir = join(sys.exec_prefix, 'lib', py_version)
+ for fn in os.listdir(exec_dir):
+ copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink)
+
+ if is_jython:
+ # Jython has either jython-dev.jar and javalib/ dir, or just
+ # jython.jar
+ for name in 'jython-dev.jar', 'javalib', 'jython.jar':
+ src = join(prefix, name)
+ if os.path.exists(src):
+ copyfile(src, join(home_dir, name), symlink)
+ # XXX: registry should always exist after Jython 2.5rc1
+ src = join(prefix, 'registry')
+ if os.path.exists(src):
+ copyfile(src, join(home_dir, 'registry'), symlink=False)
+ copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
+ symlink=False)
+
+ mkdir(bin_dir)
+ py_executable = join(bin_dir, os.path.basename(sys.executable))
+ if 'Python.framework' in prefix:
+ # OS X framework builds cause validation to break
+ # https://github.com/pypa/virtualenv/issues/322
+ if os.environ.get('__PYVENV_LAUNCHER__'):
+ del os.environ["__PYVENV_LAUNCHER__"]
+ if re.search(r'/Python(?:-32|-64)*$', py_executable):
+            # The name of the python executable is not quite what
+            # we want; rename it.
+ py_executable = os.path.join(
+ os.path.dirname(py_executable), 'python')
+
+ logger.notify('New %s executable in %s', expected_exe, py_executable)
+ pcbuild_dir = os.path.dirname(sys.executable)
+ pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
+ if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
+ logger.notify('Detected python running from build directory %s', pcbuild_dir)
+ logger.notify('Writing .pth file linking to build directory for *.pyd files')
+ writefile(pyd_pth, pcbuild_dir)
+ else:
+ pcbuild_dir = None
+ if os.path.exists(pyd_pth):
+ logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
+ os.unlink(pyd_pth)
+
+ if sys.executable != py_executable:
+ ## FIXME: could I just hard link?
+ executable = sys.executable
+ shutil.copyfile(executable, py_executable)
+ make_exe(py_executable)
+ if is_win or is_cygwin:
+ pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
+ if os.path.exists(pythonw):
+ logger.info('Also created pythonw.exe')
+ shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
+ python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
+ python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
+ if os.path.exists(python_d):
+ logger.info('Also created python_d.exe')
+ shutil.copyfile(python_d, python_d_dest)
+ elif os.path.exists(python_d_dest):
+ logger.info('Removed python_d.exe as it is no longer at the source')
+ os.unlink(python_d_dest)
+        # We need to copy the DLL to ensure that Windows loads the correct one.
+        # It may not exist if we are on Cygwin.
+ py_executable_dll = 'python%s%s.dll' % (
+ sys.version_info[0], sys.version_info[1])
+ py_executable_dll_d = 'python%s%s_d.dll' % (
+ sys.version_info[0], sys.version_info[1])
+ pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
+ pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
+ pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
+ if os.path.exists(pythondll):
+ logger.info('Also created %s' % py_executable_dll)
+ shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
+ if os.path.exists(pythondll_d):
+ logger.info('Also created %s' % py_executable_dll_d)
+ shutil.copyfile(pythondll_d, pythondll_d_dest)
+ elif os.path.exists(pythondll_d_dest):
+ logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
+ os.unlink(pythondll_d_dest)
+ if is_pypy:
+ # make a symlink python --> pypy-c
+ python_executable = os.path.join(os.path.dirname(py_executable), 'python')
+ if sys.platform in ('win32', 'cygwin'):
+ python_executable += '.exe'
+ logger.info('Also created executable %s' % python_executable)
+ copyfile(py_executable, python_executable, symlink)
+
+ if is_win:
+ for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll',
+ 'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll',
+ 'tcl85.dll', 'tk85.dll']:
+ src = join(prefix, name)
+ if os.path.exists(src):
+ copyfile(src, join(bin_dir, name), symlink)
+
+ for d in sys.path:
+ if d.endswith('lib_pypy'):
+ break
+ else:
+ logger.fatal('Could not find lib_pypy in sys.path')
+ raise SystemExit(3)
+ logger.info('Copying lib_pypy')
+ copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink)
+
+ if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
+ secondary_exe = os.path.join(os.path.dirname(py_executable),
+ expected_exe)
+ py_executable_ext = os.path.splitext(py_executable)[1]
+ if py_executable_ext.lower() == '.exe':
+ # python2.4 gives an extension of '.4' :P
+ secondary_exe += py_executable_ext
+ if os.path.exists(secondary_exe):
+ logger.warn('Not overwriting existing %s script %s (you must use %s)'
+ % (expected_exe, secondary_exe, py_executable))
+ else:
+ logger.notify('Also creating executable in %s' % secondary_exe)
+ shutil.copyfile(sys.executable, secondary_exe)
+ make_exe(secondary_exe)
+
+ if '.framework' in prefix:
+ if 'Python.framework' in prefix:
+ logger.debug('MacOSX Python framework detected')
+ # Make sure we use the embedded interpreter inside
+ # the framework, even if sys.executable points to
+ # the stub executable in ${sys.prefix}/bin
+ # See http://groups.google.com/group/python-virtualenv/
+ # browse_thread/thread/17cab2f85da75951
+ original_python = os.path.join(
+ prefix, 'Resources/Python.app/Contents/MacOS/Python')
+ if 'EPD' in prefix:
+ logger.debug('EPD framework detected')
+ original_python = os.path.join(prefix, 'bin/python')
+ shutil.copy(original_python, py_executable)
+
+ # Copy the framework's dylib into the virtual
+ # environment
+ virtual_lib = os.path.join(home_dir, '.Python')
+
+ if os.path.exists(virtual_lib):
+ os.unlink(virtual_lib)
+ copyfile(
+ os.path.join(prefix, 'Python'),
+ virtual_lib,
+ symlink)
+
+ # And then change the install_name of the copied python executable
+ try:
+ mach_o_change(py_executable,
+ os.path.join(prefix, 'Python'),
+ '@executable_path/../.Python')
+ except:
+ e = sys.exc_info()[1]
+ logger.warn("Could not call mach_o_change: %s. "
+ "Trying to call install_name_tool instead." % e)
+ try:
+ call_subprocess(
+ ["install_name_tool", "-change",
+ os.path.join(prefix, 'Python'),
+ '@executable_path/../.Python',
+ py_executable])
+ except:
+ logger.fatal("Could not call install_name_tool -- you must "
+ "have Apple's development tools installed")
+ raise
+
+ if not is_win:
+ # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
+ py_exe_version_major = 'python%s' % sys.version_info[0]
+ py_exe_version_major_minor = 'python%s.%s' % (
+ sys.version_info[0], sys.version_info[1])
+ py_exe_no_version = 'python'
+ required_symlinks = [ py_exe_no_version, py_exe_version_major,
+ py_exe_version_major_minor ]
+
+ py_executable_base = os.path.basename(py_executable)
+
+ if py_executable_base in required_symlinks:
+ # Don't try to symlink to yourself.
+ required_symlinks.remove(py_executable_base)
+
+ for pth in required_symlinks:
+ full_pth = join(bin_dir, pth)
+ if os.path.exists(full_pth):
+ os.unlink(full_pth)
+ if symlink:
+ os.symlink(py_executable_base, full_pth)
+ else:
+ copyfile(py_executable, full_pth, symlink)
+
+ if is_win and ' ' in py_executable:
+        # There's a bug with subprocess on Windows when the first
+        # argument has a space in it. To work around it, we quote
+        # the value:
+ py_executable = '"%s"' % py_executable
+ # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
+ cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
+ 'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
+ logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
+ try:
+ proc = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE)
+ proc_stdout, proc_stderr = proc.communicate()
+ except OSError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.EACCES:
+ logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
+ sys.exit(100)
+ else:
+ raise e
+
+ proc_stdout = proc_stdout.strip().decode("utf-8")
+ proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
+ norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
+ if hasattr(norm_home_dir, 'decode'):
+ norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
+ if proc_stdout != norm_home_dir:
+ logger.fatal(
+ 'ERROR: The executable %s is not functioning' % py_executable)
+ logger.fatal(
+ 'ERROR: It thinks sys.prefix is %r (should be %r)'
+ % (proc_stdout, norm_home_dir))
+ logger.fatal(
+ 'ERROR: virtualenv is not compatible with this system or executable')
+ if is_win:
+ logger.fatal(
+ 'Note: some Windows users have reported this error when they '
+ 'installed Python for "Only this user" or have multiple '
+ 'versions of Python installed. Copying the appropriate '
+ 'PythonXX.dll to the virtualenv Scripts/ directory may fix '
+ 'this problem.')
+ sys.exit(100)
+ else:
+ logger.info('Got sys.prefix result: %r' % proc_stdout)
+
+ pydistutils = os.path.expanduser('~/.pydistutils.cfg')
+ if os.path.exists(pydistutils):
+ logger.notify('Please make sure you remove any previous custom paths from '
+ 'your %s file.' % pydistutils)
+ ## FIXME: really this should be calculated earlier
+
+ fix_local_scheme(home_dir, symlink)
+
+ if site_packages:
+ if os.path.exists(site_packages_filename):
+ logger.info('Deleting %s' % site_packages_filename)
+ os.unlink(site_packages_filename)
+
+ return py_executable
+
+
+def install_activate(home_dir, bin_dir, prompt=None):
+ if is_win or is_jython and os._name == 'nt':
+ files = {
+ 'activate.bat': ACTIVATE_BAT,
+ 'deactivate.bat': DEACTIVATE_BAT,
+ 'activate.ps1': ACTIVATE_PS,
+ }
+
+ # MSYS needs paths of the form /c/path/to/file
+ drive, tail = os.path.splitdrive(home_dir.replace(os.sep, '/'))
+ home_dir_msys = (drive and "/%s%s" or "%s%s") % (drive[:1], tail)
+
+ # Run-time conditional enables (basic) Cygwin compatibility
+ home_dir_sh = ("""$(if [ "$OSTYPE" "==" "cygwin" ]; then cygpath -u '%s'; else echo '%s'; fi;)""" %
+ (home_dir, home_dir_msys))
+ files['activate'] = ACTIVATE_SH.replace('__VIRTUAL_ENV__', home_dir_sh)
+
+ else:
+ files = {'activate': ACTIVATE_SH}
+
+    # Supplying activate.fish in addition to, not instead of, the
+    # bash script support.
+ files['activate.fish'] = ACTIVATE_FISH
+
+ # same for csh/tcsh support...
+ files['activate.csh'] = ACTIVATE_CSH
+
+ files['activate_this.py'] = ACTIVATE_THIS
+
+ install_files(home_dir, bin_dir, prompt, files)
+
+def install_files(home_dir, bin_dir, prompt, files):
+ if hasattr(home_dir, 'decode'):
+ home_dir = home_dir.decode(sys.getfilesystemencoding())
+ vname = os.path.basename(home_dir)
+ for name, content in files.items():
+ content = content.replace('__VIRTUAL_PROMPT__', prompt or '')
+ content = content.replace('__VIRTUAL_WINPROMPT__', prompt or '(%s)' % vname)
+ content = content.replace('__VIRTUAL_ENV__', home_dir)
+ content = content.replace('__VIRTUAL_NAME__', vname)
+ content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
+ writefile(os.path.join(bin_dir, name), content)
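+
+# A hypothetical illustration (names and paths made up) of the substitution
+# above: for home_dir='/opt/venv', a template containing
+# 'cd __VIRTUAL_ENV__/__BIN_NAME__' is written to /opt/venv/bin/<name>
+# as 'cd /opt/venv/bin', with __VIRTUAL_NAME__ becoming 'venv'.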
+
+def install_python_config(home_dir, bin_dir, prompt=None):
+ if sys.platform == 'win32' or is_jython and os._name == 'nt':
+ files = {}
+ else:
+ files = {'python-config': PYTHON_CONFIG}
+ install_files(home_dir, bin_dir, prompt, files)
+ for name, content in files.items():
+ make_exe(os.path.join(bin_dir, name))
+
+def install_distutils(home_dir):
+ distutils_path = change_prefix(distutils.__path__[0], home_dir)
+ mkdir(distutils_path)
+ ## FIXME: maybe this prefix setting should only be put in place if
+ ## there's a local distutils.cfg with a prefix setting?
+ home_dir = os.path.abspath(home_dir)
+ ## FIXME: this is breaking things, removing for now:
+ #distutils_cfg = DISTUTILS_CFG + "\n[install]\nprefix=%s\n" % home_dir
+ writefile(os.path.join(distutils_path, '__init__.py'), DISTUTILS_INIT)
+ writefile(os.path.join(distutils_path, 'distutils.cfg'), DISTUTILS_CFG, overwrite=False)
+
+def fix_local_scheme(home_dir, symlink=True):
+ """
+ Platforms that use the "posix_local" install scheme (like Ubuntu with
+ Python 2.7) need to be given an additional "local" location, sigh.
+ """
+ try:
+ import sysconfig
+ except ImportError:
+ pass
+ else:
+ if sysconfig._get_default_scheme() == 'posix_local':
+ local_path = os.path.join(home_dir, 'local')
+ if not os.path.exists(local_path):
+ os.mkdir(local_path)
+ for subdir_name in os.listdir(home_dir):
+ if subdir_name == 'local':
+ continue
+ copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), \
+ os.path.join(local_path, subdir_name), symlink)
+
+def fix_lib64(lib_dir, symlink=True):
+ """
+ Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
+ instead of lib/pythonX.Y. If this is such a platform we'll just create a
+ symlink so lib64 points to lib
+ """
+ # PyPy's library path scheme is not affected by this.
+ # Return early or we will die on the following assert.
+ if is_pypy:
+ logger.debug('PyPy detected, skipping lib64 symlinking')
+ return
+ # Check we have a lib64 library path
+ if not [p for p in distutils.sysconfig.get_config_vars().values()
+ if isinstance(p, basestring) and 'lib64' in p]:
+ return
+
+ logger.debug('This system uses lib64; symlinking lib64 to lib')
+
+ assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
+ "Unexpected python lib dir: %r" % lib_dir)
+ lib_parent = os.path.dirname(lib_dir)
+ top_level = os.path.dirname(lib_parent)
+ lib_dir = os.path.join(top_level, 'lib')
+ lib64_link = os.path.join(top_level, 'lib64')
+ assert os.path.basename(lib_parent) == 'lib', (
+ "Unexpected parent dir: %r" % lib_parent)
+ if os.path.lexists(lib64_link):
+ return
+ if symlink:
+ os.symlink('lib', lib64_link)
+ else:
+ copyfile('lib', lib64_link)
+
+def resolve_interpreter(exe):
+ """
+ If the executable given isn't an absolute path, search $PATH for the interpreter
+ """
+ # If the "executable" is a version number, get the installed executable for
+ # that version
+ python_versions = get_installed_pythons()
+ if exe in python_versions:
+ exe = python_versions[exe]
+
+ if os.path.abspath(exe) != exe:
+ paths = os.environ.get('PATH', '').split(os.pathsep)
+ for path in paths:
+ if os.path.exists(join(path, exe)):
+ exe = join(path, exe)
+ break
+ if not os.path.exists(exe):
+ logger.fatal('The executable %s (from --python=%s) does not exist' % (exe, exe))
+ raise SystemExit(3)
+ if not is_executable(exe):
+ logger.fatal('The executable %s (from --python=%s) is not executable' % (exe, exe))
+ raise SystemExit(3)
+ return exe
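+
+# Illustrative only (the result depends on the local system): given
+# --python=python2.7 this would search $PATH and might return
+# '/usr/bin/python2.7'; a bare version string such as '2.7' is first
+# mapped through get_installed_pythons().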
+
+def is_executable(exe):
+ """Checks a file is executable"""
+ return os.access(exe, os.X_OK)
+
+############################################################
+## Relocating the environment:
+
+def make_environment_relocatable(home_dir):
+ """
+ Makes the already-existing environment use relative paths, and takes out
+ the #!-based environment selection in scripts.
+ """
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+ activate_this = os.path.join(bin_dir, 'activate_this.py')
+ if not os.path.exists(activate_this):
+ logger.fatal(
+ 'The environment doesn\'t have a file %s -- please re-run virtualenv '
+ 'on this environment to update it' % activate_this)
+ fixup_scripts(home_dir, bin_dir)
+ fixup_pth_and_egg_link(home_dir)
+ ## FIXME: need to fix up distutils.cfg
+
+OK_ABS_SCRIPTS = ['python', 'python%s' % sys.version[:3],
+ 'activate', 'activate.bat', 'activate_this.py',
+ 'activate.fish', 'activate.csh']
+
+def fixup_scripts(home_dir, bin_dir):
+ if is_win:
+ new_shebang_args = (
+ '%s /c' % os.path.normcase(os.environ.get('COMSPEC', 'cmd.exe')),
+ '', '.exe')
+ else:
+ new_shebang_args = ('/usr/bin/env', sys.version[:3], '')
+
+ # This is what we expect at the top of scripts:
+ shebang = '#!%s' % os.path.normcase(os.path.join(
+ os.path.abspath(bin_dir), 'python%s' % new_shebang_args[2]))
+ # This is what we'll put:
+ new_shebang = '#!%s python%s%s' % new_shebang_args
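+
+    # Hypothetical example (paths and version are assumptions): on a POSIX
+    # system with Python 2.7 and bin_dir '/opt/venv/bin', a script whose
+    # first line is '#!/opt/venv/bin/python' is rewritten by the loop below
+    # to start with '#!/usr/bin/env python2.7'.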
+
+ for filename in os.listdir(bin_dir):
+ filename = os.path.join(bin_dir, filename)
+ if not os.path.isfile(filename):
+ # ignore subdirs, e.g. .svn ones.
+ continue
+ lines = None
+ with open(filename, 'rb') as f:
+ try:
+ lines = f.read().decode('utf-8').splitlines()
+ except UnicodeDecodeError:
+ # This is probably a binary program instead
+ # of a script, so just ignore it.
+ continue
+ if not lines:
+ logger.warn('Script %s is an empty file' % filename)
+ continue
+
+ old_shebang = lines[0].strip()
+ old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:])
+
+ if not old_shebang.startswith(shebang):
+ if os.path.basename(filename) in OK_ABS_SCRIPTS:
+ logger.debug('Cannot make script %s relative' % filename)
+ elif lines[0].strip() == new_shebang:
+ logger.info('Script %s has already been made relative' % filename)
+ else:
+ logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)'
+ % (filename, shebang))
+ continue
+ logger.notify('Making script %s relative' % filename)
+ script = relative_script([new_shebang] + lines[1:])
+ with open(filename, 'wb') as f:
+ f.write('\n'.join(script).encode('utf-8'))
+
+
+def relative_script(lines):
+ "Return a script that'll work in a relocatable environment."
+ activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this"
+ # Find the last future statement in the script. If we insert the activation
+ # line before a future statement, Python will raise a SyntaxError.
+ activate_at = None
+ for idx, line in reversed(list(enumerate(lines))):
+ if line.split()[:3] == ['from', '__future__', 'import']:
+ activate_at = idx + 1
+ break
+ if activate_at is None:
+ # Activate after the shebang.
+ activate_at = 1
+ return lines[:activate_at] + ['', activate, ''] + lines[activate_at:]
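+
+# Illustrative only: a script such as
+#     ['#!python', 'from __future__ import division', 'print(1/2)']
+# comes back as
+#     ['#!python', 'from __future__ import division', '', <activate line>, '', 'print(1/2)']
+# so the activation snippet never precedes a __future__ import.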
+
+def fixup_pth_and_egg_link(home_dir, sys_path=None):
+ """Makes .pth and .egg-link files use relative paths"""
+ home_dir = os.path.normcase(os.path.abspath(home_dir))
+ if sys_path is None:
+ sys_path = sys.path
+ for path in sys_path:
+ if not path:
+ path = '.'
+ if not os.path.isdir(path):
+ continue
+ path = os.path.normcase(os.path.abspath(path))
+ if not path.startswith(home_dir):
+ logger.debug('Skipping system (non-environment) directory %s' % path)
+ continue
+ for filename in os.listdir(path):
+ filename = os.path.join(path, filename)
+ if filename.endswith('.pth'):
+ if not os.access(filename, os.W_OK):
+ logger.warn('Cannot write .pth file %s, skipping' % filename)
+ else:
+ fixup_pth_file(filename)
+ if filename.endswith('.egg-link'):
+ if not os.access(filename, os.W_OK):
+ logger.warn('Cannot write .egg-link file %s, skipping' % filename)
+ else:
+ fixup_egg_link(filename)
+
+def fixup_pth_file(filename):
+ lines = []
+ prev_lines = []
+ with open(filename) as f:
+ prev_lines = f.readlines()
+ for line in prev_lines:
+ line = line.strip()
+ if (not line or line.startswith('#') or line.startswith('import ')
+ or os.path.abspath(line) != line):
+ lines.append(line)
+ else:
+ new_value = make_relative_path(filename, line)
+ if line != new_value:
+ logger.debug('Rewriting path %s as %s (in %s)' % (line, new_value, filename))
+ lines.append(new_value)
+ if lines == prev_lines:
+ logger.info('No changes to .pth file %s' % filename)
+ return
+ logger.notify('Making paths in .pth file %s relative' % filename)
+ with open(filename, 'w') as f:
+ f.write('\n'.join(lines) + '\n')
+
+def fixup_egg_link(filename):
+ with open(filename) as f:
+ link = f.readline().strip()
+ if os.path.abspath(link) != link:
+ logger.debug('Link in %s already relative' % filename)
+ return
+ new_link = make_relative_path(filename, link)
+ logger.notify('Rewriting link %s in %s as %s' % (link, filename, new_link))
+ with open(filename, 'w') as f:
+ f.write(new_link)
+
+def make_relative_path(source, dest, dest_is_directory=True):
+ """
+ Make a filename relative, where the filename is dest, and it is
+ being referred to from the filename source.
+
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/usr/share/another-place/src/Directory')
+ '../another-place/src/Directory'
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/home/user/src/Directory')
+ '../../../home/user/src/Directory'
+ >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+ './'
+ """
+ source = os.path.dirname(source)
+ if not dest_is_directory:
+ dest_filename = os.path.basename(dest)
+ dest = os.path.dirname(dest)
+ dest = os.path.normpath(os.path.abspath(dest))
+ source = os.path.normpath(os.path.abspath(source))
+ dest_parts = dest.strip(os.path.sep).split(os.path.sep)
+ source_parts = source.strip(os.path.sep).split(os.path.sep)
+ while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
+ dest_parts.pop(0)
+ source_parts.pop(0)
+ full_parts = ['..']*len(source_parts) + dest_parts
+ if not dest_is_directory:
+ full_parts.append(dest_filename)
+ if not full_parts:
+ # Special case for the current directory (otherwise it'd be '')
+ return './'
+ return os.path.sep.join(full_parts)
+
+
+
+############################################################
+## Bootstrap script creation:
+
+def create_bootstrap_script(extra_text, python_version=''):
+ """
+ Creates a bootstrap script, which is like this script but with
+ extend_parser, adjust_options, and after_install hooks.
+
+ This returns a string that (written to disk of course) can be used
+ as a bootstrap script with your own customizations. The script
+ will be the standard virtualenv.py script, with your extra text
+ added (your extra text should be Python code).
+
+ If you include these functions, they will be called:
+
+ ``extend_parser(optparse_parser)``:
+ You can add or remove options from the parser here.
+
+ ``adjust_options(options, args)``:
+ You can change options here, or change the args (if you accept
+ different kinds of arguments, be sure you modify ``args`` so it is
+ only ``[DEST_DIR]``).
+
+    ``after_install(options, home_dir)``:
+        After everything is installed, this function is called. This
+ is probably the function you are most likely to use. An
+ example would be::
+
+ def after_install(options, home_dir):
+ subprocess.call([join(home_dir, 'bin', 'easy_install'),
+ 'MyPackage'])
+ subprocess.call([join(home_dir, 'bin', 'my-package-script'),
+ 'setup', home_dir])
+
+ This example immediately installs a package, and runs a setup
+ script from that package.
+
+ If you provide something like ``python_version='2.5'`` then the
+ script will start with ``#!/usr/bin/env python2.5`` instead of
+ ``#!/usr/bin/env python``. You can use this when the script must
+ be run with a particular Python version.
+ """
+ filename = __file__
+ if filename.endswith('.pyc'):
+ filename = filename[:-1]
+ with codecs.open(filename, 'r', encoding='utf-8') as f:
+ content = f.read()
+ py_exe = 'python%s' % python_version
+ content = (('#!/usr/bin/env %s\n' % py_exe)
+ + '## WARNING: This file is generated\n'
+ + content)
+ return content.replace('##EXT' 'END##', extra_text)
+
+##EXTEND##
+
+def convert(s):
+ b = base64.b64decode(s.encode('ascii'))
+ return zlib.decompress(b).decode('utf-8')
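+
+# The payloads below were produced with the inverse transform; a minimal
+# sketch (not part of this script) that round-trips through convert():
+#
+#     def embed(text):
+#         return base64.b64encode(zlib.compress(text.encode('utf-8'))).decode('ascii')
+#     assert convert(embed('print("hi")')) == 'print("hi")'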
+
+##file site.py
+SITE_PY = convert("""
+eJzFPf1z2zaWv/OvwMqToZTKdOJ0e3tO3RsncVrfuYm3yc7m1vXoKAmyWFMkS5C2tTd3f/u9DwAE
++CHb2+6cphNLJPDw8PC+8PAeOhqNTopCZkuxyZd1KoWScblYiyKu1kqs8lJU66Rc7hdxWW3h6eIm
+vpZKVLlQWxVhqygInv/GT/BcfF4nyqAA3+K6yjdxlSziNN2KZFPkZSWXYlmXSXYtkiypkjhN/g4t
+8iwSz387BsFZJmDmaSJLcStLBXCVyFfiYlut80yM6wLn/DL6Y/xqMhVqUSZFBQ1KjTNQZB1XQSbl
+EtCElrUCUiaV3FeFXCSrZGEb3uV1uhRFGi+k+K//4qlR0zAMVL6Rd2tZSpEBMgBTAqwC8YCvSSkW
++VJGQryRixgH4OcNsQKGNsU1U0jGLBdpnl3DnDK5kErF5VaM53VFgAhlscwBpwQwqJI0De7y8kZN
+YElpPe7gkYiZPfzJMHvAPHH8LucAjh+z4C9Zcj9l2MA9CK5aM9uUcpXcixjBwk95Lxcz/WycrMQy
+Wa2ABlk1wSYBI6BEmswPClqOb/UKfXdAWFmujGEMiShzY35JPaLgrBJxqoBt6wJppAjzd3KexBlQ
+I7uF4QAikDToG2eZqMqOQ7MTOQAocR0rkJKNEuNNnGTArD/GC0L7r0m2zO/UhCgAq6XEL7Wq3PmP
+ewgArR0CTANcLLOadZYmNzLdTgCBz4B9KVWdVigQy6SUiyovE6kIAKC2FfIekJ6KuJSahMyZRm6n
+RH+iSZLhwqKAocDjSyTJKrmuS5IwsUqAc4Er3n/8Sbw7fXN28kHzmAHGMnu9AZwBCi20gxMMIA5q
+VR6kOQh0FJzjHxEvlyhk1zg+4NU0OHhwpYMxzL2I2n2cBQey68XVw8AcK1AmNFZA/f4bukzVGujz
+Pw+sdxCcDFGFJs7f7tY5yGQWb6RYx8xfyBnBtxrOd1FRrV8DNyiEUwGpFC4OIpggPCCJS7NxnklR
+AIulSSYnAVBoTm39VQRW+JBn+7TWLU4ACGWQwUvn2YRGzCRMtAvrNeoL03hLM9NNArvOm7wkxQH8
+ny1IF6VxdkM4KmIo/jaX10mWIULIC0G4F9LA6iYBTlxG4pxakV4wjUTI2otbokjUwEvIdMCT8j7e
+FKmcsviibt2tRmgwWQmz1ilzHLSsSL3SqjVT7eW9w+hLi+sIzWpdSgBezz2hW+X5VMxBZxM2Rbxh
+8arucuKcoEeeqBPyBLWEvvgdKHqiVL2R9iXyCmgWYqhgladpfgckOwoCIfawkTHKPnPCW3gH/wJc
+/DeV1WIdBM5IFrAGhcgPgUIgYBJkprlaI+Fxm2bltpJJMtYUebmUJQ31OGIfMOKPbIxzDT7klTZq
+PF1c5XyTVKiS5tpkJmzxsrBi/fia5w3TAMutiGamaUOnDU4vLdbxXBqXZC5XKAl6kV7bZYcxg54x
+yRZXYsNWBt4BWWTCFqRfsaDSWVWSnACAwcIXZ0lRp9RIIYOJGAbaFAR/E6NJz7WzBOzNZjlAhcTm
+ewH2B3D7O4jR3ToB+iwAAmgY1FKwfPOkKtFBaPRR4Bt905/HB049W2nbxEOu4iTVVj7OgjN6eFqW
+JL4LWWCvqSaGghlmFbp21xnQEcV8NBoFgXGHtsp8zVVQldsjYAVhxpnN5nWChm82Q1Ovf6iARxHO
+wF43287CAw1hOn0AKjldVmW+wdd2bp9AmcBY2CPYExekZSQ7yB4nvkbyuSq9ME3RdjvsLFAPBRc/
+nb4/+3L6SRyLy0alTdv67ArGPM1iYGuyCMBUrWEbXQYtUfElqPvEezDvxBRgz6g3ia+Mqxp4F1D/
+XNb0Gqax8F4Gpx9O3pyfzv7y6fSn2aezz6eAINgZGezRlNE81uAwqgiEA7hyqSJtX4NOD3rw5uST
+fRDMEjX75mtgN3gyvpYVMHE5hhlPRbiJ7xUwaDilphPEsdMALHg4mYjvxOHz568OCVqxLbYADMyu
+0xQfzrRFnyXZKg8n1PgXdumPWUlp/+3y6OsrcXwswl/i2zgMwIdqmjJL/Eji9HlbSOhawZ9xriZB
+sJQrEL0biQI6fk5+8YQ7wJJAy1zb6V/yJDPvmSvdIUh/jKkH4DCbLdJYKWw8m4VABOrQ84EOETvX
+KHVj6Fhs3a4TjQp+SgkLm2GXKf7Tg2I8p36IBqPodjGNQFw3i1hJbkXTh36zGeqs2WysBwRhJokB
+h4vVUChME9RZZQJ+LXEe6rC5ylP8ifBRC5AA4tYKtSQukt46RbdxWks1diYFRByPW2RERZso4kdw
+UcZgiZulm0za1DQ8A82AfGkOWrRsUQ4/e+DvgLoymzjc6PHei2mGmP477zQIB3A5Q1T3SrWgsHYU
+F6cX4tWLw310Z2DPubTU8ZqjhU6yWtqHK1gtIw+MMPcy8uLSZYV6Fp8e7Ya5iezKdFlhpZe4lJv8
+Vi4BW2RgZ5XFT/QGduYwj0UMqwh6nfwBVqHGb4xxH8qzB2lB3wGotyEoZv3N0u9xMEBmChQRb6yJ
+1HrXz6awKPPbBJ2N+Va/BFsJyhItpnFsAmfhPCZDkwgaArzgDCl1J0NQh2XNDivhjSDRXiwbxRoR
+uHPU1Ff09SbL77IZ74SPUemOJ5Z1UbA082KDZgn2xHuwQoBkDhu7hmgMBVx+gbK1D8jD9GG6QFna
+WwAgMPSKtmsOLLPVoynyrhGHRRiT14KEt5ToL9yaIWirZYjhQKK3kX1gtARCgslZBWdVg2YylDXT
+DAZ2SOJz3XnEW1AfQIuKEZjNsYbGjQz9Lo9AOYtzVyk5/dAif/nyhdlGrSm+gojNcdLoQqzIWEbF
+FgxrAjrBeGQcrSE2uAPnFsDUSrOm2P8k8oK9MVjPCy3b4AfA7q6qiqODg7u7u0hHF/Ly+kCtDv74
+p2+++dML1onLJfEPTMeRFh1qiw7oHXq00bfGAn1nVq7Fj0nmcyPBGkvyysgVRfy+r5NlLo72J1Z/
+Ihc3Zhr/Na4MKJCZGZSpDLQdNRg9U/vPoldqJJ6RdbZtxxP2S7RJtVbMt7rQo8rBEwC/ZZHXaKob
+TlDiK7BusENfynl9HdrBPRtpfsBUUU7Hlgf2X14hBj5nGL4ypniGWoLYAi2+Q/qfmG1i8o60hkDy
+oonq7J63/VrMEHf5eHm3vqYjNGaGiULuQInwmzxaAG3jruTgR7u2aPcc19Z8PENgLH1gmFc7lmMU
+HMIF12LqSp3D1ejxgjTdsWoGBeOqRlDQ4CTOmdoaHNnIEEGid2M2+7ywugXQqRU5NPEBswrQwh2n
+Y+3arOB4QsgDx+IlPZHgIh913r3gpa3TlAI6LR71qMKAvYVGO50DX44NgKkYlX8ZcUuzTfnYWhRe
+gx5gOceAkMFWHWbCN64PONob9bBTx+oP9WYa94HARRpzLOpR0AnlYx6hVCBNxdjvOcTilrjdwXZa
+HGIqs0wk0mpAuNrKo1eodhqmVZKh7nUWKVqkOXjFVisSIzXvfWeB9kH4uM+YaQnUZGjI4TQ6Jm/P
+E8BQt8Pw2XWNgQY3DoMYbRJF1g3JtIZ/wK2g+AYFo4CWBM2CeayU+RP7HWTOzld/GWAPS2hkCLfp
+kBvSsRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bUclG6CESmQM8eCkJoB3Omlt8HBJxGe
+gJCEIuT7SslCfCVGsHxtUX2c7v5dudQEIcZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8B5fH
+DfS7bG6Y1gZdwFn3FbdFCjQyxWFGExfVK0MYN5j8h2OnRUMsM4hhKG8g70jHjDQJ7HJr0LDgBoy3
+5u2x9GM3YoF9x2GuDuXmHvZ/YZmoRa5Cipm0YxfuR3NFlzYW2/NkPoI/3gKMJlceJJnq+AVGWf6B
+QUIPetgH3ZsshkWWcXmXZCEpME2/Y39pOnhYUnpG7uATbacOYKIY8Tx4X4KA0NHnAYgTagLYlctQ
+abe/C3bnFEcWLncfeW7z5dGrqy5xp0MRHvvpX6rT+6qMFa5WyovGQoGr1TXgqHRhcnG21YeX+nAb
+twllrmAXKT5++iKQEBzXvYu3T5t6w/CIzYNz8j4GddBrD5KrNTtiF0AEtSIyykH4dI58PLJPndyO
+iT0ByJMYZseiGEiaT/4ROLsWCsbYX24zjKO1VQZ+4PU3X896IqMukt98PXpglBYx+sR+3PIE7cic
+VLBrtqWMU3I1nD4UVMwa1rFtignrc9r+aR676vE5NVo29t3fAj8GCobUJfgIL6YN2bpTxY/vTg3C
+03ZqB7DObtV89mgRYG+fz3+BHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8AdwQ
+cjRh0p2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+HFU6ve+B7Bge/r7p8IiBvTqMeMmpbbIZ4
+wQclhz1K9gnzfvqMf9dZP27mw4L1/zHLF/+cST5hKgaaNh4+rH5iuXbXAHuEeRpwO3e4hd2h+axy
+ZZw7VklKPEfd9VzcUboCxVbxpAigLNnv64GDUqoPvd/WZclH16QCC1nu43HsVGCmlvH8ek3Mnjj4
+ICvExDZbUKzayevJ+4Qv1NFnO5Ow2Tf0c+c6NzErmd0mJfQFhTsOf/j442nYb0IwjgudHm9FHu83
+INwnMG6oiRM+pQ9T6Cld/nH10d66+AQ1GQEmIqzJ1iVsJxBs4gj9a/BARMg7sOVjdtyhL9ZycTOT
+lDqAbIpdnaD4W3yNmNiMAj//S8UrSmKDmSzSGmnFjjdmH67qbEHnI5UE/0qnCmPqECUEcPhvlcbX
+Ykydlxh60txI0anbuNTeZ1HmmJwq6mR5cJ0shfy1jlPc1svVCnDBwyv9KuLhKQIl3nFOAyctKrmo
+y6TaAglileuzP0p/cBrOtzzRsYckH/MwATEh4kh8wmnjeybc0pDLBAf8Ew+cJO67sYOTrBDRc3if
+5TMcdUY5vlNGqnsuT4+D9gg5ABgBUJj/aKIjd/4bSa/cA0Zac5eoqCU9UrqRhpycMYQynmCkg3/T
+T58RXd4awPJ6GMvr3Vhet7G87sXy2sfyejeWrkjgwtqglZGEvsBV+1ijN9/GjTnxMKfxYs3tMPcT
+czwBoijMBtvIFKdAe5EtPt8jIKS2nQNnetjkzyScVFrmHALXIJH78RBLb+ZN8rrTmbJxdGeeinFn
+h3KI/L4HUUSpYnPqzvK2jKs48uTiOs3nILYW3WkDYCra6UQcK81uZ3OO7rYs1ejiPz//8PEDNkdQ
+I5PeQN1wEdGw4FTGz+PyWnWlqdn8FcCO1NJPxKFuGuDeIyNrPMoe//OOMjyQccQdZSjkogAPgLK6
+bDM39ykMW891kpR+zkzOh03HYpRVo2ZSA0Q6ubh4d/L5ZEQhv9H/jlyBMbT1pcPFx7SwDbr+m9vc
+Uhz7gFDr2FZj/Nw5ebRuOOJhG2vAdjzf1oPDxxjs3jCBP8t/KqVgSYBQkQ7+PoVQj945/Kb9UIc+
+hhE7yX/uyRo7K/adI3uOi+KIft+xQ3sA/7AT9xgzIIB2ocZmZ9DslVtK35rXHRR1gD7S1/vNe832
+1qu9k/EpaifR4wA6lLXNht0/75yGjZ6S1ZvT788+nJ+9uTj5/IPjAqIr9/HTwaE4/fGLoPwQNGDs
+E8WYGlFhJhIYFrfQSSxz+K/GyM+yrjhIDL3enZ/rk5oNlrpg7jPanAiecxqThcZBM45C24c6/wgx
+SvUGyakponQdqjnC/dKG61lUrvOjqVRpjs5qrbdeulbM1JTRuXYE0geNXVIwCE4xg1eUxV6ZXWHJ
+J4C6zqoHKW2jbWJISkHBTrqAc/5lTle8QCl1hidNZ63oL0MX1/AqUkWawE7udWhlSXfD9JiGcfRD
+e8DNePVpQKc7jKwb8qwHsUCr9Trkuen+k4bRfq0Bw4bB3sG8M0npIZSBjcltIsRGfJITynv4apde
+r4GCBcODvgoX0TBdArOPYXMt1glsIIAn12B9cZ8AEFor4R8IHDnRAZljdkb4drPc/3OoCeK3/vnn
+nuZVme7/TRSwCxKcShT2ENNt/A42PpGMxOnH95OQkaPUXPHnGssDwCGhAKgj7ZS/xCfos7GS6Urn
+l/j6AF9oP4Fet7qXsih1937XOEQJeKbG5DU8U4Z+IaZ7WdhTnMqkBRorHyxmWEHopiGYz574tJZp
+qvPdz96dn4LviMUYKEF87nYKw3G8BI/QdfIdVzi2QOEBO7wukY1LdGEpyWIZec16g9YoctTby8uw
+60SB4W6vThS4jBPloj3GaTMsU04QISvDWphlZdZutUEKu22I4igzzBKzi5ISWH2eAF6mpzFviWCv
+hKUeJgLPp8hJVpmMxTRZgB4FlQsKdQpCgsTFekbivDzjGHheKlMGBQ+LbZlcrys83YDOEZVgYPMf
+T76cn32gsoTDV43X3cOcU9oJTDmJ5BhTBDHaAV/ctD/kqtmsj2f1K4SB2gf+tF9xdsoxD9Dpx4FF
+/NN+xXVox85OkGcACqou2uKBGwCnW5/cNLLAuNp9MH7cFMAGMx8MxSKx7EUnerjz63KibdkyJRT3
+MS+fcICzKmxKmu7spqS1P3qOqwLPuZbj/kbwtk+2zGcOXW86b4aS39xPRwqxJBYw6rb2xzDZYZ2m
+ejoOsw1xC21rtY39OXNipU67RYaiDEQcu50nLpP1K2HdnDnQS6PuABPfanSNJPaq8tHP2Uh7GB4m
+ltidfYrpSGUsZAQwkiF17U8NPhRaBFAglP07diR3Onl+6M3RsQYPz1HrLrCNP4Ai1Lm4VOORl8CJ
+8OVXdhz5FaGFevRIhI6nkskst3li+Llbo1f50p9jrwxQEBPFroyzazlmWFMD8yuf2AMhWNK2Hqkv
+k6s+wyLOwDm9H+Dwrlz0H5wY1FqM0Gl3I7dtdeSTBxv0loLsJJgPvozvQPcXdTXmlRw4h+6tpRuG
++jBEzD6Epvr0fRxiOObXcGB9GsC91NCw0MP7deDsktfGOLLWPraqmkL7QnuwixK2ZpWiYxmnONH4
+otYLaAzucWPyR/apThSyv3vqxJyYkAXKg7sgvbmNdINWOGHE5UpcOZpQOnxTTaPfLeWtTMFogJEd
+Y7XDL7baYRLZcEpvHthvxu5ie7Htx43eNJgdmXIMRIAKMXoDPbsQanDAFf5Z70Ti7Iac47d/PZuK
+tx9+gn/fyI9gQbHmcSr+BqOLt3kJ20ou2qXbFLCAo+L9Yl4rLIwkaHRCwRdPoLd24ZEXT0N0ZYlf
+UmIVpMBk2nLDt50AijxBKmRv3ANTLwG/TUFXywk1DmLfWoz0S6TBcI0L1oUc6JbRutqkaCac4Eiz
+iJej87O3px8+nUbVPTK2+Tlygid+HhZORx8Nl3gMNhX2yaLGJ1eOv/yDTIsed1nvNU29DO41RQjb
+kcLuL/kmjdjuKeISAwai2C7zRYQtgdO5RK+6A/954mwrH7TvnnFFWOOJPjxrnHh8DNQQP7f1zwga
+Uh89J+pJCMVzrBXjx9Go3wJPBUW04c/zm7ulGxDXRT80wTamzazHfnerAtdMZw3PchLhdWyXwdSB
+pkmsNvOFWx/4MRP6IhRQbnS8IVdxnVZCZrCVor093UgBCt4t6WMJYVZhK0Z1bhSdSe/irXJyj2Il
+RjjqiIrq8RyGAoWw9f4xvmEzgLWGouYSaIBOiNK2KXe6qnqxZgnmnRBRryff4C7JXrnJL5rCPChv
+jBeN/wrzRG+RMbqWlZ4/PxhPLl82CQ4UjF54Bb2LAoydyyZ7oDGL58+fj8S/Pez0MCpRmuc34I0B
+7F5n5ZxeDxhsPTm7Wl2H3ryJgB8Xa3kJD64oaG6f1xlFJHd0pQWR9q+BEeLahJYZTfuWOeZYXcnn
+y9yCz6m0wfhLltB1RxhRkqhs9a1RGG0y0kQsCYohjNUiSUKOTsB6bPMaa/Ewuqj5Rd4DxycIZopv
+8WCMd9hrdCwpb9Zyj0XnWIwI8IhSyng0KmamajTAc3ax1WjOzrKkaspIXrhnpvoKgMreYqT5SsR3
+KBlmHi1iOGWdHqs2jnW+k0W9jUq+uHTjjK1Z8uuHcAfWBknLVyuDKTw0i7TIZbkw5hRXLFkklQPG
+tEM43JkubyLrEwU9KI1AvZNVWFqJtm//YNfFxfQjHR/vm5F01lBlL8TimFCctfIKo6gZn6JPlpCW
+b82XCYzygaLZ2hPwxhJ/0LFUrCHw7u1wyxnrTN/HwWkbzSUdAIfugLIK0rKjpyOci8csfGbagVs0
+8EM7c8LtNimrOk5n+tqHGfppM3uervG0ZXA7CzyttwK+fQ6O777O2AfHwSTXID0x49ZUZByLlY5M
+RG5lmV+EVeTo5R2yrwQ+BVJmOTP10CZ2dGnZ1Raa6gRHR8UjqK9M8dKAQ26qZjoFJy7mU0pvMuUO
+A86zn29JV1eI78T41VQctnY+i2KLNzkBss+Woe+KUTeYihMMMHNs34shvjsW45dT8ccd0KOBAY4O
+3RHa+9gWhEEgr66eTMY0mRPZwr4U9of76hxG0PSM4+SqTf4umb4lKv1ri0pcIagTlV+2E5VbYw/u
+WzsfH8lwA4pjlcjl/jOFJNRIN7p5mMEJPyyg37M5Wrp2vKmoocK5OWxG7ho96GhE4zbbQUxRulZf
+XL+LuoYNp71zwKTJtFIV7S1zmMao0WsRFQDM+o7S8Bve7QLvNSlc/2zwiFUXAViwPREEXenJB2ZN
+w0ZQH3QEn6QBHmAUEeJhaqMoXMl6goiEdA8OMdFXrUNsh+N/d+bhEoOho9AOlt98vQtPVzB7izp6
+FnR3pYUnsra8ollu8+kPzHmM0tf1NwmMA6URHXBWzVWV5GYeYfYy30GT2yzmDV4GSSfTaBJT6bpN
+vJXmW7/Qj6HYASWTwVqAJ1Wv8CD5lu62PFGU9IZX1Hx9+HJqKoMZkJ7Aq+jVV/oKSOpmLj/wfeyp
+3rvBS93vMPoXB1hS+b3tq85uhqZ13LoLyh8spOjZJJpZOjSG6eE6kGbNYoF3JjbEZN/aXgDyHryd
+Ofg55vLTHBw22JBGfei6GqOR3iHVNiDAD5uMIcl5VNdGkSLSu4RtSHnuUpxPFgXdq9+CYAgBOX8d
+8xt0BeviyIbYjE3Bk8+xm82Jn+qmt+6M7Qka2+om3DV97r9r7rpFYGdukhk6c/frS10a6L7DVrSP
+Bhze0IR4VIlEo/H7jYlrB6Y6h6Y/Qq8/SH63E850wKw8BMZk7GC8n9hTY2/M/iZeuN8xIWyfL2R2
+y4l7nY3WtDs2o83xj/EUOPkFn9sbBiijaak5kPdLdMPejHNkZ/L6Ws1ivN1xRptsyufq7J7Mtu09
+Xc4nY7U1uy28tAhAGG7Smbducj0wBuhKvmWa06Gc22kEDU1Jw04WskqWbBL01g7ARRwxpf4mEM9p
+xKNUYqBb1WVRwm54pO8i5jydvtTmBqgJ4G1idWNQNz2m+mpaUqyUHGZKkDlO20ryASKwEe+YhtnM
+vgNeedFcs5BMLTPIrN7IMq6aK4b8jIAENl3NCFR0jovrhOcaqWxxiYtYYnnDQQoDZPb7V7Cx9DbV
+O+5VmFht93h2oh465PuUKxscY2S4OLm31wu611ot6Wpr1zu0zRqus1cqwTKYu/JIR+pYGb/V93fx
+HbMcyUf/0uEfkHe38tLPQrfqjL1bi4bzzFUI3Qub8MYAMs599zB2OKB742JrA2zH9/WFZZSOhznQ
+2FJR++S9CqcZbdJEkDBh9IEIkl8U8MQIkgf/kREkfWsmGBqNj9YDvWUCD4SaWD24V1A2jAB9ZkAk
+PMBuXWBoTOXYTbovcpXcj+yF0qwrnUo+Yx6QI7t3kxEIvmpSuRnK3lVwuyJIvnTR4+/PP745OSda
+zC5O3v7HyfeUlIXHJS1b9egQW5bvM7X3vfRvN9ymE2n6Bm+w7bkhlmuYNITO+04OQg+E/nq1vgVt
+KzL39VCHTt1PtxMgvnvaLahDKrsXcscv0zUmbvpMK0870E85qdb8cjITzCNzUsfi0JzEmffN4YmW
+0U5seWjhnPTWrjrR/qq+BXQg7j2xSda0Anhmgvxlj0xMxYwNzLOD0v7ffFBmOFYbmht0QAoX0rnJ
+kS5xZFCV//8TKUHZxbi3Y0dxau/mpnZ8PKTspfN49ruQkSGIV+436s7PFfalTAeoEASs8PQ9hYyI
+0X/6QNWmHzxT4nKfCov3Udlc2V+4Ztq5/WuCSQaVve9LcYISH7NC41WduokDtk+nAzl9dBqVr5xK
+FtB8B0DnRjwVsDf6S6wQ51sRwsZRu2SYHEt01Jf1Ocij3XSwN7R6IfaHyk7dskshXg43XLYqO3WP
+Q+6hHuihalPc51hgzNIcqicV3xFkPs4UdMGX53zgGbre9sPX28uXR/ZwAfkdXzuKhLLJRo5hv3Sy
+MXdeKul0J2Ypp5Suh3s1JySsW1w5UNknGNrbdEpSBvY/Js+BIY289/0hM9PDu3p/1MbUst4RTEmM
+n6kJTcsp4tG42yeT7nQbtdUFwgVJjwDSUYEAC8F0dKOTILrlLO/xC70bnNd0Ha97whQ6UkHJYj5H
+cA/j+zX4tbtTIfGjujOKpj83aHOgXnIQbvYduNXEC4UMm4T21Bs+GHABuCa7v//LR/TvpjHa7oe7
+/Grb6lVvHSD7spj5iplBLRKZxxEYGdCbY9LWWC5hBB2voWno6DJUMzfkC3T8KJsWL9umDQY5szPt
+AVijEPwfucjncQ==
+""")
+
+##file activate.sh
+ACTIVATE_SH = convert("""
+eJytVd9v2kAMfs9fYQLq2m4MscdNVKMqEkgtVIQxbeuUHolpTgsXdHehpT/+9/mSEBJS2MOaB0ji
+z77P9menDpOAK5jzEGERKw0zhFihD/dcB2CrKJYewoyLFvM0XzGNNpzOZbSAGVPBqVWHdRSDx4SI
+NMhYANfgc4meDteW5ePGC45P4MkCumKhUENzDsu1H3lw1vJx1RJxGMKns6O2lWDqINGgotAHFCsu
+I7FAoWHFJGezEFWGqsEvaD5C42naHb93X+A3+elYCgVaxgh8DmQAys9HL2SS0mIaWBgm7mTN/O3G
+kzu6vHCng/HkW/fSve5O+hTOpnhfQAcoEry5jKVjNypoO0fgwzKSOgHm79KUK06Jfc7/RebHpD8a
+9kdXvT2UcnuFWG6p0stNB0mWUUQ1q3uiGRVEMfXHR03dTuQATPjwqIIPcB9wL4CArRAY/ZHJixYL
+Y9YBtcAoLQtFevOoI9QaHcEdMSAB0d08kuZhyUiSmav6CPCdVBnFOjNrLu6yMCWgKRA0TInBC5i4
+QwX3JG/mm581GKnSsSSxJTFHf9MAKr8w5T/vOv1mUurn5/zlT6fvTntjZzAaNl9rQ5JkU5KIc0GX
+inagwU57T2eddqWlTrvaS6d9sImZeUMkhWysveF0m37NcGub9Dpgi0j4qGiOzATjDr06OBjOYQOo
+7RBoGtNm9Denv1i0LVI7lxJDXLHSSBeWRflsyyqw7diuW3h0XdvK6lBMyaoMG1UyHdTsoYBuue75
+YOgOu1c91/2cwYpznPPeDoQpGL2xSm09NKp7BsvQ2hnT3aMs07lUnskpxewvBk73/LLnXo9HV9eT
+ijB3hWBO2ygoiWg/bKuZxqCCQq0DD3vkWIVvI2KosIw+vqW1gIItEG5KJb+xb09g65ktwYKgTc51
+uGJ/EFQs0ayEWLCQM5V9N4g+1+8UbXOJzF8bqhKtIqIwicWvzNFROZJlpfD8A7Vc044R0FxkcezG
+VzsV75usvTdYef+57v5n1b225qhXfwEmxHEs
+""")
+
+##file activate.fish
+ACTIVATE_FISH = convert("""
+eJyFVVFv0zAQfs+vONJO3RDNxCsSQoMVrdK2Vl03CSHkesllMXLsYDvZivjx2GmTOG0YfWhV+7u7
+73z33Y1gnTENKeMIeakNPCKUGhP7xcQTbCJ4ZOKcxoZV1GCUMp1t4O0zMxkTQEGVQjicO4dTyIwp
+Ppyfu386Q86jWOZwBhq1ZlK8jYIRXEoQ0jhDYAYSpjA2fBsFQVoKG0UKSLAJB9MEJrMXi6uYMiXl
+KCrIZYJARQIKTakEGAkmQ+tU5ZSDRTAlRY7CRJMA7GdkgRoNSJ74t1BRxegjR12jWAoGbfpTAeGY
+LK4vycN8tb6/uCbLi/VVWGPcx3maPr2AO4VjYB+HMAxAkQT/i/ptfbW4vVrczAZit3eHDNqL13n0
+Ya+w+Tq/uyLL1eJmuSaLh9lqNb/0+IzgznqnAjAvzBa4jG0BNmNXfdJUkxTU2I6xRaKcy+e6VApz
+WVmoTGFTgwslrYdN03ONrbbMN1E/FQ7H7gOP0UxRjV67TPRBjF3naCMV1mSkYk9MUN7F8cODZzsE
+iIHYviIe6n8WeGQxWKuhl+9Xa49uijq7fehXMRxT9VR9f/8jhDcfYSKkSOyxKp22cNIrIk+nzd2b
+Yc7FNpHx8FUn15ZfzXEE98JxZEohx4r6kosCT+R9ZkHQtLmXGYSEeH8JCTvYkcRgXAutp9Rw7Jmf
+E/J5fktuL25m1tMe3vLdjDt9bNxr2sMo2P3C9BccqGeYhqfQITz6XurXaqdf99LF1mT2YJrvzqCu
+5w7dKvV3PzNyOb+7+Hw923dOuB+AX2SxrZs9Lm0xbCH6kmhjUyuWw+7cC7DX8367H3VzDz6oBtty
+tMIeobE21JT6HaRS+TbaoqhbE7rgdGs3xtE4cOF3xo0TfxwsdyRlhUoxuzes18r+Jp88zDx1G+kd
+/HTrr1BY2CeuyfnbQtAcu9j+pOw6cy9X0k3IuoyKCZPC5ESf6MkgHE5tLiSW3Oa+W2NnrQfkGv/h
+7tR5PNFnMBlw4B9NJTxnzKA9fLTT0aXSb5vw7FUKzcTZPddqYHi2T9/axJmEEN3qHncVCuEPaFmq
+uEtpcBj2Z1wjrqGReJBHrY6/go21NA==
+""")
+
+##file activate.csh
+ACTIVATE_CSH = convert("""
+eJx1U2FP2zAQ/e5f8TAV3Soo+0zXbYUiDQkKQgVp2ibjJNfFUuIg22nVf885SVFLO3+I7Lt3fr6X
+d8eY58ZjYQpCWfuAhFB7yrAyIYf0Ve1SQmLsuU6DWepAw9TnEoOFq0rwdjAUx/hV1Ui1tVWAqy1M
+QGYcpaFYx+yVI67LkKwx1UuTEaYGl4X2Bl+zJpAlP/6V2hTDtCq/DYXQhdEeGW040Q/Eb+t9V/e3
+U/V88zh/mtyqh8n8J47G+IKTE3gKZJdoYrK3h5MRU1tGYS83gqNc+3yEgyyP93cP820evHLvr2H8
+kaYB/peoyY7aVHzpJnE9e+6I5Z+ji4GMTNJWNuOQq6MA1N25p8pW9HWdVWlfsNpPDbdxjgpaahuw
+1M7opCA/FFu1uwxC7L8KUqmto1KyQe3rx0I0Eovdf7BVe67U5c1MzSZ310pddGheZoFPWyytRkzU
+aCA/I+RkBXhFXr5aWV0SxjhUI6jwdAj8kmhPzX7nTfJFkM3MImp2VdVFFq1vLHSU5szYQK4Ri+Jd
+xlW2JBtOGcyYVW7SnB3v6RS91g3gKapZ0oWxbHVteYIIq3iv7QeuSrUj6KSqQ+yqsxDj1ivNQxKF
+YON10Q+NH/ARS95i5Tuqq2Vxfvc23f/FO6zrtXXmJr+ZtMY9/A15ZXFWtmch2rEQ4g1ryVHH
+""")
+
+##file activate.bat
+ACTIVATE_BAT = convert("""
+eJx9Ul9LhEAQfxf8DoOclI/dYyFkaCmcq4gZQTBUrincuZFbff12T133TM+nnd35/Zvxlr7XDFhV
+mUZHOVhFlOWP3g4DUriIWoVomYZpNBWUtGpaWgImO191pFkSpzlcmgaI70jVX7n2Qp8tuByg+46O
+CMHbMq64T+nmlJt082D1T44muCDk2prgEHF4mdI9RaS/QwSt3zSyIAaftRccvqVTBziD1x/WlPD5
+xd729NDBb8Nr4DU9QNMKsJeH9pkhPedhQsIkDuCDCa6A+NF9IevVFAohkqizdHetg/tkWvPoftWJ
+MCqnOxv7/x7Np6yv9P2Ker5dmX8yNyCkkWnbZy3N5LarczlqL8htx2EM9rQ/2H5BvIsIEi8OEG8U
++g8CsNTr
+""")
+
+##file deactivate.bat
+DEACTIVATE_BAT = convert("""
+eJyFkN0KgkAUhO8F32EQpHqFQEjQUPAPMaErqVxzId3IrV6/XST/UDx3c86c4WMO5FYysKJQFVVp
+CEfqxsnJ9DI7SA25i20fFqs3HO+GYLsDZ7h8GM3xfLHrg1QNvpSX4CWpQGvokZk4uqrQAjXjyElB
+a5IjCz0r+2dHcehHCe5MZNmB5R7TdqMqECMptHZh6DN/utb7Zs6Cej8OXYE5J04YOKFvD4GkHuJ0
+pilSd1jG6n87tDZ+BUwUOepI6CGSkFMYWf0ihvT33Qj1A+tCkSI=
+""")
+
+##file activate.ps1
+ACTIVATE_PS = convert("""
+eJylWdmO41hyfW+g/0FTU7C7IXeJIqmtB/3AnZRIStxF2kaBm7gv4ipyMF/mB3+Sf8GXVGVl1tLT
+43ECSqR4b5wbETeWE8z/+a///vNCDaN6cYtSf5G1dbNw/IVXNIu6aCvX9xa3qsgWl0IJ/7IYinbh
+2nkOVqs2X0TNjz/8eeFFle826fBhQRaLBkD9uviw+LCy3Sbq7Mb/UNbrH3+YNtLcVaB+Xbipb+eL
+tly0eVsD/M6u6g8//vC+dquobH5VWU75eMFUdvHb4n02RHlXuHYTFfmHbHCLLLNz70NpN+GrBI4p
+1EeSk4FAXaZR88u0vPip8usi7fznt3fvP+OuPnx49/Pil4td+XnzigIAPoqYQH2J8v4z+C+8b98m
+Q25t7k76LIK0cOz0V89/MXXx0+Lf6z5q3PA/F+/FIif9uqnaadFf/PzXSXYBfqIb2NeApecJwPzI
+dlL/149nnvyoc7KqYfzTAT8v/voUmX7e+3n364tffl/oVaDyswKY/7J18e6bve8Wv9RuUfqfLHmK
+/u139Hwx+9ePRep97KKqae30YwmCo2y+0vTz1k+rv7159B3pb1SOGj97Pe8/flfkC1Vn/7xYR4n6
+lypNEGDDV5f7lcjil3S+4++p881Wv6qKyn5GQg1yJwcp4BZ5E+Wt/z1P/umbiHir4J8Xip/eFt6n
+9T/9gU9eY+7zUX97Jlmb136ziKrKT/3OzpvP8VX/+MObSP0lL3LvVZlJ9v1b8357jXyw8rXxYPXN
+11n4UzJ8G8S/vUbuJ6RPj999DbtS5kys//JusXwrNLnvT99cFlBNwXCe+niRz8JF/ezNr9Pze+H6
+18W7d5PPvozW7+387Zto/v4pL8BvbxTzvIW9KCv/Fj0WzVQb/YXbVlPZWTz3/9vCaRtQbPN/Bb+j
+2rUrDxTVD68gfQXu/ZewAFX53U/vf/rD2P3558W7+W79Po1y/xXoX/6RFHyNIoVjgAG4H0RTcAe5
+3bSVv3DSwk2mZYHjFB8zj6fC4sLOFTHJJQrwzFYJgso0ApOoBzFiRzzQKjIQCCbQMIFJGCKqGUyS
+8AkjiF2wTwmMEbcEUvq8Nj+X0f4YcCQmYRiOY7eRbAJDqzm1chOoNstbJ8oTBhZQ2NcfgaB6QjLp
+U4+SWFjQGCZpyqby8V4JkPGs9eH1BscXIrTG24QxXLIgCLYNsIlxSYLA6SjAeg7HAg4/kpiIB8k9
+TCLm0EM4gKIxEj8IUj2dQeqSxEwYVH88qiRlCLjEYGuNIkJB1BA5dHOZdGAoUFk54WOqEojkuf4Q
+Ig3WY+96TDlKLicMC04h0+gDCdYHj0kz2xBDj9ECDU5zJ0tba6RKgXBneewhBG/xJ5m5FX+WSzsn
+wnHvKhcOciw9NunZ0BUF0n0IJAcJMdcLqgQb0zP19dl8t9PzmMBjkuIF7KkvHgqEovUPOsY0PBB1
+HCtUUhch83qEJPjQcNQDsgj0cRqx2ZbnnlrlUjE1EX2wFJyyDa/0GLrmKDEFepdWlsbmVU45Wiwt
+eFM6mfs4kxg8yc4YmKDy67dniLV5FUeO5AKNPZaOQQ++gh+dXE7dbJ1aTDr7S4WPd8sQoQkDyODg
+XnEu/voeKRAXZxB/e2xaJ4LTFLPYEJ15Ltb87I45l+P6OGFA5F5Ix8A4ORV6M1NH1uMuZMnmFtLi
+VpYed+gSq9JDBoHc05J4OhKetrk1p0LYiKipxLMe3tYS7c5V7O1KcPU8BJGdLfcswhoFCSGQqJ8f
+ThyQKy5EWFtHVuNhvTnkeTc8JMpN5li3buURh0+3ZGuzdwM55kon+8urbintjdQJf9U1D0ah+hNh
+i1XNu4fSKbTC5AikGEaj0CYM1dpuli7EoqUt7929f1plxGGNZnixFSFP2qzhlZMonu2bB9OWSqYx
+VuHKWNGJI8kqUhMTRtk0vJ5ycZ60JlodlmN3D9XiEj/cG2lSt+WV3OtMgt1Tf4/Z+1BaCus740kx
+Nvj78+jMd9tq537Xz/mNFyiHb0HdwHytJ3uQUzKkYhK7wjGtx3oKX43YeYoJVtqDSrCnQFzMemCS
+2bPSvP+M4yZFi/iZhAjL4UOeMfa7Ex8HKBqw4umOCPh+imOP6yVTwG2MplB+wtg97olEtykNZ6wg
+FJBNXSTJ3g0CCTEEMdUjjcaBDjhJ9fyINXgQVHhA0bjk9lhhhhOGzcqQSxYdj3iIN2xGEOODx4qj
+Q2xikJudC1ujCVOtiRwhga5nPdhe1gSa649bLJ0wCuLMcEYIeSy25YcDQHJb95nfowv3rQnin0fE
+zIXFkM/EwSGxvCCMgEPNcDp/wph1gMEa8Xd1qAWOwWZ/KhjlqzgisBpDDDXz9Cmov46GYBKHC4zZ
+84HJnXoTxyWNBbXV4LK/r+OEwSN45zBp7Cub3gIYIvYlxon5BzDgtPUYfXAMPbENGrI+YVGSeTQ5
+i8NMB5UCcC+YRGIBhgs0xhAGwSgYwywpbu4vpCSTdEKrsy8osXMUnHQYenQHbOBofLCNNTg3CRRj
+A1nXY2MZcjnXI+oQ2Zk+561H4CqoW61tbPKv65Y7fqc3TDUF9CA3F3gM0e0JQ0TPADJFJXVzphpr
+2FzwAY8apGCju1QGOiUVO5KV6/hKbtgVN6hRVwpRYtu+/OC6w2bCcGzZQ8NCc4WejNEjFxOIgR3o
+QqR1ZK0IaUxZ9nbL7GWJIjxBARUhAMnYrq/S0tVOjzlOSYRqeIZxaSaOBX5HSR3MFekOXVdUPbjX
+nru61fDwI8HRYPUS7a6Inzq9JLjokU6P6OzT4UCH+Nha+JrU4VqEo4rRHQJhVuulAnvFhYz5NWFT
+aS/bKxW6J3e46y4PLagGrCDKcq5B9EmP+s1QMCaxHNeM7deGEV3WPn3CeKjndlygdPyoIcNaL3dd
+bdqPs47frcZ3aNWQ2Tk+rjFR01Ul4XnQQB6CSKA+cZusD0CP3F2Ph0e78baybgioepG12luSpFXi
+bHbI6rGLDsGEodMObDG7uyxfCeU+1OiyXYk8fnGu0SpbpRoEuWdSUlNi5bd9nBxYqZGrq7Qa7zV+
+VLazLcelzzP9+n6+xUtWx9OVJZW3gk92XGGkstTJ/LreFVFF2feLpXGGuQqq6/1QbWPyhJXIXIMs
+7ySVlzMYqoPmnmrobbeauMIxrCr3sM+qs5HpwmmFt7SM3aRNQWpCrmeAXY28EJ9uc966urGKBL9H
+18MtDE5OX97GDOHxam11y5LCAzcwtkUu8wqWI1dWgHyxGZdY8mC3lXzbzncLZ2bIUxTD2yW7l9eY
+gBUo7uj02ZI3ydUViL7oAVFag37JsjYG8o4Csc5R7SeONGF8yZP+7xxi9scnHvHPcogJ44VH/LMc
+Yu6Vn3jEzCFw9Eqq1ENQAW8aqbUwSiAqi+nZ+OkZJKpBL66Bj8z+ATqb/8qDIJUeNRTwrI0YrVmb
+9FArKVEbCWUNSi8ipfVv+STgkpSsUhcBg541eeKLoBpLGaiHTNoK0r4nn3tZqrcIULtq20Df+FVQ
+Sa0MnWxTugMuzD410sQygF4qdntbswiJMqjs014Irz/tm+pd5oygJ0fcdNbMg165Pqi7EkYGAXcB
+dwxioCDA3+BY9+JjuOmJu/xyX2GJtaKSQcOZxyqFzTaa6/ot21sez0BtKjirROKRm2zuai02L0N+
+ULaX8H5P6VwsGPbYOY7sAy5FHBROMrMzFVPYhFHZ7M3ZCZa2hsT4jGow6TGtG8Nje9405uMUjdF4
+PtKQjw6yZOmPUmO8LjFWS4aPCfE011N+l3EdYq09O3iQJ9a01B3KXiMF1WmtZ+l1gmyJ/ibAHZil
+vQzdOl6g9PoSJ4TM4ghTnTndEVMOmsSSu+SCVlGCOLQRaw9oLzamSWP62VuxPZ77mZYdfTRGuNBi
+KyhZL32S2YckO/tU7y4Bf+QKKibQSKCTDWPUwWaE8yCBeL5FjpbQuAlb53mGX1jptLeRotREbx96
+gnicYz0496dYauCjpTCA4VA0cdLJewzRmZeTwuXWD0talJsSF9J1Pe72nkaHSpULgNeK1+o+9yi0
+YpYwXZyvaZatK2eL0U0ZY6ekZkFPdC8JTF4Yo1ytawNfepqUKEhwznp6HO6+2l7L2R9Q3N49JMIe
+Z+ax1mVaWussz98QbNTRPo1xu4W33LJpd9H14dd66ype7UktfEDi3oUTccJ4nODjwBKFxS7lYWiq
+XoHu/b7ZVcK5TbRD0F/2GShg2ywwUl07k4LLqhofKxFBNd1grWY+Zt/cPtacBpV9ys2z1moMLrT3
+W0Elrjtt5y/dvDQYtObYS97pqj0eqmwvD3jCPRqamGthLiF0XkgB6IdHLBBwDGPiIDh7oPaRmTrN
+tYA/yQKFxRiok+jM6ciJq/ZgiOi5+W4DEmufPEubeSuYJaM3/JHEevM08yJAXUQwb9LS2+8FOfds
+FfOe3Bel6EDSjIEIKs4o9tyt67L1ylQlzhe0Q+7ue/bJnWMcD3q6wDSIQi8ThnRM65aqLWesi/ZM
+xhHmQvfKBbWcC194IPjbBLYR9JTPITbzwRcu+OSFHDHNSYCLt29sAHO6Gf0h/2UO9Xwvhrjhczyx
+Ygz6CqP4IwxQj5694Q1Pe2IR+KF/yy+5PvCL/vgwv5mPp9n4kx7fnY/nmV++410qF/ZVCMyv5nAP
+pkeOSce53yJ6ahF4aMJi52by1HcCj9mDT5i+7TF6RoPaLL+cN1hXem2DmX/mdIbeeqwQOLD5lKO/
+6FM4x77w6D5wMx3g0IAfa2D/pgY9a7bFQbinLDPz5dZi9ATIrd0cB5xfC0BfCCZO7TKP0jQ2Meih
+nRXhkA3smTAnDN9IW2vA++lsgNuZ2QP0UhqyjUPrDmgfWP2bWWiKA+YiEK7xou8cY0+d3/bk0oHR
+QLrq4KzDYF/ljQDmNhBHtkVNuoDey6TTeaD3SHO/Bf4d3IwGdqQp6FuhmwFbmbQBssDXVKDBYOpk
+Jy7wxOaSRwr0rDmGbsFdCM+7XU/84JPu3D/gW7QXgzlvbjixn99/8CpWFUQWHFEz/RyXvzNXTTOd
+OXLNNFc957Jn/YikNzEpUdRNxXcC6b76ccTwMGoKj5X7c7TvHFgc3Tf4892+5A+iR+D8OaaE6ACe
+gdgHcyCoPm/xiDCWP+OZRjpzfj5/2u0i4qQfmIEOsTV9Hw6jZ3Agnh6hiwjDtGYxWvt5TiWEuabN
+77YCyRXwO8P8wdzG/8489KwfFBZWI6Vvx76gmlOc03JI1HEfXYZEL4sNFQ3+bqf7e2hdSWQknwKF
+ICJjGyDs3fdmnnxubKXebpQYLjPgEt9GTzKkUgTvOoQa1J7N3nv4sR6uvYFLhkXZ+pbCoU3K9bfq
+gF7W82tNutRRZExad+k4GYYsCfmEbvizS4jsRr3fdzqjEthpEwm7pmN7OgVzRbrktjrFw1lc0vM8
+V7dyTJ71qlsd7v3KhmHzeJB35pqEOk2pEe5uPeCToNkmedmxcKbIj+MZzjFSsvCmimaMQB1uJJKa
++hoWUi7aEFLvIxKxJavqpggXBIk2hr0608dIgnfG5ZEprqmH0b0YSy6jVXTCuIB+WER4d5BPVy9Q
+M4taX0RIlDYxQ2CjBuq78AAcHQf5qoKP8BXHnDnd/+ed5fS+csL4g3eWqECaL+8suy9r8hx7c+4L
+EegEWdqAWN1w1NezP34xsxLkvRRI0DRzKOg0U+BKfQY128YlYsbwSczEg2LqKxRmcgiwHdhc9MQJ
+IwKQHlgBejWeMGDYYxTOQUiJOmIjJbzIzHH6lAMP+y/fR0v1g4wx4St8fcqTt3gz5wc+xXFZZ3qI
+JpXI5iJk7xmNL2tYsDpcqu0375Snd5EKsIvg8u5szTOyZ4v06Ny2TZXRpHUSinh4IFp8Eoi7GINJ
+02lPJnS/9jSxolJwp2slPMIEbjleWw3eec4XaetyEnSSqTPRZ9fVA0cPXMqzrPYQQyrRux3LaAh1
+wujbgcObg1nt4iiJ5IMbc/WNPc280I2T4nTkdwG8H6iS5xO2WfsFsruBwf2QkgZlb6w7om2G65Lr
+r2Gl4dk63F8rCEHoUJ3fW+pU2Srjlmcbp+JXY3DMifEI22HcHAvT7zzXiMTr7VbUR5a2lZtJkk4k
+1heZZFdru8ucCWMTr3Z4eNnjLm7LW7rcN7QjMpxrsCzjxndeyFUX7deIs3PQkgyH8k6luI0uUyLr
+va47TBjM4JmNHFzGPcP6BV6cYgQy8VQYZe5GmzZHMxyBYhGiUdekZQ/qwyxC3WGylQGdUpSf9ZCP
+a7qPdJd31fPRC0TOgzupO7nLuBGr2A02yuUQwt2KQG31sW8Gd9tQiHq+hPDt4OzJuY4pS8XRsepY
+tsd7dVEfJFmc15IYqwHverrpWyS1rFZibDPW1hUUb+85CGUzSBSTK8hpvee/ZxonW51TUXekMy3L
+uy25tMTg4mqbSLQQJ+skiQu2toIfBFYrOWql+EQipgfT15P1aq6FDK3xgSjIGWde0BPftYchDTdM
+i4QdudHFkN0u6fSKiT09QLv2mtSblt5nNzBR6UReePNs+khE4rHcXuoK21igUKHl1c3MXMgPu7y8
+rKQDxR6N/rffXv+lROXet/9Q+l9I4D1U
+""")
+
+##file distutils-init.py
+DISTUTILS_INIT = convert("""
+eJytV1uL4zYUfvevOE0ottuMW9q3gVDa3aUMXXbLMlDKMBiNrSTqOJKRlMxkf33PkXyRbGe7Dw2E
+UXTu37lpxLFV2oIyifAncxmOL0xLIfcG+gv80x9VW6maw7o/CANSWWBwFtqeWMPlGY6qPjV8A0bB
+C4eKSTgZ5LRgFeyErMEeOBhbN+Ipgeizhjtnhkn7DdyjuNLPoCS0l/ayQTG0djwZC08cLXozeMss
+aG5EzQ0IScpnWtHSTXuxByV/QCmxE7y+eS0uxWeoheaVVfqSJHiU7Mhhi6gULbOHorshkrEnKxpT
+0n3A8Y8SMpuwZx6aoix3ouFlmW8gHRSkeSJ2g7hU+kiHLDaQw3bmRDaTGfTnty7gPm0FHbIBg9U9
+oh1kZzAFLaue2R6htPCtAda2nGlDSUJ4PZBgCJBGVcwKTAMz/vJiLD+Oin5Z5QlvDPdulC6EsiyE
+NFzb7McNTKJzbJqzphx92VKRFY1idenzmq3K0emRcbWBD0ryqc4NZGmKOOOX9Pz5x+/l27tP797c
+f/z0d+4NruGNai8uAM0bfsYaw8itFk8ny41jsfpyO+BWlpqfhcG4yxLdi/0tQqoT4a8Vby382mt8
+p7XSo7aWGdPBc+b6utaBmCQ7rQKQoWtAuthQCiold2KfJIPTT8xwg9blPumc+YDZC/wYGdAyHpJk
+vUbHbHWAp5No6pK/WhhLEWrFjUwtPEv1Agf8YmnsuXUQYkeZoHm8ogP16gt2uHoxcEMdf2C6pmbw
+hUMsWGhanboh4IzzmsIpWs134jVPqD/c74bZHdY69UKKSn/+KfVhxLgUlToemayLMYQOqfEC61bh
+cbhwaqoGUzIyZRFHPmau5juaWqwRn3mpWmoEA5nhzS5gog/5jbcFQqOZvmBasZtwYlG93k5GEiyw
+buHhMWLjDarEGpMGB2LFs5nIJkhp/nUmZneFaRth++lieJtHepIvKgx6PJqIlD9X2j6pG1i9x3pZ
+5bHuCPFiirGHeO7McvoXkz786GaKVzC9DSpnOxJdc4xm6NSVq7lNEnKdVlnpu9BNYoKX2Iq3wvgh
+gGEUM66kK6j4NiyoneuPLSwaCWDxczgaolEWpiMyDVDb7dNuLAbriL8ig8mmeju31oNvQdpnvEPC
+1vAXbWacGRVrGt/uXN/gU0CDDwgooKRrHfTBb1/s9lYZ8ZqOBU0yLvpuP6+K9hLFsvIjeNhBi0KL
+MlOuWRn3FRwx5oHXjl0YImUx0+gLzjGchrgzca026ETmYJzPD+IpuKzNi8AFn048Thd63OdD86M6
+84zE8yQm0VqXdbbgvub2pKVnS76icBGdeTHHXTKspUmr4NYo/furFLKiMdQzFjHJNcdAnMhltBJK
+0/IKX3DVFqvPJ2dLE7bDBkH0l/PJ29074+F0CsGYOxsb7U3myTUncYfXqnLLfa6sJybX4g+hmcjO
+kMRBfA1JellfRRKJcyRpxdS4rIl6FdmQCWjo/o9Qz7yKffoP4JHjOvABcRn4CZIT2RH4jnxmfpVG
+qgLaAvQBNfuO6X0/Ux02nb4FKx3vgP+XnkX0QW9pLy/NsXgdN24dD3LxO2Nwil7Zlc1dqtP3d7/h
+kzp1/+7hGBuY4pk0XD/0Ao/oTe/XGrfyM773aB7iUhgkpy+dwAMalxMP0DrBcsVw/6p25+/hobP9
+GBknrWExDhLJ1bwt1NcCNblaFbMKCyvmX0PeRaQ=
+""")
+
+##file distutils.cfg
+DISTUTILS_CFG = convert("""
+eJxNj00KwkAMhfc9xYNuxe4Ft57AjYiUtDO1wXSmNJnK3N5pdSEEAu8nH6lxHVlRhtDHMPATA4uH
+xJ4EFmGbvfJiicSHFRzUSISMY6hq3GLCRLnIvSTnEefN0FIjw5tF0Hkk9Q5dRunBsVoyFi24aaLg
+9FDOlL0FPGluf4QjcInLlxd6f6rqkgPu/5nHLg0cXCscXoozRrP51DRT3j9QNl99AP53T2Q=
+""")
+
+##file activate_this.py
+ACTIVATE_THIS = convert("""
+eJyNU01v2zAMvetXEB4K21jnDOstQA4dMGCHbeihlyEIDMWmE62yJEiKE//7kXKdpEWLzYBt8evx
+kRSzLPs6wiEoswM8YdMpjUXcq1Dz6RZa1cSiTkJdr86GsoTRHuCotBayiWqQEYGtMCgfD1KjGYBe
+5a3p0cRKiEe2NtLAFikftnDco0ko/SFEVgEZ8aRCZDIPY9xbA8pE9M4jfW/B2CjiHq9zbJVZuOQq
+siwTIvpxKYCembPAU4Muwi/Z4zfvrZ/MXipKeB8C+qisSZYiWfjJfs+0/MFMdWn1hJcO5U7G/SLa
+xVx8zU6VG/PXLXvfsyyzUqjeWR8hjGE+2iCE1W1tQ82hsCJN9dzKaoexyB/uH79TnjwvxcW0ntSb
+yZ8jq1Z5Q1UXsyy3gf9nbjTEj7NzQMfCJa/YSmrQ+2D/BqfiOi6sclrGzvoeVivIj8rcfcmnIQRF
+7XCyeZI7DFe5/lhlCs5PRf5QW66VXT/NrlQ46oD/D6InkOmi3IQcbhKxAX2g4a+Xd5s3UtCtG2py
+m8eg6WYWqR6SL5OjKMGfSrYt/6kxxQtOpeAgj1LXBNmpE2ElmCSIy5H0zFd8gJ924HWijWhb2hRC
+6wNEm1QdDZtuSZcEprIUBo/XRNcbQe1OUbQ/r3hPTaPJJDNtFLu8KHV5XoNr3Eo6h6YtOKw8e8yw
+VF5PnJ+ts3a9/Mz38RpG/AUSzYUW
+""")
+
+##file python-config
+PYTHON_CONFIG = convert("""
+eJyNVV1P2zAUfc+v8ODBiSABxlulTipbO6p1LWqBgVhlhcZpPYUkctzSivHfd6+dpGloGH2Ja/ue
+e+65Hz78xNhtf3x90xmw7vCWsRPGLvpDNuz87MKfdKMWSWxZ4ilNpCLZJiuWc66SVFUOZkkcirll
+rfxIBAzOMtImDzSVPBRrekwoX/OZu/0r4lm0DHiG60g86u8sjPw5rCyy86NRkB8QuuBRSqfAKESn
+3orLTCQxE3GYkC9tYp8fk89OSwNsmXgizrhUtnumeSgeo5GbLUMk49Rv+2nK48Cm/qMwfp333J2/
+dVcAGE0CIQHBsgIeEr4Wij0LtWDLzJ9ze5YEvH2WI6CHTAVcSu9ZCsXtgxu81CIvp6/k4eXsdfo7
+PvDCRD75yi41QitfzlcPp1OI7i/1/iQitqnr0iMgQ+A6wa+IKwwdxyk9IiXNAzgquTFU8NIxAVjM
+osm1Zz526e+shQ4hKRVci69nPC3Kw4NQEmkQ65E7OodxorSvxjvpBjQHDmWFIQ1mlmzlS5vedseT
+/mgIEsMJ7Lxz2bLAF9M5xeLEhdbHxpWOw0GdkJApMVBRF1y+a0z3c9WZPAXGFcFrJgCIB+024uad
+0CrzmEoRa3Ub4swNIHPGf7QDV+2uj2OiFWsChgCwjKqN6rp5izpbH6Wc1O1TclQTP/XVwi6anTr1
+1sbubjZLI1+VptPSdCfwnFBrB1jvebrTA9uUhU2/9gad7xPqeFkaQcnnLbCViZK8d7R1kxzFrIJV
+8EaLYmKYpvGVkig+3C5HCXbM1jGCGekiM2pRCVPyRyXYdPf6kcbWEQ36F5V4Gq9N7icNNw+JHwRE
+LTgxRXACpvnQv/PuT0xCCAywY/K4hE6Now2qDwaSE5FB+1agsoUveYDepS83qFcF1NufvULD3fTl
+g6Hgf7WBt6lzMeiyyWVn3P1WVbwaczHmTzE9A5SyItTVgFYyvs/L/fXlaNgbw8v3azT+0eikVlWD
+/vBHbzQumP23uBCjsYdrL9OWARwxs/nuLOzeXbPJTa/Xv6sUmQir5pC1YRLz3eA+CD8Z0XpcW8v9
+MZWF36ryyXXf3yBIz6nzqz8Muyz0m5Qj7OexfYo/Ph3LqvkHUg7AuA==
+""")
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+FAT_MAGIC = 0xcafebabe
+BIG_ENDIAN = '>'
+LITTLE_ENDIAN = '<'
+LC_LOAD_DYLIB = 0xc
+maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint')
+
+
+class fileview(object):
+ """
+ A proxy for file-like objects that exposes a given view of a file.
+ Modified from macholib.
+ """
+
+ def __init__(self, fileobj, start=0, size=maxint):
+ if isinstance(fileobj, fileview):
+ self._fileobj = fileobj._fileobj
+ else:
+ self._fileobj = fileobj
+ self._start = start
+ self._end = start + size
+ self._pos = 0
+
+ def __repr__(self):
+ return '<fileview [%d, %d] %r>' % (
+ self._start, self._end, self._fileobj)
+
+ def tell(self):
+ return self._pos
+
+ def _checkwindow(self, seekto, op):
+ if not (self._start <= seekto <= self._end):
+ raise IOError("%s to offset %d is outside window [%d, %d]" % (
+ op, seekto, self._start, self._end))
+
+ def seek(self, offset, whence=0):
+ seekto = offset
+ if whence == os.SEEK_SET:
+ seekto += self._start
+ elif whence == os.SEEK_CUR:
+ seekto += self._start + self._pos
+ elif whence == os.SEEK_END:
+ seekto += self._end
+ else:
+ raise IOError("Invalid whence argument to seek: %r" % (whence,))
+ self._checkwindow(seekto, 'seek')
+ self._fileobj.seek(seekto)
+ self._pos = seekto - self._start
+
+ def write(self, bytes):
+ here = self._start + self._pos
+ self._checkwindow(here, 'write')
+ self._checkwindow(here + len(bytes), 'write')
+ self._fileobj.seek(here, os.SEEK_SET)
+ self._fileobj.write(bytes)
+ self._pos += len(bytes)
+
+ def read(self, size=maxint):
+ assert size >= 0
+ here = self._start + self._pos
+ self._checkwindow(here, 'read')
+ size = min(size, self._end - here)
+ self._fileobj.seek(here, os.SEEK_SET)
+ bytes = self._fileobj.read(size)
+ self._pos += len(bytes)
+ return bytes
+
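+# Illustrative usage (not part of virtualenv): expose bytes 2..5 of a
+# buffer as if they were a whole file.
+#
+#     import io
+#     fv = fileview(io.BytesIO(b'0123456789'), start=2, size=4)
+#     assert fv.read() == b'2345'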
+
+def read_data(file, endian, num=1):
+ """
+    Read a given number of 32-bit unsigned integers from the given file
+ with the given endianness.
+ """
+ res = struct.unpack(endian + 'L' * num, file.read(num * 4))
+ if len(res) == 1:
+ return res[0]
+ return res
+
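+# Illustrative only: read_data unpacks 32-bit unsigned integers, e.g.
+#
+#     import io, struct
+#     buf = io.BytesIO(struct.pack('>LL', 1, 2))
+#     assert read_data(buf, BIG_ENDIAN, 2) == (1, 2)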
+
+def mach_o_change(path, what, value):
+ """
+ Replace a given name (what) in any LC_LOAD_DYLIB command found in
+    the given binary with a new name (value), provided the new name is not
+    longer than the old one.
+ """
+
+ def do_macho(file, bits, endian):
+        # Read the Mach-O header (the magic number is assumed to have been
+        # read already by the caller).
+        cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
+        # The 64-bit header has one more field.
+ if bits == 64:
+ read_data(file, endian)
+ # The header is followed by ncmds commands
+ for n in range(ncmds):
+ where = file.tell()
+ # Read command header
+ cmd, cmdsize = read_data(file, endian, 2)
+ if cmd == LC_LOAD_DYLIB:
+ # The first data field in LC_LOAD_DYLIB commands is the
+ # offset of the name, starting from the beginning of the
+ # command.
+ name_offset = read_data(file, endian)
+ file.seek(where + name_offset, os.SEEK_SET)
+ # Read the NUL terminated string
+ load = file.read(cmdsize - name_offset).decode()
+ load = load[:load.index('\0')]
+ # If the string is what is being replaced, overwrite it.
+ if load == what:
+ file.seek(where + name_offset, os.SEEK_SET)
+ file.write(value.encode() + '\0'.encode())
+ # Seek to the next command
+ file.seek(where + cmdsize, os.SEEK_SET)
+
+ def do_file(file, offset=0, size=maxint):
+ file = fileview(file, offset, size)
+ # Read magic number
+ magic = read_data(file, BIG_ENDIAN)
+ if magic == FAT_MAGIC:
+ # Fat binaries contain nfat_arch Mach-O binaries
+ nfat_arch = read_data(file, BIG_ENDIAN)
+ for n in range(nfat_arch):
+ # Read arch header
+ cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
+ do_file(file, offset, size)
+ elif magic == MH_MAGIC:
+ do_macho(file, 32, BIG_ENDIAN)
+ elif magic == MH_CIGAM:
+ do_macho(file, 32, LITTLE_ENDIAN)
+ elif magic == MH_MAGIC_64:
+ do_macho(file, 64, BIG_ENDIAN)
+ elif magic == MH_CIGAM_64:
+ do_macho(file, 64, LITTLE_ENDIAN)
+
+ assert(len(what) >= len(value))
+
+ with open(path, 'r+b') as f:
+ do_file(f)
+
+
+if __name__ == '__main__':
+ main()
+
+# TODO:
+# Copy python.exe.manifest
+# Monkeypatch distutils.sysconfig
diff --git a/python/virtualenv/virtualenv_embedded/activate.bat b/python/virtualenv/virtualenv_embedded/activate.bat
new file mode 100644
index 000000000..529b9733c
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate.bat
@@ -0,0 +1,30 @@
+@echo off
+set "VIRTUAL_ENV=__VIRTUAL_ENV__"
+
+if defined _OLD_VIRTUAL_PROMPT (
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+) else (
+ if not defined PROMPT (
+ set "PROMPT=$P$G"
+ )
+ set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
+)
+set "PROMPT=__VIRTUAL_WINPROMPT__ %PROMPT%"
+
+REM Don't use () to avoid problems with them in %PATH%
+if defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
+:ENDIFVHOME
+
+set PYTHONHOME=
+
+REM if defined _OLD_VIRTUAL_PATH (
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH1
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+:ENDIFVPATH1
+REM ) else (
+if defined _OLD_VIRTUAL_PATH goto ENDIFVPATH2
+ set "_OLD_VIRTUAL_PATH=%PATH%"
+:ENDIFVPATH2
+
+set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"
diff --git a/python/virtualenv/virtualenv_embedded/activate.csh b/python/virtualenv/virtualenv_embedded/activate.csh
new file mode 100644
index 000000000..864865b17
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate.csh
@@ -0,0 +1,36 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+
+
+
+if ("__VIRTUAL_PROMPT__" != "") then
+ set env_name = "__VIRTUAL_PROMPT__"
+else
+ set env_name = `basename "$VIRTUAL_ENV"`
+endif
+
+# Could be in a non-interactive environment,
+# in which case, $prompt is undefined and we wouldn't
+# care about the prompt anyway.
+if ( $?prompt ) then
+ set _OLD_VIRTUAL_PROMPT="$prompt"
+ set prompt = "[$env_name] $prompt"
+endif
+
+unset env_name
+
+alias pydoc python -m pydoc
+
+rehash
+
diff --git a/python/virtualenv/virtualenv_embedded/activate.fish b/python/virtualenv/virtualenv_embedded/activate.fish
new file mode 100644
index 000000000..f3d1797a3
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate.fish
@@ -0,0 +1,76 @@
+# This file must be sourced with `. bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
+# Do not run it directly.
+
+function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
+ set -l fish_function_path
+
+ # Erase virtualenv's `fish_prompt` and restore the original.
+ functions -e fish_prompt
+ functions -c _old_fish_prompt fish_prompt
+ functions -e _old_fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ end
+
+ set -e VIRTUAL_ENV
+
+ if test "$argv[1]" != 'nondestructive'
+ # Self-destruct!
+ functions -e pydoc
+ functions -e deactivate
+ end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__BIN_NAME__" $PATH
+
+# Unset `$PYTHONHOME` if set.
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+function pydoc
+ python -m pydoc $argv
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # Copy the current `fish_prompt` function as `_old_fish_prompt`.
+ functions -c fish_prompt _old_fish_prompt
+
+ function fish_prompt
+ # Save the current $status, for fish_prompts that display it.
+ set -l old_status $status
+
+ # Prompt override provided?
+ # If not, just prepend the environment name.
+ if test -n "__VIRTUAL_PROMPT__"
+ printf '%s%s' "__VIRTUAL_PROMPT__" (set_color normal)
+ else
+ printf '%s(%s%s%s) ' (set_color normal) (set_color -o white) (basename "$VIRTUAL_ENV") (set_color normal)
+ end
+
+ # Restore the original $status
+ echo "exit $old_status" | source
+ _old_fish_prompt
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/python/virtualenv/virtualenv_embedded/activate.ps1 b/python/virtualenv/virtualenv_embedded/activate.ps1
new file mode 100644
index 000000000..0f4adf19f
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate.ps1
@@ -0,0 +1,150 @@
+# This file must be dot-sourced from PowerShell; you cannot run it
+# directly. Do this: . ./activate.ps1
+
+# FIXME: clean up unused vars.
+$script:THIS_PATH = $myinvocation.mycommand.path
+$script:BASE_DIR = split-path (resolve-path "$THIS_PATH/..") -Parent
+$script:DIR_NAME = split-path $BASE_DIR -Leaf
+
+function global:deactivate ( [switch] $NonDestructive ){
+
+ if ( test-path variable:_OLD_VIRTUAL_PATH ) {
+ $env:PATH = $variable:_OLD_VIRTUAL_PATH
+ remove-variable "_OLD_VIRTUAL_PATH" -scope global
+ }
+
+ if ( test-path function:_old_virtual_prompt ) {
+ $function:prompt = $function:_old_virtual_prompt
+ remove-item function:\_old_virtual_prompt
+ }
+
+ if ($env:VIRTUAL_ENV) {
+ $old_env = split-path $env:VIRTUAL_ENV -leaf
+ remove-item env:VIRTUAL_ENV -erroraction silentlycontinue
+ }
+
+ if ( !$NonDestructive ) {
+ # Self destruct!
+ remove-item function:deactivate
+ }
+}
+
+# unset irrelevant variables
+deactivate -nondestructive
+
+$VIRTUAL_ENV = $BASE_DIR
+$env:VIRTUAL_ENV = $VIRTUAL_ENV
+
+$global:_OLD_VIRTUAL_PATH = $env:PATH
+$env:PATH = "$env:VIRTUAL_ENV/Scripts;" + $env:PATH
+if (! $env:VIRTUAL_ENV_DISABLE_PROMPT) {
+ function global:_old_virtual_prompt { "" }
+ $function:_old_virtual_prompt = $function:prompt
+ function global:prompt {
+ # Add a prefix to the current prompt, but don't discard it.
+ write-host "($(split-path $env:VIRTUAL_ENV -leaf)) " -nonewline
+ & $function:_old_virtual_prompt
+ }
+}
+
+# SIG # Begin signature block
+# MIISeAYJKoZIhvcNAQcCoIISaTCCEmUCAQExCzAJBgUrDgMCGgUAMGkGCisGAQQB
+# gjcCAQSgWzBZMDQGCisGAQQBgjcCAR4wJgIDAQAABBAfzDtgWUsITrck0sYpfvNR
+# AgEAAgEAAgEAAgEAAgEAMCEwCQYFKw4DAhoFAAQUS5reBwSg3zOUwhXf2jPChZzf
+# yPmggg6tMIIGcDCCBFigAwIBAgIBJDANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQG
+# EwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERp
+# Z2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2Vy
+# dGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDcxMDI0MjIwMTQ2WhcNMTcxMDI0MjIw
+# MTQ2WjCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0ZC4xKzAp
+# BgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxODA2BgNV
+# BAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUgT2JqZWN0
+# IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiOLIjUemqAbPJ1J
+# 0D8MlzgWKbr4fYlbRVjvhHDtfhFN6RQxq0PjTQxRgWzwFQNKJCdU5ftKoM5N4YSj
+# Id6ZNavcSa6/McVnhDAQm+8H3HWoD030NVOxbjgD/Ih3HaV3/z9159nnvyxQEckR
+# ZfpJB2Kfk6aHqW3JnSvRe+XVZSufDVCe/vtxGSEwKCaNrsLc9pboUoYIC3oyzWoU
+# TZ65+c0H4paR8c8eK/mC914mBo6N0dQ512/bkSdaeY9YaQpGtW/h/W/FkbQRT3sC
+# pttLVlIjnkuY4r9+zvqhToPjxcfDYEf+XD8VGkAqle8Aa8hQ+M1qGdQjAye8OzbV
+# uUOw7wIDAQABo4IB6TCCAeUwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+# AQYwHQYDVR0OBBYEFNBOD0CZbLhLGW87KLjg44gHNKq3MB8GA1UdIwQYMBaAFE4L
+# 7xqkQFulF2mHMMo0aEPQQa7yMD0GCCsGAQUFBwEBBDEwLzAtBggrBgEFBQcwAoYh
+# aHR0cDovL3d3dy5zdGFydHNzbC5jb20vc2ZzY2EuY3J0MFsGA1UdHwRUMFIwJ6Al
+# oCOGIWh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3Nmc2NhLmNybDAnoCWgI4YhaHR0
+# cDovL2NybC5zdGFydHNzbC5jb20vc2ZzY2EuY3JsMIGABgNVHSAEeTB3MHUGCysG
+# AQQBgbU3AQIBMGYwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L2ludGVybWVkaWF0ZS5wZGYwEQYJYIZIAYb4QgEBBAQDAgABMFAGCWCGSAGG+EIB
+# DQRDFkFTdGFydENvbSBDbGFzcyAyIFByaW1hcnkgSW50ZXJtZWRpYXRlIE9iamVj
+# dCBTaWduaW5nIENlcnRpZmljYXRlczANBgkqhkiG9w0BAQUFAAOCAgEAcnMLA3Va
+# N4OIE9l4QT5OEtZy5PByBit3oHiqQpgVEQo7DHRsjXD5H/IyTivpMikaaeRxIv95
+# baRd4hoUcMwDj4JIjC3WA9FoNFV31SMljEZa66G8RQECdMSSufgfDYu1XQ+cUKxh
+# D3EtLGGcFGjjML7EQv2Iol741rEsycXwIXcryxeiMbU2TPi7X3elbwQMc4JFlJ4B
+# y9FhBzuZB1DV2sN2irGVbC3G/1+S2doPDjL1CaElwRa/T0qkq2vvPxUgryAoCppU
+# FKViw5yoGYC+z1GaesWWiP1eFKAL0wI7IgSvLzU3y1Vp7vsYaxOVBqZtebFTWRHt
+# XjCsFrrQBngt0d33QbQRI5mwgzEp7XJ9xu5d6RVWM4TPRUsd+DDZpBHm9mszvi9g
+# VFb2ZG7qRRXCSqys4+u/NLBPbXi/m/lU00cODQTlC/euwjk9HQtRrXQ/zqsBJS6U
+# J+eLGw1qOfj+HVBl/ZQpfoLk7IoWlRQvRL1s7oirEaqPZUIWY/grXq9r6jDKAp3L
+# ZdKQpPOnnogtqlU4f7/kLjEJhrrc98mrOWmVMK/BuFRAfQ5oDUMnVmCzAzLMjKfG
+# cVW/iMew41yfhgKbwpfzm3LBr1Zv+pEBgcgW6onRLSAn3XHM0eNtz+AkxH6rRf6B
+# 2mYhLEEGLapH8R1AMAo4BbVFOZR5kXcMCwowggg1MIIHHaADAgECAgIEuDANBgkq
+# hkiG9w0BAQUFADCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0
+# ZC4xKzApBgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcx
+# ODA2BgNVBAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUg
+# T2JqZWN0IENBMB4XDTExMTIwMzE1MzQxOVoXDTEzMTIwMzE0NTgwN1owgYwxIDAe
+# BgNVBA0TFzU4MTc5Ni1HaDd4Zkp4a3hRU0lPNEUwMQswCQYDVQQGEwJERTEPMA0G
+# A1UECBMGQmVybGluMQ8wDQYDVQQHEwZCZXJsaW4xFjAUBgNVBAMTDUphbm5pcyBM
+# ZWlkZWwxITAfBgkqhkiG9w0BCQEWEmphbm5pc0BsZWlkZWwuaW5mbzCCAiIwDQYJ
+# KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMcPeABYdN7nPq/AkZ/EkyUBGx/l2Yui
+# Lfm8ZdLG0ulMb/kQL3fRY7sUjYPyn9S6PhqqlFnNoGHJvbbReCdUC9SIQYmOEjEA
+# raHfb7MZU10NjO4U2DdGucj2zuO5tYxKizizOJF0e4yRQZVxpUGdvkW/+GLjCNK5
+# L7mIv3Z1dagxDKHYZT74HXiS4VFUwHF1k36CwfM2vsetdm46bdgSwV+BCMmZICYT
+# IJAS9UQHD7kP4rik3bFWjUx08NtYYFAVOd/HwBnemUmJe4j3IhZHr0k1+eDG8hDH
+# KVvPgLJIoEjC4iMFk5GWsg5z2ngk0LLu3JZMtckHsnnmBPHQK8a3opUNd8hdMNJx
+# gOwKjQt2JZSGUdIEFCKVDqj0FmdnDMPfwy+FNRtpBMl1sz78dUFhSrnM0D8NXrqa
+# 4rG+2FoOXlmm1rb6AFtpjAKksHRpYcPk2DPGWp/1sWB+dUQkS3gOmwFzyqeTuXpT
+# 0juqd3iAxOGx1VRFQ1VHLLf3AzV4wljBau26I+tu7iXxesVucSdsdQu293jwc2kN
+# xK2JyHCoZH+RyytrwS0qw8t7rMOukU9gwP8mn3X6mgWlVUODMcHTULjSiCEtvyZ/
+# aafcwjUbt4ReEcnmuZtWIha86MTCX7U7e+cnpWG4sIHPnvVTaz9rm8RyBkIxtFCB
+# nQ3FnoQgyxeJAgMBAAGjggOdMIIDmTAJBgNVHRMEAjAAMA4GA1UdDwEB/wQEAwIH
+# gDAuBgNVHSUBAf8EJDAiBggrBgEFBQcDAwYKKwYBBAGCNwIBFQYKKwYBBAGCNwoD
+# DTAdBgNVHQ4EFgQUWyCgrIWo8Ifvvm1/YTQIeMU9nc8wHwYDVR0jBBgwFoAU0E4P
+# QJlsuEsZbzsouODjiAc0qrcwggIhBgNVHSAEggIYMIICFDCCAhAGCysGAQQBgbU3
+# AQICMIIB/zAuBggrBgEFBQcCARYiaHR0cDovL3d3dy5zdGFydHNzbC5jb20vcG9s
+# aWN5LnBkZjA0BggrBgEFBQcCARYoaHR0cDovL3d3dy5zdGFydHNzbC5jb20vaW50
+# ZXJtZWRpYXRlLnBkZjCB9wYIKwYBBQUHAgIwgeowJxYgU3RhcnRDb20gQ2VydGlm
+# aWNhdGlvbiBBdXRob3JpdHkwAwIBARqBvlRoaXMgY2VydGlmaWNhdGUgd2FzIGlz
+# c3VlZCBhY2NvcmRpbmcgdG8gdGhlIENsYXNzIDIgVmFsaWRhdGlvbiByZXF1aXJl
+# bWVudHMgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeSwgcmVsaWFuY2Ugb25seSBm
+# b3IgdGhlIGludGVuZGVkIHB1cnBvc2UgaW4gY29tcGxpYW5jZSBvZiB0aGUgcmVs
+# eWluZyBwYXJ0eSBvYmxpZ2F0aW9ucy4wgZwGCCsGAQUFBwICMIGPMCcWIFN0YXJ0
+# Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MAMCAQIaZExpYWJpbGl0eSBhbmQg
+# d2FycmFudGllcyBhcmUgbGltaXRlZCEgU2VlIHNlY3Rpb24gIkxlZ2FsIGFuZCBM
+# aW1pdGF0aW9ucyIgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeS4wNgYDVR0fBC8w
+# LTAroCmgJ4YlaHR0cDovL2NybC5zdGFydHNzbC5jb20vY3J0YzItY3JsLmNybDCB
+# iQYIKwYBBQUHAQEEfTB7MDcGCCsGAQUFBzABhitodHRwOi8vb2NzcC5zdGFydHNz
+# bC5jb20vc3ViL2NsYXNzMi9jb2RlL2NhMEAGCCsGAQUFBzAChjRodHRwOi8vYWlh
+# LnN0YXJ0c3NsLmNvbS9jZXJ0cy9zdWIuY2xhc3MyLmNvZGUuY2EuY3J0MCMGA1Ud
+# EgQcMBqGGGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOC
+# AQEAhrzEV6zwoEtKjnFRhCsjwiPykVpo5Eiye77Ve801rQDiRKgSCCiW6g3HqedL
+# OtaSs65Sj2pm3Viea4KR0TECLcbCTgsdaHqw2x1yXwWBQWZEaV6EB05lIwfr94P1
+# SFpV43zkuc+bbmA3+CRK45LOcCNH5Tqq7VGTCAK5iM7tvHwFlbQRl+I6VEL2mjpF
+# NsuRjDOVrv/9qw/a22YJ9R7Y1D0vUSs3IqZx2KMUaYDP7H2mSRxJO2nADQZBtriF
+# gTyfD3lYV12MlIi5CQwe3QC6DrrfSMP33i5Wa/OFJiQ27WPxmScYVhiqozpImFT4
+# PU9goiBv9RKXdgTmZE1PN0NQ5jGCAzUwggMxAgEBMIGTMIGMMQswCQYDVQQGEwJJ
+# TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0
+# YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg
+# MiBQcmltYXJ5IEludGVybWVkaWF0ZSBPYmplY3QgQ0ECAgS4MAkGBSsOAwIaBQCg
+# eDAYBgorBgEEAYI3AgEMMQowCKACgAChAoAAMBkGCSqGSIb3DQEJAzEMBgorBgEE
+# AYI3AgEEMBwGCisGAQQBgjcCAQsxDjAMBgorBgEEAYI3AgEVMCMGCSqGSIb3DQEJ
+# BDEWBBRVGw0FDSiaIi38dWteRUAg/9Pr6DANBgkqhkiG9w0BAQEFAASCAgCInvOZ
+# FdaNFzbf6trmFDZKMojyx3UjKMCqNjHVBbuKY0qXwFC/ElYDV1ShJ2CBZbdurydO
+# OQ6cIQ0KREOCwmX/xB49IlLHHUxNhEkVv7HGU3EKAFf9IBt9Yr7jikiR9cjIsfHK
+# 4cjkoKJL7g28yEpLLkHt1eo37f1Ga9lDWEa5Zq3U5yX+IwXhrUBm1h8Xr033FhTR
+# VEpuSz6LHtbrL/zgJnCzJ2ahjtJoYevdcWiNXffosJHFaSfYDDbiNsPRDH/1avmb
+# 5j/7BhP8BcBaR6Fp8tFbNGIcWHHGcjqLMnTc4w13b7b4pDhypqElBa4+lCmwdvv9
+# GydYtRgPz8GHeoBoKj30YBlMzRIfFYaIFGIC4Ai3UEXkuH9TxYohVbGm/W0Kl4Lb
+# RJ1FwiVcLcTOJdgNId2vQvKc+jtNrjcg5SP9h2v/C4aTx8tyc6tE3TOPh2f9b8DL
+# S+SbVArJpuJqrPTxDDoO1QNjTgLcdVYeZDE+r/NjaGZ6cMSd8db3EaG3ijD/0bud
+# SItbm/OlNVbQOFRR76D+ZNgPcU5iNZ3bmvQQIg6aSB9MHUpIE/SeCkNl9YeVk1/1
+# GFULgNMRmIYP4KLvu9ylh5Gu3hvD5VNhH6+FlXANwFy07uXks5uF8mfZVxVCnodG
+# xkNCx+6PsrA5Z7WP4pXcmYnMn97npP/Q9EHJWw==
+# SIG # End signature block
diff --git a/python/virtualenv/virtualenv_embedded/activate.sh b/python/virtualenv/virtualenv_embedded/activate.sh
new file mode 100644
index 000000000..477b7eca2
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate.sh
@@ -0,0 +1,78 @@
+# This file must be used with "source bin/activate" *from bash*;
+# you cannot run it directly.
+
+deactivate () {
+ unset -f pydoc >/dev/null 2>&1
+
+ # reset old environment variables
+ # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
+ if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
+ PATH="$_OLD_VIRTUAL_PATH"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
+ PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+    # past commands, the $PATH changes we made may not be respected.
+ if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+ fi
+
+ if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
+ PS1="$_OLD_VIRTUAL_PS1"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ if [ ! "${1-}" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="__VIRTUAL_ENV__"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+if ! [ -z "${PYTHONHOME+_}" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+ unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
+ _OLD_VIRTUAL_PS1="$PS1"
+ if [ "x__VIRTUAL_PROMPT__" != x ] ; then
+ PS1="__VIRTUAL_PROMPT__$PS1"
+ else
+ PS1="(`basename \"$VIRTUAL_ENV\"`) $PS1"
+ fi
+ export PS1
+fi
+
+# Make sure to unalias pydoc if it's already there
+alias pydoc 2>/dev/null >/dev/null && unalias pydoc
+
+pydoc () {
+ python -m pydoc "$@"
+}
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands, the $PATH changes we made may not be respected.
+if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+fi
diff --git a/python/virtualenv/virtualenv_embedded/activate_this.py b/python/virtualenv/virtualenv_embedded/activate_this.py
new file mode 100644
index 000000000..f18193bf8
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/activate_this.py
@@ -0,0 +1,34 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv bin/python.
+"""
+
+try:
+ __file__
+except NameError:
+ raise AssertionError(
+ "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+old_os_path = os.environ.get('PATH', '')
+os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+if sys.platform == 'win32':
+ site_packages = os.path.join(base, 'Lib', 'site-packages')
+else:
+ site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+ if item not in prev_sys_path:
+ new_sys_path.append(item)
+ sys.path.remove(item)
+sys.path[:0] = new_sys_path
diff --git a/python/virtualenv/virtualenv_embedded/deactivate.bat b/python/virtualenv/virtualenv_embedded/deactivate.bat
new file mode 100644
index 000000000..9228d3171
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/deactivate.bat
@@ -0,0 +1,19 @@
+@echo off
+
+set VIRTUAL_ENV=
+
+REM Don't use () to avoid problems with them in %PATH%
+if not defined _OLD_VIRTUAL_PROMPT goto ENDIFVPROMPT
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+ set _OLD_VIRTUAL_PROMPT=
+:ENDIFVPROMPT
+
+if not defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
+ set _OLD_VIRTUAL_PYTHONHOME=
+:ENDIFVHOME
+
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+ set _OLD_VIRTUAL_PATH=
+:ENDIFVPATH \ No newline at end of file
diff --git a/python/virtualenv/virtualenv_embedded/distutils-init.py b/python/virtualenv/virtualenv_embedded/distutils-init.py
new file mode 100644
index 000000000..29fc1da45
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/distutils-init.py
@@ -0,0 +1,101 @@
+import os
+import sys
+import warnings
+import imp
+import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
+ # Important! To work on pypy, this must be a module that resides in the
+ # lib-python/modified-x.y.z directory
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+ warnings.warn(
+ "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
+else:
+ __path__.insert(0, distutils_path)
+ real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
+ # Copy the relevant attributes
+ try:
+ __revision__ = real_distutils.__revision__
+ except AttributeError:
+ pass
+ __version__ = real_distutils.__version__
+
+from distutils import dist, sysconfig
+
+try:
+ basestring
+except NameError:
+ basestring = str
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+ from distutils.command.build_ext import build_ext as old_build_ext
+ class build_ext(old_build_ext):
+ def finalize_options (self):
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, basestring):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+ old_build_ext.finalize_options(self)
+
+ from distutils.command import build_ext as build_ext_module
+ build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+ found = old_find_config_files(self)
+ system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+ #if os.path.exists(system_distutils):
+ # found.insert(0, system_distutils)
+ # What to call the per-user config file
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+ user_filename = os.path.join(sys.prefix, user_filename)
+ if os.path.isfile(user_filename):
+ for item in list(found):
+ if item.endswith('pydistutils.cfg'):
+ found.remove(item)
+ found.append(user_filename)
+ return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+ if prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+ if standard_lib and prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+ real_vars = old_get_config_vars(*args)
+ if sys.platform == 'win32':
+ lib_dir = os.path.join(sys.real_prefix, "libs")
+ if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+ real_vars['LIBDIR'] = lib_dir # asked for all
+ elif isinstance(real_vars, list) and 'LIBDIR' in args:
+ real_vars = real_vars + [lib_dir] # asked for list
+ return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
diff --git a/python/virtualenv/virtualenv_embedded/distutils.cfg b/python/virtualenv/virtualenv_embedded/distutils.cfg
new file mode 100644
index 000000000..1af230ec9
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/distutils.cfg
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install. For instance:
+#
+# [easy_install]
+# find_links = http://mylocalsite
diff --git a/python/virtualenv/virtualenv_embedded/python-config b/python/virtualenv/virtualenv_embedded/python-config
new file mode 100644
index 000000000..5e7a7c901
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/python-config
@@ -0,0 +1,78 @@
+#!__VIRTUAL_ENV__/__BIN_NAME__/python
+
+import sys
+import getopt
+import sysconfig
+
+valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
+ 'ldflags', 'help']
+
+if sys.version_info >= (3, 2):
+ valid_opts.insert(-1, 'extension-suffix')
+ valid_opts.append('abiflags')
+if sys.version_info >= (3, 3):
+ valid_opts.append('configdir')
+
+
+def exit_with_usage(code=1):
+ sys.stderr.write("Usage: {0} [{1}]\n".format(
+ sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
+ sys.exit(code)
+
+try:
+ opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
+except getopt.error:
+ exit_with_usage()
+
+if not opts:
+ exit_with_usage()
+
+pyver = sysconfig.get_config_var('VERSION')
+getvar = sysconfig.get_config_var
+
+opt_flags = [flag for (flag, val) in opts]
+
+if '--help' in opt_flags:
+ exit_with_usage(code=0)
+
+for opt in opt_flags:
+ if opt == '--prefix':
+ print(sysconfig.get_config_var('prefix'))
+
+ elif opt == '--exec-prefix':
+ print(sysconfig.get_config_var('exec_prefix'))
+
+ elif opt in ('--includes', '--cflags'):
+ flags = ['-I' + sysconfig.get_path('include'),
+ '-I' + sysconfig.get_path('platinclude')]
+ if opt == '--cflags':
+ flags.extend(getvar('CFLAGS').split())
+ print(' '.join(flags))
+
+ elif opt in ('--libs', '--ldflags'):
+ abiflags = getattr(sys, 'abiflags', '')
+ libs = ['-lpython' + pyver + abiflags]
+ libs += getvar('LIBS').split()
+ libs += getvar('SYSLIBS').split()
+ # add the prefix/lib/pythonX.Y/config dir, but only if there is no
+ # shared library in prefix/lib/.
+ if opt == '--ldflags':
+ if not getvar('Py_ENABLE_SHARED'):
+ libs.insert(0, '-L' + getvar('LIBPL'))
+ if not getvar('PYTHONFRAMEWORK'):
+ libs.extend(getvar('LINKFORSHARED').split())
+ print(' '.join(libs))
+
+ elif opt == '--extension-suffix':
+ ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+ if ext_suffix is None:
+ ext_suffix = sysconfig.get_config_var('SO')
+ print(ext_suffix)
+
+ elif opt == '--abiflags':
+ if not getattr(sys, 'abiflags', None):
+ exit_with_usage()
+ print(sys.abiflags)
+
+ elif opt == '--configdir':
+ print(sysconfig.get_config_var('LIBPL'))
diff --git a/python/virtualenv/virtualenv_embedded/site.py b/python/virtualenv/virtualenv_embedded/site.py
new file mode 100644
index 000000000..7969769c3
--- /dev/null
+++ b/python/virtualenv/virtualenv_embedded/site.py
@@ -0,0 +1,758 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages. On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely). The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.X/site-packages/bar
+ /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+try:
+ import __builtin__ as builtins
+except ImportError:
+ import builtins
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+ ModuleType = type(os)
+
+def makepath(*paths):
+ dir = os.path.join(*paths)
+ if _is_jython and (dir == '__classpath__' or
+ dir.startswith('__pyclasspath__')):
+ return dir, dir
+ dir = os.path.abspath(dir)
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+ """Set all module' __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if ((_is_jython and not isinstance(m, ModuleType)) or
+ hasattr(m, '__loader__')):
+ # only modules need the abspath in Jython. and don't mess
+ # with a PEP 302-supplied __file__
+ continue
+ f = getattr(m, '__file__', None)
+ if f is None:
+ continue
+ m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ if hasattr(sys, 'gettotalrefcount'):
+ s += '-pydebug'
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+ """Add a new path to known_paths by combining sitedir and 'name' or execute
+ sitedir if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec(line)
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+ if exec_prefix != sys_prefix:
+ prefixes.append(os.path.join(exec_prefix, "local"))
+
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos') or _is_jython:
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif _is_pypy:
+ sitedirs = [os.path.join(prefix, 'site-packages')]
+            elif sys.platform == 'darwin' and prefix == sys_prefix:
+                if prefix.startswith("/System/Library/Frameworks/"):  # Apple's Python
+                    sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+                                os.path.join(prefix, "Extras", "lib", "python")]
+                else:  # any other Python distros on OSX work this way
+                    sitedirs = [os.path.join(prefix, "lib",
+                                "python" + sys.version[:3], "site-packages")]
+
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python"),
+ os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+ lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+ if (os.path.exists(lib64_dir) and
+ os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+ if _is_64bit:
+ sitedirs.insert(0, lib64_dir)
+ else:
+ sitedirs.append(lib64_dir)
+ try:
+ # sys.getobjects only available in --with-pydebug build
+ sys.getobjects
+ sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+ except AttributeError:
+ pass
+ # Debian-specific dist-packages directories:
+ sitedirs.append(os.path.join(prefix, "local/lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ if sys.version[0] == '2':
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ else:
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[0],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+def check_enableusersite():
+ """Check if user site directory is safe for inclusion
+
+ The function tests for the command line flag (including environment var),
+ process uid/gid equal to effective uid/gid.
+
+ None: Disabled for security reasons
+ False: Disabled by user (command line option)
+ True: Safe and enabled
+ """
+ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+ return False
+
+ if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+ # check process uid == effective uid
+ if os.geteuid() != os.getuid():
+ return None
+ if hasattr(os, "getgid") and hasattr(os, "getegid"):
+ # check process gid == effective gid
+ if os.getegid() != os.getgid():
+ return None
+
+ return True
+
+def addusersitepackages(known_paths):
+ """Add a per user site-package to sys.path
+
+    Each user has their own python directory with site-packages in the
+ home directory.
+
+ USER_BASE is the root directory for all Python versions
+
+ USER_SITE is the user specific site-packages directory
+
+ USER_SITE/.. can be used for data.
+ """
+ global USER_BASE, USER_SITE, ENABLE_USER_SITE
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ #if sys.platform in ('os2emx', 'riscos'):
+ # # Don't know what to put here
+ # USER_BASE = ''
+ # USER_SITE = ''
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser(base, "Python")
+ USER_SITE = os.path.join(USER_BASE,
+ "Python" + sys.version[0] + sys.version[2],
+ "site-packages")
+ else:
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser("~", ".local")
+ USER_SITE = os.path.join(USER_BASE, "lib",
+ "python" + sys.version[:3],
+ "site-packages")
+
+ if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+ addsitedir(USER_SITE, known_paths)
+ if ENABLE_USER_SITE:
+ for dist_libdir in ("lib", "local/lib"):
+ user_site = os.path.join(USER_BASE, dist_libdir,
+ "python" + sys.version[:3],
+ "dist-packages")
+ if os.path.isdir(user_site):
+ addsitedir(user_site, known_paths)
+ return known_paths
+
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+    These are callable objects that display a hint on how to exit and
+    raise SystemExit when called.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ builtins.quit = Quitter('quit')
+ builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = open(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print(self.__lines[i])
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ try:
+ key = raw_input(prompt)
+ except NameError:
+ key = input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ builtins.copyright = _Printer("copyright", sys.copyright)
+ if _is_jython:
+ builtins.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ elif _is_pypy:
+ builtins.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://pypy.org/")
+ else:
+ builtins.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ builtins.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ builtins.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+ if enc.startswith('cp'): # "cp***" ?
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0:
+ # Enable to support locale aware default string encodings.
+ import locale
+ loc = locale.getdefaultlocale()
+ if loc[1]:
+ encoding = loc[1]
+ if 0:
+ # Enable to switch off string to Unicode coercion and implicit
+ # Unicode to string conversion.
+ encoding = "undefined"
+ if encoding != "ascii":
+ # On Non-Unicode builds this will raise an AttributeError...
+ sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+ """Run custom site specific code, if available."""
+ try:
+ import sitecustomize
+ except ImportError:
+ pass
+
+def virtual_install_main_packages():
+ f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+ sys.real_prefix = f.read().strip()
+ f.close()
+ pos = 2
+ hardcoded_relative_dirs = []
+ if sys.path[0] == '':
+ pos += 1
+ if _is_jython:
+ paths = [os.path.join(sys.real_prefix, 'Lib')]
+ elif _is_pypy:
+ if sys.version_info > (3, 2):
+ cpyver = '%d' % sys.version_info[0]
+ elif sys.pypy_version_info >= (1, 5):
+ cpyver = '%d.%d' % sys.version_info[:2]
+ else:
+ cpyver = '%d.%d.%d' % sys.version_info[:3]
+ paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+ os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+ if sys.pypy_version_info < (1, 9):
+ paths.insert(1, os.path.join(sys.real_prefix,
+ 'lib-python', 'modified-%s' % cpyver))
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ #
+ # This is hardcoded in the Python executable, but relative to sys.prefix:
+ for path in paths[:]:
+ plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ elif sys.platform == 'win32':
+ paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+ else:
+ paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+ if os.path.exists(lib64_path):
+ if _is_64bit:
+ paths.insert(0, lib64_path)
+ else:
+ paths.append(lib64_path)
+ # This is hardcoded in the Python executable, but relative to
+ # sys.prefix. Debian change: we need to add the multiarch triplet
+ # here, which is where the real stuff lives. As per PEP 421, in
+ # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+ # it lives in sys.
+ try:
+ arch = getattr(sys, 'implementation', sys)._multiarch
+ except AttributeError:
+ # This is a non-multiarch aware Python. Fallback to the old way.
+ arch = sys.platform
+ plat_path = os.path.join(sys.real_prefix, 'lib',
+ 'python'+sys.version[:3],
+ 'plat-%s' % arch)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ # This is hardcoded in the Python executable, but
+ # relative to sys.prefix, so we have to fix up:
+ for path in list(paths):
+ tk_dir = os.path.join(path, 'lib-tk')
+ if os.path.exists(tk_dir):
+ paths.append(tk_dir)
+
+ # These are hardcoded in the Apple's Python executable,
+ # but relative to sys.prefix, so we have to fix them up:
+ if sys.platform == 'darwin':
+ hardcoded_paths = [os.path.join(relative_dir, module)
+ for relative_dir in hardcoded_relative_dirs
+ for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+ for path in hardcoded_paths:
+ if os.path.exists(path):
+ paths.append(path)
+
+ sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+ """
+ Force easy_installed eggs in the global environment to get placed
+ in sys.path after all packages inside the virtualenv. This
+ maintains the "least surprise" result that packages in the
+ virtualenv always mask global packages, never the other way
+ around.
+
+ """
+ egginsert = getattr(sys, '__egginsert', 0)
+ for i, path in enumerate(sys.path):
+ if i > egginsert and path.startswith(sys.prefix):
+ egginsert = i
+ sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+ force_global_eggs_after_local_site_packages()
+ return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+ """Adjust the special classpath sys.path entries for Jython. These
+ entries should follow the base virtualenv lib directories.
+ """
+ paths = []
+ classpaths = []
+ for path in sys.path:
+ if path == '__classpath__' or path.startswith('__pyclasspath__'):
+ classpaths.append(path)
+ else:
+ paths.append(path)
+ sys.path = paths
+ sys.path.extend(classpaths)
+
+def execusercustomize():
+ """Run custom user specific code, if available."""
+ try:
+ import usercustomize
+ except ImportError:
+ pass
+
+
+def main():
+ global ENABLE_USER_SITE
+ virtual_install_main_packages()
+ abs__file__()
+ paths_in_sys = removeduppaths()
+ if (os.name == "posix" and sys.path and
+ os.path.basename(sys.path[-1]) == "Modules"):
+ addbuilddir()
+ if _is_jython:
+ fixclasspath()
+ GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+ if not GLOBAL_SITE_PACKAGES:
+ ENABLE_USER_SITE = False
+ if ENABLE_USER_SITE is None:
+ ENABLE_USER_SITE = check_enableusersite()
+ paths_in_sys = addsitepackages(paths_in_sys)
+ paths_in_sys = addusersitepackages(paths_in_sys)
+ if GLOBAL_SITE_PACKAGES:
+ paths_in_sys = virtual_addsitepackages(paths_in_sys)
+ if sys.platform == 'os2emx':
+ setBEGINLIBPATH()
+ setquit()
+ setcopyright()
+ sethelper()
+ aliasmbcs()
+ setencoding()
+ execsitecustomize()
+ if ENABLE_USER_SITE:
+ execusercustomize()
+ # Remove sys.setdefaultencoding() so that users cannot change the
+ # encoding after initialization. The test for presence is needed when
+ # this module is run as a script, because this code is executed twice.
+ if hasattr(sys, "setdefaultencoding"):
+ del sys.setdefaultencoding
+
+main()
+
+def _script():
+ help = """\
+ %s [--user-base] [--user-site]
+
+ Without arguments print some useful information
+ With arguments print the value of USER_BASE and/or USER_SITE separated
+ by '%s'.
+
+ Exit codes with --user-base or --user-site:
+ 0 - user site directory is enabled
+ 1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+ or for security reasons
+ >2 - unknown error
+ """
+ args = sys.argv[1:]
+ if not args:
+ print("sys.path = [")
+ for dir in sys.path:
+ print(" %r," % (dir,))
+ print("]")
+ def exists(path):
+ if os.path.isdir(path):
+ return "exists"
+ else:
+ return "doesn't exist"
+ print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+ print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))
+ print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
+ sys.exit(0)
+
+ buffer = []
+ if '--user-base' in args:
+ buffer.append(USER_BASE)
+ if '--user-site' in args:
+ buffer.append(USER_SITE)
+
+ if buffer:
+ print(os.pathsep.join(buffer))
+ if ENABLE_USER_SITE:
+ sys.exit(0)
+ elif ENABLE_USER_SITE is False:
+ sys.exit(1)
+ elif ENABLE_USER_SITE is None:
+ sys.exit(2)
+ else:
+ sys.exit(3)
+ else:
+ import textwrap
+ print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+ sys.exit(10)
+
+if __name__ == '__main__':
+ _script()
diff --git a/python/virtualenv/virtualenv_support/__init__.py b/python/virtualenv/virtualenv_support/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/virtualenv/virtualenv_support/__init__.py
diff --git a/python/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl b/python/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..dfef51d44
--- /dev/null
+++ b/python/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl
Binary files differ
diff --git a/python/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl b/python/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl
new file mode 100644
index 000000000..cc49227a0
--- /dev/null
+++ b/python/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl
Binary files differ
diff --git a/python/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl b/python/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..02c8ce873
--- /dev/null
+++ b/python/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl
Binary files differ
diff --git a/python/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl b/python/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..506d5e520
--- /dev/null
+++ b/python/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl
Binary files differ
diff --git a/python/voluptuous/COPYING b/python/voluptuous/COPYING
new file mode 100644
index 000000000..a19b7057f
--- /dev/null
+++ b/python/voluptuous/COPYING
@@ -0,0 +1,25 @@
+Copyright (c) 2010, Alec Thomas
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ - Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ - Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ - Neither the name of SwapOff.org nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/python/voluptuous/MANIFEST.in b/python/voluptuous/MANIFEST.in
new file mode 100644
index 000000000..f03451d5a
--- /dev/null
+++ b/python/voluptuous/MANIFEST.in
@@ -0,0 +1,2 @@
+include *.md
+include COPYING
diff --git a/python/voluptuous/PKG-INFO b/python/voluptuous/PKG-INFO
new file mode 100644
index 000000000..1a9e9551a
--- /dev/null
+++ b/python/voluptuous/PKG-INFO
@@ -0,0 +1,611 @@
+Metadata-Version: 1.1
+Name: voluptuous
+Version: 0.8.11
+Summary: Voluptuous is a Python data validation library
+Home-page: https://github.com/alecthomas/voluptuous
+Author: Alec Thomas
+Author-email: alec@swapoff.org
+License: BSD
+Download-URL: https://pypi.python.org/pypi/voluptuous
+Description: Voluptuous is a Python data validation library
+ ==============================================
+
+ |Build Status| |Stories in Ready|
+
+ Voluptuous, *despite* the name, is a Python data validation library. It
+ is primarily intended for validating data coming into Python as JSON,
+ YAML, etc.
+
+ It has three goals:
+
+ 1. Simplicity.
+ 2. Support for complex data structures.
+ 3. Provide useful error messages.
+
+ Contact
+ -------
+
+ Voluptuous now has a mailing list! Send a mail to
+    `voluptuous@librelist.com <mailto:voluptuous@librelist.com>`__ to
+    subscribe. Instructions will follow.
+
+ You can also contact me directly via `email <mailto:alec@swapoff.org>`__
+ or `Twitter <https://twitter.com/alecthomas>`__.
+
+ To file a bug, create a `new
+ issue <https://github.com/alecthomas/voluptuous/issues/new>`__ on GitHub
+ with a short example of how to replicate the issue.
+
+ Show me an example
+ ------------------
+
+ Twitter's `user search
+ API <https://dev.twitter.com/docs/api/1/get/users/search>`__ accepts
+ query URLs like:
+
+ ::
+
+    $ curl 'http://api.twitter.com/1/users/search.json?q=python&per_page=20&page=1'
+
+ To validate this we might use a schema like:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Schema
+ >>> schema = Schema({
+ ... 'q': str,
+ ... 'per_page': int,
+ ... 'page': int,
+ ... })
+
+ This schema very succinctly and roughly describes the data required by
+ the API, and will work fine. But it has a few problems. Firstly, it
+ doesn't fully express the constraints of the API. According to the API,
+ ``per_page`` should be restricted to at most 20, defaulting to 5, for
+ example. To describe the semantics of the API more accurately, our
+ schema will need to be more thoroughly defined:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Required, All, Length, Range
+ >>> schema = Schema({
+ ... Required('q'): All(str, Length(min=1)),
+ ... Required('per_page', default=5): All(int, Range(min=1, max=20)),
+ ... 'page': All(int, Range(min=0)),
+ ... })
+
+ This schema fully enforces the interface defined in Twitter's
+ documentation, and goes a little further for completeness.
+
+ "q" is required:
+
+ .. code:: pycon
+
+ >>> from voluptuous import MultipleInvalid, Invalid
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data['q']"
+ True
+
+ ...must be a string:
+
+ .. code:: pycon
+
+ >>> try:
+ ... schema({'q': 123})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected str for dictionary value @ data['q']"
+ True
+
+ ...and must be at least one character in length:
+
+ .. code:: pycon
+
+ >>> try:
+ ... schema({'q': ''})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "length of value must be at least 1 for dictionary value @ data['q']"
+ True
+ >>> schema({'q': '#topic'}) == {'q': '#topic', 'per_page': 5}
+ True
+
+ "per\_page" is a positive integer no greater than 20:
+
+ .. code:: pycon
+
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 900})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at most 20 for dictionary value @ data['per_page']"
+ True
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': -10})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at least 1 for dictionary value @ data['per_page']"
+ True
+
+ "page" is an integer >= 0:
+
+ .. code:: pycon
+
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 'one'})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "expected int for dictionary value @ data['per_page']"
+ >>> schema({'q': '#topic', 'page': 1}) == {'q': '#topic', 'page': 1, 'per_page': 5}
+ True
+
+ Defining schemas
+ ----------------
+
+ Schemas are nested data structures consisting of dictionaries, lists,
+ scalars and *validators*. Each node in the input schema is pattern
+ matched against corresponding nodes in the input data.
+
+ Literals
+ ~~~~~~~~
+
+ Literals in the schema are matched using normal equality checks:
+
+ .. code:: pycon
+
+ >>> schema = Schema(1)
+ >>> schema(1)
+ 1
+ >>> schema = Schema('a string')
+ >>> schema('a string')
+ 'a string'
+
+ Types
+ ~~~~~
+
+ Types in the schema are matched by checking if the corresponding value
+ is an instance of the type:
+
+ .. code:: pycon
+
+ >>> schema = Schema(int)
+ >>> schema(1)
+ 1
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected int"
+ True
+
+    URLs
+    ~~~~
+
+    URLs in the schema are matched using the ``urlparse`` library.
+
+ .. code:: pycon
+
+ >>> from voluptuous import Url
+ >>> schema = Schema(Url())
+ >>> schema('http://w3.org')
+ 'http://w3.org'
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected a URL"
+ True
+
+ Lists
+ ~~~~~
+
+ Lists in the schema are treated as a set of valid values. Each element
+ in the schema list is compared to each value in the input data:
+
+ .. code:: pycon
+
+ >>> schema = Schema([1, 'a', 'string'])
+ >>> schema([1])
+ [1]
+ >>> schema([1, 1, 1])
+ [1, 1, 1]
+ >>> schema(['a', 1, 'string', 1, 'string'])
+ ['a', 1, 'string', 1, 'string']
+
+ Validation functions
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Validators are simple callables that raise an ``Invalid`` exception when
+    they encounter invalid data. The criteria for determining validity are
+ entirely up to the implementation; it may check that a value is a valid
+ username with ``pwd.getpwnam()``, it may check that a value is of a
+ specific type, and so on.
+
+ The simplest kind of validator is a Python function that raises
+ ValueError when its argument is invalid. Conveniently, many builtin
+ Python functions have this property. Here's an example of a date
+ validator:
+
+ .. code:: pycon
+
+ >>> from datetime import datetime
+ >>> def Date(fmt='%Y-%m-%d'):
+ ... return lambda v: datetime.strptime(v, fmt)
+
+ .. code:: pycon
+
+ >>> schema = Schema(Date())
+ >>> schema('2013-03-03')
+ datetime.datetime(2013, 3, 3, 0, 0)
+ >>> try:
+ ... schema('2013-03')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value"
+ True
+
+ In addition to simply determining if a value is valid, validators may
+ mutate the value into a valid form. An example of this is the
+ ``Coerce(type)`` function, which returns a function that coerces its
+ argument to the given type:
+
+ .. code:: python
+
+ def Coerce(type, msg=None):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError, the value will be marked as
+ Invalid.
+ """
+ def f(v):
+ try:
+ return type(v)
+ except ValueError:
+ raise Invalid(msg or ('expected %s' % type.__name__))
+ return f
+
+ This example also shows a common idiom where an optional human-readable
+ message can be provided. This can vastly improve the usefulness of the
+ resulting error messages.
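+
+    As a rough sketch of how that idiom reads in use (assuming the
+    ``Coerce`` definition above is in scope; a failing value raises
+    ``Invalid`` with the custom message):
+
+    .. code:: pycon
+
+    >>> validate = Coerce(int, msg="expected an integer")
+    >>> validate('42')
+    42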
+
+ Dictionaries
+ ~~~~~~~~~~~~
+
+ Each key-value pair in a schema dictionary is validated against each
+ key-value pair in the corresponding data dictionary:
+
+ .. code:: pycon
+
+ >>> schema = Schema({1: 'one', 2: 'two'})
+ >>> schema({1: 'one'})
+ {1: 'one'}
+
+ Extra dictionary keys
+ ^^^^^^^^^^^^^^^^^^^^^
+
+    By default, any additional keys in the data that are not in the schema
+    will trigger exceptions:
+
+ .. code:: pycon
+
+ >>> schema = Schema({2: 3})
+ >>> try:
+ ... schema({1: 2, 2: 3})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[1]"
+ True
+
+ This behaviour can be altered on a per-schema basis. To allow additional
+ keys use ``Schema(..., extra=ALLOW_EXTRA)``:
+
+ .. code:: pycon
+
+ >>> from voluptuous import ALLOW_EXTRA
+ >>> schema = Schema({2: 3}, extra=ALLOW_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {1: 2, 2: 3}
+
+ To remove additional keys use ``Schema(..., extra=REMOVE_EXTRA)``:
+
+ .. code:: pycon
+
+ >>> from voluptuous import REMOVE_EXTRA
+ >>> schema = Schema({2: 3}, extra=REMOVE_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {2: 3}
+
+ It can also be overridden per-dictionary by using the catch-all marker
+ token ``extra`` as a key:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Extra
+ >>> schema = Schema({1: {Extra: object}})
+ >>> schema({1: {'foo': 'bar'}})
+ {1: {'foo': 'bar'}}
+
+ Required dictionary keys
+ ^^^^^^^^^^^^^^^^^^^^^^^^
+
+ By default, keys in the schema are not required to be in the data:
+
+ .. code:: pycon
+
+ >>> schema = Schema({1: 2, 3: 4})
+ >>> schema({3: 4})
+ {3: 4}
+
+        Similarly to how extra keys work, this behaviour can be overridden
+ per-schema:
+
+ .. code:: pycon
+
+ >>> schema = Schema({1: 2, 3: 4}, required=True)
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+
+ And per-key, with the marker token ``Required(key)``:
+
+ .. code:: pycon
+
+ >>> schema = Schema({Required(1): 2, 3: 4})
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+
+ Optional dictionary keys
+ ^^^^^^^^^^^^^^^^^^^^^^^^
+
+ If a schema has ``required=True``, keys may be individually marked as
+ optional using the marker token ``Optional(key)``:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Optional
+ >>> schema = Schema({1: 2, Optional(3): 4}, required=True)
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+ >>> try:
+ ... schema({1: 2, 4: 5})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[4]"
+ True
+
+ .. code:: pycon
+
+ >>> schema({1: 2, 3: 4})
+ {1: 2, 3: 4}
+
+ Recursive schema
+ ~~~~~~~~~~~~~~~~
+
+ There is no syntax to have a recursive schema. The best way to do it is
+ to have a wrapper like this:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Schema, Any
+ >>> def s2(v):
+ ... return s1(v)
+ ...
+ >>> s1 = Schema({"key": Any(s2, "value")})
+ >>> s1({"key": {"key": "value"}})
+ {'key': {'key': 'value'}}
+
+ Extending an existing Schema
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+        It is often handy to have a base ``Schema`` that is extended with
+        more requirements. In that case you can use ``Schema.extend`` to
+        create a new ``Schema``:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Schema
+ >>> person = Schema({'name': str})
+ >>> person_with_age = person.extend({'age': int})
+ >>> sorted(list(person_with_age.schema.keys()))
+ ['age', 'name']
+
+ The original ``Schema`` remains unchanged.
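+
+        For example, the base ``person`` schema defined above still has only
+        its original key:
+
+        .. code:: pycon
+
+           >>> sorted(person.schema.keys())
+           ['name']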
+
+ Objects
+ ~~~~~~~
+
+ Each key-value pair in a schema dictionary is validated against each
+ attribute-value pair in the corresponding object:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Object
+ >>> class Structure(object):
+ ... def __init__(self, q=None):
+ ... self.q = q
+ ... def __repr__(self):
+ ... return '<Structure(q={0.q!r})>'.format(self)
+ ...
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> schema(Structure(q='one'))
+ <Structure(q='one')>
+
+ Allow None values
+ ~~~~~~~~~~~~~~~~~
+
+        To allow a value to be ``None`` as well, use ``Any``:
+
+ .. code:: pycon
+
+ >>> from voluptuous import Any
+
+ >>> schema = Schema(Any(None, int))
+ >>> schema(None)
+ >>> schema(5)
+ 5
+
+ Error reporting
+ ---------------
+
+ Validators must throw an ``Invalid`` exception if invalid data is passed
+ to them. All other exceptions are treated as errors in the validator and
+ will not be caught.
+
+ Each ``Invalid`` exception has an associated ``path`` attribute
+ representing the path in the data structure to our currently validating
+ value, as well as an ``error_message`` attribute that contains the
+ message of the original exception. This is especially useful when you
+ want to catch ``Invalid`` exceptions and give some feedback to the user,
+ for instance in the context of an HTTP API.
+
+ .. code:: pycon
+
+ >>> def validate_email(email):
+ ... """Validate email."""
+ ... if not "@" in email:
+ ... raise Invalid("This email is invalid.")
+ ... return email
+ >>> schema = Schema({"email": validate_email})
+ >>> exc = None
+ >>> try:
+ ... schema({"email": "whatever"})
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "This email is invalid. for dictionary value @ data['email']"
+ >>> exc.path
+ ['email']
+ >>> exc.msg
+ 'This email is invalid.'
+ >>> exc.error_message
+ 'This email is invalid.'
+
+ The ``path`` attribute is used during error reporting, but also during
+ matching to determine whether an error should be reported to the user or
+ if the next match should be attempted. This is determined by comparing
+ the depth of the path where the check is, to the depth of the path where
+ the error occurred. If the error is more than one level deeper, it is
+ reported.
+
+ The upshot of this is that *matching is depth-first and fail-fast*.
+
+ To illustrate this, here is an example schema:
+
+ .. code:: pycon
+
+ >>> schema = Schema([[2, 3], 6])
+
+ Each value in the top-level list is matched depth-first in-order. Given
+ input data of ``[[6]]``, the inner list will match the first element of
+ the schema, but the literal ``6`` will not match any of the elements of
+ that list. This error will be reported back to the user immediately. No
+ backtracking is attempted:
+
+ .. code:: pycon
+
+ >>> try:
+ ... schema([[6]])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value @ data[0][0]"
+ True
+
+ If we pass the data ``[6]``, the ``6`` is not a list type and so will
+ not recurse into the first element of the schema. Matching will continue
+ on to the second element in the schema, and succeed:
+
+ .. code:: pycon
+
+ >>> schema([6])
+ [6]
+
+        Running tests
+        -------------
+
+        Voluptuous uses nosetests:
+
+ ::
+
+ $ nosetests
+
+ Why use Voluptuous over another validation library?
+ ---------------------------------------------------
+
+ **Validators are simple callables**
+           No need to subclass anything; just use a function.
+ **Errors are simple exceptions.**
+ A validator can just ``raise Invalid(msg)`` and expect the user to
+ get useful messages.
+ **Schemas are basic Python data structures.**
+ Should your data be a dictionary of integer keys to strings?
+ ``{int: str}`` does what you expect. List of integers, floats or
+ strings? ``[int, float, str]``.
+ **Designed from the ground up for validating more than just forms.**
+ Nested data structures are treated in the same way as any other
+ type. Need a list of dictionaries? ``[{}]``
+ **Consistency.**
+ Types in the schema are checked as types. Values are compared as
+ values. Callables are called to validate. Simple.
+
+ Other libraries and inspirations
+ --------------------------------
+
+ Voluptuous is heavily inspired by
+ `Validino <http://code.google.com/p/validino/>`__, and to a lesser
+ extent, `jsonvalidator <http://code.google.com/p/jsonvalidator/>`__ and
+ `json\_schema <http://blog.sendapatch.se/category/json_schema.html>`__.
+
+ I greatly prefer the light-weight style promoted by these libraries to
+ the complexity of libraries like FormEncode.
+
+ .. |Build Status| image:: https://travis-ci.org/alecthomas/voluptuous.png
+ :target: https://travis-ci.org/alecthomas/voluptuous
+ .. |Stories in Ready| image:: https://badge.waffle.io/alecthomas/voluptuous.png?label=ready&title=Ready
+ :target: https://waffle.io/alecthomas/voluptuous
+
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
diff --git a/python/voluptuous/README.md b/python/voluptuous/README.md
new file mode 100644
index 000000000..fd84c64e7
--- /dev/null
+++ b/python/voluptuous/README.md
@@ -0,0 +1,596 @@
+# Voluptuous is a Python data validation library
+
+[![Build Status](https://travis-ci.org/alecthomas/voluptuous.png)](https://travis-ci.org/alecthomas/voluptuous) [![Stories in Ready](https://badge.waffle.io/alecthomas/voluptuous.png?label=ready&title=Ready)](https://waffle.io/alecthomas/voluptuous)
+
+Voluptuous, *despite* the name, is a Python data validation library. It
+is primarily intended for validating data coming into Python as JSON,
+YAML, etc.
+
+It has three goals:
+
+1. Simplicity.
+2. Support for complex data structures.
+3. Provide useful error messages.
+
+## Contact
+
+Voluptuous now has a mailing list! Send a mail to
+[<voluptuous@librelist.com>](mailto:voluptuous@librelist.com) to subscribe. Instructions
+will follow.
+
+You can also contact me directly via [email](mailto:alec@swapoff.org) or
+[Twitter](https://twitter.com/alecthomas).
+
+To file a bug, create a [new issue](https://github.com/alecthomas/voluptuous/issues/new) on GitHub with a short example of how to replicate the issue.
+
+## Show me an example
+
+Twitter's [user search API](https://dev.twitter.com/docs/api/1/get/users/search) accepts
+query URLs like:
+
+```
+$ curl 'http://api.twitter.com/1/users/search.json?q=python&per_page=20&page=1'
+```
+
+To validate this we might use a schema like:
+
+```pycon
+>>> from voluptuous import Schema
+>>> schema = Schema({
+... 'q': str,
+... 'per_page': int,
+... 'page': int,
+... })
+
+```
+
+This schema very succinctly and roughly describes the data required by
+the API, and will work fine. But it has a few problems. Firstly, it
+doesn't fully express the constraints of the API. According to the API,
+`per_page` should be restricted to at most 20, defaulting to 5, for
+example. To describe the semantics of the API more accurately, our
+schema will need to be more thoroughly defined:
+
+```pycon
+>>> from voluptuous import Required, All, Length, Range
+>>> schema = Schema({
+... Required('q'): All(str, Length(min=1)),
+... Required('per_page', default=5): All(int, Range(min=1, max=20)),
+... 'page': All(int, Range(min=0)),
+... })
+
+```
+
+This schema fully enforces the interface defined in Twitter's
+documentation, and goes a little further for completeness.
+
+"q" is required:
+
+```pycon
+>>> from voluptuous import MultipleInvalid, Invalid
+>>> try:
+... schema({})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data['q']"
+True
+
+```
+
+...must be a string:
+
+```pycon
+>>> try:
+... schema({'q': 123})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected str for dictionary value @ data['q']"
+True
+
+```
+
+...and must be at least one character in length:
+
+```pycon
+>>> try:
+... schema({'q': ''})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "length of value must be at least 1 for dictionary value @ data['q']"
+True
+>>> schema({'q': '#topic'}) == {'q': '#topic', 'per_page': 5}
+True
+
+```
+
+"per\_page" is a positive integer no greater than 20:
+
+```pycon
+>>> try:
+... schema({'q': '#topic', 'per_page': 900})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "value must be at most 20 for dictionary value @ data['per_page']"
+True
+>>> try:
+... schema({'q': '#topic', 'per_page': -10})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "value must be at least 1 for dictionary value @ data['per_page']"
+True
+
+```
+
+"page" is an integer \>= 0:
+
+```pycon
+>>> try:
+... schema({'q': '#topic', 'per_page': 'one'})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc)
+"expected int for dictionary value @ data['per_page']"
+>>> schema({'q': '#topic', 'page': 1}) == {'q': '#topic', 'page': 1, 'per_page': 5}
+True
+
+```
+
+## Defining schemas
+
+Schemas are nested data structures consisting of dictionaries, lists,
+scalars and *validators*. Each node in the input schema is pattern
+matched against corresponding nodes in the input data.
+
+### Literals
+
+Literals in the schema are matched using normal equality checks:
+
+```pycon
+>>> schema = Schema(1)
+>>> schema(1)
+1
+>>> schema = Schema('a string')
+>>> schema('a string')
+'a string'
+
+```
+
+### Types
+
+Types in the schema are matched by checking if the corresponding value
+is an instance of the type:
+
+```pycon
+>>> schema = Schema(int)
+>>> schema(1)
+1
+>>> try:
+... schema('one')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected int"
+True
+
+```
+
+### URLs
+
+URLs in the schema are matched using the `urlparse` library.
+
+```pycon
+>>> from voluptuous import Url
+>>> schema = Schema(Url())
+>>> schema('http://w3.org')
+'http://w3.org'
+>>> try:
+... schema('one')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "expected a URL"
+True
+
+```
+
+### Lists
+
+Lists in the schema are treated as a set of valid values. Each element
+in the schema list is compared to each value in the input data:
+
+```pycon
+>>> schema = Schema([1, 'a', 'string'])
+>>> schema([1])
+[1]
+>>> schema([1, 1, 1])
+[1, 1, 1]
+>>> schema(['a', 1, 'string', 1, 'string'])
+['a', 1, 'string', 1, 'string']
+
+```
+
+### Validation functions
+
+Validators are simple callables that raise an `Invalid` exception when
+they encounter invalid data. The criteria for determining validity are
+entirely up to the implementation; it may check that a value is a valid
+username with `pwd.getpwnam()`, it may check that a value is of a
+specific type, and so on.
+
+The simplest kind of validator is a Python function that raises
+ValueError when its argument is invalid. Conveniently, many builtin
+Python functions have this property. Here's an example of a date
+validator:
+
+```pycon
+>>> from datetime import datetime
+>>> def Date(fmt='%Y-%m-%d'):
+... return lambda v: datetime.strptime(v, fmt)
+
+```
+
+```pycon
+>>> schema = Schema(Date())
+>>> schema('2013-03-03')
+datetime.datetime(2013, 3, 3, 0, 0)
+>>> try:
+... schema('2013-03')
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value"
+True
+
+```
+
+In addition to simply determining if a value is valid, validators may
+mutate the value into a valid form. An example of this is the
+`Coerce(type)` function, which returns a function that coerces its
+argument to the given type:
+
+```python
+def Coerce(type, msg=None):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError, the value will be marked as
+ Invalid.
+ """
+ def f(v):
+ try:
+ return type(v)
+ except ValueError:
+ raise Invalid(msg or ('expected %s' % type.__name__))
+ return f
+
+```
+
+This example also shows a common idiom where an optional human-readable
+message can be provided. This can vastly improve the usefulness of the
+resulting error messages.
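+
+As a usage sketch (`Coerce` is importable from voluptuous; the custom
+message below is illustrative):
+
+```pycon
+>>> from voluptuous import Coerce
+>>> schema = Schema(Coerce(int, msg='expected an integer'))
+>>> schema('1')
+1
+
+```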
+
+### Dictionaries
+
+Each key-value pair in a schema dictionary is validated against each
+key-value pair in the corresponding data dictionary:
+
+```pycon
+>>> schema = Schema({1: 'one', 2: 'two'})
+>>> schema({1: 'one'})
+{1: 'one'}
+
+```
+
+#### Extra dictionary keys
+
+By default, any additional keys in the data that are not in the schema
+will trigger exceptions:
+
+```pycon
+>>> schema = Schema({2: 3})
+>>> try:
+... schema({1: 2, 2: 3})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "extra keys not allowed @ data[1]"
+True
+
+```
+
+This behaviour can be altered on a per-schema basis. To allow
+additional keys use
+`Schema(..., extra=ALLOW_EXTRA)`:
+
+```pycon
+>>> from voluptuous import ALLOW_EXTRA
+>>> schema = Schema({2: 3}, extra=ALLOW_EXTRA)
+>>> schema({1: 2, 2: 3})
+{1: 2, 2: 3}
+
+```
+
+To remove additional keys use
+`Schema(..., extra=REMOVE_EXTRA)`:
+
+```pycon
+>>> from voluptuous import REMOVE_EXTRA
+>>> schema = Schema({2: 3}, extra=REMOVE_EXTRA)
+>>> schema({1: 2, 2: 3})
+{2: 3}
+
+```
+
+It can also be overridden per-dictionary by using the catch-all marker
+token `extra` as a key:
+
+```pycon
+>>> from voluptuous import Extra
+>>> schema = Schema({1: {Extra: object}})
+>>> schema({1: {'foo': 'bar'}})
+{1: {'foo': 'bar'}}
+
+```
+
+#### Required dictionary keys
+
+By default, keys in the schema are not required to be in the data:
+
+```pycon
+>>> schema = Schema({1: 2, 3: 4})
+>>> schema({3: 4})
+{3: 4}
+
+```
+
+Similarly to how extra keys work, this behaviour can be overridden
+per-schema:
+
+```pycon
+>>> schema = Schema({1: 2, 3: 4}, required=True)
+>>> try:
+... schema({3: 4})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+
+```
+
+And per-key, with the marker token `Required(key)`:
+
+```pycon
+>>> schema = Schema({Required(1): 2, 3: 4})
+>>> try:
+... schema({3: 4})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+>>> schema({1: 2})
+{1: 2}
+
+```
+
+#### Optional dictionary keys
+
+If a schema has `required=True`, keys may be individually marked as
+optional using the marker token `Optional(key)`:
+
+```pycon
+>>> from voluptuous import Optional
+>>> schema = Schema({1: 2, Optional(3): 4}, required=True)
+>>> try:
+... schema({})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "required key not provided @ data[1]"
+True
+>>> schema({1: 2})
+{1: 2}
+>>> try:
+... schema({1: 2, 4: 5})
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "extra keys not allowed @ data[4]"
+True
+
+```
+
+```pycon
+>>> schema({1: 2, 3: 4})
+{1: 2, 3: 4}
+
+```
+
+### Recursive schema
+
+There is no syntax to have a recursive schema. The best way to do it is to have a wrapper like this:
+
+```pycon
+>>> from voluptuous import Schema, Any
+>>> def s2(v):
+... return s1(v)
+...
+>>> s1 = Schema({"key": Any(s2, "value")})
+>>> s1({"key": {"key": "value"}})
+{'key': {'key': 'value'}}
+
+```
+
+### Extending an existing Schema
+
+It is often handy to have a base `Schema` that is extended with more
+requirements. In that case you can use `Schema.extend` to create a new
+`Schema`:
+
+```pycon
+>>> from voluptuous import Schema
+>>> person = Schema({'name': str})
+>>> person_with_age = person.extend({'age': int})
+>>> sorted(list(person_with_age.schema.keys()))
+['age', 'name']
+
+```
+
+The original `Schema` remains unchanged.
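+
+For example, the base `person` schema defined above still has only its
+original key:
+
+```pycon
+>>> sorted(person.schema.keys())
+['name']
+
+```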
+
+### Objects
+
+Each key-value pair in a schema dictionary is validated against each
+attribute-value pair in the corresponding object:
+
+```pycon
+>>> from voluptuous import Object
+>>> class Structure(object):
+... def __init__(self, q=None):
+... self.q = q
+... def __repr__(self):
+... return '<Structure(q={0.q!r})>'.format(self)
+...
+>>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+>>> schema(Structure(q='one'))
+<Structure(q='one')>
+
+```
+
+### Allow None values
+
+To allow a value to be `None` as well, use `Any`:
+
+```pycon
+>>> from voluptuous import Any
+
+>>> schema = Schema(Any(None, int))
+>>> schema(None)
+>>> schema(5)
+5
+
+```
+
+## Error reporting
+
+Validators must throw an `Invalid` exception if invalid data is passed
+to them. All other exceptions are treated as errors in the validator and
+will not be caught.
+
+Each `Invalid` exception has an associated `path` attribute representing
+the path in the data structure to our currently validating value, as well
+as an `error_message` attribute that contains the message of the original
+exception. This is especially useful when you want to catch `Invalid`
+exceptions and give some feedback to the user, for instance in the context of
+an HTTP API.
+
+
+```pycon
+>>> def validate_email(email):
+... """Validate email."""
+... if not "@" in email:
+... raise Invalid("This email is invalid.")
+... return email
+>>> schema = Schema({"email": validate_email})
+>>> exc = None
+>>> try:
+... schema({"email": "whatever"})
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc)
+"This email is invalid. for dictionary value @ data['email']"
+>>> exc.path
+['email']
+>>> exc.msg
+'This email is invalid.'
+>>> exc.error_message
+'This email is invalid.'
+
+```
+
+The `path` attribute is used during error reporting, but also during matching
+to determine whether an error should be reported to the user or if the next
+match should be attempted. This is determined by comparing the depth of the
+path where the check is, to the depth of the path where the error occurred. If
+the error is more than one level deeper, it is reported.
+
+The upshot of this is that *matching is depth-first and fail-fast*.
+
+To illustrate this, here is an example schema:
+
+```pycon
+>>> schema = Schema([[2, 3], 6])
+
+```
+
+Each value in the top-level list is matched depth-first in-order. Given
+input data of `[[6]]`, the inner list will match the first element of
+the schema, but the literal `6` will not match any of the elements of
+that list. This error will be reported back to the user immediately. No
+backtracking is attempted:
+
+```pycon
+>>> try:
+... schema([[6]])
+... raise AssertionError('MultipleInvalid not raised')
+... except MultipleInvalid as e:
+... exc = e
+>>> str(exc) == "not a valid value @ data[0][0]"
+True
+
+```
+
+If we pass the data `[6]`, the `6` is not a list type and so will not
+recurse into the first element of the schema. Matching will continue on
+to the second element in the schema, and succeed:
+
+```pycon
+>>> schema([6])
+[6]
+
+```
+
+## Running tests
+
+Voluptuous uses nosetests:
+
+ $ nosetests
+
+
+## Why use Voluptuous over another validation library?
+
+**Validators are simple callables**
+: No need to subclass anything; just use a function.
+
+**Errors are simple exceptions.**
+: A validator can just `raise Invalid(msg)` and expect the user to get
+useful messages.
+
+**Schemas are basic Python data structures.**
+: Should your data be a dictionary of integer keys to strings?
+`{int: str}` does what you expect. List of integers, floats or
+strings? `[int, float, str]`.
+
+**Designed from the ground up for validating more than just forms.**
+: Nested data structures are treated in the same way as any other
+type. Need a list of dictionaries? `[{}]`
+
+**Consistency.**
+: Types in the schema are checked as types. Values are compared as
+values. Callables are called to validate. Simple.
+
+## Other libraries and inspirations
+
+Voluptuous is heavily inspired by
+[Validino](http://code.google.com/p/validino/), and to a lesser extent,
+[jsonvalidator](http://code.google.com/p/jsonvalidator/) and
+[json\_schema](http://blog.sendapatch.se/category/json_schema.html).
+
+I greatly prefer the light-weight style promoted by these libraries to
+the complexity of libraries like FormEncode.
diff --git a/python/voluptuous/README.rst b/python/voluptuous/README.rst
new file mode 100644
index 000000000..aa68a7cb4
--- /dev/null
+++ b/python/voluptuous/README.rst
@@ -0,0 +1,589 @@
+Voluptuous is a Python data validation library
+==============================================
+
+|Build Status| |Stories in Ready|
+
+Voluptuous, *despite* the name, is a Python data validation library. It
+is primarily intended for validating data coming into Python as JSON,
+YAML, etc.
+
+It has three goals:
+
+1. Simplicity.
+2. Support for complex data structures.
+3. Provide useful error messages.
+
+Contact
+-------
+
+Voluptuous now has a mailing list! Send a mail to
+`voluptuous@librelist.com <mailto:voluptuous@librelist.com>`__ to
+subscribe. Instructions will follow.
+
+You can also contact me directly via `email <mailto:alec@swapoff.org>`__
+or `Twitter <https://twitter.com/alecthomas>`__.
+
+To file a bug, create a `new
+issue <https://github.com/alecthomas/voluptuous/issues/new>`__ on GitHub
+with a short example of how to replicate the issue.
+
+Show me an example
+------------------
+
+Twitter's `user search
+API <https://dev.twitter.com/docs/api/1/get/users/search>`__ accepts
+query URLs like:
+
+::
+
+   $ curl 'http://api.twitter.com/1/users/search.json?q=python&per_page=20&page=1'
+
+To validate this we might use a schema like:
+
+.. code:: pycon
+
+ >>> from voluptuous import Schema
+ >>> schema = Schema({
+ ... 'q': str,
+ ... 'per_page': int,
+ ... 'page': int,
+ ... })
+
+This schema very succinctly and roughly describes the data required by
+the API, and will work fine. But it has a few problems. Firstly, it
+doesn't fully express the constraints of the API. According to the API,
+``per_page`` should be restricted to at most 20, defaulting to 5, for
+example. To describe the semantics of the API more accurately, our
+schema will need to be more thoroughly defined:
+
+.. code:: pycon
+
+ >>> from voluptuous import Required, All, Length, Range
+ >>> schema = Schema({
+ ... Required('q'): All(str, Length(min=1)),
+ ... Required('per_page', default=5): All(int, Range(min=1, max=20)),
+ ... 'page': All(int, Range(min=0)),
+ ... })
+
+This schema fully enforces the interface defined in Twitter's
+documentation, and goes a little further for completeness.
+
+"q" is required:
+
+.. code:: pycon
+
+ >>> from voluptuous import MultipleInvalid, Invalid
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data['q']"
+ True
+
+...must be a string:
+
+.. code:: pycon
+
+ >>> try:
+ ... schema({'q': 123})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected str for dictionary value @ data['q']"
+ True
+
+...and must be at least one character in length:
+
+.. code:: pycon
+
+ >>> try:
+ ... schema({'q': ''})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "length of value must be at least 1 for dictionary value @ data['q']"
+ True
+ >>> schema({'q': '#topic'}) == {'q': '#topic', 'per_page': 5}
+ True
+
+"per\_page" is a positive integer no greater than 20:
+
+.. code:: pycon
+
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 900})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at most 20 for dictionary value @ data['per_page']"
+ True
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': -10})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "value must be at least 1 for dictionary value @ data['per_page']"
+ True
+
+"page" is an integer >= 0:
+
+.. code:: pycon
+
+ >>> try:
+ ... schema({'q': '#topic', 'per_page': 'one'})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "expected int for dictionary value @ data['per_page']"
+ >>> schema({'q': '#topic', 'page': 1}) == {'q': '#topic', 'page': 1, 'per_page': 5}
+ True
+
+Defining schemas
+----------------
+
+Schemas are nested data structures consisting of dictionaries, lists,
+scalars and *validators*. Each node in the input schema is pattern
+matched against corresponding nodes in the input data.
+
+Literals
+~~~~~~~~
+
+Literals in the schema are matched using normal equality checks:
+
+.. code:: pycon
+
+ >>> schema = Schema(1)
+ >>> schema(1)
+ 1
+ >>> schema = Schema('a string')
+ >>> schema('a string')
+ 'a string'
+
+Types
+~~~~~
+
+Types in the schema are matched by checking if the corresponding value
+is an instance of the type:
+
+.. code:: pycon
+
+ >>> schema = Schema(int)
+ >>> schema(1)
+ 1
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected int"
+ True
+
+URLs
+~~~~
+
+URLs in the schema are matched using the ``urlparse`` library.
+
+.. code:: pycon
+
+ >>> from voluptuous import Url
+ >>> schema = Schema(Url())
+ >>> schema('http://w3.org')
+ 'http://w3.org'
+ >>> try:
+ ... schema('one')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "expected a URL"
+ True
+
+Lists
+~~~~~
+
+Lists in the schema are treated as a set of valid values. Each element
+in the schema list is compared to each value in the input data:
+
+.. code:: pycon
+
+ >>> schema = Schema([1, 'a', 'string'])
+ >>> schema([1])
+ [1]
+ >>> schema([1, 1, 1])
+ [1, 1, 1]
+ >>> schema(['a', 1, 'string', 1, 'string'])
+ ['a', 1, 'string', 1, 'string']
+
+Validation functions
+~~~~~~~~~~~~~~~~~~~~
+
+Validators are simple callables that raise an ``Invalid`` exception when
+they encounter invalid data. The criteria for determining validity are
+entirely up to the implementation; it may check that a value is a valid
+username with ``pwd.getpwnam()``, it may check that a value is of a
+specific type, and so on.
+
+The simplest kind of validator is a Python function that raises
+ValueError when its argument is invalid. Conveniently, many builtin
+Python functions have this property. Here's an example of a date
+validator:
+
+.. code:: pycon
+
+ >>> from datetime import datetime
+ >>> def Date(fmt='%Y-%m-%d'):
+ ... return lambda v: datetime.strptime(v, fmt)
+
+.. code:: pycon
+
+ >>> schema = Schema(Date())
+ >>> schema('2013-03-03')
+ datetime.datetime(2013, 3, 3, 0, 0)
+ >>> try:
+ ... schema('2013-03')
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value"
+ True
+
+In addition to simply determining if a value is valid, validators may
+mutate the value into a valid form. An example of this is the
+``Coerce(type)`` function, which returns a function that coerces its
+argument to the given type:
+
+.. code:: python
+
+ def Coerce(type, msg=None):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError, the value will be marked as
+ Invalid.
+ """
+ def f(v):
+ try:
+ return type(v)
+ except ValueError:
+ raise Invalid(msg or ('expected %s' % type.__name__))
+ return f
+
+This example also shows a common idiom where an optional human-readable
+message can be provided. This can vastly improve the usefulness of the
+resulting error messages.
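+
+As a usage sketch (``Coerce`` is importable from voluptuous; the custom
+message below is illustrative):
+
+.. code:: pycon
+
+   >>> from voluptuous import Coerce
+   >>> schema = Schema(Coerce(int, msg='expected an integer'))
+   >>> schema('1')
+   1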
+
+Dictionaries
+~~~~~~~~~~~~
+
+Each key-value pair in a schema dictionary is validated against each
+key-value pair in the corresponding data dictionary:
+
+.. code:: pycon
+
+ >>> schema = Schema({1: 'one', 2: 'two'})
+ >>> schema({1: 'one'})
+ {1: 'one'}
+
+Extra dictionary keys
+^^^^^^^^^^^^^^^^^^^^^
+
+By default, any additional keys in the data that are not in the schema
+will trigger exceptions:
+
+.. code:: pycon
+
+ >>> schema = Schema({2: 3})
+ >>> try:
+ ... schema({1: 2, 2: 3})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[1]"
+ True
+
+This behaviour can be altered on a per-schema basis. To allow additional
+keys use ``Schema(..., extra=ALLOW_EXTRA)``:
+
+.. code:: pycon
+
+ >>> from voluptuous import ALLOW_EXTRA
+ >>> schema = Schema({2: 3}, extra=ALLOW_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {1: 2, 2: 3}
+
+To remove additional keys use ``Schema(..., extra=REMOVE_EXTRA)``:
+
+.. code:: pycon
+
+ >>> from voluptuous import REMOVE_EXTRA
+ >>> schema = Schema({2: 3}, extra=REMOVE_EXTRA)
+ >>> schema({1: 2, 2: 3})
+ {2: 3}
+
+It can also be overridden per-dictionary by using the catch-all marker
+token ``extra`` as a key:
+
+.. code:: pycon
+
+ >>> from voluptuous import Extra
+ >>> schema = Schema({1: {Extra: object}})
+ >>> schema({1: {'foo': 'bar'}})
+ {1: {'foo': 'bar'}}
+
+Required dictionary keys
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+By default, keys in the schema are not required to be in the data:
+
+.. code:: pycon
+
+ >>> schema = Schema({1: 2, 3: 4})
+ >>> schema({3: 4})
+ {3: 4}
+
+Similarly to how extra keys work, this behaviour can be overridden
+per-schema:
+
+.. code:: pycon
+
+ >>> schema = Schema({1: 2, 3: 4}, required=True)
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+
+And per-key, with the marker token ``Required(key)``:
+
+.. code:: pycon
+
+ >>> schema = Schema({Required(1): 2, 3: 4})
+ >>> try:
+ ... schema({3: 4})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+
+Optional dictionary keys
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+If a schema has ``required=True``, keys may be individually marked as
+optional using the marker token ``Optional(key)``:
+
+.. code:: pycon
+
+ >>> from voluptuous import Optional
+ >>> schema = Schema({1: 2, Optional(3): 4}, required=True)
+ >>> try:
+ ... schema({})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "required key not provided @ data[1]"
+ True
+ >>> schema({1: 2})
+ {1: 2}
+ >>> try:
+ ... schema({1: 2, 4: 5})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "extra keys not allowed @ data[4]"
+ True
+
+.. code:: pycon
+
+ >>> schema({1: 2, 3: 4})
+ {1: 2, 3: 4}
+
+Recursive schema
+~~~~~~~~~~~~~~~~
+
+There is no syntax to have a recursive schema. The best way to do it is
+to have a wrapper like this:
+
+.. code:: pycon
+
+ >>> from voluptuous import Schema, Any
+ >>> def s2(v):
+ ... return s1(v)
+ ...
+ >>> s1 = Schema({"key": Any(s2, "value")})
+ >>> s1({"key": {"key": "value"}})
+ {'key': {'key': 'value'}}
+
+Extending an existing Schema
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It is often handy to have a base ``Schema`` that is extended with
+more requirements. In that case you can use ``Schema.extend`` to create
+a new ``Schema``:
+
+.. code:: pycon
+
+ >>> from voluptuous import Schema
+ >>> person = Schema({'name': str})
+ >>> person_with_age = person.extend({'age': int})
+ >>> sorted(list(person_with_age.schema.keys()))
+ ['age', 'name']
+
+The original ``Schema`` remains unchanged.
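+
+For example, the base ``person`` schema defined above still has only its
+original key:
+
+.. code:: pycon
+
+   >>> sorted(person.schema.keys())
+   ['name']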
+
+Objects
+~~~~~~~
+
+Each key-value pair in a schema dictionary is validated against each
+attribute-value pair in the corresponding object:
+
+.. code:: pycon
+
+ >>> from voluptuous import Object
+ >>> class Structure(object):
+ ... def __init__(self, q=None):
+ ... self.q = q
+ ... def __repr__(self):
+ ... return '<Structure(q={0.q!r})>'.format(self)
+ ...
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> schema(Structure(q='one'))
+ <Structure(q='one')>
+
+Allow None values
+~~~~~~~~~~~~~~~~~
+
+To allow a value to be ``None`` as well, use ``Any``:
+
+.. code:: pycon
+
+ >>> from voluptuous import Any
+
+ >>> schema = Schema(Any(None, int))
+ >>> schema(None)
+ >>> schema(5)
+ 5
+
+Error reporting
+---------------
+
+Validators must throw an ``Invalid`` exception if invalid data is passed
+to them. All other exceptions are treated as errors in the validator and
+will not be caught.
+
+Each ``Invalid`` exception has an associated ``path`` attribute
+representing the path in the data structure to our currently validating
+value, as well as an ``error_message`` attribute that contains the
+message of the original exception. This is especially useful when you
+want to catch ``Invalid`` exceptions and give some feedback to the user,
+for instance in the context of an HTTP API.
+
+.. code:: pycon
+
+ >>> def validate_email(email):
+ ... """Validate email."""
+ ... if not "@" in email:
+ ... raise Invalid("This email is invalid.")
+ ... return email
+ >>> schema = Schema({"email": validate_email})
+ >>> exc = None
+ >>> try:
+ ... schema({"email": "whatever"})
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "This email is invalid. for dictionary value @ data['email']"
+ >>> exc.path
+ ['email']
+ >>> exc.msg
+ 'This email is invalid.'
+ >>> exc.error_message
+ 'This email is invalid.'
+
+The ``path`` attribute is used during error reporting, but also during
+matching to determine whether an error should be reported to the user or
+if the next match should be attempted. This is determined by comparing
+the depth of the path where the check is, to the depth of the path where
+the error occurred. If the error is more than one level deeper, it is
+reported.
+
+The upshot of this is that *matching is depth-first and fail-fast*.
+
+To illustrate this, here is an example schema:
+
+.. code:: pycon
+
+ >>> schema = Schema([[2, 3], 6])
+
+Each value in the top-level list is matched depth-first in-order. Given
+input data of ``[[6]]``, the inner list will match the first element of
+the schema, but the literal ``6`` will not match any of the elements of
+that list. This error will be reported back to the user immediately. No
+backtracking is attempted:
+
+.. code:: pycon
+
+ >>> try:
+ ... schema([[6]])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == "not a valid value @ data[0][0]"
+ True
+
+If we pass the data ``[6]``, the ``6`` is not a list type and so will
+not recurse into the first element of the schema. Matching will continue
+on to the second element in the schema, and succeed:
+
+.. code:: pycon
+
+ >>> schema([6])
+ [6]
+
+Running tests
+-------------
+
+Voluptuous uses nosetests:
+
+::
+
+ $ nosetests
+
+Why use Voluptuous over another validation library?
+---------------------------------------------------
+
+**Validators are simple callables**
+   No need to subclass anything; just use a function.
+**Errors are simple exceptions.**
+ A validator can just ``raise Invalid(msg)`` and expect the user to
+ get useful messages.
+**Schemas are basic Python data structures.**
+ Should your data be a dictionary of integer keys to strings?
+ ``{int: str}`` does what you expect. List of integers, floats or
+ strings? ``[int, float, str]``.
+**Designed from the ground up for validating more than just forms.**
+ Nested data structures are treated in the same way as any other
+ type. Need a list of dictionaries? ``[{}]``
+**Consistency.**
+ Types in the schema are checked as types. Values are compared as
+ values. Callables are called to validate. Simple.
+
+Other libraries and inspirations
+--------------------------------
+
+Voluptuous is heavily inspired by
+`Validino <http://code.google.com/p/validino/>`__, and to a lesser
+extent, `jsonvalidator <http://code.google.com/p/jsonvalidator/>`__ and
+`json\_schema <http://blog.sendapatch.se/category/json_schema.html>`__.
+
+I greatly prefer the light-weight style promoted by these libraries to
+the complexity of libraries like FormEncode.
+
+.. |Build Status| image:: https://travis-ci.org/alecthomas/voluptuous.png
+ :target: https://travis-ci.org/alecthomas/voluptuous
+.. |Stories in Ready| image:: https://badge.waffle.io/alecthomas/voluptuous.png?label=ready&title=Ready
+ :target: https://waffle.io/alecthomas/voluptuous
diff --git a/python/voluptuous/setup.cfg b/python/voluptuous/setup.cfg
new file mode 100644
index 000000000..f5b3b4dec
--- /dev/null
+++ b/python/voluptuous/setup.cfg
@@ -0,0 +1,10 @@
+[nosetests]
+doctest-extension = md
+with-doctest = 1
+where = .
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/python/voluptuous/setup.py b/python/voluptuous/setup.py
new file mode 100644
index 000000000..2fc07251d
--- /dev/null
+++ b/python/voluptuous/setup.py
@@ -0,0 +1,54 @@
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+import sys
+import os
+import atexit
+sys.path.insert(0, '.')
+version = __import__('voluptuous').__version__
+
+try:
+ import pypandoc
+ long_description = pypandoc.convert('README.md', 'rst')
+ with open('README.rst', 'w') as f:
+ f.write(long_description)
+ atexit.register(lambda: os.unlink('README.rst'))
+except (ImportError, OSError):
+ print('WARNING: Could not locate pandoc, using Markdown long_description.')
+ with open('README.md') as f:
+ long_description = f.read()
+
+description = long_description.splitlines()[0].strip()
+
+
+setup(
+ name='voluptuous',
+ url='https://github.com/alecthomas/voluptuous',
+ download_url='https://pypi.python.org/pypi/voluptuous',
+ version=version,
+ description=description,
+ long_description=long_description,
+ license='BSD',
+ platforms=['any'],
+ py_modules=['voluptuous'],
+ author='Alec Thomas',
+ author_email='alec@swapoff.org',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ ],
+ install_requires=[
+ 'setuptools >= 0.6b1',
+ ],
+)
diff --git a/python/voluptuous/tests.md b/python/voluptuous/tests.md
new file mode 100644
index 000000000..18f6fbafa
--- /dev/null
+++ b/python/voluptuous/tests.md
@@ -0,0 +1,268 @@
+Error reporting should be accurate:
+
+ >>> from voluptuous import *
+ >>> schema = Schema(['one', {'two': 'three', 'four': ['five'],
+ ... 'six': {'seven': 'eight'}}])
+ >>> schema(['one'])
+ ['one']
+ >>> schema([{'two': 'three'}])
+ [{'two': 'three'}]
+
+It should show the exact index and container type, in this case a list
+value:
+
+ >>> try:
+ ... schema(['one', 'two'])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc) == 'expected a dictionary @ data[1]'
+ True
+
+It should also be accurate for nested values:
+
+ >>> try:
+ ... schema([{'two': 'nine'}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value for dictionary value @ data[0]['two']"
+
+ >>> try:
+ ... schema([{'four': ['nine']}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value @ data[0]['four'][0]"
+
+ >>> try:
+ ... schema([{'six': {'seven': 'nine'}}])
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "not a valid value for dictionary value @ data[0]['six']['seven']"
+
+Errors should be reported depth-first:
+
+ >>> validate = Schema({'one': {'two': 'three', 'four': 'five'}})
+ >>> try:
+ ... validate({'one': {'four': 'six'}})
+ ... except Invalid as e:
+ ... print(e)
+ ... print(e.path)
+ not a valid value for dictionary value @ data['one']['four']
+ ['one', 'four']
+
+Voluptuous supports validation when extra fields are present in the
+data:
+
+ >>> schema = Schema({'one': 1, Extra: object})
+ >>> schema({'two': 'two', 'one': 1}) == {'two': 'two', 'one': 1}
+ True
+ >>> schema = Schema({'one': 1})
+ >>> try:
+ ... schema({'two': 2})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "extra keys not allowed @ data['two']"
+
+dict, list, and tuple should be available as type validators:
+
+ >>> Schema(dict)({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
+ True
+ >>> Schema(list)([1,2,3])
+ [1, 2, 3]
+ >>> Schema(tuple)((1,2,3))
+ (1, 2, 3)
+
+Validation should return instances of the right types when the types are
+subclasses of dict or list:
+
+ >>> class Dict(dict):
+ ... pass
+ >>>
+ >>> d = Schema(dict)(Dict(a=1, b=2))
+ >>> d == {'a': 1, 'b': 2}
+ True
+ >>> type(d) is Dict
+ True
+ >>> class List(list):
+ ... pass
+ >>>
+ >>> l = Schema(list)(List([1,2,3]))
+ >>> l
+ [1, 2, 3]
+ >>> type(l) is List
+ True
+
+Multiple errors are reported:
+
+ >>> schema = Schema({'one': 1, 'two': 2})
+ >>> try:
+ ... schema({'one': 2, 'two': 3, 'three': 4})
+ ... except MultipleInvalid as e:
+ ... errors = sorted(e.errors, key=lambda k: str(k))
+ ... print([str(i) for i in errors]) # doctest: +NORMALIZE_WHITESPACE
+ ["extra keys not allowed @ data['three']",
+ "not a valid value for dictionary value @ data['one']",
+ "not a valid value for dictionary value @ data['two']"]
+ >>> schema = Schema([[1], [2], [3]])
+ >>> try:
+ ... schema([1, 2, 3])
+ ... except MultipleInvalid as e:
+ ... print([str(i) for i in e.errors]) # doctest: +NORMALIZE_WHITESPACE
+ ['expected a list @ data[0]',
+ 'expected a list @ data[1]',
+ 'expected a list @ data[2]']
+
+Required fields in a dictionary which are invalid should not report a
+'required' error:
+
+ >>> from voluptuous import *
+ >>> schema = Schema({'one': {'two': 3}}, required=True)
+ >>> try:
+ ... schema({'one': {'two': 2}})
+ ... except MultipleInvalid as e:
+ ... errors = e.errors
+ >>> 'required' in ' '.join([x.msg for x in errors])
+ False
+
+Multiple errors for nested fields in dicts and objects:
+
+    >>> from collections import namedtuple
+    >>> validate = Schema({
+    ...     'anobject': Object({
+    ...         'strfield': str,
+    ...         'intfield': int
+    ...     })
+    ... })
+    >>> try:
+    ...     SomeObj = namedtuple('SomeObj', ('strfield', 'intfield'))
+    ...     validate({'anobject': SomeObj(strfield=123, intfield='one')})
+    ... except MultipleInvalid as e:
+    ...     print(sorted(str(i) for i in e.errors))  # doctest: +NORMALIZE_WHITESPACE
+    ["expected int for object value @ data['anobject']['intfield']",
+     "expected str for object value @ data['anobject']['strfield']"]
+
+Custom classes validate as schemas:
+
+ >>> class Thing(object):
+ ... pass
+ >>> schema = Schema(Thing)
+ >>> t = schema(Thing())
+ >>> type(t) is Thing
+ True
+
+Classes with custom metaclasses should validate as schemas:
+
+ >>> class MyMeta(type):
+ ... pass
+ >>> class Thing(object):
+ ... __metaclass__ = MyMeta
+ >>> schema = Schema(Thing)
+ >>> t = schema(Thing())
+ >>> type(t) is Thing
+ True
+
+Schemas built with All() should give the same error as the original
+validator (Issue #26):
+
+ >>> schema = Schema({
+ ... Required('items'): All([{
+ ... Required('foo'): str
+ ... }])
+ ... })
+
+ >>> try:
+ ... schema({'items': [{}]})
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "required key not provided @ data['items'][0]['foo']"
+
+The validator should return an instance of the same type when validating
+an object:
+
+ >>> class Structure(object):
+ ... def __init__(self, q=None):
+ ... self.q = q
+ ... def __repr__(self):
+ ... return '{0.__name__}(q={1.q!r})'.format(type(self), self)
+ ...
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> type(schema(Structure(q='one'))) is Structure
+ True
+
+The Object validator should treat the cls argument as optional. In this
+case it shouldn't check the object type:
+
+ >>> from collections import namedtuple
+ >>> NamedTuple = namedtuple('NamedTuple', ('q',))
+ >>> schema = Schema(Object({'q': 'one'}))
+ >>> named = NamedTuple(q='one')
+ >>> schema(named) == named
+ True
+ >>> schema(named)
+ NamedTuple(q='one')
+
+If the cls argument is passed to the Object validator, we should check
+the object type:
+
+ >>> schema = Schema(Object({'q': 'one'}, cls=Structure))
+ >>> schema(NamedTuple(q='one')) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ MultipleInvalid: expected a <class 'Structure'>
+ >>> schema = Schema(Object({'q': 'one'}, cls=NamedTuple))
+ >>> schema(NamedTuple(q='one'))
+ NamedTuple(q='one')
+
+Ensure that objects with \_\_slots\_\_ are supported properly:
+
+ >>> class SlotsStructure(Structure):
+ ... __slots__ = ['q']
+ ...
+ >>> schema = Schema(Object({'q': 'one'}))
+ >>> schema(SlotsStructure(q='one'))
+ SlotsStructure(q='one')
+ >>> class DictStructure(object):
+ ... __slots__ = ['q', '__dict__']
+ ... def __init__(self, q=None, page=None):
+ ... self.q = q
+ ... self.page = page
+ ... def __repr__(self):
+ ... return '{0.__name__}(q={1.q!r}, page={1.page!r})'.format(type(self), self)
+ ...
+ >>> structure = DictStructure(q='one')
+ >>> structure.page = 1
+ >>> try:
+ ... schema(structure)
+ ... raise AssertionError('MultipleInvalid not raised')
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> str(exc)
+ "extra keys not allowed @ data['page']"
+
+ >>> schema = Schema(Object({'q': 'one', Extra: object}))
+ >>> schema(structure)
+ DictStructure(q='one', page=1)
+
+Ensure that objects can be used with other validators:
+
+ >>> schema = Schema({'meta': Object({'q': 'one'})})
+ >>> schema({'meta': Structure(q='one')})
+ {'meta': Structure(q='one')}
+
+Ensure that subclasses of Invalid are raised as-is:
+
+ >>> class SpecialInvalid(Invalid):
+ ... pass
+ ...
+ >>> def custom_validator(value):
+ ... raise SpecialInvalid('boom')
+ ...
+ >>> schema = Schema({'thing': custom_validator})
+ >>> try:
+ ... schema({'thing': 'not an int'})
+ ... except MultipleInvalid as e:
+ ... exc = e
+ >>> exc.errors[0].__class__.__name__
+ 'SpecialInvalid'
diff --git a/python/voluptuous/voluptuous.py b/python/voluptuous/voluptuous.py
new file mode 100644
index 000000000..132c3c476
--- /dev/null
+++ b/python/voluptuous/voluptuous.py
@@ -0,0 +1,1954 @@
+# encoding: utf-8
+#
+# Copyright (C) 2010-2013 Alec Thomas <alec@swapoff.org>
+# All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.
+#
+# Author: Alec Thomas <alec@swapoff.org>
+
+"""Schema validation for Python data structures.
+
+Given, for example, a nested data structure like this:
+
+ {
+ 'exclude': ['Users', 'Uptime'],
+ 'include': [],
+ 'set': {
+ 'snmp_community': 'public',
+ 'snmp_timeout': 15,
+ 'snmp_version': '2c',
+ },
+ 'targets': {
+ 'localhost': {
+ 'exclude': ['Uptime'],
+ 'features': {
+ 'Uptime': {
+ 'retries': 3,
+ },
+ 'Users': {
+ 'snmp_community': 'monkey',
+ 'snmp_port': 15,
+ },
+ },
+ 'include': ['Users'],
+ 'set': {
+ 'snmp_community': 'monkeys',
+ },
+ },
+ },
+ }
+
+A schema like this:
+
+ >>> settings = {
+ ... 'snmp_community': str,
+ ... 'retries': int,
+ ... 'snmp_version': All(Coerce(str), Any('3', '2c', '1')),
+ ... }
+ >>> features = ['Ping', 'Uptime', 'Http']
+ >>> schema = Schema({
+ ... 'exclude': features,
+ ... 'include': features,
+ ... 'set': settings,
+ ... 'targets': {
+ ... 'exclude': features,
+ ... 'include': features,
+ ... 'features': {
+ ... str: settings,
+ ... },
+ ... },
+ ... })
+
+Validate like so:
+
+ >>> schema({
+ ... 'set': {
+ ... 'snmp_community': 'public',
+ ... 'snmp_version': '2c',
+ ... },
+ ... 'targets': {
+ ... 'exclude': ['Ping'],
+ ... 'features': {
+ ... 'Uptime': {'retries': 3},
+ ... 'Users': {'snmp_community': 'monkey'},
+ ... },
+ ... },
+ ... }) == {
+ ... 'set': {'snmp_version': '2c', 'snmp_community': 'public'},
+ ... 'targets': {
+ ... 'exclude': ['Ping'],
+ ... 'features': {'Uptime': {'retries': 3},
+ ... 'Users': {'snmp_community': 'monkey'}}}}
+ True
+"""
+import collections
+import datetime
+import inspect
+import os
+import re
+import sys
+from contextlib import contextmanager
+from functools import wraps
+
+
+if sys.version_info >= (3,):
+ import urllib.parse as urlparse
+ long = int
+ unicode = str
+ basestring = str
+ ifilter = filter
+ iteritems = lambda d: d.items()
+else:
+ from itertools import ifilter
+ import urlparse
+ iteritems = lambda d: d.iteritems()
+
+
+__author__ = 'Alec Thomas <alec@swapoff.org>'
+__version__ = '0.8.11'
+
+
+@contextmanager
+def raises(exc, msg=None, regex=None):
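+    """Context manager that catches `exc` and optionally checks its message.
+
+    If `msg` or `regex` is given, the string form of the caught exception is
+    asserted against it. Note that nothing fails if no exception is raised.
+    """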
+ try:
+ yield
+ except exc as e:
+ if msg is not None:
+ assert str(e) == msg, '%r != %r' % (str(e), msg)
+ if regex is not None:
+ assert re.search(regex, str(e)), '%r does not match %r' % (str(e), regex)
+
+
+class Undefined(object):
+    def __nonzero__(self):
+        return False
+
+    # Python 3 looks up __bool__ instead of __nonzero__; alias it so that
+    # UNDEFINED stays falsy on both major Python versions.
+    __bool__ = __nonzero__
+
+ def __repr__(self):
+ return '...'
+
+
+UNDEFINED = Undefined()
+
+
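+# Normalize a default into a factory: UNDEFINED and callables pass through
+# unchanged; a plain value is wrapped in a zero-argument lambda.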
+def default_factory(value):
+ if value is UNDEFINED or callable(value):
+ return value
+ return lambda: value
+
+
+# options for extra keys
+PREVENT_EXTRA = 0 # any extra key not in schema will raise an error
+ALLOW_EXTRA = 1 # extra keys not in schema will be included in output
+REMOVE_EXTRA = 2 # extra keys not in schema will be excluded from output
+
+
+class Error(Exception):
+ """Base validation exception."""
+
+
+class SchemaError(Error):
+ """An error was encountered in the schema."""
+
+
+class Invalid(Error):
+ """The data was invalid.
+
+ :attr msg: The error message.
+ :attr path: The path to the error, as a list of keys in the source data.
+ :attr error_message: The actual error message that was raised, as a
+ string.
+
+ """
+
+ def __init__(self, message, path=None, error_message=None, error_type=None):
+ Error.__init__(self, message)
+ self.path = path or []
+ self.error_message = error_message or message
+ self.error_type = error_type
+
+ @property
+ def msg(self):
+ return self.args[0]
+
+ def __str__(self):
+ path = ' @ data[%s]' % ']['.join(map(repr, self.path)) \
+ if self.path else ''
+ output = Exception.__str__(self)
+ if self.error_type:
+ output += ' for ' + self.error_type
+ return output + path
+
+ def prepend(self, path):
+ self.path = path + self.path
+
+
+class MultipleInvalid(Invalid):
+ def __init__(self, errors=None):
+ self.errors = errors[:] if errors else []
+
+ def __repr__(self):
+ return 'MultipleInvalid(%r)' % self.errors
+
+ @property
+ def msg(self):
+ return self.errors[0].msg
+
+ @property
+ def path(self):
+ return self.errors[0].path
+
+ @property
+ def error_message(self):
+ return self.errors[0].error_message
+
+ def add(self, error):
+ self.errors.append(error)
+
+ def __str__(self):
+ return str(self.errors[0])
+
+ def prepend(self, path):
+ for error in self.errors:
+ error.prepend(path)
+
+
+class RequiredFieldInvalid(Invalid):
+ """Required field was missing."""
+
+
+class ObjectInvalid(Invalid):
+ """The value we found was not an object."""
+
+
+class DictInvalid(Invalid):
+ """The value found was not a dict."""
+
+
+class ExclusiveInvalid(Invalid):
+ """More than one value found in exclusion group."""
+
+
+class InclusiveInvalid(Invalid):
+ """Not all values found in inclusion group."""
+
+
+class SequenceTypeInvalid(Invalid):
+ """The type found is not a sequence type."""
+
+
+class TypeInvalid(Invalid):
+ """The value was not of required type."""
+
+
+class ValueInvalid(Invalid):
+ """The value was found invalid by evaluation function."""
+
+
+class ScalarInvalid(Invalid):
+ """Scalars did not match."""
+
+
+class CoerceInvalid(Invalid):
+ """Impossible to coerce value to type."""
+
+
+class AnyInvalid(Invalid):
+ """The value did not pass any validator."""
+
+
+class AllInvalid(Invalid):
+ """The value did not pass all validators."""
+
+
+class MatchInvalid(Invalid):
+ """The value does not match the given regular expression."""
+
+
+class RangeInvalid(Invalid):
+ """The value is not in given range."""
+
+
+class TrueInvalid(Invalid):
+ """The value is not True."""
+
+
+class FalseInvalid(Invalid):
+ """The value is not False."""
+
+
+class BooleanInvalid(Invalid):
+ """The value is not a boolean."""
+
+
+class UrlInvalid(Invalid):
+ """The value is not a url."""
+
+
+class FileInvalid(Invalid):
+ """The value is not a file."""
+
+
+class DirInvalid(Invalid):
+ """The value is not a directory."""
+
+
+class PathInvalid(Invalid):
+ """The value is not a path."""
+
+
+class LiteralInvalid(Invalid):
+ """The literal values do not match."""
+
+
+class VirtualPathComponent(str):
+ def __str__(self):
+ return '<' + self + '>'
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class Schema(object):
+ """A validation schema.
+
+ The schema is a Python tree-like structure where nodes are pattern
+ matched against corresponding trees of values.
+
+ Nodes can be values, in which case a direct comparison is used, types,
+ in which case an isinstance() check is performed, or callables, which will
+ validate and optionally convert the value.
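+
+    For example (an illustrative schema; the keys are hypothetical), a value
+    node and a type node side by side:
+
+    >>> schema = Schema({'mode': 'fast', 'retries': int})
+    >>> schema({'mode': 'fast', 'retries': 3}) == {'mode': 'fast', 'retries': 3}
+    True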
+ """
+
+ _extra_to_name = {
+ REMOVE_EXTRA: 'REMOVE_EXTRA',
+ ALLOW_EXTRA: 'ALLOW_EXTRA',
+ PREVENT_EXTRA: 'PREVENT_EXTRA',
+ }
+
+ def __init__(self, schema, required=False, extra=PREVENT_EXTRA):
+ """Create a new Schema.
+
+ :param schema: Validation schema. See :module:`voluptuous` for details.
+ :param required: Keys defined in the schema must be in the data.
+ :param extra: Specify how extra keys in the data are treated:
+ - :const:`~voluptuous.PREVENT_EXTRA`: to disallow any undefined
+ extra keys (raise ``Invalid``).
+ - :const:`~voluptuous.ALLOW_EXTRA`: to include undefined extra
+ keys in the output.
+ - :const:`~voluptuous.REMOVE_EXTRA`: to exclude undefined extra keys
+ from the output.
+ - Any value other than the above defaults to
+ :const:`~voluptuous.PREVENT_EXTRA`
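+
+        A minimal illustration of the two non-default modes (hypothetical
+        data):
+
+        >>> s = Schema({'id': int}, extra=ALLOW_EXTRA)
+        >>> s({'id': 1, 'tag': 'x'}) == {'id': 1, 'tag': 'x'}
+        True
+        >>> s = Schema({'id': int}, extra=REMOVE_EXTRA)
+        >>> s({'id': 1, 'tag': 'x'})
+        {'id': 1}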
+ """
+ self.schema = schema
+ self.required = required
+ self.extra = int(extra) # ensure the value is an integer
+ self._compiled = self._compile(schema)
+
+ def __repr__(self):
+ return "<Schema(%s, extra=%s, required=%s) object at 0x%x>" % (
+ self.schema, self._extra_to_name.get(self.extra, '??'),
+ self.required, id(self))
+
+ def __call__(self, data):
+ """Validate data against this schema."""
+ try:
+ return self._compiled([], data)
+ except MultipleInvalid:
+ raise
+ except Invalid as e:
+ raise MultipleInvalid([e])
+ # return self.validate([], self.schema, data)
+
+ def _compile(self, schema):
+ if schema is Extra:
+ return lambda _, v: v
+ if isinstance(schema, Object):
+ return self._compile_object(schema)
+ if isinstance(schema, collections.Mapping):
+ return self._compile_dict(schema)
+ elif isinstance(schema, list):
+ return self._compile_list(schema)
+ elif isinstance(schema, tuple):
+ return self._compile_tuple(schema)
+ type_ = type(schema)
+ if type_ is type:
+ type_ = schema
+ if type_ in (bool, int, long, str, unicode, float, complex, object,
+ list, dict, type(None)) or callable(schema):
+ return _compile_scalar(schema)
+ raise SchemaError('unsupported schema data type %r' %
+ type(schema).__name__)
+
+ def _compile_mapping(self, schema, invalid_msg=None):
+ """Create validator for given mapping."""
+ invalid_msg = invalid_msg or 'mapping value'
+
+ # Keys that may be required
+ all_required_keys = set(key for key in schema
+ if key is not Extra
+ and ((self.required and not isinstance(key, (Optional, Remove)))
+ or isinstance(key, Required)))
+
+ # Keys that may have defaults
+ all_default_keys = set(key for key in schema
+ if isinstance(key, Required)
+ or isinstance(key, Optional))
+
+ _compiled_schema = {}
+ for skey, svalue in iteritems(schema):
+ new_key = self._compile(skey)
+ new_value = self._compile(svalue)
+ _compiled_schema[skey] = (new_key, new_value)
+
+ candidates = list(_iterate_mapping_candidates(_compiled_schema))
+
+ def validate_mapping(path, iterable, out):
+ required_keys = all_required_keys.copy()
+ # keeps track of all default keys that haven't been filled
+ default_keys = all_default_keys.copy()
+ error = None
+ errors = []
+ for key, value in iterable:
+ key_path = path + [key]
+ remove_key = False
+
+ # compare each given key/value against all compiled key/values
+ # schema key, (compiled key, compiled value)
+ for skey, (ckey, cvalue) in candidates:
+ try:
+ new_key = ckey(key_path, key)
+ except Invalid as e:
+ if len(e.path) > len(key_path):
+ raise
+ if not error or len(e.path) > len(error.path):
+ error = e
+ continue
+ # Backtracking is not performed once a key is selected, so if
+ # the value is invalid we immediately throw an exception.
+ exception_errors = []
+ # check if the key is marked for removal
+ is_remove = new_key is Remove
+ try:
+ cval = cvalue(key_path, value)
+ # include if it's not marked for removal
+ if not is_remove:
+ out[new_key] = cval
+ else:
+ remove_key = True
+ continue
+ except MultipleInvalid as e:
+ exception_errors.extend(e.errors)
+ except Invalid as e:
+ exception_errors.append(e)
+
+ if exception_errors:
+ if is_remove or remove_key:
+ continue
+ for err in exception_errors:
+ if len(err.path) <= len(key_path):
+ err.error_type = invalid_msg
+ errors.append(err)
+ # If there is a validation error for a required
+ # key, this means that the key was provided.
+ # Discard the required key so it does not
+ # create an additional, noisy exception.
+ required_keys.discard(skey)
+ break
+
+ # Key and value okay, mark any Required() fields as found.
+ required_keys.discard(skey)
+
+ # No need for a default if it was filled
+ default_keys.discard(skey)
+
+ break
+ else:
+ if remove_key:
+ # remove key
+ continue
+ elif self.extra == ALLOW_EXTRA:
+ out[key] = value
+ elif self.extra != REMOVE_EXTRA:
+ errors.append(Invalid('extra keys not allowed', key_path))
+ # else REMOVE_EXTRA: ignore the key so it's removed from output
+
+ # set defaults for any that can have defaults
+ for key in default_keys:
+ if not isinstance(key.default, Undefined): # if the user provides a default with the node
+ out[key.schema] = key.default()
+ if key in required_keys:
+ required_keys.discard(key)
+
+ # for any required keys left that weren't found and don't have defaults:
+ for key in required_keys:
+ msg = key.msg if hasattr(key, 'msg') and key.msg else 'required key not provided'
+ errors.append(RequiredFieldInvalid(msg, path + [key]))
+ if errors:
+ raise MultipleInvalid(errors)
+
+ return out
+
+ return validate_mapping
+
+ def _compile_object(self, schema):
+ """Validate an object.
+
+        Has the same behavior as the dictionary validator but works with
+        object attributes.
+
+ For example:
+
+ >>> class Structure(object):
+ ... def __init__(self, one=None, three=None):
+ ... self.one = one
+ ... self.three = three
+ ...
+ >>> validate = Schema(Object({'one': 'two', 'three': 'four'}, cls=Structure))
+ >>> with raises(MultipleInvalid, "not a valid value for object value @ data['one']"):
+ ... validate(Structure(one='three'))
+
+ """
+ base_validate = self._compile_mapping(
+ schema, invalid_msg='object value')
+
+ def validate_object(path, data):
+ if (schema.cls is not UNDEFINED
+ and not isinstance(data, schema.cls)):
+ raise ObjectInvalid('expected a {0!r}'.format(schema.cls), path)
+ iterable = _iterate_object(data)
+ iterable = ifilter(lambda item: item[1] is not None, iterable)
+ out = base_validate(path, iterable, {})
+ return type(data)(**out)
+
+ return validate_object
+
+ def _compile_dict(self, schema):
+ """Validate a dictionary.
+
+ A dictionary schema can contain a set of values, or at most one
+ validator function/type.
+
+ A dictionary schema will only validate a dictionary:
+
+ >>> validate = Schema({})
+ >>> with raises(MultipleInvalid, 'expected a dictionary'):
+ ... validate([])
+
+ An invalid dictionary value:
+
+ >>> validate = Schema({'one': 'two', 'three': 'four'})
+ >>> with raises(MultipleInvalid, "not a valid value for dictionary value @ data['one']"):
+ ... validate({'one': 'three'})
+
+ An invalid key:
+
+ >>> with raises(MultipleInvalid, "extra keys not allowed @ data['two']"):
+ ... validate({'two': 'three'})
+
+
+ Validation function, in this case the "int" type:
+
+ >>> validate = Schema({'one': 'two', 'three': 'four', int: str})
+
+ Valid integer input:
+
+ >>> validate({10: 'twenty'})
+ {10: 'twenty'}
+
+        By default, a "type" in the schema (in this case "int") will be used
+        purely to validate that the corresponding value is of that type. It
+        will not Coerce the value (this is to avoid unexpected surprises):
+
+ >>> with raises(MultipleInvalid, "extra keys not allowed @ data['10']"):
+ ... validate({'10': 'twenty'})
+
+ Wrap them in the Coerce() function to achieve this:
+
+ >>> validate = Schema({'one': 'two', 'three': 'four',
+ ... Coerce(int): str})
+ >>> validate({'10': 'twenty'})
+ {10: 'twenty'}
+
+        Custom message for a required key:
+
+ >>> validate = Schema({Required('one', 'required'): 'two'})
+ >>> with raises(MultipleInvalid, "required @ data['one']"):
+ ... validate({})
+
+ Multiple errors for nested field in a dict:
+
+ >>> validate = Schema({
+ ... 'adict': {
+ ... 'strfield': str,
+ ... 'intfield': int
+ ... }
+ ... })
+ >>> try:
+ ... validate({
+ ... 'adict': {
+ ... 'strfield': 123,
+ ... 'intfield': 'one'
+ ... }
+ ... })
+ ... except MultipleInvalid as e:
+ ... print(sorted(str(i) for i in e.errors)) # doctest: +NORMALIZE_WHITESPACE
+ ["expected int for dictionary value @ data['adict']['intfield']",
+ "expected str for dictionary value @ data['adict']['strfield']"]
+
+ """
+ base_validate = self._compile_mapping(
+ schema, invalid_msg='dictionary value')
+
+ groups_of_exclusion = {}
+ groups_of_inclusion = {}
+ for node in schema:
+ if isinstance(node, Exclusive):
+ g = groups_of_exclusion.setdefault(node.group_of_exclusion, [])
+ g.append(node)
+ elif isinstance(node, Inclusive):
+ g = groups_of_inclusion.setdefault(node.group_of_inclusion, [])
+ g.append(node)
+
+ def validate_dict(path, data):
+ if not isinstance(data, dict):
+ raise DictInvalid('expected a dictionary', path)
+
+ errors = []
+ for label, group in groups_of_exclusion.items():
+ exists = False
+ for exclusive in group:
+ if exclusive.schema in data:
+ if exists:
+ msg = exclusive.msg if hasattr(exclusive, 'msg') and exclusive.msg else \
+ "two or more values in the same group of exclusion '%s'" % label
+ next_path = path + [VirtualPathComponent(label)]
+ errors.append(ExclusiveInvalid(msg, next_path))
+ break
+ exists = True
+
+ if errors:
+ raise MultipleInvalid(errors)
+
+ for label, group in groups_of_inclusion.items():
+ included = [node.schema in data for node in group]
+ if any(included) and not all(included):
+ msg = "some but not all values in the same group of inclusion '%s'" % label
+ for g in group:
+ if hasattr(g, 'msg') and g.msg:
+ msg = g.msg
+ break
+ next_path = path + [VirtualPathComponent(label)]
+ errors.append(InclusiveInvalid(msg, next_path))
+ break
+
+ if errors:
+ raise MultipleInvalid(errors)
+
+ out = {}
+ return base_validate(path, iteritems(data), out)
+
+ return validate_dict
+
+ def _compile_sequence(self, schema, seq_type):
+ """Validate a sequence type.
+
+ This is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(['one', 'two', int])
+ >>> validator(['one'])
+ ['one']
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
+ ... validator([3.5])
+ >>> validator([1])
+ [1]
+ """
+ _compiled = [self._compile(s) for s in schema]
+ seq_type_name = seq_type.__name__
+
+ def validate_sequence(path, data):
+ if not isinstance(data, seq_type):
+ raise SequenceTypeInvalid('expected a %s' % seq_type_name, path)
+
+ # Empty seq schema, allow any data.
+ if not schema:
+ return data
+
+ out = []
+ invalid = None
+ errors = []
+ index_path = UNDEFINED
+ for i, value in enumerate(data):
+ index_path = path + [i]
+ invalid = None
+ for validate in _compiled:
+ try:
+ cval = validate(index_path, value)
+ if cval is not Remove: # do not include Remove values
+ out.append(cval)
+ break
+ except Invalid as e:
+ if len(e.path) > len(index_path):
+ raise
+ invalid = e
+ else:
+ errors.append(invalid)
+ if errors:
+ raise MultipleInvalid(errors)
+ return type(data)(out)
+ return validate_sequence
+
+ def _compile_tuple(self, schema):
+ """Validate a tuple.
+
+ A tuple is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(('one', 'two', int))
+ >>> validator(('one',))
+ ('one',)
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
+ ... validator((3.5,))
+ >>> validator((1,))
+ (1,)
+ """
+ return self._compile_sequence(schema, tuple)
+
+ def _compile_list(self, schema):
+ """Validate a list.
+
+ A list is a sequence of valid values or validators tried in order.
+
+ >>> validator = Schema(['one', 'two', int])
+ >>> validator(['one'])
+ ['one']
+ >>> with raises(MultipleInvalid, 'expected int @ data[0]'):
+ ... validator([3.5])
+ >>> validator([1])
+ [1]
+ """
+ return self._compile_sequence(schema, list)
+
+ def extend(self, schema, required=None, extra=None):
+ """Create a new `Schema` by merging this and the provided `schema`.
+
+ Neither this `Schema` nor the provided `schema` are modified. The
+ resulting `Schema` inherits the `required` and `extra` parameters of
+ this, unless overridden.
+
+ Both schemas must be dictionary-based.
+
+ :param schema: dictionary to extend this `Schema` with
+ :param required: if set, overrides `required` of this `Schema`
+ :param extra: if set, overrides `extra` of this `Schema`
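+
+        A minimal illustration (hypothetical keys):
+
+        >>> base = Schema({'a': int})
+        >>> extended = base.extend({'b': str})
+        >>> extended({'a': 1, 'b': 'two'}) == {'a': 1, 'b': 'two'}
+        True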
+ """
+
+ assert type(self.schema) == dict and type(schema) == dict, 'Both schemas must be dictionary-based'
+
+ result = self.schema.copy()
+ result.update(schema)
+
+ result_required = (required if required is not None else self.required)
+ result_extra = (extra if extra is not None else self.extra)
+ return Schema(result, required=result_required, extra=result_extra)
+
+
+def _compile_scalar(schema):
+ """A scalar value.
+
+ The schema can either be a value or a type.
+
+ >>> _compile_scalar(int)([], 1)
+ 1
+ >>> with raises(Invalid, 'expected float'):
+ ... _compile_scalar(float)([], '1')
+
+    Callables are called with the value and may validate or convert it:
+ >>> _compile_scalar(lambda v: float(v))([], '1')
+ 1.0
+
+    As a convenience, ValueErrors are trapped:
+
+ >>> with raises(Invalid, 'not a valid value'):
+ ... _compile_scalar(lambda v: float(v))([], 'a')
+ """
+ if isinstance(schema, type):
+ def validate_instance(path, data):
+ if isinstance(data, schema):
+ return data
+ else:
+ msg = 'expected %s' % schema.__name__
+ raise TypeInvalid(msg, path)
+ return validate_instance
+
+ if callable(schema):
+ def validate_callable(path, data):
+ try:
+ return schema(data)
+ except ValueError as e:
+ raise ValueInvalid('not a valid value', path)
+ except Invalid as e:
+ e.prepend(path)
+ raise
+ return validate_callable
+
+ def validate_value(path, data):
+ if data != schema:
+ raise ScalarInvalid('not a valid value', path)
+ return data
+
+ return validate_value
+
+
+def _compile_itemsort():
+    """Return a sort key function for mapping items."""
+ def is_extra(key_):
+ return key_ is Extra
+
+ def is_remove(key_):
+ return isinstance(key_, Remove)
+
+ def is_marker(key_):
+ return isinstance(key_, Marker)
+
+ def is_type(key_):
+ return inspect.isclass(key_)
+
+ def is_callable(key_):
+ return callable(key_)
+
+ # priority list for map sorting (in order of checking)
+ # We want Extra to match last, because it's a catch-all. On the other hand,
+ # Remove markers should match first (since invalid values will not
+ # raise an Error, instead the validator will check if other schemas match
+ # the same value).
+ priority = [(1, is_remove), # Remove highest priority after values
+ (2, is_marker), # then other Markers
+ (4, is_type), # types/classes lowest before Extra
+ (3, is_callable), # callables after markers
+ (5, is_extra)] # Extra lowest priority
+
+ def item_priority(item_):
+ key_ = item_[0]
+ for i, check_ in priority:
+ if check_(key_):
+ return i
+        # values have the highest priority
+ return 0
+
+ return item_priority
+
+_sort_item = _compile_itemsort()
+
+
+def _iterate_mapping_candidates(schema):
+ """Iterate over schema in a meaningful order."""
+ # Without this, Extra might appear first in the iterator, and fail to
+ # validate a key even though it's a Required that has its own validation,
+ # generating a false positive.
+ return sorted(iteritems(schema), key=_sort_item)
+
+
+def _iterate_object(obj):
+ """Return iterator over object attributes. Respect objects with
+ defined __slots__.
+
+ """
+ d = {}
+ try:
+ d = vars(obj)
+ except TypeError:
+        # maybe we have a namedtuple here?
+ if hasattr(obj, '_asdict'):
+ d = obj._asdict()
+ for item in iteritems(d):
+ yield item
+ try:
+ slots = obj.__slots__
+ except AttributeError:
+ pass
+ else:
+ for key in slots:
+ if key != '__dict__':
+ yield (key, getattr(obj, key))
+
+
+class Object(dict):
+ """Indicate that we should work with attributes, not keys."""
+
+ def __init__(self, schema, cls=UNDEFINED):
+ self.cls = cls
+ super(Object, self).__init__(schema)
+
+
+class Marker(object):
+ """Mark nodes for special treatment."""
+
+ def __init__(self, schema, msg=None):
+ self.schema = schema
+ self._schema = Schema(schema)
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ return self._schema(v)
+ except Invalid as e:
+ if not self.msg or len(e.path) > 1:
+ raise
+ raise Invalid(self.msg)
+
+ def __str__(self):
+ return str(self.schema)
+
+ def __repr__(self):
+ return repr(self.schema)
+
+ def __lt__(self, other):
+ return self.schema < other.schema
+
+
+class Optional(Marker):
+ """Mark a node in the schema as optional, and optionally provide a default
+
+ >>> schema = Schema({Optional('key'): str})
+ >>> schema({})
+ {}
+ >>> schema = Schema({Optional('key', default='value'): str})
+ >>> schema({})
+ {'key': 'value'}
+ >>> schema = Schema({Optional('key', default=list): list})
+ >>> schema({})
+ {'key': []}
+
+    If the 'required' flag is set for an entire schema, optional keys aren't required:
+
+ >>> schema = Schema({
+ ... Optional('key'): str,
+ ... 'key2': str
+ ... }, required=True)
+ >>> schema({'key2':'value'})
+ {'key2': 'value'}
+ """
+ def __init__(self, schema, msg=None, default=UNDEFINED):
+ super(Optional, self).__init__(schema, msg=msg)
+ self.default = default_factory(default)
+
+
+class Exclusive(Optional):
+ """Mark a node in the schema as exclusive.
+
+    Exclusive keys inherit from Optional:
+
+ >>> schema = Schema({Exclusive('alpha', 'angles'): int, Exclusive('beta', 'angles'): int})
+ >>> schema({'alpha': 30})
+ {'alpha': 30}
+
+    Keys in the same group of exclusion cannot appear together; this only makes sense for dictionaries:
+
+ >>> with raises(MultipleInvalid, "two or more values in the same group of exclusion 'angles' @ data[<angles>]"):
+ ... schema({'alpha': 30, 'beta': 45})
+
+    For example, an API can provide multiple types of authentication, but only one may be used at a time:
+
+ >>> msg = 'Please, use only one type of authentication at the same time.'
+ >>> schema = Schema({
+ ... Exclusive('classic', 'auth', msg=msg):{
+ ... Required('email'): basestring,
+ ... Required('password'): basestring
+ ... },
+ ... Exclusive('internal', 'auth', msg=msg):{
+ ... Required('secret_key'): basestring
+ ... },
+ ... Exclusive('social', 'auth', msg=msg):{
+ ... Required('social_network'): basestring,
+ ... Required('token'): basestring
+ ... }
+ ... })
+
+ >>> with raises(MultipleInvalid, "Please, use only one type of authentication at the same time. @ data[<auth>]"):
+ ... schema({'classic': {'email': 'foo@example.com', 'password': 'bar'},
+ ... 'social': {'social_network': 'barfoo', 'token': 'tEMp'}})
+ """
+ def __init__(self, schema, group_of_exclusion, msg=None):
+ super(Exclusive, self).__init__(schema, msg=msg)
+ self.group_of_exclusion = group_of_exclusion
+
+
+class Inclusive(Optional):
+ """ Mark a node in the schema as inclusive.
+
+    Inclusive keys inherit from Optional:
+
+ >>> schema = Schema({
+ ... Inclusive('filename', 'file'): str,
+ ... Inclusive('mimetype', 'file'): str
+ ... })
+ >>> data = {'filename': 'dog.jpg', 'mimetype': 'image/jpeg'}
+ >>> data == schema(data)
+ True
+
+    Keys in the same group of inclusion must exist together; this only makes sense for dictionaries:
+
+ >>> with raises(MultipleInvalid, "some but not all values in the same group of inclusion 'file' @ data[<file>]"):
+ ... schema({'filename': 'dog.jpg'})
+
+ If none of the keys in the group are present, it is accepted:
+
+ >>> schema({})
+ {}
+
+    For example, an API can return 'height' and 'width' together, but not separately.
+
+ >>> msg = "Height and width must exist together"
+ >>> schema = Schema({
+ ... Inclusive('height', 'size', msg=msg): int,
+ ... Inclusive('width', 'size', msg=msg): int
+ ... })
+
+ >>> with raises(MultipleInvalid, msg + " @ data[<size>]"):
+ ... schema({'height': 100})
+
+ >>> with raises(MultipleInvalid, msg + " @ data[<size>]"):
+ ... schema({'width': 100})
+
+ >>> data = {'height': 100, 'width': 100}
+ >>> data == schema(data)
+ True
+ """
+
+ def __init__(self, schema, group_of_inclusion, msg=None):
+ super(Inclusive, self).__init__(schema, msg=msg)
+ self.group_of_inclusion = group_of_inclusion
+
+
+class Required(Marker):
+ """Mark a node in the schema as being required, and optionally provide a default value.
+
+ >>> schema = Schema({Required('key'): str})
+ >>> with raises(MultipleInvalid, "required key not provided @ data['key']"):
+ ... schema({})
+
+ >>> schema = Schema({Required('key', default='value'): str})
+ >>> schema({})
+ {'key': 'value'}
+ >>> schema = Schema({Required('key', default=list): list})
+ >>> schema({})
+ {'key': []}
+ """
+ def __init__(self, schema, msg=None, default=UNDEFINED):
+ super(Required, self).__init__(schema, msg=msg)
+ self.default = default_factory(default)
+
+
+class Remove(Marker):
+ """Mark a node in the schema to be removed and excluded from the validated
+ output. Keys that fail validation will not raise ``Invalid``. Instead, these
+ keys will be treated as extras.
+
+ >>> schema = Schema({str: int, Remove(int): str})
+ >>> with raises(MultipleInvalid, "extra keys not allowed @ data[1]"):
+ ... schema({'keep': 1, 1: 1.0})
+ >>> schema({1: 'red', 'red': 1, 2: 'green'})
+ {'red': 1}
+ >>> schema = Schema([int, Remove(float), Extra])
+ >>> schema([1, 2, 3, 4.0, 5, 6.0, '7'])
+ [1, 2, 3, 5, '7']
+ """
+ def __call__(self, v):
+ super(Remove, self).__call__(v)
+ return self.__class__
+
+ def __repr__(self):
+ return "Remove(%r)" % (self.schema,)
+
+
+def Extra(_):
+ """Allow keys in the data that are not present in the schema."""
+ raise SchemaError('"Extra" should never be called')
+
+
+# As extra() is never called there's no way to catch references to the
+# deprecated object, so we just leave an alias here instead.
+extra = Extra
+
+class Msg(object):
+ """Report a user-friendly message if a schema fails to validate.
+
+ >>> validate = Schema(
+ ... Msg(['one', 'two', int],
+ ... 'should be one of "one", "two" or an integer'))
+ >>> with raises(MultipleInvalid, 'should be one of "one", "two" or an integer'):
+ ... validate(['three'])
+
+ Messages are only applied to invalid direct descendants of the schema:
+
+ >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!'))
+ >>> with raises(MultipleInvalid, 'expected int @ data[0][0]'):
+ ... validate([['three']])
+
+    The exception class raised can be overridden, but it must be a subclass of Invalid:
+
+    >>> with raises(SchemaError, 'Msg can only use subclasses of Invalid as custom class'):
+ ... validate = Schema(Msg([int], 'should be int', cls=KeyError))
+
+ If you do use a subclass of Invalid, that error will be thrown (wrapped in a MultipleInvalid)
+
+ >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!', cls=RangeInvalid))
+ >>> try:
+ ... validate(['three'])
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], RangeInvalid)
+ """
+
+ def __init__(self, schema, msg, cls=None):
+ if cls and not issubclass(cls, Invalid):
+            raise SchemaError("Msg can only use subclasses of"
+ " Invalid as custom class")
+ self._schema = schema
+ self.schema = Schema(schema)
+ self.msg = msg
+ self.cls = cls
+
+ def __call__(self, v):
+ try:
+ return self.schema(v)
+ except Invalid as e:
+ if len(e.path) > 1:
+ raise e
+ else:
+ raise (self.cls or Invalid)(self.msg)
+
+ def __repr__(self):
+ return 'Msg(%s, %s, cls=%s)' % (self._schema, self.msg, self.cls)
+
+
+def message(default=None, cls=None):
+ """Convenience decorator to allow functions to provide a message.
+
+ Set a default message:
+
+ >>> @message('not an integer')
+ ... def isint(v):
+ ... return int(v)
+
+ >>> validate = Schema(isint())
+ >>> with raises(MultipleInvalid, 'not an integer'):
+ ... validate('a')
+
+    The message can be overridden on a per-validator basis:
+
+ >>> validate = Schema(isint('bad'))
+ >>> with raises(MultipleInvalid, 'bad'):
+ ... validate('a')
+
+    The exception class raised can be overridden too:
+
+ >>> class IntegerInvalid(Invalid): pass
+ >>> validate = Schema(isint('bad', clsoverride=IntegerInvalid))
+ >>> try:
+ ... validate('a')
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], IntegerInvalid)
+ """
+ if cls and not issubclass(cls, Invalid):
+        raise SchemaError("message can only use subclasses of Invalid as custom class")
+
+ def decorator(f):
+ @wraps(f)
+ def check(msg=None, clsoverride=None):
+ @wraps(f)
+ def wrapper(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except ValueError:
+ raise (clsoverride or cls or ValueInvalid)(msg or default or 'invalid value')
+ return wrapper
+ return check
+ return decorator
+
+
+def truth(f):
+ """Convenience decorator to convert truth functions into validators.
+
+ >>> @truth
+ ... def isdir(v):
+ ... return os.path.isdir(v)
+ >>> validate = Schema(isdir)
+ >>> validate('/')
+ '/'
+ >>> with raises(MultipleInvalid, 'not a valid value'):
+ ... validate('/notavaliddir')
+ """
+ @wraps(f)
+ def check(v):
+ t = f(v)
+ if not t:
+ raise ValueError
+ return v
+ return check
+
+
+class Coerce(object):
+ """Coerce a value to a type.
+
+ If the type constructor throws a ValueError or TypeError, the value
+ will be marked as Invalid.
+
+ Default behavior:
+
+ >>> validate = Schema(Coerce(int))
+ >>> with raises(MultipleInvalid, 'expected int'):
+ ... validate(None)
+ >>> with raises(MultipleInvalid, 'expected int'):
+ ... validate('foo')
+
+ With custom message:
+
+ >>> validate = Schema(Coerce(int, "moo"))
+ >>> with raises(MultipleInvalid, 'moo'):
+ ... validate('foo')
+ """
+
+ def __init__(self, type, msg=None):
+ self.type = type
+ self.msg = msg
+ self.type_name = type.__name__
+
+ def __call__(self, v):
+ try:
+ return self.type(v)
+ except (ValueError, TypeError):
+ msg = self.msg or ('expected %s' % self.type_name)
+ raise CoerceInvalid(msg)
+
+ def __repr__(self):
+ return 'Coerce(%s, msg=%r)' % (self.type_name, self.msg)
+
+
+@message('value was not true', cls=TrueInvalid)
+@truth
+def IsTrue(v):
+ """Assert that a value is true, in the Python sense.
+
+ >>> validate = Schema(IsTrue())
+
+ "In the Python sense" means that implicitly false values, such as empty
+ lists, dictionaries, etc. are treated as "false":
+
+ >>> with raises(MultipleInvalid, "value was not true"):
+ ... validate([])
+ >>> validate([1])
+ [1]
+ >>> with raises(MultipleInvalid, "value was not true"):
+ ... validate(False)
+
+ ...and so on.
+
+ >>> try:
+ ... validate([])
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], TrueInvalid)
+ """
+ return v
+
+
+@message('value was not false', cls=FalseInvalid)
+def IsFalse(v):
+ """Assert that a value is false, in the Python sense.
+
+ (see :func:`IsTrue` for more detail)
+
+ >>> validate = Schema(IsFalse())
+ >>> validate([])
+ []
+ >>> with raises(MultipleInvalid, "value was not false"):
+ ... validate(True)
+
+ >>> try:
+ ... validate(True)
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], FalseInvalid)
+ """
+ if v:
+ raise ValueError
+ return v
+
+
+@message('expected boolean', cls=BooleanInvalid)
+def Boolean(v):
+ """Convert human-readable boolean values to a bool.
+
+    Accepted string values are '1', 'true', 'yes', 'on', 'enable' and their
+    negative counterparts ('0', 'false', 'no', 'off', 'disable').
+    Non-string values are cast to bool.
+
+ >>> validate = Schema(Boolean())
+ >>> validate(True)
+ True
+ >>> validate("1")
+ True
+ >>> validate("0")
+ False
+ >>> with raises(MultipleInvalid, "expected boolean"):
+ ... validate('moo')
+ >>> try:
+ ... validate('moo')
+ ... except MultipleInvalid as e:
+ ... assert isinstance(e.errors[0], BooleanInvalid)
+ """
+ if isinstance(v, basestring):
+ v = v.lower()
+ if v in ('1', 'true', 'yes', 'on', 'enable'):
+ return True
+ if v in ('0', 'false', 'no', 'off', 'disable'):
+ return False
+ raise ValueError
+ return bool(v)
+
+
+class Any(object):
+ """Use the first validated value.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema constructors.
+ :returns: Return value of the first validator that passes.
+
+ >>> validate = Schema(Any('true', 'false',
+ ... All(Any(int, bool), Coerce(bool))))
+ >>> validate('true')
+ 'true'
+ >>> validate(1)
+ True
+ >>> with raises(MultipleInvalid, "not a valid value"):
+ ... validate('moo')
+
+    If a custom msg is supplied, it replaces the default error message:
+
+ >>> validate = Schema(Any(1, 2, 3, msg="Expected 1 2 or 3"))
+ >>> validate(1)
+ 1
+ >>> with raises(MultipleInvalid, "Expected 1 2 or 3"):
+ ... validate(4)
+ """
+
+ def __init__(self, *validators, **kwargs):
+ self.validators = validators
+ self.msg = kwargs.pop('msg', None)
+ self._schemas = [Schema(val, **kwargs) for val in validators]
+
+ def __call__(self, v):
+ error = None
+ for schema in self._schemas:
+ try:
+ return schema(v)
+ except Invalid as e:
+ if error is None or len(e.path) > len(error.path):
+ error = e
+ else:
+ if error:
+ raise error if self.msg is None else AnyInvalid(self.msg)
+ raise AnyInvalid(self.msg or 'no valid value found')
+
+ def __repr__(self):
+ return 'Any([%s])' % (", ".join(repr(v) for v in self.validators))
+
+
+# Convenience alias
+Or = Any
+
+
+class All(object):
+ """Value must pass all validators.
+
+ The output of each validator is passed as input to the next.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema constructors.
+
+ >>> validate = Schema(All('10', Coerce(int)))
+ >>> validate('10')
+ 10
+ """
+
+ def __init__(self, *validators, **kwargs):
+ self.validators = validators
+ self.msg = kwargs.pop('msg', None)
+ self._schemas = [Schema(val, **kwargs) for val in validators]
+
+ def __call__(self, v):
+ try:
+ for schema in self._schemas:
+ v = schema(v)
+ except Invalid as e:
+ raise e if self.msg is None else AllInvalid(self.msg)
+ return v
+
+ def __repr__(self):
+ return 'All(%s, msg=%r)' % (
+ ", ".join(repr(v) for v in self.validators),
+ self.msg
+ )
+
+
+# Convenience alias
+And = All
+
+
+class Match(object):
+ """Value must be a string that matches the regular expression.
+
+ >>> validate = Schema(Match(r'^0x[A-F0-9]+$'))
+ >>> validate('0x123EF4')
+ '0x123EF4'
+ >>> with raises(MultipleInvalid, "does not match regular expression"):
+ ... validate('123EF4')
+
+ >>> with raises(MultipleInvalid, 'expected string or buffer'):
+ ... validate(123)
+
+    Pattern may also be a compiled regular expression:
+
+ >>> validate = Schema(Match(re.compile(r'0x[A-F0-9]+', re.I)))
+ >>> validate('0x123ef4')
+ '0x123ef4'
+ """
+
+ def __init__(self, pattern, msg=None):
+ if isinstance(pattern, basestring):
+ pattern = re.compile(pattern)
+ self.pattern = pattern
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ match = self.pattern.match(v)
+ except TypeError:
+ raise MatchInvalid("expected string or buffer")
+ if not match:
+ raise MatchInvalid(self.msg or 'does not match regular expression')
+ return v
+
+ def __repr__(self):
+ return 'Match(%r, msg=%r)' % (self.pattern.pattern, self.msg)
+
+
+class Replace(object):
+ """Regex substitution.
+
+ >>> validate = Schema(All(Replace('you', 'I'),
+ ... Replace('hello', 'goodbye')))
+ >>> validate('you say hello')
+ 'I say goodbye'
+ """
+
+ def __init__(self, pattern, substitution, msg=None):
+ if isinstance(pattern, basestring):
+ pattern = re.compile(pattern)
+ self.pattern = pattern
+ self.substitution = substitution
+ self.msg = msg
+
+ def __call__(self, v):
+ return self.pattern.sub(self.substitution, v)
+
+ def __repr__(self):
+ return 'Replace(%r, %r, msg=%r)' % (self.pattern.pattern,
+ self.substitution,
+ self.msg)
+
+
+def _url_validation(v):
+ parsed = urlparse.urlparse(v)
+ if not parsed.scheme or not parsed.netloc:
+ raise UrlInvalid("must have a URL scheme and host")
+ return parsed
+
+
+@message('expected a Fully qualified domain name URL', cls=UrlInvalid)
+def FqdnUrl(v):
+ """Verify that the value is a Fully qualified domain name URL.
+
+ >>> s = Schema(FqdnUrl())
+ >>> with raises(MultipleInvalid, 'expected a Fully qualified domain name URL'):
+ ... s("http://localhost/")
+ >>> s('http://w3.org')
+ 'http://w3.org'
+ """
+ try:
+ parsed_url = _url_validation(v)
+ if "." not in parsed_url.netloc:
+ raise UrlInvalid("must have a domain name in URL")
+ return v
+ except:
+ raise ValueError
+
+
+@message('expected a URL', cls=UrlInvalid)
+def Url(v):
+ """Verify that the value is a URL.
+
+ >>> s = Schema(Url())
+ >>> with raises(MultipleInvalid, 'expected a URL'):
+ ... s(1)
+ >>> s('http://w3.org')
+ 'http://w3.org'
+ """
+ try:
+ _url_validation(v)
+ return v
+ except:
+ raise ValueError
+
+
+@message('not a file', cls=FileInvalid)
+@truth
+def IsFile(v):
+ """Verify the file exists.
+
+ >>> os.path.basename(IsFile()(__file__)).startswith('voluptuous.py')
+ True
+ >>> with raises(FileInvalid, 'not a file'):
+ ... IsFile()("random_filename_goes_here.py")
+ """
+ return os.path.isfile(v)
+
+
+@message('not a directory', cls=DirInvalid)
+@truth
+def IsDir(v):
+ """Verify the directory exists.
+
+ >>> IsDir()('/')
+ '/'
+ """
+ return os.path.isdir(v)
+
+
+@message('path does not exist', cls=PathInvalid)
+@truth
+def PathExists(v):
+ """Verify the path exists, regardless of its type.
+
+ >>> os.path.basename(PathExists()(__file__)).startswith('voluptuous.py')
+ True
+ >>> with raises(Invalid, 'path does not exist'):
+ ... PathExists()("random_filename_goes_here.py")
+ """
+ return os.path.exists(v)
+
+
+class Range(object):
+ """Limit a value to a range.
+
+    Either min or max may be omitted.
+    The boundary values themselves can be excluded from the accepted range
+    by passing min_included=False or max_included=False.
+
+ :raises Invalid: If the value is outside the range.
+
+ >>> s = Schema(Range(min=1, max=10, min_included=False))
+ >>> s(5)
+ 5
+ >>> s(10)
+ 10
+ >>> with raises(MultipleInvalid, 'value must be at most 10'):
+ ... s(20)
+ >>> with raises(MultipleInvalid, 'value must be higher than 1'):
+ ... s(1)
+ >>> with raises(MultipleInvalid, 'value must be lower than 10'):
+ ... Schema(Range(max=10, max_included=False))(20)
+ """
+
+ def __init__(self, min=None, max=None, min_included=True,
+ max_included=True, msg=None):
+ self.min = min
+ self.max = max
+ self.min_included = min_included
+ self.max_included = max_included
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min_included:
+ if self.min is not None and v < self.min:
+ raise RangeInvalid(
+ self.msg or 'value must be at least %s' % self.min)
+ else:
+ if self.min is not None and v <= self.min:
+ raise RangeInvalid(
+ self.msg or 'value must be higher than %s' % self.min)
+ if self.max_included:
+ if self.max is not None and v > self.max:
+ raise RangeInvalid(
+ self.msg or 'value must be at most %s' % self.max)
+ else:
+ if self.max is not None and v >= self.max:
+ raise RangeInvalid(
+ self.msg or 'value must be lower than %s' % self.max)
+ return v
+
+ def __repr__(self):
+ return ('Range(min=%r, max=%r, min_included=%r,'
+ ' max_included=%r, msg=%r)' % (self.min, self.max,
+ self.min_included,
+ self.max_included,
+ self.msg))
+
+
+class Clamp(object):
+ """Clamp a value to a range.
+
+    Either min or max may be omitted.
+
+ >>> s = Schema(Clamp(min=0, max=1))
+ >>> s(0.5)
+ 0.5
+ >>> s(5)
+ 1
+ >>> s(-1)
+ 0
+ """
+
+ def __init__(self, min=None, max=None, msg=None):
+ self.min = min
+ self.max = max
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min is not None and v < self.min:
+ v = self.min
+ if self.max is not None and v > self.max:
+ v = self.max
+ return v
+
+ def __repr__(self):
+ return 'Clamp(min=%s, max=%s)' % (self.min, self.max)
+
+
+class LengthInvalid(Invalid):
+ pass
+
+
+class Length(object):
+ """The length of a value must be in a certain range."""
+
+ def __init__(self, min=None, max=None, msg=None):
+ self.min = min
+ self.max = max
+ self.msg = msg
+
+ def __call__(self, v):
+ if self.min is not None and len(v) < self.min:
+ raise LengthInvalid(
+ self.msg or 'length of value must be at least %s' % self.min)
+ if self.max is not None and len(v) > self.max:
+ raise LengthInvalid(
+ self.msg or 'length of value must be at most %s' % self.max)
+ return v
+
+ def __repr__(self):
+ return 'Length(min=%s, max=%s)' % (self.min, self.max)
+
+
+class DatetimeInvalid(Invalid):
+ """The value is not a formatted datetime string."""
+
+
+class Datetime(object):
+ """Validate that the value matches the datetime format."""
+
+ DEFAULT_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+
+ def __init__(self, format=None, msg=None):
+ self.format = format or self.DEFAULT_FORMAT
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ datetime.datetime.strptime(v, self.format)
+ except (TypeError, ValueError):
+ raise DatetimeInvalid(
+ self.msg or 'value does not match'
+ ' expected format %s' % self.format)
+ return v
+
+ def __repr__(self):
+ return 'Datetime(format=%s)' % self.format
+
+
+class InInvalid(Invalid):
+ pass
+
+
+class In(object):
+ """Validate that a value is in a collection."""
+
+ def __init__(self, container, msg=None):
+ self.container = container
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ check = v not in self.container
+ except TypeError:
+ check = True
+ if check:
+ raise InInvalid(self.msg or 'value is not allowed')
+ return v
+
+ def __repr__(self):
+ return 'In(%s)' % (self.container,)
+
+
+class NotInInvalid(Invalid):
+ pass
+
+
+class NotIn(object):
+ """Validate that a value is not in a collection."""
+
+ def __init__(self, container, msg=None):
+ self.container = container
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ check = v in self.container
+ except TypeError:
+ check = True
+ if check:
+ raise NotInInvalid(self.msg or 'value is not allowed')
+ return v
+
+ def __repr__(self):
+ return 'NotIn(%s)' % (self.container,)
+
+
+def Lower(v):
+ """Transform a string to lower case.
+
+ >>> s = Schema(Lower)
+ >>> s('HI')
+ 'hi'
+ """
+ return str(v).lower()
+
+
+def Upper(v):
+ """Transform a string to upper case.
+
+ >>> s = Schema(Upper)
+ >>> s('hi')
+ 'HI'
+ """
+ return str(v).upper()
+
+
+def Capitalize(v):
+ """Capitalise a string.
+
+ >>> s = Schema(Capitalize)
+ >>> s('hello world')
+ 'Hello world'
+ """
+ return str(v).capitalize()
+
+
+def Title(v):
+ """Title case a string.
+
+ >>> s = Schema(Title)
+ >>> s('hello world')
+ 'Hello World'
+ """
+ return str(v).title()
+
+
+def Strip(v):
+ """Strip whitespace from a string.
+
+ >>> s = Schema(Strip)
+ >>> s(' hello world ')
+ 'hello world'
+ """
+ return str(v).strip()
+
+
+class DefaultTo(object):
+ """Sets a value to default_value if none provided.
+
+ >>> s = Schema(DefaultTo(42))
+ >>> s(None)
+ 42
+ >>> s = Schema(DefaultTo(list))
+ >>> s(None)
+ []
+ """
+
+ def __init__(self, default_value, msg=None):
+ self.default_value = default_factory(default_value)
+ self.msg = msg
+
+ def __call__(self, v):
+ if v is None:
+ v = self.default_value()
+ return v
+
+ def __repr__(self):
+ return 'DefaultTo(%s)' % (self.default_value(),)
+
+
+class SetTo(object):
+ """Set a value, ignoring any previous value.
+
+ >>> s = Schema(Any(int, SetTo(42)))
+ >>> s(2)
+ 2
+ >>> s("foo")
+ 42
+ """
+
+ def __init__(self, value):
+ self.value = default_factory(value)
+
+ def __call__(self, v):
+ return self.value()
+
+ def __repr__(self):
+ return 'SetTo(%s)' % (self.value(),)
+
+
+class ExactSequenceInvalid(Invalid):
+ pass
+
+
+class ExactSequence(object):
+ """Matches each element in a sequence against the corresponding element in
+ the validators.
+
+ :param msg: Message to deliver to user if validation fails.
+ :param kwargs: All other keyword arguments are passed to the sub-Schema
+ constructors.
+
+ >>> from voluptuous import *
+ >>> validate = Schema(ExactSequence([str, int, list, list]))
+ >>> validate(['hourly_report', 10, [], []])
+ ['hourly_report', 10, [], []]
+ >>> validate(('hourly_report', 10, [], []))
+ ('hourly_report', 10, [], [])
+ """
+
+ def __init__(self, validators, **kwargs):
+ self.validators = validators
+ self.msg = kwargs.pop('msg', None)
+ self._schemas = [Schema(val, **kwargs) for val in validators]
+
+ def __call__(self, v):
+ if not isinstance(v, (list, tuple)):
+ raise ExactSequenceInvalid(self.msg)
+ try:
+ v = type(v)(schema(x) for x, schema in zip(v, self._schemas))
+ except Invalid as e:
+ raise e if self.msg is None else ExactSequenceInvalid(self.msg)
+ return v
+
+ def __repr__(self):
+ return 'ExactSequence([%s])' % (", ".join(repr(v)
+ for v in self.validators))
+
+
+class Literal(object):
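+    """Check that the value matches the given literal exactly.
+
+    A dict or list node in a schema is normally compiled into a container
+    schema; wrapping it in Literal forces a plain equality check instead.
+    A minimal illustration (hypothetical literal):
+
+    >>> s = Schema(Literal({'a': 1}))
+    >>> s({'a': 1})
+    {'a': 1}
+    """
+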
+ def __init__(self, lit):
+ self.lit = lit
+
+ def __call__(self, value, msg=None):
+ if self.lit != value:
+ raise LiteralInvalid(
+                msg or '%s does not match %s' % (value, self.lit)
+ )
+ else:
+ return self.lit
+
+ def __str__(self):
+ return str(self.lit)
+
+ def __repr__(self):
+ return repr(self.lit)
+
+
+class Unique(object):
+ """Ensure an iterable does not contain duplicate items.
+
+    Only iterables convertible to a set are supported (native types and
+ objects with correct __eq__).
+
+    JSON does not support sets, so they need to be represented as arrays.
+    Unique allows ensuring that such an array does not contain duplicates.
+
+ >>> s = Schema(Unique())
+ >>> s([])
+ []
+ >>> s([1, 2])
+ [1, 2]
+ >>> with raises(Invalid, 'contains duplicate items: [1]'):
+ ... s([1, 1, 2])
+ >>> with raises(Invalid, "contains duplicate items: ['one']"):
+ ... s(['one', 'two', 'one'])
+ >>> with raises(Invalid, regex="^contains unhashable elements: "):
+ ... s([set([1, 2]), set([3, 4])])
+ >>> s('abc')
+ 'abc'
+ >>> with raises(Invalid, regex="^contains duplicate items: "):
+ ... s('aabbc')
+ """
+
+ def __init__(self, msg=None):
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ set_v = set(v)
+ except TypeError as e:
+ raise TypeInvalid(
+ self.msg or 'contains unhashable elements: {0}'.format(e))
+ if len(set_v) != len(v):
+ seen = set()
+ dupes = list(set(x for x in v if x in seen or seen.add(x)))
+ raise Invalid(
+ self.msg or 'contains duplicate items: {0}'.format(dupes))
+ return v
+
+ def __repr__(self):
+ return 'Unique()'
+
+
+class Set(object):
+ """Convert a list into a set.
+
+ >>> s = Schema(Set())
+ >>> s([]) == set([])
+ True
+ >>> s([1, 2]) == set([1, 2])
+ True
+ >>> with raises(Invalid, regex="^cannot be presented as set: "):
+ ... s([set([1, 2]), set([3, 4])])
+ """
+
+ def __init__(self, msg=None):
+ self.msg = msg
+
+ def __call__(self, v):
+ try:
+ set_v = set(v)
+ except Exception as e:
+ raise TypeInvalid(
+ self.msg or 'cannot be presented as set: {0}'.format(e))
+ return set_v
+
+ def __repr__(self):
+ return 'Set()'
+
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
diff --git a/python/which/LICENSE.txt b/python/which/LICENSE.txt
new file mode 100644
index 000000000..de85cd53a
--- /dev/null
+++ b/python/which/LICENSE.txt
@@ -0,0 +1,21 @@
+Copyright (c) 2002-2005 ActiveState Corp.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/python/which/MANIFEST.in b/python/which/MANIFEST.in
new file mode 100644
index 000000000..3b8970ab8
--- /dev/null
+++ b/python/which/MANIFEST.in
@@ -0,0 +1,3 @@
+include *.py *.cpp *.in which.exe Makefile* *.txt logo.jpg
+exclude *~
+recursive-include test *.txt *.py
diff --git a/python/which/Makefile.win b/python/which/Makefile.win
new file mode 100644
index 000000000..c400aafd4
--- /dev/null
+++ b/python/which/Makefile.win
@@ -0,0 +1,21 @@
+# Copyright (c) 2002-2003 ActiveState Corp.
+# Author: Trent Mick (TrentM@ActiveState.com)
+#
+# A Makefile to do this: launcher.cpp -> foo.exe
+
+APPNAME=which
+
+# for release:
+CFLAGS=-D_CONSOLE -D_MBCS -DWIN32 -W3 -Ox -DNDEBUG -D_NDEBUG -MD
+LDFLAGS=/subsystem:console kernel32.lib user32.lib gdi32.lib advapi32.lib shlwapi.lib
+# for debug:
+# CFLAGS = -D_CONSOLE -D_MBCS /DWIN32 /Zi /Od /DDEBUG /D_DEBUG /MDd
+# LDFLAGS += /DEBUG
+
+$(APPNAME).exe: launcher.cpp
+ cl -nologo $(CFLAGS) -c launcher.cpp
+ link -nologo $(LDFLAGS) launcher.obj -out:$(APPNAME).exe
+
+clean:
+ if exist launcher.obj; del launcher.obj
+ if exist $(APPNAME).exe; del $(APPNAME).exe
diff --git a/python/which/PKG-INFO b/python/which/PKG-INFO
new file mode 100644
index 000000000..bfa8baa28
--- /dev/null
+++ b/python/which/PKG-INFO
@@ -0,0 +1,21 @@
+Metadata-Version: 1.0
+Name: which
+Version: 1.1.0
+Summary: a portable GNU which replacement
+Home-page: http://trentm.com/projects/which/
+Author: Trent Mick
+Author-email: TrentM@ActiveState.com
+License: MIT License
+Description: This is a GNU which replacement with the following features:
+ - it is portable (Windows, Linux);
+ - it understands PATHEXT on Windows;
+        - it can print all matches on the PATH;
+ - it can note "near misses" on the PATH (e.g. files that match but
+          may not, say, have execute permissions); and
+ - it can be used as a Python module.
+
+Keywords: which,find,path,where
+Platform: Windows
+Platform: Linux
+Platform: Mac OS X
+Platform: Unix
diff --git a/python/which/README.txt b/python/which/README.txt
new file mode 100644
index 000000000..6ece7f6de
--- /dev/null
+++ b/python/which/README.txt
@@ -0,0 +1,229 @@
+which.py -- a portable GNU which replacement
+============================================
+
+Download the latest which.py packages from here:
+ (source) http://trentm.com/downloads/which/1.1.0/which-1.1.0.zip
+
+
+Home : http://trentm.com/projects/which/
+License : MIT (see LICENSE.txt)
+Platforms : Windows, Linux, Mac OS X, Unix
+Current Version : 1.1.0
+Dev Status : mature, has been heavily used in a commercial product for
+ over 2 years
+Requirements : Python >= 2.3 (http://www.activestate.com/ActivePython/)
+
+
+What's new?
+-----------
+
+I have moved hosting of `which.py` from my old [Starship
+pages](http://starship.python.net/~tmick/) to this site. These starter
+docs have been improved a little bit. See the [Change Log](#changelog)
+below for more.
+
+**WARNING**: If you are upgrading your `which.py` and you also use my
+[process.py](../process/) module, you must upgrade `process.py` as well
+because of the `_version_/__version__` change in v1.1.0.
+
+
+Why which.py?
+-------------
+
+`which.py` is a small GNU-which replacement. It has the following
+features:
+
+- it is portable (Windows, Linux, Mac OS X, Un*x);
+- it understands PATHEXT and "App Paths" registration on Windows
+ (i.e. it will find everything that `start` does from the command shell);
+- it can print all matches on the PATH;
+- it can note "near misses" on the PATH (e.g. files that match but may
+ not, say, have execute permissions); and
+- it can be used as a Python module.
+
+I also would be happy to have this be a replacement for the `which.py` in the
+Python CVS tree at `dist/src/Tools/scripts/which.py` which is
+Unix-specific and not usable as a module; and perhaps for inclusion in
+the stdlib.
+
+Please send any feedback to [Trent Mick](mailto:TrentM@ActiveState.com).
+
+
+Install Notes
+-------------
+
+Download the latest `which.py` source package, unzip it, and run
+`python setup.py install`:
+
+ unzip which-1.1.0.zip
+ cd which-1.1.0
+ python setup.py install
+
+If your install fails then please visit [the Troubleshooting
+FAQ](http://trentm.com/faq.html#troubleshooting-python-package-installation).
+
+`which.py` can be used both as a module and as a script. By default,
+`which.py` will be installed into your Python's `site-packages`
+directory so it can be used as a module. On *Windows only*, `which.py`
+(and the launcher stub `which.exe`) will be installed in the Python
+install dir to (hopefully) put `which` on your PATH.
+
+On Un*x platforms (including Linux and Mac OS X) there is often a
+`which` executable already on your PATH. To use this `which` instead of
+your system's on those platforms you can manually do one of the
+following:
+
+- Copy `which.py` to `which` somewhere on your PATH ahead of the system
+ `which`. This can be a symlink, as well:
+
+ ln -s /PATH/TO/site-packages/which.py /usr/local/bin/which
+
+- Python 2.4 users might want to use Python's new '-m' switch and set up
+  an alias:
+
+ alias which='python -m which'
+
+  or a stub script like this:
+
+ #!/bin/sh
+    python -m which "$@"
+
+
+Getting Started
+---------------
+
+Currently the best intro to using `which.py` as a module is its module
+documentation. Either install `which.py` and run:
+
+ pydoc which
+
+take a look at `which.py` in your editor or [here](which.py), or read
+on. Most commonly you'll use the `which()` method to find an
+executable:
+
+ >>> import which
+ >>> which.which("perl")
+ '/usr/local/bin/perl'
+
+Or you might want to know if you have multiple versions on your path:
+
+ >>> which.whichall("perl")
+ ['/usr/local/bin/perl', '/usr/bin/perl']
+
+Use `verbose` to see where your executable is being found. (On Windows
+this might not always be so obvious as your PATH environment variable.
+There is an "App Paths" area of the registry where the `start` command
+will find "registered" executables -- `which.py` mimics this.)
+
+ >>> which.whichall("perl", verbose=True)
+ [('/usr/local/bin/perl', 'from PATH element 10'),
+ ('/usr/bin/perl', 'from PATH element 15')]
+
+You can restrict the searched path:
+
+ >>> which.whichall("perl", path=["/usr/bin"])
+ ['/usr/bin/perl']
+
+There is a generator interface:
+
+ >>> for perl in which.whichgen("perl"):
+ ... print "found a perl here:", perl
+ ...
+ found a perl here: /usr/local/bin/perl
+ found a perl here: /usr/bin/perl
+
+An exception is raised if your executable is not found:
+
+ >>> which.which("fuzzywuzzy")
+ Traceback (most recent call last):
+ ...
+ which.WhichError: Could not find 'fuzzywuzzy' on the path.
+ >>>
+
+There are some other options too:
+
+ >>> help(which.which)
+ ...
+
+Run `which --help` to see command-line usage:
+
+ $ which --help
+ Show the full path of commands.
+
+ Usage:
+ which [<options>...] [<command-name>...]
+
+ Options:
+ -h, --help Print this help and exit.
+ -V, --version Print the version info and exit.
+
+ -a, --all Print *all* matching paths.
+ -v, --verbose Print out how matches were located and
+ show near misses on stderr.
+ -q, --quiet Just print out matches. I.e., do not print out
+ near misses.
+
+ -p <altpath>, --path=<altpath>
+ An alternative path (list of directories) may
+ be specified for searching.
+ -e <exts>, --exts=<exts>
+ Specify a list of extensions to consider instead
+ of the usual list (';'-separate list, Windows
+ only).
+
+ Show the full path to the program that would be run for each given
+ command name, if any. Which, like GNU's which, returns the number of
+ failed arguments, or -1 when no <command-name> was given.
+
+ Near misses include duplicates, non-regular files and (on Un*x)
+ files without executable access.
+
+
+Change Log
+----------
+
+### v1.1.0
+- Change version attributes and semantics. Before: had a _version_
+ tuple. After: __version__ is a string, __version_info__ is a tuple.
+
+### v1.0.3
+- Move hosting of which.py to trentm.com. Tweaks to associated bits
+ (README.txt, etc.)
+
+### v1.0.2:
+- Rename mainline handler function from _main() to main(). I can
+  conceive of it being called externally.
+
+### v1.0.1:
+- Add an optimization for Windows to allow the optional
+ specification of a list of exts to consider when searching the
+ path.
+
+### v1.0.0:
+- Simpler interface: What was which() is now called whichgen() -- it
+ is a generator of matches. The simpler which() and whichall()
+ non-generator interfaces were added.
+
+### v0.8.1:
+- API change: 0.8.0's API change making "verbose" output the default
+ was a mistake -- it breaks backward compatibility for existing
+ uses of which in scripts. This makes verbose, once again, optional
+ but NOT the default.
+
+### v0.8.0:
+- bug fix: "App Paths" lookup had been crippled in 0.7.0. Restore that.
+- feature/module API change: Now print out (and return for the module
+ interface) from where a match was found, e.g. "(from PATH element 3)".
+ The module interfaces now returns (match, from-where) tuples.
+- bug fix: --path argument was broken (-p shortform was fine)
+
+### v0.7.0:
+- bug fix: Handle "App Paths" registered executable that does not
+ exist.
+- feature: Allow an alternate PATH to be specified via 'path'
+ optional argument to which.which() and via -p|--path command line
+ option.
+
+### v0.6.1:
+- first public release
+
diff --git a/python/which/TODO.txt b/python/which/TODO.txt
new file mode 100644
index 000000000..6df2de7f7
--- /dev/null
+++ b/python/which/TODO.txt
@@ -0,0 +1,113 @@
+# High Priority
+
+- Figure out the script story on the various platforms. On Windows, look into
+  the launcher thing that effbot has. On Unix, don't install the script by
+  default. Users can always do "python -m which ..." with Python >= 2.4.
+ Suggest an alias that some folks might want to use for that.
+
+
+# Medium Priority
+
+- define __all__?
+- improve test suite
+- test with other versions of Python
+- get the PATHEXT attached extension to reflect the actual canonical
+ case of file matches on Windows, currently the extension from PATHEXT
+ is always uppercase
+- What to do with Change 145624 by shanec. It is a bit of a
+  bastardization. Maybe support it behind a special option, since it
+  changes the semantics.
+
+ > Change 145624 by shanec@shanec-ocelotl on 2005/05/24 16:51:55
+ >
+ > make which work better on OSX
+ > - add support for searching /Applications and /Network/Applications
+ > - add support for .app bundles
+ >
+ > Affected files ...
+ >
+ > ... //depot/main/Apps/Komodo-devel/src/python-sitelib/which.py#7 edit
+ >
+ > Differences ...
+ >
+ > ==== //depot/main/Apps/Komodo-devel/src/python-sitelib/which.py#7 (text) ====
+ >
+ > @@ -126,10 +126,11 @@
+ > sys.stderr.write("duplicate: %s (%s)\n" % potential)
+ > return None
+ > else:
+ > - if not stat.S_ISREG(os.stat(potential[0]).st_mode):
+ > + darwinApp = sys.platform == 'darwin' and potential[0][-4:]=='.app'
+ > + if not darwinApp and not stat.S_ISREG(os.stat(potential[0]).st_mode):
+ > if verbose:
+ > sys.stderr.write("not a regular file: %s (%s)\n" % potential)
+ > - elif not os.access(potential[0], os.X_OK):
+ > + elif not darwinApp and not os.access(potential[0], os.X_OK):
+ > if verbose:
+ > sys.stderr.write("no executable access: %s (%s)\n"\
+ > % potential)
+ > @@ -166,6 +167,9 @@
+ > path = os.environ.get("PATH", "").split(os.pathsep)
+ > if sys.platform.startswith("win"):
+ > path.insert(0, os.curdir) # implied by Windows shell
+ > + if sys.platform == 'darwin':
+ > + path.insert(0, '/Network/Applications')
+ > + path.insert(0, '/Applications')
+ > else:
+ > usingGivenPath = 1
+ >
+ > @@ -182,6 +186,9 @@
+ > exts = ['.COM', '.EXE', '.BAT']
+ > elif not isinstance(exts, list):
+ > raise TypeError("'exts' argument must be a list or None")
+ > + elif sys.platform == 'darwin':
+ > + if exts is None:
+ > + exts = ['.app']
+ > else:
+ > if exts is not None:
+ > raise WhichError("'exts' argument is not supported on "\
+ > @@ -202,7 +209,8 @@
+ > for ext in ['']+exts:
+ > absName = os.path.abspath(
+ > os.path.normpath(os.path.join(dirName, command+ext)))
+ > - if os.path.isfile(absName):
+ > + if os.path.isfile(absName) or (sys.platform == 'darwin' and \
+ > + absName[-4:]=='.app' and os.path.isdir(absName)):
+ > if usingGivenPath:
+ > fromWhere = "from given path element %d" % i
+ > elif not sys.platform.startswith("win"):
+
+ Here is a start with slight improvements:
+
+ > Index: which.py
+ > ===================================================================
+ > --- which.py (revision 270)
+ > +++ which.py (working copy)
+ > @@ -126,9 +126,18 @@
+ > sys.stderr.write("duplicate: %s (%s)\n" % potential)
+ > return None
+ > else:
+ > - if not stat.S_ISREG(os.stat(potential[0]).st_mode):
+ > + st_mode = os.stat(potential[0]).st_mode
+ > + isMacAppBundle = sys.platform == "darwin" \
+ > + and potential[0].endswith(".app") \
+ > + and stat.S_ISDIR(st_mode)
+ > + if not isMacAppBundle and not stat.S_ISREG(st_mode):
+ > if verbose:
+ > - sys.stderr.write("not a regular file: %s (%s)\n" % potential)
+ > + if sys.platform == "darwin":
+ > + sys.stderr.write("not a regular file or .app bundle: "
+ > + "%s (%s)\n" % potential)
+ > + else:
+ > + sys.stderr.write("not a regular file: %s (%s)\n"
+ > + % potential)
+ > elif not os.access(potential[0], os.X_OK):
+ > if verbose:
+ > sys.stderr.write("no executable access: %s (%s)\n"\
+
+
+# Low Priority
+
+- have a version for pre-generators (i.e. Python 2.1)
+- add a "logging" interface
+
diff --git a/python/which/build.py b/python/which/build.py
new file mode 100644
index 000000000..3c8f09d39
--- /dev/null
+++ b/python/which/build.py
@@ -0,0 +1,442 @@
+#!/usr/bin/env python
+# Copyright (c) 2002-2005 ActiveState
+# See LICENSE.txt for license details.
+
+"""
+ which.py dev build script
+
+ Usage:
+ python build.py [<options>...] [<targets>...]
+
+ Options:
+ --help, -h Print this help and exit.
+ --targets, -t List all available targets.
+
+ This is the primary build script for the which.py project. It exists
+ to assist in building, maintaining, and distributing this project.
+
+    It is intended to have Makefile semantics. I.e. 'python build.py'
+    will execute the default target, 'python build.py foo' will
+    build target foo, etc. However, there is no intelligent target
+ interdependency tracking (I suppose I could do that with function
+ attributes).
+"""
+
+import os
+from os.path import basename, dirname, splitext, isfile, isdir, exists, \
+ join, abspath, normpath
+import sys
+import getopt
+import types
+import getpass
+import shutil
+import glob
+import logging
+import re
+
+
+
+#---- exceptions
+
+class Error(Exception):
+ pass
+
+
+
+#---- globals
+
+log = logging.getLogger("build")
+
+_project_name_ = "which"
+
+
+
+#---- internal support routines
+
+def _get_trentm_com_dir():
+ """Return the path to the local trentm.com source tree."""
+ d = normpath(join(dirname(__file__), os.pardir, "trentm.com"))
+ if not isdir(d):
+ raise Error("could not find 'trentm.com' src dir at '%s'" % d)
+ return d
+
+def _get_local_bits_dir():
+ import imp
+ info = imp.find_module("tmconfig", [_get_trentm_com_dir()])
+ tmconfig = imp.load_module("tmconfig", *info)
+ return tmconfig.bitsDir
+
+def _get_project_bits_dir():
+ d = normpath(join(dirname(__file__), "bits"))
+ return d
+
+def _get_project_version():
+ import imp, os
+ data = imp.find_module(_project_name_, [os.path.dirname(__file__)])
+ mod = imp.load_module(_project_name_, *data)
+ return mod.__version__
+
+
+# Recipe: run (0.5.1) in /Users/trentm/tm/recipes/cookbook
+_RUN_DEFAULT_LOGSTREAM = ("RUN", "DEFAULT", "LOGSTREAM")
+def __run_log(logstream, msg, *args, **kwargs):
+ if not logstream:
+ pass
+ elif logstream is _RUN_DEFAULT_LOGSTREAM:
+ try:
+ log.debug(msg, *args, **kwargs)
+ except NameError:
+ pass
+ else:
+ logstream(msg, *args, **kwargs)
+
+def _run(cmd, logstream=_RUN_DEFAULT_LOGSTREAM):
+ """Run the given command.
+
+ "cmd" is the command to run
+ "logstream" is an optional logging stream on which to log the command.
+        If None, no logging is done. If unspecified, this looks for a Logger
+        instance named 'log' and logs the command on log.debug().
+
+    Raises OSError if the command returns a non-zero exit status.
+ """
+ __run_log(logstream, "running '%s'", cmd)
+ retval = os.system(cmd)
+ if hasattr(os, "WEXITSTATUS"):
+ status = os.WEXITSTATUS(retval)
+ else:
+ status = retval
+ if status:
+ #TODO: add std OSError attributes or pick more approp. exception
+ raise OSError("error running '%s': %r" % (cmd, status))
+
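+# Example (illustrative): _run('echo hi') logs "running 'echo hi'" via
+# log.debug() and raises OSError if the command exits non-zero.
+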
+def _run_in_dir(cmd, cwd, logstream=_RUN_DEFAULT_LOGSTREAM):
+ old_dir = os.getcwd()
+ try:
+ os.chdir(cwd)
+ __run_log(logstream, "running '%s' in '%s'", cmd, cwd)
+ _run(cmd, logstream=None)
+ finally:
+ os.chdir(old_dir)
+
+
+# Recipe: rmtree (0.5) in /Users/trentm/tm/recipes/cookbook
+def _rmtree_OnError(rmFunction, filePath, excInfo):
+ if excInfo[0] == OSError:
+ # presuming because file is read-only
+ os.chmod(filePath, 0777)
+ rmFunction(filePath)
+def _rmtree(dirname):
+ import shutil
+ shutil.rmtree(dirname, 0, _rmtree_OnError)
+
+
+# Recipe: pretty_logging (0.1) in /Users/trentm/tm/recipes/cookbook
+class _PerLevelFormatter(logging.Formatter):
+ """Allow multiple format string -- depending on the log level.
+
+ A "fmtFromLevel" optional arg is added to the constructor. It can be
+ a dictionary mapping a log record level to a format string. The
+ usual "fmt" argument acts as the default.
+ """
+ def __init__(self, fmt=None, datefmt=None, fmtFromLevel=None):
+ logging.Formatter.__init__(self, fmt, datefmt)
+ if fmtFromLevel is None:
+ self.fmtFromLevel = {}
+ else:
+ self.fmtFromLevel = fmtFromLevel
+ def format(self, record):
+ record.levelname = record.levelname.lower()
+ if record.levelno in self.fmtFromLevel:
+ #XXX This is a non-threadsafe HACK. Really the base Formatter
+ # class should provide a hook accessor for the _fmt
+ # attribute. *Could* add a lock guard here (overkill?).
+ _saved_fmt = self._fmt
+ self._fmt = self.fmtFromLevel[record.levelno]
+ try:
+ return logging.Formatter.format(self, record)
+ finally:
+ self._fmt = _saved_fmt
+ else:
+ return logging.Formatter.format(self, record)
+
+def _setup_logging():
+ hdlr = logging.StreamHandler()
+ defaultFmt = "%(name)s: %(levelname)s: %(message)s"
+ infoFmt = "%(name)s: %(message)s"
+ fmtr = _PerLevelFormatter(fmt=defaultFmt,
+ fmtFromLevel={logging.INFO: infoFmt})
+ hdlr.setFormatter(fmtr)
+ logging.root.addHandler(hdlr)
+ log.setLevel(logging.INFO)
+
+
+def _getTargets():
+ """Find all targets and return a dict of targetName:targetFunc items."""
+ targets = {}
+ for name, attr in sys.modules[__name__].__dict__.items():
+ if name.startswith('target_'):
+ targets[ name[len('target_'):] ] = attr
+ return targets
+
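+# E.g. a module-level "def target_foo(): ..." is exposed as target "foo"
+# and can then be run via "python build.py foo" (illustrative name).
+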
+def _listTargets(targets):
+ """Pretty print a list of targets."""
+ width = 77
+ nameWidth = 15 # min width
+ for name in targets.keys():
+ nameWidth = max(nameWidth, len(name))
+ nameWidth += 2 # space btwn name and doc
+ format = "%%-%ds%%s" % nameWidth
+ print format % ("TARGET", "DESCRIPTION")
+ for name, func in sorted(targets.items()):
+ doc = _first_paragraph(func.__doc__ or "", True)
+ if len(doc) > (width - nameWidth):
+ doc = doc[:(width-nameWidth-3)] + "..."
+ print format % (name, doc)
+
+
+# Recipe: first_paragraph (1.0.1) in /Users/trentm/tm/recipes/cookbook
+def _first_paragraph(text, join_lines=False):
+ """Return the first paragraph of the given text."""
+ para = text.lstrip().split('\n\n', 1)[0]
+ if join_lines:
+ lines = [line.strip() for line in para.splitlines(0)]
+ para = ' '.join(lines)
+ return para
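+
+# E.g. _first_paragraph("One.\nTwo.\n\nRest.", join_lines=True)
+# returns 'One. Two.' (illustrative).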
+
+
+
+#---- build targets
+
+def target_default():
+ target_all()
+
+def target_all():
+ """Build all release packages."""
+    log.info("target: all")
+ if sys.platform == "win32":
+ target_launcher()
+ target_sdist()
+ target_webdist()
+
+
+def target_clean():
+ """remove all build/generated bits"""
+ log.info("target: clean")
+ if sys.platform == "win32":
+ _run("nmake -f Makefile.win clean")
+
+ ver = _get_project_version()
+ dirs = ["dist", "build", "%s-%s" % (_project_name_, ver)]
+ for d in dirs:
+ print "removing '%s'" % d
+ if os.path.isdir(d): _rmtree(d)
+
+ patterns = ["*.pyc", "*~", "MANIFEST",
+ os.path.join("test", "*~"),
+ os.path.join("test", "*.pyc"),
+ ]
+ for pattern in patterns:
+ for file in glob.glob(pattern):
+ print "removing '%s'" % file
+ os.unlink(file)
+
+
+def target_launcher():
+ """Build the Windows launcher executable."""
+ log.info("target: launcher")
+ assert sys.platform == "win32", "'launcher' target only supported on Windows"
+ _run("nmake -f Makefile.win")
+
+
+def target_docs():
+ """Regenerate some doc bits from project-info.xml."""
+ log.info("target: docs")
+ _run("projinfo -f project-info.xml -R -o README.txt --force")
+ _run("projinfo -f project-info.xml --index-markdown -o index.markdown --force")
+
+
+def target_sdist():
+ """Build a source distribution."""
+ log.info("target: sdist")
+ target_docs()
+ bitsDir = _get_project_bits_dir()
+ _run("python setup.py sdist -f --formats zip -d %s" % bitsDir,
+ log.info)
+
+
+def target_webdist():
+ """Build a web dist package.
+
+    "Web dist" packages are zip files with a '.web' extension. All files in
+ the zip must be under a dir named after the project. There must be a
+ webinfo.xml file at <projname>/webinfo.xml. This file is "defined"
+ by the parsing in trentm.com/build.py.
+ """
+ assert sys.platform != "win32", "'webdist' not implemented for win32"
+ log.info("target: webdist")
+ bitsDir = _get_project_bits_dir()
+ buildDir = join("build", "webdist")
+ distDir = join(buildDir, _project_name_)
+ if exists(buildDir):
+ _rmtree(buildDir)
+ os.makedirs(distDir)
+
+ target_docs()
+
+ # Copy the webdist bits to the build tree.
+ manifest = [
+ "project-info.xml",
+ "index.markdown",
+ "LICENSE.txt",
+ "which.py",
+ "logo.jpg",
+ ]
+ for src in manifest:
+ if dirname(src):
+ dst = join(distDir, dirname(src))
+ os.makedirs(dst)
+ else:
+ dst = distDir
+ _run("cp %s %s" % (src, dst))
+
+ # Zip up the webdist contents.
+ ver = _get_project_version()
+ bit = abspath(join(bitsDir, "%s-%s.web" % (_project_name_, ver)))
+ if exists(bit):
+ os.remove(bit)
+ _run_in_dir("zip -r %s %s" % (bit, _project_name_), buildDir, log.info)
+
+
+def target_install():
+ """Use the setup.py script to install."""
+ log.info("target: install")
+ _run("python setup.py install")
+
+
+def target_upload_local():
+ """Update release bits to *local* trentm.com bits-dir location.
+
+ This is different from the "upload" target, which uploads release
+ bits remotely to trentm.com.
+ """
+ log.info("target: upload_local")
+ assert sys.platform != "win32", "'upload_local' not implemented for win32"
+
+ ver = _get_project_version()
+ localBitsDir = _get_local_bits_dir()
+ uploadDir = join(localBitsDir, _project_name_, ver)
+
+ bitsPattern = join(_get_project_bits_dir(),
+ "%s-*%s*" % (_project_name_, ver))
+ bits = glob.glob(bitsPattern)
+ if not bits:
+ log.info("no bits matching '%s' to upload", bitsPattern)
+ else:
+ if not exists(uploadDir):
+ os.makedirs(uploadDir)
+ for bit in bits:
+ _run("cp %s %s" % (bit, uploadDir), log.info)
+
+
+def target_upload():
+ """Upload binary and source distribution to trentm.com bits
+ directory.
+ """
+ log.info("target: upload")
+
+ ver = _get_project_version()
+ bitsDir = _get_project_bits_dir()
+ bitsPattern = join(bitsDir, "%s-*%s*" % (_project_name_, ver))
+ bits = glob.glob(bitsPattern)
+ if not bits:
+ log.info("no bits matching '%s' to upload", bitsPattern)
+ return
+
+ # Ensure have all the expected bits.
+ expectedBits = [
+ re.compile("%s-.*\.zip$" % _project_name_),
+ re.compile("%s-.*\.web$" % _project_name_)
+ ]
+ for expectedBit in expectedBits:
+ for bit in bits:
+ if expectedBit.search(bit):
+ break
+ else:
+ raise Error("can't find expected bit matching '%s' in '%s' dir"
+ % (expectedBit.pattern, bitsDir))
+
+ # Upload the bits.
+ user = "trentm"
+ host = "trentm.com"
+ remoteBitsBaseDir = "~/data/bits"
+ remoteBitsDir = join(remoteBitsBaseDir, _project_name_, ver)
+ if sys.platform == "win32":
+ ssh = "plink"
+ scp = "pscp -unsafe"
+ else:
+ ssh = "ssh"
+ scp = "scp"
+ _run("%s %s@%s 'mkdir -p %s'" % (ssh, user, host, remoteBitsDir), log.info)
+ for bit in bits:
+ _run("%s %s %s@%s:%s" % (scp, bit, user, host, remoteBitsDir),
+ log.info)
+
+
+def target_check_version():
+ """grep for version strings in source code
+
+ List all things that look like version strings in the source code.
+ Used for checking that versioning is updated across the board.
+ """
+ sources = [
+ "which.py",
+ "project-info.xml",
+ ]
+ pattern = r'[0-9]\+\(\.\|, \)[0-9]\+\(\.\|, \)[0-9]\+'
+ _run('grep -n "%s" %s' % (pattern, ' '.join(sources)), None)
+
+
+
+#---- mainline
+
+def build(targets=[]):
+ log.debug("build(targets=%r)" % targets)
+ available = _getTargets()
+ if not targets:
+ if available.has_key('default'):
+ return available['default']()
+ else:
+ log.warn("No default target available. Doing nothing.")
+ else:
+ for target in targets:
+ if available.has_key(target):
+ retval = available[target]()
+ if retval:
+ raise Error("Error running '%s' target: retval=%s"\
+ % (target, retval))
+ else:
+ raise Error("Unknown target: '%s'" % target)
+
+def main(argv):
+ _setup_logging()
+
+ # Process options.
+ optlist, targets = getopt.getopt(argv[1:], 'ht', ['help', 'targets'])
+ for opt, optarg in optlist:
+ if opt in ('-h', '--help'):
+ sys.stdout.write(__doc__ + '\n')
+ return 0
+ elif opt in ('-t', '--targets'):
+ return _listTargets(_getTargets())
+
+ return build(targets)
+
+if __name__ == "__main__":
+ sys.exit( main(sys.argv) )
+
diff --git a/python/which/launcher.cpp b/python/which/launcher.cpp
new file mode 100644
index 000000000..36bbbe866
--- /dev/null
+++ b/python/which/launcher.cpp
@@ -0,0 +1,404 @@
+/*
+ * Copyright (c) 2002-2003 ActiveState Corp.
+ * Author: Trent Mick (TrentM@ActiveState.com)
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/* Console launch executable.
+ *
+ * This program exists solely to launch:
+ * python <installdir>/<exename>.py <argv>
+ * on Windows. "<exename>.py" must be in the same directory.
+ *
+ * Rationale:
+ * - On some Windows flavours .py *can* be put on the PATHEXT to be
+ * able to find "<exename>.py" if it is on the PATH. This is fine
+ *   until you need shell redirection to work. It does NOT work for
+ *   extensions added to PATHEXT. Redirection *does* work for "python
+ * <script>.py" so we will try to do that.
+ */
+
+#ifdef WIN32
+ #include <windows.h>
+ #include <process.h>
+ #include <direct.h>
+ #include <shlwapi.h>
+#else /* linux */
+ #include <unistd.h>
+#endif /* WIN32 */
+#include <sys/stat.h>
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+
+//---- constants
+
+#define BUF_LENGTH 2048
+#define MAX_PYTHON_ARGS 50
+#define MAX_FILES 50
+#define MAXPATHLEN 1024
+#ifdef WIN32
+ #define SEP '\\'
+ #define ALTSEP '/'
+ // path list element separator
+ #define DELIM ';'
+#else /* linux */
+ #define SEP '/'
+ // path list element separator
+ #define DELIM ':'
+#endif
+
+#ifdef WIN32
+ #define spawnvp _spawnvp
+ #if defined(_MSC_VER) && _MSC_VER < 1900
+ #define snprintf _snprintf
+ #define vsnprintf _vsnprintf
+ #endif
+ //NOTE: this is for the stat *call* and the stat *struct*
+ #define stat _stat
+#endif
+
+
+//---- globals
+
+char* programName = NULL;
+char* programPath = NULL;
+#ifndef WIN32 /* i.e. linux */
+ extern char **environ; // the user environment
+#endif /* linux */
+
+//---- error logging functions
+
+void _LogError(const char* format ...)
+{
+ va_list ap;
+ va_start(ap, format);
+#if defined(WIN32) && defined(_WINDOWS)
+ // put up a MessageBox
+ char caption[BUF_LENGTH+1];
+ snprintf(caption, BUF_LENGTH, "Error in %s", programName);
+ char msg[BUF_LENGTH+1];
+ vsnprintf(msg, BUF_LENGTH, format, ap);
+ va_end(ap);
+ MessageBox(NULL, msg, caption, MB_OK | MB_ICONEXCLAMATION);
+#else
+ fprintf(stderr, "%s: error: ", programName);
+ vfprintf(stderr, format, ap);
+ va_end(ap);
+#endif /* WIN32 && _WINDOWS */
+}
+
+
+void _LogWarning(const char* format ...)
+{
+ va_list ap;
+ va_start(ap, format);
+#if defined(WIN32) && defined(_WINDOWS)
+ // put up a MessageBox
+ char caption[BUF_LENGTH+1];
+ snprintf(caption, BUF_LENGTH, "Warning in %s", programName);
+ char msg[BUF_LENGTH+1];
+ vsnprintf(msg, BUF_LENGTH, format, ap);
+ va_end(ap);
+ MessageBox(NULL, msg, caption, MB_OK | MB_ICONWARNING);
+#else
+ fprintf(stderr, "%s: warning: ", programName);
+ vfprintf(stderr, format, ap);
+ va_end(ap);
+#endif /* WIN32 && _WINDOWS */
+}
+
+
+
+//---- utilities functions
+
+/* _IsDir: Is the given dirname an existing directory */
+static int _IsDir(char *dirname)
+{
+#ifdef WIN32
+ DWORD dwAttrib;
+ dwAttrib = GetFileAttributes(dirname);
+ if (dwAttrib == -1) {
+ return 0;
+ }
+ if (dwAttrib & FILE_ATTRIBUTE_DIRECTORY) {
+ return 1;
+ }
+ return 0;
+#else /* i.e. linux */
+ struct stat buf;
+ if (stat(dirname, &buf) != 0)
+ return 0;
+ if (!S_ISDIR(buf.st_mode))
+ return 0;
+ return 1;
+#endif
+}
+
+
+/* _IsLink: Is the given filename a symbolic link */
+static int _IsLink(char *filename)
+{
+#ifdef WIN32
+ return 0;
+#else /* i.e. linux */
+ struct stat buf;
+ if (lstat(filename, &buf) != 0)
+ return 0;
+ if (!S_ISLNK(buf.st_mode))
+ return 0;
+ return 1;
+#endif
+}
+
+
+/* Is executable file
+ * On Linux: check 'x' permission. On Windows: just check existence.
+ */
+static int _IsExecutableFile(char *filename)
+{
+#ifdef WIN32
+ return (int)PathFileExists(filename);
+#else /* i.e. linux */
+ struct stat buf;
+ if (stat(filename, &buf) != 0)
+ return 0;
+ if (!S_ISREG(buf.st_mode))
+ return 0;
+ if ((buf.st_mode & 0111) == 0)
+ return 0;
+ return 1;
+#endif /* WIN32 */
+}
+
+
+/* _GetProgramPath: Determine the absolute path to the given program name.
+ *
+ * Takes into account the current working directory, etc.
+ * The implementations require the global 'programName' to be set.
+ */
+#ifdef WIN32
+ static char* _GetProgramPath(void)
+ {
+ //XXX this is ugly but I didn't want to use malloc, no reason
+ static char progPath[MAXPATHLEN+1];
+ // get absolute path to module
+ if (!GetModuleFileName(NULL, progPath, MAXPATHLEN)) {
+ _LogError("could not get absolute program name from "\
+ "GetModuleFileName\n");
+ exit(1);
+ }
+        // just need dirname
+        char* p;
+        for (p = progPath+strlen(progPath);
+             *p != SEP && *p != ALTSEP;
+             --p)
+        {
+            *p = '\0';
+        }
+        *p = '\0';   // remove the trailing SEP as well
+
+ return progPath;
+ }
+#else
+
+ /* _JoinPath requires that any buffer argument passed to it has at
+ least MAXPATHLEN + 1 bytes allocated. If this requirement is met,
+ it guarantees that it will never overflow the buffer. If stuff
+ is too long, buffer will contain a truncated copy of stuff.
+ */
+ static void
+ _JoinPath(char *buffer, char *stuff)
+ {
+ size_t n, k;
+ if (stuff[0] == SEP)
+ n = 0;
+ else {
+ n = strlen(buffer);
+ if (n > 0 && buffer[n-1] != SEP && n < MAXPATHLEN)
+ buffer[n++] = SEP;
+ }
+ k = strlen(stuff);
+ if (n + k > MAXPATHLEN)
+ k = MAXPATHLEN - n;
+ strncpy(buffer+n, stuff, k);
+ buffer[n+k] = '\0';
+ }
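+
+    /* E.g. with buffer == "/usr/local" and stuff == "bin", buffer becomes
+     * "/usr/local/bin"; with stuff == "/abs/path" it becomes "/abs/path"
+     * (illustrative; results longer than MAXPATHLEN are truncated). */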
+
+
+ static char*
+ _GetProgramPath(void)
+ {
+ /* XXX this routine does *no* error checking */
+ char* path = getenv("PATH");
+ static char progPath[MAXPATHLEN+1];
+
+ /* If there is no slash in the argv0 path, then we have to
+ * assume the program is on the user's $PATH, since there's no
+ * other way to find a directory to start the search from. If
+ * $PATH isn't exported, you lose.
+ */
+ if (strchr(programName, SEP)) {
+ strncpy(progPath, programName, MAXPATHLEN);
+ }
+ else if (path) {
+ int bufspace = MAXPATHLEN;
+ while (1) {
+ char *delim = strchr(path, DELIM);
+
+ if (delim) {
+ size_t len = delim - path;
+ if (len > bufspace) {
+ len = bufspace;
+ }
+ strncpy(progPath, path, len);
+ *(progPath + len) = '\0';
+ bufspace -= len;
+ }
+ else {
+ strncpy(progPath, path, bufspace);
+ }
+
+ _JoinPath(progPath, programName);
+ if (_IsExecutableFile(progPath)) {
+ break;
+ }
+
+ if (!delim) {
+ progPath[0] = '\0';
+ break;
+ }
+ path = delim + 1;
+ }
+ }
+ else {
+ progPath[0] = '\0';
+ }
+
+ // now we have to resolve a string of possible symlinks
+ // - we'll just handle the simple case of a single level of
+ // indirection
+ //
+ // XXX note this does not handle multiple levels of symlinks
+ // here is pseudo-code for that (please implement it :):
+ // while 1:
+ // if islink(progPath):
+ // linkText = readlink(progPath)
+        //             if isabsolute(linkText):
+        //                 progPath = linkText
+        //             else:
+        //                 # a relative link target is relative to the
+        //                 # directory containing the link
+        //                 progPath = os.path.join(dirname(progPath), linkText)
+ // else:
+ // break
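+        //
+        // A bounded C version of that loop might look like (a sketch,
+        // untested; depth is capped to guard against symlink cycles):
+        //
+        //     int depth;
+        //     for (depth = 0; depth < 32 && _IsLink(progPath); ++depth) {
+        //         char linkText[MAXPATHLEN+1];
+        //         ssize_t n = readlink(progPath, linkText, MAXPATHLEN);
+        //         if (n <= 0) break;
+        //         linkText[n] = '\0';
+        //         if (linkText[0] == SEP) {
+        //             strncpy(progPath, linkText, MAXPATHLEN);
+        //         } else {
+        //             char* slash = strrchr(progPath, SEP);
+        //             if (slash) *(slash+1) = '\0';
+        //             _JoinPath(progPath, linkText);
+        //         }
+        //     }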
+        if (_IsLink(progPath)) {
+            char newProgPath[MAXPATHLEN+1];
+            ssize_t len = readlink(progPath, newProgPath, MAXPATHLEN);
+            if (len > 0) {
+                newProgPath[len] = '\0'; /* readlink() does not NUL-terminate */
+                strncpy(progPath, newProgPath, MAXPATHLEN);
+            }
+        }
+
+
+ // prefix with the current working directory if the path is
+ // relative to conform with the Windows version of this
+ if (strlen(progPath) != 0 && progPath[0] != SEP) {
+ char cwd[MAXPATHLEN+1];
+ char tmp[MAXPATHLEN+1];
+ //XXX should check for failure retvals
+ getcwd(cwd, MAXPATHLEN);
+ snprintf(tmp, MAXPATHLEN, "%s%c%s", cwd, SEP, progPath);
+ strncpy(progPath, tmp, MAXPATHLEN);
+ }
+
+ // 'progPath' now contains the full path to the program *and* the program
+        // name. The latter is not desired.
+ char* pLetter = progPath + strlen(progPath);
+ for (;pLetter != progPath && *pLetter != SEP; --pLetter) {
+ /* do nothing */
+ }
+ *pLetter = '\0';
+
+ return progPath;
+ }
+#endif /* WIN32 */
+
+
+//---- mainline
+
+int main(int argc, char** argv)
+{
+ programName = argv[0];
+ programPath = _GetProgramPath();
+
+ // Determine the extension-less program basename.
+ // XXX Will not always handle app names with '.' in them (other than
+    //     the '.' for the extension).
+ char programNameNoExt[MAXPATHLEN+1];
+ char *pStart, *pEnd;
+ pStart = pEnd = programName + strlen(programName) - 1;
+ while (pStart != programName && *(pStart-1) != SEP) {
+ pStart--;
+ }
+ while (1) {
+ if (pEnd == pStart) {
+ pEnd = programName + strlen(programName) - 1;
+ break;
+ }
+ pEnd--;
+ if (*(pEnd+1) == '.') {
+ break;
+ }
+ }
+ strncpy(programNameNoExt, pStart, pEnd-pStart+1);
+ *(programNameNoExt+(pEnd-pStart+1)) = '\0';
+
+ // determine the full path to "<exename>.py"
+ char pyFile[MAXPATHLEN+1];
+ snprintf(pyFile, MAXPATHLEN, "%s%c%s.py", programPath, SEP,
+ programNameNoExt);
+
+ // Build the argument array for launching.
+ char* pythonArgs[MAX_PYTHON_ARGS+1];
+ int nPythonArgs = 0;
+ pythonArgs[nPythonArgs++] = "python";
+ pythonArgs[nPythonArgs++] = "-tt";
+ pythonArgs[nPythonArgs++] = pyFile;
+ for (int i = 1; i < argc; ++i) {
+ pythonArgs[nPythonArgs++] = argv[i];
+ }
+ pythonArgs[nPythonArgs++] = NULL;
+
+ return _spawnvp(_P_WAIT, pythonArgs[0], pythonArgs);
+}
+
+
+//---- mainline for win32 subsystem:windows app
+#ifdef WIN32
+ int WINAPI WinMain(
+ HINSTANCE hInstance, /* handle to current instance */
+ HINSTANCE hPrevInstance, /* handle to previous instance */
+ LPSTR lpCmdLine, /* pointer to command line */
+ int nCmdShow /* show state of window */
+ )
+ {
+ return main(__argc, __argv);
+ }
+#endif
+
diff --git a/python/which/logo.jpg b/python/which/logo.jpg
new file mode 100644
index 000000000..300c23f14
--- /dev/null
+++ b/python/which/logo.jpg
Binary files differ
diff --git a/python/which/setup.py b/python/which/setup.py
new file mode 100644
index 000000000..3f6d1072c
--- /dev/null
+++ b/python/which/setup.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# Copyright (c) 2002-2005 ActiveState Corp.
+# Author: Trent Mick (TrentM@ActiveState.com)
+
+"""Distutils setup script for 'which'."""
+
+import sys
+import os
+import shutil
+from distutils.core import setup
+
+
+#---- support routines
+
+def _getVersion():
+ import which
+ return which.__version__
+
+def _getBinDir():
+ """Return the current Python's bindir."""
+ if sys.platform.startswith("win"):
+ bindir = sys.prefix
+ else:
+ bindir = os.path.join(sys.prefix, "bin")
+ return bindir
+
+
+#---- setup mainline
+
+if sys.platform == "win32":
+ scripts = []
+ binFiles = ["which.exe", "which.py"]
+else:
+ #XXX Disable installing which as a script on non-Windows platforms.
+ # It can get in the way of the system which.
+ #
+ #if os.path.exists("which"):
+ # os.remove("which")
+ #shutil.copy2("which.py", "which")
+ #scripts = ["which"]
+ binFiles = []
+ scripts = []
+
+setup(name="which",
+ version=_getVersion(),
+ description="a portable GNU which replacement",
+ author="Trent Mick",
+ author_email="TrentM@ActiveState.com",
+ url="http://trentm.com/projects/which/",
+ license="MIT License",
+ platforms=["Windows", "Linux", "Mac OS X", "Unix"],
+ long_description="""\
+This is a GNU which replacement with the following features:
+ - it is portable (Windows, Linux);
+ - it understands PATHEXT on Windows;
+ - it can print <em>all</em> matches on the PATH;
+ - it can note "near misses" on the PATH (e.g. files that match but
+   may not, say, have execute permissions); and
+ - it can be used as a Python module.
+""",
+ keywords=["which", "find", "path", "where"],
+
+ py_modules=['which'],
+ scripts=scripts,
+      # Install the Windows script/executable bits as data files because
+      # the distutils-chosen scripts install dir on Windows,
+      # "<prefix>/Scripts", is just wrong.
+ data_files=[ (_getBinDir(), binFiles) ],
+ )
+
diff --git a/python/which/test/test_which.py b/python/which/test/test_which.py
new file mode 100644
index 000000000..b56976989
--- /dev/null
+++ b/python/which/test/test_which.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# Copyright (c) 2002-2003 ActiveState Corp.
+# Author: Trent Mick (TrentM@ActiveState.com)
+
+"""Test suite for which.py."""
+
+import sys
+import os
+import re
+import tempfile
+import unittest
+
+import testsupport
+
+#XXX:TODO
+# - def test_registry_success(self): ...App Paths setting
+# - def test_registry_noexist(self):
+# - test all the other options
+# - test on linux
+# - test the module API
+
+class WhichTestCase(unittest.TestCase):
+ def setUp(self):
+ """Create a temp directory with a couple test "commands".
+ The temp dir can be added to the PATH, etc, for testing purposes.
+ """
+ # Find the which.py to call.
+ whichPy = os.path.join(os.path.dirname(__file__),
+ os.pardir, "which.py")
+ self.which = sys.executable + " " + whichPy
+
+ # Setup the test environment.
+ self.tmpdir = tempfile.mktemp()
+ os.makedirs(self.tmpdir)
+ if sys.platform.startswith("win"):
+ self.testapps = ['whichtestapp1.exe',
+ 'whichtestapp2.exe',
+ 'whichtestapp3.wta']
+ else:
+ self.testapps = ['whichtestapp1', 'whichtestapp2']
+ for app in self.testapps:
+ path = os.path.join(self.tmpdir, app)
+ open(path, 'wb').write('\n')
+ os.chmod(path, 0755)
+
+ def tearDown(self):
+ testsupport.rmtree(self.tmpdir)
+
+ def test_opt_h(self):
+        output, error, retval = testsupport.run(self.which+' -h')
+ token = 'Usage:'
+ self.failUnless(output.find(token) != -1,
+ "'%s' was not found in 'which -h' output: '%s' "\
+ % (token, output))
+ self.failUnless(retval == 0,
+ "'which -h' did not return 0: retval=%d" % retval)
+
+ def test_opt_help(self):
+ output, error, retval = testsupport.run(self.which+' --help')
+ token = 'Usage:'
+ self.failUnless(output.find(token) != -1,
+ "'%s' was not found in 'which --help' output: '%s' "\
+ % (token, output))
+ self.failUnless(retval == 0,
+ "'which --help' did not return 0: retval=%d" % retval)
+
+ def test_opt_version(self):
+ output, error, retval = testsupport.run(self.which+' --version')
+ versionRe = re.compile("^which \d+\.\d+\.\d+$")
+ versionMatch = versionRe.search(output.strip())
+ self.failUnless(versionMatch,
+ "Version, '%s', from 'which --version' does not "\
+ "match pattern, '%s'."\
+ % (output.strip(), versionRe.pattern))
+ self.failUnless(retval == 0,
+ "'which --version' did not return 0: retval=%d"\
+ % retval)
+
+ def test_no_args(self):
+ output, error, retval = testsupport.run(self.which)
+ self.failUnless(retval == -1,
+ "'which' with no args should return -1: retval=%d"\
+ % retval)
+
+ def test_one_failure(self):
+ output, error, retval = testsupport.run(
+ self.which+' whichtestapp1')
+ self.failUnless(retval == 1,
+ "One failure did not return 1: retval=%d" % retval)
+
+ def test_two_failures(self):
+ output, error, retval = testsupport.run(
+ self.which+' whichtestapp1 whichtestapp2')
+ self.failUnless(retval == 2,
+ "Two failures did not return 2: retval=%d" % retval)
+
+ def _match(self, path1, path2):
+ #print "_match: %r =?= %r" % (path1, path2)
+ if sys.platform.startswith('win'):
+ path1 = os.path.normpath(os.path.normcase(path1))
+ path2 = os.path.normpath(os.path.normcase(path2))
+ path1 = os.path.splitext(path1)[0]
+ path2 = os.path.splitext(path2)[0]
+ return path1 == path2
+ else:
+ return os.path.samefile(path1, path2)
+
+ def test_one_success(self):
+ os.environ["PATH"] += os.pathsep + self.tmpdir
+ output, error, retval = testsupport.run(self.which+' -q whichtestapp1')
+ expectedOutput = os.path.join(self.tmpdir, "whichtestapp1")
+ self.failUnless(self._match(output.strip(), expectedOutput),
+ "Output, %r, and expected output, %r, do not match."\
+ % (output.strip(), expectedOutput))
+ self.failUnless(retval == 0,
+ "'which ...' should have returned 0: retval=%d" % retval)
+
+ def test_two_successes(self):
+ os.environ["PATH"] += os.pathsep + self.tmpdir
+ apps = ['whichtestapp1', 'whichtestapp2']
+ output, error, retval = testsupport.run(
+ self.which + ' -q ' + ' '.join(apps))
+ lines = output.strip().split("\n")
+ for app, line in zip(apps, lines):
+ expected = os.path.join(self.tmpdir, app)
+ self.failUnless(self._match(line, expected),
+ "Output, %r, and expected output, %r, do not match."\
+ % (line, expected))
+ self.failUnless(retval == 0,
+ "'which ...' should have returned 0: retval=%d" % retval)
+
+ if sys.platform.startswith("win"):
+ def test_PATHEXT_failure(self):
+ os.environ["PATH"] += os.pathsep + self.tmpdir
+ output, error, retval = testsupport.run(self.which+' whichtestapp3')
+ self.failUnless(retval == 1,
+ "'which ...' should have returned 1: retval=%d" % retval)
+
+ def test_PATHEXT_success(self):
+ os.environ["PATH"] += os.pathsep + self.tmpdir
+ os.environ["PATHEXT"] += os.pathsep + '.wta'
+ output, error, retval = testsupport.run(self.which+' whichtestapp3')
+ expectedOutput = os.path.join(self.tmpdir, "whichtestapp3")
+ self.failUnless(self._match(output.strip(), expectedOutput),
+ "Output, %r, and expected output, %r, do not match."\
+ % (output.strip(), expectedOutput))
+ self.failUnless(retval == 0,
+ "'which ...' should have returned 0: retval=%d" % retval)
+
+ def test_exts(self):
+ os.environ["PATH"] += os.pathsep + self.tmpdir
+ output, error, retval = testsupport.run(self.which+' -e .wta whichtestapp3')
+ expectedOutput = os.path.join(self.tmpdir, "whichtestapp3")
+ self.failUnless(self._match(output.strip(), expectedOutput),
+ "Output, %r, and expected output, %r, do not match."\
+ % (output.strip(), expectedOutput))
+ self.failUnless(retval == 0,
+ "'which ...' should have returned 0: retval=%d" % retval)
+
+
+
+def suite():
+ """Return a unittest.TestSuite to be used by test.py."""
+ return unittest.makeSuite(WhichTestCase)
+
+if __name__ == "__main__":
+ unittest.main()
+
diff --git a/python/which/test/testsupport.py b/python/which/test/testsupport.py
new file mode 100644
index 000000000..8ff519fad
--- /dev/null
+++ b/python/which/test/testsupport.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright (c) 2002-2003 ActiveState Corp.
+# Author: Trent Mick (TrentM@ActiveState.com)
+
+import os
+import sys
+import types
+
+
+#---- Support routines
+
+def _escapeArg(arg):
+ """Escape the given command line argument for the shell."""
+ #XXX There is a *lot* more that we should escape here.
+ return arg.replace('"', r'\"')
+
+
+def _joinArgv(argv):
+ r"""Join an arglist to a string appropriate for running.
+ >>> import os
+ >>> _joinArgv(['foo', 'bar "baz'])
+ 'foo "bar \\"baz"'
+ """
+ cmdstr = ""
+ for arg in argv:
+ if ' ' in arg:
+ cmdstr += '"%s"' % _escapeArg(arg)
+ else:
+ cmdstr += _escapeArg(arg)
+ cmdstr += ' '
+ if cmdstr.endswith(' '): cmdstr = cmdstr[:-1] # strip trailing space
+ return cmdstr
+
+
+def run(argv):
+ """Prepare and run the given arg vector, 'argv', and return the
+ results. Returns (<stdout lines>, <stderr lines>, <return value>).
+ Note: 'argv' may also just be the command string.
+ """
+ if type(argv) in (types.ListType, types.TupleType):
+ cmd = _joinArgv(argv)
+ else:
+ cmd = argv
+ if sys.platform.startswith('win'):
+ i, o, e = os.popen3(cmd)
+ output = o.read()
+ error = e.read()
+ i.close()
+ e.close()
+ try:
+ retval = o.close()
+ except IOError:
+ # IOError is raised iff the spawned app returns -1. Go
+ # figure.
+ retval = -1
+ if retval is None:
+ retval = 0
+ else:
+ import popen2
+ p = popen2.Popen3(cmd, 1)
+ i, o, e = p.tochild, p.fromchild, p.childerr
+ output = o.read()
+ error = e.read()
+ i.close()
+ o.close()
+ e.close()
+ retval = (p.wait() & 0xFF00) >> 8
+        if retval > 2**7: # 8-bit signed two's-complement conversion
+ retval -= 2**8
+ return output, error, retval
+
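+# E.g. output, error, retval = run('python -c "print 42"') gives
+# output == '42\n', error == '' and retval == 0 (illustrative).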
+
+def _rmtreeOnError(rmFunction, filePath, excInfo):
+ if excInfo[0] == OSError:
+ # presuming because file is read-only
+ os.chmod(filePath, 0777)
+ rmFunction(filePath)
+
+def rmtree(dirname):
+ import shutil
+ shutil.rmtree(dirname, 0, _rmtreeOnError)
+
+
diff --git a/python/which/which.py b/python/which/which.py
new file mode 100644
index 000000000..8dd790518
--- /dev/null
+++ b/python/which/which.py
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+# Copyright (c) 2002-2005 ActiveState Corp.
+# See LICENSE.txt for license details.
+# Author:
+# Trent Mick (TrentM@ActiveState.com)
+# Home:
+# http://trentm.com/projects/which/
+
+r"""Find the full path to commands.
+
+which(command, path=None, verbose=0, exts=None)
+ Return the full path to the first match of the given command on the
+ path.
+
+whichall(command, path=None, verbose=0, exts=None)
+ Return a list of full paths to all matches of the given command on
+ the path.
+
+whichgen(command, path=None, verbose=0, exts=None)
+ Return a generator which will yield full paths to all matches of the
+ given command on the path.
+
+By default the PATH environment variable is searched (as well as, on
+Windows, the AppPaths key in the registry), but a specific 'path' list
+to search may be specified as well. On Windows, the PATHEXT environment
+variable is applied as appropriate.
+
+If "verbose" is true then a tuple of the form
+ (<fullpath>, <matched-where-description>)
+is returned for each match. The latter element is a textual description
+of where the match was found. For example:
+ from PATH element 0
+ from HKLM\SOFTWARE\...\perl.exe
+"""
+
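+# A minimal usage sketch (the example paths are hypothetical):
+#
+#     >>> import which
+#     >>> which.which("perl")
+#     '/usr/bin/perl'
+#     >>> which.whichall("perl")
+#     ['/usr/bin/perl', '/usr/local/bin/perl']
+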
+_cmdlnUsage = """
+ Show the full path of commands.
+
+ Usage:
+ which [<options>...] [<command-name>...]
+
+ Options:
+ -h, --help Print this help and exit.
+ -V, --version Print the version info and exit.
+
+ -a, --all Print *all* matching paths.
+ -v, --verbose Print out how matches were located and
+ show near misses on stderr.
+ -q, --quiet Just print out matches. I.e., do not print out
+ near misses.
+
+ -p <altpath>, --path=<altpath>
+ An alternative path (list of directories) may
+ be specified for searching.
+ -e <exts>, --exts=<exts>
+ Specify a list of extensions to consider instead
+ of the usual list (';'-separate list, Windows
+ only).
+
+ Show the full path to the program that would be run for each given
+ command name, if any. Which, like GNU's which, returns the number of
+ failed arguments, or -1 when no <command-name> was given.
+
+ Near misses include duplicates, non-regular files and (on Un*x)
+ files without executable access.
+"""
+
+__revision__ = "$Id: which.py 430 2005-08-20 03:11:58Z trentm $"
+__version_info__ = (1, 1, 0)
+__version__ = '.'.join(map(str, __version_info__))
+
+import os
+import sys
+import getopt
+import stat
+
+
+#---- exceptions
+
+class WhichError(Exception):
+ pass
+
+
+
+#---- internal support stuff
+
+def _getRegisteredExecutable(exeName):
+    """Windows allows application paths to be registered in the registry."""
+ registered = None
+ if sys.platform.startswith('win'):
+ if os.path.splitext(exeName)[1].lower() != '.exe':
+ exeName += '.exe'
+ import _winreg
+ try:
+ key = "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\" +\
+ exeName
+ value = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)
+ registered = (value, "from HKLM\\"+key)
+ except _winreg.error:
+ pass
+ if registered and not os.path.exists(registered[0]):
+ registered = None
+ return registered
+
+def _samefile(fname1, fname2):
+ if sys.platform.startswith('win'):
+ return ( os.path.normpath(os.path.normcase(fname1)) ==\
+ os.path.normpath(os.path.normcase(fname2)) )
+ else:
+ return os.path.samefile(fname1, fname2)
+
+def _cull(potential, matches, verbose=0):
+ """Cull inappropriate matches. Possible reasons:
+ - a duplicate of a previous match
+ - not a disk file
+ - not executable (non-Windows)
+ If 'potential' is approved it is returned and added to 'matches'.
+ Otherwise, None is returned.
+ """
+ for match in matches: # don't yield duplicates
+ if _samefile(potential[0], match[0]):
+ if verbose:
+ sys.stderr.write("duplicate: %s (%s)\n" % potential)
+ return None
+ else:
+ if not stat.S_ISREG(os.stat(potential[0]).st_mode):
+ if verbose:
+ sys.stderr.write("not a regular file: %s (%s)\n" % potential)
+ elif not os.access(potential[0], os.X_OK):
+ if verbose:
+ sys.stderr.write("no executable access: %s (%s)\n"\
+ % potential)
+ else:
+ matches.append(potential)
+ return potential
+
+
+#---- module API
+
+def whichgen(command, path=None, verbose=0, exts=None):
+ """Return a generator of full paths to the given command.
+
+    "command" is the name of the executable to search for.
+    "path" is an optional alternate path list to search. The default is
+        to use the PATH environment variable.
+ "verbose", if true, will cause a 2-tuple to be returned for each
+ match. The second element is a textual description of where the
+ match was found.
+ "exts" optionally allows one to specify a list of extensions to use
+ instead of the standard list for this system. This can
+ effectively be used as an optimization to, for example, avoid
+ stat's of "foo.vbs" when searching for "foo" and you know it is
+ not a VisualBasic script but ".vbs" is on PATHEXT. This option
+ is only supported on Windows.
+
+ This method returns a generator which yields either full paths to
+ the given command or, if verbose, tuples of the form (<path to
+ command>, <where path found>).
+ """
+ matches = []
+ if path is None:
+ usingGivenPath = 0
+ path = os.environ.get("PATH", "").split(os.pathsep)
+ if sys.platform.startswith("win"):
+ path.insert(0, os.curdir) # implied by Windows shell
+ else:
+ usingGivenPath = 1
+
+ # Windows has the concept of a list of extensions (PATHEXT env var).
+ if sys.platform.startswith("win"):
+ if exts is None:
+ exts = os.environ.get("PATHEXT", "").split(os.pathsep)
+        # If '.exe' is not in exts then this is presumably Win9x or a
+        # bogus PATHEXT; use a reasonable default.
+ for ext in exts:
+ if ext.lower() == ".exe":
+ break
+ else:
+ exts = ['.COM', '.EXE', '.BAT']
+ elif not isinstance(exts, list):
+ raise TypeError("'exts' argument must be a list or None")
+ else:
+ if exts is not None:
+ raise WhichError("'exts' argument is not supported on "\
+ "platform '%s'" % sys.platform)
+ exts = []
+
+ # File name cannot have path separators because PATH lookup does not
+ # work that way.
+ if os.sep in command or os.altsep and os.altsep in command:
+ pass
+ else:
+ for i in range(len(path)):
+ dirName = path[i]
+            # On Windows the dirName *could* be quoted; drop the quotes.
+ if sys.platform.startswith("win") and len(dirName) >= 2\
+ and dirName[0] == '"' and dirName[-1] == '"':
+ dirName = dirName[1:-1]
+ for ext in ['']+exts:
+ absName = os.path.abspath(
+ os.path.normpath(os.path.join(dirName, command+ext)))
+ if os.path.isfile(absName):
+ if usingGivenPath:
+ fromWhere = "from given path element %d" % i
+ elif not sys.platform.startswith("win"):
+ fromWhere = "from PATH element %d" % i
+ elif i == 0:
+ fromWhere = "from current directory"
+ else:
+ fromWhere = "from PATH element %d" % (i-1)
+ match = _cull((absName, fromWhere), matches, verbose)
+ if match:
+ if verbose:
+ yield match
+ else:
+ yield match[0]
+ match = _getRegisteredExecutable(command)
+ if match is not None:
+ match = _cull(match, matches, verbose)
+ if match:
+ if verbose:
+ yield match
+ else:
+ yield match[0]
+
+
+def which(command, path=None, verbose=0, exts=None):
+ """Return the full path to the first match of the given command on
+ the path.
+
+    "command" is the name of the executable to search for.
+    "path" is an optional alternate path list to search. The default is
+        to use the PATH environment variable.
+ "verbose", if true, will cause a 2-tuple to be returned. The second
+ element is a textual description of where the match was found.
+ "exts" optionally allows one to specify a list of extensions to use
+ instead of the standard list for this system. This can
+ effectively be used as an optimization to, for example, avoid
+ stat's of "foo.vbs" when searching for "foo" and you know it is
+ not a VisualBasic script but ".vbs" is on PATHEXT. This option
+ is only supported on Windows.
+
+ If no match is found for the command, a WhichError is raised.
+ """
+ try:
+ match = whichgen(command, path, verbose, exts).next()
+ except StopIteration:
+ raise WhichError("Could not find '%s' on the path." % command)
+ return match
+
+
+def whichall(command, path=None, verbose=0, exts=None):
+ """Return a list of full paths to all matches of the given command
+ on the path.
+
+    "command" is the name of the executable to search for.
+    "path" is an optional alternate path list to search. The default is
+        to use the PATH environment variable.
+ "verbose", if true, will cause a 2-tuple to be returned for each
+ match. The second element is a textual description of where the
+ match was found.
+ "exts" optionally allows one to specify a list of extensions to use
+ instead of the standard list for this system. This can
+ effectively be used as an optimization to, for example, avoid
+ stat's of "foo.vbs" when searching for "foo" and you know it is
+ not a VisualBasic script but ".vbs" is on PATHEXT. This option
+ is only supported on Windows.
+ """
+ return list( whichgen(command, path, verbose, exts) )
+
+
+
+#---- mainline
+
+def main(argv):
+ all = 0
+ verbose = 0
+ altpath = None
+ exts = None
+ try:
+ optlist, args = getopt.getopt(argv[1:], 'haVvqp:e:',
+ ['help', 'all', 'version', 'verbose', 'quiet', 'path=', 'exts='])
+ except getopt.GetoptError, msg:
+ sys.stderr.write("which: error: %s. Your invocation was: %s\n"\
+ % (msg, argv))
+ sys.stderr.write("Try 'which --help'.\n")
+ return 1
+ for opt, optarg in optlist:
+ if opt in ('-h', '--help'):
+ print _cmdlnUsage
+ return 0
+ elif opt in ('-V', '--version'):
+ print "which %s" % __version__
+ return 0
+ elif opt in ('-a', '--all'):
+ all = 1
+ elif opt in ('-v', '--verbose'):
+ verbose = 1
+ elif opt in ('-q', '--quiet'):
+ verbose = 0
+ elif opt in ('-p', '--path'):
+ if optarg:
+ altpath = optarg.split(os.pathsep)
+ else:
+ altpath = []
+ elif opt in ('-e', '--exts'):
+ if optarg:
+ exts = optarg.split(os.pathsep)
+ else:
+ exts = []
+
+ if len(args) == 0:
+ return -1
+
+ failures = 0
+ for arg in args:
+ #print "debug: search for %r" % arg
+ nmatches = 0
+ for match in whichgen(arg, path=altpath, verbose=verbose, exts=exts):
+ if verbose:
+ print "%s (%s)" % match
+ else:
+ print match
+ nmatches += 1
+ if not all:
+ break
+ if not nmatches:
+ failures += 1
+ return failures
+
+
+if __name__ == "__main__":
+ sys.exit( main(sys.argv) )
+
+